diff --git a/.github/dependabot.yml b/.github/dependabot.yml index a73d1dc4e..7d2903d29 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -1,3 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + version: 2 updates: - package-ecosystem: cargo diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml index 4aab9cee7..b5744e863 100644 --- a/.github/workflows/rust.yml +++ b/.github/workflows/rust.yml @@ -1,3 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ name: Rust on: [push, pull_request] @@ -10,7 +27,7 @@ jobs: - uses: actions/checkout@v4 - name: Setup Rust Toolchain uses: ./.github/actions/setup-builder - - run: cargo fmt -- --check + - run: cargo fmt --all -- --check lint: runs-on: ubuntu-latest @@ -20,6 +37,14 @@ jobs: uses: ./.github/actions/setup-builder - run: cargo clippy --all-targets --all-features -- -D warnings + benchmark-lint: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: Setup Rust Toolchain + uses: ./.github/actions/setup-builder + - run: cd sqlparser_bench && cargo clippy --all-targets --all-features -- -D warnings + compile: runs-on: ubuntu-latest steps: @@ -68,38 +93,3 @@ jobs: use-tool-cache: true - name: Test run: cargo test --all-features - - test-coverage: - runs-on: ubuntu-latest - steps: - - name: Checkout - uses: actions/checkout@v4 - - name: Setup Rust Toolchain - uses: ./.github/actions/setup-builder - with: - rust-version: stable - - name: Install Tarpaulin - uses: actions-rs/install@v0.1 - with: - crate: cargo-tarpaulin - version: 0.14.2 - use-tool-cache: true - - name: Coverage - run: cargo tarpaulin -o Lcov --output-dir ./coverage - - name: Coveralls - uses: coverallsapp/github-action@master - with: - github-token: ${{ secrets.GITHUB_TOKEN }} - - publish-crate: - if: startsWith(github.ref, 'refs/tags/v0') - runs-on: ubuntu-latest - needs: [test] - steps: - - uses: actions/checkout@v4 - - name: Setup Rust Toolchain - uses: ./.github/actions/setup-builder - - name: Publish - shell: bash - run: | - cargo publish --token ${{ secrets.CRATES_TOKEN }} diff --git a/.gitignore b/.gitignore index 4c6821d47..f705d0b0d 100644 --- a/.gitignore +++ b/.gitignore @@ -3,6 +3,7 @@ /target/ /sqlparser_bench/target/ /derive/target/ +dev/dist # Remove Cargo.lock from gitignore if creating an executable, leave it for libraries # More information here http://doc.crates.io/guide.html#cargotoml-vs-cargolock diff --git a/CHANGELOG.md b/CHANGELOG.md index 07142602d..a5511a053 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -18,1183 +18,19 @@ --> # Changelog -All notable changes to this project will be documented in this file. +All notable changes to this project will be documented in one of the linked +files. -The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project aims to adhere to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). +This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). Given that the parser produces a typed AST, any changes to the AST will -technically be breaking and thus will result in a `0.(N+1)` version. We document -changes that break via addition as "Added". +technically be breaking and thus will result in a `0.(N+1)` version. -## [Unreleased] -Check https://github.com/sqlparser-rs/sqlparser-rs/commits/main for undocumented changes. - -## [0.51.0] 2024-09-11 -As always, huge props to @iffyio @jmhain and @lovasoa for their help reviewing and merging PRs 🙏. -Without them this project would not be possible. 
- -Reminder: we are in the final phases of moving sqlparser-rs into the Apache -DataFusion project: https://github.com/sqlparser-rs/sqlparser-rs/issues/1294 - -### Fixed -* Fix Hive table comment should be after table column definitions (#1413) - Thanks @git-hulk -* Fix stack overflow in `parse_subexpr` (#1410) - Thanks @eejbyfeldt -* Fix `INTERVAL` parsing to support expressions and units via dialect (#1398) - Thanks @samuelcolvin -* Fix identifiers starting with `$` should be regarded as a placeholder in SQLite (#1402) - Thanks @git-hulk - -### Added -* Support for MSSQL table options (#1414) - Thanks @bombsimon -* Test showing how negative constants are parsed (#1421) - Thanks @alamb -* Support databricks dialect to dialect_from_str (#1416) - Thanks @milenkovicmalamb -* Support `DROP|CLEAR|MATERIALIZE PROJECTION` syntax for ClickHouse (#1417) - Thanks @git-hulk -* Support postgres `TRUNCATE` syntax (#1406) - Thanks @tobyhede -* Support `CREATE INDEX` with clause (#1389) - Thanks @lewiszlw -* Support parsing `CLUSTERED BY` clause for Hive (#1397) - Thanks @git-hulk -* Support different `USE` statement syntaxes (#1387) - Thanks @kacpermuda -* Support `ADD PROJECTION` syntax for ClickHouse (#1390) - Thanks @git-hulk - -### Changed -* Implement common traits for OneOrManyWithParens (#1368) - Thanks @gstvg -* Cleanup parse_statement (#1407) - Thanks @samuelcolvin -* Allow `DateTimeField::Custom` with `EXTRACT` in Postgres (#1394) - Thanks @samuelcolvin - - -## [0.50.0] 2024-08-15 -Again, huge props to @iffyio @jmhain and @lovasoa for their help reviewing and merging PRs 🙏. -Without them this project would not be possible. - -Reminder: we are in the process of moving sqlparser to be governed as part of the Apache -DataFusion project: https://github.com/sqlparser-rs/sqlparser-rs/issues/1294 - -### Fixed -* Clippy 1.80 warnings (#1357) - Thanks @lovasoa - -### Added -* Support `STRUCT` and list of structs for DuckDB dialect (#1372) - Thanks @jayzhan211 -* Support custom lexical precedence in PostgreSQL dialect (#1379) - Thanks @samuelcolvin -* Support `FREEZE|UNFREEZE PARTITION` syntax for ClickHouse (#1380) - Thanks @git-hulk -* Support scale in `CEIL` and `FLOOR` functions (#1377) - Thanks @seve-martinez -* Support `CREATE TRIGGER` and `DROP TRIGGER` statements (#1352) - Thanks @LucaCappelletti94 -* Support `EXTRACT` syntax for snowflake (#1374) - Thanks @seve-martinez -* Support `ATTACH` / `DETACH PARTITION` for ClickHouse (#1362) - Thanks @git-hulk -* Support Dialect level precedence, update Postgres Dialect to match Postgres (#1360) - Thanks @samuelcolvin -* Support parsing empty map literal syntax for DuckDB and Generic dialects (#1361) - Thanks @goldmedal -* Support `SETTINGS` clause for ClickHouse table-valued functions (#1358) - Thanks @Jesse-Bakker -* Support `OPTIMIZE TABLE` statement for ClickHouse (#1359) - Thanks @git-hulk -* Support `ON CLUSTER` in `ALTER TABLE` for ClickHouse (#1342) - Thanks @git-hulk -* Support `GLOBAL` keyword before the join operator (#1353) - Thanks @git-hulk -* Support postgres String Constants with Unicode Escapes (#1355) - Thanks @lovasoa -* Support position with normal function call syntax for Snowflake (#1341) - Thanks @jmhain -* Support `TABLE` keyword in `DESC|DESCRIBE|EXPLAIN TABLE` statement (#1351) - Thanks @git-hulk - -### Changed -* Only require `DESCRIBE TABLE` for Snowflake and ClickHouse dialect (#1386) - Thanks @alamb -* Rename (unreleased) `get_next_precedence_full` to `get_next_precedence_default` (#1378) - Thanks @samuelcolvin -* Use local 
GitHub Action to replace setup-rust-action (#1371) - Thanks @git-hulk -* Simplify arrow_cast tests (#1367) - Thanks @alamb -* Update version of GitHub Actions (#1363) - Thanks @git-hulk -* Make `Parser::maybe_parse` pub (#1364) - Thanks @Jesse-Bakker -* Improve comments on `Dialect` (#1366) - Thanks @alamb - - -## [0.49.0] 2024-07-23 -As always, huge props to @iffyio @jmhain and @lovasoa for their help reviewing and merging PRs! - -We are in the process of moving sqlparser to be governed as part of the Apache -DataFusion project: https://github.com/sqlparser-rs/sqlparser-rs/issues/1294 - -### Fixed -* Fix quoted identifier regression edge-case with "from" in SELECT (#1346) - Thanks @alexander-beedie -* Fix `AS` query clause should be after the create table options (#1339) - Thanks @git-hulk - -### Added - -* Support `MATERIALIZED`/`ALIAS`/`EPHEMERAL` default column options for ClickHouse (#1348) - Thanks @git-hulk -* Support `()` as the `GROUP BY` nothing (#1347) - Thanks @git-hulk -* Support Map literal syntax for DuckDB and Generic (#1344) - Thanks @goldmedal -* Support subquery expression in `SET` expressions (#1343) - Thanks @iffyio -* Support `WITH FILL` for ClickHouse (#1330) - Thanks @nickpresta -* Support `PARTITION BY` for PostgreSQL in `CREATE TABLE` statement (#1338) - Thanks @git-hulk -* Support of table function `WITH ORDINALITY` modifier for Postgres (#1337) - Thanks @git-hulk - - -## [0.48.0] 2024-07-09 - -Huge shout out to @iffyio @jmhain and @lovasoa for their help reviewing and merging PRs! - -### Fixed -* Fix CI error message in CI (#1333) - Thanks @alamb -* Fix typo in sqlparser-derive README (#1310) - Thanks @leoyvens -* Re-enable trailing commas in DCL (#1318) - Thanks @MohamedAbdeen21 -* Fix a few typos in comment lines (#1316) - Thanks @git-hulk -* Fix Snowflake `SELECT * wildcard REPLACE ... 
RENAME` order (#1321) - Thanks @alexander-beedie -* Allow semi-colon at the end of UNCACHE statement (#1320) - Thanks @LorrensP-2158466 -* Return errors, not panic, when integers fail to parse in `AUTO_INCREMENT` and `TOP` (#1305) - Thanks @eejbyfeldt - -### Added -* Support `OWNER TO` clause in Postgres (#1314) - Thanks @gainings -* Support `FORMAT` clause for ClickHouse (#1335) - Thanks @git-hulk -* Support `DROP PROCEDURE` statement (#1324) - Thanks @LorrensP-2158466 -* Support `PREWHERE` condition for ClickHouse dialect (#1328) - Thanks @git-hulk -* Support `SETTINGS` pairs for ClickHouse dialect (#1327) - Thanks @git-hulk -* Support `GROUP BY WITH MODIFIER` for ClickHouse dialect (#1323) - Thanks @git-hulk -* Support DuckDB Union datatype (#1322) - Thanks @gstvg -* Support parametric arguments to `FUNCTION` for ClickHouse dialect (#1315) - Thanks @git-hulk -* Support `TO` in `CREATE VIEW` clause for Clickhouse (#1313) - Thanks @Bidaya0 -* Support `UPDATE` statements that contain tuple assignments (#1317) - Thanks @lovasoa -* Support `BY NAME quantifier across all set ops (#1309) - Thanks @alexander-beedie -* Support SnowFlake exclusive `CREATE TABLE` options (#1233) - Thanks @balliegojr -* Support ClickHouse `CREATE TABLE` with primary key and parametrised table engine (#1289) - Thanks @7phs -* Support custom operators in Postgres (#1302) - Thanks @lovasoa -* Support ClickHouse data types (#1285) - Thanks @7phs - -### Changed -* Add stale PR github workflow (#1331) - Thanks @alamb -* Refine docs (#1326) - Thanks @emilsivervik -* Improve error messages with additional colons (#1319) - Thanks @LorrensP-2158466 -* Move Display fmt to struct for `CreateIndex` (#1307) - Thanks @philipcristiano -* Enhancing Trailing Comma Option (#1212) - Thanks @MohamedAbdeen21 -* Encapsulate `CreateTable`, `CreateIndex` into specific structs (#1291) - Thanks @philipcristiano - -## [0.47.0] 2024-06-01 - -### Fixed -* Re-support Postgres array slice syntax (#1290) - Thanks @jmhain -* Fix DoubleColon cast skipping AT TIME ZONE #1266 (#1267) - Thanks @dmitrybugakov -* Fix for values as table name in Databricks and generic (#1278) - Thanks @jmhain - -### Added -* Support `ASOF` joins in Snowflake (#1288) - Thanks @jmhain -* Support `CREATE VIEW` with fields and data types ClickHouse (#1292) - Thanks @7phs -* Support view comments for Snowflake (#1287) - Thanks @bombsimon -* Support dynamic pivot in Snowflake (#1280) - Thanks @jmhain -* Support `CREATE FUNCTION` for BigQuery, generalize AST (#1253) - Thanks @iffyio -* Support expression in `AT TIME ZONE` and fix precedence (#1272) - Thanks @jmhain -* Support `IGNORE/RESPECT NULLS` inside function argument list for Databricks (#1263) - Thanks @jmhain -* Support `SELECT * EXCEPT` Databricks (#1261) - Thanks @jmhain -* Support triple quoted strings (#1262) - Thanks @iffyio -* Support array indexing for duckdb (#1265) - Thanks @JichaoS -* Support multiple SET variables (#1252) - Thanks @iffyio -* Support `ANY_VALUE` `HAVING` clause (#1258) in BigQuery - Thanks @jmhain -* Support keywords as field names in BigQuery struct syntax (#1254) - Thanks @iffyio -* Support `GROUP_CONCAT()` in MySQL (#1256) - Thanks @jmhain -* Support lambda functions in Databricks (#1257) - Thanks @jmhain -* Add const generic peek_tokens method to parser (#1255) - Thanks @jmhain - - -## [0.46.0] 2024-05-03 - -### Changed -* Consolidate representation of function calls, remove `AggregateExpressionWithFilter`, `ArraySubquery`, `ListAgg` and `ArrayAgg` (#1247) - Thanks jmhain -* Extended 
dialect trait to support numeric prefixed identifiers (#1188) - Thanks @groobyming -* Update simple_logger requirement from 4.0 to 5.0 (#1246) - Thanks @dependabot -* Improve parsing of JSON accesses on Postgres and Snowflake (#1215) - Thanks @jmhain -* Encapsulate Insert and Delete into specific structs (#1224) - Thanks @tisonkun -* Preserve double colon casts (and simplify cast representations) (#1221) - Thanks @jmhain - -### Fixed -* Fix redundant brackets in Hive/Snowflake/Redshift (#1229) - Thanks @yuval-illumex - -### Added -* Support values without parens in Snowflake and DataBricks (#1249) - Thanks @HiranmayaGundu -* Support WINDOW clause after QUALIFY when parsing (#1248) - Thanks @iffyio -* Support `DECLARE` parsing for mssql (#1235) - Thanks @devanbenz -* Support `?`-based jsonb operators in Postgres (#1242) - Thanks @ReppCodes -* Support Struct datatype parsing for GenericDialect (#1241) - Thanks @duongcongtoai -* Support BigQuery window function null treatment (#1239) - Thanks @iffyio -* Support extend pivot operator - Thanks @iffyio -* Support Databricks SQL dialect (#1220) - Thanks @jmhain -* Support for MSSQL CONVERT styles (#1219) - Thanks @iffyio -* Support window clause using named window in BigQuery (#1237) - Thanks @iffyio -* Support for CONNECT BY (#1138) - Thanks @jmhain -* Support object constants in Snowflake (#1223) - Thanks @jmhain -* Support BigQuery MERGE syntax (#1217) - Thanks @iffyio -* Support for MAX for NVARCHAR (#1232) - Thanks @bombsimon -* Support fixed size list types (#1231) - @universalmind303 -* Support Snowflake MATCH_RECOGNIZE syntax (#1222) - Thanks @jmhain -* Support quoted string backslash escaping (#1177) - Thanks @iffyio -* Support Modify Column for MySQL dialect (#1216) - Thanks @KKould -* Support `select * ilike` for snowflake (#1228) - Thanks @HiranmayaGundu -* Support wildcard replace in duckdb and snowflake syntax (#1226) - Thanks @HiranmayaGundu - - - -## [0.45.0] 2024-04-12 - -### Added -* Support `DateTimeField` variants: `CUSTOM` and `WEEK(MONDAY)` (#1191) - Thanks @iffyio -* Support for arbitrary expr in `MapAccessSyntax` (#1179) - Thanks @iffyio -* Support unquoted hyphen in table/view declaration for BigQuery (#1178) - Thanks @iffyio -* Support `CREATE/DROP SECRET` for duckdb dialect (#1208) - Thanks @JichaoS -* Support MySQL `UNIQUE` table constraint (#1164) - Thanks @Nikita-str -* Support trailing commas on Snowflake. (#1205) - Thanks @yassun7010 -* Support `[FIRST | AFTER column_name]` in `ALTER TABLE` for MySQL (#1180) - Thanks @xring -* Support inline comment with hash syntax for BigQuery (#1192) - Thanks @iffyio -* Support named windows in OVER (window_definition) clause (#1166) - Thanks @Nikita-str -* Support PARALLEL ... and for ..ON NULL INPUT ... to CREATE FUNCTION` (#1202) - Thanks @dimfeld -* Support DuckDB functions named arguments with assignment operator (#1195) - Thanks @alamb -* Support DuckDB struct literal syntax (#1194) - Thanks @gstvg -* Support `$$` in generic dialect ... 
(#1185)- Thanks @milenkovicm -* Support row_alias and col_aliases in `INSERT` statement for MySQL and Generic dialects (#1136) - Thanks @emin100 - -### Fixed -* Fix dollar quoted string tokenizer (#1193) - Thanks @ZacJW -* Do not allocate in `impl Display` for `DateTimeField` (#1209) - Thanks @alamb -* Fix parse `COPY INTO` stage names without parens for SnowFlake (#1187) - Thanks @mobuchowski -* Solve stack overflow on RecursionLimitExceeded on debug builds (#1171) - Thanks @Nikita-str -* Fix parsing of equality binary operator in function argument (#1182) - Thanks @jmhain -* Fix some comments (#1184) - Thanks @sunxunle - -### Changed -* Cleanup `CREATE FUNCTION` tests (#1203) - Thanks @alamb -* Parse `SUBSTRING FROM` syntax in all dialects, reflect change in the AST (#1173) - Thanks @lovasoa -* Add identifier quote style to Dialect trait (#1170) - Thanks @backkem - -## [0.44.0] 2024-03-02 - -### Added -* Support EXPLAIN / DESCR / DESCRIBE [FORMATTED | EXTENDED] (#1156) - Thanks @jonathanlehtoalamb -* Support ALTER TABLE ... SET LOCATION (#1154) - Thanks @jonathanlehto -* Support `ROW FORMAT DELIMITED` in Hive (#1155) - Thanks @jonathanlehto -* Support `SERDEPROPERTIES` for `CREATE TABLE` with Hive (#1152) - Thanks @jonathanlehto -* Support `EXECUTE ... USING` for Postgres (#1153) - Thanks @jonathanlehto -* Support Postgres style `CREATE FUNCTION` in GenericDialect (#1159) - Thanks @alamb -* Support `SET TBLPROPERTIES` (#1151) - Thanks @jonathanlehto -* Support `UNLOAD` statement (#1150) - Thanks @jonathanlehto -* Support `MATERIALIZED CTEs` (#1148) - Thanks @ReppCodes -* Support `DECLARE` syntax for snowflake and bigquery (#1122) - Thanks @iffyio -* Support `SELECT AS VALUE` and `SELECT AS STRUCT` for BigQuery (#1135) - Thanks @lustefaniak -* Support `(+)` outer join syntax (#1145) - Thanks @jmhain -* Support `INSERT INTO ... SELECT ... 
RETURNING` (#1132) - Thanks @lovasoa -* Support DuckDB `INSTALL` and `LOAD` (#1127) - Thanks @universalmind303 -* Support `=` operator in function args (#1128) - Thanks @universalmind303 -* Support `CREATE VIEW IF NOT EXISTS` (#1118) - Thanks @7phs -* Support `UPDATE FROM` for SQLite (further to #694) (#1117) - Thanks @ggaughan -* Support optional `DELETE FROM` statement (#1120) - Thanks @iffyio -* Support MySQL `SHOW STATUS` statement (#1119) - Thanks @invm - -### Fixed -* Clean up nightly clippy lints (#1158) - Thanks @alamb -* Handle escape, unicode, and hex in tokenize_escaped_single_quoted_string (#1146) - Thanks @JasonLi-cn -* Fix panic while parsing `REPLACE` (#1140) - Thanks @jjbayer -* Fix clippy warning from rust 1.76 (#1130) - Thanks @alamb -* Fix release instructions (#1115) - Thanks @alamb - -### Changed -* Add `parse_keyword_with_tokens` for parsing keyword and tokens combination (#1141) - Thanks @viirya -* Add ParadeDB to list of known users (#1142) - Thanks @philippemnoel -* Accept JSON_TABLE both as an unquoted table name and a table-valued function (#1134) - Thanks @lovasoa - - -## [0.43.1] 2024-01-22 -### Changes -* Fixed CHANGELOG - - -## [0.43.0] 2024-01-22 -* NO CHANGES - -## [0.42.0] 2024-01-22 - -### Added -* Support for constraint `CHARACTERISTICS` clause (#1099) - Thanks @dimfeld -* Support for unquoted hyphenated identifiers on bigquery (#1109) - Thanks @jmhain -* Support `BigQuery` table and view options (#1061) - Thanks @iffyio -* Support Postgres operators for the LIKE expression variants (#1096) - Thanks @gruuya -* Support "timezone_region" and "timezone_abbr" for `EXTRACT` (and `DATE_PART`) (#1090) - Thanks @alexander-beedie -* Support `JSONB` datatype (#1089) - Thanks @alexander-beedie -* Support PostgreSQL `^@` starts-with operator (#1091) - Thanks @alexander-beedie -* Support PostgreSQL Insert table aliases (#1069) (#1084) - Thanks @boydjohnson -* Support PostgreSQL `CREATE EXTENSION` (#1078) - Thanks @tobyhede -* Support PostgreSQL `ADD GENERATED` in `ALTER COLUMN` statements (#1079) - Thanks @tobyhede -* Support SQLite column definitions with no type (#1075) - Thanks @takluyver -* Support PostgreSQL `ENABLE` and `DISABLE` on `ALTER TABLE` (#1077) - Thanks @tobyhede -* Support MySQL `FLUSH` statement (#1076) - Thanks @emin100 -* Support Mysql `REPLACE` statement and `PRIORITY` clause of `INSERT` (#1072) - Thanks @emin100 - -### Fixed -* Fix `:start` and `:end` json accesses on SnowFlake (#1110) - Thanks @jmhain -* Fix array_agg wildcard behavior (#1093) - Thanks @ReppCodes -* Error on dangling `NO` in `CREATE SEQUENCE` options (#1104) - Thanks @PartiallyTyped -* Allow string values in `PRAGMA` commands (#1101) - Thanks @invm - -### Changed -* Use `Option` for Min and Max vals in Seq Opts, fix alter col seq display (#1106) - Thanks @PartiallyTyped -* Replace `AtomicUsize` with Cell in the recursion counter (#1098) - Thanks @wzzzzd -* Add Qrlew as a user in README.md (#1107) - Thanks @ngrislain -* Add APIs to reuse token buffers in `Tokenizer` (#1094) - Thanks @0rphon -* Bump version of `sqlparser-derive` to 0.2.2 (#1083) - Thanks @alamb - -## [0.41.0] 2023-12-22 - -### Added -* Support `DEFERRED`, `IMMEDIATE`, and `EXCLUSIVE` in SQLite's `BEGIN TRANSACTION` command (#1067) - Thanks @takaebato -* Support generated columns skipping `GENERATED ALWAYS` keywords (#1058) - Thanks @takluyver -* Support `LOCK/UNLOCK TABLES` for MySQL (#1059) - Thanks @zzzdong -* Support `JSON_TABLE` (#1062) - Thanks @lovasoa -* Support `CALL` statements (#1063) - Thanks @lovasoa - 
-### Fixed -* fix rendering of SELECT TOP (#1070) for Snowflake - Thanks jmhain - -### Changed -* Improve documentation formatting (#1068) - Thanks @alamb -* Replace type_id() by trait method to allow wrapping dialects (#1065) - Thanks @jjbayer -* Document that comments aren't preserved for round trip (#1060) - Thanks @takluyver -* Update sqlparser-derive to use `syn 2.0` (#1040) - Thanks @serprex - -## [0.40.0] 2023-11-27 - -### Added -* Add `{pre,post}_visit_query` to `Visitor` (#1044) - Thanks @jmhain -* Support generated virtual columns with expression (#1051) - Thanks @takluyver -* Support PostgreSQL `END` (#1035) - Thanks @tobyhede -* Support `INSERT INTO ... DEFAULT VALUES ...` (#1036) - Thanks @CDThomas -* Support `RELEASE` and `ROLLBACK TO SAVEPOINT` (#1045) - Thanks @CDThomas -* Support `CONVERT` expressions (#1048) - Thanks @lovasoa -* Support `GLOBAL` and `SESSION` parts in `SHOW VARIABLES` for mysql and generic - Thanks @emin100 -* Support snowflake `PIVOT` on derived table factors (#1027) - Thanks @lustefaniak -* Support mssql json and xml extensions (#1043) - Thanks @lovasoa -* Support for `MAX` as a character length (#1038) - Thanks @lovasoa -* Support `IN ()` syntax of SQLite (#1028) - Thanks @alamb - -### Fixed -* Fix extra whitespace printed before `ON CONFLICT` (#1037) - Thanks @CDThomas - -### Changed -* Document round trip ability (#1052) - Thanks @alamb -* Add PRQL to list of users (#1031) - Thanks @vanillajonathan - -## [0.39.0] 2023-10-27 - -### Added -* Support for `LATERAL FLATTEN` and similar (#1026) - Thanks @lustefaniak -* Support BigQuery struct, array and bytes , int64, `float64` datatypes (#1003) - Thanks @iffyio -* Support numbers as placeholders in Snowflake (e.g. `:1)` (#1001) - Thanks @yuval-illumex -* Support date 'key' when using semi structured data (#1023) @yuval-illumex -* Support IGNORE|RESPECT NULLs clause in window functions (#998) - Thanks @yuval-illumex -* Support for single-quoted identifiers (#1021) - Thanks @lovasoa -* Support multiple PARTITION statements in ALTER TABLE ADD statement (#1011) - Thanks @bitemyapp -* Support "with" identifiers surrounded by backticks in GenericDialect (#1010) - Thanks @bitemyapp -* Support INSERT IGNORE in MySql and GenericDialect (#1004) - Thanks @emin100 -* Support SQLite `pragma` statement (#969) - Thanks @marhoily -* Support `position` as a column name (#1022) - Thanks @lustefaniak -* Support `FILTER` in Functions (for `OVER`) clause (#1007) - Thanks @lovasoa -* Support `SELECT * EXCEPT/REPLACE` syntax from ClickHouse (#1013) - Thanks @lustefaniak -* Support subquery as function arg w/o parens in Snowflake dialect (#996) - Thanks @jmhain -* Support `UNION DISTINCT BY NAME` syntax (#997) - Thanks @alexander-beedie -* Support mysql `RLIKE` and `REGEXP` binary operators (#1017) - Thanks @lovasoa -* Support bigquery `CAST AS x [STRING|DATE] FORMAT` syntax (#978) - Thanks @lustefaniak -* Support Snowflake/BigQuery `TRIM`. 
(#975) - Thanks @zdenal -* Support `CREATE [TEMPORARY|TEMP] VIEW [IF NOT EXISTS] `(#993) - Thanks @gabivlj -* Support for `CREATE VIEW … WITH NO SCHEMA BINDING` Redshift (#979) - Thanks @lustefaniak -* Support `UNPIVOT` and a fix for chained PIVOTs (#983) - @jmhain -* Support for `LIMIT BY` (#977) - Thanks @lustefaniak -* Support for mixed BigQuery table name quoting (#971) - Thanks @iffyio -* Support `DELETE` with `ORDER BY` and `LIMIT` (MySQL) (#992) - Thanks @ulrichsg -* Support `EXTRACT` for `DAYOFWEEK`, `DAYOFYEAR`, `ISOWEEK`, `TIME` (#980) - Thanks @lustefaniak -* Support `ATTACH DATABASE` (#989) - Thanks @lovasoa - -### Fixed -* Fix handling of `/~%` in Snowflake stage name (#1009) - Thanks @lustefaniak -* Fix column `COLLATE` not displayed (#1012) - Thanks @lustefaniak -* Fix for clippy 1.73 (#995) - Thanks @alamb - -### Changed -* Test to ensure `+ - * / %` binary operators work the same in all dialects (#1025) - Thanks @lustefaniak -* Improve documentation on Parser::consume_token and friends (#994) - Thanks @alamb -* Test that regexp can be used as an identifier in postgres (#1018) - Thanks @lovasoa -* Add docstrings for Dialects, update README (#1016) - Thanks @alamb -* Add JumpWire to users in README (#990) - Thanks @hexedpackets -* Add tests for clickhouse: `tokenize == as Token::DoubleEq` (#981)- Thanks @lustefaniak - -## [0.38.0] 2023-09-21 - -### Added - -* Support `==`operator for Sqlite (#970) - Thanks @marhoily -* Support mysql `PARTITION` to table selection (#959) - Thanks @chunshao90 -* Support `UNNEST` as a table factor for PostgreSQL (#968) @hexedpackets -* Support MySQL `UNIQUE KEY` syntax (#962) - Thanks @artorias1024 -* Support` `GROUP BY ALL` (#964) - @berkaysynnada -* Support multiple actions in one ALTER TABLE statement (#960) - Thanks @ForbesLindesay -* Add `--sqlite param` to CLI (#956) - Thanks @ddol - -### Fixed -* Fix Rust 1.72 clippy lints (#957) - Thanks @alamb - -### Changed -* Add missing token loc in parse err msg (#965) - Thanks @ding-young -* Change how `ANY` and `ALL` expressions are represented in AST (#963) - Thanks @SeanTroyUWO -* Show location info in parse errors (#958) - Thanks @MartinNowak -* Update release documentation (#954) - Thanks @alamb -* Break test and coverage test into separate jobs (#949) - Thanks @alamb - - -## [0.37.0] 2023-08-22 - -### Added -* Support `FOR SYSTEM_TIME AS OF` table time travel clause support, `visit_table_factor` to Visitor (#951) - Thanks @gruuya -* Support MySQL `auto_increment` offset in table definition (#950) - Thanks @ehoeve -* Test for mssql table name in square brackets (#952) - Thanks @lovasoa -* Support additional Postgres `CREATE INDEX` syntax (#943) - Thanks @ForbesLindesay -* Support `ALTER ROLE` syntax of PostgreSQL and MS SQL Server (#942) - Thanks @r4ntix -* Support table-level comments (#946) - Thanks @ehoeve -* Support `DROP TEMPORARY TABLE`, MySQL syntax (#916) - Thanks @liadgiladi -* Support posgres type alias (#933) - Thanks @Kikkon - -### Fixed -* Clarify the value of the special flag (#948) - Thanks @alamb -* Fix `SUBSTRING` from/to argument construction for mssql (#947) - Thanks @jmaness -* Fix: use Rust idiomatic capitalization for newly added DataType enums (#939) - Thanks @Kikkon -* Fix `BEGIN TRANSACTION` being serialized as `START TRANSACTION` (#935) - Thanks @lovasoa -* Fix parsing of datetime functions without parenthesis (#930) - Thanks @lovasoa - -## [0.36.1] 2023-07-19 - -### Fixed -* Fix parsing of identifiers after '%' symbol (#927) - Thanks @alamb - -## [0.36.0] 2023-07-19 
- -### Added -* Support toggling "unescape" mode to retain original escaping (#870) - Thanks @canalun -* Support UNION (ALL) BY NAME syntax (#915) - Thanks @parkma99 -* Add doc comment for all operators (#917) - Thanks @izveigor -* Support `PGOverlap` operator (#912) - Thanks @izveigor -* Support multi args for unnest (#909) - Thanks @jayzhan211 -* Support `ALTER VIEW`, MySQL syntax (#907) - Thanks @liadgiladi -* Add DeltaLake keywords (#906) - Thanks @roeap - -### Fixed -* Parse JsonOperators correctly (#913) - Thanks @izveigor -* Fix dependabot by removing rust-toolchain toml (#922) - Thanks @alamb - -### Changed -* Clean up JSON operator tokenizing code (#923) - Thanks @alamb -* Upgrade bigdecimal to 0.4.1 (#921) - Thanks @jinlee0 -* Remove most instances of #[cfg(feature(bigdecimal))] in tests (#910) - Thanks @alamb - -## [0.35.0] 2023-06-23 - -### Added -* Support `CREATE PROCEDURE` of MSSQL (#900) - Thanks @delsehi -* Support DuckDB's `CREATE MACRO` statements (#897) - Thanks @MartinNowak -* Support for `CREATE TYPE (AS)` statements (#888) - Thanks @srijs -* Support `STRICT` tables of sqlite (#903) - Thanks @parkma99 - -### Fixed -* Fixed precedence of unary negation operator with operators: Mul, Div and Mod (#902) - Thanks @izveigor - -### Changed -* Add `support_group_by_expr` to `Dialect` trait (#896) - Thanks @jdye64 -* Update criterion requirement from `0.4` to `0.5` in `/sqlparser_bench` (#890) - Thanks @dependabot (!!) - -## [0.34.0] 2023-05-19 - -### Added - -* Support named window frames (#881) - Thanks @berkaysynnada, @mustafasrepo, and @ozankabak -* Support for `ORDER BY` clauses in aggregate functions (#882) - Thanks @mustafasrepo -* Support `DuckDB` dialect (#878) - Thanks @eitsupi -* Support optional `TABLE` keyword for `TRUNCATE TABLE` (#883) - Thanks @mobuchowski -* Support MySQL's `DIV` operator (#876) - Thanks @eitsupi -* Support Custom operators (#868) - Thanks @max-sixty -* Add `Parser::parse_multipart_identifier` (#860) - Thanks @Jefffrey -* Support for multiple expressions, order by in `ARRAY_AGG` (#879) - Thanks @mustafasrepo -* Support for query source in `COPY .. 
TO` statement (#858) - Thanks @aprimadi -* Support `DISTINCT ON (...)` (#852) - Thanks @aljazerzen -* Support multiple-table `DELETE` syntax (#855) - Thanks @AviRaboah -* Support `COPY INTO` in `SnowflakeDialect` (#841) - Thanks @pawel-big-lebowski -* Support identifiers beginning with digits in MySQL (#856) - Thanks @AviRaboah - -### Changed -* Include license file in published crate (#871) - Thanks @ankane -* Make `Expr::Interval` its own struct (#872) - Thanks @aprimadi -* Add dialect_from_str and improve Dialect documentation (#848) - Thanks @alamb -* Add clickhouse to example (#849) - Thanks @anglinb - -### Fixed -* Fix merge conflict (#885) - Thanks @alamb -* Fix tiny typo in custom_sql_parser.md (#864) - Thanks @okue -* Fix logical merge conflict (#865) - Thanks @alamb -* Test trailing commas (#859) - Thanks @aljazerzen - - -## [0.33.0] 2023-04-10 - -### Added -* Support for Mysql Backslash escapes (enabled by default) (#844) - Thanks @cobyge -* Support "UPDATE" statement in "WITH" subquery (#842) - Thanks @nicksrandall -* Support PIVOT table syntax (#836) - Thanks @pawel-big-lebowski -* Support CREATE/DROP STAGE for Snowflake (#833) - Thanks @pawel-big-lebowski -* Support Non-Latin characters (#840) - Thanks @mskrzypkows -* Support PostgreSQL: GENERATED { ALWAYS | BY DEFAULT } AS IDENTITY and GENERATED - Thanks @sam-mmm -* Support IF EXISTS in COMMENT statements (#831) - Thanks @pawel-big-lebowski -* Support snowflake alter table swap with (#825) - Thanks @pawel-big-lebowski - -### Changed -* Move tests from parser.rs to appropriate parse_XX tests (#845) - Thanks @alamb -* Correct typos in parser.rs (#838) - Thanks @felixonmars -* Improve documentation on verified_* methods (#828) - Thanks @alamb - -## [0.32.0] 2023-03-6 - -### Added -* Support ClickHouse `CREATE TABLE` with `ORDER BY` (#824) - Thanks @ankrgyl -* Support PostgreSQL exponentiation `^` operator (#813) - Thanks @michael-2956 -* Support `BIGNUMERIC` type in BigQuery (#811) - Thanks @togami2864 -* Support for optional trailing commas (#810) - Thanks @ankrgyl - -### Fixed -* Fix table alias parsing regression by backing out redshift column definition list (#827) - Thanks @alamb -* Fix typo in `ReplaceSelectElement` `colum_name` --> `column_name` (#822) - Thanks @togami2864 - -## [0.31.0] 2023-03-1 - -### Added -* Support raw string literals for BigQuery dialect (#812) - Thanks @togami2864 -* Support `SELECT * REPLACE AS ` in BigQuery dialect (#798) - Thanks @togami2864 -* Support byte string literals for BigQuery dialect (#802) - Thanks @togami2864 -* Support columns definition list for system information functions in RedShift dialect (#769) - Thanks @mskrzypkows -* Support `TRANSIENT` keyword in Snowflake dialect (#807) - Thanks @mobuchowski -* Support `JSON` keyword (#799) - Thanks @togami2864 -* Support MySQL Character Set Introducers (#788) - Thanks @mskrzypkows - -### Fixed -* Fix clippy error in ci (#803) - Thanks @togami2864 -* Handle offset in map key in BigQuery dialect (#797) - Thanks @Ziinc -* Fix a typo (precendence -> precedence) (#794) - Thanks @SARDONYX-sard -* use post_* visitors for mutable visits (#789) - Thanks @lovasoa - -### Changed -* Add another known user (#787) - Thanks @joocer - -## [0.30.0] 2023-01-02 - -### Added -* Support `RENAME` for wildcard `SELECTs` (#784) - Thanks @Jefffrey -* Add a mutable visitor (#782) - Thanks @lovasoa - -### Changed -* Allow parsing of mysql empty row inserts (#783) - Thanks @Jefffrey - -### Fixed -* Fix logical conflict (#785) - Thanks @alamb - -## [0.29.0] 
2022-12-29 - -### Highlights -* Partial source location tracking: see #710 -* Recursion limit to prevent stack overflows: #764 -* AST visitor: #765 - -### Added -feat: dollar-quoted strings support (#772) - Thanks @vasilev-alex -* Add derive based AST visitor (#765) - Thanks @tustvold -* Support `ALTER INDEX {INDEX_NAME} RENAME TO {NEW_INDEX_NAME}` (#767) - Thanks @devgony -* Support `CREATE TABLE ON UPDATE ` Function (#685) - Thanks @CEOJINSUNG -* Support `CREATE FUNCTION` definition with `$$` (#755)- Thanks @zidaye -* Add location tracking in the tokenizer and parser (#710) - Thanks @ankrgyl -* Add configurable recursion limit to parser, to protect against stackoverflows (#764) - Thanks @alamb -* Support parsing scientific notation (such as `10e5`) (#768) - Thanks @Jefffrey -* Support `DROP FUNCTION` syntax (#752) - Thanks @zidaye -* Support json operators `@>` `<@`, `@?` and `@@` - Thanks @audunska -* Support the type key (#750)- Thanks @yuval-illumex - -### Changed -* Improve docs and add examples for Visitor (#778) - Thanks @alamb -* Add a backlink from sqlparse_derive to sqlparser and publishing instructions (#779) - Thanks @alamb -* Document new features, update authors (#776) - Thanks @alamb -* Improve Readme (#774) - Thanks @alamb -* Standardize comments on parsing optional keywords (#773) - Thanks @alamb -* Enable grouping sets parsing for `GenericDialect` (#771) - Thanks @Jefffrey -* Generalize conflict target (#762) - Thanks @audunska -* Generalize locking clause (#759) - Thanks @audunska -* Add negative test for except clause on wildcards (#746)- Thanks @alamb -* Add `NANOSECOND` keyword (#749)- Thanks @waitingkuo - -### Fixed -* ParserError if nested explain (#781) - Thanks @Jefffrey -* Fix cargo docs / warnings and add CI check (#777) - Thanks @alamb -* unnest join constraint with alias parsing for BigQuery dialect (#732)- Thanks @Ziinc - -## [0.28.0] 2022-12-05 - -### Added -* Support for `EXCEPT` clause on wildcards (#745) - Thanks @AugustoFKL -* Support `CREATE FUNCTION` Postgres options (#722) - Thanks @wangrunji0408 -* Support `CREATE TABLE x AS TABLE y` (#704) - Thanks @sarahyurick -* Support MySQL `ROWS` syntax for `VALUES` (#737) - Thanks @aljazerzen -* Support `WHERE` condition for `UPDATE ON CONFLICT` (#735) - Thanks @zidaye -* Support `CLUSTER BY` when creating Materialized View (#736) - Thanks @yuval-illumex -* Support nested comments (#726) - Thanks @yang-han -* Support `USING` method when creating indexes. (#731) - Thanks @step-baby and @yangjiaxin01 -* Support `SEMI`/`ANTI` `JOIN` syntax (#723) - Thanks @mingmwang -* Support `EXCLUDE` support for snowflake and generic dialect (#721) - Thanks @AugustoFKL -* Support `MATCH AGAINST` (#708) - Thanks @AugustoFKL -* Support `IF NOT EXISTS` in `ALTER TABLE ADD COLUMN` (#707) - Thanks @AugustoFKL -* Support `SET TIME ZONE ` (#727) - Thanks @waitingkuo -* Support `UPDATE ... 
FROM ( subquery )` (#694) - Thanks @unvalley - -### Changed -* Add `Parser::index()` method to get current parsing index (#728) - Thanks @neverchanje -* Add `COMPRESSION` as keyword (#720)- Thanks @AugustoFKL -* Derive `PartialOrd`, `Ord`, and `Copy` whenever possible (#717) - Thanks @AugustoFKL -* Fixed `INTERVAL` parsing logic and precedence (#705) - Thanks @sarahyurick -* Support updating multiple column names whose names are the same as(#725) - Thanks @step-baby - -### Fixed -* Clean up some redundant code in parser (#741) - Thanks @alamb -* Fix logical conflict - Thanks @alamb -* Cleanup to avoid is_ok() (#740) - Thanks @alamb -* Cleanup to avoid using unreachable! when parsing semi/anti join (#738) - Thanks @alamb -* Add an example to docs to clarify semantic analysis (#739) - Thanks @alamb -* Add information about parting semantic logic to README.md (#724) - Thanks @AugustoFKL -* Logical conflicts - Thanks @alamb -* Tiny typo in docs (#709) - Thanks @pmcgee69 - - -## [0.27.0] 2022-11-11 - -### Added -* Support `ON CONFLICT` and `RETURNING` in `UPDATE` statement (#666) - Thanks @main and @gamife -* Support `FULLTEXT` option on create table for MySQL and Generic dialects (#702) - Thanks @AugustoFKL -* Support `ARRAY_AGG` for Bigquery and Snowflake (#662) - Thanks @SuperBo -* Support DISTINCT for SetOperator (#689) - Thanks @unvalley -* Support the ARRAY type of Snowflake (#699) - Thanks @yuval-illumex -* Support create sequence with options INCREMENT, MINVALUE, MAXVALUE, START etc. (#681) - Thanks @sam-mmm -* Support `:` operator for semi-structured data in Snowflake(#693) - Thanks @yuval-illumex -* Support ALTER TABLE DROP PRIMARY KEY (#682) - Thanks @ding-young -* Support `NUMERIC` and `DEC` ANSI data types (#695) - Thanks @AugustoFKL -* Support modifiers for Custom Datatype (#680) - Thanks @sunng87 - -### Changed -* Add precision for TIME, DATETIME, and TIMESTAMP data types (#701) - Thanks @AugustoFKL -* add Date keyword (#691) - Thanks @sarahyurick -* Update simple_logger requirement from 2.1 to 4.0 - Thanks @dependabot - -### Fixed -* Fix broken DataFusion link (#703) - Thanks @jmg-duarte -* Add MySql, BigQuery to all dialects tests, fixed bugs (#697) - Thanks @omer-shtivi - - -## [0.26.0] 2022-10-19 - -### Added -* Support MySQL table option `{INDEX | KEY}` in CREATE TABLE definiton (#665) - Thanks @AugustoFKL -* Support `CREATE [ { TEMPORARY | TEMP } ] SEQUENCE [ IF NOT EXISTS ] ` (#678) - Thanks @sam-mmm -* Support `DROP SEQUENCE` statement (#673) - Thanks @sam-mmm -* Support for ANSI types `CHARACTER LARGE OBJECT[(p)]` and `CHAR LARGE OBJECT[(p)]` (#671) - Thanks @AugustoFKL -* Support `[CACHE|UNCACHE] TABLE` (#670) - Thanks @francis-du -* Support `CEIL(expr TO DateTimeField)` and `FLOOR(expr TO DateTimeField)` - Thanks @sarahyurick -* Support all ansii character string types, (#648) - Thanks @AugustoFKL - -### Changed -* Support expressions inside window frames (#655) - Thanks @mustafasrepo and @ozankabak -* Support unit on char length units for small character strings (#663) - Thanks @AugustoFKL -* Replace booleans on `SET ROLE` with a single enum. 
(#664) - Thanks @AugustoFKL -* Replace `Option`s with enum for `DECIMAL` precision (#654) - Thanks @AugustoFKL - -## [0.25.0] 2022-10-03 - -### Added - -* Support `AUTHORIZATION` clause in `CREATE SCHEMA` statements (#641) - Thanks @AugustoFKL -* Support optional precision for `CLOB` and `BLOB` (#639) - Thanks @AugustoFKL -* Support optional precision in `VARBINARY` and `BINARY` (#637) - Thanks @AugustoFKL - - -### Changed -* `TIMESTAMP` and `TIME` parsing preserve zone information (#646) - Thanks @AugustoFKL - -### Fixed - -* Correct order of arguments when parsing `LIMIT x,y` , restrict to `MySql` and `Generic` dialects - Thanks @AugustoFKL - - -## [0.24.0] 2022-09-29 - -### Added - -* Support `MILLENNIUM` (2 Ns) (#633) - Thanks @sarahyurick -* Support `MEDIUMINT` (#630) - Thanks @AugustoFKL -* Support `DOUBLE PRECISION` (#629) - Thanks @AugustoFKL -* Support precision in `CLOB`, `BINARY`, `VARBINARY`, `BLOB` data type (#618) - Thanks @ding-young -* Support `CREATE ROLE` and `DROP ROLE` (#598) - Thanks @blx -* Support full range of sqlite prepared statement placeholders (#604) - Thanks @lovasoa -* Support National string literal with lower case `n` (#612) - Thanks @mskrzypkows -* Support SHOW FUNCTIONS (#620) - Thanks @joocer -* Support `set time zone to 'some-timezone'` (#617) - Thanks @waitingkuo - -### Changed -* Move `Value::Interval` to `Expr::Interval` (#609) - Thanks @ding-young -* Update `criterion` dev-requirement from 0.3 to 0.4 in /sqlparser_bench (#611) - Thanks @dependabot -* Box `Query` in `Cte` (#572) - Thanks @MazterQyou - -### Other -* Disambiguate CREATE ROLE ... USER and GROUP (#628) - Thanks @alamb -* Add test for optional WITH in CREATE ROLE (#627) - Thanks @alamb - -## [0.23.0] 2022-09-08 - -### Added -* Add support for aggregate expressions with filters (#585) - Thanks @andygrove -* Support `LOCALTIME` and `LOCALTIMESTAMP` time functions (#592) - Thanks @MazterQyou - -## [0.22.0] 2022-08-26 - -### Added -* Support `OVERLAY` expressions (#594) - Thanks @ayushg -* Support `WITH TIMEZONE` and `WITHOUT TIMEZONE` when parsing `TIMESTAMP` expressions (#589) - Thanks @waitingkuo -* Add ability for dialects to override prefix, infix, and statement parsing (#581) - Thanks @andygrove - -## [0.21.0] 2022-08-18 - -### Added -* Support `IS [NOT] TRUE`, `IS [NOT] FALSE`, and `IS [NOT] UNKNOWN` - Thanks (#583) @sarahyurick -* Support `SIMILAR TO` syntax (#569) - Thanks @ayushdg -* Support `SHOW COLLATION` (#564) - Thanks @MazterQyou -* Support `SHOW TABLES` (#563) - Thanks @MazterQyou -* Support `SET NAMES literal [COLLATE literal]` (#558) - Thanks @ovr -* Support trailing commas (#557) in `BigQuery` dialect - Thanks @komukomo -* Support `USE ` (#565) - Thanks @MazterQyou -* Support `SHOW COLUMNS FROM tbl FROM db` (#562) - Thanks @MazterQyou -* Support `SHOW VARIABLES` for `MySQL` dialect (#559) - Thanks @ovr and @vasilev-alex - -### Changed -* Support arbitrary expression in `SET` statement (#574) - Thanks @ovr and @vasilev-alex -* Parse LIKE patterns as Expr not Value (#579) - Thanks @andygrove -* Update Ballista link in README (#576) - Thanks @sanxiyn -* Parse `TRIM` from with optional expr and `FROM` expr (#573) - Thanks @ayushdg -* Support PostgreSQL array subquery constructor (#566) - Thanks @MazterQyou -* Clarify contribution licensing (#570) - Thanks @alamb -* Update for new clippy ints (#571) - Thanks @alamb -* Change `Like` and `ILike` to `Expr` variants, allow escape char (#569) - Thanks @ayushdg -* Parse special keywords as functions (`current_user`, `user`, etc) 
(#561) - Thanks @ovr -* Support expressions in `LIMIT`/`OFFSET` (#567) - Thanks @MazterQyou - -## [0.20.0] 2022-08-05 - -### Added -* Support custom `OPERATOR` postgres syntax (#548) - Thanks @iskakaushik -* Support `SAFE_CAST` for BigQuery (#552) - Thanks @togami2864 - -### Changed -* Added SECURITY.md (#546) - Thanks @JamieSlome -* Allow `>>` and `<<` binary operators in Generic dialect (#553) - Thanks @ovr -* Allow `NestedJoin` with an alias (#551) - Thanks @waitingkuo - -## [0.19.0] 2022-07-28 - -### Added - -* Support `ON CLUSTER` for `CREATE TABLE` statement (ClickHouse DDL) (#527) - Thanks @andyrichardson -* Support empty `ARRAY` literals (#532) - Thanks @bitemyapp -* Support `AT TIME ZONE` clause (#539) - Thanks @bitemyapp -* Support `USING` clause and table aliases in `DELETE` (#541) - Thanks @mobuchowski -* Support `SHOW CREATE VIEW` statement (#536) - Thanks @mrob95 -* Support `CLONE` clause in `CREATE TABLE` statements (#542) - Thanks @mobuchowski -* Support `WITH OFFSET Alias` in table references (#528) - Thanks @sivchari -* Support double quoted (`"`) literal strings: (#530) - Thanks @komukomo -* Support `ON UPDATE` clause on column definitions in `CREATE TABLE` statements (#522) - Thanks @frolovdev - - -### Changed: - -* `Box`ed `Query` body to save stack space (#540) - Thanks @5tan -* Distinguish between `INT` and `INTEGER` types (#525) - Thanks @frolovdev -* Parse `WHERE NOT EXISTS` as `Expr::Exists` rather than `Expr::UnaryOp` for consistency (#523) - Thanks @frolovdev -* Support `Expr` instead of `String` for argument to `INTERVAL` (#517) - Thanks @togami2864 - -### Fixed: - -* Report characters instead of bytes in error messages (#529) - Thanks @michael-2956 - - -## [0.18.0] 2022-06-06 - -### Added - -* Support `CLOSE` (cursors) (#515) - Thanks @ovr -* Support `DECLARE` (cursors) (#509) - Thanks @ovr -* Support `FETCH` (cursors) (#510) - Thanks @ovr -* Support `DATETIME` keyword (#512) - Thanks @komukomo -* Support `UNNEST` as a table factor (#493) - Thanks @sivchari -* Support `CREATE FUNCTION` (hive flavor) (#496) - Thanks @mobuchowski -* Support placeholders (`$` or `?`) in `LIMIT` clause (#494) - Thanks @step-baby -* Support escaped string literals (PostgreSQL) (#502) - Thanks @ovr -* Support `IS TRUE` and `IS FALSE` (#499) - Thanks @ovr -* Support `DISCARD [ALL | PLANS | SEQUENCES | TEMPORARY | TEMP]` (#500) - Thanks @gandronchik -* Support `array<..>` HIVE data types (#491) - Thanks @mobuchowski -* Support `SET` values that begin with `-` #495 - Thanks @mobuchowski -* Support unicode whitespace (#482) - Thanks @alexsatori -* Support `BigQuery` dialect (#490) - Thanks @komukomo - -### Changed: -* Add docs for MapAccess (#489) - Thanks @alamb -* Rename `ArrayIndex::indexs` to `ArrayIndex::indexes` (#492) - Thanks @alamb - -### Fixed: -* Fix escaping of trailing quote in quoted identifiers (#505) - Thanks @razzolini-qpq -* Fix parsing of `COLLATE` after parentheses in expressions (#507) - Thanks @razzolini-qpq -* Distinguish tables and nullary functions in `FROM` (#506) - Thanks @razzolini-qpq -* Fix `MERGE INTO` semicolon handling (#508) - Thanks @mskrzypkows - -## [0.17.0] 2022-05-09 - -### Added - -* Support `#` as first character in field name for `RedShift` dialect (#485) - Thanks @yuval-illumex -* Support for postgres composite types (#466) - Thanks @poonai -* Support `TABLE` keyword with SELECT INTO (#487) - Thanks @MazterQyou -* Support `ANY`/`ALL` operators (#477) - Thanks @ovr -* Support `ArrayIndex` in `GenericDialect` (#480) - Thanks @ovr -* Support 
`Redshift` dialect, handle square brackets properly (#471) - Thanks @mskrzypkows -* Support `KILL` statement (#479) - Thanks @ovr -* Support `QUALIFY` clause on `SELECT` for `Snowflake` dialect (#465) - Thanks @mobuchowski -* Support `POSITION(x IN y)` function syntax (#463) @yuval-illumex -* Support `global`,`local`, `on commit` for `create temporary table` (#456) - Thanks @gandronchik -* Support `NVARCHAR` data type (#462) - Thanks @yuval-illumex -* Support for postgres json operators `->`, `->>`, `#>`, and `#>>` (#458) - Thanks @poonai -* Support `SET ROLE` statement (#455) - Thanks @slhmy - -### Changed: -* Improve docstrings for `KILL` statement (#481) - Thanks @alamb -* Add negative tests for `POSITION` (#469) - Thanks @alamb -* Add negative tests for `IN` parsing (#468) - Thanks @alamb -* Suppport table names (as well as subqueries) as source in `MERGE` statements (#483) - Thanks @mskrzypkows - - -### Fixed: -* `INTO` keyword is optional for `INSERT`, `MERGE` (#473) - Thanks @mobuchowski -* Support `IS TRUE` and `IS FALSE` expressions in boolean filter (#474) - Thanks @yuval-illumex -* Support fully qualified object names in `SET VARIABLE` (#484) - Thanks mobuchowski - -## [0.16.0] 2022-04-03 - -### Added - -* Support `WEEK` keyword in `EXTRACT` (#436) - Thanks @Ted-Jiang -* Support `MERGE` statement (#430) - Thanks @mobuchowski -* Support `SAVEPOINT` statement (#438) - Thanks @poonai -* Support `TO` clause in `COPY` (#441) - Thanks @matthewmturner -* Support `CREATE DATABASE` statement (#451) - Thanks @matthewmturner -* Support `FROM` clause in `UPDATE` statement (#450) - Thanks @slhmy -* Support additional `COPY` options (#446) - Thanks @wangrunji0408 - -### Fixed: -* Bug in array / map access parsing (#433) - Thanks @monadbobo - -## [0.15.0] 2022-03-07 - -### Added - -* Support for ClickHouse array types (e.g. [1,2,3]) (#429) - Thanks @monadbobo -* Support for `unsigned tinyint`, `unsigned int`, `unsigned smallint` and `unsigned bigint` datatypes (#428) - Thanks @watarukura -* Support additional keywords for `EXTRACT` (#427) - Thanks @mobuchowski -* Support IN UNNEST(expression) (#426) - Thanks @komukomo -* Support COLLATION keywork on CREATE TABLE (#424) - Thanks @watarukura -* Support FOR UPDATE/FOR SHARE clause (#418) - Thanks @gamife -* Support prepared statement placeholder arg `?` and `$` (#420) - Thanks @gamife -* Support array expressions such as `ARRAY[1,2]` , `foo[1]` and `INT[][]` (#419) - Thanks @gamife - -### Changed: -* remove Travis CI (#421) - Thanks @efx - -### Fixed: -* Allow `array` to be used as a function name again (#432) - @alamb -* Update docstring reference to `Query` (#423) - Thanks @max-sixty - -## [0.14.0] 2022-02-09 - -### Added -* Support `CURRENT_TIMESTAMP`, `CURRENT_TIME`, and `CURRENT_DATE` (#391) - Thanks @yuval-illumex -* SUPPORT `SUPER` keyword (#387) - Thanks @flaneur2020 -* Support differing orders of `OFFSET` `LIMIT` as well as `LIMIT` `OFFSET` (#413) - Thanks @yuval-illumex -* Support for `FROM `, `DELIMITER`, and `CSV HEADER` options for `COPY` command (#409) - Thanks @poonai -* Support `CHARSET` and `ENGINE` clauses on `CREATE TABLE` for mysql (#392) - Thanks @antialize -* Support `DROP CONSTRAINT [ IF EXISTS ] [ CASCADE ]` (#396) - Thanks @tvallotton -* Support parsing tuples and add `Expr::Tuple` (#414) - @alamb -* Support MySQL style `LIMIT X, Y` (#415) - @alamb -* Support `SESSION TRANSACTION` and `TRANSACTION SNAPSHOT`. 
(#379) - Thanks @poonai -* Support `ALTER COLUMN` and `RENAME CONSTRAINT` (#381) - Thanks @zhamlin -* Support for Map access, add ClickHouse dialect (#382) - Thanks @monadbobo - -### Changed -* Restrict where wildcard (`*`) can appear, add to `FunctionArgExpr` remove `Expr::[Qualified]Wildcard`, (#378) - Thanks @panarch -* Update simple_logger requirement from 1.9 to 2.1 (#403) -* export all methods of parser (#397) - Thanks @neverchanje! -* Clarify maintenance status on README (#416) - @alamb - -### Fixed -* Fix new clippy errors (#412) - @alamb -* Fix panic with `GRANT/REVOKE` in `CONNECT`, `CREATE`, `EXECUTE` or `TEMPORARY` - Thanks @evgenyx00 -* Handle double quotes inside quoted identifiers correctly (#411) - Thanks @Marwes -* Handle mysql backslash escaping (#373) - Thanks @vasilev-alex - -## [0.13.0] 2021-12-10 - -### Added -* Add ALTER TABLE CHANGE COLUMN, extend the UPDATE statement with ON clause (#375) - Thanks @0xA537FD! -* Add support for GROUPIING SETS, ROLLUP and CUBE - Thanks @Jimexist! -* Add basic support for GRANT and REVOKE (#365) - Thanks @blx! - -### Changed -* Use Rust 2021 edition (#368) - Thanks @Jimexist! - -### Fixed -* Fix clippy errors (#367, #374) - Thanks @Jimexist! - - -## [0.12.0] 2021-10-14 - -### Added -* Add support for [NOT] IS DISTINCT FROM (#306) - @Dandandan - -### Changed -* Move the keywords module - Thanks @koushiro! - - -## [0.11.0] 2021-09-24 - -### Added -* Support minimum display width for integer data types (#337) Thanks @vasilev-alex! -* Add logical XOR operator (#357) - Thanks @xzmrdltl! -* Support DESCRIBE table_name (#340) - Thanks @ovr! -* Support SHOW CREATE TABLE|EVENT|FUNCTION (#338) - Thanks @ovr! -* Add referential actions to TableConstraint foreign key (#306) - Thanks @joshwd36! - -### Changed -* Enable map access for numbers, multiple nesting levels (#356) - Thanks @Igosuki! -* Rename Token::Mult to Token::Mul (#353) - Thanks @koushiro! -* Use derive(Default) for HiveFormat (#348) - Thanks @koushiro! -* Improve tokenizer error (#347) - Thanks @koushiro! -* Eliminate redundant string copy in Tokenizer (#343) - Thanks @koushiro! -* Update bigdecimal requirement from 0.2 to 0.3 dependencies (#341) -* Support parsing hexadecimal literals that start with `0x` (#324) - Thanks @TheSchemm! - - -## [0.10.0] 2021-08-23 - -### Added -* Support for `no_std` (#332) - Thanks @koushiro! -* Postgres regular expression operators (`~`, `~*`, `!~`, `!~*`) (#328) - Thanks @b41sh! -* tinyint (#320) - Thanks @sundy-li -* ILIKE (#300) - Thanks @maxcountryman! -* TRIM syntax (#331, #334) - Thanks ever0de - - -### Fixed -* Return error instead of panic (#316) - Thanks @BohuTANG! - -### Changed -- Rename `Modulus` to `Modulo` (#335) - Thanks @RGRAVITY817! -- Update links to reflect repository move to `sqlparser-rs` GitHub org (#333) - Thanks @andygrove -- Add default value for `WindowFrame` (#313) - Thanks @Jimexist! - -## [0.9.0] 2021-03-21 - -### Added -* Add support for `TRY_CAST` syntax (#299) - Thanks @seddonm1! - -## [0.8.0] 2021-02-20 - -### Added -* Introduce Hive QL dialect `HiveDialect` and syntax (#235) - Thanks @hntd187! -* Add `SUBSTRING(col [FROM ] [FOR ])` syntax (#293) -* Support parsing floats without leading digits `.01` (#294) -* Support parsing multiple show variables (#290) - Thanks @francis-du! -* Support SQLite `INSERT OR [..]` syntax (#281) - Thanks @zhangli-pear! 
- -## [0.7.0] 2020-12-28 - -### Changed -- Change the MySQL dialect to support `` `identifiers` `` quoted with backticks instead of the standard `"double-quoted"` identifiers (#247) - thanks @mashuai! -- Update bigdecimal requirement from 0.1 to 0.2 (#268) - -### Added -- Enable dialect-specific behaviours in the parser (`dialect_of!()`) (#254) - thanks @eyalleshem! -- Support named arguments in function invocations (`ARG_NAME => val`) (#250) - thanks @eyalleshem! -- Support `TABLE()` functions in `FROM` (#253) - thanks @eyalleshem! -- Support Snowflake's single-line comments starting with '#' or '//' (#264) - thanks @eyalleshem! -- Support PostgreSQL `PREPARE`, `EXECUTE`, and `DEALLOCATE` (#243) - thanks @silathdiir! -- Support PostgreSQL math operators (#267) - thanks @alex-dukhno! -- Add SQLite dialect (#248) - thanks @mashuai! -- Add Snowflake dialect (#259) - thanks @eyalleshem! -- Support for Recursive CTEs - thanks @rhanqtl! -- Support `FROM (table_name) alias` syntax - thanks @eyalleshem! -- Support for `EXPLAIN [ANALYZE] VERBOSE` - thanks @ovr! -- Support `ANALYZE TABLE` -- DDL: - - Support `OR REPLACE` in `CREATE VIEW`/`TABLE` (#239) - thanks @Dandandan! - - Support specifying `ASC`/`DESC` in index columns (#249) - thanks @mashuai! - - Support SQLite `AUTOINCREMENT` and MySQL `AUTO_INCREMENT` column option in `CREATE TABLE` (#234) - thanks @mashuai! - - Support PostgreSQL `IF NOT EXISTS` for `CREATE SCHEMA` (#276) - thanks @alex-dukhno! - -### Fixed -- Fix a typo in `JSONFILE` serialization, introduced in 0.3.1 (#237) -- Change `CREATE INDEX` serialization to not end with a semicolon, introduced in 0.5.1 (#245) -- Don't fail parsing `ALTER TABLE ADD COLUMN` ending with a semicolon, introduced in 0.5.1 (#246) - thanks @mashuai - -## [0.6.1] - 2020-07-20 - -### Added -- Support BigQuery `ASSERT` statement (#226) - -## [0.6.0] - 2020-07-20 - -### Added -- Support SQLite's `CREATE TABLE (...) WITHOUT ROWID` (#208) - thanks @mashuai! -- Support SQLite's `CREATE VIRTUAL TABLE` (#209) - thanks @mashuai! - -## [0.5.1] - 2020-06-26 -This release should have been called `0.6`, as it introduces multiple incompatible changes to the API. If you don't want to upgrade yet, you can revert to the previous version by changing your `Cargo.toml` to: - - sqlparser = "= 0.5.0" - - -### Changed -- **`Parser::parse_sql` now accepts a `&str` instead of `String` (#182)** - thanks @Dandandan! -- Change `Ident` (previously a simple `String`) to store the parsed (unquoted) `value` of the identifier and the `quote_style` separately (#143) - thanks @apparebit! -- Support Snowflake's `FROM (table_name)` (#155) - thanks @eyalleshem! -- Add line and column number to TokenizerError (#194) - thanks @Dandandan! -- Use Token::EOF instead of Option (#195) -- Make the units keyword following `INTERVAL '...'` optional (#184) - thanks @maxcountryman! -- Generalize `DATE`/`TIME`/`TIMESTAMP` literals representation in the AST (`TypedString { data_type, value }`) and allow `DATE` and other keywords to be used as identifiers when not followed by a string (#187) - thanks @maxcountryman! -- Output DataType capitalized (`fmt::Display`) (#202) - thanks @Dandandan! - -### Added -- Support MSSQL `TOP () [ PERCENT ] [ WITH TIES ]` (#150) - thanks @alexkyllo! -- Support MySQL `LIMIT row_count OFFSET offset` (not followed by `ROW` or `ROWS`) and remember which variant was parsed (#158) - thanks @mjibson! -- Support PostgreSQL `CREATE TABLE IF NOT EXISTS table_name` (#163) - thanks @alex-dukhno! 
-- Support basic forms of `CREATE INDEX` and `DROP INDEX` (#167) - thanks @mashuai! -- Support `ON { UPDATE | DELETE } { RESTRICT | CASCADE | SET NULL | NO ACTION | SET DEFAULT }` in `FOREIGN KEY` constraints (#170) - thanks @c7hm4r! -- Support basic forms of `CREATE SCHEMA` and `DROP SCHEMA` (#173) - thanks @alex-dukhno! -- Support `NULLS FIRST`/`LAST` in `ORDER BY` expressions (#176) - thanks @houqp! -- Support `LISTAGG()` (#174) - thanks @maxcountryman! -- Support the string concatentation operator `||` (#178) - thanks @Dandandan! -- Support bitwise AND (`&`), OR (`|`), XOR (`^`) (#181) - thanks @Dandandan! -- Add serde support to AST structs and enums (#196) - thanks @panarch! -- Support `ALTER TABLE ADD COLUMN`, `RENAME COLUMN`, and `RENAME TO` (#203) - thanks @mashuai! -- Support `ALTER TABLE DROP COLUMN` (#148) - thanks @ivanceras! -- Support `CREATE TABLE ... AS ...` (#206) - thanks @Dandandan! - -### Fixed -- Report an error for unterminated string literals (#165) -- Make file format (`STORED AS`) case insensitive (#200) and don't allow quoting it (#201) - thanks @Dandandan! - -## [0.5.0] - 2019-10-10 - -### Changed -- Replace the `Value::Long(u64)` and `Value::Double(f64)` variants with `Value::Number(String)` to avoid losing precision when parsing decimal literals (#130) - thanks @benesch! -- `--features bigdecimal` can be enabled to work with `Value::Number(BigDecimal)` instead, at the cost of an additional dependency. - -### Added -- Support MySQL `SHOW COLUMNS`, `SET =`, and `SHOW ` statements (#135) - thanks @quodlibetor and @benesch! - -### Fixed -- Don't fail to parse `START TRANSACTION` followed by a semicolon (#139) - thanks @gaffneyk! - - -## [0.4.0] - 2019-07-02 -This release brings us closer to SQL-92 support, mainly thanks to the improvements contributed back from @MaterializeInc's fork and other work by @benesch. - -### Changed -- Remove "SQL" from type and enum variant names, `SQLType` -> `DataType`, remove "sql" prefix from module names (#105, #122) -- Rename `ASTNode` -> `Expr` (#119) -- Improve consistency of binary/unary op nodes (#112): - - `ASTNode::SQLBinaryExpr` is now `Expr::BinaryOp` and `ASTNode::SQLUnary` is `Expr::UnaryOp`; - - The `op: SQLOperator` field is now either a `BinaryOperator` or an `UnaryOperator`. -- Change the representation of JOINs to match the standard (#109): `SQLSelect`'s `relation` and `joins` are replaced with `from: Vec`. Before this change `FROM foo NATURAL JOIN bar, baz` was represented as "foo" as the `relation` followed by two joins (`Inner(Natural)` and `Implicit`); now it's two `TableWithJoins` (`foo NATURAL JOIN bar` and `baz`). -- Extract a `SQLFunction` struct (#89) -- Replace `Option>` with `Vec` in the AST structs (#73) -- Change `Value::Long()` to be unsigned, use u64 consistently (#65) - -### Added -- Infra: - - Implement `fmt::Display` on AST nodes (#124) - thanks @vemoo! 
- - Implement `Hash` (#88) and `Eq` (#123) on all AST nodes - - Implement `std::error::Error` for `ParserError` (#72) - - Handle Windows line-breaks (#54) -- Expressions: - - Support `INTERVAL` literals (#103) - - Support `DATE` / `TIME` / `TIMESTAMP` literals (#99) - - Support `EXTRACT` (#96) - - Support `X'hex value'` literals (#95) - - Support `EXISTS` subqueries (#90) - - Support nested expressions in `BETWEEN` (#80) - - Support `COUNT(DISTINCT x)` and similar (#77) - - Support `CASE operand WHEN expected_value THEN ..` and table-valued functions (#59) - - Support analytic (window) functions (`OVER` clause) (#50) -- Queries / DML: - - Support nested joins (#100) and derived tables with set operations (#111) - - Support `UPDATE` statements (#97) - - Support `INSERT INTO foo SELECT * FROM bar` and `FROM VALUES (...)` (#91) - - Support `SELECT ALL` (#76) - - Add `FETCH` and `OFFSET` support, and `LATERAL` (#69) - thanks @thomas-jeepe! - - Support `COLLATE`, optional column list in CTEs (#64) -- DDL/TCL: - - Support `START/SET/COMMIT/ROLLBACK TRANSACTION` (#106) - thanks @SamuelMarks! - - Parse column constraints in any order (#93) - - Parse `DECIMAL` and `DEC` aliases for `NUMERIC` type (#92) - - Support `DROP [TABLE|VIEW]` (#75) - - Support arbitrary `WITH` options for `CREATE [TABLE|VIEW]` (#74) - - Support constraints in `CREATE TABLE` (#65) -- Add basic MSSQL dialect (#61) and some MSSQL-specific features: - - `CROSS`/`OUTER APPLY` (#120) - - MSSQL identifier and alias parsing rules (#66) - - `WITH` hints (#59) - -### Fixed -- Report an error for `SELECT * FROM a OUTER JOIN b` instead of parsing `OUTER` as an alias (#118) -- Fix the precedence of `NOT LIKE` (#82) and unary `NOT` (#107) -- Do not panic when `NOT` is not followed by an expected keyword (#71) -successfully instead of returning a parse error - thanks @ivanceras! (#67) - and similar fixes for queries with no `FROM` (#116) -- Fix issues with `ALTER TABLE ADD CONSTRAINT` parsing (#65) -- Serialize the "not equals" operator as `<>` instead of `!=` (#64) -- Remove dependencies on `uuid` (#59) and `chrono` (#61) -- Make `SELECT` query with `LIMIT` clause but no `WHERE` parse - Fix incorrect behavior of `ASTNode::SQLQualifiedWildcard::to_string()` (returned `foo*` instead of `foo.*`) - thanks @thomas-jeepe! (#52) - -## [0.3.1] - 2019-04-20 -### Added -- Extended `SQLStatement::SQLCreateTable` to support Hive's EXTERNAL TABLES (`CREATE EXTERNAL TABLE .. STORED AS .. LOCATION '..'`) - thanks @zhzy0077! (#46) -- Parse `SELECT DISTINCT` to `SQLSelect::distinct` (#49) - -## [0.3.0] - 2019-04-03 -### Changed -This release includes major changes to the AST structs to add a number of features, as described in #37 and #43. In particular: -- `ASTNode` variants that represent statements were extracted from `ASTNode` into a separate `SQLStatement` enum; - - `Parser::parse_sql` now returns a `Vec` of parsed statements. - - `ASTNode` now represents an expression (renamed to `Expr` in 0.4.0) -- The query representation (formerly `ASTNode::SQLSelect`) became more complicated to support: - - `WITH` and `UNION`/`EXCEPT`/`INTERSECT` (via `SQLQuery`, `Cte`, and `SQLSetExpr`), - - aliases and qualified wildcards in `SELECT` (via `SQLSelectItem`), - - and aliases in `FROM`/`JOIN` (via `TableFactor`). -- A new `SQLObjectName` struct is used instead of `String` or `ASTNode::SQLCompoundIdentifier` - for objects like tables, custom types, etc. 
-- Added support for "delimited identifiers" and made keywords context-specific (thus accepting them as valid identifiers in most contexts) - **this caused a regression in parsing `SELECT .. FROM .. LIMIT ..` (#67), fixed in 0.4.0** - -### Added -Other than the changes listed above, some less intrusive additions include: -- Support `CREATE [MATERIALIZED] VIEW` statement -- Support `IN`, `BETWEEN`, unary +/- in epressions -- Support `CHAR` data type and `NUMERIC` not followed by `(p,s)`. -- Support national string literals (`N'...'`) - -## [0.2.4] - 2019-03-08 -Same as 0.2.2. - -## [0.2.3] - 2019-03-08 [YANKED] - -## [0.2.2] - 2019-03-08 -### Changed -- Removed `Value::String`, `Value::DoubleQuotedString`, and `Token::String`, making - - `'...'` parse as a string literal (`Value::SingleQuotedString`), and - - `"..."` fail to parse until version 0.3.0 (#36) - -## [0.2.1] - 2019-01-13 -We don't have a changelog for the changes made in 2018, but thanks to @crw5996, @cswinter, @fredrikroos, @ivanceras, @nickolay, @virattara for their contributions in the early stages of the project! - -## [0.1.0] - 2018-09-03 -Initial release +- Unreleased: Check https://github.com/sqlparser-rs/sqlparser-rs/commits/main for undocumented changes. +- `0.56.0`: [changelog/0.56.0.md](changelog/0.56.0.md) +- `0.55.0`: [changelog/0.55.0.md](changelog/0.55.0.md) +- `0.54.0`: [changelog/0.54.0.md](changelog/0.54.0.md) +- `0.53.0`: [changelog/0.53.0.md](changelog/0.53.0.md) +- `0.52.0`: [changelog/0.52.0.md](changelog/0.52.0.md) +- `0.51.0` and earlier: [changelog/0.51.0-pre.md](changelog/0.51.0-pre.md) diff --git a/Cargo.toml b/Cargo.toml index 99546465b..d746775e4 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -18,12 +18,12 @@ [package] name = "sqlparser" description = "Extensible SQL Lexer and Parser with support for ANSI SQL:2011" -version = "0.51.0" -authors = ["Andy Grove "] -homepage = "https://github.com/sqlparser-rs/sqlparser-rs" +version = "0.56.0" +authors = ["Apache DataFusion "] +homepage = "https://github.com/apache/datafusion-sqlparser-rs" documentation = "https://docs.rs/sqlparser/" keywords = ["ansi", "sql", "lexer", "parser"] -repository = "https://github.com/sqlparser-rs/sqlparser-rs" +repository = "https://github.com/apache/datafusion-sqlparser-rs" license = "Apache-2.0" include = [ "src/**/*.rs", @@ -37,8 +37,9 @@ name = "sqlparser" path = "src/lib.rs" [features] -default = ["std"] +default = ["std", "recursive-protection"] std = [] +recursive-protection = ["std", "recursive"] # Enable JSON output in the `cli` example: json_example = ["serde_json", "serde"] visitor = ["sqlparser_derive"] @@ -46,24 +47,20 @@ visitor = ["sqlparser_derive"] [dependencies] bigdecimal = { version = "0.4.1", features = ["serde"], optional = true } log = "0.4" -serde = { version = "1.0", features = ["derive"], optional = true } +recursive = { version = "0.1.1", optional = true} + +serde = { version = "1.0", default-features = false, features = ["derive", "alloc"], optional = true } # serde_json is only used in examples/cli, but we have to put it outside # of dev-dependencies because of # https://github.com/rust-lang/cargo/issues/1596 serde_json = { version = "1.0", optional = true } -sqlparser_derive = { version = "0.2.0", path = "derive", optional = true } +sqlparser_derive = { version = "0.3.0", path = "derive", optional = true } [dev-dependencies] simple_logger = "5.0" matches = "0.1" pretty_assertions = "1" -[package.metadata.release] -# Instruct `cargo release` to not run `cargo publish` locally: -# 
https://github.com/sunng87/cargo-release/blob/master/docs/reference.md#config-fields -# See docs/releasing.md for details. -publish = false - [package.metadata.docs.rs] # Document these features on docs.rs features = ["serde", "visitor"] diff --git a/README.md b/README.md index 3226b9549..6acfbcef7 100644 --- a/README.md +++ b/README.md @@ -1,3 +1,22 @@ + + # Extensible SQL Lexer and Parser for Rust [![License](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](https://opensource.org/licenses/Apache-2.0) @@ -34,7 +53,7 @@ println!("AST: {:?}", ast); This outputs ```rust -AST: [Query(Query { ctes: [], body: Select(Select { distinct: false, projection: [UnnamedExpr(Identifier("a")), UnnamedExpr(Identifier("b")), UnnamedExpr(Value(Long(123))), UnnamedExpr(Function(Function { name: ObjectName(["myfunc"]), args: [Identifier("b")], filter: None, over: None, distinct: false }))], from: [TableWithJoins { relation: Table { name: ObjectName(["table_1"]), alias: None, args: [], with_hints: [] }, joins: [] }], selection: Some(BinaryOp { left: BinaryOp { left: Identifier("a"), op: Gt, right: Identifier("b") }, op: And, right: BinaryOp { left: Identifier("b"), op: Lt, right: Value(Long(100)) } }), group_by: [], having: None }), order_by: [OrderByExpr { expr: Identifier("a"), asc: Some(false) }, OrderByExpr { expr: Identifier("b"), asc: None }], limit: None, offset: None, fetch: None })] +AST: [Query(Query { ctes: [], body: Select(Select { distinct: false, projection: [UnnamedExpr(Identifier("a")), UnnamedExpr(Identifier("b")), UnnamedExpr(Value(Long(123))), UnnamedExpr(Function(Function { name:ObjectName([Identifier(Ident { value: "myfunc", quote_style: None })]), args: [Identifier("b")], filter: None, over: None, distinct: false }))], from: [TableWithJoins { relation: Table { name: ObjectName([Identifier(Ident { value: "table_1", quote_style: None })]), alias: None, args: [], with_hints: [] }, joins: [] }], selection: Some(BinaryOp { left: BinaryOp { left: Identifier("a"), op: Gt, right: Identifier("b") }, op: And, right: BinaryOp { left: Identifier("b"), op: Lt, right: Value(Long(100)) } }), group_by: [], having: None }), order_by: [OrderByExpr { expr: Identifier("a"), asc: Some(false) }, OrderByExpr { expr: Identifier("b"), asc: None }], limit: None, offset: None, fetch: None })] ``` @@ -44,7 +63,7 @@ The following optional [crate features](https://doc.rust-lang.org/cargo/referen * `serde`: Adds [Serde](https://serde.rs/) support by implementing `Serialize` and `Deserialize` for all AST nodes. * `visitor`: Adds a `Visitor` capable of recursively walking the AST tree. - +* `recursive-protection` (enabled by default), uses [recursive](https://docs.rs/recursive/latest/recursive/) for stack overflow protection. ## Syntax vs Semantics @@ -81,15 +100,37 @@ similar semantics are represented with the same AST. We welcome PRs to fix such issues and distinguish different syntaxes in the AST. +## Source Locations (Work in Progress) + +This crate allows recovering source locations from AST nodes via the [Spanned] +trait, which can be used for advanced diagnostics tooling. Note that this +feature is a work in progress and many nodes report missing or inaccurate spans. +Please see [this ticket] for information on how to contribute missing +improvements. 
+ +[Spanned]: https://docs.rs/sqlparser/latest/sqlparser/ast/trait.Spanned.html +[this ticket]: https://github.com/apache/datafusion-sqlparser-rs/issues/1548 + +```rust +// Parse SQL +let ast = Parser::parse_sql(&GenericDialect, "SELECT A FROM B").unwrap(); + +// The source span can be retrieved with start and end locations +assert_eq!(ast[0].span(), Span { + start: Location::of(1, 1), + end: Location::of(1, 16), +}); +``` + ## SQL compliance SQL was first standardized in 1987, and revisions of the standard have been published regularly since. Most revisions have added significant new features to the language, and as a result no database claims to support the full breadth of features. This parser currently supports most of the SQL-92 syntax, plus some -syntax from newer versions that have been explicitly requested, plus some MSSQL, -PostgreSQL, and other dialect-specific syntax. Whenever possible, the [online -SQL:2016 grammar][sql-2016-grammar] is used to guide what syntax to accept. +syntax from newer versions that have been explicitly requested, plus various +other dialect-specific syntax. Whenever possible, the [online SQL:2016 +grammar][sql-2016-grammar] is used to guide what syntax to accept. Unfortunately, stating anything more specific about compliance is difficult. There is no publicly available test suite that can assess compliance @@ -115,7 +156,8 @@ $ cargo run --features json_example --example cli FILENAME.sql [--dialectname] ## Users This parser is currently being used by the [DataFusion] query engine, [LocustDB], -[Ballista], [GlueSQL], [Opteryx], [Polars], [PRQL], [Qrlew], [JumpWire], and [ParadeDB]. +[Ballista], [GlueSQL], [Opteryx], [Polars], [PRQL], [Qrlew], [JumpWire], [ParadeDB], [CipherStash Proxy], +and [GreptimeDB]. If your project is using sqlparser-rs feel free to make a PR to add it to this list. @@ -191,6 +233,21 @@ Our goal as maintainers is to facilitate the integration of various features from various contributors, but not to provide the implementations ourselves, as we simply don't have the resources. +### Benchmarking + +There are several micro benchmarks in the `sqlparser_bench` directory. +You can run them with: + +``` +git checkout main +cd sqlparser_bench +cargo bench -- --save-baseline main +git checkout +cargo bench -- --baseline main +``` + +By adding the `--save-baseline main` and `--baseline main` you can track the +progress of your improvements as you continue working on the feature branch. ## Licensing @@ -219,3 +276,5 @@ licensed as above, without any additional terms or conditions. [sql-standard]: https://en.wikipedia.org/wiki/ISO/IEC_9075 [`Dialect`]: https://docs.rs/sqlparser/latest/sqlparser/dialect/trait.Dialect.html [`GenericDialect`]: https://docs.rs/sqlparser/latest/sqlparser/dialect/struct.GenericDialect.html +[CipherStash Proxy]: https://github.com/cipherstash/proxy +[GreptimeDB]: https://github.com/GreptimeTeam/greptimedb diff --git a/changelog/0.51.0-pre.md b/changelog/0.51.0-pre.md new file mode 100644 index 000000000..18c7aefb6 --- /dev/null +++ b/changelog/0.51.0-pre.md @@ -0,0 +1,1188 @@ + + + +## [0.51.0] 2024-09-11 +As always, huge props to @iffyio @jmhain and @lovasoa for their help reviewing and merging PRs 🙏. +Without them this project would not be possible. 
+
+Reminder: we are in the final phases of moving sqlparser-rs into the Apache
+DataFusion project: https://github.com/sqlparser-rs/sqlparser-rs/issues/1294
+
+### Fixed
+* Fix Hive table comment should be after table column definitions (#1413) - Thanks @git-hulk
+* Fix stack overflow in `parse_subexpr` (#1410) - Thanks @eejbyfeldt
+* Fix `INTERVAL` parsing to support expressions and units via dialect (#1398) - Thanks @samuelcolvin
+* Fix identifiers starting with `$` should be regarded as a placeholder in SQLite (#1402) - Thanks @git-hulk
+
+### Added
+* Support for MSSQL table options (#1414) - Thanks @bombsimon
+* Test showing how negative constants are parsed (#1421) - Thanks @alamb
+* Support databricks dialect to dialect_from_str (#1416) - Thanks @milenkovicmalamb
+* Support `DROP|CLEAR|MATERIALIZE PROJECTION` syntax for ClickHouse (#1417) - Thanks @git-hulk
+* Support postgres `TRUNCATE` syntax (#1406) - Thanks @tobyhede
+* Support `CREATE INDEX` with clause (#1389) - Thanks @lewiszlw
+* Support parsing `CLUSTERED BY` clause for Hive (#1397) - Thanks @git-hulk
+* Support different `USE` statement syntaxes (#1387) - Thanks @kacpermuda
+* Support `ADD PROJECTION` syntax for ClickHouse (#1390) - Thanks @git-hulk
+
+### Changed
+* Implement common traits for OneOrManyWithParens (#1368) - Thanks @gstvg
+* Cleanup parse_statement (#1407) - Thanks @samuelcolvin
+* Allow `DateTimeField::Custom` with `EXTRACT` in Postgres (#1394) - Thanks @samuelcolvin
+
+
+## [0.50.0] 2024-08-15
+Again, huge props to @iffyio @jmhain and @lovasoa for their help reviewing and merging PRs 🙏.
+Without them this project would not be possible.
+
+Reminder: we are in the process of moving sqlparser to be governed as part of the Apache
+DataFusion project: https://github.com/sqlparser-rs/sqlparser-rs/issues/1294
+
+### Fixed
+* Clippy 1.80 warnings (#1357) - Thanks @lovasoa
+
+### Added
+* Support `STRUCT` and list of structs for DuckDB dialect (#1372) - Thanks @jayzhan211
+* Support custom lexical precedence in PostgreSQL dialect (#1379) - Thanks @samuelcolvin
+* Support `FREEZE|UNFREEZE PARTITION` syntax for ClickHouse (#1380) - Thanks @git-hulk
+* Support scale in `CEIL` and `FLOOR` functions (#1377) - Thanks @seve-martinez
+* Support `CREATE TRIGGER` and `DROP TRIGGER` statements (#1352) - Thanks @LucaCappelletti94
+* Support `EXTRACT` syntax for snowflake (#1374) - Thanks @seve-martinez
+* Support `ATTACH` / `DETACH PARTITION` for ClickHouse (#1362) - Thanks @git-hulk
+* Support Dialect level precedence, update Postgres Dialect to match Postgres (#1360) - Thanks @samuelcolvin
+* Support parsing empty map literal syntax for DuckDB and Generic dialects (#1361) - Thanks @goldmedal
+* Support `SETTINGS` clause for ClickHouse table-valued functions (#1358) - Thanks @Jesse-Bakker
+* Support `OPTIMIZE TABLE` statement for ClickHouse (#1359) - Thanks @git-hulk
+* Support `ON CLUSTER` in `ALTER TABLE` for ClickHouse (#1342) - Thanks @git-hulk
+* Support `GLOBAL` keyword before the join operator (#1353) - Thanks @git-hulk
+* Support postgres String Constants with Unicode Escapes (#1355) - Thanks @lovasoa
+* Support position with normal function call syntax for Snowflake (#1341) - Thanks @jmhain
+* Support `TABLE` keyword in `DESC|DESCRIBE|EXPLAIN TABLE` statement (#1351) - Thanks @git-hulk
+
+### Changed
+* Only require `DESCRIBE TABLE` for Snowflake and ClickHouse dialect (#1386) - Thanks @alamb
+* Rename (unreleased) `get_next_precedence_full` to `get_next_precedence_default` (#1378) - Thanks @samuelcolvin
+* Use local GitHub Action to replace setup-rust-action (#1371) - Thanks @git-hulk
+* Simplify arrow_cast tests (#1367) - Thanks @alamb
+* Update version of GitHub Actions (#1363) - Thanks @git-hulk
+* Make `Parser::maybe_parse` pub (#1364) - Thanks @Jesse-Bakker
+* Improve comments on `Dialect` (#1366) - Thanks @alamb
+
+
+## [0.49.0] 2024-07-23
+As always, huge props to @iffyio @jmhain and @lovasoa for their help reviewing and merging PRs!
+
+We are in the process of moving sqlparser to be governed as part of the Apache
+DataFusion project: https://github.com/sqlparser-rs/sqlparser-rs/issues/1294
+
+### Fixed
+* Fix quoted identifier regression edge-case with "from" in SELECT (#1346) - Thanks @alexander-beedie
+* Fix `AS` query clause should be after the create table options (#1339) - Thanks @git-hulk
+
+### Added
+
+* Support `MATERIALIZED`/`ALIAS`/`EPHEMERAL` default column options for ClickHouse (#1348) - Thanks @git-hulk
+* Support `()` as the `GROUP BY` nothing (#1347) - Thanks @git-hulk
+* Support Map literal syntax for DuckDB and Generic (#1344) - Thanks @goldmedal
+* Support subquery expression in `SET` expressions (#1343) - Thanks @iffyio
+* Support `WITH FILL` for ClickHouse (#1330) - Thanks @nickpresta
+* Support `PARTITION BY` for PostgreSQL in `CREATE TABLE` statement (#1338) - Thanks @git-hulk
+* Support table function `WITH ORDINALITY` modifier for Postgres (#1337) - Thanks @git-hulk
+
+
+## [0.48.0] 2024-07-09
+
+Huge shout out to @iffyio @jmhain and @lovasoa for their help reviewing and merging PRs!
+
+### Fixed
+* Fix CI error message in CI (#1333) - Thanks @alamb
+* Fix typo in sqlparser-derive README (#1310) - Thanks @leoyvens
+* Re-enable trailing commas in DCL (#1318) - Thanks @MohamedAbdeen21
+* Fix a few typos in comment lines (#1316) - Thanks @git-hulk
+* Fix Snowflake `SELECT * wildcard REPLACE ...
RENAME` order (#1321) - Thanks @alexander-beedie +* Allow semi-colon at the end of UNCACHE statement (#1320) - Thanks @LorrensP-2158466 +* Return errors, not panic, when integers fail to parse in `AUTO_INCREMENT` and `TOP` (#1305) - Thanks @eejbyfeldt + +### Added +* Support `OWNER TO` clause in Postgres (#1314) - Thanks @gainings +* Support `FORMAT` clause for ClickHouse (#1335) - Thanks @git-hulk +* Support `DROP PROCEDURE` statement (#1324) - Thanks @LorrensP-2158466 +* Support `PREWHERE` condition for ClickHouse dialect (#1328) - Thanks @git-hulk +* Support `SETTINGS` pairs for ClickHouse dialect (#1327) - Thanks @git-hulk +* Support `GROUP BY WITH MODIFIER` for ClickHouse dialect (#1323) - Thanks @git-hulk +* Support DuckDB Union datatype (#1322) - Thanks @gstvg +* Support parametric arguments to `FUNCTION` for ClickHouse dialect (#1315) - Thanks @git-hulk +* Support `TO` in `CREATE VIEW` clause for Clickhouse (#1313) - Thanks @Bidaya0 +* Support `UPDATE` statements that contain tuple assignments (#1317) - Thanks @lovasoa +* Support `BY NAME quantifier across all set ops (#1309) - Thanks @alexander-beedie +* Support SnowFlake exclusive `CREATE TABLE` options (#1233) - Thanks @balliegojr +* Support ClickHouse `CREATE TABLE` with primary key and parametrised table engine (#1289) - Thanks @7phs +* Support custom operators in Postgres (#1302) - Thanks @lovasoa +* Support ClickHouse data types (#1285) - Thanks @7phs + +### Changed +* Add stale PR github workflow (#1331) - Thanks @alamb +* Refine docs (#1326) - Thanks @emilsivervik +* Improve error messages with additional colons (#1319) - Thanks @LorrensP-2158466 +* Move Display fmt to struct for `CreateIndex` (#1307) - Thanks @philipcristiano +* Enhancing Trailing Comma Option (#1212) - Thanks @MohamedAbdeen21 +* Encapsulate `CreateTable`, `CreateIndex` into specific structs (#1291) - Thanks @philipcristiano + +## [0.47.0] 2024-06-01 + +### Fixed +* Re-support Postgres array slice syntax (#1290) - Thanks @jmhain +* Fix DoubleColon cast skipping AT TIME ZONE #1266 (#1267) - Thanks @dmitrybugakov +* Fix for values as table name in Databricks and generic (#1278) - Thanks @jmhain + +### Added +* Support `ASOF` joins in Snowflake (#1288) - Thanks @jmhain +* Support `CREATE VIEW` with fields and data types ClickHouse (#1292) - Thanks @7phs +* Support view comments for Snowflake (#1287) - Thanks @bombsimon +* Support dynamic pivot in Snowflake (#1280) - Thanks @jmhain +* Support `CREATE FUNCTION` for BigQuery, generalize AST (#1253) - Thanks @iffyio +* Support expression in `AT TIME ZONE` and fix precedence (#1272) - Thanks @jmhain +* Support `IGNORE/RESPECT NULLS` inside function argument list for Databricks (#1263) - Thanks @jmhain +* Support `SELECT * EXCEPT` Databricks (#1261) - Thanks @jmhain +* Support triple quoted strings (#1262) - Thanks @iffyio +* Support array indexing for duckdb (#1265) - Thanks @JichaoS +* Support multiple SET variables (#1252) - Thanks @iffyio +* Support `ANY_VALUE` `HAVING` clause (#1258) in BigQuery - Thanks @jmhain +* Support keywords as field names in BigQuery struct syntax (#1254) - Thanks @iffyio +* Support `GROUP_CONCAT()` in MySQL (#1256) - Thanks @jmhain +* Support lambda functions in Databricks (#1257) - Thanks @jmhain +* Add const generic peek_tokens method to parser (#1255) - Thanks @jmhain + + +## [0.46.0] 2024-05-03 + +### Changed +* Consolidate representation of function calls, remove `AggregateExpressionWithFilter`, `ArraySubquery`, `ListAgg` and `ArrayAgg` (#1247) - Thanks jmhain +* Extended 
dialect trait to support numeric prefixed identifiers (#1188) - Thanks @groobyming
+* Update simple_logger requirement from 4.0 to 5.0 (#1246) - Thanks @dependabot
+* Improve parsing of JSON accesses on Postgres and Snowflake (#1215) - Thanks @jmhain
+* Encapsulate Insert and Delete into specific structs (#1224) - Thanks @tisonkun
+* Preserve double colon casts (and simplify cast representations) (#1221) - Thanks @jmhain
+
+### Fixed
+* Fix redundant brackets in Hive/Snowflake/Redshift (#1229) - Thanks @yuval-illumex
+
+### Added
+* Support values without parens in Snowflake and DataBricks (#1249) - Thanks @HiranmayaGundu
+* Support WINDOW clause after QUALIFY when parsing (#1248) - Thanks @iffyio
+* Support `DECLARE` parsing for mssql (#1235) - Thanks @devanbenz
+* Support `?`-based jsonb operators in Postgres (#1242) - Thanks @ReppCodes
+* Support Struct datatype parsing for GenericDialect (#1241) - Thanks @duongcongtoai
+* Support BigQuery window function null treatment (#1239) - Thanks @iffyio
+* Support extend pivot operator - Thanks @iffyio
+* Support Databricks SQL dialect (#1220) - Thanks @jmhain
+* Support for MSSQL CONVERT styles (#1219) - Thanks @iffyio
+* Support window clause using named window in BigQuery (#1237) - Thanks @iffyio
+* Support for CONNECT BY (#1138) - Thanks @jmhain
+* Support object constants in Snowflake (#1223) - Thanks @jmhain
+* Support BigQuery MERGE syntax (#1217) - Thanks @iffyio
+* Support for MAX for NVARCHAR (#1232) - Thanks @bombsimon
+* Support fixed size list types (#1231) - @universalmind303
+* Support Snowflake MATCH_RECOGNIZE syntax (#1222) - Thanks @jmhain
+* Support quoted string backslash escaping (#1177) - Thanks @iffyio
+* Support Modify Column for MySQL dialect (#1216) - Thanks @KKould
+* Support `select * ilike` for snowflake (#1228) - Thanks @HiranmayaGundu
+* Support wildcard replace in duckdb and snowflake syntax (#1226) - Thanks @HiranmayaGundu
+
+
+
+## [0.45.0] 2024-04-12
+
+### Added
+* Support `DateTimeField` variants: `CUSTOM` and `WEEK(MONDAY)` (#1191) - Thanks @iffyio
+* Support for arbitrary expr in `MapAccessSyntax` (#1179) - Thanks @iffyio
+* Support unquoted hyphen in table/view declaration for BigQuery (#1178) - Thanks @iffyio
+* Support `CREATE/DROP SECRET` for duckdb dialect (#1208) - Thanks @JichaoS
+* Support MySQL `UNIQUE` table constraint (#1164) - Thanks @Nikita-str
+* Support trailing commas on Snowflake. (#1205) - Thanks @yassun7010
+* Support `[FIRST | AFTER column_name]` in `ALTER TABLE` for MySQL (#1180) - Thanks @xring
+* Support inline comment with hash syntax for BigQuery (#1192) - Thanks @iffyio
+* Support named windows in OVER (window_definition) clause (#1166) - Thanks @Nikita-str
+* Support PARALLEL ... and for ..ON NULL INPUT ... to `CREATE FUNCTION` (#1202) - Thanks @dimfeld
+* Support DuckDB functions named arguments with assignment operator (#1195) - Thanks @alamb
+* Support DuckDB struct literal syntax (#1194) - Thanks @gstvg
+* Support `$$` in generic dialect ...
(#1185)- Thanks @milenkovicm +* Support row_alias and col_aliases in `INSERT` statement for MySQL and Generic dialects (#1136) - Thanks @emin100 + +### Fixed +* Fix dollar quoted string tokenizer (#1193) - Thanks @ZacJW +* Do not allocate in `impl Display` for `DateTimeField` (#1209) - Thanks @alamb +* Fix parse `COPY INTO` stage names without parens for SnowFlake (#1187) - Thanks @mobuchowski +* Solve stack overflow on RecursionLimitExceeded on debug builds (#1171) - Thanks @Nikita-str +* Fix parsing of equality binary operator in function argument (#1182) - Thanks @jmhain +* Fix some comments (#1184) - Thanks @sunxunle + +### Changed +* Cleanup `CREATE FUNCTION` tests (#1203) - Thanks @alamb +* Parse `SUBSTRING FROM` syntax in all dialects, reflect change in the AST (#1173) - Thanks @lovasoa +* Add identifier quote style to Dialect trait (#1170) - Thanks @backkem + +## [0.44.0] 2024-03-02 + +### Added +* Support EXPLAIN / DESCR / DESCRIBE [FORMATTED | EXTENDED] (#1156) - Thanks @jonathanlehtoalamb +* Support ALTER TABLE ... SET LOCATION (#1154) - Thanks @jonathanlehto +* Support `ROW FORMAT DELIMITED` in Hive (#1155) - Thanks @jonathanlehto +* Support `SERDEPROPERTIES` for `CREATE TABLE` with Hive (#1152) - Thanks @jonathanlehto +* Support `EXECUTE ... USING` for Postgres (#1153) - Thanks @jonathanlehto +* Support Postgres style `CREATE FUNCTION` in GenericDialect (#1159) - Thanks @alamb +* Support `SET TBLPROPERTIES` (#1151) - Thanks @jonathanlehto +* Support `UNLOAD` statement (#1150) - Thanks @jonathanlehto +* Support `MATERIALIZED CTEs` (#1148) - Thanks @ReppCodes +* Support `DECLARE` syntax for snowflake and bigquery (#1122) - Thanks @iffyio +* Support `SELECT AS VALUE` and `SELECT AS STRUCT` for BigQuery (#1135) - Thanks @lustefaniak +* Support `(+)` outer join syntax (#1145) - Thanks @jmhain +* Support `INSERT INTO ... SELECT ... 
RETURNING`(#1132) - Thanks @lovasoa
+* Support DuckDB `INSTALL` and `LOAD` (#1127) - Thanks @universalmind303
+* Support `=` operator in function args (#1128) - Thanks @universalmind303
+* Support `CREATE VIEW IF NOT EXISTS` (#1118) - Thanks @7phs
+* Support `UPDATE FROM` for SQLite (further to #694) (#1117) - Thanks @ggaughan
+* Support optional `DELETE FROM` statement (#1120) - Thanks @iffyio
+* Support MySQL `SHOW STATUS` statement (#1119) - Thanks @invm
+
+### Fixed
+* Clean up nightly clippy lints (#1158) - Thanks @alamb
+* Handle escape, unicode, and hex in tokenize_escaped_single_quoted_string (#1146) - Thanks @JasonLi-cn
+* Fix panic while parsing `REPLACE` (#1140) - Thanks @jjbayer
+* Fix clippy warning from rust 1.76 (#1130) - Thanks @alamb
+* Fix release instructions (#1115) - Thanks @alamb
+
+### Changed
+* Add `parse_keyword_with_tokens` for parsing keyword and tokens combination (#1141) - Thanks @viirya
+* Add ParadeDB to list of known users (#1142) - Thanks @philippemnoel
+* Accept JSON_TABLE both as an unquoted table name and a table-valued function (#1134) - Thanks @lovasoa
+
+
+## [0.43.1] 2024-01-22
+### Changed
+* Fixed CHANGELOG
+
+
+## [0.43.0] 2024-01-22
+* NO CHANGES
+
+## [0.42.0] 2024-01-22
+
+### Added
+* Support for constraint `CHARACTERISTICS` clause (#1099) - Thanks @dimfeld
+* Support for unquoted hyphenated identifiers on bigquery (#1109) - Thanks @jmhain
+* Support `BigQuery` table and view options (#1061) - Thanks @iffyio
+* Support Postgres operators for the LIKE expression variants (#1096) - Thanks @gruuya
+* Support "timezone_region" and "timezone_abbr" for `EXTRACT` (and `DATE_PART`) (#1090) - Thanks @alexander-beedie
+* Support `JSONB` datatype (#1089) - Thanks @alexander-beedie
+* Support PostgreSQL `^@` starts-with operator (#1091) - Thanks @alexander-beedie
+* Support PostgreSQL Insert table aliases (#1069) (#1084) - Thanks @boydjohnson
+* Support PostgreSQL `CREATE EXTENSION` (#1078) - Thanks @tobyhede
+* Support PostgreSQL `ADD GENERATED` in `ALTER COLUMN` statements (#1079) - Thanks @tobyhede
+* Support SQLite column definitions with no type (#1075) - Thanks @takluyver
+* Support PostgreSQL `ENABLE` and `DISABLE` on `ALTER TABLE` (#1077) - Thanks @tobyhede
+* Support MySQL `FLUSH` statement (#1076) - Thanks @emin100
+* Support Mysql `REPLACE` statement and `PRIORITY` clause of `INSERT` (#1072) - Thanks @emin100
+
+### Fixed
+* Fix `:start` and `:end` json accesses on SnowFlake (#1110) - Thanks @jmhain
+* Fix array_agg wildcard behavior (#1093) - Thanks @ReppCodes
+* Error on dangling `NO` in `CREATE SEQUENCE` options (#1104) - Thanks @PartiallyTyped
+* Allow string values in `PRAGMA` commands (#1101) - Thanks @invm
+
+### Changed
+* Use `Option` for Min and Max vals in Seq Opts, fix alter col seq display (#1106) - Thanks @PartiallyTyped
+* Replace `AtomicUsize` with Cell in the recursion counter (#1098) - Thanks @wzzzzd
+* Add Qrlew as a user in README.md (#1107) - Thanks @ngrislain
+* Add APIs to reuse token buffers in `Tokenizer` (#1094) - Thanks @0rphon
+* Bump version of `sqlparser-derive` to 0.2.2 (#1083) - Thanks @alamb
+
+## [0.41.0] 2023-12-22
+
+### Added
+* Support `DEFERRED`, `IMMEDIATE`, and `EXCLUSIVE` in SQLite's `BEGIN TRANSACTION` command (#1067) - Thanks @takaebato
+* Support generated columns skipping `GENERATED ALWAYS` keywords (#1058) - Thanks @takluyver
+* Support `LOCK/UNLOCK TABLES` for MySQL (#1059) - Thanks @zzzdong
+* Support `JSON_TABLE` (#1062) - Thanks @lovasoa
+* Support `CALL` statements (#1063) - Thanks @lovasoa
+
+### Fixed +* fix rendering of SELECT TOP (#1070) for Snowflake - Thanks jmhain + +### Changed +* Improve documentation formatting (#1068) - Thanks @alamb +* Replace type_id() by trait method to allow wrapping dialects (#1065) - Thanks @jjbayer +* Document that comments aren't preserved for round trip (#1060) - Thanks @takluyver +* Update sqlparser-derive to use `syn 2.0` (#1040) - Thanks @serprex + +## [0.40.0] 2023-11-27 + +### Added +* Add `{pre,post}_visit_query` to `Visitor` (#1044) - Thanks @jmhain +* Support generated virtual columns with expression (#1051) - Thanks @takluyver +* Support PostgreSQL `END` (#1035) - Thanks @tobyhede +* Support `INSERT INTO ... DEFAULT VALUES ...` (#1036) - Thanks @CDThomas +* Support `RELEASE` and `ROLLBACK TO SAVEPOINT` (#1045) - Thanks @CDThomas +* Support `CONVERT` expressions (#1048) - Thanks @lovasoa +* Support `GLOBAL` and `SESSION` parts in `SHOW VARIABLES` for mysql and generic - Thanks @emin100 +* Support snowflake `PIVOT` on derived table factors (#1027) - Thanks @lustefaniak +* Support mssql json and xml extensions (#1043) - Thanks @lovasoa +* Support for `MAX` as a character length (#1038) - Thanks @lovasoa +* Support `IN ()` syntax of SQLite (#1028) - Thanks @alamb + +### Fixed +* Fix extra whitespace printed before `ON CONFLICT` (#1037) - Thanks @CDThomas + +### Changed +* Document round trip ability (#1052) - Thanks @alamb +* Add PRQL to list of users (#1031) - Thanks @vanillajonathan + +## [0.39.0] 2023-10-27 + +### Added +* Support for `LATERAL FLATTEN` and similar (#1026) - Thanks @lustefaniak +* Support BigQuery struct, array and bytes , int64, `float64` datatypes (#1003) - Thanks @iffyio +* Support numbers as placeholders in Snowflake (e.g. `:1)` (#1001) - Thanks @yuval-illumex +* Support date 'key' when using semi structured data (#1023) @yuval-illumex +* Support IGNORE|RESPECT NULLs clause in window functions (#998) - Thanks @yuval-illumex +* Support for single-quoted identifiers (#1021) - Thanks @lovasoa +* Support multiple PARTITION statements in ALTER TABLE ADD statement (#1011) - Thanks @bitemyapp +* Support "with" identifiers surrounded by backticks in GenericDialect (#1010) - Thanks @bitemyapp +* Support INSERT IGNORE in MySql and GenericDialect (#1004) - Thanks @emin100 +* Support SQLite `pragma` statement (#969) - Thanks @marhoily +* Support `position` as a column name (#1022) - Thanks @lustefaniak +* Support `FILTER` in Functions (for `OVER`) clause (#1007) - Thanks @lovasoa +* Support `SELECT * EXCEPT/REPLACE` syntax from ClickHouse (#1013) - Thanks @lustefaniak +* Support subquery as function arg w/o parens in Snowflake dialect (#996) - Thanks @jmhain +* Support `UNION DISTINCT BY NAME` syntax (#997) - Thanks @alexander-beedie +* Support mysql `RLIKE` and `REGEXP` binary operators (#1017) - Thanks @lovasoa +* Support bigquery `CAST AS x [STRING|DATE] FORMAT` syntax (#978) - Thanks @lustefaniak +* Support Snowflake/BigQuery `TRIM`. 
(#975) - Thanks @zdenal +* Support `CREATE [TEMPORARY|TEMP] VIEW [IF NOT EXISTS] `(#993) - Thanks @gabivlj +* Support for `CREATE VIEW … WITH NO SCHEMA BINDING` Redshift (#979) - Thanks @lustefaniak +* Support `UNPIVOT` and a fix for chained PIVOTs (#983) - @jmhain +* Support for `LIMIT BY` (#977) - Thanks @lustefaniak +* Support for mixed BigQuery table name quoting (#971) - Thanks @iffyio +* Support `DELETE` with `ORDER BY` and `LIMIT` (MySQL) (#992) - Thanks @ulrichsg +* Support `EXTRACT` for `DAYOFWEEK`, `DAYOFYEAR`, `ISOWEEK`, `TIME` (#980) - Thanks @lustefaniak +* Support `ATTACH DATABASE` (#989) - Thanks @lovasoa + +### Fixed +* Fix handling of `/~%` in Snowflake stage name (#1009) - Thanks @lustefaniak +* Fix column `COLLATE` not displayed (#1012) - Thanks @lustefaniak +* Fix for clippy 1.73 (#995) - Thanks @alamb + +### Changed +* Test to ensure `+ - * / %` binary operators work the same in all dialects (#1025) - Thanks @lustefaniak +* Improve documentation on Parser::consume_token and friends (#994) - Thanks @alamb +* Test that regexp can be used as an identifier in postgres (#1018) - Thanks @lovasoa +* Add docstrings for Dialects, update README (#1016) - Thanks @alamb +* Add JumpWire to users in README (#990) - Thanks @hexedpackets +* Add tests for clickhouse: `tokenize == as Token::DoubleEq` (#981)- Thanks @lustefaniak + +## [0.38.0] 2023-09-21 + +### Added + +* Support `==`operator for Sqlite (#970) - Thanks @marhoily +* Support mysql `PARTITION` to table selection (#959) - Thanks @chunshao90 +* Support `UNNEST` as a table factor for PostgreSQL (#968) @hexedpackets +* Support MySQL `UNIQUE KEY` syntax (#962) - Thanks @artorias1024 +* Support` `GROUP BY ALL` (#964) - @berkaysynnada +* Support multiple actions in one ALTER TABLE statement (#960) - Thanks @ForbesLindesay +* Add `--sqlite param` to CLI (#956) - Thanks @ddol + +### Fixed +* Fix Rust 1.72 clippy lints (#957) - Thanks @alamb + +### Changed +* Add missing token loc in parse err msg (#965) - Thanks @ding-young +* Change how `ANY` and `ALL` expressions are represented in AST (#963) - Thanks @SeanTroyUWO +* Show location info in parse errors (#958) - Thanks @MartinNowak +* Update release documentation (#954) - Thanks @alamb +* Break test and coverage test into separate jobs (#949) - Thanks @alamb + + +## [0.37.0] 2023-08-22 + +### Added +* Support `FOR SYSTEM_TIME AS OF` table time travel clause support, `visit_table_factor` to Visitor (#951) - Thanks @gruuya +* Support MySQL `auto_increment` offset in table definition (#950) - Thanks @ehoeve +* Test for mssql table name in square brackets (#952) - Thanks @lovasoa +* Support additional Postgres `CREATE INDEX` syntax (#943) - Thanks @ForbesLindesay +* Support `ALTER ROLE` syntax of PostgreSQL and MS SQL Server (#942) - Thanks @r4ntix +* Support table-level comments (#946) - Thanks @ehoeve +* Support `DROP TEMPORARY TABLE`, MySQL syntax (#916) - Thanks @liadgiladi +* Support posgres type alias (#933) - Thanks @Kikkon + +### Fixed +* Clarify the value of the special flag (#948) - Thanks @alamb +* Fix `SUBSTRING` from/to argument construction for mssql (#947) - Thanks @jmaness +* Fix: use Rust idiomatic capitalization for newly added DataType enums (#939) - Thanks @Kikkon +* Fix `BEGIN TRANSACTION` being serialized as `START TRANSACTION` (#935) - Thanks @lovasoa +* Fix parsing of datetime functions without parenthesis (#930) - Thanks @lovasoa + +## [0.36.1] 2023-07-19 + +### Fixed +* Fix parsing of identifiers after '%' symbol (#927) - Thanks @alamb + +## [0.36.0] 2023-07-19 
+ +### Added +* Support toggling "unescape" mode to retain original escaping (#870) - Thanks @canalun +* Support UNION (ALL) BY NAME syntax (#915) - Thanks @parkma99 +* Add doc comment for all operators (#917) - Thanks @izveigor +* Support `PGOverlap` operator (#912) - Thanks @izveigor +* Support multi args for unnest (#909) - Thanks @jayzhan211 +* Support `ALTER VIEW`, MySQL syntax (#907) - Thanks @liadgiladi +* Add DeltaLake keywords (#906) - Thanks @roeap + +### Fixed +* Parse JsonOperators correctly (#913) - Thanks @izveigor +* Fix dependabot by removing rust-toolchain toml (#922) - Thanks @alamb + +### Changed +* Clean up JSON operator tokenizing code (#923) - Thanks @alamb +* Upgrade bigdecimal to 0.4.1 (#921) - Thanks @jinlee0 +* Remove most instances of #[cfg(feature(bigdecimal))] in tests (#910) - Thanks @alamb + +## [0.35.0] 2023-06-23 + +### Added +* Support `CREATE PROCEDURE` of MSSQL (#900) - Thanks @delsehi +* Support DuckDB's `CREATE MACRO` statements (#897) - Thanks @MartinNowak +* Support for `CREATE TYPE (AS)` statements (#888) - Thanks @srijs +* Support `STRICT` tables of sqlite (#903) - Thanks @parkma99 + +### Fixed +* Fixed precedence of unary negation operator with operators: Mul, Div and Mod (#902) - Thanks @izveigor + +### Changed +* Add `support_group_by_expr` to `Dialect` trait (#896) - Thanks @jdye64 +* Update criterion requirement from `0.4` to `0.5` in `/sqlparser_bench` (#890) - Thanks @dependabot (!!) + +## [0.34.0] 2023-05-19 + +### Added + +* Support named window frames (#881) - Thanks @berkaysynnada, @mustafasrepo, and @ozankabak +* Support for `ORDER BY` clauses in aggregate functions (#882) - Thanks @mustafasrepo +* Support `DuckDB` dialect (#878) - Thanks @eitsupi +* Support optional `TABLE` keyword for `TRUNCATE TABLE` (#883) - Thanks @mobuchowski +* Support MySQL's `DIV` operator (#876) - Thanks @eitsupi +* Support Custom operators (#868) - Thanks @max-sixty +* Add `Parser::parse_multipart_identifier` (#860) - Thanks @Jefffrey +* Support for multiple expressions, order by in `ARRAY_AGG` (#879) - Thanks @mustafasrepo +* Support for query source in `COPY .. 
TO` statement (#858) - Thanks @aprimadi +* Support `DISTINCT ON (...)` (#852) - Thanks @aljazerzen +* Support multiple-table `DELETE` syntax (#855) - Thanks @AviRaboah +* Support `COPY INTO` in `SnowflakeDialect` (#841) - Thanks @pawel-big-lebowski +* Support identifiers beginning with digits in MySQL (#856) - Thanks @AviRaboah + +### Changed +* Include license file in published crate (#871) - Thanks @ankane +* Make `Expr::Interval` its own struct (#872) - Thanks @aprimadi +* Add dialect_from_str and improve Dialect documentation (#848) - Thanks @alamb +* Add clickhouse to example (#849) - Thanks @anglinb + +### Fixed +* Fix merge conflict (#885) - Thanks @alamb +* Fix tiny typo in custom_sql_parser.md (#864) - Thanks @okue +* Fix logical merge conflict (#865) - Thanks @alamb +* Test trailing commas (#859) - Thanks @aljazerzen + + +## [0.33.0] 2023-04-10 + +### Added +* Support for Mysql Backslash escapes (enabled by default) (#844) - Thanks @cobyge +* Support "UPDATE" statement in "WITH" subquery (#842) - Thanks @nicksrandall +* Support PIVOT table syntax (#836) - Thanks @pawel-big-lebowski +* Support CREATE/DROP STAGE for Snowflake (#833) - Thanks @pawel-big-lebowski +* Support Non-Latin characters (#840) - Thanks @mskrzypkows +* Support PostgreSQL: GENERATED { ALWAYS | BY DEFAULT } AS IDENTITY and GENERATED - Thanks @sam-mmm +* Support IF EXISTS in COMMENT statements (#831) - Thanks @pawel-big-lebowski +* Support snowflake alter table swap with (#825) - Thanks @pawel-big-lebowski + +### Changed +* Move tests from parser.rs to appropriate parse_XX tests (#845) - Thanks @alamb +* Correct typos in parser.rs (#838) - Thanks @felixonmars +* Improve documentation on verified_* methods (#828) - Thanks @alamb + +## [0.32.0] 2023-03-6 + +### Added +* Support ClickHouse `CREATE TABLE` with `ORDER BY` (#824) - Thanks @ankrgyl +* Support PostgreSQL exponentiation `^` operator (#813) - Thanks @michael-2956 +* Support `BIGNUMERIC` type in BigQuery (#811) - Thanks @togami2864 +* Support for optional trailing commas (#810) - Thanks @ankrgyl + +### Fixed +* Fix table alias parsing regression by backing out redshift column definition list (#827) - Thanks @alamb +* Fix typo in `ReplaceSelectElement` `colum_name` --> `column_name` (#822) - Thanks @togami2864 + +## [0.31.0] 2023-03-1 + +### Added +* Support raw string literals for BigQuery dialect (#812) - Thanks @togami2864 +* Support `SELECT * REPLACE AS ` in BigQuery dialect (#798) - Thanks @togami2864 +* Support byte string literals for BigQuery dialect (#802) - Thanks @togami2864 +* Support columns definition list for system information functions in RedShift dialect (#769) - Thanks @mskrzypkows +* Support `TRANSIENT` keyword in Snowflake dialect (#807) - Thanks @mobuchowski +* Support `JSON` keyword (#799) - Thanks @togami2864 +* Support MySQL Character Set Introducers (#788) - Thanks @mskrzypkows + +### Fixed +* Fix clippy error in ci (#803) - Thanks @togami2864 +* Handle offset in map key in BigQuery dialect (#797) - Thanks @Ziinc +* Fix a typo (precendence -> precedence) (#794) - Thanks @SARDONYX-sard +* use post_* visitors for mutable visits (#789) - Thanks @lovasoa + +### Changed +* Add another known user (#787) - Thanks @joocer + +## [0.30.0] 2023-01-02 + +### Added +* Support `RENAME` for wildcard `SELECTs` (#784) - Thanks @Jefffrey +* Add a mutable visitor (#782) - Thanks @lovasoa + +### Changed +* Allow parsing of mysql empty row inserts (#783) - Thanks @Jefffrey + +### Fixed +* Fix logical conflict (#785) - Thanks @alamb + +## [0.29.0] 
2022-12-29 + +### Highlights +* Partial source location tracking: see #710 +* Recursion limit to prevent stack overflows: #764 +* AST visitor: #765 + +### Added +feat: dollar-quoted strings support (#772) - Thanks @vasilev-alex +* Add derive based AST visitor (#765) - Thanks @tustvold +* Support `ALTER INDEX {INDEX_NAME} RENAME TO {NEW_INDEX_NAME}` (#767) - Thanks @devgony +* Support `CREATE TABLE ON UPDATE ` Function (#685) - Thanks @CEOJINSUNG +* Support `CREATE FUNCTION` definition with `$$` (#755)- Thanks @zidaye +* Add location tracking in the tokenizer and parser (#710) - Thanks @ankrgyl +* Add configurable recursion limit to parser, to protect against stackoverflows (#764) - Thanks @alamb +* Support parsing scientific notation (such as `10e5`) (#768) - Thanks @Jefffrey +* Support `DROP FUNCTION` syntax (#752) - Thanks @zidaye +* Support json operators `@>` `<@`, `@?` and `@@` - Thanks @audunska +* Support the type key (#750)- Thanks @yuval-illumex + +### Changed +* Improve docs and add examples for Visitor (#778) - Thanks @alamb +* Add a backlink from sqlparse_derive to sqlparser and publishing instructions (#779) - Thanks @alamb +* Document new features, update authors (#776) - Thanks @alamb +* Improve Readme (#774) - Thanks @alamb +* Standardize comments on parsing optional keywords (#773) - Thanks @alamb +* Enable grouping sets parsing for `GenericDialect` (#771) - Thanks @Jefffrey +* Generalize conflict target (#762) - Thanks @audunska +* Generalize locking clause (#759) - Thanks @audunska +* Add negative test for except clause on wildcards (#746)- Thanks @alamb +* Add `NANOSECOND` keyword (#749)- Thanks @waitingkuo + +### Fixed +* ParserError if nested explain (#781) - Thanks @Jefffrey +* Fix cargo docs / warnings and add CI check (#777) - Thanks @alamb +* unnest join constraint with alias parsing for BigQuery dialect (#732)- Thanks @Ziinc + +## [0.28.0] 2022-12-05 + +### Added +* Support for `EXCEPT` clause on wildcards (#745) - Thanks @AugustoFKL +* Support `CREATE FUNCTION` Postgres options (#722) - Thanks @wangrunji0408 +* Support `CREATE TABLE x AS TABLE y` (#704) - Thanks @sarahyurick +* Support MySQL `ROWS` syntax for `VALUES` (#737) - Thanks @aljazerzen +* Support `WHERE` condition for `UPDATE ON CONFLICT` (#735) - Thanks @zidaye +* Support `CLUSTER BY` when creating Materialized View (#736) - Thanks @yuval-illumex +* Support nested comments (#726) - Thanks @yang-han +* Support `USING` method when creating indexes. (#731) - Thanks @step-baby and @yangjiaxin01 +* Support `SEMI`/`ANTI` `JOIN` syntax (#723) - Thanks @mingmwang +* Support `EXCLUDE` support for snowflake and generic dialect (#721) - Thanks @AugustoFKL +* Support `MATCH AGAINST` (#708) - Thanks @AugustoFKL +* Support `IF NOT EXISTS` in `ALTER TABLE ADD COLUMN` (#707) - Thanks @AugustoFKL +* Support `SET TIME ZONE ` (#727) - Thanks @waitingkuo +* Support `UPDATE ... 
FROM ( subquery )` (#694) - Thanks @unvalley + +### Changed +* Add `Parser::index()` method to get current parsing index (#728) - Thanks @neverchanje +* Add `COMPRESSION` as keyword (#720)- Thanks @AugustoFKL +* Derive `PartialOrd`, `Ord`, and `Copy` whenever possible (#717) - Thanks @AugustoFKL +* Fixed `INTERVAL` parsing logic and precedence (#705) - Thanks @sarahyurick +* Support updating multiple column names whose names are the same as(#725) - Thanks @step-baby + +### Fixed +* Clean up some redundant code in parser (#741) - Thanks @alamb +* Fix logical conflict - Thanks @alamb +* Cleanup to avoid is_ok() (#740) - Thanks @alamb +* Cleanup to avoid using unreachable! when parsing semi/anti join (#738) - Thanks @alamb +* Add an example to docs to clarify semantic analysis (#739) - Thanks @alamb +* Add information about parting semantic logic to README.md (#724) - Thanks @AugustoFKL +* Logical conflicts - Thanks @alamb +* Tiny typo in docs (#709) - Thanks @pmcgee69 + + +## [0.27.0] 2022-11-11 + +### Added +* Support `ON CONFLICT` and `RETURNING` in `UPDATE` statement (#666) - Thanks @main and @gamife +* Support `FULLTEXT` option on create table for MySQL and Generic dialects (#702) - Thanks @AugustoFKL +* Support `ARRAY_AGG` for Bigquery and Snowflake (#662) - Thanks @SuperBo +* Support DISTINCT for SetOperator (#689) - Thanks @unvalley +* Support the ARRAY type of Snowflake (#699) - Thanks @yuval-illumex +* Support create sequence with options INCREMENT, MINVALUE, MAXVALUE, START etc. (#681) - Thanks @sam-mmm +* Support `:` operator for semi-structured data in Snowflake(#693) - Thanks @yuval-illumex +* Support ALTER TABLE DROP PRIMARY KEY (#682) - Thanks @ding-young +* Support `NUMERIC` and `DEC` ANSI data types (#695) - Thanks @AugustoFKL +* Support modifiers for Custom Datatype (#680) - Thanks @sunng87 + +### Changed +* Add precision for TIME, DATETIME, and TIMESTAMP data types (#701) - Thanks @AugustoFKL +* add Date keyword (#691) - Thanks @sarahyurick +* Update simple_logger requirement from 2.1 to 4.0 - Thanks @dependabot + +### Fixed +* Fix broken DataFusion link (#703) - Thanks @jmg-duarte +* Add MySql, BigQuery to all dialects tests, fixed bugs (#697) - Thanks @omer-shtivi + + +## [0.26.0] 2022-10-19 + +### Added +* Support MySQL table option `{INDEX | KEY}` in CREATE TABLE definiton (#665) - Thanks @AugustoFKL +* Support `CREATE [ { TEMPORARY | TEMP } ] SEQUENCE [ IF NOT EXISTS ] ` (#678) - Thanks @sam-mmm +* Support `DROP SEQUENCE` statement (#673) - Thanks @sam-mmm +* Support for ANSI types `CHARACTER LARGE OBJECT[(p)]` and `CHAR LARGE OBJECT[(p)]` (#671) - Thanks @AugustoFKL +* Support `[CACHE|UNCACHE] TABLE` (#670) - Thanks @francis-du +* Support `CEIL(expr TO DateTimeField)` and `FLOOR(expr TO DateTimeField)` - Thanks @sarahyurick +* Support all ansii character string types, (#648) - Thanks @AugustoFKL + +### Changed +* Support expressions inside window frames (#655) - Thanks @mustafasrepo and @ozankabak +* Support unit on char length units for small character strings (#663) - Thanks @AugustoFKL +* Replace booleans on `SET ROLE` with a single enum. 
(#664) - Thanks @AugustoFKL +* Replace `Option`s with enum for `DECIMAL` precision (#654) - Thanks @AugustoFKL + +## [0.25.0] 2022-10-03 + +### Added + +* Support `AUTHORIZATION` clause in `CREATE SCHEMA` statements (#641) - Thanks @AugustoFKL +* Support optional precision for `CLOB` and `BLOB` (#639) - Thanks @AugustoFKL +* Support optional precision in `VARBINARY` and `BINARY` (#637) - Thanks @AugustoFKL + + +### Changed +* `TIMESTAMP` and `TIME` parsing preserve zone information (#646) - Thanks @AugustoFKL + +### Fixed + +* Correct order of arguments when parsing `LIMIT x,y` , restrict to `MySql` and `Generic` dialects - Thanks @AugustoFKL + + +## [0.24.0] 2022-09-29 + +### Added + +* Support `MILLENNIUM` (2 Ns) (#633) - Thanks @sarahyurick +* Support `MEDIUMINT` (#630) - Thanks @AugustoFKL +* Support `DOUBLE PRECISION` (#629) - Thanks @AugustoFKL +* Support precision in `CLOB`, `BINARY`, `VARBINARY`, `BLOB` data type (#618) - Thanks @ding-young +* Support `CREATE ROLE` and `DROP ROLE` (#598) - Thanks @blx +* Support full range of sqlite prepared statement placeholders (#604) - Thanks @lovasoa +* Support National string literal with lower case `n` (#612) - Thanks @mskrzypkows +* Support SHOW FUNCTIONS (#620) - Thanks @joocer +* Support `set time zone to 'some-timezone'` (#617) - Thanks @waitingkuo + +### Changed +* Move `Value::Interval` to `Expr::Interval` (#609) - Thanks @ding-young +* Update `criterion` dev-requirement from 0.3 to 0.4 in /sqlparser_bench (#611) - Thanks @dependabot +* Box `Query` in `Cte` (#572) - Thanks @MazterQyou + +### Other +* Disambiguate CREATE ROLE ... USER and GROUP (#628) - Thanks @alamb +* Add test for optional WITH in CREATE ROLE (#627) - Thanks @alamb + +## [0.23.0] 2022-09-08 + +### Added +* Add support for aggregate expressions with filters (#585) - Thanks @andygrove +* Support `LOCALTIME` and `LOCALTIMESTAMP` time functions (#592) - Thanks @MazterQyou + +## [0.22.0] 2022-08-26 + +### Added +* Support `OVERLAY` expressions (#594) - Thanks @ayushg +* Support `WITH TIMEZONE` and `WITHOUT TIMEZONE` when parsing `TIMESTAMP` expressions (#589) - Thanks @waitingkuo +* Add ability for dialects to override prefix, infix, and statement parsing (#581) - Thanks @andygrove + +## [0.21.0] 2022-08-18 + +### Added +* Support `IS [NOT] TRUE`, `IS [NOT] FALSE`, and `IS [NOT] UNKNOWN` - Thanks (#583) @sarahyurick +* Support `SIMILAR TO` syntax (#569) - Thanks @ayushdg +* Support `SHOW COLLATION` (#564) - Thanks @MazterQyou +* Support `SHOW TABLES` (#563) - Thanks @MazterQyou +* Support `SET NAMES literal [COLLATE literal]` (#558) - Thanks @ovr +* Support trailing commas (#557) in `BigQuery` dialect - Thanks @komukomo +* Support `USE ` (#565) - Thanks @MazterQyou +* Support `SHOW COLUMNS FROM tbl FROM db` (#562) - Thanks @MazterQyou +* Support `SHOW VARIABLES` for `MySQL` dialect (#559) - Thanks @ovr and @vasilev-alex + +### Changed +* Support arbitrary expression in `SET` statement (#574) - Thanks @ovr and @vasilev-alex +* Parse LIKE patterns as Expr not Value (#579) - Thanks @andygrove +* Update Ballista link in README (#576) - Thanks @sanxiyn +* Parse `TRIM` from with optional expr and `FROM` expr (#573) - Thanks @ayushdg +* Support PostgreSQL array subquery constructor (#566) - Thanks @MazterQyou +* Clarify contribution licensing (#570) - Thanks @alamb +* Update for new clippy ints (#571) - Thanks @alamb +* Change `Like` and `ILike` to `Expr` variants, allow escape char (#569) - Thanks @ayushdg +* Parse special keywords as functions (`current_user`, `user`, etc) 
(#561) - Thanks @ovr +* Support expressions in `LIMIT`/`OFFSET` (#567) - Thanks @MazterQyou + +## [0.20.0] 2022-08-05 + +### Added +* Support custom `OPERATOR` postgres syntax (#548) - Thanks @iskakaushik +* Support `SAFE_CAST` for BigQuery (#552) - Thanks @togami2864 + +### Changed +* Added SECURITY.md (#546) - Thanks @JamieSlome +* Allow `>>` and `<<` binary operators in Generic dialect (#553) - Thanks @ovr +* Allow `NestedJoin` with an alias (#551) - Thanks @waitingkuo + +## [0.19.0] 2022-07-28 + +### Added + +* Support `ON CLUSTER` for `CREATE TABLE` statement (ClickHouse DDL) (#527) - Thanks @andyrichardson +* Support empty `ARRAY` literals (#532) - Thanks @bitemyapp +* Support `AT TIME ZONE` clause (#539) - Thanks @bitemyapp +* Support `USING` clause and table aliases in `DELETE` (#541) - Thanks @mobuchowski +* Support `SHOW CREATE VIEW` statement (#536) - Thanks @mrob95 +* Support `CLONE` clause in `CREATE TABLE` statements (#542) - Thanks @mobuchowski +* Support `WITH OFFSET Alias` in table references (#528) - Thanks @sivchari +* Support double quoted (`"`) literal strings: (#530) - Thanks @komukomo +* Support `ON UPDATE` clause on column definitions in `CREATE TABLE` statements (#522) - Thanks @frolovdev + + +### Changed: + +* `Box`ed `Query` body to save stack space (#540) - Thanks @5tan +* Distinguish between `INT` and `INTEGER` types (#525) - Thanks @frolovdev +* Parse `WHERE NOT EXISTS` as `Expr::Exists` rather than `Expr::UnaryOp` for consistency (#523) - Thanks @frolovdev +* Support `Expr` instead of `String` for argument to `INTERVAL` (#517) - Thanks @togami2864 + +### Fixed: + +* Report characters instead of bytes in error messages (#529) - Thanks @michael-2956 + + +## [0.18.0] 2022-06-06 + +### Added + +* Support `CLOSE` (cursors) (#515) - Thanks @ovr +* Support `DECLARE` (cursors) (#509) - Thanks @ovr +* Support `FETCH` (cursors) (#510) - Thanks @ovr +* Support `DATETIME` keyword (#512) - Thanks @komukomo +* Support `UNNEST` as a table factor (#493) - Thanks @sivchari +* Support `CREATE FUNCTION` (hive flavor) (#496) - Thanks @mobuchowski +* Support placeholders (`$` or `?`) in `LIMIT` clause (#494) - Thanks @step-baby +* Support escaped string literals (PostgreSQL) (#502) - Thanks @ovr +* Support `IS TRUE` and `IS FALSE` (#499) - Thanks @ovr +* Support `DISCARD [ALL | PLANS | SEQUENCES | TEMPORARY | TEMP]` (#500) - Thanks @gandronchik +* Support `array<..>` HIVE data types (#491) - Thanks @mobuchowski +* Support `SET` values that begin with `-` #495 - Thanks @mobuchowski +* Support unicode whitespace (#482) - Thanks @alexsatori +* Support `BigQuery` dialect (#490) - Thanks @komukomo + +### Changed: +* Add docs for MapAccess (#489) - Thanks @alamb +* Rename `ArrayIndex::indexs` to `ArrayIndex::indexes` (#492) - Thanks @alamb + +### Fixed: +* Fix escaping of trailing quote in quoted identifiers (#505) - Thanks @razzolini-qpq +* Fix parsing of `COLLATE` after parentheses in expressions (#507) - Thanks @razzolini-qpq +* Distinguish tables and nullary functions in `FROM` (#506) - Thanks @razzolini-qpq +* Fix `MERGE INTO` semicolon handling (#508) - Thanks @mskrzypkows + +## [0.17.0] 2022-05-09 + +### Added + +* Support `#` as first character in field name for `RedShift` dialect (#485) - Thanks @yuval-illumex +* Support for postgres composite types (#466) - Thanks @poonai +* Support `TABLE` keyword with SELECT INTO (#487) - Thanks @MazterQyou +* Support `ANY`/`ALL` operators (#477) - Thanks @ovr +* Support `ArrayIndex` in `GenericDialect` (#480) - Thanks @ovr +* Support 
`Redshift` dialect, handle square brackets properly (#471) - Thanks @mskrzypkows +* Support `KILL` statement (#479) - Thanks @ovr +* Support `QUALIFY` clause on `SELECT` for `Snowflake` dialect (#465) - Thanks @mobuchowski +* Support `POSITION(x IN y)` function syntax (#463) @yuval-illumex +* Support `global`,`local`, `on commit` for `create temporary table` (#456) - Thanks @gandronchik +* Support `NVARCHAR` data type (#462) - Thanks @yuval-illumex +* Support for postgres json operators `->`, `->>`, `#>`, and `#>>` (#458) - Thanks @poonai +* Support `SET ROLE` statement (#455) - Thanks @slhmy + +### Changed: +* Improve docstrings for `KILL` statement (#481) - Thanks @alamb +* Add negative tests for `POSITION` (#469) - Thanks @alamb +* Add negative tests for `IN` parsing (#468) - Thanks @alamb +* Suppport table names (as well as subqueries) as source in `MERGE` statements (#483) - Thanks @mskrzypkows + + +### Fixed: +* `INTO` keyword is optional for `INSERT`, `MERGE` (#473) - Thanks @mobuchowski +* Support `IS TRUE` and `IS FALSE` expressions in boolean filter (#474) - Thanks @yuval-illumex +* Support fully qualified object names in `SET VARIABLE` (#484) - Thanks mobuchowski + +## [0.16.0] 2022-04-03 + +### Added + +* Support `WEEK` keyword in `EXTRACT` (#436) - Thanks @Ted-Jiang +* Support `MERGE` statement (#430) - Thanks @mobuchowski +* Support `SAVEPOINT` statement (#438) - Thanks @poonai +* Support `TO` clause in `COPY` (#441) - Thanks @matthewmturner +* Support `CREATE DATABASE` statement (#451) - Thanks @matthewmturner +* Support `FROM` clause in `UPDATE` statement (#450) - Thanks @slhmy +* Support additional `COPY` options (#446) - Thanks @wangrunji0408 + +### Fixed: +* Bug in array / map access parsing (#433) - Thanks @monadbobo + +## [0.15.0] 2022-03-07 + +### Added + +* Support for ClickHouse array types (e.g. [1,2,3]) (#429) - Thanks @monadbobo +* Support for `unsigned tinyint`, `unsigned int`, `unsigned smallint` and `unsigned bigint` datatypes (#428) - Thanks @watarukura +* Support additional keywords for `EXTRACT` (#427) - Thanks @mobuchowski +* Support IN UNNEST(expression) (#426) - Thanks @komukomo +* Support COLLATION keywork on CREATE TABLE (#424) - Thanks @watarukura +* Support FOR UPDATE/FOR SHARE clause (#418) - Thanks @gamife +* Support prepared statement placeholder arg `?` and `$` (#420) - Thanks @gamife +* Support array expressions such as `ARRAY[1,2]` , `foo[1]` and `INT[][]` (#419) - Thanks @gamife + +### Changed: +* remove Travis CI (#421) - Thanks @efx + +### Fixed: +* Allow `array` to be used as a function name again (#432) - @alamb +* Update docstring reference to `Query` (#423) - Thanks @max-sixty + +## [0.14.0] 2022-02-09 + +### Added +* Support `CURRENT_TIMESTAMP`, `CURRENT_TIME`, and `CURRENT_DATE` (#391) - Thanks @yuval-illumex +* SUPPORT `SUPER` keyword (#387) - Thanks @flaneur2020 +* Support differing orders of `OFFSET` `LIMIT` as well as `LIMIT` `OFFSET` (#413) - Thanks @yuval-illumex +* Support for `FROM `, `DELIMITER`, and `CSV HEADER` options for `COPY` command (#409) - Thanks @poonai +* Support `CHARSET` and `ENGINE` clauses on `CREATE TABLE` for mysql (#392) - Thanks @antialize +* Support `DROP CONSTRAINT [ IF EXISTS ] [ CASCADE ]` (#396) - Thanks @tvallotton +* Support parsing tuples and add `Expr::Tuple` (#414) - @alamb +* Support MySQL style `LIMIT X, Y` (#415) - @alamb +* Support `SESSION TRANSACTION` and `TRANSACTION SNAPSHOT`. 
(#379) - Thanks @poonai +* Support `ALTER COLUMN` and `RENAME CONSTRAINT` (#381) - Thanks @zhamlin +* Support for Map access, add ClickHouse dialect (#382) - Thanks @monadbobo + +### Changed +* Restrict where wildcard (`*`) can appear, add to `FunctionArgExpr` remove `Expr::[Qualified]Wildcard`, (#378) - Thanks @panarch +* Update simple_logger requirement from 1.9 to 2.1 (#403) +* export all methods of parser (#397) - Thanks @neverchanje! +* Clarify maintenance status on README (#416) - @alamb + +### Fixed +* Fix new clippy errors (#412) - @alamb +* Fix panic with `GRANT/REVOKE` in `CONNECT`, `CREATE`, `EXECUTE` or `TEMPORARY` - Thanks @evgenyx00 +* Handle double quotes inside quoted identifiers correctly (#411) - Thanks @Marwes +* Handle mysql backslash escaping (#373) - Thanks @vasilev-alex + +## [0.13.0] 2021-12-10 + +### Added +* Add ALTER TABLE CHANGE COLUMN, extend the UPDATE statement with ON clause (#375) - Thanks @0xA537FD! +* Add support for GROUPIING SETS, ROLLUP and CUBE - Thanks @Jimexist! +* Add basic support for GRANT and REVOKE (#365) - Thanks @blx! + +### Changed +* Use Rust 2021 edition (#368) - Thanks @Jimexist! + +### Fixed +* Fix clippy errors (#367, #374) - Thanks @Jimexist! + + +## [0.12.0] 2021-10-14 + +### Added +* Add support for [NOT] IS DISTINCT FROM (#306) - @Dandandan + +### Changed +* Move the keywords module - Thanks @koushiro! + + +## [0.11.0] 2021-09-24 + +### Added +* Support minimum display width for integer data types (#337) Thanks @vasilev-alex! +* Add logical XOR operator (#357) - Thanks @xzmrdltl! +* Support DESCRIBE table_name (#340) - Thanks @ovr! +* Support SHOW CREATE TABLE|EVENT|FUNCTION (#338) - Thanks @ovr! +* Add referential actions to TableConstraint foreign key (#306) - Thanks @joshwd36! + +### Changed +* Enable map access for numbers, multiple nesting levels (#356) - Thanks @Igosuki! +* Rename Token::Mult to Token::Mul (#353) - Thanks @koushiro! +* Use derive(Default) for HiveFormat (#348) - Thanks @koushiro! +* Improve tokenizer error (#347) - Thanks @koushiro! +* Eliminate redundant string copy in Tokenizer (#343) - Thanks @koushiro! +* Update bigdecimal requirement from 0.2 to 0.3 dependencies (#341) +* Support parsing hexadecimal literals that start with `0x` (#324) - Thanks @TheSchemm! + + +## [0.10.0] 2021-08-23 + +### Added +* Support for `no_std` (#332) - Thanks @koushiro! +* Postgres regular expression operators (`~`, `~*`, `!~`, `!~*`) (#328) - Thanks @b41sh! +* tinyint (#320) - Thanks @sundy-li +* ILIKE (#300) - Thanks @maxcountryman! +* TRIM syntax (#331, #334) - Thanks ever0de + + +### Fixed +* Return error instead of panic (#316) - Thanks @BohuTANG! + +### Changed +- Rename `Modulus` to `Modulo` (#335) - Thanks @RGRAVITY817! +- Update links to reflect repository move to `sqlparser-rs` GitHub org (#333) - Thanks @andygrove +- Add default value for `WindowFrame` (#313) - Thanks @Jimexist! + +## [0.9.0] 2021-03-21 + +### Added +* Add support for `TRY_CAST` syntax (#299) - Thanks @seddonm1! + +## [0.8.0] 2021-02-20 + +### Added +* Introduce Hive QL dialect `HiveDialect` and syntax (#235) - Thanks @hntd187! +* Add `SUBSTRING(col [FROM ] [FOR ])` syntax (#293) +* Support parsing floats without leading digits `.01` (#294) +* Support parsing multiple show variables (#290) - Thanks @francis-du! +* Support SQLite `INSERT OR [..]` syntax (#281) - Thanks @zhangli-pear! 
+ +## [0.7.0] 2020-12-28 + +### Changed +- Change the MySQL dialect to support `` `identifiers` `` quoted with backticks instead of the standard `"double-quoted"` identifiers (#247) - thanks @mashuai! +- Update bigdecimal requirement from 0.1 to 0.2 (#268) + +### Added +- Enable dialect-specific behaviours in the parser (`dialect_of!()`) (#254) - thanks @eyalleshem! +- Support named arguments in function invocations (`ARG_NAME => val`) (#250) - thanks @eyalleshem! +- Support `TABLE()` functions in `FROM` (#253) - thanks @eyalleshem! +- Support Snowflake's single-line comments starting with '#' or '//' (#264) - thanks @eyalleshem! +- Support PostgreSQL `PREPARE`, `EXECUTE`, and `DEALLOCATE` (#243) - thanks @silathdiir! +- Support PostgreSQL math operators (#267) - thanks @alex-dukhno! +- Add SQLite dialect (#248) - thanks @mashuai! +- Add Snowflake dialect (#259) - thanks @eyalleshem! +- Support for Recursive CTEs - thanks @rhanqtl! +- Support `FROM (table_name) alias` syntax - thanks @eyalleshem! +- Support for `EXPLAIN [ANALYZE] VERBOSE` - thanks @ovr! +- Support `ANALYZE TABLE` +- DDL: + - Support `OR REPLACE` in `CREATE VIEW`/`TABLE` (#239) - thanks @Dandandan! + - Support specifying `ASC`/`DESC` in index columns (#249) - thanks @mashuai! + - Support SQLite `AUTOINCREMENT` and MySQL `AUTO_INCREMENT` column option in `CREATE TABLE` (#234) - thanks @mashuai! + - Support PostgreSQL `IF NOT EXISTS` for `CREATE SCHEMA` (#276) - thanks @alex-dukhno! + +### Fixed +- Fix a typo in `JSONFILE` serialization, introduced in 0.3.1 (#237) +- Change `CREATE INDEX` serialization to not end with a semicolon, introduced in 0.5.1 (#245) +- Don't fail parsing `ALTER TABLE ADD COLUMN` ending with a semicolon, introduced in 0.5.1 (#246) - thanks @mashuai + +## [0.6.1] - 2020-07-20 + +### Added +- Support BigQuery `ASSERT` statement (#226) + +## [0.6.0] - 2020-07-20 + +### Added +- Support SQLite's `CREATE TABLE (...) WITHOUT ROWID` (#208) - thanks @mashuai! +- Support SQLite's `CREATE VIRTUAL TABLE` (#209) - thanks @mashuai! + +## [0.5.1] - 2020-06-26 +This release should have been called `0.6`, as it introduces multiple incompatible changes to the API. If you don't want to upgrade yet, you can revert to the previous version by changing your `Cargo.toml` to: + + sqlparser = "= 0.5.0" + + +### Changed +- **`Parser::parse_sql` now accepts a `&str` instead of `String` (#182)** - thanks @Dandandan! +- Change `Ident` (previously a simple `String`) to store the parsed (unquoted) `value` of the identifier and the `quote_style` separately (#143) - thanks @apparebit! +- Support Snowflake's `FROM (table_name)` (#155) - thanks @eyalleshem! +- Add line and column number to TokenizerError (#194) - thanks @Dandandan! +- Use Token::EOF instead of Option (#195) +- Make the units keyword following `INTERVAL '...'` optional (#184) - thanks @maxcountryman! +- Generalize `DATE`/`TIME`/`TIMESTAMP` literals representation in the AST (`TypedString { data_type, value }`) and allow `DATE` and other keywords to be used as identifiers when not followed by a string (#187) - thanks @maxcountryman! +- Output DataType capitalized (`fmt::Display`) (#202) - thanks @Dandandan! + +### Added +- Support MSSQL `TOP () [ PERCENT ] [ WITH TIES ]` (#150) - thanks @alexkyllo! +- Support MySQL `LIMIT row_count OFFSET offset` (not followed by `ROW` or `ROWS`) and remember which variant was parsed (#158) - thanks @mjibson! +- Support PostgreSQL `CREATE TABLE IF NOT EXISTS table_name` (#163) - thanks @alex-dukhno! 
+- Support basic forms of `CREATE INDEX` and `DROP INDEX` (#167) - thanks @mashuai! +- Support `ON { UPDATE | DELETE } { RESTRICT | CASCADE | SET NULL | NO ACTION | SET DEFAULT }` in `FOREIGN KEY` constraints (#170) - thanks @c7hm4r! +- Support basic forms of `CREATE SCHEMA` and `DROP SCHEMA` (#173) - thanks @alex-dukhno! +- Support `NULLS FIRST`/`LAST` in `ORDER BY` expressions (#176) - thanks @houqp! +- Support `LISTAGG()` (#174) - thanks @maxcountryman! +- Support the string concatentation operator `||` (#178) - thanks @Dandandan! +- Support bitwise AND (`&`), OR (`|`), XOR (`^`) (#181) - thanks @Dandandan! +- Add serde support to AST structs and enums (#196) - thanks @panarch! +- Support `ALTER TABLE ADD COLUMN`, `RENAME COLUMN`, and `RENAME TO` (#203) - thanks @mashuai! +- Support `ALTER TABLE DROP COLUMN` (#148) - thanks @ivanceras! +- Support `CREATE TABLE ... AS ...` (#206) - thanks @Dandandan! + +### Fixed +- Report an error for unterminated string literals (#165) +- Make file format (`STORED AS`) case insensitive (#200) and don't allow quoting it (#201) - thanks @Dandandan! + +## [0.5.0] - 2019-10-10 + +### Changed +- Replace the `Value::Long(u64)` and `Value::Double(f64)` variants with `Value::Number(String)` to avoid losing precision when parsing decimal literals (#130) - thanks @benesch! +- `--features bigdecimal` can be enabled to work with `Value::Number(BigDecimal)` instead, at the cost of an additional dependency. + +### Added +- Support MySQL `SHOW COLUMNS`, `SET =`, and `SHOW ` statements (#135) - thanks @quodlibetor and @benesch! + +### Fixed +- Don't fail to parse `START TRANSACTION` followed by a semicolon (#139) - thanks @gaffneyk! + + +## [0.4.0] - 2019-07-02 +This release brings us closer to SQL-92 support, mainly thanks to the improvements contributed back from @MaterializeInc's fork and other work by @benesch. + +### Changed +- Remove "SQL" from type and enum variant names, `SQLType` -> `DataType`, remove "sql" prefix from module names (#105, #122) +- Rename `ASTNode` -> `Expr` (#119) +- Improve consistency of binary/unary op nodes (#112): + - `ASTNode::SQLBinaryExpr` is now `Expr::BinaryOp` and `ASTNode::SQLUnary` is `Expr::UnaryOp`; + - The `op: SQLOperator` field is now either a `BinaryOperator` or an `UnaryOperator`. +- Change the representation of JOINs to match the standard (#109): `SQLSelect`'s `relation` and `joins` are replaced with `from: Vec`. Before this change `FROM foo NATURAL JOIN bar, baz` was represented as "foo" as the `relation` followed by two joins (`Inner(Natural)` and `Implicit`); now it's two `TableWithJoins` (`foo NATURAL JOIN bar` and `baz`). +- Extract a `SQLFunction` struct (#89) +- Replace `Option>` with `Vec` in the AST structs (#73) +- Change `Value::Long()` to be unsigned, use u64 consistently (#65) + +### Added +- Infra: + - Implement `fmt::Display` on AST nodes (#124) - thanks @vemoo! 
+ - Implement `Hash` (#88) and `Eq` (#123) on all AST nodes + - Implement `std::error::Error` for `ParserError` (#72) + - Handle Windows line-breaks (#54) +- Expressions: + - Support `INTERVAL` literals (#103) + - Support `DATE` / `TIME` / `TIMESTAMP` literals (#99) + - Support `EXTRACT` (#96) + - Support `X'hex value'` literals (#95) + - Support `EXISTS` subqueries (#90) + - Support nested expressions in `BETWEEN` (#80) + - Support `COUNT(DISTINCT x)` and similar (#77) + - Support `CASE operand WHEN expected_value THEN ..` and table-valued functions (#59) + - Support analytic (window) functions (`OVER` clause) (#50) +- Queries / DML: + - Support nested joins (#100) and derived tables with set operations (#111) + - Support `UPDATE` statements (#97) + - Support `INSERT INTO foo SELECT * FROM bar` and `FROM VALUES (...)` (#91) + - Support `SELECT ALL` (#76) + - Add `FETCH` and `OFFSET` support, and `LATERAL` (#69) - thanks @thomas-jeepe! + - Support `COLLATE`, optional column list in CTEs (#64) +- DDL/TCL: + - Support `START/SET/COMMIT/ROLLBACK TRANSACTION` (#106) - thanks @SamuelMarks! + - Parse column constraints in any order (#93) + - Parse `DECIMAL` and `DEC` aliases for `NUMERIC` type (#92) + - Support `DROP [TABLE|VIEW]` (#75) + - Support arbitrary `WITH` options for `CREATE [TABLE|VIEW]` (#74) + - Support constraints in `CREATE TABLE` (#65) +- Add basic MSSQL dialect (#61) and some MSSQL-specific features: + - `CROSS`/`OUTER APPLY` (#120) + - MSSQL identifier and alias parsing rules (#66) + - `WITH` hints (#59) + +### Fixed +- Report an error for `SELECT * FROM a OUTER JOIN b` instead of parsing `OUTER` as an alias (#118) +- Fix the precedence of `NOT LIKE` (#82) and unary `NOT` (#107) +- Do not panic when `NOT` is not followed by an expected keyword (#71) + successfully instead of returning a parse error - thanks @ivanceras! (#67) - and similar fixes for queries with no `FROM` (#116) +- Fix issues with `ALTER TABLE ADD CONSTRAINT` parsing (#65) +- Serialize the "not equals" operator as `<>` instead of `!=` (#64) +- Remove dependencies on `uuid` (#59) and `chrono` (#61) +- Make `SELECT` query with `LIMIT` clause but no `WHERE` parse - Fix incorrect behavior of `ASTNode::SQLQualifiedWildcard::to_string()` (returned `foo*` instead of `foo.*`) - thanks @thomas-jeepe! (#52) + +## [0.3.1] - 2019-04-20 +### Added +- Extended `SQLStatement::SQLCreateTable` to support Hive's EXTERNAL TABLES (`CREATE EXTERNAL TABLE .. STORED AS .. LOCATION '..'`) - thanks @zhzy0077! (#46) +- Parse `SELECT DISTINCT` to `SQLSelect::distinct` (#49) + +## [0.3.0] - 2019-04-03 +### Changed +This release includes major changes to the AST structs to add a number of features, as described in #37 and #43. In particular: +- `ASTNode` variants that represent statements were extracted from `ASTNode` into a separate `SQLStatement` enum; + - `Parser::parse_sql` now returns a `Vec` of parsed statements. + - `ASTNode` now represents an expression (renamed to `Expr` in 0.4.0) +- The query representation (formerly `ASTNode::SQLSelect`) became more complicated to support: + - `WITH` and `UNION`/`EXCEPT`/`INTERSECT` (via `SQLQuery`, `Cte`, and `SQLSetExpr`), + - aliases and qualified wildcards in `SELECT` (via `SQLSelectItem`), + - and aliases in `FROM`/`JOIN` (via `TableFactor`). +- A new `SQLObjectName` struct is used instead of `String` or `ASTNode::SQLCompoundIdentifier` - for objects like tables, custom types, etc. 
+- Added support for "delimited identifiers" and made keywords context-specific (thus accepting them as valid identifiers in most contexts) - **this caused a regression in parsing `SELECT .. FROM .. LIMIT ..` (#67), fixed in 0.4.0** + +### Added +Other than the changes listed above, some less intrusive additions include: +- Support `CREATE [MATERIALIZED] VIEW` statement +- Support `IN`, `BETWEEN`, unary +/- in epressions +- Support `CHAR` data type and `NUMERIC` not followed by `(p,s)`. +- Support national string literals (`N'...'`) + +## [0.2.4] - 2019-03-08 +Same as 0.2.2. + +## [0.2.3] - 2019-03-08 [YANKED] + +## [0.2.2] - 2019-03-08 +### Changed +- Removed `Value::String`, `Value::DoubleQuotedString`, and `Token::String`, making + - `'...'` parse as a string literal (`Value::SingleQuotedString`), and + - `"..."` fail to parse until version 0.3.0 (#36) + +## [0.2.1] - 2019-01-13 +We don't have a changelog for the changes made in 2018, but thanks to @crw5996, @cswinter, @fredrikroos, @ivanceras, @nickolay, @virattara for their contributions in the early stages of the project! + +## [0.1.0] - 2018-09-03 +Initial release diff --git a/changelog/0.52.0.md b/changelog/0.52.0.md new file mode 100644 index 000000000..9d5b16c7c --- /dev/null +++ b/changelog/0.52.0.md @@ -0,0 +1,104 @@ + + +# sqlparser-rs 0.52.0 Changelog + +This release consists of 45 commits from 20 contributors. See credits at the end of this changelog for more information. + +**Implemented enhancements:** + +- feat: support explain options [#1426](https://github.com/apache/datafusion-sqlparser-rs/pull/1426) (kysshsy) +- feat: adding Display implementation to DELETE and INSERT [#1427](https://github.com/apache/datafusion-sqlparser-rs/pull/1427) (seve-martinez) + +**Fixed bugs:** + +- fix: `maybe_parse` preventing parser from erroring on recursion limit [#1464](https://github.com/apache/datafusion-sqlparser-rs/pull/1464) (tomershaniii) + +**Other:** + +- Fix parsing of negative values [#1419](https://github.com/apache/datafusion-sqlparser-rs/pull/1419) (agscpp) +- Allow to use ON CLUSTER cluster_name in TRUNCATE syntax [#1428](https://github.com/apache/datafusion-sqlparser-rs/pull/1428) (git-hulk) +- chore: remove redundant punctuation [#1434](https://github.com/apache/datafusion-sqlparser-rs/pull/1434) (Fischer0522) +- MS SQL Server: add support for IDENTITY column option [#1432](https://github.com/apache/datafusion-sqlparser-rs/pull/1432) (7phs) +- Update to ASF header / add when missing [#1437](https://github.com/apache/datafusion-sqlparser-rs/pull/1437) (alamb) +- Some small optimizations [#1424](https://github.com/apache/datafusion-sqlparser-rs/pull/1424) (exrok) +- Fix `codestyle` CI check [#1438](https://github.com/apache/datafusion-sqlparser-rs/pull/1438) (alamb) +- Implements CREATE POLICY syntax for PostgreSQL [#1440](https://github.com/apache/datafusion-sqlparser-rs/pull/1440) (git-hulk) +- make `parse_expr_with_alias` public [#1444](https://github.com/apache/datafusion-sqlparser-rs/pull/1444) (Eason0729) +- Implements DROP POLICY syntax for PostgreSQL [#1445](https://github.com/apache/datafusion-sqlparser-rs/pull/1445) (git-hulk) +- Support `DROP DATABASE` [#1443](https://github.com/apache/datafusion-sqlparser-rs/pull/1443) (linhr) +- Implements ALTER POLICY syntax for PostgreSQL [#1446](https://github.com/apache/datafusion-sqlparser-rs/pull/1446) (git-hulk) +- Add a note discouraging new use of `dialect_of` macro [#1448](https://github.com/apache/datafusion-sqlparser-rs/pull/1448) (alamb) +- Expand handling of 
`LIMIT 1, 2` handling to include sqlite [#1447](https://github.com/apache/datafusion-sqlparser-rs/pull/1447) (joshuawarner32) +- Fix always uses CommentDef::WithoutEq while parsing the inline comment [#1453](https://github.com/apache/datafusion-sqlparser-rs/pull/1453) (git-hulk) +- Add support for the LIKE ANY and ILIKE ANY pattern-matching condition [#1456](https://github.com/apache/datafusion-sqlparser-rs/pull/1456) (yoavcloud) +- added ability to parse extension to parse_comment inside postgres dialect [#1451](https://github.com/apache/datafusion-sqlparser-rs/pull/1451) (MaxwellKnight) +- Snowflake: support of views column comment [#1441](https://github.com/apache/datafusion-sqlparser-rs/pull/1441) (7phs) +- Add SQLite "ON CONFLICT" column option in CREATE TABLE statements [#1442](https://github.com/apache/datafusion-sqlparser-rs/pull/1442) (nucccc) +- Add support for ASC and DESC in CREATE TABLE column constraints for SQLite. [#1462](https://github.com/apache/datafusion-sqlparser-rs/pull/1462) (caldwell) +- Add support of `EXPLAIN QUERY PLAN` syntax for SQLite dialect [#1458](https://github.com/apache/datafusion-sqlparser-rs/pull/1458) (git-hulk) +- Add "DROP TYPE" support. [#1461](https://github.com/apache/datafusion-sqlparser-rs/pull/1461) (caldwell) +- chore: Add asf.yaml [#1463](https://github.com/apache/datafusion-sqlparser-rs/pull/1463) (Xuanwo) +- Add support for quantified comparison predicates (ALL/ANY/SOME) [#1459](https://github.com/apache/datafusion-sqlparser-rs/pull/1459) (yoavcloud) +- MySQL dialect: Add support for hash comments [#1466](https://github.com/apache/datafusion-sqlparser-rs/pull/1466) (hansott) +- Fix #1469 (SET ROLE regression) [#1474](https://github.com/apache/datafusion-sqlparser-rs/pull/1474) (lovasoa) +- Add support for parsing MsSql alias with equals [#1467](https://github.com/apache/datafusion-sqlparser-rs/pull/1467) (yoavcloud) +- Snowflake: support for extended column options in `CREATE TABLE` [#1454](https://github.com/apache/datafusion-sqlparser-rs/pull/1454) (7phs) +- MsSQL TRY_CONVERT [#1477](https://github.com/apache/datafusion-sqlparser-rs/pull/1477) (yoavcloud) +- Add PostgreSQL specfic "CREATE TYPE t AS ENUM (...)" support. 
[#1460](https://github.com/apache/datafusion-sqlparser-rs/pull/1460) (caldwell) +- Fix build [#1483](https://github.com/apache/datafusion-sqlparser-rs/pull/1483) (yoavcloud) +- Fix complex blocks warning when running clippy [#1488](https://github.com/apache/datafusion-sqlparser-rs/pull/1488) (git-hulk) +- Add support for SHOW DATABASES/SCHEMAS/TABLES/VIEWS in Hive [#1487](https://github.com/apache/datafusion-sqlparser-rs/pull/1487) (yoavcloud) +- Fix typo in `Dialect::supports_eq_alias_assigment` [#1478](https://github.com/apache/datafusion-sqlparser-rs/pull/1478) (alamb) +- Add support for PostgreSQL `LISTEN/NOTIFY` syntax [#1485](https://github.com/apache/datafusion-sqlparser-rs/pull/1485) (wugeer) +- Add support for TOP before ALL/DISTINCT [#1495](https://github.com/apache/datafusion-sqlparser-rs/pull/1495) (yoavcloud) +- add support for `FOR ORDINALITY` and `NESTED` in JSON_TABLE [#1493](https://github.com/apache/datafusion-sqlparser-rs/pull/1493) (lovasoa) +- Add Apache License to additional files [#1502](https://github.com/apache/datafusion-sqlparser-rs/pull/1502) (alamb) +- Move CHANGELOG content [#1503](https://github.com/apache/datafusion-sqlparser-rs/pull/1503) (alamb) +- improve support for T-SQL EXECUTE statements [#1490](https://github.com/apache/datafusion-sqlparser-rs/pull/1490) (lovasoa) + +## Credits + +Thank you to everyone who contributed to this release. Here is a breakdown of commits (PRs merged) per contributor. + +``` + 8 Andrew Lamb + 7 Yoav Cohen + 7 hulk + 3 Aleksei Piianin + 3 David Caldwell + 3 Ophir LOJKINE + 1 Agaev Guseyn + 1 Eason + 1 Fischer + 1 Hans Ott + 1 Heran Lin + 1 Joshua Warner + 1 Maxwell Knight + 1 Seve Martinez + 1 Siyuan Huang + 1 Thomas Dagenais + 1 Xuanwo + 1 nucccc + 1 tomershaniii + 1 wugeer +``` + +Thank you also to everyone who contributed in other ways such as filing issues, reviewing PRs, and providing feedback on this release. + diff --git a/changelog/0.53.0.md b/changelog/0.53.0.md new file mode 100644 index 000000000..5b9de07d3 --- /dev/null +++ b/changelog/0.53.0.md @@ -0,0 +1,95 @@ + + +# sqlparser-rs 0.53.0 Changelog + +This release consists of 47 commits from 16 contributors. See credits at the end of this changelog for more information. 
+ +**Other:** + +- hive: support for special not expression `!a` and raise error for `a!` factorial operator [#1472](https://github.com/apache/datafusion-sqlparser-rs/pull/1472) (wugeer) +- Add support for MSSQL's `OPENJSON WITH` clause [#1498](https://github.com/apache/datafusion-sqlparser-rs/pull/1498) (gaoqiangz) +- Parse true and false as identifiers in mssql [#1510](https://github.com/apache/datafusion-sqlparser-rs/pull/1510) (lovasoa) +- Fix the parsing error in MSSQL for multiple statements that include `DECLARE` statements [#1497](https://github.com/apache/datafusion-sqlparser-rs/pull/1497) (wugeer) +- Add support for Snowflake SHOW DATABASES/SCHEMAS/TABLES/VIEWS/COLUMNS statements [#1501](https://github.com/apache/datafusion-sqlparser-rs/pull/1501) (yoavcloud) +- Add support of COMMENT ON syntax for Snowflake [#1516](https://github.com/apache/datafusion-sqlparser-rs/pull/1516) (git-hulk) +- Add support for MYSQL's `CREATE TABLE SELECT` expr [#1515](https://github.com/apache/datafusion-sqlparser-rs/pull/1515) (wugeer) +- Add support for MSSQL's `XQuery` methods [#1500](https://github.com/apache/datafusion-sqlparser-rs/pull/1500) (gaoqiangz) +- Add support for Hive's `LOAD DATA` expr [#1520](https://github.com/apache/datafusion-sqlparser-rs/pull/1520) (wugeer) +- Fix ClickHouse document link from `Russian` to `English` [#1527](https://github.com/apache/datafusion-sqlparser-rs/pull/1527) (git-hulk) +- Support ANTI and SEMI joins without LEFT/RIGHT [#1528](https://github.com/apache/datafusion-sqlparser-rs/pull/1528) (delamarch3) +- support sqlite's OR clauses in update statements [#1530](https://github.com/apache/datafusion-sqlparser-rs/pull/1530) (lovasoa) +- support column type definitions in table aliases [#1526](https://github.com/apache/datafusion-sqlparser-rs/pull/1526) (lovasoa) +- Add support for MSSQL's `JSON_ARRAY`/`JSON_OBJECT` expr [#1507](https://github.com/apache/datafusion-sqlparser-rs/pull/1507) (gaoqiangz) +- Add support for PostgreSQL `UNLISTEN` syntax and Add support for Postgres `LOAD extension` expr [#1531](https://github.com/apache/datafusion-sqlparser-rs/pull/1531) (wugeer) +- Parse byte/bit string literals in MySQL and Postgres [#1532](https://github.com/apache/datafusion-sqlparser-rs/pull/1532) (mvzink) +- Allow example CLI to read from stdin [#1536](https://github.com/apache/datafusion-sqlparser-rs/pull/1536) (mvzink) +- recursive select calls are parsed with bad trailing_commas parameter [#1521](https://github.com/apache/datafusion-sqlparser-rs/pull/1521) (tomershaniii) +- PartiQL queries in Redshift [#1534](https://github.com/apache/datafusion-sqlparser-rs/pull/1534) (yoavcloud) +- Include license file in sqlparser_derive crate [#1543](https://github.com/apache/datafusion-sqlparser-rs/pull/1543) (ankane) +- Fallback to identifier parsing if expression parsing fails [#1513](https://github.com/apache/datafusion-sqlparser-rs/pull/1513) (yoavcloud) +- support `json_object('k':'v')` in postgres [#1546](https://github.com/apache/datafusion-sqlparser-rs/pull/1546) (lovasoa) +- Document micro benchmarks [#1555](https://github.com/apache/datafusion-sqlparser-rs/pull/1555) (alamb) +- Implement `Spanned` to retrieve source locations on AST nodes [#1435](https://github.com/apache/datafusion-sqlparser-rs/pull/1435) (Nyrox) +- Fix error in benchmark queries [#1560](https://github.com/apache/datafusion-sqlparser-rs/pull/1560) (alamb) +- Fix clippy warnings on rust 1.83 [#1570](https://github.com/apache/datafusion-sqlparser-rs/pull/1570) (iffyio) +- Support relation 
visitor to visit the `Option` field [#1556](https://github.com/apache/datafusion-sqlparser-rs/pull/1556) (goldmedal) +- Rename `TokenWithLocation` to `TokenWithSpan`, in backwards compatible way [#1562](https://github.com/apache/datafusion-sqlparser-rs/pull/1562) (alamb) +- Support MySQL size variants for BLOB and TEXT columns [#1564](https://github.com/apache/datafusion-sqlparser-rs/pull/1564) (mvzink) +- Increase version of sqlparser_derive from 0.2.2 to 0.3.0 [#1571](https://github.com/apache/datafusion-sqlparser-rs/pull/1571) (alamb) +- `json_object('k' VALUE 'v')` in postgres [#1547](https://github.com/apache/datafusion-sqlparser-rs/pull/1547) (lovasoa) +- Support snowflake double dot notation for object name [#1540](https://github.com/apache/datafusion-sqlparser-rs/pull/1540) (ayman-sigma) +- Update comments / docs for `Spanned` [#1549](https://github.com/apache/datafusion-sqlparser-rs/pull/1549) (alamb) +- Support Databricks struct literal [#1542](https://github.com/apache/datafusion-sqlparser-rs/pull/1542) (ayman-sigma) +- Encapsulate CreateFunction [#1573](https://github.com/apache/datafusion-sqlparser-rs/pull/1573) (philipcristiano) +- Support BIT column types [#1577](https://github.com/apache/datafusion-sqlparser-rs/pull/1577) (mvzink) +- Support parsing optional nulls handling for unique constraint [#1567](https://github.com/apache/datafusion-sqlparser-rs/pull/1567) (mvzink) +- Fix displaying WORK or TRANSACTION after BEGIN [#1565](https://github.com/apache/datafusion-sqlparser-rs/pull/1565) (mvzink) +- Add support of the ENUM8|ENUM16 for ClickHouse dialect [#1574](https://github.com/apache/datafusion-sqlparser-rs/pull/1574) (git-hulk) +- Parse Snowflake USE ROLE and USE SECONDARY ROLES [#1578](https://github.com/apache/datafusion-sqlparser-rs/pull/1578) (yoavcloud) +- Snowflake ALTER TABLE clustering options [#1579](https://github.com/apache/datafusion-sqlparser-rs/pull/1579) (yoavcloud) +- Support INSERT OVERWRITE INTO syntax [#1584](https://github.com/apache/datafusion-sqlparser-rs/pull/1584) (yuval-illumex) +- Parse `INSERT` with subquery when lacking column names [#1586](https://github.com/apache/datafusion-sqlparser-rs/pull/1586) (iffyio) +- Add support for ODBC functions [#1585](https://github.com/apache/datafusion-sqlparser-rs/pull/1585) (iffyio) + +## Credits + +Thank you to everyone who contributed to this release. Here is a breakdown of commits (PRs merged) per contributor. + +``` + 8 Andrew Lamb + 6 Michael Victor Zink + 5 Ophir LOJKINE + 5 Yoav Cohen + 5 wugeer + 3 Ifeanyi Ubah + 3 gaoqiangz + 3 hulk + 2 Ayman Elkfrawy + 1 Andrew Kane + 1 Jax Liu + 1 Mark-Oliver Junge + 1 Philip Cristiano + 1 Yuval Shkolar + 1 delamarch3 + 1 tomershaniii +``` + +Thank you also to everyone who contributed in other ways such as filing issues, reviewing PRs, and providing feedback on this release. + diff --git a/changelog/0.54.0.md b/changelog/0.54.0.md new file mode 100644 index 000000000..c0a63ae45 --- /dev/null +++ b/changelog/0.54.0.md @@ -0,0 +1,118 @@ + + +# sqlparser-rs 0.54.0 Changelog + +This release consists of 57 commits from 24 contributors. See credits at the end of this changelog for more information. 
+ +**Implemented enhancements:** + +- feat: support `INSERT INTO [TABLE] FUNCTION` of Clickhouse [#1633](https://github.com/apache/datafusion-sqlparser-rs/pull/1633) (byte-sourcerer) + +**Other:** + +- Run cargo fmt on `derive` crate [#1595](https://github.com/apache/datafusion-sqlparser-rs/pull/1595) (alamb) +- Add Apache license header to spans.rs [#1594](https://github.com/apache/datafusion-sqlparser-rs/pull/1594) (alamb) +- Add support for BigQuery `ANY TYPE` data type [#1602](https://github.com/apache/datafusion-sqlparser-rs/pull/1602) (MartinSahlen) +- Add support for TABLESAMPLE [#1580](https://github.com/apache/datafusion-sqlparser-rs/pull/1580) (yoavcloud) +- Redshift: Fix parsing for quoted numbered columns [#1576](https://github.com/apache/datafusion-sqlparser-rs/pull/1576) (7phs) +- Add the alter table ON COMMIT option to Snowflake [#1606](https://github.com/apache/datafusion-sqlparser-rs/pull/1606) (yoavcloud) +- Support parsing `EXPLAIN ESTIMATE` of Clickhouse [#1605](https://github.com/apache/datafusion-sqlparser-rs/pull/1605) (byte-sourcerer) +- Fix BigQuery hyphenated ObjectName with numbers [#1598](https://github.com/apache/datafusion-sqlparser-rs/pull/1598) (ayman-sigma) +- Fix test compilation issue [#1609](https://github.com/apache/datafusion-sqlparser-rs/pull/1609) (iffyio) +- Allow foreign table constraint without columns [#1608](https://github.com/apache/datafusion-sqlparser-rs/pull/1608) (ramnivas) +- Support optional table for `ANALYZE` statement [#1599](https://github.com/apache/datafusion-sqlparser-rs/pull/1599) (yuyang-ok) +- Support DOUBLE data types with precision for Mysql [#1611](https://github.com/apache/datafusion-sqlparser-rs/pull/1611) (artorias1024) +- Add `#[recursive]` [#1522](https://github.com/apache/datafusion-sqlparser-rs/pull/1522) (blaginin) +- Support arbitrary composite access expressions [#1600](https://github.com/apache/datafusion-sqlparser-rs/pull/1600) (ayman-sigma) +- Consolidate `MapAccess`, and `Subscript` into `CompoundExpr` to handle the complex field access chain [#1551](https://github.com/apache/datafusion-sqlparser-rs/pull/1551) (goldmedal) +- Handle empty projection in Postgres SELECT statements [#1613](https://github.com/apache/datafusion-sqlparser-rs/pull/1613) (tobyhede) +- Merge composite and compound expr test cases [#1615](https://github.com/apache/datafusion-sqlparser-rs/pull/1615) (iffyio) +- Support Snowflake Update-From-Select [#1604](https://github.com/apache/datafusion-sqlparser-rs/pull/1604) (yuval-illumex) +- Improve parsing performance by reducing token cloning [#1587](https://github.com/apache/datafusion-sqlparser-rs/pull/1587) (davisp) +- Improve Parser documentation [#1617](https://github.com/apache/datafusion-sqlparser-rs/pull/1617) (alamb) +- Add support for DROP EXTENSION [#1610](https://github.com/apache/datafusion-sqlparser-rs/pull/1610) (ramnivas) +- Refactor advancing token to avoid duplication, avoid borrow checker issues [#1618](https://github.com/apache/datafusion-sqlparser-rs/pull/1618) (alamb) +- Fix the parsing result for the special double number [#1621](https://github.com/apache/datafusion-sqlparser-rs/pull/1621) (goldmedal) +- SQLite: Allow dollar signs in placeholder names [#1620](https://github.com/apache/datafusion-sqlparser-rs/pull/1620) (hansott) +- Improve error for an unexpected token after DROP [#1623](https://github.com/apache/datafusion-sqlparser-rs/pull/1623) (ramnivas) +- Fix `sqlparser_bench` benchmark compilation [#1625](https://github.com/apache/datafusion-sqlparser-rs/pull/1625) 
(alamb) +- Improve parsing speed by avoiding some clones in parse_identifier [#1624](https://github.com/apache/datafusion-sqlparser-rs/pull/1624) (alamb) +- Simplify `parse_keyword_apis` more [#1626](https://github.com/apache/datafusion-sqlparser-rs/pull/1626) (alamb) +- Test benchmarks and Improve benchmark README.md [#1627](https://github.com/apache/datafusion-sqlparser-rs/pull/1627) (alamb) +- Add support for MYSQL's `RENAME TABLE` [#1616](https://github.com/apache/datafusion-sqlparser-rs/pull/1616) (wugeer) +- Correctly tokenize nested comments [#1629](https://github.com/apache/datafusion-sqlparser-rs/pull/1629) (hansott) +- Add support for USE SECONDARY ROLE (vs. ROLES) [#1637](https://github.com/apache/datafusion-sqlparser-rs/pull/1637) (yoavcloud) +- Add support for various Snowflake grantees [#1640](https://github.com/apache/datafusion-sqlparser-rs/pull/1640) (yoavcloud) +- Add support for the SQL OVERLAPS predicate [#1638](https://github.com/apache/datafusion-sqlparser-rs/pull/1638) (yoavcloud) +- Add support for Snowflake LIST and REMOVE [#1639](https://github.com/apache/datafusion-sqlparser-rs/pull/1639) (yoavcloud) +- Add support for MySQL's INSERT INTO ... SET syntax [#1641](https://github.com/apache/datafusion-sqlparser-rs/pull/1641) (yoavcloud) +- Start new line if \r in Postgres dialect [#1647](https://github.com/apache/datafusion-sqlparser-rs/pull/1647) (hansott) +- Support pluralized time units [#1630](https://github.com/apache/datafusion-sqlparser-rs/pull/1630) (wugeer) +- Replace `ReferentialAction` enum in `DROP` statements [#1648](https://github.com/apache/datafusion-sqlparser-rs/pull/1648) (stepancheg) +- Add support for MS-SQL BEGIN/END TRY/CATCH [#1649](https://github.com/apache/datafusion-sqlparser-rs/pull/1649) (yoavcloud) +- Fix MySQL parsing of GRANT, REVOKE, and CREATE VIEW [#1538](https://github.com/apache/datafusion-sqlparser-rs/pull/1538) (mvzink) +- Add support for the Snowflake MINUS set operator [#1652](https://github.com/apache/datafusion-sqlparser-rs/pull/1652) (yoavcloud) +- ALTER TABLE DROP {COLUMN|CONSTRAINT} RESTRICT [#1651](https://github.com/apache/datafusion-sqlparser-rs/pull/1651) (stepancheg) +- Add support for ClickHouse `FORMAT` on `INSERT` [#1628](https://github.com/apache/datafusion-sqlparser-rs/pull/1628) (bombsimon) +- MsSQL SET for session params [#1646](https://github.com/apache/datafusion-sqlparser-rs/pull/1646) (yoavcloud) +- Correctly look for end delimiter dollar quoted string [#1650](https://github.com/apache/datafusion-sqlparser-rs/pull/1650) (hansott) +- Support single line comments starting with '#' for Hive [#1654](https://github.com/apache/datafusion-sqlparser-rs/pull/1654) (wugeer) +- Support trailing commas in `FROM` clause [#1645](https://github.com/apache/datafusion-sqlparser-rs/pull/1645) (barsela1) +- Allow empty options for BigQuery [#1657](https://github.com/apache/datafusion-sqlparser-rs/pull/1657) (MartinSahlen) +- Add support for parsing RAISERROR [#1656](https://github.com/apache/datafusion-sqlparser-rs/pull/1656) (AvivDavid-Satori) +- Add support for Snowflake column aliases that use SQL keywords [#1632](https://github.com/apache/datafusion-sqlparser-rs/pull/1632) (yoavcloud) +- fix parsing of `INSERT INTO ... SELECT ... RETURNING ` [#1661](https://github.com/apache/datafusion-sqlparser-rs/pull/1661) (lovasoa) +- Add support for `IS [NOT] [form] NORMALIZED` [#1655](https://github.com/apache/datafusion-sqlparser-rs/pull/1655) (alexander-beedie) +- Add support for qualified column names in JOIN ... 
USING [#1663](https://github.com/apache/datafusion-sqlparser-rs/pull/1663) (yoavcloud) +- Add support for Snowflake AT/BEFORE [#1667](https://github.com/apache/datafusion-sqlparser-rs/pull/1667) (yoavcloud) + +## Credits + +Thank you to everyone who contributed to this release. Here is a breakdown of commits (PRs merged) per contributor. + +``` + 13 Yoav Cohen + 9 Andrew Lamb + 4 Hans Ott + 3 Ramnivas Laddad + 3 wugeer + 2 Ayman Elkfrawy + 2 Ifeanyi Ubah + 2 Jax Liu + 2 Martin Abelson Sahlen + 2 Stepan Koltsov + 2 cjw + 1 Aleksei Piianin + 1 Alexander Beedie + 1 AvivDavid-Satori + 1 Dmitrii Blaginin + 1 Michael Victor Zink + 1 Ophir LOJKINE + 1 Paul J. Davis + 1 Simon Sawert + 1 Toby Hede + 1 Yuval Shkolar + 1 artorias1024 + 1 bar sela + 1 yuyang +``` + +Thank you also to everyone who contributed in other ways such as filing issues, reviewing PRs, and providing feedback on this release. + diff --git a/changelog/0.55.0.md b/changelog/0.55.0.md new file mode 100644 index 000000000..046bf22bc --- /dev/null +++ b/changelog/0.55.0.md @@ -0,0 +1,173 @@ + + +# sqlparser-rs 0.55.0 Changelog + +This release consists of 55 commits from 25 contributors. See credits at the end of this changelog for more information. + +## Migrating usages of `Expr::Value` + +In v0.55 of sqlparser, the `Expr::Value` enum variant contains a `ValueWithSpan` instead of a `Value`. Here is how to migrate. + +### When pattern matching + +```diff +- Expr::Value(Value::SingleQuotedString(my_string)) => { ... } ++ Expr::Value(ValueWithSpan{ value: Value::SingleQuotedString(my_string), span: _ }) => { ... } +``` + +### When creating an `Expr` + +Use the new `Expr::value` method (notice the lowercase `v`), which will create a `ValueWithSpan` containing an empty span: + +```diff +- Expr::Value(Value::SingleQuotedString(my_string)) ++ Expr::value(Value::SingleQuotedString(my_string)) +``` +
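For a combined view of the two `Expr::Value` changes above, here is a minimal sketch. The `literal_text` helper is hypothetical (not part of this release), and the example assumes a `sqlparser = "0.55"` dependency with the imports shown:

```rust
use sqlparser::ast::{Expr, Value, ValueWithSpan};

// Hypothetical helper: extract the text of a single-quoted string literal.
fn literal_text(expr: &Expr) -> Option<&str> {
    match expr {
        // v0.55: Expr::Value wraps a ValueWithSpan; destructure the inner
        // Value and ignore the span with `..`.
        Expr::Value(ValueWithSpan {
            value: Value::SingleQuotedString(s),
            ..
        }) => Some(s.as_str()),
        _ => None,
    }
}

fn main() {
    // Expr::value (lowercase `v`) attaches an empty span for us,
    // so construction sites stay a one-liner.
    let expr = Expr::value(Value::SingleQuotedString("hello".to_owned()));
    assert_eq!(literal_text(&expr), Some("hello"));
}
```

Pattern matches that only care about the inner `Value` can skip the span with `..`, so most call sites change mechanically.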
+## Migrating usages of `ObjectName` + +In v0.55 of sqlparser, the `ObjectName` structure has been changed as shown below. Here is how to migrate. + +```diff +- pub struct ObjectName(pub Vec<Ident>); ++ pub struct ObjectName(pub Vec<ObjectNamePart>) +``` + +### When constructing `ObjectName` + +Use the `From` impl: + +```diff +- name: ObjectName(vec![Ident::new("f")]), ++ name: ObjectName::from(vec![Ident::new("f")]), +``` + +### Accessing Spans + +Use the `span()` function + +```diff +- name.span ++ name.span() +``` + + + +**Breaking changes:** + +- Enhance object name path segments [#1539](https://github.com/apache/datafusion-sqlparser-rs/pull/1539) (ayman-sigma) +- Store spans for Value expressions [#1738](https://github.com/apache/datafusion-sqlparser-rs/pull/1738) (lovasoa) + +**Implemented enhancements:** + +- feat: adjust create and drop trigger for mysql dialect [#1734](https://github.com/apache/datafusion-sqlparser-rs/pull/1734) (invm) + +**Fixed bugs:** + +- fix: make `serde` feature no_std [#1730](https://github.com/apache/datafusion-sqlparser-rs/pull/1730) (iajoiner) + +**Other:** + +- Update rat_exclude_file.txt [#1670](https://github.com/apache/datafusion-sqlparser-rs/pull/1670) (alamb) +- Add support for Snowflake account privileges [#1666](https://github.com/apache/datafusion-sqlparser-rs/pull/1666) (yoavcloud) +- Add support for Create Iceberg Table statement for Snowflake parser [#1664](https://github.com/apache/datafusion-sqlparser-rs/pull/1664) (Vedin) +- National strings: check if dialect supports backslash escape [#1672](https://github.com/apache/datafusion-sqlparser-rs/pull/1672) (hansott) +- Only support escape literals for Postgres, Redshift and generic dialect [#1674](https://github.com/apache/datafusion-sqlparser-rs/pull/1674) (hansott) +- BigQuery: Support trailing commas in column definitions list [#1682](https://github.com/apache/datafusion-sqlparser-rs/pull/1682) (iffyio) +- Enable GROUP BY exp for Snowflake dialect [#1683](https://github.com/apache/datafusion-sqlparser-rs/pull/1683) (yoavcloud) +- Add support for parsing empty dictionary expressions [#1684](https://github.com/apache/datafusion-sqlparser-rs/pull/1684) (yoavcloud) +- Support multiple tables in `UPDATE FROM` clause [#1681](https://github.com/apache/datafusion-sqlparser-rs/pull/1681) (iffyio) +- Add support for mysql table hints [#1675](https://github.com/apache/datafusion-sqlparser-rs/pull/1675) (AvivDavid-Satori) +- BigQuery: Add support for select expr star [#1680](https://github.com/apache/datafusion-sqlparser-rs/pull/1680) (iffyio) +- Support underscore separators in numbers for Clickhouse. Fixes #1659 [#1677](https://github.com/apache/datafusion-sqlparser-rs/pull/1677) (graup) +- BigQuery: Fix column identifier reserved keywords list [#1678](https://github.com/apache/datafusion-sqlparser-rs/pull/1678) (iffyio) +- Fix bug when parsing a Snowflake stage with `;` suffix [#1688](https://github.com/apache/datafusion-sqlparser-rs/pull/1688) (yoavcloud) +- Allow plain JOIN without turning it into INNER [#1692](https://github.com/apache/datafusion-sqlparser-rs/pull/1692) (mvzink) +- Fix DDL generation in case of an empty arguments function. 
[#1690](https://github.com/apache/datafusion-sqlparser-rs/pull/1690) (remysaissy) +- Fix `CREATE FUNCTION` round trip for Hive dialect [#1693](https://github.com/apache/datafusion-sqlparser-rs/pull/1693) (iffyio) +- Make numeric literal underscore test dialect agnostic [#1685](https://github.com/apache/datafusion-sqlparser-rs/pull/1685) (iffyio) +- Extend lambda support for ClickHouse and DuckDB dialects [#1686](https://github.com/apache/datafusion-sqlparser-rs/pull/1686) (gstvg) +- Make TypedString preserve quote style [#1679](https://github.com/apache/datafusion-sqlparser-rs/pull/1679) (graup) +- Do not parse ASOF and MATCH_CONDITION as table factor aliases [#1698](https://github.com/apache/datafusion-sqlparser-rs/pull/1698) (yoavcloud) +- Add support for GRANT on some common Snowflake objects [#1699](https://github.com/apache/datafusion-sqlparser-rs/pull/1699) (yoavcloud) +- Add RETURNS TABLE() support for CREATE FUNCTION in Postgresql [#1687](https://github.com/apache/datafusion-sqlparser-rs/pull/1687) (remysaissy) +- Add parsing for GRANT ROLE and GRANT DATABASE ROLE in Snowflake dialect [#1689](https://github.com/apache/datafusion-sqlparser-rs/pull/1689) (yoavcloud) +- Add support for `CREATE/ALTER/DROP CONNECTOR` syntax [#1701](https://github.com/apache/datafusion-sqlparser-rs/pull/1701) (wugeer) +- Parse Snowflake COPY INTO [#1669](https://github.com/apache/datafusion-sqlparser-rs/pull/1669) (yoavcloud) +- Require space after -- to start single line comment in MySQL [#1705](https://github.com/apache/datafusion-sqlparser-rs/pull/1705) (hansott) +- Add suppport for Show Objects statement for the Snowflake parser [#1702](https://github.com/apache/datafusion-sqlparser-rs/pull/1702) (DanCodedThis) +- Fix incorrect parsing of JsonAccess bracket notation after cast in Snowflake [#1708](https://github.com/apache/datafusion-sqlparser-rs/pull/1708) (yoavcloud) +- Parse Postgres VARBIT datatype [#1703](https://github.com/apache/datafusion-sqlparser-rs/pull/1703) (mvzink) +- Implement FROM-first selects [#1713](https://github.com/apache/datafusion-sqlparser-rs/pull/1713) (mitsuhiko) +- Enable custom dialects to support `MATCH() AGAINST()` [#1719](https://github.com/apache/datafusion-sqlparser-rs/pull/1719) (joocer) +- Support group by cube/rollup etc in BigQuery [#1720](https://github.com/apache/datafusion-sqlparser-rs/pull/1720) (Groennbeck) +- Add support for MS Varbinary(MAX) (#1714) [#1715](https://github.com/apache/datafusion-sqlparser-rs/pull/1715) (TylerBrinks) +- Add supports for Hive's `SELECT ... GROUP BY .. 
GROUPING SETS` syntax [#1653](https://github.com/apache/datafusion-sqlparser-rs/pull/1653) (wugeer) +- Differentiate LEFT JOIN from LEFT OUTER JOIN [#1726](https://github.com/apache/datafusion-sqlparser-rs/pull/1726) (mvzink) +- Add support for Postgres `ALTER TYPE` [#1727](https://github.com/apache/datafusion-sqlparser-rs/pull/1727) (jvatic) +- Replace `Method` and `CompositeAccess` with `CompoundFieldAccess` [#1716](https://github.com/apache/datafusion-sqlparser-rs/pull/1716) (iffyio) +- Add support for `EXECUTE IMMEDIATE` [#1717](https://github.com/apache/datafusion-sqlparser-rs/pull/1717) (iffyio) +- Treat COLLATE like any other column option [#1731](https://github.com/apache/datafusion-sqlparser-rs/pull/1731) (mvzink) +- Add support for PostgreSQL/Redshift geometric operators [#1723](https://github.com/apache/datafusion-sqlparser-rs/pull/1723) (benrsatori) +- Implement SnowFlake ALTER SESSION [#1712](https://github.com/apache/datafusion-sqlparser-rs/pull/1712) (osipovartem) +- Extend Visitor trait for Value type [#1725](https://github.com/apache/datafusion-sqlparser-rs/pull/1725) (tomershaniii) +- Add support for `ORDER BY ALL` [#1724](https://github.com/apache/datafusion-sqlparser-rs/pull/1724) (PokIsemaine) +- Parse casting to array using double colon operator in Redshift [#1737](https://github.com/apache/datafusion-sqlparser-rs/pull/1737) (yoavcloud) +- Replace parallel condition/result vectors with single CaseWhen vector in Expr::Case. This fixes the iteration order when using the `Visitor` trait. Expressions are now visited in the same order as they appear in the sql source. [#1733](https://github.com/apache/datafusion-sqlparser-rs/pull/1733) (lovasoa) +- BigQuery: Add support for `BEGIN` [#1718](https://github.com/apache/datafusion-sqlparser-rs/pull/1718) (iffyio) +- Parse SIGNED INTEGER type in MySQL CAST [#1739](https://github.com/apache/datafusion-sqlparser-rs/pull/1739) (mvzink) +- Parse MySQL ALTER TABLE ALGORITHM option [#1745](https://github.com/apache/datafusion-sqlparser-rs/pull/1745) (mvzink) +- Random test cleanups use Expr::value [#1749](https://github.com/apache/datafusion-sqlparser-rs/pull/1749) (alamb) +- Parse ALTER TABLE AUTO_INCREMENT operation for MySQL [#1748](https://github.com/apache/datafusion-sqlparser-rs/pull/1748) (mvzink) + +## Credits + +Thank you to everyone who contributed to this release. Here is a breakdown of commits (PRs merged) per contributor. + +``` + 10 Yoav Cohen + 9 Ifeanyi Ubah + 7 Michael Victor Zink + 3 Hans Ott + 2 Andrew Lamb + 2 Ophir LOJKINE + 2 Paul Grau + 2 Rémy SAISSY + 2 wugeer + 1 Armin Ronacher + 1 Artem Osipov + 1 AvivDavid-Satori + 1 Ayman Elkfrawy + 1 DanCodedThis + 1 Denys Tsomenko + 1 Emil + 1 Ian Alexander Joiner + 1 Jesse Stuart + 1 Justin Joyce + 1 Michael + 1 SiLe Zhou + 1 Tyler Brinks + 1 benrsatori + 1 gstvg + 1 tomershaniii +``` + +Thank you also to everyone who contributed in other ways such as filing issues, reviewing PRs, and providing feedback on this release. + diff --git a/changelog/0.56.0.md b/changelog/0.56.0.md new file mode 100644 index 000000000..b3c8a67aa --- /dev/null +++ b/changelog/0.56.0.md @@ -0,0 +1,102 @@ + + +# sqlparser-rs 0.56.0 Changelog + +This release consists of 48 commits from 19 contributors. See credits at the end of this changelog for more information. 
+ +**Other:** + +- Ignore escaped LIKE wildcards in MySQL [#1735](https://github.com/apache/datafusion-sqlparser-rs/pull/1735) (mvzink) +- Parse SET NAMES syntax in Postgres [#1752](https://github.com/apache/datafusion-sqlparser-rs/pull/1752) (mvzink) +- re-add support for nested comments in mssql [#1754](https://github.com/apache/datafusion-sqlparser-rs/pull/1754) (lovasoa) +- Extend support for INDEX parsing [#1707](https://github.com/apache/datafusion-sqlparser-rs/pull/1707) (LucaCappelletti94) +- Parse MySQL `ALTER TABLE DROP FOREIGN KEY` syntax [#1762](https://github.com/apache/datafusion-sqlparser-rs/pull/1762) (mvzink) +- add support for `with` clauses (CTEs) in `delete` statements [#1764](https://github.com/apache/datafusion-sqlparser-rs/pull/1764) (lovasoa) +- SET with a list of comma separated assignments [#1757](https://github.com/apache/datafusion-sqlparser-rs/pull/1757) (MohamedAbdeen21) +- Preserve MySQL-style `LIMIT , ` syntax [#1765](https://github.com/apache/datafusion-sqlparser-rs/pull/1765) (mvzink) +- Add support for `DROP MATERIALIZED VIEW` [#1743](https://github.com/apache/datafusion-sqlparser-rs/pull/1743) (iffyio) +- Add `CASE` and `IF` statement support [#1741](https://github.com/apache/datafusion-sqlparser-rs/pull/1741) (iffyio) +- BigQuery: Add support for `CREATE SCHEMA` options [#1742](https://github.com/apache/datafusion-sqlparser-rs/pull/1742) (iffyio) +- Snowflake: Support dollar quoted comments [#1755](https://github.com/apache/datafusion-sqlparser-rs/pull/1755) +- Add LOCK operation for ALTER TABLE [#1768](https://github.com/apache/datafusion-sqlparser-rs/pull/1768) (MohamedAbdeen21) +- Add support for `RAISE` statement [#1766](https://github.com/apache/datafusion-sqlparser-rs/pull/1766) (iffyio) +- Add GLOBAL context/modifier to SET statements [#1767](https://github.com/apache/datafusion-sqlparser-rs/pull/1767) (MohamedAbdeen21) +- Parse `SUBSTR` as alias for `SUBSTRING` [#1769](https://github.com/apache/datafusion-sqlparser-rs/pull/1769) (mvzink) +- SET statements: scope modifier for multiple assignments [#1772](https://github.com/apache/datafusion-sqlparser-rs/pull/1772) (MohamedAbdeen21) +- Support qualified column names in `MATCH AGAINST` clause [#1774](https://github.com/apache/datafusion-sqlparser-rs/pull/1774) (tomershaniii) +- Mysql: Add support for := operator [#1779](https://github.com/apache/datafusion-sqlparser-rs/pull/1779) (barsela1) +- Add cipherstash-proxy to list of users in README.md [#1782](https://github.com/apache/datafusion-sqlparser-rs/pull/1782) (coderdan) +- Fix typos [#1785](https://github.com/apache/datafusion-sqlparser-rs/pull/1785) (jayvdb) +- Add support for Databricks TIMESTAMP_NTZ. [#1781](https://github.com/apache/datafusion-sqlparser-rs/pull/1781) (romanb) +- Enable double-dot-notation for mssql. [#1787](https://github.com/apache/datafusion-sqlparser-rs/pull/1787) (romanb) +- Fix: Snowflake ALTER SESSION cannot be followed by other statements. [#1786](https://github.com/apache/datafusion-sqlparser-rs/pull/1786) (romanb) +- Add GreptimeDB to the "Users" in README [#1788](https://github.com/apache/datafusion-sqlparser-rs/pull/1788) (MichaelScofield) +- Extend snowflake grant options support [#1794](https://github.com/apache/datafusion-sqlparser-rs/pull/1794) (yoavcloud) +- Fix clippy lint on rust 1.86 [#1796](https://github.com/apache/datafusion-sqlparser-rs/pull/1796) (iffyio) +- Allow single quotes in EXTRACT() for Redshift. 
[#1795](https://github.com/apache/datafusion-sqlparser-rs/pull/1795) (romanb) +- MSSQL: Add support for functionality `MERGE` output clause [#1790](https://github.com/apache/datafusion-sqlparser-rs/pull/1790) (dilovancelik) +- Support additional DuckDB integer types such as HUGEINT, UHUGEINT, etc [#1797](https://github.com/apache/datafusion-sqlparser-rs/pull/1797) (alexander-beedie) +- Add support for MSSQL IF/ELSE statements. [#1791](https://github.com/apache/datafusion-sqlparser-rs/pull/1791) (romanb) +- Allow literal backslash escapes for string literals in Redshift dialect. [#1801](https://github.com/apache/datafusion-sqlparser-rs/pull/1801) (romanb) +- Add support for MySQL's STRAIGHT_JOIN join operator. [#1802](https://github.com/apache/datafusion-sqlparser-rs/pull/1802) (romanb) +- Snowflake COPY INTO target columns, select items and optional alias [#1805](https://github.com/apache/datafusion-sqlparser-rs/pull/1805) (yoavcloud) +- Fix tokenization of qualified identifiers with numeric prefix. [#1803](https://github.com/apache/datafusion-sqlparser-rs/pull/1803) (romanb) +- Add support for `INHERITS` option in `CREATE TABLE` statement [#1806](https://github.com/apache/datafusion-sqlparser-rs/pull/1806) (LucaCappelletti94) +- Add `DROP TRIGGER` support for SQL Server [#1813](https://github.com/apache/datafusion-sqlparser-rs/pull/1813) (aharpervc) +- Snowflake: support nested join without parentheses [#1799](https://github.com/apache/datafusion-sqlparser-rs/pull/1799) (barsela1) +- Add support for parenthesized subquery as `IN` predicate [#1793](https://github.com/apache/datafusion-sqlparser-rs/pull/1793) (adamchainz) +- Fix `STRAIGHT_JOIN` constraint when table alias is absent [#1812](https://github.com/apache/datafusion-sqlparser-rs/pull/1812) (killertux) +- Add support for `PRINT` statement for SQL Server [#1811](https://github.com/apache/datafusion-sqlparser-rs/pull/1811) (aharpervc) +- enable `supports_filter_during_aggregation` for Generic dialect [#1815](https://github.com/apache/datafusion-sqlparser-rs/pull/1815) (goldmedal) +- Add support for `XMLTABLE` [#1817](https://github.com/apache/datafusion-sqlparser-rs/pull/1817) (lovasoa) +- Add `CREATE FUNCTION` support for SQL Server [#1808](https://github.com/apache/datafusion-sqlparser-rs/pull/1808) (aharpervc) +- Add `OR ALTER` support for `CREATE VIEW` [#1818](https://github.com/apache/datafusion-sqlparser-rs/pull/1818) (aharpervc) +- Add `DECLARE ... CURSOR FOR` support for SQL Server [#1821](https://github.com/apache/datafusion-sqlparser-rs/pull/1821) (aharpervc) +- Handle missing login in changelog generate script [#1823](https://github.com/apache/datafusion-sqlparser-rs/pull/1823) (iffyio) +- Snowflake: Add support for `CONNECT_BY_ROOT` [#1780](https://github.com/apache/datafusion-sqlparser-rs/pull/1780) (tomershaniii) + +## Credits + +Thank you to everyone who contributed to this release. Here is a breakdown of commits (PRs merged) per contributor. + +``` + 8 Roman Borschel + 6 Ifeanyi Ubah + 5 Andrew Harper + 5 Michael Victor Zink + 4 Mohamed Abdeen + 3 Ophir LOJKINE + 2 Luca Cappelletti + 2 Yoav Cohen + 2 bar sela + 2 tomershaniii + 1 Adam Johnson + 1 Aleksei Piianin + 1 Alexander Beedie + 1 Bruno Clemente + 1 Dan Draper + 1 DilovanCelik + 1 Jax Liu + 1 John Vandenberg + 1 LFC +``` + +Thank you also to everyone who contributed in other ways such as filing issues, reviewing PRs, and providing feedback on this release. 
+ diff --git a/derive/Cargo.toml b/derive/Cargo.toml index 0c5852c4c..7b6477300 100644 --- a/derive/Cargo.toml +++ b/derive/Cargo.toml @@ -17,8 +17,8 @@ [package] name = "sqlparser_derive" -description = "proc macro for sqlparser" -version = "0.2.2" +description = "Procedural (proc) macros for sqlparser" +version = "0.3.0" authors = ["sqlparser-rs authors"] homepage = "https://github.com/sqlparser-rs/sqlparser-rs" documentation = "https://docs.rs/sqlparser_derive/" @@ -28,6 +28,7 @@ license = "Apache-2.0" include = [ "src/**/*.rs", "Cargo.toml", + "LICENSE.TXT", ] edition = "2021" diff --git a/derive/LICENSE.TXT b/derive/LICENSE.TXT new file mode 120000 index 000000000..14259afe2 --- /dev/null +++ b/derive/LICENSE.TXT @@ -0,0 +1 @@ +../LICENSE.TXT \ No newline at end of file diff --git a/derive/README.md b/derive/README.md index aa70e7c71..b5ccc69e0 100644 --- a/derive/README.md +++ b/derive/README.md @@ -151,6 +151,55 @@ visitor.post_visit_expr() visitor.post_visit_expr() ``` +If the field is a `Option` and add `#[with = "visit_xxx"]` to the field, the generated code +will try to access the field only if it is `Some`: + +```rust +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub struct ShowStatementIn { + pub clause: ShowStatementInClause, + pub parent_type: Option, + #[cfg_attr(feature = "visitor", visit(with = "visit_relation"))] + pub parent_name: Option, +} +``` + +This will generate + +```rust +impl sqlparser::ast::Visit for ShowStatementIn { + fn visit( + &self, + visitor: &mut V, + ) -> ::std::ops::ControlFlow { + sqlparser::ast::Visit::visit(&self.clause, visitor)?; + sqlparser::ast::Visit::visit(&self.parent_type, visitor)?; + if let Some(value) = &self.parent_name { + visitor.pre_visit_relation(value)?; + sqlparser::ast::Visit::visit(value, visitor)?; + visitor.post_visit_relation(value)?; + } + ::std::ops::ControlFlow::Continue(()) + } +} + +impl sqlparser::ast::VisitMut for ShowStatementIn { + fn visit( + &mut self, + visitor: &mut V, + ) -> ::std::ops::ControlFlow { + sqlparser::ast::VisitMut::visit(&mut self.clause, visitor)?; + sqlparser::ast::VisitMut::visit(&mut self.parent_type, visitor)?; + if let Some(value) = &mut self.parent_name { + visitor.pre_visit_relation(value)?; + sqlparser::ast::VisitMut::visit(value, visitor)?; + visitor.post_visit_relation(value)?; + } + ::std::ops::ControlFlow::Continue(()) + } +} +``` + ## Releasing This crate's release is not automated. Instead it is released manually as needed diff --git a/derive/src/lib.rs b/derive/src/lib.rs index 5ad1607f9..08c5c5db4 100644 --- a/derive/src/lib.rs +++ b/derive/src/lib.rs @@ -21,8 +21,9 @@ use syn::spanned::Spanned; use syn::{ parse::{Parse, ParseStream}, parse_macro_input, parse_quote, Attribute, Data, DeriveInput, Fields, GenericParam, Generics, - Ident, Index, LitStr, Meta, Token, + Ident, Index, LitStr, Meta, Token, Type, TypePath, }; +use syn::{Path, PathArguments}; /// Implementation of `[#derive(Visit)]` #[proc_macro_derive(VisitMut, attributes(visit))] @@ -77,7 +78,10 @@ fn derive_visit(input: proc_macro::TokenStream, visit_type: &VisitType) -> proc_ let expanded = quote! { // The generated impl. + // Note that it uses [`recursive::recursive`] to protect from stack overflow. + // See tests in https://github.com/apache/datafusion-sqlparser-rs/pull/1522/ for more info. 
impl #impl_generics sqlparser::ast::#visit_trait for #name #ty_generics #where_clause { + #[cfg_attr(feature = "recursive-protection", recursive::recursive)] fn visit( &#modifier self, visitor: &mut V @@ -182,9 +186,21 @@ fn visit_children( Fields::Named(fields) => { let recurse = fields.named.iter().map(|f| { let name = &f.ident; + let is_option = is_option(&f.ty); let attributes = Attributes::parse(&f.attrs); - let (pre_visit, post_visit) = attributes.visit(quote!(&#modifier self.#name)); - quote_spanned!(f.span() => #pre_visit sqlparser::ast::#visit_trait::visit(&#modifier self.#name, visitor)?; #post_visit) + if is_option && attributes.with.is_some() { + let (pre_visit, post_visit) = attributes.visit(quote!(value)); + quote_spanned!(f.span() => + if let Some(value) = &#modifier self.#name { + #pre_visit sqlparser::ast::#visit_trait::visit(value, visitor)?; #post_visit + } + ) + } else { + let (pre_visit, post_visit) = attributes.visit(quote!(&#modifier self.#name)); + quote_spanned!(f.span() => + #pre_visit sqlparser::ast::#visit_trait::visit(&#modifier self.#name, visitor)?; #post_visit + ) + } }); quote! { #(#recurse)* @@ -256,3 +272,20 @@ fn visit_children( Data::Union(_) => unimplemented!(), } } + +fn is_option(ty: &Type) -> bool { + if let Type::Path(TypePath { + path: Path { segments, .. }, + .. + }) = ty + { + if let Some(segment) = segments.last() { + if segment.ident == "Option" { + if let PathArguments::AngleBracketed(args) = &segment.arguments { + return args.args.len() == 1; + } + } + } + } + false +} diff --git a/dev/release/README.md b/dev/release/README.md new file mode 100644 index 000000000..c3018dd68 --- /dev/null +++ b/dev/release/README.md @@ -0,0 +1,187 @@ + + + +## Process Overview + +As part of the Apache governance model, official releases consist of signed +source tarballs approved by the DataFusion PMC. + +We then use the code in the approved artifacts to release to crates.io. + +### Change Log + +We maintain a `CHANGELOG.md` so our users know what has been changed between releases. + +You will need a GitHub Personal Access Token for the following steps. Follow +[these instructions](https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/creating-a-personal-access-token) +to generate one if you do not already have one. + +The changelog is generated using a Python script which needs `PyGitHub`, installed using pip: + +```shell +pip3 install PyGitHub +``` + +To generate the changelog, set the `GITHUB_TOKEN` environment variable to a valid token and then run the script +providing two commit ids or tags followed by the version number of the release being created. The following +example generates a change log of all changes between the first commit and the current HEAD revision. + +```shell +export GITHUB_TOKEN= +python ./dev/release/generate-changelog.py v0.51.0 HEAD 0.52.0 > changelog/0.52.0.md +``` + +This script creates a changelog from GitHub PRs based on the labels associated with them as well as looking for +titles starting with `feat:`, `fix:`, or `docs:`. + +Add an entry to CHANGELOG.md for the new version + +## Prepare release commits and PR + +### Update Version + +Checkout the main commit to be released + +```shell +git fetch apache +git checkout apache/main +``` + +Manually update the version in the root `Cargo.toml` to the release version (e.g. `0.52.0`). 
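For example, the version bump can be made and sanity-checked from the command line; this is only a sketch (the commands are not part of the release tooling, and the `-i` flag shown assumes GNU sed):

```shell
# Point the package version at the release being prepared (use `sed -i ''` on macOS/BSD sed).
sed -i 's/^version = ".*"/version = "0.52.0"/' Cargo.toml
# Confirm the edit took effect before committing.
grep '^version' Cargo.toml
```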
+ +Lastly commit the version change: + +```shell +git commit -a -m 'Update version' +``` + +## Prepare release candidate artifacts + +After the PR gets merged, you are ready to create release artifacts from the +merged commit. + +(Note you need to be a committer to run these scripts as they upload to the apache svn distribution servers) + +### Pick a Release Candidate (RC) number + +Pick numbers in sequential order, with `0` for `rc0`, `1` for `rc1`, etc. + +### Create git tag for the release: + +While the official release artifacts are signed tarballs and zip files, we also +tag the commit it was created from for convenience and code archaeology. + +Using a string such as `v0.52.0` as the `<version>`, create and push the tag by running these commands: + +For example, to tag version `0.52.0`: + +```shell +git fetch apache +git tag v0.52.0-rc1 apache/main +# push tag to Github remote +git push apache v0.52.0-rc1 +``` + +### Create, sign, and upload artifacts + +Run `create-tarball.sh` with the `<version>` tag and `<rc>` number you found in the previous steps: + +```shell +GITHUB_TOKEN=<TOKEN> ./dev/release/create-tarball.sh 0.52.0 1 +``` + +The `create-tarball.sh` script: + +1. creates and uploads all release candidate artifacts to the [datafusion + dev](https://dist.apache.org/repos/dist/dev/datafusion) location on the + apache distribution svn server + +2. provides you an email template to + send to dev@datafusion.apache.org for release voting. + +### Vote on Release Candidate artifacts + +Send the email output from the script to dev@datafusion.apache.org. + +For the release to become "official" it needs at least three PMC members to vote +1 on it. + +### Verifying Release Candidates + +The `dev/release/verify-release-candidate.sh` script in this repository can assist in the verification process. Run it like: + +```shell +./dev/release/verify-release-candidate.sh 0.52.0 1 +``` + +#### If the release is not approved + +If the release is not approved, fix whatever the problem is, merge any changelog +changes into main, and try again with the next RC number. + +## Finalize the release + +NOTE: steps in this section can only be done by PMC members. + +### After the release is approved + +Move artifacts to the release location in SVN, using the `release-tarball.sh` script: + +```shell +./dev/release/release-tarball.sh 0.52.0 1 +``` + +Promote the rc tag to the release tag: +```shell +git tag v0.52.0 v0.52.0-rc3 +git push apache v0.52.0 +``` + +Congratulations! The release is now official! + +### Publish on Crates.io + +Only approved releases of the tarball should be published to +crates.io, in order to conform to Apache Software Foundation +governance standards. + +After an official project release has been made, a DataFusion committer can publish +the crate to crates.io using the following instructions. + +Follow [these +instructions](https://doc.rust-lang.org/cargo/reference/publishing.html) to +create an account and log in to crates.io before asking to be added as an owner +to the sqlparser DataFusion crates. + +Download and unpack the official release tarball. + +Verify that the Cargo.toml in the tarball contains the correct version +(e.g.
`version = "0.52.0"`) and then publish the crates by running the following commands + +```shell +cargo publish +``` + +If necessary, also publish the `sqlparser_derive` crate: + +crates.io homepage: https://crates.io/crates/sqlparser_derive + +```shell +(cd derive && cargo publish +``` diff --git a/dev/release/check-rat-report.py b/dev/release/check-rat-report.py new file mode 100644 index 000000000..e30d72bdd --- /dev/null +++ b/dev/release/check-rat-report.py @@ -0,0 +1,59 @@ +#!/usr/bin/python +############################################################################## +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +############################################################################## +import fnmatch +import re +import sys +import xml.etree.ElementTree as ET + +if len(sys.argv) != 3: + sys.stderr.write("Usage: %s exclude_globs.lst rat_report.xml\n" % + sys.argv[0]) + sys.exit(1) + +exclude_globs_filename = sys.argv[1] +xml_filename = sys.argv[2] + +globs = [line.strip() for line in open(exclude_globs_filename, "r")] + +tree = ET.parse(xml_filename) +root = tree.getroot() +resources = root.findall('resource') + +all_ok = True +for r in resources: + approvals = r.findall('license-approval') + if not approvals or approvals[0].attrib['name'] == 'true': + continue + clean_name = re.sub('^[^/]+/', '', r.attrib['name']) + excluded = False + for g in globs: + if fnmatch.fnmatch(clean_name, g): + excluded = True + break + if not excluded: + sys.stdout.write("NOT APPROVED: %s (%s): %s\n" % ( + clean_name, r.attrib['name'], approvals[0].attrib['name'])) + all_ok = False + +if not all_ok: + sys.exit(1) + +print('OK') +sys.exit(0) diff --git a/dev/release/create-tarball.sh b/dev/release/create-tarball.sh new file mode 100755 index 000000000..4cb17cd36 --- /dev/null +++ b/dev/release/create-tarball.sh @@ -0,0 +1,135 @@ +#!/bin/bash +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# + +# Adapted from https://github.com/apache/datafusion/tree/master/dev/release/create-tarball.sh + +# This script creates a signed tarball in +# dev/dist/apache-datafusion-sqlparser-rs-<version>-rc<rc>.tar.gz and uploads it to +# the "dev" area of the dist.apache.datafusion repository and prepares an +# email for sending to the dev@datafusion.apache.org list for a formal +# vote. +# +# See release/README.md for full release instructions +# +# Requirements: +# +# 1. gpg set up for signing, with your public key uploaded to +#    https://pgp.mit.edu/ +# +# 2. Logged into the apache svn server with the appropriate +#    credentials +# +# 3. Install the requests python package +# +# +# Based in part on 02-source.sh from apache/arrow +# + +set -e + +SOURCE_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +SOURCE_TOP_DIR="$(cd "${SOURCE_DIR}/../../" && pwd)" + +if [ "$#" -ne 2 ]; then + echo "Usage: $0 <version> <rc>" + echo "ex. $0 0.52.0 2" + exit +fi + +if [[ -z "${GITHUB_TOKEN}" ]]; then + echo "Please set personal github token through GITHUB_TOKEN environment variable" + exit +fi + +version=$1 +rc=$2 +tag="v${version}-rc${rc}" + +echo "Attempting to create a release tarball from tag ${tag}" +release_hash=$(cd "${SOURCE_TOP_DIR}" && git rev-list --max-count=1 ${tag}) + +release=apache-datafusion-sqlparser-rs-${version} +distdir=${SOURCE_TOP_DIR}/dev/dist/${release}-rc${rc} +tarname=${release}.tar.gz +tarball=${distdir}/${tarname} +url="https://dist.apache.org/repos/dist/dev/datafusion/${release}-rc${rc}" + +if [ -z "$release_hash" ]; then + echo "Cannot continue: unknown git tag: ${tag}" +fi + +echo "Draft email for dev@datafusion.apache.org mailing list" +echo "" +echo "---------------------------------------------------------" +cat <<MAIL +[... draft vote email template elided ...] +MAIL +echo "---------------------------------------------------------" + +# create <tarball> containing the files in git at $release_hash +# the files in the tarball are prefixed with {version} (e.g. 4.0.1) +mkdir -p ${distdir} +(cd "${SOURCE_TOP_DIR}" && git archive ${release_hash} --prefix ${release}/ | gzip > ${tarball}) + +echo "Running rat license checker on ${tarball}" +${SOURCE_DIR}/run-rat.sh ${tarball} + +echo "Signing tarball and creating checksums" +gpg --armor --output ${tarball}.asc --detach-sig ${tarball} +# create signing with relative path of tarball +# so that they can be verified with a command such as +#  shasum --check apache-datafusion-sqlparser-rs-0.52.0-rc1.tar.gz.sha512 +(cd ${distdir} && shasum -a 256 ${tarname}) > ${tarball}.sha256 +(cd ${distdir} && shasum -a 512 ${tarname}) > ${tarball}.sha512 + + +echo "Uploading to sqlparser-rs dist/dev to ${url}" +svn co --depth=empty https://dist.apache.org/repos/dist/dev/datafusion ${SOURCE_TOP_DIR}/dev/dist +svn add ${distdir} +svn ci -m "Apache DataFusion ${version} ${rc}" ${distdir} diff --git a/dev/release/generate-changelog.py b/dev/release/generate-changelog.py new file mode 100755 index 000000000..6f2b7c41c --- /dev/null +++ b/dev/release/generate-changelog.py @@ -0,0 +1,165 @@ +#!/usr/bin/env python + +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License.
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import argparse +import sys +from github import Github +import os +import re +import subprocess + +def print_pulls(repo_name, title, pulls): + if len(pulls) > 0: + print("**{}:**".format(title)) + print() + for (pull, commit) in pulls: + url = "https://github.com/{}/pull/{}".format(repo_name, pull.number) + author = f"({commit.author.login})" if commit.author else '' + print("- {} [#{}]({}) {}".format(pull.title, pull.number, url, author)) + print() + + +def generate_changelog(repo, repo_name, tag1, tag2, version): + + # get a list of commits between two tags + print(f"Fetching list of commits between {tag1} and {tag2}", file=sys.stderr) + comparison = repo.compare(tag1, tag2) + + # get the pull requests for these commits + print("Fetching pull requests", file=sys.stderr) + unique_pulls = [] + all_pulls = [] + for commit in comparison.commits: + pulls = commit.get_pulls() + for pull in pulls: + # there can be multiple commits per PR if squash merge is not being used and + # in this case we should get all the author names, but for now just pick one + if pull.number not in unique_pulls: + unique_pulls.append(pull.number) + all_pulls.append((pull, commit)) + + # we split the pulls into categories + breaking = [] + bugs = [] + docs = [] + enhancements = [] + performance = [] + other = [] + + # categorize the pull requests based on GitHub labels + print("Categorizing pull requests", file=sys.stderr) + for (pull, commit) in all_pulls: + + # see if PR title uses Conventional Commits + cc_type = '' + cc_scope = '' + cc_breaking = '' + parts = re.findall(r'^([a-z]+)(\([a-z]+\))?(!)?:', pull.title) + if len(parts) == 1: + parts_tuple = parts[0] + cc_type = parts_tuple[0] # fix, feat, docs, chore + cc_scope = parts_tuple[1] # component within project + cc_breaking = parts_tuple[2] == '!' + + labels = [label.name for label in pull.labels] + if 'api change' in labels or cc_breaking: + breaking.append((pull, commit)) + elif 'bug' in labels or cc_type == 'fix': + bugs.append((pull, commit)) + elif 'performance' in labels or cc_type == 'perf': + performance.append((pull, commit)) + elif 'enhancement' in labels or cc_type == 'feat': + enhancements.append((pull, commit)) + elif 'documentation' in labels or cc_type == 'docs' or cc_type == 'doc': + docs.append((pull, commit)) + else: + other.append((pull, commit)) + + # produce the changelog content + print("Generating changelog content", file=sys.stderr) + + # ASF header + print("""\n""") + + print(f"# sqlparser-rs {version} Changelog\n") + + # get the number of commits + commit_count = subprocess.check_output(f"git log --pretty=oneline {tag1}..{tag2} | wc -l", shell=True, text=True).strip() + + # get number of contributors + contributor_count = subprocess.check_output(f"git shortlog -sn {tag1}..{tag2} | wc -l", shell=True, text=True).strip() + + print(f"This release consists of {commit_count} commits from {contributor_count} contributors. 
" + f"See credits at the end of this changelog for more information.\n") + + print_pulls(repo_name, "Breaking changes", breaking) + print_pulls(repo_name, "Performance related", performance) + print_pulls(repo_name, "Implemented enhancements", enhancements) + print_pulls(repo_name, "Fixed bugs", bugs) + print_pulls(repo_name, "Documentation updates", docs) + print_pulls(repo_name, "Other", other) + + # show code contributions + credits = subprocess.check_output(f"git shortlog -sn {tag1}..{tag2}", shell=True, text=True).rstrip() + + print("## Credits\n") + print("Thank you to everyone who contributed to this release. Here is a breakdown of commits (PRs merged) " + "per contributor.\n") + print("```") + print(credits) + print("```\n") + + print("Thank you also to everyone who contributed in other ways such as filing issues, reviewing " + "PRs, and providing feedback on this release.\n") + +def cli(args=None): + """Process command line arguments.""" + if not args: + args = sys.argv[1:] + + parser = argparse.ArgumentParser() + parser.add_argument("tag1", help="The previous commit or tag (e.g. 0.1.0)") + parser.add_argument("tag2", help="The current commit or tag (e.g. HEAD)") + parser.add_argument("version", help="The version number to include in the changelog") + args = parser.parse_args() + + token = os.getenv("GITHUB_TOKEN") + project = "apache/datafusion-sqlparser-rs" + + g = Github(token) + repo = g.get_repo(project) + generate_changelog(repo, project, args.tag1, args.tag2, args.version) + +if __name__ == "__main__": + cli() diff --git a/dev/release/rat_exclude_files.txt b/dev/release/rat_exclude_files.txt new file mode 100644 index 000000000..562eec2f1 --- /dev/null +++ b/dev/release/rat_exclude_files.txt @@ -0,0 +1,7 @@ +# Files to exclude from the Apache Rat (license) check +.gitignore +.tool-versions +dev/release/rat_exclude_files.txt +fuzz/.gitignore +sqlparser_bench/img/flamegraph.svg + diff --git a/dev/release/release-tarball.sh b/dev/release/release-tarball.sh new file mode 100755 index 000000000..e59b2776c --- /dev/null +++ b/dev/release/release-tarball.sh @@ -0,0 +1,74 @@ +#!/bin/bash +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# + +# Adapted from https://github.com/apache/arrow-rs/tree/master/dev/release/release-tarball.sh + +# This script copies a tarball from the "dev" area of the +# dist.apache.datafusion repository to the "release" area +# +# This script should only be run after the release has been approved +# by the Apache DataFusion PMC committee. +# +# See release/README.md for full release instructions +# +# Based in part on post-01-upload.sh from apache/arrow + + +set -e +set -u + +if [ "$#" -ne 2 ]; then + echo "Usage: $0 " + echo "ex. 
$0 0.52.0 2" + exit +fi + +version=$1 +rc=$2 + +tmp_dir=tmp-apache-datafusion-dist + +echo "Recreate temporary directory: ${tmp_dir}" +rm -rf ${tmp_dir} +mkdir -p ${tmp_dir} + +echo "Clone dev dist repository" +svn \ + co \ + https://dist.apache.org/repos/dist/dev/datafusion/apache-datafusion-sqlparser-rs-${version}-rc${rc} \ + ${tmp_dir}/dev + +echo "Clone release dist repository" +svn co https://dist.apache.org/repos/dist/release/datafusion ${tmp_dir}/release + +echo "Copy ${version}-rc${rc} to release working copy" +release_version=datafusion-sqlparser-rs-${version} +mkdir -p ${tmp_dir}/release/${release_version} +cp -r ${tmp_dir}/dev/* ${tmp_dir}/release/${release_version}/ +svn add ${tmp_dir}/release/${release_version} + +echo "Commit release" +svn ci -m "Apache DataFusion sqlparser-rs ${version}" ${tmp_dir}/release + +echo "Clean up" +rm -rf ${tmp_dir} + +echo "Success! The release is available here:" +echo " https://dist.apache.org/repos/dist/release/datafusion/${release_version}" diff --git a/dev/release/run-rat.sh b/dev/release/run-rat.sh new file mode 100755 index 000000000..94fa55fbe --- /dev/null +++ b/dev/release/run-rat.sh @@ -0,0 +1,43 @@ +#!/bin/bash +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# + +RAT_VERSION=0.13 + +# download apache rat +if [ ! -f apache-rat-${RAT_VERSION}.jar ]; then + curl -s https://repo1.maven.org/maven2/org/apache/rat/apache-rat/${RAT_VERSION}/apache-rat-${RAT_VERSION}.jar > apache-rat-${RAT_VERSION}.jar +fi + +RAT="java -jar apache-rat-${RAT_VERSION}.jar -x " + +RELEASE_DIR=$(cd "$(dirname "$BASH_SOURCE")"; pwd) + +# generate the rat report +$RAT $1 > rat.txt +python $RELEASE_DIR/check-rat-report.py $RELEASE_DIR/rat_exclude_files.txt rat.txt > filtered_rat.txt +cat filtered_rat.txt +UNAPPROVED=`cat filtered_rat.txt | grep "NOT APPROVED" | wc -l` + +if [ "0" -eq "${UNAPPROVED}" ]; then + echo "No unapproved licenses" +else + echo "${UNAPPROVED} unapproved licences. Check rat report: rat.txt" + exit 1 +fi diff --git a/dev/release/verify-release-candidate.sh b/dev/release/verify-release-candidate.sh new file mode 100755 index 000000000..9ff7e17b5 --- /dev/null +++ b/dev/release/verify-release-candidate.sh @@ -0,0 +1,152 @@ +#!/bin/bash +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# + +case $# in + 2) VERSION="$1" + RC_NUMBER="$2" + ;; + *) echo "Usage: $0 X.Y.Z RC_NUMBER" + exit 1 + ;; +esac + +set -e +set -x +set -o pipefail + +SOURCE_DIR="$(cd "$(dirname "${BASH_SOURCE[0]:-$0}")" && pwd)" +ARROW_DIR="$(dirname $(dirname ${SOURCE_DIR}))" +ARROW_DIST_URL='https://dist.apache.org/repos/dist/dev/datafusion' + +download_dist_file() { + curl \ + --silent \ + --show-error \ + --fail \ + --location \ + --remote-name $ARROW_DIST_URL/$1 +} + +download_rc_file() { + download_dist_file apache-datafusion-sqlparser-rs-${VERSION}-rc${RC_NUMBER}/$1 +} + +import_gpg_keys() { + download_dist_file KEYS + gpg --import KEYS +} + +if type shasum >/dev/null 2>&1; then + sha256_verify="shasum -a 256 -c" + sha512_verify="shasum -a 512 -c" +else + sha256_verify="sha256sum -c" + sha512_verify="sha512sum -c" +fi + +fetch_archive() { + local dist_name=$1 + download_rc_file ${dist_name}.tar.gz + download_rc_file ${dist_name}.tar.gz.asc + download_rc_file ${dist_name}.tar.gz.sha256 + download_rc_file ${dist_name}.tar.gz.sha512 + verify_dir_artifact_signatures +} + +verify_dir_artifact_signatures() { + # verify the signature and the checksums of each artifact + find . -name '*.asc' | while read sigfile; do + artifact=${sigfile/.asc/} + gpg --verify $sigfile $artifact || exit 1 + + # go into the directory because the checksum files contain only the + # basename of the artifact + pushd $(dirname $artifact) + base_artifact=$(basename $artifact) + ${sha256_verify} $base_artifact.sha256 || exit 1 + ${sha512_verify} $base_artifact.sha512 || exit 1 + popd + done +} + +setup_tempdir() { + cleanup() { + if [ "${TEST_SUCCESS}" = "yes" ]; then + rm -fr "${ARROW_TMPDIR}" + else + echo "Failed to verify release candidate. See ${ARROW_TMPDIR} for details." + fi + } + + if [ -z "${ARROW_TMPDIR}" ]; then + # clean up automatically if ARROW_TMPDIR is not defined + ARROW_TMPDIR=$(mktemp -d -t "$1.XXXXX") + trap cleanup EXIT + else + # don't clean up automatically + mkdir -p "${ARROW_TMPDIR}" + fi +} + +test_source_distribution() { + # install rust toolchain in a similar fashion like test-miniconda + export RUSTUP_HOME=$PWD/test-rustup + export CARGO_HOME=$PWD/test-rustup + + curl https://sh.rustup.rs -sSf | sh -s -- -y --no-modify-path + + export PATH=$RUSTUP_HOME/bin:$PATH + source $RUSTUP_HOME/env + + # build and test rust + + # raises on any formatting errors + rustup component add rustfmt --toolchain stable + cargo fmt --all -- --check + + cargo build + cargo test --all-features + + if ( find -iname 'Cargo.toml' | xargs grep SNAPSHOT ); then + echo "Cargo.toml version should not contain SNAPSHOT for releases" + exit 1 + fi + + # Check that publish works + cargo publish --dry-run +} + +TEST_SUCCESS=no + +setup_tempdir "datafusion-sqlparser-rs-${VERSION}" +echo "Working in sandbox ${ARROW_TMPDIR}" +cd ${ARROW_TMPDIR} + +dist_name="apache-datafusion-sqlparser-rs-${VERSION}" +import_gpg_keys +fetch_archive ${dist_name} +tar xf ${dist_name}.tar.gz +pushd ${dist_name} + test_source_distribution +popd + +TEST_SUCCESS=yes +echo 'Release candidate looks good!' 
+exit 0 diff --git a/docs/releasing.md b/docs/releasing.md deleted file mode 100644 index c1b85a20c..000000000 --- a/docs/releasing.md +++ /dev/null @@ -1,81 +0,0 @@ - - -# Releasing - -## Prerequisites -Publishing to crates.io has been automated via GitHub Actions, so you will only -need push access to the [sqlparser-rs GitHub repository](https://github.com/sqlparser-rs/sqlparser-rs) -in order to publish a release. - -We use the [`cargo release`](https://github.com/sunng87/cargo-release) -subcommand to ensure correct versioning. Install via: - -``` -$ cargo install cargo-release -``` - -## Process - -1. **Before releasing** ensure `CHANGELOG.md` is updated appropriately and that - you have a clean checkout of the `main` branch of the sqlparser repository: - ``` - $ git fetch && git status - On branch main - Your branch is up to date with 'origin/main'. - - nothing to commit, working tree clean - ``` - * If you have the time, check that the examples in the README are up to date. - -2. Using `cargo-release` we can publish a new release like so: - - ``` - $ cargo release minor --push-remote origin - ``` - - After verifying, you can rerun with `--execute` if all looks good. - You can add `--no-push` to stop before actually publishing the release. - - `cargo release` will then: - - * Bump the minor part of the version in `Cargo.toml` (e.g. `0.7.1-alpha.0` - -> `0.8.0`. You can use `patch` instead of `minor`, as appropriate). - * Create a new tag (e.g. `v0.8.0`) locally - * Push the new tag to the specified remote (`origin` in the above - example), which will trigger a publishing process to crates.io as part of - the [corresponding GitHub Action](https://github.com/sqlparser-rs/sqlparser-rs/blob/main/.github/workflows/rust.yml). - - Note that credentials for authoring in this way are securely stored in - the (GitHub) repo secrets as `CRATE_TOKEN`. - -4. Check that the new version of the crate is available on crates.io: - https://crates.io/crates/sqlparser - - -## `sqlparser_derive` crate - -Currently this crate is manually published via `cargo publish`. - -crates.io homepage: https://crates.io/crates/sqlparser_derive - -```shell -cd derive -cargo publish -``` diff --git a/examples/cli.rs b/examples/cli.rs index 8a5d6501e..0252fca74 100644 --- a/examples/cli.rs +++ b/examples/cli.rs @@ -17,9 +17,11 @@ #![warn(clippy::all)] -/// A small command-line app to run the parser. -/// Run with `cargo run --example cli` +//! A small command-line app to run the parser. +//! 
Run with `cargo run --example cli` + use std::fs; +use std::io::{stdin, Read}; use simple_logger::SimpleLogger; use sqlparser::dialect::*; @@ -38,6 +40,9 @@ $ cargo run --example cli FILENAME.sql [--dialectname] To print the parse results as JSON: $ cargo run --feature json_example --example cli FILENAME.sql [--dialectname] +To read from stdin instead of a file: +$ cargo run --example cli - [--dialectname] + "#, ); @@ -57,9 +62,18 @@ $ cargo run --feature json_example --example cli FILENAME.sql [--dialectname] s => panic!("Unexpected parameter: {s}"), }; - println!("Parsing from file '{}' using {:?}", &filename, dialect); - let contents = fs::read_to_string(&filename) - .unwrap_or_else(|_| panic!("Unable to read the file {}", &filename)); + let contents = if filename == "-" { + println!("Parsing from stdin using {:?}", dialect); + let mut buf = Vec::new(); + stdin() + .read_to_end(&mut buf) + .expect("failed to read from stdin"); + String::from_utf8(buf).expect("stdin content wasn't valid utf8") + } else { + println!("Parsing from file '{}' using {:?}", &filename, dialect); + fs::read_to_string(&filename) + .unwrap_or_else(|_| panic!("Unable to read the file {}", &filename)) + }; let without_bom = if contents.chars().next().unwrap() as u64 != 0xfeff { contents.as_str() } else { diff --git a/sqlparser_bench/Cargo.toml b/sqlparser_bench/Cargo.toml index 9c33658a2..2c1f0ae4d 100644 --- a/sqlparser_bench/Cargo.toml +++ b/sqlparser_bench/Cargo.toml @@ -17,6 +17,7 @@ [package] name = "sqlparser_bench" +description = "Benchmarks for sqlparser" version = "0.1.0" authors = ["Dandandan "] edition = "2018" diff --git a/sqlparser_bench/README.md b/sqlparser_bench/README.md new file mode 100644 index 000000000..7f2c26254 --- /dev/null +++ b/sqlparser_bench/README.md @@ -0,0 +1,42 @@ + + +Benchmarks for sqlparser. See [the main README](../README.md) for more information. + +Note: this is in a separate, non workspace crate to avoid adding a dependency +on `criterion` to the main crate (which complicates testing without std). 
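A dependency layout along the following lines keeps `criterion` out of the main crate's graph; this is an illustrative sketch rather than a copy of the actual `sqlparser_bench/Cargo.toml`:

```toml
[package]
name = "sqlparser_bench"
version = "0.1.0"
edition = "2018"

[dependencies]
# The benchmark crate depends on the parser by path, so the parser itself
# never picks up a criterion dependency.
sqlparser = { path = ".." }

[dev-dependencies]
criterion = "0.5"

[[bench]]
name = "sqlparser_bench"
harness = false
```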
+ +# Running Benchmarks + +```shell +cargo bench --bench sqlparser_bench +``` + +# Profiling + +Note you can generate a [flamegraph] using the following command: + +```shell +cargo flamegraph --bench sqlparser_bench +``` + +[flamegraph]: https://crates.io/crates/flamegraph + +Here is an example flamegraph: +![flamegraph](img/flamegraph.svg) diff --git a/sqlparser_bench/benches/sqlparser_bench.rs b/sqlparser_bench/benches/sqlparser_bench.rs index 27c58b450..a7768cbc9 100644 --- a/sqlparser_bench/benches/sqlparser_bench.rs +++ b/sqlparser_bench/benches/sqlparser_bench.rs @@ -23,9 +23,9 @@ fn basic_queries(c: &mut Criterion) { let mut group = c.benchmark_group("sqlparser-rs parsing benchmark"); let dialect = GenericDialect {}; - let string = "SELECT * FROM table WHERE 1 = 1"; + let string = "SELECT * FROM my_table WHERE 1 = 1"; group.bench_function("sqlparser::select", |b| { - b.iter(|| Parser::parse_sql(&dialect, string)); + b.iter(|| Parser::parse_sql(&dialect, string).unwrap()); }); let with_query = " @@ -33,14 +33,53 @@ fn basic_queries(c: &mut Criterion) { SELECT MAX(a) AS max_a, COUNT(b) AS b_num, user_id - FROM TABLE + FROM MY_TABLE GROUP BY user_id ) - SELECT * FROM table + SELECT * FROM my_table LEFT JOIN derived USING (user_id) "; group.bench_function("sqlparser::with_select", |b| { - b.iter(|| Parser::parse_sql(&dialect, with_query)); + b.iter(|| Parser::parse_sql(&dialect, with_query).unwrap()); + }); + + let large_statement = { + let expressions = (0..1000) + .map(|n| format!("FN_{}(COL_{})", n, n)) + .collect::<Vec<_>>() + .join(", "); + let tables = (0..1000) + .map(|n| format!("TABLE_{}", n)) + .collect::<Vec<_>>() + .join(" JOIN "); + let where_condition = (0..1000) + .map(|n| format!("COL_{} = {}", n, n)) + .collect::<Vec<_>>() + .join(" OR "); + let order_condition = (0..1000) + .map(|n| format!("COL_{} DESC", n)) + .collect::<Vec<_>>() + .join(", "); + + format!( + "SELECT {} FROM {} WHERE {} ORDER BY {}", + expressions, tables, where_condition, order_condition + ) + }; + + group.bench_function("parse_large_statement", |b| { + b.iter(|| Parser::parse_sql(&dialect, criterion::black_box(large_statement.as_str()))); + }); + + let large_statement = Parser::parse_sql(&dialect, large_statement.as_str()) + .unwrap() + .pop() + .unwrap(); + + group.bench_function("format_large_statement", |b| { + b.iter(|| { + let _formatted_query = large_statement.to_string(); + }); }); } diff --git a/sqlparser_bench/img/flamegraph.svg b/sqlparser_bench/img/flamegraph.svg new file mode 100644 index 000000000..0aaa17e06 --- /dev/null +++ b/sqlparser_bench/img/flamegraph.svg @@ -0,0 +1,491 @@ +[flamegraph.svg: 491 lines of SVG flame graph markup omitted; the frames show the benchmark's Parser::parse_sql, AST Display::fmt/core::fmt::write, and drop_in_place call stacks]
0.01%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (70 samples, 0.15%)libsystem_malloc.dylib`_free (5 samples, 0.01%)libsystem_malloc.dylib`_nanov2_free (13 samples, 0.03%)libsystem_malloc.dylib`nanov2_madvise_block (6 samples, 0.01%)libsystem_malloc.dylib`nanov2_madvise_block_locked (6 samples, 0.01%)libsystem_kernel.dylib`madvise (6 samples, 0.01%)libsystem_platform.dylib`_platform_memset (5 samples, 0.01%)libsystem_malloc.dylib`_nanov2_free (7 samples, 0.01%)libsystem_malloc.dylib`small_free_list_add_ptr (12 samples, 0.03%)libsystem_malloc.dylib`free_small (38 samples, 0.08%)libsystem_malloc.dylib`small_free_list_remove_ptr_no_clear (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Function> (95 samples, 0.20%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::FunctionArgumentList> (54 samples, 0.11%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::query::SetExpr> (510 samples, 1.07%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::query::Query>> (873 samples, 1.83%)s..sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::query::Query> (854 samples, 1.79%)s..sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::tokenizer::Token> (175 samples, 0.37%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::tokenizer::Whitespace> (57 samples, 0.12%)libsystem_malloc.dylib`_nanov2_free (123 samples, 0.26%)libsystem_malloc.dylib`free_medium (62 samples, 0.13%)libsystem_kernel.dylib`madvise (62 samples, 0.13%)libsystem_malloc.dylib`free_small (5 samples, 0.01%)libsystem_malloc.dylib`nanov2_madvise_block (5 samples, 0.01%)libsystem_malloc.dylib`nanov2_madvise_block_locked (5 samples, 0.01%)libsystem_platform.dylib`_platform_memmove (83 samples, 0.17%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::clone::Clone>::clone (5 samples, 0.01%)libsystem_malloc.dylib`_nanov2_free (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::clone::Clone>::clone (5 samples, 0.01%)libsystem_malloc.dylib`nanov2_malloc_type (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::clone::Clone>::clone (17 samples, 0.04%)libsystem_malloc.dylib`_malloc_zone_malloc (5 samples, 0.01%)libsystem_malloc.dylib`small_free_list_add_ptr (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVec<T,A>::grow_one (67 samples, 0.14%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::finish_grow (48 samples, 0.10%)libsystem_malloc.dylib`szone_malloc_should_clear (31 samples, 0.07%)libsystem_malloc.dylib`small_malloc_should_clear (25 samples, 
0.05%)libsystem_malloc.dylib`small_malloc_from_free_list (18 samples, 0.04%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::tokenizer::Token> (5 samples, 0.01%)libsystem_malloc.dylib`_nanov2_free (9 samples, 0.02%)libsystem_platform.dylib`_platform_memmove (31 samples, 0.07%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::clone::Clone>::clone (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::clone::Clone>::clone (21 samples, 0.04%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_keyword (12 samples, 0.03%)libsystem_malloc.dylib`_malloc_zone_malloc (7 samples, 0.01%)libsystem_platform.dylib`_platform_memmove (154 samples, 0.32%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::clone::Clone>::clone (6 samples, 0.01%)libsystem_malloc.dylib`small_free_list_add_ptr (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`alloc::alloc::exchange_malloc (48 samples, 0.10%)libsystem_malloc.dylib`szone_malloc_should_clear (48 samples, 0.10%)libsystem_malloc.dylib`small_malloc_should_clear (36 samples, 0.08%)libsystem_malloc.dylib`small_malloc_from_free_list (32 samples, 0.07%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVec<T,A>::grow_one (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::finish_grow (8 samples, 0.02%)libsystem_malloc.dylib`szone_malloc_should_clear (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::tokenizer::Token> (5 samples, 0.01%)libsystem_platform.dylib`_platform_memmove (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::expect_token (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_identifier (13 samples, 0.03%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::clone::Clone>::clone (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_keyword (7 samples, 0.01%)libsystem_platform.dylib`_platform_memmove (23 samples, 0.05%)sqlparser_bench-959bc5267970ca34`alloc::alloc::exchange_malloc (11 samples, 0.02%)libsystem_malloc.dylib`szone_malloc_should_clear (11 samples, 0.02%)libsystem_malloc.dylib`small_malloc_should_clear (8 samples, 0.02%)libsystem_malloc.dylib`small_malloc_from_free_list (7 samples, 0.01%)libsystem_malloc.dylib`szone_malloc_should_clear (5 samples, 0.01%)libsystem_platform.dylib`_platform_memmove (16 samples, 0.03%)libsystem_platform.dylib`_platform_memmove (23 samples, 0.05%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::expect_keyword (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_all_or_distinct (9 samples, 0.02%)libsystem_platform.dylib`_platform_memmove (27 samples, 0.06%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVec<T,A>::grow_one (20 samples, 0.04%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::finish_grow (14 samples, 0.03%)libsystem_malloc.dylib`szone_malloc_should_clear (6 samples, 0.01%)libsystem_malloc.dylib`_free (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::clone::Clone>::clone (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::clone::Clone>::clone (13 samples, 0.03%)libsystem_malloc.dylib`nanov2_malloc_type (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::clone::Clone>::clone (22 samples, 0.05%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_optional_alias (41 samples, 
0.09%)libsystem_malloc.dylib`_nanov2_free (5 samples, 0.01%)libsystem_platform.dylib`_platform_memmove (8 samples, 0.02%)libsystem_malloc.dylib`_malloc_zone_malloc (8 samples, 0.02%)libsystem_platform.dylib`_platform_memmove (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::clone::Clone>::clone (7 samples, 0.01%)libsystem_malloc.dylib`nanov2_malloc_type (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::clone::Clone>::clone (28 samples, 0.06%)libsystem_malloc.dylib`_free (5 samples, 0.01%)libsystem_malloc.dylib`_nanov2_free (9 samples, 0.02%)libsystem_platform.dylib`_platform_memmove (26 samples, 0.05%)sqlparser_bench-959bc5267970ca34`DYLD-STUB$$free (5 samples, 0.01%)libsystem_malloc.dylib`_nanov2_free (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`sqlparser::dialect::Dialect::get_next_precedence_default (26 samples, 0.05%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::peek_token (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::clone::Clone>::clone (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_expr_prefix_by_unreserved_word (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_keyword (9 samples, 0.02%)libsystem_malloc.dylib`_free (5 samples, 0.01%)libsystem_malloc.dylib`_nanov2_free (11 samples, 0.02%)libsystem_platform.dylib`_platform_memmove (13 samples, 0.03%)libsystem_platform.dylib`_platform_memmove (9 samples, 0.02%)libsystem_malloc.dylib`nanov2_pointer_size (6 samples, 0.01%)libsystem_malloc.dylib`_nanov2_free (12 samples, 0.03%)libsystem_malloc.dylib`nanov2_realloc (25 samples, 0.05%)libsystem_malloc.dylib`nanov2_malloc (10 samples, 0.02%)libsystem_malloc.dylib`_malloc_zone_realloc (47 samples, 0.10%)libsystem_platform.dylib`_platform_memmove (6 samples, 0.01%)libsystem_malloc.dylib`_realloc (50 samples, 0.11%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVecInner<A>::reserve::do_reserve_and_handle (71 samples, 0.15%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::finish_grow (62 samples, 0.13%)libsystem_malloc.dylib`nanov2_size (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::fmt::Write>::write_str (76 samples, 0.16%)sqlparser_bench-959bc5267970ca34`alloc::fmt::format::format_inner (115 samples, 0.24%)sqlparser_bench-959bc5267970ca34`core::fmt::write (109 samples, 0.23%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Location as core::fmt::Display>::fmt (106 samples, 0.22%)sqlparser_bench-959bc5267970ca34`core::fmt::write (104 samples, 0.22%)sqlparser_bench-959bc5267970ca34`core::fmt::num::imp::_<impl core::fmt::Display for u64>::fmt (7 samples, 0.01%)libsystem_malloc.dylib`_nanov2_free (13 samples, 0.03%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::data_type::DataType> (28 samples, 0.06%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVec<T,A>::grow_one (17 samples, 0.04%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::finish_grow (16 samples, 0.03%)libsystem_malloc.dylib`nanov2_malloc_type (16 samples, 0.03%)libsystem_malloc.dylib`_nanov2_free (7 samples, 0.01%)libsystem_platform.dylib`_platform_memmove (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::clone::Clone>::clone (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::clone::Clone>::clone (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_object_name 
(81 samples, 0.17%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_identifier (46 samples, 0.10%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::clone::Clone>::clone (16 samples, 0.03%)libsystem_malloc.dylib`_nanov2_free (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::fmt::Write>::write_str (13 samples, 0.03%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVecInner<A>::reserve::do_reserve_and_handle (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::finish_grow (5 samples, 0.01%)libsystem_malloc.dylib`nanov2_malloc_type (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::clone::Clone>::clone (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_optional_type_modifiers (47 samples, 0.10%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_data_type_helper (158 samples, 0.33%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_data_type (168 samples, 0.35%)libsystem_malloc.dylib`nanov2_malloc_type (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::clone::Clone>::clone (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::clone::Clone>::clone (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_compound_field_access (25 samples, 0.05%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::expect_token (5 samples, 0.01%)libsystem_platform.dylib`_platform_memmove (15 samples, 0.03%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::clone::Clone>::clone (5 samples, 0.01%)libsystem_platform.dylib`_platform_memmove (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVec<T,A>::grow_one (21 samples, 0.04%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::finish_grow (14 samples, 0.03%)libsystem_malloc.dylib`szone_malloc_should_clear (14 samples, 0.03%)libsystem_malloc.dylib`small_malloc_should_clear (11 samples, 0.02%)libsystem_malloc.dylib`small_malloc_from_free_list (9 samples, 0.02%)libsystem_malloc.dylib`_nanov2_free (5 samples, 0.01%)libsystem_platform.dylib`_platform_memmove (19 samples, 0.04%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVecInner<A>::reserve::do_reserve_and_handle (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::fmt::Write>::write_str (13 samples, 0.03%)libsystem_malloc.dylib`_nanov2_free (6 samples, 0.01%)libsystem_malloc.dylib`nanov2_realloc (13 samples, 0.03%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVecInner<A>::reserve::do_reserve_and_handle (26 samples, 0.05%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::finish_grow (23 samples, 0.05%)libsystem_malloc.dylib`_realloc (21 samples, 0.04%)libsystem_malloc.dylib`_malloc_zone_realloc (20 samples, 0.04%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::fmt::Write>::write_str (29 samples, 0.06%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Location as core::fmt::Display>::fmt (54 samples, 0.11%)sqlparser_bench-959bc5267970ca34`core::fmt::write (53 samples, 0.11%)sqlparser_bench-959bc5267970ca34`core::fmt::num::imp::_<impl core::fmt::Display for u64>::fmt (13 samples, 0.03%)sqlparser_bench-959bc5267970ca34`alloc::fmt::format::format_inner (119 samples, 0.25%)sqlparser_bench-959bc5267970ca34`core::fmt::write (105 samples, 0.22%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_function_named_arg_operator 
(134 samples, 0.28%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::clone::Clone>::clone (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_identifier (19 samples, 0.04%)libsystem_malloc.dylib`_nanov2_free (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::clone::Clone>::clone (9 samples, 0.02%)libsystem_platform.dylib`_platform_memmove (13 samples, 0.03%)sqlparser_bench-959bc5267970ca34`sqlparser::dialect::Dialect::get_next_precedence_default (5 samples, 0.01%)libsystem_malloc.dylib`_free (5 samples, 0.01%)libsystem_malloc.dylib`_nanov2_free (5 samples, 0.01%)libsystem_malloc.dylib`_nanov2_free (9 samples, 0.02%)libsystem_malloc.dylib`nanov2_realloc (24 samples, 0.05%)libsystem_malloc.dylib`nanov2_malloc (10 samples, 0.02%)libsystem_malloc.dylib`_malloc_zone_realloc (34 samples, 0.07%)libsystem_malloc.dylib`_realloc (40 samples, 0.08%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::fmt::Write>::write_str (48 samples, 0.10%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVecInner<A>::reserve::do_reserve_and_handle (46 samples, 0.10%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::finish_grow (44 samples, 0.09%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Location as core::fmt::Display>::fmt (69 samples, 0.14%)sqlparser_bench-959bc5267970ca34`core::fmt::write (69 samples, 0.14%)sqlparser_bench-959bc5267970ca34`core::fmt::num::imp::_<impl core::fmt::Display for u64>::fmt (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`alloc::fmt::format::format_inner (80 samples, 0.17%)sqlparser_bench-959bc5267970ca34`core::fmt::write (77 samples, 0.16%)libsystem_malloc.dylib`_nanov2_free (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::data_type::DataType> (17 samples, 0.04%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVec<T,A>::grow_one (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::clone::Clone>::clone (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::clone::Clone>::clone (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_object_name (36 samples, 0.08%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_identifier (27 samples, 0.06%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_data_type_helper (44 samples, 0.09%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_data_type (48 samples, 0.10%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::clone::Clone>::clone (8 samples, 0.02%)libsystem_malloc.dylib`nanov2_malloc_type (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_expr_prefix_by_unreserved_word (15 samples, 0.03%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::try_parse (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_prefix (198 samples, 0.42%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_function_args (436 samples, 0.92%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_wildcard_expr (264 samples, 0.55%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_subexpr (241 samples, 0.51%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_comma_separated_with_trailing_commas (480 samples, 1.01%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_function_argument_list (541 samples, 
1.14%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_function_call (564 samples, 1.18%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_expr_prefix_by_unreserved_word (641 samples, 1.35%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_prefix (1,035 samples, 2.17%)s..sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_select_item (1,271 samples, 2.67%)sq..sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_wildcard_expr (1,200 samples, 2.52%)sq..sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_subexpr (1,148 samples, 2.41%)sq..libsystem_platform.dylib`_platform_memmove (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_keyword (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::clone::Clone>::clone (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::clone::Clone>::clone (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_object_name (24 samples, 0.05%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_identifier (17 samples, 0.04%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_optional_table_alias (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_optional_alias (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::peek_token (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::clone::Clone>::clone (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_table_factor (90 samples, 0.19%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::peek_tokens (28 samples, 0.06%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::clone::Clone>::clone (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_comma_separated_with_trailing_commas (1,454 samples, 3.05%)sql..sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_table_and_joins (122 samples, 0.26%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVec<T,A>::grow_one (8 samples, 0.02%)libsystem_platform.dylib`_platform_memmove (12 samples, 0.03%)libsystem_malloc.dylib`nanov2_realloc (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::fmt::Write>::write_str (22 samples, 0.05%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVecInner<A>::reserve::do_reserve_and_handle (22 samples, 0.05%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::finish_grow (18 samples, 0.04%)libsystem_malloc.dylib`_realloc (11 samples, 0.02%)libsystem_malloc.dylib`_malloc_zone_realloc (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`alloc::fmt::format::format_inner (33 samples, 0.07%)sqlparser_bench-959bc5267970ca34`core::fmt::write (30 samples, 0.06%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Location as core::fmt::Display>::fmt (30 samples, 0.06%)sqlparser_bench-959bc5267970ca34`core::fmt::write (29 samples, 0.06%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::data_type::DataType> (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::clone::Clone>::clone (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_object_name (24 samples, 0.05%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_identifier (13 samples, 
0.03%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_data_type (33 samples, 0.07%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_data_type_helper (32 samples, 0.07%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_expr_prefix_by_unreserved_word (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_prefix (106 samples, 0.22%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_optional_group_by (154 samples, 0.32%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_subexpr (135 samples, 0.28%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_query_body (1,727 samples, 3.63%)sqlp..sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_select (1,681 samples, 3.53%)sql..sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_cte (1,854 samples, 3.89%)sqlp..sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_query (1,798 samples, 3.78%)sqlp..libsystem_platform.dylib`_platform_memmove (81 samples, 0.17%)libsystem_platform.dylib`_platform_memmove (18 samples, 0.04%)libsystem_malloc.dylib`_malloc_zone_realloc (8 samples, 0.02%)libsystem_platform.dylib`_platform_memmove (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVec<T,A>::grow_one (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::finish_grow (10 samples, 0.02%)libsystem_malloc.dylib`_realloc (10 samples, 0.02%)libsystem_platform.dylib`_platform_memmove (5 samples, 0.01%)libsystem_malloc.dylib`_free (6 samples, 0.01%)libsystem_platform.dylib`_platform_memmove (20 samples, 0.04%)sqlparser_bench-959bc5267970ca34`DYLD-STUB$$memcpy (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::clone::Clone>::clone (6 samples, 0.01%)libsystem_malloc.dylib`nanov2_malloc_type (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`sqlparser::dialect::Dialect::get_next_precedence_default (21 samples, 0.04%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::peek_token (15 samples, 0.03%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::clone::Clone>::clone (12 samples, 0.03%)libsystem_malloc.dylib`_free (7 samples, 0.01%)libsystem_malloc.dylib`_malloc_zone_malloc (7 samples, 0.01%)libsystem_malloc.dylib`_nanov2_free (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::clone::Clone>::clone (7 samples, 0.01%)libsystem_malloc.dylib`nanov2_malloc_type (6 samples, 0.01%)libsystem_platform.dylib`_platform_memmove (5 samples, 0.01%)libsystem_malloc.dylib`_nanov2_free (11 samples, 0.02%)libsystem_malloc.dylib`nanov2_malloc (9 samples, 0.02%)libsystem_malloc.dylib`nanov2_realloc (26 samples, 0.05%)libsystem_malloc.dylib`_realloc (49 samples, 0.10%)libsystem_malloc.dylib`_malloc_zone_realloc (46 samples, 0.10%)libsystem_platform.dylib`_platform_memmove (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::fmt::Write>::write_str (64 samples, 0.13%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVecInner<A>::reserve::do_reserve_and_handle (60 samples, 0.13%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::finish_grow (54 samples, 0.11%)libsystem_platform.dylib`_platform_memmove (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`alloc::fmt::format::format_inner (106 samples, 0.22%)sqlparser_bench-959bc5267970ca34`core::fmt::write (99 samples, 0.21%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Location as core::fmt::Display>::fmt (92 samples, 
0.19%)sqlparser_bench-959bc5267970ca34`core::fmt::write (90 samples, 0.19%)sqlparser_bench-959bc5267970ca34`core::fmt::num::imp::_<impl core::fmt::Display for u64>::fmt (13 samples, 0.03%)libsystem_malloc.dylib`_nanov2_free (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::data_type::DataType> (10 samples, 0.02%)libsystem_platform.dylib`_platform_memmove (7 samples, 0.01%)libsystem_platform.dylib`_platform_memmove (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::clone::Clone>::clone (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::clone::Clone>::clone (18 samples, 0.04%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_object_name (58 samples, 0.12%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_identifier (45 samples, 0.09%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_data_type (73 samples, 0.15%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_data_type_helper (71 samples, 0.15%)libsystem_platform.dylib`_platform_memmove (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::clone::Clone>::clone (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::clone::Clone>::clone (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_expr_prefix_by_unreserved_word (29 samples, 0.06%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_prefix (277 samples, 0.58%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::try_parse (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_order_by_expr (357 samples, 0.75%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_subexpr (343 samples, 0.72%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_optional_order_by (481 samples, 1.01%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_comma_separated_with_trailing_commas (397 samples, 0.83%)libsystem_malloc.dylib`szone_malloc_should_clear (23 samples, 0.05%)libsystem_malloc.dylib`small_malloc_should_clear (18 samples, 0.04%)libsystem_malloc.dylib`small_malloc_from_free_list (15 samples, 0.03%)libsystem_malloc.dylib`small_free_list_remove_ptr_no_clear (5 samples, 0.01%)libsystem_platform.dylib`_platform_memmove (173 samples, 0.36%)sqlparser_bench-959bc5267970ca34`DYLD-STUB$$memcpy (11 samples, 0.02%)libsystem_malloc.dylib`rack_get_thread_index (5 samples, 0.01%)libsystem_malloc.dylib`small_free_list_add_ptr (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`core::ops::function::FnOnce::call_once (42 samples, 0.09%)libsystem_malloc.dylib`szone_malloc_should_clear (36 samples, 0.08%)libsystem_malloc.dylib`small_malloc_should_clear (26 samples, 0.05%)libsystem_malloc.dylib`small_malloc_from_free_list (21 samples, 0.04%)libsystem_malloc.dylib`small_free_list_remove_ptr_no_clear (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_comma_separated_with_trailing_commas (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_keyword (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_keywords (17 samples, 0.04%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_one_of_keywords (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_optional_group_by (5 samples, 0.01%)libsystem_platform.dylib`_platform_memmove (106 
samples, 0.22%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::clone::Clone>::clone (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`DYLD-STUB$$memcpy (13 samples, 0.03%)libsystem_malloc.dylib`_malloc_zone_malloc (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::clone::Clone>::clone (5 samples, 0.01%)libsystem_malloc.dylib`nanov2_malloc_type (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::expect_keyword (22 samples, 0.05%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::clone::Clone>::clone (17 samples, 0.04%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::is_parse_comma_separated_end_with_trailing_commas (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_all_or_distinct (11 samples, 0.02%)libsystem_malloc.dylib`_free (7 samples, 0.01%)libsystem_malloc.dylib`_nanov2_free (6 samples, 0.01%)libsystem_platform.dylib`_platform_memmove (195 samples, 0.41%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::cmp::PartialEq>::eq (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`DYLD-STUB$$memcpy (10 samples, 0.02%)libsystem_malloc.dylib`_malloc_zone_malloc (9 samples, 0.02%)libsystem_malloc.dylib`small_free_list_add_ptr (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVec<T,A>::grow_one (94 samples, 0.20%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::finish_grow (56 samples, 0.12%)libsystem_malloc.dylib`szone_malloc_should_clear (28 samples, 0.06%)libsystem_malloc.dylib`small_malloc_should_clear (19 samples, 0.04%)libsystem_malloc.dylib`small_malloc_from_free_list (13 samples, 0.03%)libsystem_malloc.dylib`small_free_list_remove_ptr_no_clear (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::is_parse_comma_separated_end_with_trailing_commas (15 samples, 0.03%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_optional_select_item_exclude (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_optional_select_item_ilike (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_optional_select_item_rename (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_wildcard_additional_options (48 samples, 0.10%)libsystem_platform.dylib`_platform_memmove (8 samples, 0.02%)libsystem_platform.dylib`_platform_memmove (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::clone::Clone>::clone (7 samples, 0.01%)libsystem_malloc.dylib`nanov2_malloc_type (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::clone::Clone>::clone (20 samples, 0.04%)libsystem_platform.dylib`_platform_memmove (24 samples, 0.05%)sqlparser_bench-959bc5267970ca34`sqlparser::dialect::Dialect::get_next_precedence_default (13 samples, 0.03%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::peek_token (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::clone::Clone>::clone (11 samples, 0.02%)libsystem_malloc.dylib`_malloc_zone_malloc (6 samples, 0.01%)libsystem_malloc.dylib`_nanov2_free (22 samples, 0.05%)libsystem_platform.dylib`_platform_memmove (18 samples, 0.04%)libsystem_platform.dylib`_platform_memset (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::clone::Clone>::clone (5 samples, 0.01%)libsystem_platform.dylib`_platform_memmove (8 samples, 
0.02%)libsystem_malloc.dylib`nanov2_pointer_size (8 samples, 0.02%)libsystem_malloc.dylib`nanov2_malloc (8 samples, 0.02%)libsystem_malloc.dylib`_malloc_zone_realloc (29 samples, 0.06%)libsystem_malloc.dylib`nanov2_realloc (16 samples, 0.03%)libsystem_malloc.dylib`_realloc (35 samples, 0.07%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVecInner<A>::reserve::do_reserve_and_handle (49 samples, 0.10%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::finish_grow (47 samples, 0.10%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::fmt::Write>::write_str (59 samples, 0.12%)libsystem_platform.dylib`_platform_memmove (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Location as core::fmt::Display>::fmt (94 samples, 0.20%)sqlparser_bench-959bc5267970ca34`core::fmt::write (93 samples, 0.20%)sqlparser_bench-959bc5267970ca34`core::fmt::num::imp::_<impl core::fmt::Display for u64>::fmt (15 samples, 0.03%)sqlparser_bench-959bc5267970ca34`alloc::fmt::format::format_inner (111 samples, 0.23%)sqlparser_bench-959bc5267970ca34`core::fmt::write (102 samples, 0.21%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::data_type::DataType> (26 samples, 0.05%)libsystem_malloc.dylib`_nanov2_free (15 samples, 0.03%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVec<T,A>::grow_one (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::finish_grow (5 samples, 0.01%)libsystem_malloc.dylib`nanov2_malloc_type (5 samples, 0.01%)libsystem_malloc.dylib`_nanov2_free (6 samples, 0.01%)libsystem_platform.dylib`_platform_memmove (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::clone::Clone>::clone (7 samples, 0.01%)libsystem_malloc.dylib`nanov2_malloc_type (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_object_name (48 samples, 0.10%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_identifier (30 samples, 0.06%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::clone::Clone>::clone (10 samples, 0.02%)libsystem_malloc.dylib`_nanov2_free (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVecInner<A>::reserve::do_reserve_and_handle (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::finish_grow (6 samples, 0.01%)libsystem_malloc.dylib`nanov2_malloc_type (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::fmt::Write>::write_str (9 samples, 0.02%)libsystem_malloc.dylib`_malloc_zone_malloc (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::clone::Clone>::clone (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVec<T,A>::grow_one (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_optional_type_modifiers (50 samples, 0.11%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_data_type_helper (111 samples, 0.23%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_data_type (118 samples, 0.25%)libsystem_platform.dylib`_platform_memmove (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_compound_field_access (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::tokenizer::Token> (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::expect_token (6 samples, 0.01%)libsystem_platform.dylib`_platform_memmove (19 samples, 
0.04%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::clone::Clone>::clone (14 samples, 0.03%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::expect_token (23 samples, 0.05%)libsystem_malloc.dylib`_nanov2_free (5 samples, 0.01%)libsystem_platform.dylib`_platform_memmove (16 samples, 0.03%)libsystem_malloc.dylib`_malloc_zone_malloc (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVec<T,A>::grow_one (32 samples, 0.07%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::finish_grow (21 samples, 0.04%)libsystem_malloc.dylib`szone_malloc_should_clear (18 samples, 0.04%)libsystem_malloc.dylib`small_malloc_should_clear (17 samples, 0.04%)libsystem_malloc.dylib`small_malloc_from_free_list (15 samples, 0.03%)libsystem_malloc.dylib`_nanov2_free (17 samples, 0.04%)libsystem_platform.dylib`_platform_memset (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::clone::Clone>::clone (5 samples, 0.01%)libsystem_platform.dylib`_platform_memmove (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVecInner<A>::reserve::do_reserve_and_handle (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::finish_grow (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::fmt::Write>::write_str (14 samples, 0.03%)libsystem_malloc.dylib`_nanov2_free (5 samples, 0.01%)libsystem_malloc.dylib`nanov2_realloc (11 samples, 0.02%)libsystem_malloc.dylib`_malloc_zone_realloc (15 samples, 0.03%)libsystem_malloc.dylib`_realloc (21 samples, 0.04%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::fmt::Write>::write_str (34 samples, 0.07%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVecInner<A>::reserve::do_reserve_and_handle (32 samples, 0.07%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::finish_grow (27 samples, 0.06%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Location as core::fmt::Display>::fmt (63 samples, 0.13%)sqlparser_bench-959bc5267970ca34`core::fmt::write (60 samples, 0.13%)sqlparser_bench-959bc5267970ca34`core::fmt::num::imp::_<impl core::fmt::Display for u64>::fmt (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`alloc::fmt::format::format_inner (106 samples, 0.22%)sqlparser_bench-959bc5267970ca34`core::fmt::write (97 samples, 0.20%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_function_named_arg_operator (117 samples, 0.25%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::clone::Clone>::clone (8 samples, 0.02%)libsystem_platform.dylib`_platform_memmove (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::clone::Clone>::clone (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_identifier (31 samples, 0.07%)libsystem_platform.dylib`_platform_memmove (8 samples, 0.02%)libsystem_platform.dylib`_platform_memmove (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::clone::Clone>::clone (14 samples, 0.03%)libsystem_malloc.dylib`_free (5 samples, 0.01%)libsystem_platform.dylib`_platform_memmove (17 samples, 0.04%)sqlparser_bench-959bc5267970ca34`sqlparser::dialect::Dialect::get_next_precedence_default (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::peek_token (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_keyword (8 samples, 0.02%)libsystem_malloc.dylib`_malloc_zone_malloc (5 samples, 0.01%)libsystem_malloc.dylib`_nanov2_free (7 samples, 
0.01%)libsystem_malloc.dylib`_nanov2_free (7 samples, 0.01%)libsystem_malloc.dylib`nanov2_realloc (15 samples, 0.03%)libsystem_malloc.dylib`_malloc_zone_realloc (33 samples, 0.07%)libsystem_platform.dylib`_platform_memmove (10 samples, 0.02%)libsystem_malloc.dylib`_realloc (37 samples, 0.08%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVecInner<A>::reserve::do_reserve_and_handle (46 samples, 0.10%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::finish_grow (42 samples, 0.09%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::fmt::Write>::write_str (48 samples, 0.10%)libsystem_platform.dylib`_platform_memmove (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Location as core::fmt::Display>::fmt (79 samples, 0.17%)sqlparser_bench-959bc5267970ca34`core::fmt::write (78 samples, 0.16%)sqlparser_bench-959bc5267970ca34`core::fmt::num::imp::_<impl core::fmt::Display for u64>::fmt (19 samples, 0.04%)sqlparser_bench-959bc5267970ca34`alloc::fmt::format::format_inner (102 samples, 0.21%)sqlparser_bench-959bc5267970ca34`core::fmt::write (94 samples, 0.20%)libsystem_malloc.dylib`_nanov2_free (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::data_type::DataType> (19 samples, 0.04%)libsystem_malloc.dylib`_nanov2_free (6 samples, 0.01%)libsystem_platform.dylib`_platform_memmove (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::clone::Clone>::clone (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::clone::Clone>::clone (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_object_name (46 samples, 0.10%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_identifier (38 samples, 0.08%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_data_type_helper (64 samples, 0.13%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_data_type (69 samples, 0.14%)libsystem_platform.dylib`_platform_memmove (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_expr_prefix_by_unreserved_word (18 samples, 0.04%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::try_parse (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_prefix (259 samples, 0.54%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_function_args (554 samples, 1.16%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_wildcard_expr (354 samples, 0.74%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_subexpr (318 samples, 0.67%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_comma_separated_with_trailing_commas (616 samples, 1.29%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_function_argument_list (706 samples, 1.48%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_json_null_clause (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_function_call (781 samples, 1.64%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_expr_prefix_by_unreserved_word (824 samples, 1.73%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_prefix (1,166 samples, 2.45%)sq..sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_select_item (1,371 samples, 2.88%)sq..sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_wildcard_expr (1,283 samples, 
2.69%)sq..sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_subexpr (1,236 samples, 2.60%)sq..libsystem_malloc.dylib`_free (19 samples, 0.04%)libsystem_malloc.dylib`_nanov2_free (20 samples, 0.04%)libsystem_platform.dylib`_platform_memmove (50 samples, 0.11%)sqlparser_bench-959bc5267970ca34`<T as core::any::Any>::type_id (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::clone::Clone>::clone (5 samples, 0.01%)libsystem_malloc.dylib`_malloc_zone_malloc (8 samples, 0.02%)libsystem_platform.dylib`_platform_memmove (15 samples, 0.03%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::clone::Clone>::clone (12 samples, 0.03%)libsystem_malloc.dylib`nanov2_malloc_type (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::clone::Clone>::clone (41 samples, 0.09%)sqlparser_bench-959bc5267970ca34`DYLD-STUB$$free (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`DYLD-STUB$$memcpy (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVec<T,A>::grow_one (17 samples, 0.04%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::finish_grow (15 samples, 0.03%)libsystem_malloc.dylib`szone_malloc_should_clear (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::tokenizer::Token> (22 samples, 0.05%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_join_constraint (31 samples, 0.07%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_parenthesized_column_list (21 samples, 0.04%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_identifier (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_keyword (15 samples, 0.03%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_keywords (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_object_name (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_one_of_keywords (9 samples, 0.02%)libsystem_malloc.dylib`_nanov2_free (37 samples, 0.08%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::cmp::PartialEq>::eq (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::tokenizer::Token> (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::consume_token (24 samples, 0.05%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::cmp::PartialEq>::eq (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::maybe_parse_table_sample (12 samples, 0.03%)libsystem_malloc.dylib`_free (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVec<T,A>::grow_one (32 samples, 0.07%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::finish_grow (27 samples, 0.06%)libsystem_malloc.dylib`nanov2_malloc_type (25 samples, 0.05%)libsystem_malloc.dylib`nanov2_allocate_outlined (14 samples, 0.03%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::finish_grow (6 samples, 0.01%)libsystem_malloc.dylib`_malloc_zone_malloc (5 samples, 0.01%)libsystem_malloc.dylib`_nanov2_free (14 samples, 0.03%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::clone::Clone>::clone (19 samples, 0.04%)libsystem_malloc.dylib`nanov2_malloc_type (13 samples, 0.03%)libsystem_malloc.dylib`_malloc_zone_malloc (6 samples, 0.01%)libsystem_platform.dylib`_platform_memmove (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::clone::Clone>::clone (13 samples, 
[Flamegraph (SVG text content elided): CPU profile of the sqlparser_bench criterion benchmark. Roughly 29% of samples fall under Parser::parse_sql, of which tokenization (Tokenizer::tokenize_with_location) accounts for ~10% and statement parsing (Parser::parse_statements / parse_query / parse_select) for ~17%; a further ~7% is spent in <Statement as Display>::fmt serializing the AST back to SQL (including per-error-location string formatting in parse_data_type paths); the remainder is dominated by allocator traffic (malloc/realloc/free), memmove, and memcmp in keyword and identifier handling, plus AST drop glue.]
0.09%)libsystem_malloc.dylib`_malloc_zone_realloc (39 samples, 0.08%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVecInner<A>::reserve::do_reserve_and_handle (51 samples, 0.11%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::finish_grow (45 samples, 0.09%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::fmt::Write>::write_str (54 samples, 0.11%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Location as core::fmt::Display>::fmt (84 samples, 0.18%)sqlparser_bench-959bc5267970ca34`core::fmt::write (81 samples, 0.17%)sqlparser_bench-959bc5267970ca34`core::fmt::num::imp::_<impl core::fmt::Display for u64>::fmt (16 samples, 0.03%)sqlparser_bench-959bc5267970ca34`alloc::fmt::format::format_inner (99 samples, 0.21%)sqlparser_bench-959bc5267970ca34`core::fmt::write (93 samples, 0.20%)libsystem_malloc.dylib`_nanov2_free (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::data_type::DataType> (16 samples, 0.03%)libsystem_platform.dylib`_platform_memmove (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::clone::Clone>::clone (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::clone::Clone>::clone (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_object_name (50 samples, 0.11%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_identifier (37 samples, 0.08%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_data_type_helper (61 samples, 0.13%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_data_type (65 samples, 0.14%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::clone::Clone>::clone (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::clone::Clone>::clone (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_expr_prefix_by_unreserved_word (30 samples, 0.06%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::clone::Clone>::clone (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_prefix (269 samples, 0.56%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_comma_separated_with_trailing_commas (377 samples, 0.79%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_order_by_expr (348 samples, 0.73%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_subexpr (323 samples, 0.68%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_optional_order_by (462 samples, 0.97%)libsystem_malloc.dylib`small_free_list_add_ptr (6 samples, 0.01%)libsystem_malloc.dylib`szone_malloc_should_clear (36 samples, 0.08%)libsystem_malloc.dylib`small_malloc_should_clear (23 samples, 0.05%)libsystem_malloc.dylib`small_malloc_from_free_list (20 samples, 0.04%)libsystem_platform.dylib`_platform_memmove (139 samples, 0.29%)sqlparser_bench-959bc5267970ca34`DYLD-STUB$$memcpy (9 samples, 0.02%)libsystem_malloc.dylib`small_free_list_remove_ptr (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`core::ops::function::FnOnce::call_once (40 samples, 0.08%)libsystem_malloc.dylib`szone_malloc_should_clear (34 samples, 0.07%)libsystem_malloc.dylib`small_malloc_should_clear (25 samples, 0.05%)libsystem_malloc.dylib`small_malloc_from_free_list (22 samples, 0.05%)libsystem_malloc.dylib`small_free_list_remove_ptr_no_clear (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_comma_separated_with_trailing_commas (6 
samples, 0.01%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_keyword (16 samples, 0.03%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_keywords (13 samples, 0.03%)libsystem_platform.dylib`_platform_memmove (95 samples, 0.20%)sqlparser_bench-959bc5267970ca34`DYLD-STUB$$memcpy (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::tokenizer::Token> (6 samples, 0.01%)libsystem_malloc.dylib`_malloc_zone_malloc (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::clone::Clone>::clone (9 samples, 0.02%)libsystem_malloc.dylib`nanov2_malloc_type (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::expect_keyword (38 samples, 0.08%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::clone::Clone>::clone (22 samples, 0.05%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::is_parse_comma_separated_end_with_trailing_commas (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_all_or_distinct (12 samples, 0.03%)libsystem_malloc.dylib`_free (7 samples, 0.01%)libsystem_platform.dylib`_platform_memmove (191 samples, 0.40%)sqlparser_bench-959bc5267970ca34`DYLD-STUB$$memcpy (8 samples, 0.02%)libsystem_malloc.dylib`_malloc_zone_malloc (16 samples, 0.03%)libsystem_malloc.dylib`_realloc (7 samples, 0.01%)libsystem_malloc.dylib`_malloc_zone_realloc (7 samples, 0.01%)libsystem_platform.dylib`_platform_memmove (6 samples, 0.01%)libsystem_malloc.dylib`rack_get_thread_index (5 samples, 0.01%)libsystem_malloc.dylib`small_free_list_add_ptr (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVec<T,A>::grow_one (104 samples, 0.22%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::finish_grow (72 samples, 0.15%)libsystem_malloc.dylib`szone_malloc_should_clear (47 samples, 0.10%)libsystem_malloc.dylib`small_malloc_should_clear (32 samples, 0.07%)libsystem_malloc.dylib`small_malloc_from_free_list (21 samples, 0.04%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::is_parse_comma_separated_end_with_trailing_commas (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::cmp::PartialEq>::eq (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::clone::Clone>::clone (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_optional_select_item_exclude (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_optional_select_item_rename (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_wildcard_additional_options (56 samples, 0.12%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_optional_select_item_replace (5 samples, 0.01%)libsystem_malloc.dylib`_nanov2_free (7 samples, 0.01%)libsystem_platform.dylib`_platform_memmove (6 samples, 0.01%)libsystem_platform.dylib`_platform_memmove (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::clone::Clone>::clone (14 samples, 0.03%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::cmp::PartialEq>::eq (6 samples, 0.01%)libsystem_platform.dylib`_platform_memmove (23 samples, 0.05%)sqlparser_bench-959bc5267970ca34`DYLD-STUB$$memcpy (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`sqlparser::dialect::Dialect::get_next_precedence_default (15 samples, 0.03%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::peek_token (14 samples, 
0.03%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::clone::Clone>::clone (12 samples, 0.03%)libsystem_malloc.dylib`_malloc_zone_malloc (5 samples, 0.01%)libsystem_malloc.dylib`_nanov2_free (13 samples, 0.03%)libsystem_platform.dylib`_platform_memmove (17 samples, 0.04%)libsystem_platform.dylib`_platform_memset (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::clone::Clone>::clone (8 samples, 0.02%)libsystem_malloc.dylib`nanov2_malloc_type (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`DYLD-STUB$$free (5 samples, 0.01%)libsystem_malloc.dylib`_nanov2_free (9 samples, 0.02%)libsystem_malloc.dylib`nanov2_realloc (18 samples, 0.04%)libsystem_malloc.dylib`_malloc_zone_realloc (30 samples, 0.06%)libsystem_platform.dylib`_platform_memmove (8 samples, 0.02%)libsystem_malloc.dylib`_realloc (41 samples, 0.09%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVecInner<A>::reserve::do_reserve_and_handle (51 samples, 0.11%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::finish_grow (45 samples, 0.09%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::fmt::Write>::write_str (56 samples, 0.12%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Location as core::fmt::Display>::fmt (79 samples, 0.17%)sqlparser_bench-959bc5267970ca34`core::fmt::write (78 samples, 0.16%)sqlparser_bench-959bc5267970ca34`core::fmt::num::imp::_<impl core::fmt::Display for u64>::fmt (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`alloc::fmt::format::format_inner (88 samples, 0.18%)sqlparser_bench-959bc5267970ca34`core::fmt::write (84 samples, 0.18%)libsystem_malloc.dylib`_nanov2_free (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::data_type::DataType> (21 samples, 0.04%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::clone::Clone>::clone (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVec<T,A>::grow_one (9 samples, 0.02%)libsystem_platform.dylib`_platform_memmove (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::clone::Clone>::clone (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::clone::Clone>::clone (8 samples, 0.02%)libsystem_malloc.dylib`nanov2_malloc_type (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::clone::Clone>::clone (16 samples, 0.03%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_object_name (62 samples, 0.13%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_identifier (38 samples, 0.08%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::fmt::Write>::write_str (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVecInner<A>::reserve::do_reserve_and_handle (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::finish_grow (6 samples, 0.01%)libsystem_malloc.dylib`nanov2_malloc_type (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::clone::Clone>::clone (13 samples, 0.03%)libsystem_platform.dylib`_platform_memmove (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVec<T,A>::grow_one (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_optional_type_modifiers (53 samples, 0.11%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_data_type_helper (130 samples, 0.27%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_data_type (131 samples, 
0.28%)libsystem_platform.dylib`_platform_memmove (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_compound_field_access (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::tokenizer::Token> (5 samples, 0.01%)libsystem_malloc.dylib`_nanov2_free (5 samples, 0.01%)libsystem_platform.dylib`_platform_memmove (25 samples, 0.05%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::clone::Clone>::clone (8 samples, 0.02%)libsystem_malloc.dylib`nanov2_malloc_type (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::clone::Clone>::clone (18 samples, 0.04%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::expect_token (19 samples, 0.04%)libsystem_malloc.dylib`_nanov2_free (12 samples, 0.03%)libsystem_platform.dylib`_platform_memmove (14 samples, 0.03%)libsystem_malloc.dylib`small_free_list_add_ptr (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVec<T,A>::grow_one (34 samples, 0.07%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::finish_grow (26 samples, 0.05%)libsystem_malloc.dylib`szone_malloc_should_clear (22 samples, 0.05%)libsystem_malloc.dylib`small_malloc_should_clear (22 samples, 0.05%)libsystem_malloc.dylib`small_malloc_from_free_list (18 samples, 0.04%)libsystem_malloc.dylib`small_free_list_remove_ptr_no_clear (5 samples, 0.01%)libsystem_malloc.dylib`_free (8 samples, 0.02%)libsystem_malloc.dylib`_nanov2_free (15 samples, 0.03%)sqlparser_bench-959bc5267970ca34`DYLD-STUB$$free (5 samples, 0.01%)libsystem_malloc.dylib`nanov2_malloc_type (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::fmt::Write>::write_str (5 samples, 0.01%)libsystem_platform.dylib`_platform_memmove (13 samples, 0.03%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVecInner<A>::reserve::do_reserve_and_handle (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::finish_grow (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::fmt::Write>::write_str (9 samples, 0.02%)libsystem_malloc.dylib`nanov2_realloc (10 samples, 0.02%)libsystem_malloc.dylib`_realloc (20 samples, 0.04%)libsystem_malloc.dylib`_malloc_zone_realloc (19 samples, 0.04%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVecInner<A>::reserve::do_reserve_and_handle (24 samples, 0.05%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::finish_grow (23 samples, 0.05%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::fmt::Write>::write_str (28 samples, 0.06%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Location as core::fmt::Display>::fmt (52 samples, 0.11%)sqlparser_bench-959bc5267970ca34`core::fmt::write (51 samples, 0.11%)sqlparser_bench-959bc5267970ca34`core::fmt::num::imp::_<impl core::fmt::Display for u64>::fmt (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::fmt::Display>::fmt (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`alloc::fmt::format::format_inner (115 samples, 0.24%)sqlparser_bench-959bc5267970ca34`core::fmt::write (96 samples, 0.20%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_function_named_arg_operator (131 samples, 0.28%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::clone::Clone>::clone (9 samples, 0.02%)libsystem_malloc.dylib`nanov2_malloc_type (5 samples, 0.01%)libsystem_platform.dylib`_platform_memmove (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::clone::Clone>::clone 
(17 samples, 0.04%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::clone::Clone>::clone (7 samples, 0.01%)libsystem_malloc.dylib`nanov2_malloc_type (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_identifier (32 samples, 0.07%)libsystem_platform.dylib`_platform_memmove (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::clone::Clone>::clone (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::clone::Clone>::clone (16 samples, 0.03%)libsystem_platform.dylib`_platform_memmove (17 samples, 0.04%)sqlparser_bench-959bc5267970ca34`sqlparser::dialect::Dialect::get_next_precedence_default (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::peek_token (7 samples, 0.01%)libsystem_malloc.dylib`_malloc_zone_malloc (6 samples, 0.01%)libsystem_malloc.dylib`_nanov2_free (7 samples, 0.01%)libsystem_malloc.dylib`nanov2_realloc (15 samples, 0.03%)libsystem_malloc.dylib`_malloc_zone_realloc (29 samples, 0.06%)libsystem_platform.dylib`_platform_memmove (7 samples, 0.01%)libsystem_malloc.dylib`_realloc (32 samples, 0.07%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::fmt::Write>::write_str (41 samples, 0.09%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVecInner<A>::reserve::do_reserve_and_handle (40 samples, 0.08%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::finish_grow (39 samples, 0.08%)libsystem_malloc.dylib`nanov2_size (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVecInner<A>::reserve::do_reserve_and_handle (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Location as core::fmt::Display>::fmt (70 samples, 0.15%)sqlparser_bench-959bc5267970ca34`core::fmt::write (68 samples, 0.14%)sqlparser_bench-959bc5267970ca34`core::fmt::num::imp::_<impl core::fmt::Display for u64>::fmt (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`alloc::fmt::format::format_inner (80 samples, 0.17%)sqlparser_bench-959bc5267970ca34`core::fmt::write (74 samples, 0.16%)libsystem_malloc.dylib`_nanov2_free (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::data_type::DataType> (11 samples, 0.02%)libsystem_platform.dylib`_platform_memmove (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::clone::Clone>::clone (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_object_name (31 samples, 0.07%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_identifier (19 samples, 0.04%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_data_type_helper (44 samples, 0.09%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_data_type (49 samples, 0.10%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_expr_prefix_by_unreserved_word (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_prefix (200 samples, 0.42%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_comma_separated_with_trailing_commas (559 samples, 1.17%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_function_args (485 samples, 1.02%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_wildcard_expr (277 samples, 0.58%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_subexpr (243 samples, 0.51%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_function_argument_list (663 samples, 
1.39%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_listagg_on_overflow (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_json_null_clause (13 samples, 0.03%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_function_call (723 samples, 1.52%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_listagg_on_overflow (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_expr_prefix_by_unreserved_word (766 samples, 1.61%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_prefix (1,085 samples, 2.28%)s..sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_select_item (1,285 samples, 2.70%)sq..sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_wildcard_expr (1,190 samples, 2.50%)sq..sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_subexpr (1,144 samples, 2.40%)sq..libsystem_malloc.dylib`_free (15 samples, 0.03%)libsystem_malloc.dylib`_nanov2_free (15 samples, 0.03%)libsystem_platform.dylib`_platform_memmove (45 samples, 0.09%)sqlparser_bench-959bc5267970ca34`<T as core::any::Any>::type_id (7 samples, 0.01%)libsystem_malloc.dylib`_malloc_zone_malloc (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::clone::Clone>::clone (8 samples, 0.02%)libsystem_malloc.dylib`nanov2_malloc_type (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::clone::Clone>::clone (29 samples, 0.06%)sqlparser_bench-959bc5267970ca34`DYLD-STUB$$free (8 samples, 0.02%)libsystem_malloc.dylib`_realloc (7 samples, 0.01%)libsystem_malloc.dylib`_malloc_zone_realloc (7 samples, 0.01%)libsystem_platform.dylib`_platform_memmove (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVec<T,A>::grow_one (17 samples, 0.04%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::finish_grow (13 samples, 0.03%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::tokenizer::Token> (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_join_constraint (16 samples, 0.03%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_parenthesized_column_list (13 samples, 0.03%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_identifier (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_keyword (15 samples, 0.03%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_keywords (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_object_name (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_one_of_keywords (7 samples, 0.01%)libsystem_malloc.dylib`_nanov2_free (42 samples, 0.09%)sqlparser_bench-959bc5267970ca34`<T as core::any::Any>::type_id (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::cmp::PartialEq>::eq (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::tokenizer::Token> (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::maybe_parse_table_sample (6 samples, 0.01%)libsystem_malloc.dylib`_free (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`DYLD-STUB$$free (5 samples, 0.01%)libsystem_malloc.dylib`_malloc_zone_malloc (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVec<T,A>::grow_one (43 samples, 0.09%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::finish_grow (26 samples, 
0.05%)libsystem_malloc.dylib`nanov2_malloc_type (25 samples, 0.05%)libsystem_malloc.dylib`nanov2_allocate_outlined (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::finish_grow (7 samples, 0.01%)libsystem_malloc.dylib`_malloc_zone_malloc (5 samples, 0.01%)libsystem_malloc.dylib`_nanov2_free (12 samples, 0.03%)libsystem_platform.dylib`_platform_memmove (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::clone::Clone>::clone (23 samples, 0.05%)libsystem_malloc.dylib`nanov2_malloc_type (14 samples, 0.03%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::clone::Clone>::clone (23 samples, 0.05%)libsystem_malloc.dylib`nanov2_malloc_type (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::clone::Clone>::clone (37 samples, 0.08%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_object_name (231 samples, 0.49%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_identifier (147 samples, 0.31%)sqlparser_bench-959bc5267970ca34`DYLD-STUB$$free (6 samples, 0.01%)libsystem_malloc.dylib`_nanov2_free (8 samples, 0.02%)libsystem_platform.dylib`_platform_memmove (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::clone::Clone>::clone (13 samples, 0.03%)libsystem_malloc.dylib`nanov2_malloc_type (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_optional_table_alias (100 samples, 0.21%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_optional_alias (82 samples, 0.17%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::clone::Clone>::clone (37 samples, 0.08%)libsystem_malloc.dylib`_malloc_zone_malloc (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::clone::Clone>::clone (15 samples, 0.03%)libsystem_malloc.dylib`nanov2_malloc_type (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::peek_token (40 samples, 0.08%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::clone::Clone>::clone (33 samples, 0.07%)libsystem_malloc.dylib`_malloc_zone_malloc (15 samples, 0.03%)libsystem_platform.dylib`_platform_memmove (15 samples, 0.03%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::clone::Clone>::clone (19 samples, 0.04%)libsystem_malloc.dylib`nanov2_malloc_type (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_table_factor (701 samples, 1.47%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::peek_tokens (177 samples, 0.37%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::clone::Clone>::clone (65 samples, 0.14%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_table_version (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_table_and_joins (991 samples, 2.08%)s..sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_comma_separated_with_trailing_commas (2,665 samples, 5.60%)sqlpars..sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_infix (6 samples, 0.01%)libsystem_malloc.dylib`_malloc_zone_malloc (13 samples, 0.03%)libsystem_platform.dylib`_platform_memmove (85 samples, 0.18%)sqlparser_bench-959bc5267970ca34`DYLD-STUB$$memcpy (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`__rdl_alloc (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::tokenizer::Token> (8 samples, 0.02%)libsystem_malloc.dylib`_nanov2_free (8 samples, 
0.02%)sqlparser_bench-959bc5267970ca34`sqlparser::dialect::Dialect::get_next_precedence_default (32 samples, 0.07%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::peek_token (19 samples, 0.04%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::clone::Clone>::clone (14 samples, 0.03%)libsystem_malloc.dylib`rack_get_thread_index (8 samples, 0.02%)libsystem_malloc.dylib`tiny_malloc_from_free_list (8 samples, 0.02%)libsystem_malloc.dylib`set_tiny_meta_header_in_use (11 samples, 0.02%)libsystem_malloc.dylib`_tiny_check_and_zero_inline_meta_from_freelist (5 samples, 0.01%)libsystem_malloc.dylib`szone_malloc_should_clear (114 samples, 0.24%)libsystem_malloc.dylib`tiny_malloc_should_clear (81 samples, 0.17%)libsystem_malloc.dylib`tiny_malloc_from_free_list (53 samples, 0.11%)libsystem_malloc.dylib`tiny_free_list_add_ptr (5 samples, 0.01%)libsystem_platform.dylib`_platform_memmove (23 samples, 0.05%)libsystem_malloc.dylib`_free (7 samples, 0.01%)libsystem_malloc.dylib`_malloc_zone_malloc (6 samples, 0.01%)libsystem_malloc.dylib`_nanov2_free (6 samples, 0.01%)libsystem_platform.dylib`_platform_memmove (62 samples, 0.13%)sqlparser_bench-959bc5267970ca34`DYLD-STUB$$memcpy (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::tokenizer::Token> (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`sqlparser::dialect::Dialect::get_next_precedence_default (30 samples, 0.06%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::peek_token (20 samples, 0.04%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::clone::Clone>::clone (12 samples, 0.03%)libsystem_malloc.dylib`set_tiny_meta_header_in_use (8 samples, 0.02%)libsystem_malloc.dylib`szone_malloc_should_clear (37 samples, 0.08%)libsystem_malloc.dylib`tiny_malloc_should_clear (36 samples, 0.08%)libsystem_malloc.dylib`tiny_malloc_from_free_list (25 samples, 0.05%)libsystem_malloc.dylib`_free (7 samples, 0.01%)libsystem_platform.dylib`_platform_memmove (19 samples, 0.04%)libsystem_malloc.dylib`_nanov2_free (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`sqlparser::dialect::Dialect::get_next_precedence_default (25 samples, 0.05%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::peek_token (17 samples, 0.04%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::clone::Clone>::clone (13 samples, 0.03%)libsystem_malloc.dylib`_nanov2_free (6 samples, 0.01%)libsystem_malloc.dylib`_nanov2_free (5 samples, 0.01%)libsystem_platform.dylib`_platform_memset (5 samples, 0.01%)libsystem_platform.dylib`_platform_memmove (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVecInner<A>::reserve::do_reserve_and_handle (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::finish_grow (6 samples, 0.01%)libsystem_malloc.dylib`nanov2_malloc_type (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::fmt::Write>::write_str (13 samples, 0.03%)libsystem_platform.dylib`_platform_memmove (6 samples, 0.01%)libsystem_malloc.dylib`nanov2_realloc (9 samples, 0.02%)libsystem_malloc.dylib`_malloc_zone_realloc (18 samples, 0.04%)libsystem_malloc.dylib`_realloc (20 samples, 0.04%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::fmt::Write>::write_str (28 samples, 0.06%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVecInner<A>::reserve::do_reserve_and_handle (24 samples, 0.05%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::finish_grow (22 samples, 
0.05%)libsystem_platform.dylib`_platform_memmove (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Location as core::fmt::Display>::fmt (57 samples, 0.12%)sqlparser_bench-959bc5267970ca34`core::fmt::write (55 samples, 0.12%)sqlparser_bench-959bc5267970ca34`core::fmt::num::imp::_<impl core::fmt::Display for u64>::fmt (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`core::fmt::Formatter::pad (5 samples, 0.01%)libsystem_platform.dylib`_platform_memmove (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::fmt::Display>::fmt (20 samples, 0.04%)sqlparser_bench-959bc5267970ca34`core::fmt::write (13 samples, 0.03%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_data_type (157 samples, 0.33%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_data_type_helper (155 samples, 0.33%)sqlparser_bench-959bc5267970ca34`alloc::fmt::format::format_inner (136 samples, 0.29%)sqlparser_bench-959bc5267970ca34`core::fmt::write (121 samples, 0.25%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::clone::Clone>::clone (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_prefix (223 samples, 0.47%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_value (26 samples, 0.05%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::clone::Clone>::clone (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_infix (338 samples, 0.71%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_subexpr (291 samples, 0.61%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_keyword (7 samples, 0.01%)libsystem_malloc.dylib`_malloc_zone_malloc (6 samples, 0.01%)libsystem_malloc.dylib`_nanov2_free (24 samples, 0.05%)libsystem_platform.dylib`_platform_memmove (8 samples, 0.02%)libsystem_platform.dylib`_platform_memset (8 samples, 0.02%)libsystem_platform.dylib`_platform_memmove (7 samples, 0.01%)libsystem_malloc.dylib`nanov2_pointer_size (5 samples, 0.01%)libsystem_malloc.dylib`_nanov2_free (10 samples, 0.02%)libsystem_malloc.dylib`nanov2_realloc (18 samples, 0.04%)libsystem_malloc.dylib`_malloc_zone_realloc (38 samples, 0.08%)libsystem_platform.dylib`_platform_memmove (7 samples, 0.01%)libsystem_malloc.dylib`_realloc (42 samples, 0.09%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVecInner<A>::reserve::do_reserve_and_handle (52 samples, 0.11%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::finish_grow (47 samples, 0.10%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::fmt::Write>::write_str (55 samples, 0.12%)libsystem_platform.dylib`_platform_memmove (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Location as core::fmt::Display>::fmt (85 samples, 0.18%)sqlparser_bench-959bc5267970ca34`core::fmt::write (84 samples, 0.18%)sqlparser_bench-959bc5267970ca34`core::fmt::num::imp::_<impl core::fmt::Display for u64>::fmt (15 samples, 0.03%)sqlparser_bench-959bc5267970ca34`alloc::fmt::format::format_inner (96 samples, 0.20%)sqlparser_bench-959bc5267970ca34`core::fmt::write (89 samples, 0.19%)libsystem_malloc.dylib`_nanov2_free (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::data_type::DataType> (11 samples, 0.02%)libsystem_platform.dylib`_platform_memset (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::fmt::Write>::write_str (5 samples, 0.01%)libsystem_platform.dylib`_platform_memmove (17 samples, 
0.04%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVecInner<A>::reserve::do_reserve_and_handle (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::fmt::Write>::write_str (15 samples, 0.03%)libsystem_platform.dylib`_platform_memmove (5 samples, 0.01%)libsystem_malloc.dylib`nanov2_pointer_size (7 samples, 0.01%)libsystem_malloc.dylib`_nanov2_free (9 samples, 0.02%)libsystem_malloc.dylib`nanov2_malloc (5 samples, 0.01%)libsystem_malloc.dylib`nanov2_realloc (21 samples, 0.04%)libsystem_malloc.dylib`_realloc (46 samples, 0.10%)libsystem_malloc.dylib`_malloc_zone_realloc (39 samples, 0.08%)libsystem_platform.dylib`_platform_memmove (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVecInner<A>::reserve::do_reserve_and_handle (63 samples, 0.13%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::finish_grow (57 samples, 0.12%)libsystem_malloc.dylib`nanov2_size (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::fmt::Write>::write_str (73 samples, 0.15%)sqlparser_bench-959bc5267970ca34`core::fmt::Formatter::pad_integral (6 samples, 0.01%)libsystem_platform.dylib`_platform_memmove (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::fmt::Write>::write_str (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`core::fmt::Formatter::pad_integral::write_prefix (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Location as core::fmt::Display>::fmt (121 samples, 0.25%)sqlparser_bench-959bc5267970ca34`core::fmt::write (120 samples, 0.25%)sqlparser_bench-959bc5267970ca34`core::fmt::num::imp::_<impl core::fmt::Display for u64>::fmt (27 samples, 0.06%)sqlparser_bench-959bc5267970ca34`<&T as core::fmt::Display>::fmt (5 samples, 0.01%)libsystem_platform.dylib`_platform_memmove (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::fmt::Display>::fmt (33 samples, 0.07%)sqlparser_bench-959bc5267970ca34`core::fmt::write (17 samples, 0.04%)sqlparser_bench-959bc5267970ca34`DYLD-STUB$$memcpy (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`alloc::fmt::format::format_inner (260 samples, 0.55%)sqlparser_bench-959bc5267970ca34`core::fmt::write (214 samples, 0.45%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVec<T,A>::grow_one (6 samples, 0.01%)libsystem_malloc.dylib`_nanov2_free (6 samples, 0.01%)libsystem_platform.dylib`_platform_memmove (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::clone::Clone>::clone (8 samples, 0.02%)libsystem_platform.dylib`_platform_memmove (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::clone::Clone>::clone (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::clone::Clone>::clone (17 samples, 0.04%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_object_name (63 samples, 0.13%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_identifier (49 samples, 0.10%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_data_type_helper (352 samples, 0.74%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_data_type (365 samples, 0.77%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_expr_prefix_by_unreserved_word (11 samples, 0.02%)libsystem_malloc.dylib`_nanov2_free (10 samples, 0.02%)libsystem_malloc.dylib`nanov2_malloc_type (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::clone::Clone>::clone (6 samples, 
0.01%)libsystem_malloc.dylib`nanov2_malloc_type (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_value (33 samples, 0.07%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::clone::Clone>::clone (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_prefix (622 samples, 1.31%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_infix (1,318 samples, 2.77%)sq..sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_subexpr (1,147 samples, 2.41%)sq..sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_keyword (5 samples, 0.01%)libsystem_malloc.dylib`_malloc_zone_malloc (5 samples, 0.01%)libsystem_malloc.dylib`_nanov2_free (10 samples, 0.02%)libsystem_malloc.dylib`_free (7 samples, 0.01%)libsystem_malloc.dylib`_nanov2_free (9 samples, 0.02%)libsystem_platform.dylib`_platform_memset (5 samples, 0.01%)libsystem_platform.dylib`_platform_memmove (23 samples, 0.05%)libsystem_malloc.dylib`_malloc_zone_malloc (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVecInner<A>::reserve::do_reserve_and_handle (13 samples, 0.03%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::fmt::Write>::write_str (20 samples, 0.04%)libsystem_platform.dylib`_platform_memmove (8 samples, 0.02%)libsystem_malloc.dylib`_nanov2_free (5 samples, 0.01%)libsystem_malloc.dylib`nanov2_malloc (7 samples, 0.01%)libsystem_malloc.dylib`nanov2_realloc (21 samples, 0.04%)libsystem_malloc.dylib`_malloc_zone_realloc (34 samples, 0.07%)libsystem_platform.dylib`_platform_memmove (8 samples, 0.02%)libsystem_malloc.dylib`_realloc (39 samples, 0.08%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::fmt::Write>::write_str (58 samples, 0.12%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVecInner<A>::reserve::do_reserve_and_handle (52 samples, 0.11%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::finish_grow (48 samples, 0.10%)libsystem_malloc.dylib`nanov2_size (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`core::fmt::Formatter::pad_integral (14 samples, 0.03%)libsystem_platform.dylib`_platform_memmove (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`core::fmt::Formatter::pad_integral::write_prefix (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Location as core::fmt::Display>::fmt (126 samples, 0.26%)sqlparser_bench-959bc5267970ca34`core::fmt::write (123 samples, 0.26%)sqlparser_bench-959bc5267970ca34`core::fmt::num::imp::_<impl core::fmt::Display for u64>::fmt (23 samples, 0.05%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::fmt::Write>::write_str (5 samples, 0.01%)libsystem_platform.dylib`_platform_memmove (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::fmt::Display>::fmt (31 samples, 0.07%)sqlparser_bench-959bc5267970ca34`core::fmt::write (17 samples, 0.04%)sqlparser_bench-959bc5267970ca34`core::fmt::Formatter::pad (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`alloc::fmt::format::format_inner (245 samples, 0.51%)sqlparser_bench-959bc5267970ca34`core::fmt::write (224 samples, 0.47%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_data_type (281 samples, 0.59%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_data_type_helper (270 samples, 0.57%)libsystem_malloc.dylib`_nanov2_free (5 samples, 0.01%)libsystem_malloc.dylib`_malloc_zone_malloc (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_value (30 samples, 
0.06%)sqlparser_bench-959bc5267970ca34`<sqlparser::tokenizer::Token as core::clone::Clone>::clone (16 samples, 0.03%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::clone::Clone>::clone (9 samples, 0.02%)libsystem_malloc.dylib`nanov2_malloc_type (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_prefix (365 samples, 0.77%)sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_subexpr (1,893 samples, 3.98%)sqlp..sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_query_body (5,198 samples, 10.92%)sqlparser_bench-..sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_select (4,878 samples, 10.24%)sqlparser_bench..sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_query (7,112 samples, 14.94%)sqlparser_bench-959bc52..sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_statements (7,412 samples, 15.57%)sqlparser_bench-959bc526..sqlparser_bench-959bc5267970ca34`sqlparser::parser::Parser::parse_statement (7,260 samples, 15.25%)sqlparser_bench-959bc52..sqlparser_bench-959bc5267970ca34`sqlparser::tokenizer::Tokenizer::next_token (136 samples, 0.29%)sqlparser_bench-959bc5267970ca34`sqlparser::tokenizer::Tokenizer::tokenize_identifier_or_keyword (26 samples, 0.05%)libsystem_malloc.dylib`_free (121 samples, 0.25%)libsystem_malloc.dylib`_nanov2_free (33 samples, 0.07%)libsystem_platform.dylib`_platform_memmove (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::iter::traits::collect::FromIterator<char>>::from_iter (18 samples, 0.04%)sqlparser_bench-959bc5267970ca34`<sqlparser::dialect::generic::GenericDialect as sqlparser::dialect::Dialect>::is_delimited_identifier_start (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`<sqlparser::dialect::mssql::MsSqlDialect as sqlparser::dialect::Dialect>::is_identifier_start (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`DYLD-STUB$$free (15 samples, 0.03%)sqlparser_bench-959bc5267970ca34`DYLD-STUB$$memcpy (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`__rdl_dealloc (5 samples, 0.01%)libsystem_malloc.dylib`_malloc_zone_malloc (11 samples, 0.02%)libsystem_malloc.dylib`_szone_free (5 samples, 0.01%)libsystem_malloc.dylib`free_medium (5 samples, 0.01%)libsystem_kernel.dylib`madvise (5 samples, 0.01%)libsystem_malloc.dylib`tiny_free_list_add_ptr (5 samples, 0.01%)libsystem_malloc.dylib`free_tiny (33 samples, 0.07%)libsystem_malloc.dylib`tiny_free_no_lock (27 samples, 0.06%)libsystem_malloc.dylib`tiny_free_list_remove_ptr (8 samples, 0.02%)libsystem_malloc.dylib`small_try_realloc_in_place (12 samples, 0.03%)libsystem_malloc.dylib`small_free_list_add_ptr (5 samples, 0.01%)libsystem_malloc.dylib`szone_malloc_should_clear (38 samples, 0.08%)libsystem_malloc.dylib`small_malloc_should_clear (27 samples, 0.06%)libsystem_malloc.dylib`small_malloc_from_free_list (23 samples, 0.05%)libsystem_malloc.dylib`small_free_list_remove_ptr_no_clear (8 samples, 0.02%)libsystem_malloc.dylib`szone_size (9 samples, 0.02%)libsystem_malloc.dylib`tiny_size (8 samples, 0.02%)libsystem_malloc.dylib`tiny_try_realloc_in_place (24 samples, 0.05%)libsystem_malloc.dylib`szone_realloc (166 samples, 0.35%)libsystem_platform.dylib`_platform_memset (10 samples, 0.02%)libsystem_malloc.dylib`_malloc_zone_realloc (223 samples, 0.47%)libsystem_platform.dylib`_platform_memmove (36 samples, 0.08%)libsystem_malloc.dylib`nanov2_realloc (6 samples, 0.01%)libsystem_malloc.dylib`szone_realloc (10 samples, 0.02%)libsystem_malloc.dylib`_realloc (297 samples, 
0.62%)libsystem_malloc.dylib`szone_size (34 samples, 0.07%)libsystem_malloc.dylib`tiny_size (33 samples, 0.07%)libsystem_malloc.dylib`szone_malloc_should_clear (39 samples, 0.08%)libsystem_malloc.dylib`tiny_malloc_should_clear (31 samples, 0.07%)libsystem_malloc.dylib`tiny_malloc_from_free_list (21 samples, 0.04%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVec<T,A>::grow_one (401 samples, 0.84%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::finish_grow (358 samples, 0.75%)sqlparser_bench-959bc5267970ca34`sqlparser::tokenizer::State::next (14 samples, 0.03%)sqlparser_bench-959bc5267970ca34`sqlparser::tokenizer::State::peek (27 samples, 0.06%)sqlparser_bench-959bc5267970ca34`sqlparser::tokenizer::Tokenizer::consume_and_return (18 samples, 0.04%)libsystem_malloc.dylib`_malloc_zone_malloc (7 samples, 0.01%)libsystem_malloc.dylib`_nanov2_free (21 samples, 0.04%)libsystem_platform.dylib`_platform_memcmp (42 samples, 0.09%)libsystem_platform.dylib`_platform_memmove (14 samples, 0.03%)sqlparser_bench-959bc5267970ca34`<sqlparser::dialect::mssql::MsSqlDialect as sqlparser::dialect::Dialect>::is_identifier_part (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`DYLD-STUB$$memcmp (15 samples, 0.03%)libsystem_malloc.dylib`_malloc_zone_malloc (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVec<T,A>::grow_one (36 samples, 0.08%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::finish_grow (10 samples, 0.02%)libsystem_malloc.dylib`nanov2_malloc_type (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`sqlparser::tokenizer::Token::make_word (18 samples, 0.04%)sqlparser_bench-959bc5267970ca34`alloc::str::_<impl str>::to_uppercase (6 samples, 0.01%)libsystem_malloc.dylib`_nanov2_free (5 samples, 0.01%)libsystem_malloc.dylib`nanov2_malloc_type (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVec<T,A>::grow_one (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`sqlparser::tokenizer::Tokenizer::next_token (641 samples, 1.35%)sqlparser_bench-959bc5267970ca34`sqlparser::tokenizer::Tokenizer::tokenize_word (61 samples, 0.13%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVecInner<A>::reserve::do_reserve_and_handle (14 samples, 0.03%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::finish_grow (7 samples, 0.01%)libsystem_malloc.dylib`_free (37 samples, 0.08%)libsystem_malloc.dylib`_malloc_zone_malloc (32 samples, 0.07%)libsystem_malloc.dylib`_nanov2_free (214 samples, 0.45%)libsystem_platform.dylib`_platform_memcmp (527 samples, 1.11%)libsystem_platform.dylib`_platform_memmove (89 samples, 0.19%)libsystem_malloc.dylib`_malloc_zone_malloc (37 samples, 0.08%)sqlparser_bench-959bc5267970ca34`__rdl_alloc (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVecInner<A>::reserve::do_reserve_and_handle (136 samples, 0.29%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::finish_grow (63 samples, 0.13%)libsystem_malloc.dylib`nanov2_malloc_type (48 samples, 0.10%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::iter::traits::collect::FromIterator<char>>::from_iter (201 samples, 0.42%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::finish_grow (18 samples, 0.04%)sqlparser_bench-959bc5267970ca34`<sqlparser::dialect::mssql::MsSqlDialect as sqlparser::dialect::Dialect>::is_identifier_part (105 samples, 0.22%)sqlparser_bench-959bc5267970ca34`DYLD-STUB$$free (23 samples, 0.05%)sqlparser_bench-959bc5267970ca34`DYLD-STUB$$malloc (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`DYLD-STUB$$memcmp (176 samples, 
[flamegraph image: sampled profile of the `sqlparser_bench-959bc5267970ca34` benchmark binary. Key frames: `criterion::benchmark_group::BenchmarkGroup::bench_function` dominates the trace (35,307 samples, 74.15%), with `criterion::bencher::Bencher::iter` at 16,292 samples (34.21%) and the warm-up pass at a similar share. Inside the measured closure, `sqlparser::parser::Parser::parse_sql` accounts for 12,017 samples (25.24%); of that, `Parser::try_with_sql` / `Tokenizer::tokenize_with_location` take 4,258 samples (8.94%), including `Tokenizer::tokenize_identifier_or_keyword` (2,345 samples, 4.92%), `Token::make_word` (300 samples, 0.63%), `str::to_uppercase`, and allocator calls (`malloc`, `RawVec::grow_one`). The remaining frames are criterion's rayon-parallelized statistical post-processing (`criterion::analysis`, `criterion::stats` bootstrap resampling and quicksort) rather than parser work.]
2.11%)s..sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (998 samples, 2.10%)s..sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (945 samples, 1.98%)s..sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (148 samples, 0.31%)libsystem_m.dylib`exp (9 samples, 0.02%)libsystem_m.dylib`exp (6 samples, 0.01%)libsystem_m.dylib`exp (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`core::ops::function::impls::_<impl core::ops::function::Fn<A> for &F>::call (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`oorandom::Rand64::rand_range (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (14 samples, 0.03%)sqlparser_bench-959bc5267970ca34`<rayon::iter::fold::FoldFolder<C,ID,F> as rayon::iter::plumbing::Folder<T>>::consume_iter (14 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (18 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (16 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (34 samples, 0.07%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (31 samples, 0.07%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)libsystem_m.dylib`exp (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (20 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (18 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (58 samples, 0.12%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (55 samples, 0.12%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (16 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (10 samples, 
0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (39 samples, 0.08%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (39 samples, 0.08%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (34 samples, 0.07%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (7 samples, 
0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (114 samples, 0.24%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (107 samples, 0.22%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (9 samples, 0.02%)libsystem_m.dylib`exp (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (19 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (16 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (8 samples, 0.02%)libsystem_m.dylib`exp (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (14 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (14 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (14 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (49 samples, 0.10%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (47 samples, 0.10%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (38 samples, 0.08%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (185 samples, 0.39%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (178 samples, 0.37%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (15 samples, 0.03%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (15 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (15 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (15 samples, 
0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (5 samples, 0.01%)libsystem_m.dylib`exp (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (20 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (14 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (18 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (18 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (16 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (15 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 
0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (75 samples, 0.16%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (75 samples, 0.16%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (71 samples, 0.15%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (40 samples, 0.08%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (40 samples, 0.08%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (40 samples, 0.08%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (39 samples, 0.08%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (19 samples, 0.04%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (19 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (19 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (18 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (24 samples, 0.05%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (21 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (16 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (16 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (15 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (51 samples, 0.11%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (48 samples, 0.10%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (7 samples, 
0.01%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (14 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (23 samples, 0.05%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (23 samples, 0.05%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (22 samples, 0.05%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (3,267 samples, 6.86%)sqlparser..sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (3,209 samples, 6.74%)sqlparser..sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (364 samples, 0.76%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (362 samples, 0.76%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (360 samples, 0.76%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (348 samples, 0.73%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (87 samples, 0.18%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (87 samples, 0.18%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (86 samples, 0.18%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (82 samples, 0.17%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (24 samples, 0.05%)libsystem_m.dylib`exp (33 
samples, 0.07%)sqlparser_bench-959bc5267970ca34`DYLD-STUB$$exp (5 samples, 0.01%)libsystem_m.dylib`exp (40 samples, 0.08%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::Producer::fold_with (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`core::ops::function::impls::_<impl core::ops::function::Fn<A> for &F>::call (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`criterion::analysis::compare::estimates::stats (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`criterion::stats::univariate::sample::Sample<A>::percentiles (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::slice::quicksort::recurse (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::slice::quicksort::recurse (7 samples, 0.01%)libsystem_m.dylib`exp (20 samples, 0.04%)sqlparser_bench-959bc5267970ca34`criterion::analysis::compare::estimates::stats (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`criterion::stats::univariate::sample::Sample<A>::percentiles (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::slice::quicksort::recurse (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::slice::quicksort::recurse (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::Producer::fold_with (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`core::ops::function::impls::_<impl core::ops::function::Fn<A> for &F>::call (9 samples, 0.02%)libsystem_m.dylib`exp (15 samples, 0.03%)libsystem_m.dylib`exp (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`criterion::analysis::estimates::stats (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (19 samples, 0.04%)sqlparser_bench-959bc5267970ca34`<rayon::iter::fold::FoldFolder<C,ID,F> as rayon::iter::plumbing::Folder<T>>::consume_iter (8 samples, 0.02%)libsystem_m.dylib`exp (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`criterion::stats::univariate::sample::Sample<A>::median_abs_dev (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`criterion::stats::univariate::sample::Sample<A>::percentiles (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::slice::quicksort::recurse (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (61 samples, 0.13%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (44 samples, 0.09%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (23 samples, 0.05%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (20 samples, 0.04%)sqlparser_bench-959bc5267970ca34`<rayon::iter::fold::FoldFolder<C,ID,F> as rayon::iter::plumbing::Folder<T>>::consume_iter (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`criterion::analysis::estimates::stats (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`DYLD-STUB$$exp (5 samples, 0.01%)libsystem_m.dylib`exp (14 samples, 0.03%)libsystem_m.dylib`exp (13 samples, 0.03%)sqlparser_bench-959bc5267970ca34`criterion::stats::univariate::sample::Sample<A>::median_abs_dev (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`criterion::stats::univariate::sample::Sample<A>::percentiles (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::slice::quicksort::recurse (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::slice::quicksort::recurse (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`criterion::analysis::estimates::stats (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (29 samples, 
0.06%)sqlparser_bench-959bc5267970ca34`<rayon::iter::fold::FoldFolder<C,ID,F> as rayon::iter::plumbing::Folder<T>>::consume_iter (13 samples, 0.03%)libsystem_m.dylib`exp (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`criterion::stats::univariate::sample::Sample<A>::median_abs_dev (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`criterion::stats::univariate::sample::Sample<A>::percentiles (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::slice::quicksort::recurse (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::slice::quicksort::recurse (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (177 samples, 0.37%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (142 samples, 0.30%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (79 samples, 0.17%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (74 samples, 0.16%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (51 samples, 0.11%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (21 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (20 samples, 0.04%)sqlparser_bench-959bc5267970ca34`<rayon::iter::fold::FoldFolder<C,ID,F> as rayon::iter::plumbing::Folder<T>>::consume_iter (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`criterion::analysis::estimates::stats (11 samples, 0.02%)libsystem_m.dylib`exp (28 samples, 0.06%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::Producer::fold_with (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`core::ops::function::impls::_<impl core::ops::function::Fn<A> for &F>::call (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`criterion::analysis::compare::estimates::stats (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`criterion::stats::univariate::sample::Sample<A>::percentiles (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::slice::quicksort::recurse (5 samples, 0.01%)libsystem_m.dylib`exp (19 samples, 0.04%)libsystem_m.dylib`exp (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`criterion::analysis::estimates::stats (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (20 samples, 0.04%)sqlparser_bench-959bc5267970ca34`<rayon::iter::fold::FoldFolder<C,ID,F> as rayon::iter::plumbing::Folder<T>>::consume_iter (8 samples, 0.02%)libsystem_m.dylib`exp (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`criterion::analysis::estimates::stats (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (20 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (20 samples, 0.04%)sqlparser_bench-959bc5267970ca34`<rayon::iter::fold::FoldFolder<C,ID,F> as rayon::iter::plumbing::Folder<T>>::consume_iter (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (74 samples, 0.16%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (47 samples, 0.10%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 
0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)libsystem_m.dylib`exp (15 samples, 0.03%)libsystem_m.dylib`exp (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`<rayon::iter::fold::FoldFolder<C,ID,F> as rayon::iter::plumbing::Folder<T>>::consume_iter (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`criterion::analysis::estimates::stats (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`criterion::stats::univariate::sample::Sample<A>::percentiles (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::slice::quicksort::recurse (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::slice::quicksort::recurse (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (25 samples, 0.05%)libsystem_m.dylib`exp (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`criterion::analysis::estimates::stats (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon::iter::fold::FoldFolder<C,ID,F> as rayon::iter::plumbing::Folder<T>>::consume_iter (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (23 samples, 0.05%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (21 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (73 samples, 0.15%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (69 samples, 0.14%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (50 samples, 0.11%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (19 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (19 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (223 samples, 0.47%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (219 samples, 0.46%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (183 samples, 0.38%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (36 samples, 0.08%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (36 samples, 0.08%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (36 samples, 0.08%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (36 samples, 0.08%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (10 samples, 
0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (466 samples, 0.98%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (409 samples, 0.86%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)libsystem_m.dylib`exp (52 samples, 0.11%)libsystem_m.dylib`exp (31 samples, 0.07%)sqlparser_bench-959bc5267970ca34`criterion::analysis::compare::estimates::stats (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`criterion::stats::univariate::sample::Sample<A>::percentiles (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::slice::quicksort::recurse (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::Producer::fold_with (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`core::ops::function::impls::_<impl core::ops::function::Fn<A> for &F>::call (9 samples, 0.02%)libsystem_m.dylib`exp (19 samples, 0.04%)libsystem_m.dylib`exp (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`<rayon::iter::fold::FoldFolder<C,ID,F> as rayon::iter::plumbing::Folder<T>>::consume_iter (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`criterion::analysis::estimates::stats (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`criterion::stats::univariate::sample::Sample<A>::percentiles (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::slice::quicksort::recurse (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (50 samples, 0.11%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (27 samples, 0.06%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (14 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (14 samples, 0.03%)sqlparser_bench-959bc5267970ca34`<rayon::iter::fold::FoldFolder<C,ID,F> as rayon::iter::plumbing::Folder<T>>::consume_iter (11 samples, 0.02%)libsystem_m.dylib`exp (12 samples, 0.03%)libsystem_m.dylib`exp (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`criterion::stats::univariate::sample::Sample<A>::median_abs_dev (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`criterion::stats::univariate::sample::Sample<A>::percentiles (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`criterion::analysis::estimates::stats (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`criterion::stats::univariate::sample::Sample<A>::percentiles (6 samples, 
0.01%)sqlparser_bench-959bc5267970ca34`rayon::slice::quicksort::recurse (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (22 samples, 0.05%)sqlparser_bench-959bc5267970ca34`<rayon::iter::fold::FoldFolder<C,ID,F> as rayon::iter::plumbing::Folder<T>>::consume_iter (12 samples, 0.03%)libsystem_m.dylib`exp (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (150 samples, 0.32%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (105 samples, 0.22%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (55 samples, 0.12%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (55 samples, 0.12%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (39 samples, 0.08%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (15 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (13 samples, 0.03%)sqlparser_bench-959bc5267970ca34`<rayon::iter::fold::FoldFolder<C,ID,F> as rayon::iter::plumbing::Folder<T>>::consume_iter (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`criterion::analysis::estimates::stats (7 samples, 0.01%)libsystem_m.dylib`exp (16 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::Producer::fold_with (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`core::ops::function::impls::_<impl core::ops::function::Fn<A> for &F>::call (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`criterion::analysis::compare::estimates::stats (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`criterion::stats::univariate::sample::Sample<A>::percentiles (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::slice::quicksort::recurse (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::slice::quicksort::recurse (5 samples, 0.01%)libsystem_m.dylib`exp (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`criterion::analysis::compare::estimates::stats (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`criterion::stats::univariate::sample::Sample<A>::percentiles (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::slice::quicksort::recurse (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::Producer::fold_with (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`core::ops::function::impls::_<impl core::ops::function::Fn<A> for &F>::call (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon::iter::fold::FoldFolder<C,ID,F> as rayon::iter::plumbing::Folder<T>>::consume_iter (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`criterion::analysis::estimates::stats (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`criterion::analysis::estimates::stats (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (41 samples, 0.09%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (19 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`<rayon::iter::fold::FoldFolder<C,ID,F> as rayon::iter::plumbing::Folder<T>>::consume_iter (8 samples, 0.02%)libsystem_m.dylib`exp (15 samples, 
0.03%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::Producer::fold_with (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`core::ops::function::impls::_<impl core::ops::function::Fn<A> for &F>::call (5 samples, 0.01%)libsystem_m.dylib`exp (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon::iter::fold::FoldFolder<C,ID,F> as rayon::iter::plumbing::Folder<T>>::consume_iter (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`<rayon::iter::fold::FoldFolder<C,ID,F> as rayon::iter::plumbing::Folder<T>>::consume_iter (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`criterion::analysis::estimates::stats (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (47 samples, 0.10%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (45 samples, 0.09%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (22 samples, 0.05%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (145 samples, 0.30%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (141 samples, 0.30%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (113 samples, 0.24%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (23 samples, 0.05%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (23 samples, 0.05%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (23 samples, 0.05%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (22 samples, 0.05%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (5 samples, 
0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (30 samples, 0.06%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (29 samples, 0.06%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (17 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (17 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (25 samples, 0.05%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (25 samples, 0.05%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (71 samples, 0.15%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (71 samples, 
[flamegraph text residue: profile of the `sqlparser_bench` criterion benchmark binary (`sqlparser_bench-959bc5267970ca34`); samples are dominated by rayon worker-thread frames (`rayon::iter::plumbing::bridge_producer_consumer::helper`, `rayon_core::join::join_context`, `rayon_core::job::StackJob::run_inline`, `rayon_core::registry::WorkerThread::wait_until_cold`) together with `libsystem_m.dylib` `exp` calls from criterion's statistical analysis (`criterion::analysis::estimates::stats`)]
0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (14 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (27 samples, 0.06%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (27 samples, 0.06%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (24 samples, 0.05%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (14 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (13 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (16 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (16 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (16 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (6 samples, 
0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (159 samples, 0.33%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (155 samples, 0.33%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (39 samples, 0.08%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (39 samples, 0.08%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (39 samples, 0.08%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (38 samples, 0.08%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (13 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (37 samples, 0.08%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (37 samples, 
0.08%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (33 samples, 0.07%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (24 samples, 0.05%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (20 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (44 samples, 0.09%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (43 samples, 0.09%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (6 samples, 
0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (16 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (14 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (880 samples, 1.85%)s..sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (878 samples, 1.84%)s..sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (847 samples, 1.78%)s..sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (279 samples, 0.59%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (278 samples, 0.58%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (278 samples, 0.58%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (269 samples, 0.56%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (72 samples, 0.15%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (72 samples, 0.15%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (71 samples, 0.15%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (69 samples, 0.14%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (21 samples, 0.04%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (21 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper 
(21 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (21 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)libsystem_kernel.dylib`swtch_pri (12 samples, 0.03%)libsystem_m.dylib`exp (16 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::Producer::fold_with (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`core::ops::function::impls::_<impl core::ops::function::Fn<A> for &F>::call (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`criterion::analysis::compare::estimates::stats (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`criterion::stats::univariate::sample::Sample<A>::percentiles (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::slice::quicksort::recurse (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::slice::quicksort::recurse (5 samples, 0.01%)libsystem_m.dylib`exp (16 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::Producer::fold_with (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`core::ops::function::impls::_<impl core::ops::function::Fn<A> for &F>::call (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`criterion::analysis::compare::estimates::stats (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`criterion::stats::univariate::sample::Sample<A>::percentiles (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::slice::quicksort::recurse (6 samples, 0.01%)libsystem_m.dylib`exp (17 samples, 0.04%)libsystem_m.dylib`exp (17 samples, 0.04%)libsystem_m.dylib`exp (20 samples, 0.04%)libsystem_m.dylib`exp (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`core::ops::function::impls::_<impl core::ops::function::Fn<A> for &F>::call (15 samples, 0.03%)sqlparser_bench-959bc5267970ca34`oorandom::Rand64::rand_range (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (34 samples, 0.07%)sqlparser_bench-959bc5267970ca34`<rayon::iter::fold::FoldFolder<C,ID,F> as rayon::iter::plumbing::Folder<T>>::consume_iter (32 samples, 0.07%)sqlparser_bench-959bc5267970ca34`oorandom::Rand64::rand_range (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (51 samples, 0.11%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (43 samples, 0.09%)libsystem_m.dylib`exp (5 samples, 0.01%)libsystem_m.dylib`exp (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (14 samples, 0.03%)sqlparser_bench-959bc5267970ca34`<rayon::iter::fold::FoldFolder<C,ID,F> as rayon::iter::plumbing::Folder<T>>::consume_iter (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`criterion::analysis::estimates::stats (5 samples, 0.01%)libsystem_m.dylib`exp (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (34 samples, 0.07%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (34 samples, 
0.07%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (28 samples, 0.06%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (13 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (112 samples, 0.24%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (88 samples, 0.18%)libsystem_m.dylib`exp (15 samples, 0.03%)libsystem_m.dylib`exp (6 samples, 0.01%)libsystem_m.dylib`exp (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (13 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (32 samples, 0.07%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (25 samples, 0.05%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`<rayon::iter::fold::FoldFolder<C,ID,F> as rayon::iter::plumbing::Folder<T>>::consume_iter (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`criterion::analysis::estimates::stats (5 samples, 0.01%)libsystem_m.dylib`exp (9 samples, 0.02%)libsystem_m.dylib`exp (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (15 samples, 0.03%)libsystem_m.dylib`exp (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (14 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (13 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (44 samples, 0.09%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (42 samples, 0.09%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (30 samples, 0.06%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (107 samples, 0.22%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (104 samples, 0.22%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (83 samples, 0.17%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (241 samples, 0.51%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (221 samples, 0.46%)sqlparser_bench-959bc5267970ca34`DYLD-STUB$$exp (5 samples, 0.01%)libsystem_m.dylib`exp (15 samples, 0.03%)libsystem_m.dylib`exp (13 samples, 0.03%)libsystem_m.dylib`exp (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (20 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (14 samples, 
0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (8 samples, 0.02%)libsystem_m.dylib`exp (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (66 samples, 0.14%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (50 samples, 0.11%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (30 samples, 0.06%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (28 samples, 0.06%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (23 samples, 0.05%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (15 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (15 samples, 0.03%)sqlparser_bench-959bc5267970ca34`<rayon::iter::fold::FoldFolder<C,ID,F> as rayon::iter::plumbing::Folder<T>>::consume_iter (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`criterion::analysis::estimates::stats (6 samples, 0.01%)libsystem_m.dylib`exp (10 samples, 0.02%)libsystem_m.dylib`exp (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (20 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)libsystem_m.dylib`exp (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (23 samples, 0.05%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (21 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (15 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (62 samples, 0.13%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (62 samples, 0.13%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (49 samples, 0.10%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (15 samples, 
0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (15 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (23 samples, 0.05%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (21 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (188 samples, 0.39%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (183 samples, 0.38%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (164 samples, 0.34%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (34 samples, 0.07%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (34 samples, 0.07%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (34 samples, 0.07%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (34 samples, 0.07%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (18 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (18 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (485 samples, 1.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (463 samples, 0.97%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (33 samples, 0.07%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (32 samples, 
0.07%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (32 samples, 0.07%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (31 samples, 0.07%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (13 samples, 0.03%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (13 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (13 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (13 samples, 0.03%)libsystem_m.dylib`exp (14 samples, 0.03%)libsystem_m.dylib`exp (12 samples, 0.03%)libsystem_m.dylib`exp (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (32 samples, 0.07%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (20 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)libsystem_m.dylib`exp (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (8 samples, 0.02%)libsystem_m.dylib`exp (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (84 samples, 0.18%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (70 samples, 0.15%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (38 samples, 0.08%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (35 samples, 0.07%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (26 samples, 0.05%)libsystem_m.dylib`exp (13 samples, 0.03%)libsystem_m.dylib`exp (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (24 samples, 0.05%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (18 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 
samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (17 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (17 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (72 samples, 0.15%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (69 samples, 0.14%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (55 samples, 0.12%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (10 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (30 samples, 0.06%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (30 samples, 
0.06%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (231 samples, 0.49%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (227 samples, 0.48%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (211 samples, 0.44%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (53 samples, 0.11%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (53 samples, 0.11%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (52 samples, 0.11%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (52 samples, 0.11%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (18 samples, 0.04%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (18 samples, 
0.04%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (18 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (18 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (13 samples, 0.03%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (13 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (13 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (13 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)libsystem_m.dylib`exp (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (20 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (17 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 
0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (17 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (17 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (15 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (52 samples, 0.11%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (48 samples, 0.10%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 
[Flamegraph residue: sampled stack frames from the `sqlparser_bench` Criterion run (binary `sqlparser_bench-959bc5267970ca34`). The recoverable content shows the profile dominated by rayon work-stealing plumbing (`rayon::iter::plumbing::bridge_producer_consumer::helper`, `rayon_core::join::join_context`, `rayon_core::job::StackJob::{execute, run_inline}`, `rayon_core::registry::WorkerThread::wait_until_cold`), Criterion's statistical post-processing (`criterion::analysis::estimates::stats`, `Sample::percentiles`, `Sample::median_abs_dev`, `rayon::slice::quicksort::recurse`), and `exp` calls in `libsystem_m.dylib`; individual frames range from 0.01% up to roughly 16% of samples.]
0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (21 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (21 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (20 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (116 samples, 0.24%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (116 samples, 0.24%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (116 samples, 0.24%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (116 samples, 0.24%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (40 samples, 0.08%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (40 samples, 0.08%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (40 samples, 0.08%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (39 samples, 0.08%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (453 samples, 0.95%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (453 samples, 0.95%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (21 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (21 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (21 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (12 samples, 
0.03%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (14 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (13 samples, 0.03%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (19 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (18 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon_core::job::StackJob<L,F,R>::run_inline (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (509 samples, 1.07%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (509 samples, 1.07%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (35 samples, 0.07%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (35 samples, 0.07%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (34 samples, 0.07%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (33 samples, 0.07%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (514 samples, 1.08%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (514 samples, 1.08%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (514 samples, 1.08%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (514 samples, 1.08%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::iter::plumbing::bridge_producer_consumer::helper (517 samples, 1.09%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (515 samples, 1.08%)sqlparser_bench-959bc5267970ca34`rayon::slice::quicksort::recurse (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon::slice::quicksort::recurse (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (5 
samples, 0.01%)sqlparser_bench-959bc5267970ca34`<rayon_core::job::StackJob<L,F,R> as rayon_core::job::Job>::execute (8,206 samples, 17.23%)sqlparser_bench-959bc526797..sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (525 samples, 1.10%)sqlparser_bench-959bc5267970ca34`rayon::slice::quicksort::recurse (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`rayon_core::join::join_context::_{{closure}} (8 samples, 0.02%)sqlparser_bench-959bc5267970ca34`<core::iter::adapters::chain::Chain<A,B> as core::iter::traits::iterator::Iterator>::try_fold (13 samples, 0.03%)sqlparser_bench-959bc5267970ca34`crossbeam_deque::deque::Stealer<T>::steal (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`crossbeam_epoch::default::with_handle (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::find_work (14 samples, 0.03%)libsystem_kernel.dylib`__psynch_cvwait (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`rayon_core::sleep::Sleep::sleep (8 samples, 0.02%)libsystem_pthread.dylib`_pthread_mutex_firstfit_lock_slow (10 samples, 0.02%)libsystem_kernel.dylib`__psynch_mutexwait (10 samples, 0.02%)libsystem_pthread.dylib`thread_start (8,379 samples, 17.60%)libsystem_pthread.dylib`thr..libsystem_pthread.dylib`_pthread_start (8,379 samples, 17.60%)libsystem_pthread.dylib`_pt..sqlparser_bench-959bc5267970ca34`std::sys::pal::unix::thread::Thread::new::thread_start (8,379 samples, 17.60%)sqlparser_bench-959bc526797..sqlparser_bench-959bc5267970ca34`core::ops::function::FnOnce::call_once{{vtable.shim}} (8,379 samples, 17.60%)sqlparser_bench-959bc526797..sqlparser_bench-959bc5267970ca34`std::sys::backtrace::__rust_begin_short_backtrace (8,379 samples, 17.60%)sqlparser_bench-959bc526797..sqlparser_bench-959bc5267970ca34`rayon_core::registry::ThreadBuilder::run (8,379 samples, 17.60%)sqlparser_bench-959bc526797..sqlparser_bench-959bc5267970ca34`rayon_core::registry::WorkerThread::wait_until_cold (8,379 samples, 17.60%)sqlparser_bench-959bc526797..sqlparser_bench-959bc5267970ca34`rayon_core::sleep::Sleep::wake_any_threads (17 samples, 0.04%)sqlparser_bench-959bc5267970ca34`rayon_core::sleep::Sleep::wake_specific_thread (17 samples, 0.04%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::fmt::Write>::write_str (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::RawVecInner<A>::reserve::do_reserve_and_handle (12 samples, 0.03%)sqlparser_bench-959bc5267970ca34`alloc::raw_vec::finish_grow (12 samples, 0.03%)libsystem_malloc.dylib`_realloc (12 samples, 0.03%)libsystem_malloc.dylib`_malloc_zone_realloc (12 samples, 0.03%)libsystem_platform.dylib`_platform_memmove (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`<&T as core::fmt::Display>::fmt (72 samples, 0.15%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::fmt::Write>::write_str (133 samples, 0.28%)sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt (37 samples, 0.08%)sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (96 samples, 0.20%)sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Ident as core::fmt::Display>::fmt (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`<sqlparser::ast::operator::BinaryOperator as core::fmt::Display>::fmt (58 samples, 0.12%)sqlparser_bench-959bc5267970ca34`<sqlparser::ast::value::Value as core::fmt::Display>::fmt (9 samples, 0.02%)sqlparser_bench-959bc5267970ca34`core::fmt::Formatter::write_str (12 samples, 
0.03%)sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,141 samples, 2.40%)sq..sqlparser_bench-959bc5267970ca34`core::fmt::write (713 samples, 1.50%)libdyld.dylib`tlv_get_addr (127 samples, 0.27%)sqlparser_bench-959bc5267970ca34`psm::stack_pointer (44 samples, 0.09%)sqlparser_bench-959bc5267970ca34`rust_psm_stack_pointer (124 samples, 0.26%)sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt (408 samples, 0.86%)sqlparser_bench-959bc5267970ca34`stacker::remaining_stack (113 samples, 0.24%)sqlparser_bench-959bc5267970ca34`<&T as core::fmt::Display>::fmt (27 samples, 0.06%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::fmt::Write>::write_str (68 samples, 0.14%)sqlparser_bench-959bc5267970ca34`<str as core::fmt::Display>::fmt (20 samples, 0.04%)sqlparser_bench-959bc5267970ca34`core::fmt::Formatter::pad (65 samples, 0.14%)sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,858 samples, 
3.90%)sqlp..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as 
core::fmt::Display>::fmt::_{{closure}} (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,858 samples, 3.90%)sqlp..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,846 samples, 
3.88%)sqlp..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,846 samples, 3.88%)sqlp..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,846 samples, 3.88%)sqlp..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,846 samples, 3.88%)sqlp..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::value::Value as core::fmt::Display>::fmt (297 samples, 0.62%)sqlparser_bench-959bc5267970ca34`core::fmt::write (117 samples, 0.25%)libsystem_platform.dylib`_platform_memmove (253 samples, 0.53%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::fmt::Write>::write_str (108 samples, 0.23%)sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt (62 samples, 0.13%)sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (372 samples, 0.78%)sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Ident as core::fmt::Display>::fmt (45 samples, 0.09%)sqlparser_bench-959bc5267970ca34`<sqlparser::ast::value::Value as core::fmt::Display>::fmt (57 samples, 0.12%)sqlparser_bench-959bc5267970ca34`DYLD-STUB$$memcpy (71 samples, 0.15%)sqlparser_bench-959bc5267970ca34`core::fmt::Formatter::write_fmt (32 samples, 0.07%)sqlparser_bench-959bc5267970ca34`core::fmt::Formatter::write_str (34 samples, 0.07%)sqlparser_bench-959bc5267970ca34`core::fmt::write (127 samples, 0.27%)sqlparser_bench-959bc5267970ca34`recursive::get_minimum_stack_size (101 samples, 0.21%)sqlparser_bench-959bc5267970ca34`recursive::get_stack_allocation_size (63 samples, 0.13%)sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,396 samples, 2.93%)sq..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,396 samples, 2.93%)sq..sqlparser_bench-959bc5267970ca34`stacker::remaining_stack (71 samples, 0.15%)libsystem_platform.dylib`_platform_memmove (84 samples, 0.18%)sqlparser_bench-959bc5267970ca34`<alloc::string::String as core::fmt::Write>::write_str (47 samples, 0.10%)sqlparser_bench-959bc5267970ca34`<str as core::fmt::Display>::fmt (25 samples, 0.05%)sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr 
as core::fmt::Display>::fmt::_{{closure}} (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,590 samples, 
3.34%)sql..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,590 samples, 
3.34%)sql..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,590 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::Expr as core::fmt::Display>::fmt::_{{closure}} (1,589 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`core::fmt::write (1,589 samples, 3.34%)sql..sqlparser_bench-959bc5267970ca34`<sqlparser::ast::value::Value as core::fmt::Display>::fmt (193 samples, 0.41%)sqlparser_bench-959bc5267970ca34`core::fmt::write (193 samples, 0.41%)sqlparser_bench-959bc5267970ca34`core::fmt::Formatter::pad (36 samples, 0.08%)sqlparser_bench-959bc5267970ca34`core::fmt::write (1,598 samples, 3.36%)sql..libsystem_malloc.dylib`free_tiny (6 samples, 0.01%)libsystem_malloc.dylib`tiny_free_no_lock (6 samples, 0.01%)libsystem_malloc.dylib`tiny_free_list_add_ptr (7 samples, 0.01%)libsystem_malloc.dylib`tiny_free_scan_madvise_free (6 samples, 0.01%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (29 samples, 0.06%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (20 samples, 0.04%)libsystem_malloc.dylib`free_tiny (20 samples, 0.04%)libsystem_malloc.dylib`tiny_free_no_lock (20 samples, 0.04%)libsystem_malloc.dylib`tiny_free_scan_madvise_free (5 samples, 0.01%)libsystem_kernel.dylib`madvise (5 samples, 0.01%)libsystem_malloc.dylib`free_tiny (9 samples, 0.02%)libsystem_malloc.dylib`tiny_free_no_lock (9 samples, 0.02%)libsystem_malloc.dylib`tiny_free_list_add_ptr (8 samples, 0.02%)libsystem_malloc.dylib`tiny_free_list_remove_ptr (7 samples, 0.01%)libsystem_malloc.dylib`free_tiny (20 samples, 0.04%)libsystem_malloc.dylib`tiny_free_no_lock (20 samples, 0.04%)libsystem_malloc.dylib`free_tiny (11 samples, 0.02%)libsystem_malloc.dylib`tiny_free_no_lock (9 samples, 0.02%)libsystem_malloc.dylib`_nanov2_free (7 samples, 0.01%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (10 samples, 0.02%)libsystem_malloc.dylib`_nanov2_free (5 samples, 0.01%)libsystem_malloc.dylib`free_tiny (11 samples, 0.02%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (54 samples, 0.11%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (33 samples, 0.07%)libsystem_malloc.dylib`free_tiny (16 samples, 0.03%)libsystem_malloc.dylib`tiny_free_no_lock (12 samples, 0.03%)libsystem_malloc.dylib`_free (7 samples, 0.01%)libsystem_malloc.dylib`_szone_free (8 samples, 0.02%)libsystem_malloc.dylib`free_tiny (13 samples, 0.03%)libsystem_platform.dylib`_platform_memset (5 samples, 0.01%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (94 samples, 
0.20%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 
0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 
0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<alloc::boxed::Box<sqlparser::ast::Expr>> (267 samples, 0.56%)sqlparser_bench-959bc5267970ca34`core::ptr::drop_in_place<sqlparser::ast::Expr> (238 samples, 
[flamegraph text omitted: recursive `core::ptr::drop_in_place<sqlparser::ast::Expr>` / `Box<Expr>` drop frames and libsystem_malloc free calls; all (47,617 samples, 100%)]
\ No newline at end of file
diff --git a/src/ast/data_type.rs b/src/ast/data_type.rs
index bc48341c4..52919de8a 100644
--- a/src/ast/data_type.rs
+++ b/src/ast/data_type.rs
@@ -25,309 +25,417 @@ use serde::{Deserialize, Serialize};
 #[cfg(feature = "visitor")]
 use sqlparser_derive::{Visit, VisitMut};
-use crate::ast::{display_comma_separated, ObjectName, StructField, UnionField};
+use crate::ast::{display_comma_separated, Expr, ObjectName, StructField, UnionField};
 use super::{value::escape_single_quote_string, ColumnDef};
+#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)]
+#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
+#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
+pub enum EnumMember {
+    Name(String),
+    /// ClickHouse allows to specify an
integer value for each enum value. + /// + /// [ClickHouse](https://clickhouse.com/docs/en/sql-reference/data-types/enum) + NamedValue(String, Expr), +} + /// SQL data types #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] pub enum DataType { - /// Fixed-length character type e.g. CHARACTER(10) + /// Table type in [PostgreSQL], e.g. CREATE FUNCTION RETURNS TABLE(...). + /// + /// [PostgreSQL]: https://www.postgresql.org/docs/15/sql-createfunction.html + Table(Vec), + /// Fixed-length character type, e.g. CHARACTER(10). Character(Option), - /// Fixed-length char type e.g. CHAR(10) + /// Fixed-length char type, e.g. CHAR(10). Char(Option), - /// Character varying type e.g. CHARACTER VARYING(10) + /// Character varying type, e.g. CHARACTER VARYING(10). CharacterVarying(Option), - /// Char varying type e.g. CHAR VARYING(10) + /// Char varying type, e.g. CHAR VARYING(10). CharVarying(Option), - /// Variable-length character type e.g. VARCHAR(10) + /// Variable-length character type, e.g. VARCHAR(10). Varchar(Option), - /// Variable-length character type e.g. NVARCHAR(10) + /// Variable-length character type, e.g. NVARCHAR(10). Nvarchar(Option), - /// Uuid type + /// Uuid type. Uuid, - /// Large character object with optional length e.g. CHARACTER LARGE OBJECT, CHARACTER LARGE OBJECT(1000), [standard] + /// Large character object with optional length, + /// e.g. CHARACTER LARGE OBJECT, CHARACTER LARGE OBJECT(1000), [SQL Standard]. /// - /// [standard]: https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#character-large-object-type + /// [SQL Standard]: https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#character-large-object-type CharacterLargeObject(Option), - /// Large character object with optional length e.g. CHAR LARGE OBJECT, CHAR LARGE OBJECT(1000), [standard] + /// Large character object with optional length, + /// e.g. CHAR LARGE OBJECT, CHAR LARGE OBJECT(1000), [SQL Standard]. /// - /// [standard]: https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#character-large-object-type + /// [SQL Standard]: https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#character-large-object-type CharLargeObject(Option), - /// Large character object with optional length e.g. CLOB, CLOB(1000), [standard] + /// Large character object with optional length, + /// e.g. CLOB, CLOB(1000), [SQL Standard]. /// - /// [standard]: https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#character-large-object-type + /// [SQL Standard]: https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#character-large-object-type /// [Oracle]: https://docs.oracle.com/javadb/10.10.1.2/ref/rrefclob.html Clob(Option), - /// Fixed-length binary type with optional length e.g. [standard], [MS SQL Server] + /// Fixed-length binary type with optional length, + /// see [SQL Standard], [MS SQL Server]. /// - /// [standard]: https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#binary-string-type + /// [SQL Standard]: https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#binary-string-type /// [MS SQL Server]: https://learn.microsoft.com/pt-br/sql/t-sql/data-types/binary-and-varbinary-transact-sql?view=sql-server-ver16 Binary(Option), - /// Variable-length binary with optional length type e.g. 
[standard], [MS SQL Server] + /// Variable-length binary with optional length type, + /// see [SQL Standard], [MS SQL Server]. /// - /// [standard]: https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#binary-string-type + /// [SQL Standard]: https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#binary-string-type /// [MS SQL Server]: https://learn.microsoft.com/pt-br/sql/t-sql/data-types/binary-and-varbinary-transact-sql?view=sql-server-ver16 - Varbinary(Option), - /// Large binary object with optional length e.g. BLOB, BLOB(1000), [standard], [Oracle] + Varbinary(Option), + /// Large binary object with optional length, + /// see [SQL Standard], [Oracle]. /// - /// [standard]: https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#binary-large-object-string-type + /// [SQL Standard]: https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#binary-large-object-string-type /// [Oracle]: https://docs.oracle.com/javadb/10.8.3.0/ref/rrefblob.html Blob(Option), + /// [MySQL] blob with up to 2**8 bytes. + /// + /// [MySQL]: https://dev.mysql.com/doc/refman/9.1/en/blob.html + TinyBlob, + /// [MySQL] blob with up to 2**24 bytes. + /// + /// [MySQL]: https://dev.mysql.com/doc/refman/9.1/en/blob.html + MediumBlob, + /// [MySQL] blob with up to 2**32 bytes. + /// + /// [MySQL]: https://dev.mysql.com/doc/refman/9.1/en/blob.html + LongBlob, /// Variable-length binary data with optional length. /// - /// [bigquery]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#bytes_type + /// [BigQuery]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#bytes_type Bytes(Option), - /// Numeric type with optional precision and scale e.g. NUMERIC(10,2), [standard][1] + /// Numeric type with optional precision and scale, e.g. NUMERIC(10,2), [SQL Standard][1]. /// /// [1]: https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#exact-numeric-type Numeric(ExactNumberInfo), - /// Decimal type with optional precision and scale e.g. DECIMAL(10,2), [standard][1] + /// Decimal type with optional precision and scale, e.g. DECIMAL(10,2), [SQL Standard][1]. /// /// [1]: https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#exact-numeric-type Decimal(ExactNumberInfo), - /// [BigNumeric] type used in BigQuery + /// [BigNumeric] type used in BigQuery. /// /// [BigNumeric]: https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#bignumeric_literals BigNumeric(ExactNumberInfo), - /// This is alias for `BigNumeric` type used in BigQuery + /// This is alias for `BigNumeric` type used in BigQuery. /// /// [BigDecimal]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#decimal_types BigDecimal(ExactNumberInfo), - /// Dec type with optional precision and scale e.g. DEC(10,2), [standard][1] + /// Dec type with optional precision and scale, e.g. DEC(10,2), [SQL Standard][1]. /// /// [1]: https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#exact-numeric-type Dec(ExactNumberInfo), - /// Floating point with optional precision e.g. FLOAT(8) + /// Floating point with optional precision, e.g. FLOAT(8). Float(Option), - /// Tiny integer with optional display width e.g. TINYINT or TINYINT(3) + /// Tiny integer with optional display width, e.g. TINYINT or TINYINT(3). TinyInt(Option), - /// Unsigned tiny integer with optional display width e.g. 
TINYINT UNSIGNED or TINYINT(3) UNSIGNED - UnsignedTinyInt(Option), - /// Int2 as alias for SmallInt in [postgresql] - /// Note: Int2 mean 2 bytes in postgres (not 2 bits) - /// Int2 with optional display width e.g. INT2 or INT2(5) + /// Unsigned tiny integer with optional display width, + /// e.g. TINYINT UNSIGNED or TINYINT(3) UNSIGNED. + TinyIntUnsigned(Option), + /// Unsigned tiny integer, e.g. UTINYINT + UTinyInt, + /// Int2 is an alias for SmallInt in [PostgreSQL]. + /// Note: Int2 means 2 bytes in PostgreSQL (not 2 bits). + /// Int2 with optional display width, e.g. INT2 or INT2(5). /// - /// [postgresql]: https://www.postgresql.org/docs/15/datatype.html + /// [PostgreSQL]: https://www.postgresql.org/docs/current/datatype.html Int2(Option), - /// Unsigned Int2 with optional display width e.g. INT2 Unsigned or INT2(5) Unsigned - UnsignedInt2(Option), - /// Small integer with optional display width e.g. SMALLINT or SMALLINT(5) + /// Unsigned Int2 with optional display width, e.g. INT2 UNSIGNED or INT2(5) UNSIGNED. + Int2Unsigned(Option), + /// Small integer with optional display width, e.g. SMALLINT or SMALLINT(5). SmallInt(Option), - /// Unsigned small integer with optional display width e.g. SMALLINT UNSIGNED or SMALLINT(5) UNSIGNED - UnsignedSmallInt(Option), - /// MySQL medium integer ([1]) with optional display width e.g. MEDIUMINT or MEDIUMINT(5) + /// Unsigned small integer with optional display width, + /// e.g. SMALLINT UNSIGNED or SMALLINT(5) UNSIGNED. + SmallIntUnsigned(Option), + /// Unsigned small integer, e.g. USMALLINT. + USmallInt, + /// MySQL medium integer ([1]) with optional display width, + /// e.g. MEDIUMINT or MEDIUMINT(5). /// /// [1]: https://dev.mysql.com/doc/refman/8.0/en/integer-types.html MediumInt(Option), - /// Unsigned medium integer ([1]) with optional display width e.g. MEDIUMINT UNSIGNED or MEDIUMINT(5) UNSIGNED + /// Unsigned medium integer ([1]) with optional display width, + /// e.g. MEDIUMINT UNSIGNED or MEDIUMINT(5) UNSIGNED. /// /// [1]: https://dev.mysql.com/doc/refman/8.0/en/integer-types.html - UnsignedMediumInt(Option), - /// Int with optional display width e.g. INT or INT(11) + MediumIntUnsigned(Option), + /// Int with optional display width, e.g. INT or INT(11). Int(Option), - /// Int4 as alias for Integer in [postgresql] - /// Note: Int4 mean 4 bytes in postgres (not 4 bits) - /// Int4 with optional display width e.g. Int4 or Int4(11) + /// Int4 is an alias for Integer in [PostgreSQL]. + /// Note: Int4 means 4 bytes in PostgreSQL (not 4 bits). + /// Int4 with optional display width, e.g. Int4 or Int4(11). /// - /// [postgresql]: https://www.postgresql.org/docs/15/datatype.html + /// [PostgreSQL]: https://www.postgresql.org/docs/current/datatype.html Int4(Option), - /// Int8 as alias for Bigint in [postgresql] and integer type in [clickhouse] - /// Note: Int8 mean 8 bytes in [postgresql] (not 8 bits) - /// Int8 with optional display width e.g. INT8 or INT8(11) - /// Note: Int8 mean 8 bits in [clickhouse] + /// Int8 is an alias for BigInt in [PostgreSQL] and Integer type in [ClickHouse]. + /// Int8 with optional display width, e.g. INT8 or INT8(11). + /// Note: Int8 means 8 bytes in [PostgreSQL], but 8 bits in [ClickHouse]. 
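
(Editorial aside, not part of the patch: a minimal sketch of how the renamed unsigned-integer variants, e.g. `UnsignedTinyInt` -> `TinyIntUnsigned`, are expected to print. It assumes `format_type_with_optional_length` keeps appending ` UNSIGNED` after the optional display width, and that the variants remain re-exported from `sqlparser::ast`.)

```rust
use sqlparser::ast::DataType;

fn main() {
    // The rename only changes the Rust variant name; the rendered SQL keeps
    // the `<TYPE>(<width>) UNSIGNED` shape used before the rename.
    let ty = DataType::TinyIntUnsigned(Some(3));
    assert_eq!(ty.to_string(), "TINYINT(3) UNSIGNED"); // assumed output of the shared helper
}
```
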
/// - /// [postgresql]: https://www.postgresql.org/docs/15/datatype.html - /// [clickhouse]: https://clickhouse.com/docs/en/sql-reference/data-types/int-uint + /// [PostgreSQL]: https://www.postgresql.org/docs/current/datatype.html + /// [ClickHouse]: https://clickhouse.com/docs/en/sql-reference/data-types/int-uint Int8(Option), - /// Integer type in [clickhouse] - /// Note: Int16 mean 16 bits in [clickhouse] + /// Integer type in [ClickHouse]. + /// Note: Int16 means 16 bits in [ClickHouse]. /// - /// [clickhouse]: https://clickhouse.com/docs/en/sql-reference/data-types/int-uint + /// [ClickHouse]: https://clickhouse.com/docs/en/sql-reference/data-types/int-uint Int16, - /// Integer type in [clickhouse] - /// Note: Int16 mean 32 bits in [clickhouse] + /// Integer type in [ClickHouse]. + /// Note: Int32 means 32 bits in [ClickHouse]. /// - /// [clickhouse]: https://clickhouse.com/docs/en/sql-reference/data-types/int-uint + /// [ClickHouse]: https://clickhouse.com/docs/en/sql-reference/data-types/int-uint Int32, - /// Integer type in [bigquery], [clickhouse] + /// Integer type in [BigQuery], [ClickHouse]. /// - /// [bigquery]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#integer_types - /// [clickhouse]: https://clickhouse.com/docs/en/sql-reference/data-types/int-uint + /// [BigQuery]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#integer_types + /// [ClickHouse]: https://clickhouse.com/docs/en/sql-reference/data-types/int-uint Int64, - /// Integer type in [clickhouse] - /// Note: Int128 mean 128 bits in [clickhouse] + /// Integer type in [ClickHouse]. + /// Note: Int128 means 128 bits in [ClickHouse]. /// - /// [clickhouse]: https://clickhouse.com/docs/en/sql-reference/data-types/int-uint + /// [ClickHouse]: https://clickhouse.com/docs/en/sql-reference/data-types/int-uint Int128, - /// Integer type in [clickhouse] - /// Note: Int256 mean 256 bits in [clickhouse] + /// Integer type in [ClickHouse]. + /// Note: Int256 means 256 bits in [ClickHouse]. /// - /// [clickhouse]: https://clickhouse.com/docs/en/sql-reference/data-types/int-uint + /// [ClickHouse]: https://clickhouse.com/docs/en/sql-reference/data-types/int-uint Int256, - /// Integer with optional display width e.g. INTEGER or INTEGER(11) + /// Integer with optional display width, e.g. INTEGER or INTEGER(11). Integer(Option), - /// Unsigned int with optional display width e.g. INT UNSIGNED or INT(11) UNSIGNED - UnsignedInt(Option), - /// Unsigned int4 with optional display width e.g. INT4 UNSIGNED or INT4(11) UNSIGNED - UnsignedInt4(Option), - /// Unsigned integer with optional display width e.g. INTEGER UNSIGNED or INTEGER(11) UNSIGNED - UnsignedInteger(Option), - /// Unsigned integer type in [clickhouse] - /// Note: UInt8 mean 8 bits in [clickhouse] - /// - /// [clickhouse]: https://clickhouse.com/docs/en/sql-reference/data-types/int-uint + /// Unsigned int with optional display width, e.g. INT UNSIGNED or INT(11) UNSIGNED. + IntUnsigned(Option), + /// Unsigned int4 with optional display width, e.g. INT4 UNSIGNED or INT4(11) UNSIGNED. + Int4Unsigned(Option), + /// Unsigned integer with optional display width, e.g. INTEGER UNSIGNED or INTEGER(11) UNSIGNED. + IntegerUnsigned(Option), + /// 128-bit integer type, e.g. HUGEINT. + HugeInt, + /// Unsigned 128-bit integer type, e.g. UHUGEINT. + UHugeInt, + /// Unsigned integer type in [ClickHouse]. + /// Note: UInt8 means 8 bits in [ClickHouse]. 
+ /// + /// [ClickHouse]: https://clickhouse.com/docs/en/sql-reference/data-types/int-uint UInt8, - /// Unsigned integer type in [clickhouse] - /// Note: UInt16 mean 16 bits in [clickhouse] + /// Unsigned integer type in [ClickHouse]. + /// Note: UInt16 means 16 bits in [ClickHouse]. /// - /// [clickhouse]: https://clickhouse.com/docs/en/sql-reference/data-types/int-uint + /// [ClickHouse]: https://clickhouse.com/docs/en/sql-reference/data-types/int-uint UInt16, - /// Unsigned integer type in [clickhouse] - /// Note: UInt32 mean 32 bits in [clickhouse] + /// Unsigned integer type in [ClickHouse]. + /// Note: UInt32 means 32 bits in [ClickHouse]. /// - /// [clickhouse]: https://clickhouse.com/docs/en/sql-reference/data-types/int-uint + /// [ClickHouse]: https://clickhouse.com/docs/en/sql-reference/data-types/int-uint UInt32, - /// Unsigned integer type in [clickhouse] - /// Note: UInt64 mean 64 bits in [clickhouse] + /// Unsigned integer type in [ClickHouse]. + /// Note: UInt64 means 64 bits in [ClickHouse]. /// - /// [clickhouse]: https://clickhouse.com/docs/en/sql-reference/data-types/int-uint + /// [ClickHouse]: https://clickhouse.com/docs/en/sql-reference/data-types/int-uint UInt64, - /// Unsigned integer type in [clickhouse] - /// Note: UInt128 mean 128 bits in [clickhouse] + /// Unsigned integer type in [ClickHouse]. + /// Note: UInt128 means 128 bits in [ClickHouse]. /// - /// [clickhouse]: https://clickhouse.com/docs/en/sql-reference/data-types/int-uint + /// [ClickHouse]: https://clickhouse.com/docs/en/sql-reference/data-types/int-uint UInt128, - /// Unsigned integer type in [clickhouse] - /// Note: UInt256 mean 256 bits in [clickhouse] + /// Unsigned integer type in [ClickHouse]. + /// Note: UInt256 means 256 bits in [ClickHouse]. /// - /// [clickhouse]: https://clickhouse.com/docs/en/sql-reference/data-types/int-uint + /// [ClickHouse]: https://clickhouse.com/docs/en/sql-reference/data-types/int-uint UInt256, - /// Big integer with optional display width e.g. BIGINT or BIGINT(20) + /// Big integer with optional display width, e.g. BIGINT or BIGINT(20). BigInt(Option), - /// Unsigned big integer with optional display width e.g. BIGINT UNSIGNED or BIGINT(20) UNSIGNED - UnsignedBigInt(Option), - /// Unsigned Int8 with optional display width e.g. INT8 UNSIGNED or INT8(11) UNSIGNED - UnsignedInt8(Option), - /// Float4 as alias for Real in [postgresql] + /// Unsigned big integer with optional display width, e.g. BIGINT UNSIGNED or BIGINT(20) UNSIGNED. + BigIntUnsigned(Option), + /// Unsigned big integer, e.g. UBIGINT. + UBigInt, + /// Unsigned Int8 with optional display width, e.g. INT8 UNSIGNED or INT8(11) UNSIGNED. + Int8Unsigned(Option), + /// Signed integer as used in [MySQL CAST] target types, without optional `INTEGER` suffix, + /// e.g. `SIGNED` /// - /// [postgresql]: https://www.postgresql.org/docs/15/datatype.html + /// [MySQL CAST]: https://dev.mysql.com/doc/refman/8.4/en/cast-functions.html + Signed, + /// Signed integer as used in [MySQL CAST] target types, with optional `INTEGER` suffix, + /// e.g. `SIGNED INTEGER` + /// + /// [MySQL CAST]: https://dev.mysql.com/doc/refman/8.4/en/cast-functions.html + SignedInteger, + /// Signed integer as used in [MySQL CAST] target types, without optional `INTEGER` suffix, + /// e.g. `SIGNED` + /// + /// [MySQL CAST]: https://dev.mysql.com/doc/refman/8.4/en/cast-functions.html + Unsigned, + /// Unsigned integer as used in [MySQL CAST] target types, with optional `INTEGER` suffix, + /// e.g. `UNSIGNED INTEGER`. 
+ /// + /// [MySQL CAST]: https://dev.mysql.com/doc/refman/8.4/en/cast-functions.html + UnsignedInteger, + /// Float4 is an alias for Real in [PostgreSQL]. + /// + /// [PostgreSQL]: https://www.postgresql.org/docs/current/datatype.html Float4, - /// Floating point in [clickhouse] + /// Floating point in [ClickHouse]. /// - /// [clickhouse]: https://clickhouse.com/docs/en/sql-reference/data-types/float + /// [ClickHouse]: https://clickhouse.com/docs/en/sql-reference/data-types/float Float32, - /// Floating point in [bigquery] + /// Floating point in [BigQuery]. /// - /// [bigquery]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#floating_point_types - /// [clickhouse]: https://clickhouse.com/docs/en/sql-reference/data-types/float + /// [BigQuery]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#floating_point_types + /// [ClickHouse]: https://clickhouse.com/docs/en/sql-reference/data-types/float Float64, - /// Floating point e.g. REAL + /// Floating point, e.g. REAL. Real, - /// Float8 as alias for Double in [postgresql] + /// Float8 is an alias for Double in [PostgreSQL]. /// - /// [postgresql]: https://www.postgresql.org/docs/15/datatype.html + /// [PostgreSQL]: https://www.postgresql.org/docs/current/datatype.html Float8, /// Double - Double, - /// Double PRECISION e.g. [standard], [postgresql] + Double(ExactNumberInfo), + /// Double Precision, see [SQL Standard], [PostgreSQL]. /// - /// [standard]: https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#approximate-numeric-type - /// [postgresql]: https://www.postgresql.org/docs/current/datatype-numeric.html + /// [SQL Standard]: https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#approximate-numeric-type + /// [PostgreSQL]: https://www.postgresql.org/docs/current/datatype-numeric.html DoublePrecision, - /// Bool as alias for Boolean in [postgresql] + /// Bool is an alias for Boolean, see [PostgreSQL]. /// - /// [postgresql]: https://www.postgresql.org/docs/15/datatype.html + /// [PostgreSQL]: https://www.postgresql.org/docs/current/datatype.html Bool, - /// Boolean + /// Boolean type. Boolean, - /// Date + /// Date type. Date, - /// Date32 with the same range as Datetime64 + /// Date32 with the same range as Datetime64. /// /// [1]: https://clickhouse.com/docs/en/sql-reference/data-types/date32 Date32, - /// Time with optional time precision and time zone information e.g. [standard][1]. + /// Time with optional time precision and time zone information, see [SQL Standard][1]. /// /// [1]: https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#datetime-type Time(Option, TimezoneInfo), - /// Datetime with optional time precision e.g. [MySQL][1]. + /// Datetime with optional time precision, see [MySQL][1]. /// /// [1]: https://dev.mysql.com/doc/refman/8.0/en/datetime.html Datetime(Option), - /// Datetime with time precision and optional timezone e.g. [ClickHouse][1]. + /// Datetime with time precision and optional timezone, see [ClickHouse][1]. /// /// [1]: https://clickhouse.com/docs/en/sql-reference/data-types/datetime64 Datetime64(u64, Option), - /// Timestamp with optional time precision and time zone information e.g. [standard][1]. + /// Timestamp with optional time precision and time zone information, see [SQL Standard][1]. /// /// [1]: https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#datetime-type Timestamp(Option, TimezoneInfo), - /// Interval + /// Databricks timestamp without time zone. See [1]. 
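
(Editorial aside, not part of the patch: `Double` changes from a unit variant to `Double(ExactNumberInfo)`, so `DOUBLE(p,s)` can round-trip. The expected strings below assume `ExactNumberInfo` renders as `(p,s)`, the same way it does for `NUMERIC`/`DECIMAL`.)

```rust
use sqlparser::ast::{DataType, ExactNumberInfo};

fn main() {
    // DOUBLE may now carry optional precision/scale, mirroring NUMERIC/DECIMAL.
    let plain = DataType::Double(ExactNumberInfo::None);
    let sized = DataType::Double(ExactNumberInfo::PrecisionAndScale(10, 2));
    assert_eq!(plain.to_string(), "DOUBLE");
    assert_eq!(sized.to_string(), "DOUBLE(10,2)"); // assumes "(10,2)" formatting
}
```
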
+ /// + /// [1]: https://docs.databricks.com/aws/en/sql/language-manual/data-types/timestamp-ntz-type + TimestampNtz, + /// Interval type. Interval, - /// JSON type + /// JSON type. JSON, - /// Binary JSON type + /// Binary JSON type. JSONB, - /// Regclass used in postgresql serial + /// Regclass used in [PostgreSQL] serial. + /// + /// [PostgreSQL]: https://www.postgresql.org/docs/current/datatype.html Regclass, - /// Text + /// Text type. Text, + /// [MySQL] text with up to 2**8 bytes. + /// + /// [MySQL]: https://dev.mysql.com/doc/refman/9.1/en/blob.html + TinyText, + /// [MySQL] text with up to 2**24 bytes. + /// + /// [MySQL]: https://dev.mysql.com/doc/refman/9.1/en/blob.html + MediumText, + /// [MySQL] text with up to 2**32 bytes. + /// + /// [MySQL]: https://dev.mysql.com/doc/refman/9.1/en/blob.html + LongText, /// String with optional length. String(Option), /// A fixed-length string e.g [ClickHouse][1]. /// /// [1]: https://clickhouse.com/docs/en/sql-reference/data-types/fixedstring FixedString(u64), - /// Bytea + /// Bytea type, see [PostgreSQL]. + /// + /// [PostgreSQL]: https://www.postgresql.org/docs/current/datatype-bit.html Bytea, - /// Custom type such as enums + /// Bit string, see [PostgreSQL], [MySQL], or [MSSQL]. + /// + /// [PostgreSQL]: https://www.postgresql.org/docs/current/datatype-bit.html + /// [MySQL]: https://dev.mysql.com/doc/refman/9.1/en/bit-type.html + /// [MSSQL]: https://learn.microsoft.com/en-us/sql/t-sql/data-types/bit-transact-sql?view=sql-server-ver16 + Bit(Option), + /// `BIT VARYING(n)`: Variable-length bit string, see [PostgreSQL]. + /// + /// [PostgreSQL]: https://www.postgresql.org/docs/current/datatype-bit.html + BitVarying(Option), + /// `VARBIT(n)`: Variable-length bit string. [PostgreSQL] alias for `BIT VARYING`. + /// + /// [PostgreSQL]: https://www.postgresql.org/docs/current/datatype.html + VarBit(Option), + /// Custom types. Custom(ObjectName, Vec), - /// Arrays + /// Arrays. Array(ArrayElemTypeDef), - /// Map + /// Map, see [ClickHouse]. /// - /// [clickhouse]: https://clickhouse.com/docs/en/sql-reference/data-types/map + /// [ClickHouse]: https://clickhouse.com/docs/en/sql-reference/data-types/map Map(Box, Box), - /// Tuple + /// Tuple, see [ClickHouse]. /// - /// [clickhouse]: https://clickhouse.com/docs/en/sql-reference/data-types/tuple + /// [ClickHouse]: https://clickhouse.com/docs/en/sql-reference/data-types/tuple Tuple(Vec), - /// Nested + /// Nested type, see [ClickHouse]. /// - /// [clickhouse]: https://clickhouse.com/docs/en/sql-reference/data-types/nested-data-structures/nested + /// [ClickHouse]: https://clickhouse.com/docs/en/sql-reference/data-types/nested-data-structures/nested Nested(Vec), - /// Enums - Enum(Vec), - /// Set + /// Enum type. + Enum(Vec, Option), + /// Set type. Set(Vec), - /// Struct + /// Struct type, see [Hive], [BigQuery]. /// - /// [hive]: https://docs.cloudera.com/cdw-runtime/cloud/impala-sql-reference/topics/impala-struct.html - /// [bigquery]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#struct_type + /// [Hive]: https://docs.cloudera.com/cdw-runtime/cloud/impala-sql-reference/topics/impala-struct.html + /// [BigQuery]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#struct_type Struct(Vec, StructBracketKind), - /// Union + /// Union type, see [DuckDB]. 
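
(Editorial aside, not part of the patch: `Enum` now takes `Vec<EnumMember>` plus an optional bit width so ClickHouse `ENUM8`/`ENUM16` and `'name' = value` members can be represented. A minimal sketch using plain names, assuming the new `Display` logic shown further down in this diff.)

```rust
use sqlparser::ast::{DataType, EnumMember};

fn main() {
    // The optional second field is the ClickHouse bit width (ENUM8 / ENUM16);
    // `None` keeps the plain `ENUM(...)` rendering.
    let ty = DataType::Enum(
        vec![
            EnumMember::Name("active".to_string()),
            EnumMember::Name("inactive".to_string()),
        ],
        Some(8),
    );
    assert_eq!(ty.to_string(), "ENUM8('active', 'inactive')");
}
```
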
/// - /// [duckdb]: https://duckdb.org/docs/sql/data_types/union.html + /// [DuckDB]: https://duckdb.org/docs/sql/data_types/union.html Union(Vec), /// Nullable - special marker NULL represents in ClickHouse as a data type. /// - /// [clickhouse]: https://clickhouse.com/docs/en/sql-reference/data-types/nullable + /// [ClickHouse]: https://clickhouse.com/docs/en/sql-reference/data-types/nullable Nullable(Box), /// LowCardinality - changes the internal representation of other data types to be dictionary-encoded. /// - /// [clickhouse]: https://clickhouse.com/docs/en/sql-reference/data-types/lowcardinality + /// [ClickHouse]: https://clickhouse.com/docs/en/sql-reference/data-types/lowcardinality LowCardinality(Box), /// No type specified - only used with /// [`SQLiteDialect`](crate::dialect::SQLiteDialect), from statements such /// as `CREATE TABLE t1 (a)`. Unspecified, - /// Trigger data type, returned by functions associated with triggers + /// Trigger data type, returned by functions associated with triggers, see [PostgreSQL]. /// - /// [postgresql]: https://www.postgresql.org/docs/current/plpgsql-trigger.html + /// [PostgreSQL]: https://www.postgresql.org/docs/current/plpgsql-trigger.html Trigger, + /// Any data type, used in BigQuery UDF definitions for templated parameters, see [BigQuery]. + /// + /// [BigQuery]: https://cloud.google.com/bigquery/docs/user-defined-functions#templated-sql-udf-parameters + AnyType, + /// Geometric type, see [PostgreSQL]. + /// + /// [PostgreSQL]: https://www.postgresql.org/docs/9.5/functions-geometry.html + GeometricType(GeometricTypeKind), } impl fmt::Display for DataType { @@ -338,7 +446,6 @@ impl fmt::Display for DataType { DataType::CharacterVarying(size) => { format_character_string_type(f, "CHARACTER VARYING", size) } - DataType::CharVarying(size) => format_character_string_type(f, "CHAR VARYING", size), DataType::Varchar(size) => format_character_string_type(f, "VARCHAR", size), DataType::Nvarchar(size) => format_character_string_type(f, "NVARCHAR", size), @@ -351,10 +458,11 @@ impl fmt::Display for DataType { } DataType::Clob(size) => format_type_with_optional_length(f, "CLOB", size, false), DataType::Binary(size) => format_type_with_optional_length(f, "BINARY", size, false), - DataType::Varbinary(size) => { - format_type_with_optional_length(f, "VARBINARY", size, false) - } + DataType::Varbinary(size) => format_varbinary_type(f, "VARBINARY", size), DataType::Blob(size) => format_type_with_optional_length(f, "BLOB", size, false), + DataType::TinyBlob => write!(f, "TINYBLOB"), + DataType::MediumBlob => write!(f, "MEDIUMBLOB"), + DataType::LongBlob => write!(f, "LONGBLOB"), DataType::Bytes(size) => format_type_with_optional_length(f, "BYTES", size, false), DataType::Numeric(info) => { write!(f, "NUMERIC{info}") @@ -371,29 +479,29 @@ impl fmt::Display for DataType { DataType::TinyInt(zerofill) => { format_type_with_optional_length(f, "TINYINT", zerofill, false) } - DataType::UnsignedTinyInt(zerofill) => { + DataType::TinyIntUnsigned(zerofill) => { format_type_with_optional_length(f, "TINYINT", zerofill, true) } DataType::Int2(zerofill) => { format_type_with_optional_length(f, "INT2", zerofill, false) } - DataType::UnsignedInt2(zerofill) => { + DataType::Int2Unsigned(zerofill) => { format_type_with_optional_length(f, "INT2", zerofill, true) } DataType::SmallInt(zerofill) => { format_type_with_optional_length(f, "SMALLINT", zerofill, false) } - DataType::UnsignedSmallInt(zerofill) => { + DataType::SmallIntUnsigned(zerofill) => { 
format_type_with_optional_length(f, "SMALLINT", zerofill, true) } DataType::MediumInt(zerofill) => { format_type_with_optional_length(f, "MEDIUMINT", zerofill, false) } - DataType::UnsignedMediumInt(zerofill) => { + DataType::MediumIntUnsigned(zerofill) => { format_type_with_optional_length(f, "MEDIUMINT", zerofill, true) } DataType::Int(zerofill) => format_type_with_optional_length(f, "INT", zerofill, false), - DataType::UnsignedInt(zerofill) => { + DataType::IntUnsigned(zerofill) => { format_type_with_optional_length(f, "INT", zerofill, true) } DataType::Int4(zerofill) => { @@ -417,24 +525,39 @@ impl fmt::Display for DataType { DataType::Int256 => { write!(f, "Int256") } - DataType::UnsignedInt4(zerofill) => { + DataType::HugeInt => { + write!(f, "HUGEINT") + } + DataType::Int4Unsigned(zerofill) => { format_type_with_optional_length(f, "INT4", zerofill, true) } DataType::Integer(zerofill) => { format_type_with_optional_length(f, "INTEGER", zerofill, false) } - DataType::UnsignedInteger(zerofill) => { + DataType::IntegerUnsigned(zerofill) => { format_type_with_optional_length(f, "INTEGER", zerofill, true) } DataType::BigInt(zerofill) => { format_type_with_optional_length(f, "BIGINT", zerofill, false) } - DataType::UnsignedBigInt(zerofill) => { + DataType::BigIntUnsigned(zerofill) => { format_type_with_optional_length(f, "BIGINT", zerofill, true) } - DataType::UnsignedInt8(zerofill) => { + DataType::Int8Unsigned(zerofill) => { format_type_with_optional_length(f, "INT8", zerofill, true) } + DataType::UTinyInt => { + write!(f, "UTINYINT") + } + DataType::USmallInt => { + write!(f, "USMALLINT") + } + DataType::UBigInt => { + write!(f, "UBIGINT") + } + DataType::UHugeInt => { + write!(f, "UHUGEINT") + } DataType::UInt8 => { write!(f, "UInt8") } @@ -453,11 +576,23 @@ impl fmt::Display for DataType { DataType::UInt256 => { write!(f, "UInt256") } + DataType::Signed => { + write!(f, "SIGNED") + } + DataType::SignedInteger => { + write!(f, "SIGNED INTEGER") + } + DataType::Unsigned => { + write!(f, "UNSIGNED") + } + DataType::UnsignedInteger => { + write!(f, "UNSIGNED INTEGER") + } DataType::Real => write!(f, "REAL"), DataType::Float4 => write!(f, "FLOAT4"), DataType::Float32 => write!(f, "Float32"), DataType::Float64 => write!(f, "FLOAT64"), - DataType::Double => write!(f, "DOUBLE"), + DataType::Double(info) => write!(f, "DOUBLE{info}"), DataType::Float8 => write!(f, "FLOAT8"), DataType::DoublePrecision => write!(f, "DOUBLE PRECISION"), DataType::Bool => write!(f, "BOOL"), @@ -473,6 +608,7 @@ impl fmt::Display for DataType { DataType::Timestamp(precision, timezone_info) => { format_datetime_precision_and_tz(f, "TIMESTAMP", precision, timezone_info) } + DataType::TimestampNtz => write!(f, "TIMESTAMP_NTZ"), DataType::Datetime64(precision, timezone) => { format_clickhouse_datetime_precision_and_timezone( f, @@ -486,8 +622,16 @@ impl fmt::Display for DataType { DataType::JSONB => write!(f, "JSONB"), DataType::Regclass => write!(f, "REGCLASS"), DataType::Text => write!(f, "TEXT"), + DataType::TinyText => write!(f, "TINYTEXT"), + DataType::MediumText => write!(f, "MEDIUMTEXT"), + DataType::LongText => write!(f, "LONGTEXT"), DataType::String(size) => format_type_with_optional_length(f, "STRING", size, false), DataType::Bytea => write!(f, "BYTEA"), + DataType::Bit(size) => format_type_with_optional_length(f, "BIT", size, false), + DataType::BitVarying(size) => { + format_type_with_optional_length(f, "BIT VARYING", size, false) + } + DataType::VarBit(size) => format_type_with_optional_length(f, "VARBIT", 
size, false), DataType::Array(ty) => match ty { ArrayElemTypeDef::None => write!(f, "ARRAY"), ArrayElemTypeDef::SquareBracket(t, None) => write!(f, "{t}[]"), @@ -502,13 +646,24 @@ impl fmt::Display for DataType { write!(f, "{}({})", ty, modifiers.join(", ")) } } - DataType::Enum(vals) => { - write!(f, "ENUM(")?; + DataType::Enum(vals, bits) => { + match bits { + Some(bits) => write!(f, "ENUM{}", bits), + None => write!(f, "ENUM"), + }?; + write!(f, "(")?; for (i, v) in vals.iter().enumerate() { if i != 0 { write!(f, ", ")?; } - write!(f, "'{}'", escape_single_quote_string(v))?; + match v { + EnumMember::Name(name) => { + write!(f, "'{}'", escape_single_quote_string(name))? + } + EnumMember::NamedValue(name, value) => { + write!(f, "'{}' = {}", escape_single_quote_string(name), value)? + } + } } write!(f, ")") } @@ -560,6 +715,9 @@ impl fmt::Display for DataType { } DataType::Unspecified => Ok(()), DataType::Trigger => write!(f, "TRIGGER"), + DataType::AnyType => write!(f, "ANY TYPE"), + DataType::Table(fields) => write!(f, "TABLE({})", display_comma_separated(fields)), + DataType::GeometricType(kind) => write!(f, "{}", kind), } } } @@ -592,6 +750,18 @@ fn format_character_string_type( Ok(()) } +fn format_varbinary_type( + f: &mut fmt::Formatter, + sql_type: &str, + size: &Option, +) -> fmt::Result { + write!(f, "{sql_type}")?; + if let Some(size) = size { + write!(f, "({size})")?; + } + Ok(()) +} + fn format_datetime_precision_and_tz( f: &mut fmt::Formatter, sql_type: &'static str, @@ -649,19 +819,19 @@ pub enum StructBracketKind { #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] pub enum TimezoneInfo { - /// No information about time zone. E.g., TIMESTAMP + /// No information about time zone, e.g. TIMESTAMP None, - /// Temporal type 'WITH TIME ZONE'. E.g., TIMESTAMP WITH TIME ZONE, [standard], [Oracle] + /// Temporal type 'WITH TIME ZONE', e.g. TIMESTAMP WITH TIME ZONE, [SQL Standard], [Oracle] /// - /// [standard]: https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#datetime-type + /// [SQL Standard]: https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#datetime-type /// [Oracle]: https://docs.oracle.com/en/database/oracle/oracle-database/12.2/nlspg/datetime-data-types-and-time-zone-support.html#GUID-3F1C388E-C651-43D5-ADBC-1A49E5C2CA05 WithTimeZone, - /// Temporal type 'WITHOUT TIME ZONE'. E.g., TIME WITHOUT TIME ZONE, [standard], [Postgresql] + /// Temporal type 'WITHOUT TIME ZONE', e.g. TIME WITHOUT TIME ZONE, [SQL Standard], [Postgresql] /// - /// [standard]: https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#datetime-type + /// [SQL Standard]: https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#datetime-type /// [Postgresql]: https://www.postgresql.org/docs/current/datatype-datetime.html WithoutTimeZone, - /// Postgresql specific `WITH TIME ZONE` formatting, for both TIME and TIMESTAMP. E.g., TIMETZ, [Postgresql] + /// Postgresql specific `WITH TIME ZONE` formatting, for both TIME and TIMESTAMP, e.g. TIMETZ, [Postgresql] /// /// [Postgresql]: https://www.postgresql.org/docs/current/datatype-datetime.html Tz, @@ -690,18 +860,18 @@ impl fmt::Display for TimezoneInfo { } /// Additional information for `NUMERIC`, `DECIMAL`, and `DEC` data types -/// following the 2016 [standard]. +/// following the 2016 [SQL Standard]. 
/// -/// [standard]: https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#exact-numeric-type +/// [SQL Standard]: https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#exact-numeric-type #[derive(Debug, Copy, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] pub enum ExactNumberInfo { - /// No additional information e.g. `DECIMAL` + /// No additional information, e.g. `DECIMAL` None, - /// Only precision information e.g. `DECIMAL(10)` + /// Only precision information, e.g. `DECIMAL(10)` Precision(u64), - /// Precision and scale information e.g. `DECIMAL(10,2)` + /// Precision and scale information, e.g. `DECIMAL(10,2)` PrecisionAndScale(u64, u64), } @@ -755,7 +925,7 @@ impl fmt::Display for CharacterLength { } } -/// Possible units for characters, initially based on 2016 ANSI [standard][1]. +/// Possible units for characters, initially based on 2016 ANSI [SQL Standard][1]. /// /// [1]: https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#char-length-units #[derive(Debug, Copy, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] @@ -781,6 +951,32 @@ impl fmt::Display for CharLengthUnits { } } +#[derive(Debug, Copy, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub enum BinaryLength { + IntegerLength { + /// Default (if VARYING) + length: u64, + }, + /// VARBINARY(MAX) used in T-SQL (Microsoft SQL Server) + Max, +} + +impl fmt::Display for BinaryLength { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + BinaryLength::IntegerLength { length } => { + write!(f, "{}", length)?; + } + BinaryLength::Max => { + write!(f, "MAX")?; + } + } + Ok(()) + } +} + /// Represents the data type of the elements in an array (if any) as well as /// the syntax used to declare the array. /// @@ -798,3 +994,34 @@ pub enum ArrayElemTypeDef { /// `Array(Int64)` Parenthesis(Box), } + +/// Represents different types of geometric shapes which are commonly used in +/// PostgreSQL/Redshift for spatial operations and geometry-related computations. 
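
(Editorial aside, not part of the patch: the new `GeometricTypeKind` wraps PostgreSQL/Redshift geometric types. A small sketch, assuming the kind and `DataType::GeometricType` are re-exported from `sqlparser::ast` and print the lowercase names from the `Display` impl in this diff.)

```rust
use sqlparser::ast::{DataType, GeometricTypeKind};

fn main() {
    // Geometric kinds render with PostgreSQL's lowercase type names.
    assert_eq!(GeometricTypeKind::LineSegment.to_string(), "lseg");
    assert_eq!(
        DataType::GeometricType(GeometricTypeKind::GeometricBox).to_string(),
        "box"
    );
}
```
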
+/// +/// [PostgreSQL]: https://www.postgresql.org/docs/9.5/functions-geometry.html +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub enum GeometricTypeKind { + Point, + Line, + LineSegment, + GeometricBox, + GeometricPath, + Polygon, + Circle, +} + +impl fmt::Display for GeometricTypeKind { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + GeometricTypeKind::Point => write!(f, "point"), + GeometricTypeKind::Line => write!(f, "line"), + GeometricTypeKind::LineSegment => write!(f, "lseg"), + GeometricTypeKind::GeometricBox => write!(f, "box"), + GeometricTypeKind::GeometricPath => write!(f, "path"), + GeometricTypeKind::Polygon => write!(f, "polygon"), + GeometricTypeKind::Circle => write!(f, "circle"), + } + } +} diff --git a/src/ast/dcl.rs b/src/ast/dcl.rs index d47476ffa..735ab0cce 100644 --- a/src/ast/dcl.rs +++ b/src/ast/dcl.rs @@ -28,7 +28,7 @@ use serde::{Deserialize, Serialize}; #[cfg(feature = "visitor")] use sqlparser_derive::{Visit, VisitMut}; -use super::{Expr, Ident, Password}; +use super::{display_comma_separated, Expr, Ident, Password}; use crate::ast::{display_separated, ObjectName}; /// An option in `ROLE` statement. @@ -204,12 +204,14 @@ impl fmt::Display for AlterRoleOperation { #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] pub enum Use { - Catalog(ObjectName), // e.g. `USE CATALOG foo.bar` - Schema(ObjectName), // e.g. `USE SCHEMA foo.bar` - Database(ObjectName), // e.g. `USE DATABASE foo.bar` - Warehouse(ObjectName), // e.g. `USE WAREHOUSE foo.bar` - Object(ObjectName), // e.g. `USE foo.bar` - Default, // e.g. `USE DEFAULT` + Catalog(ObjectName), // e.g. `USE CATALOG foo.bar` + Schema(ObjectName), // e.g. `USE SCHEMA foo.bar` + Database(ObjectName), // e.g. `USE DATABASE foo.bar` + Warehouse(ObjectName), // e.g. `USE WAREHOUSE foo.bar` + Role(ObjectName), // e.g. `USE ROLE PUBLIC` + SecondaryRoles(SecondaryRoles), // e.g. `USE SECONDARY ROLES ALL` + Object(ObjectName), // e.g. `USE foo.bar` + Default, // e.g. 
`USE DEFAULT` } impl fmt::Display for Use { @@ -220,8 +222,33 @@ impl fmt::Display for Use { Use::Schema(name) => write!(f, "SCHEMA {}", name), Use::Database(name) => write!(f, "DATABASE {}", name), Use::Warehouse(name) => write!(f, "WAREHOUSE {}", name), + Use::Role(name) => write!(f, "ROLE {}", name), + Use::SecondaryRoles(secondary_roles) => { + write!(f, "SECONDARY ROLES {}", secondary_roles) + } Use::Object(name) => write!(f, "{}", name), Use::Default => write!(f, "DEFAULT"), } } } + +/// Snowflake `SECONDARY ROLES` USE variant +/// See: +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub enum SecondaryRoles { + All, + None, + List(Vec), +} + +impl fmt::Display for SecondaryRoles { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + SecondaryRoles::All => write!(f, "ALL"), + SecondaryRoles::None => write!(f, "NONE"), + SecondaryRoles::List(roles) => write!(f, "{}", display_comma_separated(roles)), + } + } +} diff --git a/src/ast/ddl.rs b/src/ast/ddl.rs index 21a716d25..a457a0655 100644 --- a/src/ast/ddl.rs +++ b/src/ast/ddl.rs @@ -30,8 +30,11 @@ use sqlparser_derive::{Visit, VisitMut}; use crate::ast::value::escape_single_quote_string; use crate::ast::{ - display_comma_separated, display_separated, DataType, Expr, Ident, MySQLColumnPosition, - ObjectName, OrderByExpr, ProjectionSelect, SequenceOptions, SqlOption, Tag, Value, + display_comma_separated, display_separated, CommentDef, CreateFunctionBody, + CreateFunctionUsing, DataType, Expr, FunctionBehavior, FunctionCalledOnNull, + FunctionDeterminismSpecifier, FunctionParallel, Ident, MySQLColumnPosition, ObjectName, + OperateFunctionArg, OrderByExpr, ProjectionSelect, SequenceOptions, SqlOption, Tag, Value, + ValueWithSpan, }; use crate::keywords::Keyword; use crate::tokenizer::Token; @@ -63,13 +66,14 @@ pub enum AlterTableOperation { name: Ident, select: ProjectionSelect, }, - /// `DROP PROJECTION [IF EXISTS] name` /// /// Note: this is a ClickHouse-specific operation. /// Please refer to [ClickHouse](https://clickhouse.com/docs/en/sql-reference/statements/alter/projection#drop-projection) - DropProjection { if_exists: bool, name: Ident }, - + DropProjection { + if_exists: bool, + name: Ident, + }, /// `MATERIALIZE PROJECTION [IF EXISTS] name [IN PARTITION partition_name]` /// /// Note: this is a ClickHouse-specific operation. @@ -79,7 +83,6 @@ pub enum AlterTableOperation { name: Ident, partition: Option, }, - /// `CLEAR PROJECTION [IF EXISTS] name [IN PARTITION partition_name]` /// /// Note: this is a ClickHouse-specific operation. @@ -89,7 +92,6 @@ pub enum AlterTableOperation { name: Ident, partition: Option, }, - /// `DISABLE ROW LEVEL SECURITY` /// /// Note: this is a PostgreSQL-specific operation. @@ -97,22 +99,26 @@ pub enum AlterTableOperation { /// `DISABLE RULE rewrite_rule_name` /// /// Note: this is a PostgreSQL-specific operation. - DisableRule { name: Ident }, + DisableRule { + name: Ident, + }, /// `DISABLE TRIGGER [ trigger_name | ALL | USER ]` /// /// Note: this is a PostgreSQL-specific operation. 
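
(Editorial aside, not part of the patch: the new Snowflake `SECONDARY ROLES` support adds a `SecondaryRoles` enum alongside `Use::Role`/`Use::SecondaryRoles`. The sketch below exercises only `SecondaryRoles` itself and assumes it is re-exported from `sqlparser::ast` like the other dcl types.)

```rust
use sqlparser::ast::{Ident, SecondaryRoles};

fn main() {
    // SECONDARY ROLES accepts ALL, NONE, or an explicit comma-separated list.
    assert_eq!(SecondaryRoles::All.to_string(), "ALL");
    let listed = SecondaryRoles::List(vec![Ident::new("r1"), Ident::new("r2")]);
    assert_eq!(listed.to_string(), "r1, r2");
}
```
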
- DisableTrigger { name: Ident }, + DisableTrigger { + name: Ident, + }, /// `DROP CONSTRAINT [ IF EXISTS ] ` DropConstraint { if_exists: bool, name: Ident, - cascade: bool, + drop_behavior: Option, }, /// `DROP [ COLUMN ] [ IF EXISTS ] [ CASCADE ]` DropColumn { column_name: Ident, if_exists: bool, - cascade: bool, + drop_behavior: Option, }, /// `ATTACH PART|PARTITION ` /// Note: this is a ClickHouse-specific operation, please refer to @@ -145,24 +151,42 @@ pub enum AlterTableOperation { }, /// `DROP PRIMARY KEY` /// - /// Note: this is a MySQL-specific operation. + /// Note: this is a [MySQL]-specific operation. + /// + /// [MySQL]: https://dev.mysql.com/doc/refman/8.4/en/alter-table.html DropPrimaryKey, + /// `DROP FOREIGN KEY ` + /// + /// Note: this is a [MySQL]-specific operation. + /// + /// [MySQL]: https://dev.mysql.com/doc/refman/8.4/en/alter-table.html + DropForeignKey { + name: Ident, + }, /// `ENABLE ALWAYS RULE rewrite_rule_name` /// /// Note: this is a PostgreSQL-specific operation. - EnableAlwaysRule { name: Ident }, + EnableAlwaysRule { + name: Ident, + }, /// `ENABLE ALWAYS TRIGGER trigger_name` /// /// Note: this is a PostgreSQL-specific operation. - EnableAlwaysTrigger { name: Ident }, + EnableAlwaysTrigger { + name: Ident, + }, /// `ENABLE REPLICA RULE rewrite_rule_name` /// /// Note: this is a PostgreSQL-specific operation. - EnableReplicaRule { name: Ident }, + EnableReplicaRule { + name: Ident, + }, /// `ENABLE REPLICA TRIGGER trigger_name` /// /// Note: this is a PostgreSQL-specific operation. - EnableReplicaTrigger { name: Ident }, + EnableReplicaTrigger { + name: Ident, + }, /// `ENABLE ROW LEVEL SECURITY` /// /// Note: this is a PostgreSQL-specific operation. @@ -170,11 +194,15 @@ pub enum AlterTableOperation { /// `ENABLE RULE rewrite_rule_name` /// /// Note: this is a PostgreSQL-specific operation. - EnableRule { name: Ident }, + EnableRule { + name: Ident, + }, /// `ENABLE TRIGGER [ trigger_name | ALL | USER ]` /// /// Note: this is a PostgreSQL-specific operation. - EnableTrigger { name: Ident }, + EnableTrigger { + name: Ident, + }, /// `RENAME TO PARTITION (partition=val)` RenamePartitions { old_partitions: Vec, @@ -195,7 +223,9 @@ pub enum AlterTableOperation { new_column_name: Ident, }, /// `RENAME TO ` - RenameTable { table_name: ObjectName }, + RenameTable { + table_name: ObjectName, + }, // CHANGE [ COLUMN ] [ ] ChangeColumn { old_name: Ident, @@ -216,7 +246,10 @@ pub enum AlterTableOperation { /// `RENAME CONSTRAINT TO ` /// /// Note: this is a PostgreSQL-specific operation. - RenameConstraint { old_name: Ident, new_name: Ident }, + RenameConstraint { + old_name: Ident, + new_name: Ident, + }, /// `ALTER [ COLUMN ]` AlterColumn { column_name: Ident, @@ -225,14 +258,55 @@ pub enum AlterTableOperation { /// 'SWAP WITH ' /// /// Note: this is Snowflake specific - SwapWith { table_name: ObjectName }, + SwapWith { + table_name: ObjectName, + }, /// 'SET TBLPROPERTIES ( { property_key [ = ] property_val } [, ...] )' - SetTblProperties { table_properties: Vec }, - + SetTblProperties { + table_properties: Vec, + }, /// `OWNER TO { | CURRENT_ROLE | CURRENT_USER | SESSION_USER }` /// /// Note: this is PostgreSQL-specific - OwnerTo { new_owner: Owner }, + OwnerTo { + new_owner: Owner, + }, + /// Snowflake table clustering options + /// + ClusterBy { + exprs: Vec, + }, + DropClusteringKey, + SuspendRecluster, + ResumeRecluster, + /// `ALGORITHM [=] { DEFAULT | INSTANT | INPLACE | COPY }` + /// + /// [MySQL]-specific table alter algorithm. 
+ /// + /// [MySQL]: https://dev.mysql.com/doc/refman/8.4/en/alter-table.html + Algorithm { + equals: bool, + algorithm: AlterTableAlgorithm, + }, + + /// `LOCK [=] { DEFAULT | NONE | SHARED | EXCLUSIVE }` + /// + /// [MySQL]-specific table alter lock. + /// + /// [MySQL]: https://dev.mysql.com/doc/refman/8.4/en/alter-table.html + Lock { + equals: bool, + lock: AlterTableLock, + }, + /// `AUTO_INCREMENT [=] ` + /// + /// [MySQL]-specific table option for raising current auto increment value. + /// + /// [MySQL]: https://dev.mysql.com/doc/refman/8.4/en/alter-table.html + AutoIncrement { + equals: bool, + value: ValueWithSpan, + }, } /// An `ALTER Policy` (`Statement::AlterPolicy`) operation @@ -278,6 +352,54 @@ impl fmt::Display for AlterPolicyOperation { } } +/// [MySQL] `ALTER TABLE` algorithm. +/// +/// [MySQL]: https://dev.mysql.com/doc/refman/8.4/en/alter-table.html +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub enum AlterTableAlgorithm { + Default, + Instant, + Inplace, + Copy, +} + +impl fmt::Display for AlterTableAlgorithm { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.write_str(match self { + Self::Default => "DEFAULT", + Self::Instant => "INSTANT", + Self::Inplace => "INPLACE", + Self::Copy => "COPY", + }) + } +} + +/// [MySQL] `ALTER TABLE` lock. +/// +/// [MySQL]: https://dev.mysql.com/doc/refman/8.4/en/alter-table.html +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub enum AlterTableLock { + Default, + None, + Shared, + Exclusive, +} + +impl fmt::Display for AlterTableLock { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.write_str(match self { + Self::Default => "DEFAULT", + Self::None => "NONE", + Self::Shared => "SHARED", + Self::Exclusive => "EXCLUSIVE", + }) + } +} + #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] @@ -299,6 +421,23 @@ impl fmt::Display for Owner { } } +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub enum AlterConnectorOwner { + User(Ident), + Role(Ident), +} + +impl fmt::Display for AlterConnectorOwner { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + AlterConnectorOwner::User(ident) => write!(f, "USER {ident}"), + AlterConnectorOwner::Role(ident) => write!(f, "ROLE {ident}"), + } + } +} + #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] @@ -351,6 +490,14 @@ impl fmt::Display for AlterTableOperation { } write!(f, " {} ({})", name, query) } + AlterTableOperation::Algorithm { equals, algorithm } => { + write!( + f, + "ALGORITHM {}{}", + if *equals { "= " } else { "" }, + algorithm + ) + } AlterTableOperation::DropProjection { if_exists, name } => { write!(f, "DROP PROJECTION")?; if *if_exists { @@ -412,27 +559,36 @@ impl fmt::Display for AlterTableOperation { AlterTableOperation::DropConstraint { if_exists, name, - cascade, + drop_behavior, } => { write!( f, "DROP CONSTRAINT {}{}{}", if *if_exists { "IF EXISTS " } else { 
"" }, name, - if *cascade { " CASCADE" } else { "" }, + match drop_behavior { + None => "", + Some(DropBehavior::Restrict) => " RESTRICT", + Some(DropBehavior::Cascade) => " CASCADE", + } ) } AlterTableOperation::DropPrimaryKey => write!(f, "DROP PRIMARY KEY"), + AlterTableOperation::DropForeignKey { name } => write!(f, "DROP FOREIGN KEY {name}"), AlterTableOperation::DropColumn { column_name, if_exists, - cascade, + drop_behavior, } => write!( f, "DROP COLUMN {}{}{}", if *if_exists { "IF EXISTS " } else { "" }, column_name, - if *cascade { " CASCADE" } else { "" } + match drop_behavior { + None => "", + Some(DropBehavior::Restrict) => " RESTRICT", + Some(DropBehavior::Cascade) => " CASCADE", + } ), AlterTableOperation::AttachPartition { partition } => { write!(f, "ATTACH {partition}") @@ -546,6 +702,33 @@ impl fmt::Display for AlterTableOperation { } Ok(()) } + AlterTableOperation::ClusterBy { exprs } => { + write!(f, "CLUSTER BY ({})", display_comma_separated(exprs))?; + Ok(()) + } + AlterTableOperation::DropClusteringKey => { + write!(f, "DROP CLUSTERING KEY")?; + Ok(()) + } + AlterTableOperation::SuspendRecluster => { + write!(f, "SUSPEND RECLUSTER")?; + Ok(()) + } + AlterTableOperation::ResumeRecluster => { + write!(f, "RESUME RECLUSTER")?; + Ok(()) + } + AlterTableOperation::AutoIncrement { equals, value } => { + write!( + f, + "AUTO_INCREMENT {}{}", + if *equals { "= " } else { "" }, + value + ) + } + AlterTableOperation::Lock { equals, lock } => { + write!(f, "LOCK {}{}", if *equals { "= " } else { "" }, lock) + } } } } @@ -560,6 +743,95 @@ impl fmt::Display for AlterIndexOperation { } } +/// An `ALTER TYPE` statement (`Statement::AlterType`) +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub struct AlterType { + pub name: ObjectName, + pub operation: AlterTypeOperation, +} + +/// An [AlterType] operation +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub enum AlterTypeOperation { + Rename(AlterTypeRename), + AddValue(AlterTypeAddValue), + RenameValue(AlterTypeRenameValue), +} + +/// See [AlterTypeOperation::Rename] +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub struct AlterTypeRename { + pub new_name: Ident, +} + +/// See [AlterTypeOperation::AddValue] +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub struct AlterTypeAddValue { + pub if_not_exists: bool, + pub value: Ident, + pub position: Option, +} + +/// See [AlterTypeAddValue] +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub enum AlterTypeAddValuePosition { + Before(Ident), + After(Ident), +} + +/// See [AlterTypeOperation::RenameValue] +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub struct AlterTypeRenameValue { + pub from: Ident, + pub to: Ident, +} + +impl fmt::Display for 
AlterTypeOperation { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + Self::Rename(AlterTypeRename { new_name }) => { + write!(f, "RENAME TO {new_name}") + } + Self::AddValue(AlterTypeAddValue { + if_not_exists, + value, + position, + }) => { + write!(f, "ADD VALUE")?; + if *if_not_exists { + write!(f, " IF NOT EXISTS")?; + } + write!(f, " {value}")?; + match position { + Some(AlterTypeAddValuePosition::Before(neighbor_value)) => { + write!(f, " BEFORE {neighbor_value}")?; + } + Some(AlterTypeAddValuePosition::After(neighbor_value)) => { + write!(f, " AFTER {neighbor_value}")?; + } + None => {} + }; + Ok(()) + } + Self::RenameValue(AlterTypeRenameValue { from, to }) => { + write!(f, "RENAME VALUE {from} TO {to}") + } + } + } +} + /// An `ALTER COLUMN` (`Statement::AlterTable`) operation #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] @@ -596,7 +868,7 @@ impl fmt::Display for AlterColumnOperation { AlterColumnOperation::SetDefault { value } => { write!(f, "SET DEFAULT {value}") } - AlterColumnOperation::DropDefault {} => { + AlterColumnOperation::DropDefault => { write!(f, "DROP DEFAULT") } AlterColumnOperation::SetDataType { data_type, using } => { @@ -667,6 +939,8 @@ pub enum TableConstraint { columns: Vec, index_options: Vec, characteristics: Option, + /// Optional Postgres nulls handling: `[ NULLS [ NOT ] DISTINCT ]` + nulls_distinct: NullsDistinctOption, }, /// MySQL [definition][1] for `PRIMARY KEY` constraints statements:\ /// * `[CONSTRAINT []] PRIMARY KEY [index_name] [index_type] () ` @@ -775,10 +1049,11 @@ impl fmt::Display for TableConstraint { columns, index_options, characteristics, + nulls_distinct, } => { write!( f, - "{}UNIQUE{index_type_display:>}{}{} ({})", + "{}UNIQUE{nulls_distinct}{index_type_display:>}{}{} ({})", display_constraint_name(name), display_option_spaced(index_name), display_option(" USING ", "", index_type), @@ -827,12 +1102,14 @@ impl fmt::Display for TableConstraint { } => { write!( f, - "{}FOREIGN KEY ({}) REFERENCES {}({})", + "{}FOREIGN KEY ({}) REFERENCES {}", display_constraint_name(name), display_comma_separated(columns), foreign_table, - display_comma_separated(referred_columns), )?; + if !referred_columns.is_empty() { + write!(f, "({})", display_comma_separated(referred_columns))?; + } if let Some(action) = on_delete { write!(f, " ON DELETE {action}")?; } @@ -945,13 +1222,20 @@ impl fmt::Display for KeyOrIndexDisplay { /// [1]: https://dev.mysql.com/doc/refman/8.0/en/create-table.html /// [2]: https://dev.mysql.com/doc/refman/8.0/en/create-index.html /// [3]: https://www.postgresql.org/docs/14/sql-createindex.html -#[derive(Debug, Copy, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] pub enum IndexType { BTree, Hash, - // TODO add Postgresql's possible indexes + GIN, + GiST, + SPGiST, + BRIN, + Bloom, + /// Users may define their own index types, which would + /// not be covered by the above variants. 
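+    ///
+    /// For example (illustrative), `USING my_custom_method` in a `CREATE INDEX`
+    /// statement would map to `IndexType::Custom(Ident::new("my_custom_method"))`.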
+ Custom(Ident), } impl fmt::Display for IndexType { @@ -959,6 +1243,12 @@ impl fmt::Display for IndexType { match self { Self::BTree => write!(f, "BTREE"), Self::Hash => write!(f, "HASH"), + Self::GIN => write!(f, "GIN"), + Self::GiST => write!(f, "GIST"), + Self::SPGiST => write!(f, "SPGIST"), + Self::BRIN => write!(f, "BRIN"), + Self::Bloom => write!(f, "BLOOM"), + Self::Custom(name) => write!(f, "{}", name), } } } @@ -986,6 +1276,31 @@ impl fmt::Display for IndexOption { } } +/// [PostgreSQL] unique index nulls handling option: `[ NULLS [ NOT ] DISTINCT ]` +/// +/// [PostgreSQL]: https://www.postgresql.org/docs/17/sql-altertable.html +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub enum NullsDistinctOption { + /// Not specified + None, + /// NULLS DISTINCT + Distinct, + /// NULLS NOT DISTINCT + NotDistinct, +} + +impl fmt::Display for NullsDistinctOption { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + Self::None => Ok(()), + Self::Distinct => write!(f, " NULLS DISTINCT"), + Self::NotDistinct => write!(f, " NULLS NOT DISTINCT"), + } + } +} + #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] @@ -1007,7 +1322,6 @@ impl fmt::Display for ProcedureParam { pub struct ColumnDef { pub name: Ident, pub data_type: DataType, - pub collation: Option, pub options: Vec, } @@ -1018,9 +1332,6 @@ impl fmt::Display for ColumnDef { } else { write!(f, "{} {}", self.name, self.data_type)?; } - if let Some(collation) = &self.collation { - write!(f, " COLLATE {collation}")?; - } for option in &self.options { write!(f, " {option}")?; } @@ -1327,15 +1638,18 @@ pub enum ColumnOption { /// `DEFAULT ` Default(Expr), - /// ClickHouse supports `MATERIALIZE`, `EPHEMERAL` and `ALIAS` expr to generate default values. + /// `MATERIALIZE ` /// Syntax: `b INT MATERIALIZE (a + 1)` + /// /// [ClickHouse](https://clickhouse.com/docs/en/sql-reference/statements/create/table#default_values) - - /// `MATERIALIZE ` Materialized(Expr), /// `EPHEMERAL []` + /// + /// [ClickHouse](https://clickhouse.com/docs/en/sql-reference/statements/create/table#default_values) Ephemeral(Option), /// `ALIAS ` + /// + /// [ClickHouse](https://clickhouse.com/docs/en/sql-reference/statements/create/table#default_values) Alias(Expr), /// `{ PRIMARY KEY | UNIQUE } []` @@ -1364,6 +1678,7 @@ pub enum ColumnOption { /// - ... 
DialectSpecific(Vec), CharacterSet(ObjectName), + Collation(ObjectName), Comment(String), OnUpdate(Expr), /// `Generated`s are modifiers that follow a column definition in a `CREATE @@ -1463,6 +1778,7 @@ impl fmt::Display for ColumnOption { Check(expr) => write!(f, "CHECK ({expr})"), DialectSpecific(val) => write!(f, "{}", display_separated(val, " ")), CharacterSet(n) => write!(f, "CHARACTER SET {n}"), + Collation(n) => write!(f, "COLLATE {n}"), Comment(v) => write!(f, "COMMENT '{}'", escape_single_quote_string(v)), OnUpdate(expr) => write!(f, "ON UPDATE {expr}"), Generated { @@ -1552,7 +1868,7 @@ pub enum GeneratedExpressionMode { #[must_use] fn display_constraint_name(name: &'_ Option) -> impl fmt::Display + '_ { struct ConstraintName<'a>(&'a Option); - impl<'a> fmt::Display for ConstraintName<'a> { + impl fmt::Display for ConstraintName<'_> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { if let Some(name) = self.0 { write!(f, "CONSTRAINT {name} ")?; @@ -1573,7 +1889,7 @@ fn display_option<'a, T: fmt::Display>( option: &'a Option, ) -> impl fmt::Display + 'a { struct OptionDisplay<'a, T>(&'a str, &'a str, &'a Option); - impl<'a, T: fmt::Display> fmt::Display for OptionDisplay<'a, T> { + impl fmt::Display for OptionDisplay<'_, T> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { if let Some(inner) = self.2 { let (prefix, postfix) = (self.0, self.1); @@ -1698,6 +2014,26 @@ impl fmt::Display for ReferentialAction { } } +/// ` ::= CASCADE | RESTRICT`. +/// +/// Used in `DROP` statements. +#[derive(Debug, Copy, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub enum DropBehavior { + Restrict, + Cascade, +} + +impl fmt::Display for DropBehavior { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.write_str(match self { + DropBehavior::Restrict => "RESTRICT", + DropBehavior::Cascade => "CASCADE", + }) + } +} + /// SQL user defined type definition #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] @@ -1816,3 +2152,241 @@ impl fmt::Display for ClusteredBy { write!(f, " INTO {} BUCKETS", self.num_buckets) } } + +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +/// ```sql +/// CREATE DOMAIN name [ AS ] data_type +/// [ COLLATE collation ] +/// [ DEFAULT expression ] +/// [ domain_constraint [ ... ] ] +/// +/// where domain_constraint is: +/// +/// [ CONSTRAINT constraint_name ] +/// { NOT NULL | NULL | CHECK (expression) } +/// ``` +/// See [PostgreSQL](https://www.postgresql.org/docs/current/sql-createdomain.html) +pub struct CreateDomain { + /// The name of the domain to be created. + pub name: ObjectName, + /// The data type of the domain. + pub data_type: DataType, + /// The collation of the domain. + pub collation: Option, + /// The default value of the domain. + pub default: Option, + /// The constraints of the domain. 
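+    /// For example (illustrative), the `CHECK (VALUE > 0)` constraint in
+    /// `CREATE DOMAIN posint AS INTEGER CHECK (VALUE > 0)`.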
+ pub constraints: Vec, +} + +impl fmt::Display for CreateDomain { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!( + f, + "CREATE DOMAIN {name} AS {data_type}", + name = self.name, + data_type = self.data_type + )?; + if let Some(collation) = &self.collation { + write!(f, " COLLATE {collation}")?; + } + if let Some(default) = &self.default { + write!(f, " DEFAULT {default}")?; + } + if !self.constraints.is_empty() { + write!(f, " {}", display_separated(&self.constraints, " "))?; + } + Ok(()) + } +} + +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub struct CreateFunction { + /// True if this is a `CREATE OR ALTER FUNCTION` statement + /// + /// [MsSql](https://learn.microsoft.com/en-us/sql/t-sql/statements/create-function-transact-sql?view=sql-server-ver16#or-alter) + pub or_alter: bool, + pub or_replace: bool, + pub temporary: bool, + pub if_not_exists: bool, + pub name: ObjectName, + pub args: Option>, + pub return_type: Option, + /// The expression that defines the function. + /// + /// Examples: + /// ```sql + /// AS ((SELECT 1)) + /// AS "console.log();" + /// ``` + pub function_body: Option, + /// Behavior attribute for the function + /// + /// IMMUTABLE | STABLE | VOLATILE + /// + /// [PostgreSQL](https://www.postgresql.org/docs/current/sql-createfunction.html) + pub behavior: Option, + /// CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT + /// + /// [PostgreSQL](https://www.postgresql.org/docs/current/sql-createfunction.html) + pub called_on_null: Option, + /// PARALLEL { UNSAFE | RESTRICTED | SAFE } + /// + /// [PostgreSQL](https://www.postgresql.org/docs/current/sql-createfunction.html) + pub parallel: Option, + /// USING ... (Hive only) + pub using: Option, + /// Language used in a UDF definition. + /// + /// Example: + /// ```sql + /// CREATE FUNCTION foo() LANGUAGE js AS "console.log();" + /// ``` + /// [BigQuery](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#create_a_javascript_udf) + pub language: Option, + /// Determinism keyword used for non-sql UDF definitions. + /// + /// [BigQuery](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#syntax_11) + pub determinism_specifier: Option, + /// List of options for creating the function. + /// + /// [BigQuery](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#syntax_11) + pub options: Option>, + /// Connection resource for a remote function. 
+ /// + /// Example: + /// ```sql + /// CREATE FUNCTION foo() + /// RETURNS FLOAT64 + /// REMOTE WITH CONNECTION us.myconnection + /// ``` + /// [BigQuery](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#create_a_remote_function) + pub remote_connection: Option, +} + +impl fmt::Display for CreateFunction { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!( + f, + "CREATE {or_alter}{or_replace}{temp}FUNCTION {if_not_exists}{name}", + name = self.name, + temp = if self.temporary { "TEMPORARY " } else { "" }, + or_alter = if self.or_alter { "OR ALTER " } else { "" }, + or_replace = if self.or_replace { "OR REPLACE " } else { "" }, + if_not_exists = if self.if_not_exists { + "IF NOT EXISTS " + } else { + "" + }, + )?; + if let Some(args) = &self.args { + write!(f, "({})", display_comma_separated(args))?; + } + if let Some(return_type) = &self.return_type { + write!(f, " RETURNS {return_type}")?; + } + if let Some(determinism_specifier) = &self.determinism_specifier { + write!(f, " {determinism_specifier}")?; + } + if let Some(language) = &self.language { + write!(f, " LANGUAGE {language}")?; + } + if let Some(behavior) = &self.behavior { + write!(f, " {behavior}")?; + } + if let Some(called_on_null) = &self.called_on_null { + write!(f, " {called_on_null}")?; + } + if let Some(parallel) = &self.parallel { + write!(f, " {parallel}")?; + } + if let Some(remote_connection) = &self.remote_connection { + write!(f, " REMOTE WITH CONNECTION {remote_connection}")?; + } + if let Some(CreateFunctionBody::AsBeforeOptions(function_body)) = &self.function_body { + write!(f, " AS {function_body}")?; + } + if let Some(CreateFunctionBody::Return(function_body)) = &self.function_body { + write!(f, " RETURN {function_body}")?; + } + if let Some(using) = &self.using { + write!(f, " {using}")?; + } + if let Some(options) = &self.options { + write!( + f, + " OPTIONS({})", + display_comma_separated(options.as_slice()) + )?; + } + if let Some(CreateFunctionBody::AsAfterOptions(function_body)) = &self.function_body { + write!(f, " AS {function_body}")?; + } + if let Some(CreateFunctionBody::AsBeginEnd(bes)) = &self.function_body { + write!(f, " AS {bes}")?; + } + Ok(()) + } +} + +/// ```sql +/// CREATE CONNECTOR [IF NOT EXISTS] connector_name +/// [TYPE datasource_type] +/// [URL datasource_url] +/// [COMMENT connector_comment] +/// [WITH DCPROPERTIES(property_name=property_value, ...)] +/// ``` +/// +/// [Hive](https://cwiki.apache.org/confluence/pages/viewpage.action?pageId=27362034#LanguageManualDDL-CreateDataConnectorCreateConnector) +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub struct CreateConnector { + pub name: Ident, + pub if_not_exists: bool, + pub connector_type: Option, + pub url: Option, + pub comment: Option, + pub with_dcproperties: Option>, +} + +impl fmt::Display for CreateConnector { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!( + f, + "CREATE CONNECTOR {if_not_exists}{name}", + if_not_exists = if self.if_not_exists { + "IF NOT EXISTS " + } else { + "" + }, + name = self.name, + )?; + + if let Some(connector_type) = &self.connector_type { + write!(f, " TYPE '{connector_type}'")?; + } + + if let Some(url) = &self.url { + write!(f, " URL '{url}'")?; + } + + if let Some(comment) = &self.comment { + write!(f, " COMMENT = '{comment}'")?; + } + + if let Some(with_dcproperties) = 
&self.with_dcproperties { + write!( + f, + " WITH DCPROPERTIES({})", + display_comma_separated(with_dcproperties) + )?; + } + + Ok(()) + } +} diff --git a/src/ast/dml.rs b/src/ast/dml.rs index 2932fafb5..7ed17be9b 100644 --- a/src/ast/dml.rs +++ b/src/ast/dml.rs @@ -32,13 +32,33 @@ use sqlparser_derive::{Visit, VisitMut}; pub use super::ddl::{ColumnDef, TableConstraint}; use super::{ - display_comma_separated, display_separated, ClusteredBy, CommentDef, Expr, FileFormat, - FromTable, HiveDistributionStyle, HiveFormat, HiveIOFormat, HiveRowFormat, Ident, - InsertAliases, MysqlInsertPriority, ObjectName, OnCommit, OnInsert, OneOrManyWithParens, - OrderByExpr, Query, RowAccessPolicy, SelectItem, SqlOption, SqliteOnConflict, TableEngine, - TableWithJoins, Tag, WrappedCollection, + display_comma_separated, display_separated, query::InputFormatClause, Assignment, ClusteredBy, + CommentDef, CreateTableOptions, Expr, FileFormat, FromTable, HiveDistributionStyle, HiveFormat, + HiveIOFormat, HiveRowFormat, Ident, IndexType, InsertAliases, MysqlInsertPriority, ObjectName, + OnCommit, OnInsert, OneOrManyWithParens, OrderByExpr, Query, RowAccessPolicy, SelectItem, + Setting, SqliteOnConflict, StorageSerializationPolicy, TableObject, TableWithJoins, Tag, + WrappedCollection, }; +/// Index column type. +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub struct IndexColumn { + pub column: OrderByExpr, + pub operator_class: Option, +} + +impl Display for IndexColumn { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "{}", self.column)?; + if let Some(operator_class) = &self.operator_class { + write!(f, " {}", operator_class)?; + } + Ok(()) + } +} + /// CREATE INDEX statement. #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] @@ -48,8 +68,8 @@ pub struct CreateIndex { pub name: Option, #[cfg_attr(feature = "visitor", visit(with = "visit_relation"))] pub table_name: ObjectName, - pub using: Option, - pub columns: Vec, + pub using: Option, + pub columns: Vec, pub unique: bool, pub concurrently: bool, pub if_not_exists: bool, @@ -117,6 +137,7 @@ pub struct CreateTable { pub if_not_exists: bool, pub transient: bool, pub volatile: bool, + pub iceberg: bool, /// Table name #[cfg_attr(feature = "visitor", visit(with = "visit_relation"))] pub name: ObjectName, @@ -125,19 +146,17 @@ pub struct CreateTable { pub constraints: Vec, pub hive_distribution: HiveDistributionStyle, pub hive_formats: Option, - pub table_properties: Vec, - pub with_options: Vec, + pub table_options: CreateTableOptions, pub file_format: Option, pub location: Option, pub query: Option>, pub without_rowid: bool, pub like: Option, pub clone: Option, - pub engine: Option, + // For Hive dialect, the table comment is after the column definitions without `=`, + // so the `comment` field is optional and different than the comment field in the general options list. + // [Hive](https://cwiki.apache.org/confluence/display/Hive/LanguageManual+DDL#LanguageManualDDL-CreateTable) pub comment: Option, - pub auto_increment_offset: Option, - pub default_charset: Option, - pub collation: Option, pub on_commit: Option, /// ClickHouse "ON CLUSTER" clause: /// @@ -158,9 +177,11 @@ pub struct CreateTable { /// Hive: Table clustering column list. /// pub clustered_by: Option, - /// BigQuery: Table options list. 
- /// - pub options: Option>, + /// Postgres `INHERITs` clause, which contains the list of tables from which + /// the new table inherits. + /// + /// + pub inherits: Option>, /// SQLite "STRICT" clause. /// if the "STRICT" table-option keyword is added to the end, after the closing ")", /// then strict typing rules apply to that table. @@ -192,6 +213,21 @@ pub struct CreateTable { /// Snowflake "WITH TAG" clause /// pub with_tags: Option>, + /// Snowflake "EXTERNAL_VOLUME" clause for Iceberg tables + /// + pub external_volume: Option, + /// Snowflake "BASE_LOCATION" clause for Iceberg tables + /// + pub base_location: Option, + /// Snowflake "CATALOG" clause for Iceberg tables + /// + pub catalog: Option, + /// Snowflake "CATALOG_SYNC" clause for Iceberg tables + /// + pub catalog_sync: Option, + /// Snowflake "STORAGE_SERIALIZATION_POLICY" clause for Iceberg tables + /// + pub storage_serialization_policy: Option, } impl Display for CreateTable { @@ -205,7 +241,7 @@ impl Display for CreateTable { // `CREATE TABLE t (a INT) AS SELECT a from t2` write!( f, - "CREATE {or_replace}{external}{global}{temporary}{transient}{volatile}TABLE {if_not_exists}{name}", + "CREATE {or_replace}{external}{global}{temporary}{transient}{volatile}{iceberg}TABLE {if_not_exists}{name}", or_replace = if self.or_replace { "OR REPLACE " } else { "" }, external = if self.external { "EXTERNAL " } else { "" }, global = self.global @@ -221,6 +257,8 @@ impl Display for CreateTable { temporary = if self.temporary { "TEMPORARY " } else { "" }, transient = if self.transient { "TRANSIENT " } else { "" }, volatile = if self.volatile { "VOLATILE " } else { "" }, + // Only for Snowflake + iceberg = if self.iceberg { "ICEBERG " } else { "" }, name = self.name, )?; if let Some(on_cluster) = &self.on_cluster { @@ -239,7 +277,7 @@ impl Display for CreateTable { // Hive table comment should be after column definitions, please refer to: // [Hive](https://cwiki.apache.org/confluence/display/Hive/LanguageManual+DDL#LanguageManualDDL-CreateTable) - if let Some(CommentDef::AfterColumnDefsWithoutEq(comment)) = &self.comment { + if let Some(comment) = &self.comment { write!(f, " COMMENT '{comment}'")?; } @@ -332,53 +370,54 @@ impl Display for CreateTable { } write!(f, " LOCATION '{}'", self.location.as_ref().unwrap())?; } - if !self.table_properties.is_empty() { - write!( - f, - " TBLPROPERTIES ({})", - display_comma_separated(&self.table_properties) - )?; - } - if !self.with_options.is_empty() { - write!(f, " WITH ({})", display_comma_separated(&self.with_options))?; - } - if let Some(engine) = &self.engine { - write!(f, " ENGINE={engine}")?; - } - if let Some(comment_def) = &self.comment { - match comment_def { - CommentDef::WithEq(comment) => { - write!(f, " COMMENT = '{comment}'")?; - } - CommentDef::WithoutEq(comment) => { - write!(f, " COMMENT '{comment}'")?; - } - // For CommentDef::AfterColumnDefsWithoutEq will be displayed after column definition - CommentDef::AfterColumnDefsWithoutEq(_) => (), - } - } - if let Some(auto_increment_offset) = self.auto_increment_offset { - write!(f, " AUTO_INCREMENT {auto_increment_offset}")?; + match &self.table_options { + options @ CreateTableOptions::With(_) + | options @ CreateTableOptions::Plain(_) + | options @ CreateTableOptions::TableProperties(_) => write!(f, " {}", options)?, + _ => (), } + if let Some(primary_key) = &self.primary_key { write!(f, " PRIMARY KEY {}", primary_key)?; } if let Some(order_by) = &self.order_by { write!(f, " ORDER BY {}", order_by)?; } + if let Some(inherits) = 
&self.inherits { + write!(f, " INHERITS ({})", display_comma_separated(inherits))?; + } if let Some(partition_by) = self.partition_by.as_ref() { write!(f, " PARTITION BY {partition_by}")?; } if let Some(cluster_by) = self.cluster_by.as_ref() { write!(f, " CLUSTER BY {cluster_by}")?; } + if let options @ CreateTableOptions::Options(_) = &self.table_options { + write!(f, " {}", options)?; + } + if let Some(external_volume) = self.external_volume.as_ref() { + write!(f, " EXTERNAL_VOLUME = '{external_volume}'")?; + } - if let Some(options) = self.options.as_ref() { + if let Some(catalog) = self.catalog.as_ref() { + write!(f, " CATALOG = '{catalog}'")?; + } + + if self.iceberg { + if let Some(base_location) = self.base_location.as_ref() { + write!(f, " BASE_LOCATION = '{base_location}'")?; + } + } + + if let Some(catalog_sync) = self.catalog_sync.as_ref() { + write!(f, " CATALOG_SYNC = '{catalog_sync}'")?; + } + + if let Some(storage_serialization_policy) = self.storage_serialization_policy.as_ref() { write!( f, - " OPTIONS({})", - display_comma_separated(options.as_slice()) + " STORAGE_SERIALIZATION_POLICY = {storage_serialization_policy}" )?; } @@ -432,13 +471,6 @@ impl Display for CreateTable { write!(f, " WITH TAG ({})", display_comma_separated(tag.as_slice()))?; } - if let Some(default_charset) = &self.default_charset { - write!(f, " DEFAULT CHARSET={default_charset}")?; - } - if let Some(collation) = &self.collation { - write!(f, " COLLATE={collation}")?; - } - if self.on_commit.is_some() { let on_commit = match self.on_commit { Some(OnCommit::DeleteRows) => "ON COMMIT DELETE ROWS", @@ -470,8 +502,7 @@ pub struct Insert { /// INTO - optional keyword pub into: bool, /// TABLE - #[cfg_attr(feature = "visitor", visit(with = "visit_relation"))] - pub table_name: ObjectName, + pub table: TableObject, /// table_name as foo (for PostgreSQL) pub table_alias: Option, /// COLUMNS @@ -480,12 +511,15 @@ pub struct Insert { pub overwrite: bool, /// A SQL query that specifies what to insert pub source: Option>, + /// MySQL `INSERT INTO ... SET` + /// See: + pub assignments: Vec, /// partitioned insert (Hive) pub partitioned: Option>, /// Columns defined after PARTITION pub after_columns: Vec, /// whether the insert has the table keyword (Hive) - pub table: bool, + pub has_table_keyword: bool, pub on: Option, /// RETURNING pub returning: Option>, @@ -495,18 +529,31 @@ pub struct Insert { pub priority: Option, /// Only for mysql pub insert_alias: Option, + /// Settings used for ClickHouse. + /// + /// ClickHouse syntax: `INSERT INTO tbl SETTINGS format_template_resultset = '/some/path/resultset.format'` + /// + /// [ClickHouse `INSERT INTO`](https://clickhouse.com/docs/en/sql-reference/statements/insert-into) + pub settings: Option>, + /// Format for `INSERT` statement when not using standard SQL format. Can be e.g. `CSV`, + /// `JSON`, `JSONAsString`, `LineAsString` and more. 
+ /// + /// ClickHouse syntax: `INSERT INTO tbl FORMAT JSONEachRow {"foo": 1, "bar": 2}, {"foo": 3}` + /// + /// [ClickHouse formats JSON insert](https://clickhouse.com/docs/en/interfaces/formats#json-inserting-data) + pub format_clause: Option, } impl Display for Insert { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let table_name = if let Some(alias) = &self.table_alias { - format!("{0} AS {alias}", self.table_name) + format!("{0} AS {alias}", self.table) } else { - self.table_name.to_string() + self.table.to_string() }; - if let Some(action) = self.or { - write!(f, "INSERT OR {action} INTO {table_name} ")?; + if let Some(on_conflict) = self.or { + write!(f, "INSERT {on_conflict} INTO {table_name} ")?; } else { write!( f, @@ -528,7 +575,7 @@ impl Display for Insert { ignore = if self.ignore { " IGNORE" } else { "" }, over = if self.overwrite { " OVERWRITE" } else { "" }, int = if self.into { " INTO" } else { "" }, - tbl = if self.table { " TABLE" } else { "" }, + tbl = if self.has_table_keyword { " TABLE" } else { "" }, )?; } if !self.columns.is_empty() { @@ -543,11 +590,18 @@ impl Display for Insert { write!(f, "({}) ", display_comma_separated(&self.after_columns))?; } - if let Some(source) = &self.source { - write!(f, "{source}")?; + if let Some(settings) = &self.settings { + write!(f, "SETTINGS {} ", display_comma_separated(settings))?; } - if self.source.is_none() && self.columns.is_empty() { + if let Some(source) = &self.source { + write!(f, "{source}")?; + } else if !self.assignments.is_empty() { + write!(f, "SET ")?; + write!(f, "{}", display_comma_separated(&self.assignments))?; + } else if let Some(format_clause) = &self.format_clause { + write!(f, "{format_clause}")?; + } else if self.columns.is_empty() { write!(f, "DEFAULT VALUES")?; } diff --git a/src/ast/helpers/attached_token.rs b/src/ast/helpers/attached_token.rs new file mode 100644 index 000000000..6b930b513 --- /dev/null +++ b/src/ast/helpers/attached_token.rs @@ -0,0 +1,136 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +use core::cmp::{Eq, Ord, Ordering, PartialEq, PartialOrd}; +use core::fmt::{self, Debug, Formatter}; +use core::hash::{Hash, Hasher}; + +use crate::tokenizer::TokenWithSpan; + +#[cfg(feature = "serde")] +use serde::{Deserialize, Serialize}; + +#[cfg(feature = "visitor")] +use sqlparser_derive::{Visit, VisitMut}; + +/// A wrapper over [`TokenWithSpan`]s that ignores the token and source +/// location in comparisons and hashing. +/// +/// This type is used when the token and location is not relevant for semantics, +/// but is still needed for accurate source location tracking, for example, in +/// the nodes in the [ast](crate::ast) module. +/// +/// Note: **All** `AttachedTokens` are equal. 
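+/// Consequently, an AST node can carry its source span via an `AttachedToken`
+/// without that span affecting the node's `PartialEq`, `Ord`, or `Hash` results.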
+/// +/// # Examples +/// +/// Same token, different location are equal +/// ``` +/// # use sqlparser::ast::helpers::attached_token::AttachedToken; +/// # use sqlparser::tokenizer::{Location, Span, Token, TokenWithLocation}; +/// // commas @ line 1, column 10 +/// let tok1 = TokenWithLocation::new( +/// Token::Comma, +/// Span::new(Location::new(1, 10), Location::new(1, 11)), +/// ); +/// // commas @ line 2, column 20 +/// let tok2 = TokenWithLocation::new( +/// Token::Comma, +/// Span::new(Location::new(2, 20), Location::new(2, 21)), +/// ); +/// +/// assert_ne!(tok1, tok2); // token with locations are *not* equal +/// assert_eq!(AttachedToken(tok1), AttachedToken(tok2)); // attached tokens are +/// ``` +/// +/// Different token, different location are equal 🤯 +/// +/// ``` +/// # use sqlparser::ast::helpers::attached_token::AttachedToken; +/// # use sqlparser::tokenizer::{Location, Span, Token, TokenWithLocation}; +/// // commas @ line 1, column 10 +/// let tok1 = TokenWithLocation::new( +/// Token::Comma, +/// Span::new(Location::new(1, 10), Location::new(1, 11)), +/// ); +/// // period @ line 2, column 20 +/// let tok2 = TokenWithLocation::new( +/// Token::Period, +/// Span::new(Location::new(2, 10), Location::new(2, 21)), +/// ); +/// +/// assert_ne!(tok1, tok2); // token with locations are *not* equal +/// assert_eq!(AttachedToken(tok1), AttachedToken(tok2)); // attached tokens are +/// ``` +/// // period @ line 2, column 20 +#[derive(Clone)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub struct AttachedToken(pub TokenWithSpan); + +impl AttachedToken { + /// Return a new Empty AttachedToken + pub fn empty() -> Self { + AttachedToken(TokenWithSpan::new_eof()) + } +} + +// Conditional Implementations +impl Debug for AttachedToken { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + self.0.fmt(f) + } +} + +// Blanket Implementations +impl PartialEq for AttachedToken { + fn eq(&self, _: &Self) -> bool { + true + } +} + +impl Eq for AttachedToken {} + +impl PartialOrd for AttachedToken { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } +} + +impl Ord for AttachedToken { + fn cmp(&self, _: &Self) -> Ordering { + Ordering::Equal + } +} + +impl Hash for AttachedToken { + fn hash(&self, _state: &mut H) { + // Do nothing + } +} + +impl From for AttachedToken { + fn from(value: TokenWithSpan) -> Self { + AttachedToken(value) + } +} + +impl From for TokenWithSpan { + fn from(value: AttachedToken) -> Self { + value.0 + } +} diff --git a/src/ast/helpers/key_value_options.rs b/src/ast/helpers/key_value_options.rs new file mode 100644 index 000000000..06f028dd2 --- /dev/null +++ b/src/ast/helpers/key_value_options.rs @@ -0,0 +1,89 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. 
See the License for the +// specific language governing permissions and limitations +// under the License. + +//! Key-value options for SQL statements. +//! See [this page](https://docs.snowflake.com/en/sql-reference/commands-data-loading) for more details. + +#[cfg(not(feature = "std"))] +use alloc::string::String; +#[cfg(not(feature = "std"))] +use alloc::vec::Vec; +use core::fmt; +use core::fmt::Formatter; + +#[cfg(feature = "serde")] +use serde::{Deserialize, Serialize}; + +#[cfg(feature = "visitor")] +use sqlparser_derive::{Visit, VisitMut}; + +#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub struct KeyValueOptions { + pub options: Vec, +} + +#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub enum KeyValueOptionType { + STRING, + BOOLEAN, + ENUM, + NUMBER, +} + +#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub struct KeyValueOption { + pub option_name: String, + pub option_type: KeyValueOptionType, + pub value: String, +} + +impl fmt::Display for KeyValueOptions { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + if !self.options.is_empty() { + let mut first = false; + for option in &self.options { + if !first { + first = true; + } else { + f.write_str(" ")?; + } + write!(f, "{}", option)?; + } + } + Ok(()) + } +} + +impl fmt::Display for KeyValueOption { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self.option_type { + KeyValueOptionType::STRING => { + write!(f, "{}='{}'", self.option_name, self.value)?; + } + KeyValueOptionType::ENUM | KeyValueOptionType::BOOLEAN | KeyValueOptionType::NUMBER => { + write!(f, "{}={}", self.option_name, self.value)?; + } + } + Ok(()) + } +} diff --git a/src/ast/helpers/mod.rs b/src/ast/helpers/mod.rs index d6924ab88..55831220d 100644 --- a/src/ast/helpers/mod.rs +++ b/src/ast/helpers/mod.rs @@ -14,5 +14,7 @@ // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. +pub mod attached_token; +pub mod key_value_options; pub mod stmt_create_table; pub mod stmt_data_loading; diff --git a/src/ast/helpers/stmt_create_table.rs b/src/ast/helpers/stmt_create_table.rs index 364969c40..542d30ea9 100644 --- a/src/ast/helpers/stmt_create_table.rs +++ b/src/ast/helpers/stmt_create_table.rs @@ -26,10 +26,12 @@ use sqlparser_derive::{Visit, VisitMut}; use super::super::dml::CreateTable; use crate::ast::{ - ClusteredBy, ColumnDef, CommentDef, Expr, FileFormat, HiveDistributionStyle, HiveFormat, Ident, - ObjectName, OnCommit, OneOrManyWithParens, Query, RowAccessPolicy, SqlOption, Statement, - TableConstraint, TableEngine, Tag, WrappedCollection, + ClusteredBy, ColumnDef, CommentDef, CreateTableOptions, Expr, FileFormat, + HiveDistributionStyle, HiveFormat, Ident, ObjectName, OnCommit, OneOrManyWithParens, Query, + RowAccessPolicy, Statement, StorageSerializationPolicy, TableConstraint, Tag, + WrappedCollection, }; + use crate::parser::ParserError; /// Builder for create table statement variant ([1]). 
@@ -42,12 +44,11 @@ use crate::parser::ParserError; /// ```rust /// use sqlparser::ast::helpers::stmt_create_table::CreateTableBuilder; /// use sqlparser::ast::{ColumnDef, DataType, Ident, ObjectName}; -/// let builder = CreateTableBuilder::new(ObjectName(vec![Ident::new("table_name")])) +/// let builder = CreateTableBuilder::new(ObjectName::from(vec![Ident::new("table_name")])) /// .if_not_exists(true) /// .columns(vec![ColumnDef { /// name: Ident::new("c1"), /// data_type: DataType::Int(None), -/// collation: None, /// options: vec![], /// }]); /// // You can access internal elements with ease @@ -71,24 +72,19 @@ pub struct CreateTableBuilder { pub if_not_exists: bool, pub transient: bool, pub volatile: bool, + pub iceberg: bool, pub name: ObjectName, pub columns: Vec, pub constraints: Vec, pub hive_distribution: HiveDistributionStyle, pub hive_formats: Option, - pub table_properties: Vec, - pub with_options: Vec, pub file_format: Option, pub location: Option, pub query: Option>, pub without_rowid: bool, pub like: Option, pub clone: Option, - pub engine: Option, pub comment: Option, - pub auto_increment_offset: Option, - pub default_charset: Option, - pub collation: Option, pub on_commit: Option, pub on_cluster: Option, pub primary_key: Option>, @@ -96,7 +92,7 @@ pub struct CreateTableBuilder { pub partition_by: Option>, pub cluster_by: Option>>, pub clustered_by: Option, - pub options: Option>, + pub inherits: Option>, pub strict: bool, pub copy_grants: bool, pub enable_schema_evolution: Option, @@ -107,6 +103,12 @@ pub struct CreateTableBuilder { pub with_aggregation_policy: Option, pub with_row_access_policy: Option, pub with_tags: Option>, + pub base_location: Option, + pub external_volume: Option, + pub catalog: Option, + pub catalog_sync: Option, + pub storage_serialization_policy: Option, + pub table_options: CreateTableOptions, } impl CreateTableBuilder { @@ -119,24 +121,19 @@ impl CreateTableBuilder { if_not_exists: false, transient: false, volatile: false, + iceberg: false, name, columns: vec![], constraints: vec![], hive_distribution: HiveDistributionStyle::NONE, hive_formats: None, - table_properties: vec![], - with_options: vec![], file_format: None, location: None, query: None, without_rowid: false, like: None, clone: None, - engine: None, comment: None, - auto_increment_offset: None, - default_charset: None, - collation: None, on_commit: None, on_cluster: None, primary_key: None, @@ -144,7 +141,7 @@ impl CreateTableBuilder { partition_by: None, cluster_by: None, clustered_by: None, - options: None, + inherits: None, strict: false, copy_grants: false, enable_schema_evolution: None, @@ -155,6 +152,12 @@ impl CreateTableBuilder { with_aggregation_policy: None, with_row_access_policy: None, with_tags: None, + base_location: None, + external_volume: None, + catalog: None, + catalog_sync: None, + storage_serialization_policy: None, + table_options: CreateTableOptions::None, } } pub fn or_replace(mut self, or_replace: bool) -> Self { @@ -192,6 +195,11 @@ impl CreateTableBuilder { self } + pub fn iceberg(mut self, iceberg: bool) -> Self { + self.iceberg = iceberg; + self + } + pub fn columns(mut self, columns: Vec) -> Self { self.columns = columns; self @@ -212,15 +220,6 @@ impl CreateTableBuilder { self } - pub fn table_properties(mut self, table_properties: Vec) -> Self { - self.table_properties = table_properties; - self - } - - pub fn with_options(mut self, with_options: Vec) -> Self { - self.with_options = with_options; - self - } pub fn file_format(mut self, file_format: 
Option) -> Self { self.file_format = file_format; self @@ -250,31 +249,11 @@ impl CreateTableBuilder { self } - pub fn engine(mut self, engine: Option) -> Self { - self.engine = engine; - self - } - - pub fn comment(mut self, comment: Option) -> Self { + pub fn comment_after_column_def(mut self, comment: Option) -> Self { self.comment = comment; self } - pub fn auto_increment_offset(mut self, offset: Option) -> Self { - self.auto_increment_offset = offset; - self - } - - pub fn default_charset(mut self, default_charset: Option) -> Self { - self.default_charset = default_charset; - self - } - - pub fn collation(mut self, collation: Option) -> Self { - self.collation = collation; - self - } - pub fn on_commit(mut self, on_commit: Option) -> Self { self.on_commit = on_commit; self @@ -310,8 +289,8 @@ impl CreateTableBuilder { self } - pub fn options(mut self, options: Option>) -> Self { - self.options = options; + pub fn inherits(mut self, inherits: Option>) -> Self { + self.inherits = inherits; self } @@ -371,6 +350,39 @@ impl CreateTableBuilder { self } + pub fn base_location(mut self, base_location: Option) -> Self { + self.base_location = base_location; + self + } + + pub fn external_volume(mut self, external_volume: Option) -> Self { + self.external_volume = external_volume; + self + } + + pub fn catalog(mut self, catalog: Option) -> Self { + self.catalog = catalog; + self + } + + pub fn catalog_sync(mut self, catalog_sync: Option) -> Self { + self.catalog_sync = catalog_sync; + self + } + + pub fn storage_serialization_policy( + mut self, + storage_serialization_policy: Option, + ) -> Self { + self.storage_serialization_policy = storage_serialization_policy; + self + } + + pub fn table_options(mut self, table_options: CreateTableOptions) -> Self { + self.table_options = table_options; + self + } + pub fn build(self) -> Statement { Statement::CreateTable(CreateTable { or_replace: self.or_replace, @@ -380,24 +392,19 @@ impl CreateTableBuilder { if_not_exists: self.if_not_exists, transient: self.transient, volatile: self.volatile, + iceberg: self.iceberg, name: self.name, columns: self.columns, constraints: self.constraints, hive_distribution: self.hive_distribution, hive_formats: self.hive_formats, - table_properties: self.table_properties, - with_options: self.with_options, file_format: self.file_format, location: self.location, query: self.query, without_rowid: self.without_rowid, like: self.like, clone: self.clone, - engine: self.engine, comment: self.comment, - auto_increment_offset: self.auto_increment_offset, - default_charset: self.default_charset, - collation: self.collation, on_commit: self.on_commit, on_cluster: self.on_cluster, primary_key: self.primary_key, @@ -405,7 +412,7 @@ impl CreateTableBuilder { partition_by: self.partition_by, cluster_by: self.cluster_by, clustered_by: self.clustered_by, - options: self.options, + inherits: self.inherits, strict: self.strict, copy_grants: self.copy_grants, enable_schema_evolution: self.enable_schema_evolution, @@ -416,6 +423,12 @@ impl CreateTableBuilder { with_aggregation_policy: self.with_aggregation_policy, with_row_access_policy: self.with_row_access_policy, with_tags: self.with_tags, + base_location: self.base_location, + external_volume: self.external_volume, + catalog: self.catalog, + catalog_sync: self.catalog_sync, + storage_serialization_policy: self.storage_serialization_policy, + table_options: self.table_options, }) } } @@ -435,24 +448,19 @@ impl TryFrom for CreateTableBuilder { if_not_exists, transient, volatile, + 
iceberg, name, columns, constraints, hive_distribution, hive_formats, - table_properties, - with_options, file_format, location, query, without_rowid, like, clone, - engine, comment, - auto_increment_offset, - default_charset, - collation, on_commit, on_cluster, primary_key, @@ -460,7 +468,7 @@ impl TryFrom for CreateTableBuilder { partition_by, cluster_by, clustered_by, - options, + inherits, strict, copy_grants, enable_schema_evolution, @@ -471,6 +479,12 @@ impl TryFrom for CreateTableBuilder { with_aggregation_policy, with_row_access_policy, with_tags, + base_location, + external_volume, + catalog, + catalog_sync, + storage_serialization_policy, + table_options, }) => Ok(Self { or_replace, temporary, @@ -483,19 +497,13 @@ impl TryFrom for CreateTableBuilder { constraints, hive_distribution, hive_formats, - table_properties, - with_options, file_format, location, query, without_rowid, like, clone, - engine, comment, - auto_increment_offset, - default_charset, - collation, on_commit, on_cluster, primary_key, @@ -503,8 +511,9 @@ impl TryFrom for CreateTableBuilder { partition_by, cluster_by, clustered_by, - options, + inherits, strict, + iceberg, copy_grants, enable_schema_evolution, change_tracking, @@ -515,6 +524,12 @@ impl TryFrom for CreateTableBuilder { with_row_access_policy, with_tags, volatile, + base_location, + external_volume, + catalog, + catalog_sync, + storage_serialization_policy, + table_options, }), _ => Err(ParserError::ParserError(format!( "Expected create table statement, but received: {stmt}" @@ -528,7 +543,8 @@ impl TryFrom for CreateTableBuilder { pub(crate) struct CreateTableConfiguration { pub partition_by: Option>, pub cluster_by: Option>>, - pub options: Option>, + pub inherits: Option>, + pub table_options: CreateTableOptions, } #[cfg(test)] @@ -539,7 +555,7 @@ mod tests { #[test] pub fn test_from_valid_statement() { - let builder = CreateTableBuilder::new(ObjectName(vec![Ident::new("table_name")])); + let builder = CreateTableBuilder::new(ObjectName::from(vec![Ident::new("table_name")])); let stmt = builder.clone().build(); @@ -548,7 +564,11 @@ mod tests { #[test] pub fn test_from_invalid_statement() { - let stmt = Statement::Commit { chain: false }; + let stmt = Statement::Commit { + chain: false, + end: false, + modifier: None, + }; assert_eq!( CreateTableBuilder::try_from(stmt).unwrap_err(), diff --git a/src/ast/helpers/stmt_data_loading.rs b/src/ast/helpers/stmt_data_loading.rs index cda6c6ea4..92a727279 100644 --- a/src/ast/helpers/stmt_data_loading.rs +++ b/src/ast/helpers/stmt_data_loading.rs @@ -21,15 +21,13 @@ #[cfg(not(feature = "std"))] use alloc::string::String; -#[cfg(not(feature = "std"))] -use alloc::vec::Vec; use core::fmt; -use core::fmt::Formatter; #[cfg(feature = "serde")] use serde::{Deserialize, Serialize}; -use crate::ast::Ident; +use crate::ast::helpers::key_value_options::KeyValueOptions; +use crate::ast::{Ident, ObjectName, SelectItem}; #[cfg(feature = "visitor")] use sqlparser_derive::{Visit, VisitMut}; @@ -38,35 +36,29 @@ use sqlparser_derive::{Visit, VisitMut}; #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] pub struct StageParamsObject { pub url: Option, - pub encryption: DataLoadingOptions, + pub encryption: KeyValueOptions, pub endpoint: Option, pub storage_integration: Option, - pub credentials: DataLoadingOptions, + pub credentials: KeyValueOptions, } +/// This enum enables support for both standard SQL select item expressions +/// and Snowflake-specific ones for data loading. 
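+/// For example (illustrative), a Snowflake data-loading projection such as
+/// `$1:name::string AS name` is a `StageLoadSelectItem`, while a plain item
+/// such as `col1` is a standard `SelectItem`.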
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub struct DataLoadingOptions { - pub options: Vec, +pub enum StageLoadSelectItemKind { + SelectItem(SelectItem), + StageLoadSelectItem(StageLoadSelectItem), } -#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub enum DataLoadingOptionType { - STRING, - BOOLEAN, - ENUM, -} - -#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub struct DataLoadingOption { - pub option_name: String, - pub option_type: DataLoadingOptionType, - pub value: String, +impl fmt::Display for StageLoadSelectItemKind { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match &self { + StageLoadSelectItemKind::SelectItem(item) => write!(f, "{item}"), + StageLoadSelectItemKind::StageLoadSelectItem(item) => write!(f, "{item}"), + } + } } #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] @@ -105,42 +97,6 @@ impl fmt::Display for StageParamsObject { } } -impl fmt::Display for DataLoadingOptions { - fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { - if !self.options.is_empty() { - let mut first = false; - for option in &self.options { - if !first { - first = true; - } else { - f.write_str(" ")?; - } - write!(f, "{}", option)?; - } - } - Ok(()) - } -} - -impl fmt::Display for DataLoadingOption { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self.option_type { - DataLoadingOptionType::STRING => { - write!(f, "{}='{}'", self.option_name, self.value)?; - } - DataLoadingOptionType::ENUM => { - // single quote is omitted - write!(f, "{}={}", self.option_name, self.value)?; - } - DataLoadingOptionType::BOOLEAN => { - // single quote is omitted - write!(f, "{}={}", self.option_name, self.value)?; - } - } - Ok(()) - } -} - impl fmt::Display for StageLoadSelectItem { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { if self.alias.is_some() { @@ -156,3 +112,22 @@ impl fmt::Display for StageLoadSelectItem { Ok(()) } } + +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub struct FileStagingCommand { + #[cfg_attr(feature = "visitor", visit(with = "visit_relation"))] + pub stage: ObjectName, + pub pattern: Option, +} + +impl fmt::Display for FileStagingCommand { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "{}", self.stage)?; + if let Some(pattern) = self.pattern.as_ref() { + write!(f, " PATTERN='{pattern}'")?; + } + Ok(()) + } +} diff --git a/src/ast/mod.rs b/src/ast/mod.rs index 0573240a2..6b7ba12d9 100644 --- a/src/ast/mod.rs +++ b/src/ast/mod.rs @@ -23,9 +23,16 @@ use alloc::{ string::{String, ToString}, vec::Vec, }; +use helpers::{ + attached_token::AttachedToken, + stmt_data_loading::{FileStagingCommand, StageLoadSelectItemKind}, +}; -use core::fmt::{self, Display}; use core::ops::Deref; +use core::{ + fmt::{self, Display}, + hash, +}; #[cfg(feature = "serde")] use serde::{Deserialize, Serialize}; @@ -33,34 +40,49 @@ use serde::{Deserialize, Serialize}; #[cfg(feature = "visitor")] use sqlparser_derive::{Visit, VisitMut}; +use crate::keywords::Keyword; +use 
crate::tokenizer::{Span, Token}; + pub use self::data_type::{ - ArrayElemTypeDef, CharLengthUnits, CharacterLength, DataType, ExactNumberInfo, - StructBracketKind, TimezoneInfo, + ArrayElemTypeDef, BinaryLength, CharLengthUnits, CharacterLength, DataType, EnumMember, + ExactNumberInfo, StructBracketKind, TimezoneInfo, +}; +pub use self::dcl::{ + AlterRoleOperation, ResetConfig, RoleOption, SecondaryRoles, SetConfigValue, Use, }; -pub use self::dcl::{AlterRoleOperation, ResetConfig, RoleOption, SetConfigValue, Use}; pub use self::ddl::{ - AlterColumnOperation, AlterIndexOperation, AlterPolicyOperation, AlterTableOperation, + AlterColumnOperation, AlterConnectorOwner, AlterIndexOperation, AlterPolicyOperation, + AlterTableAlgorithm, AlterTableLock, AlterTableOperation, AlterType, AlterTypeAddValue, + AlterTypeAddValuePosition, AlterTypeOperation, AlterTypeRename, AlterTypeRenameValue, ClusteredBy, ColumnDef, ColumnOption, ColumnOptionDef, ColumnPolicy, ColumnPolicyProperty, - ConstraintCharacteristics, Deduplicate, DeferrableInitial, GeneratedAs, - GeneratedExpressionMode, IdentityParameters, IdentityProperty, IdentityPropertyFormatKind, - IdentityPropertyKind, IdentityPropertyOrder, IndexOption, IndexType, KeyOrIndexDisplay, Owner, - Partition, ProcedureParam, ReferentialAction, TableConstraint, TagsColumnOption, + ConstraintCharacteristics, CreateConnector, CreateDomain, CreateFunction, Deduplicate, + DeferrableInitial, DropBehavior, GeneratedAs, GeneratedExpressionMode, IdentityParameters, + IdentityProperty, IdentityPropertyFormatKind, IdentityPropertyKind, IdentityPropertyOrder, + IndexOption, IndexType, KeyOrIndexDisplay, NullsDistinctOption, Owner, Partition, + ProcedureParam, ReferentialAction, TableConstraint, TagsColumnOption, UserDefinedTypeCompositeAttributeDef, UserDefinedTypeRepresentation, ViewColumnDef, }; -pub use self::dml::{CreateIndex, CreateTable, Delete, Insert}; +pub use self::dml::{CreateIndex, CreateTable, Delete, IndexColumn, Insert}; pub use self::operator::{BinaryOperator, UnaryOperator}; pub use self::query::{ AfterMatchSkip, ConnectBy, Cte, CteAsMaterialized, Distinct, EmptyMatchesMode, - ExceptSelectItem, ExcludeSelectItem, ExprWithAlias, Fetch, ForClause, ForJson, ForXml, - FormatClause, GroupByExpr, GroupByWithModifier, IdentWithAlias, IlikeSelectItem, Interpolate, - InterpolateExpr, Join, JoinConstraint, JoinOperator, JsonTableColumn, - JsonTableColumnErrorHandling, LateralView, LockClause, LockType, MatchRecognizePattern, + ExceptSelectItem, ExcludeSelectItem, ExprWithAlias, ExprWithAliasAndOrderBy, Fetch, ForClause, + ForJson, ForXml, FormatClause, GroupByExpr, GroupByWithModifier, IdentWithAlias, + IlikeSelectItem, InputFormatClause, Interpolate, InterpolateExpr, Join, JoinConstraint, + JoinOperator, JsonTableColumn, JsonTableColumnErrorHandling, JsonTableNamedColumn, + JsonTableNestedColumn, LateralView, LimitClause, LockClause, LockType, MatchRecognizePattern, MatchRecognizeSymbol, Measure, NamedWindowDefinition, NamedWindowExpr, NonBlock, Offset, - OffsetRows, OrderBy, OrderByExpr, PivotValueSource, ProjectionSelect, Query, RenameSelectItem, + OffsetRows, OpenJsonTableColumn, OrderBy, OrderByExpr, OrderByKind, OrderByOptions, + PipeOperator, PivotValueSource, ProjectionSelect, Query, RenameSelectItem, RepetitionQuantifier, ReplaceSelectElement, ReplaceSelectItem, RowsPerMatch, Select, - SelectInto, SelectItem, SetExpr, SetOperator, SetQuantifier, Setting, SymbolDefinition, Table, - TableAlias, TableFactor, TableFunctionArgs, TableVersion, 
TableWithJoins, Top, TopQuantity, - ValueTableMode, Values, WildcardAdditionalOptions, With, WithFill, + SelectFlavor, SelectInto, SelectItem, SelectItemQualifiedWildcardKind, SetExpr, SetOperator, + SetQuantifier, Setting, SymbolDefinition, Table, TableAlias, TableAliasColumnDef, TableFactor, + TableFunctionArgs, TableIndexHintForClause, TableIndexHintType, TableIndexHints, + TableIndexType, TableSample, TableSampleBucket, TableSampleKind, TableSampleMethod, + TableSampleModifier, TableSampleQuantity, TableSampleSeed, TableSampleSeedModifier, + TableSampleUnit, TableVersion, TableWithJoins, Top, TopQuantity, UpdateTableFromKind, + ValueTableMode, Values, WildcardAdditionalOptions, With, WithFill, XmlNamespaceDefinition, + XmlPassingArgument, XmlPassingClause, XmlTableColumn, XmlTableColumnOption, }; pub use self::trigger::{ @@ -70,15 +92,17 @@ pub use self::trigger::{ pub use self::value::{ escape_double_quote_string, escape_quoted_string, DateTimeField, DollarQuotedString, - TrimWhereField, Value, + NormalizationForm, TrimWhereField, Value, ValueWithSpan, }; -use crate::ast::helpers::stmt_data_loading::{ - DataLoadingOptions, StageLoadSelectItem, StageParamsObject, -}; +use crate::ast::helpers::key_value_options::KeyValueOptions; +use crate::ast::helpers::stmt_data_loading::StageParamsObject; + #[cfg(feature = "visitor")] pub use visitor::*; +pub use self::data_type::GeometricTypeKind; + mod data_type; mod dcl; mod ddl; @@ -86,6 +110,9 @@ mod dml; pub mod helpers; mod operator; mod query; +mod spans; +pub use spans::Spanned; + mod trigger; mod value; @@ -100,7 +127,7 @@ where sep: &'static str, } -impl<'a, T> fmt::Display for DisplaySeparated<'a, T> +impl fmt::Display for DisplaySeparated<'_, T> where T: fmt::Display, { @@ -129,8 +156,17 @@ where DisplaySeparated { slice, sep: ", " } } +/// Writes the given statements to the formatter, each ending with +/// a semicolon and space separated. +fn format_statement_list(f: &mut fmt::Formatter, statements: &[Statement]) -> fmt::Result { + write!(f, "{}", display_separated(statements, "; "))?; + // We manually insert semicolon for the last statement, + // since display_separated doesn't handle that case. + write!(f, ";") +} + /// An identifier, decomposed into its value or character data and the quote style. -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[derive(Debug, Clone, PartialOrd, Ord)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] pub struct Ident { @@ -139,10 +175,41 @@ pub struct Ident { /// The starting quote if any. Valid quote characters are the single quote, /// double quote, backtick, and opening square bracket. pub quote_style: Option, + /// The span of the identifier in the original SQL string. + pub span: Span, +} + +impl PartialEq for Ident { + fn eq(&self, other: &Self) -> bool { + let Ident { + value, + quote_style, + // exhaustiveness check; we ignore spans in comparisons + span: _, + } = self; + + value == &other.value && quote_style == &other.quote_style + } } +impl core::hash::Hash for Ident { + fn hash(&self, state: &mut H) { + let Ident { + value, + quote_style, + // exhaustiveness check; we ignore spans in hashes + span: _, + } = self; + + value.hash(state); + quote_style.hash(state); + } +} + +impl Eq for Ident {} + impl Ident { - /// Create a new identifier with the given value and no quotes. + /// Create a new identifier with the given value and no quotes and an empty span. 
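+    ///
+    /// For example (illustrative), `Ident::new("foo")` yields an unquoted
+    /// identifier whose `span` is `Span::empty()`.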
pub fn new(value: S) -> Self where S: Into, @@ -150,6 +217,7 @@ impl Ident { Ident { value: value.into(), quote_style: None, + span: Span::empty(), } } @@ -163,6 +231,30 @@ impl Ident { Ident { value: value.into(), quote_style: Some(quote), + span: Span::empty(), + } + } + + pub fn with_span(span: Span, value: S) -> Self + where + S: Into, + { + Ident { + value: value.into(), + quote_style: None, + span, + } + } + + pub fn with_quote_and_span(quote: char, span: Span, value: S) -> Self + where + S: Into, + { + assert!(quote == '\'' || quote == '"' || quote == '`' || quote == '['); + Ident { + value: value.into(), + quote_style: Some(quote), + span, } } } @@ -172,6 +264,7 @@ impl From<&str> for Ident { Ident { value: value.to_string(), quote_style: None, + span: Span::empty(), } } } @@ -194,7 +287,13 @@ impl fmt::Display for Ident { #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub struct ObjectName(pub Vec); +pub struct ObjectName(pub Vec); + +impl From> for ObjectName { + fn from(idents: Vec) -> Self { + ObjectName(idents.into_iter().map(ObjectNamePart::Identifier).collect()) + } +} impl fmt::Display for ObjectName { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { @@ -202,6 +301,30 @@ impl fmt::Display for ObjectName { } } +/// A single part of an ObjectName +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub enum ObjectNamePart { + Identifier(Ident), +} + +impl ObjectNamePart { + pub fn as_ident(&self) -> Option<&Ident> { + match self { + ObjectNamePart::Identifier(ident) => Some(ident), + } + } +} + +impl fmt::Display for ObjectNamePart { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + ObjectNamePart::Identifier(ident) => write!(f, "{}", ident), + } + } +} + /// Represents an Array Expression, either /// `ARRAY[..]`, or `[..]` #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] @@ -292,7 +415,7 @@ impl fmt::Display for Interval { /// A field definition within a struct /// -/// [bigquery]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#struct_type +/// [BigQuery]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#struct_type #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] @@ -313,7 +436,7 @@ impl fmt::Display for StructField { /// A field definition within a union /// -/// [duckdb]: https://duckdb.org/docs/sql/data_types/union.html +/// [DuckDB]: https://duckdb.org/docs/sql/data_types/union.html #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] @@ -330,7 +453,7 @@ impl fmt::Display for UnionField { /// A dictionary field within a dictionary. /// -/// [duckdb]: https://duckdb.org/docs/sql/data_types/struct#creating-structs +/// [DuckDB]: https://duckdb.org/docs/sql/data_types/struct#creating-structs #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] @@ -361,7 +484,7 @@ impl Display for Map { /// A map field within a map. 
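+/// For example (illustrative), the `'a': 1` entry in the DuckDB literal
+/// `MAP {'a': 1}`.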
/// -/// [duckdb]: https://duckdb.org/docs/sql/data_types/map.html#creating-maps +/// [DuckDB]: https://duckdb.org/docs/sql/data_types/map.html#creating-maps #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] @@ -386,40 +509,6 @@ pub enum CastFormat { ValueAtTimeZone(Value, Value), } -/// Represents the syntax/style used in a map access. -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub enum MapAccessSyntax { - /// Access using bracket notation. `mymap[mykey]` - Bracket, - /// Access using period notation. `mymap.mykey` - Period, -} - -/// Expression used to access a value in a nested structure. -/// -/// Example: `SAFE_OFFSET(0)` in -/// ```sql -/// SELECT mymap[SAFE_OFFSET(0)]; -/// ``` -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub struct MapAccessKey { - pub key: Expr, - pub syntax: MapAccessSyntax, -} - -impl fmt::Display for MapAccessKey { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self.syntax { - MapAccessSyntax::Bracket => write!(f, "[{}]", self.key), - MapAccessSyntax::Period => write!(f, ".{}", self.key), - } - } -} - /// An element of a JSON path. #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] @@ -527,11 +616,39 @@ pub enum CeilFloorKind { Scale(Value), } +/// A WHEN clause in a CASE expression containing both +/// the condition and its corresponding result +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub struct CaseWhen { + pub condition: Expr, + pub result: Expr, +} + +impl fmt::Display for CaseWhen { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "WHEN {} THEN {}", self.condition, self.result) + } +} + /// An SQL expression of any type. /// +/// # Semantics / Type Checking +/// /// The parser does not distinguish between expressions of different types -/// (e.g. boolean vs string), so the caller must handle expressions of -/// inappropriate type, like `WHERE 1` or `SELECT 1=1`, as necessary. +/// (e.g. boolean vs string). The caller is responsible for detecting and +/// validating types as necessary (for example `WHERE 1` vs `SELECT 1=1`) +/// See the [README.md] for more details. +/// +/// [README.md]: https://github.com/apache/datafusion-sqlparser-rs/blob/main/README.md#syntax-vs-semantics +/// +/// # Equality and Hashing Does not Include Source Locations +/// +/// The `Expr` type implements `PartialEq` and `Eq` based on the semantic value +/// of the expression (not bitwise comparison). This means that `Expr` instances +/// that are semantically equivalent but have different spans (locations in the +/// source tree) will compare as equal. #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr( @@ -544,6 +661,28 @@ pub enum Expr { Identifier(Ident), /// Multi-part identifier, e.g. `table_alias.column` or `schema.table.col` CompoundIdentifier(Vec), + /// Multi-part expression access. 
+ /// + /// This structure represents an access chain in structured / nested types + /// such as maps, arrays, and lists: + /// - Array + /// - A 1-dim array `a[1]` will be represented like: + /// `CompoundFieldAccess(Ident('a'), vec![Subscript(1)]` + /// - A 2-dim array `a[1][2]` will be represented like: + /// `CompoundFieldAccess(Ident('a'), vec![Subscript(1), Subscript(2)]` + /// - Map or Struct (Bracket-style) + /// - A map `a['field1']` will be represented like: + /// `CompoundFieldAccess(Ident('a'), vec![Subscript('field')]` + /// - A 2-dim map `a['field1']['field2']` will be represented like: + /// `CompoundFieldAccess(Ident('a'), vec![Subscript('field1'), Subscript('field2')]` + /// - Struct (Dot-style) (only effect when the chain contains both subscript and expr) + /// - A struct access `a[field1].field2` will be represented like: + /// `CompoundFieldAccess(Ident('a'), vec![Subscript('field1'), Ident('field2')]` + /// - If a struct access likes `a.field1.field2`, it will be represented by CompoundIdentifier([a, field1, field2]) + CompoundFieldAccess { + root: Box, + access_chain: Vec, + }, /// Access data nested in a value containing semi-structured data, such as /// the `VARIANT` type on Snowflake. for example `src:customer[0].name`. /// @@ -555,11 +694,6 @@ pub enum Expr { /// The path to the data to extract. path: JsonPath, }, - /// CompositeAccess (postgres) eg: SELECT (information_schema._pg_expandarray(array['i','i'])).n - CompositeAccess { - expr: Box, - key: Ident, - }, /// `IS FALSE` operator IsFalse(Box), /// `IS NOT FALSE` operator @@ -580,6 +714,12 @@ pub enum Expr { IsDistinctFrom(Box, Box), /// `IS NOT DISTINCT FROM` operator IsNotDistinctFrom(Box, Box), + /// ` IS [ NOT ] [ form ] NORMALIZED` + IsNormalized { + expr: Box, + form: Option, + negated: bool, + }, /// `[ NOT ] IN (val1, val2, ...)` InList { expr: Box, @@ -589,7 +729,7 @@ pub enum Expr { /// `[ NOT ] IN (SELECT ...)` InSubquery { expr: Box, - subquery: Box, + subquery: Box, negated: bool, }, /// `[ NOT ] IN UNNEST(array_expression)` @@ -690,8 +830,9 @@ pub enum Expr { kind: CastKind, expr: Box, data_type: DataType, - // Optional CAST(string_expression AS type FORMAT format_string_expression) as used by BigQuery - // https://cloud.google.com/bigquery/docs/reference/standard-sql/format-elements#formatting_syntax + /// Optional CAST(string_expression AS type FORMAT format_string_expression) as used by [BigQuery] + /// + /// [BigQuery]: https://cloud.google.com/bigquery/docs/reference/standard-sql/format-elements#formatting_syntax format: Option, }, /// AT a timestamp to a different timezone e.g. `FROM_UNIXTIME(0) AT TIME ZONE 'UTC-06:00'` @@ -754,6 +895,10 @@ pub enum Expr { /// true if the expression is represented using the `SUBSTRING(expr, start, len)` syntax /// This flag is used for formatting. special: bool, + + /// true if the expression is represented using the `SUBSTR` shorthand + /// This flag is used for formatting. + shorthand: bool, }, /// ```sql /// TRIM([BOTH | LEADING | TRAILING] [ FROM] ) @@ -784,26 +929,24 @@ pub enum Expr { /// Nested expression e.g. `(foo > bar)` or `(1)` Nested(Box), /// A literal value, such as string, number, date or NULL - Value(Value), + Value(ValueWithSpan), + /// Prefixed expression, e.g. introducer strings, projection prefix /// - IntroducedString { - introducer: String, - value: Value, + /// + Prefixed { + prefix: Ident, + /// The value of the constant. + /// Hint: you can unwrap the string value using `value.into_string()`. 
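Illustrative aside (not part of the patch): a sketch of the new `IsNormalized` variant declared above. The expected text follows the `Display` logic added further down; using `form: None` avoids depending on the exact `NormalizationForm` variants.

```rust
use sqlparser::ast::{Expr, Ident};

fn main() {
    // `col IS NOT NORMALIZED` with no explicit normalization form.
    let expr = Expr::IsNormalized {
        expr: Box::new(Expr::Identifier(Ident::new("col"))),
        form: None,
        negated: true,
    };
    assert_eq!(expr.to_string(), "col IS NOT NORMALIZED");
}
```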
+ value: Box, }, /// A constant of form ` 'value'`. /// This can represent ANSI SQL `DATE`, `TIME`, and `TIMESTAMP` literals (such as `DATE '2020-01-01'`), /// as well as constants of other types (a non-standard PostgreSQL extension). TypedString { data_type: DataType, - value: String, - }, - /// Access a map-like object by field (e.g. `column['field']` or `column[4]` - /// Note that depending on the dialect, struct like accesses may be - /// parsed as [`Subscript`](Self::Subscript) or [`MapAccess`](Self::MapAccess) - /// - MapAccess { - column: Box, - keys: Vec, + /// The value of the constant. + /// Hint: you can unwrap the string value using `value.into_string()`. + value: Value, }, /// Scalar function call e.g. `LEFT(foo, 5)` Function(Function), @@ -814,8 +957,7 @@ pub enum Expr { /// Case { operand: Option>, - conditions: Vec, - results: Vec, + conditions: Vec, else_result: Option>, }, /// An exists expression `[ NOT ] EXISTS(SELECT ...)`, used in expressions like @@ -835,12 +977,14 @@ pub enum Expr { Rollup(Vec>), /// ROW / TUPLE a single value, such as `SELECT (1, 2)` Tuple(Vec), - /// `BigQuery` specific `Struct` literal expression [1] + /// `Struct` literal expression /// Syntax: /// ```sql /// STRUCT<[field_name] field_type, ...>( expr1 [, ... ]) + /// + /// [BigQuery](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#struct_type) + /// [Databricks](https://docs.databricks.com/en/sql/language-manual/functions/struct.html) /// ``` - /// [1]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#struct_type Struct { /// Struct values. values: Vec, @@ -874,11 +1018,6 @@ pub enum Expr { /// ``` /// [1]: https://duckdb.org/docs/sql/data_types/map#creating-maps Map(Map), - /// An access of nested data using subscript syntax, for example `array[2]`. - Subscript { - expr: Box, - subscript: Box, - }, /// An array expression e.g. `ARRAY[1, 2]` Array(Array), /// An interval expression e.g. `INTERVAL '1' YEAR` @@ -895,16 +1034,16 @@ pub enum Expr { /// [(1)]: https://dev.mysql.com/doc/refman/8.0/en/fulltext-search.html#function_match MatchAgainst { /// `(, , ...)`. - columns: Vec, + columns: Vec, /// ``. match_value: Value, /// `` opt_search_modifier: Option, }, - Wildcard, + Wildcard(AttachedToken), /// Qualified wildcard, e.g. `alias.*` or `schema.table.*`. /// (Same caveats apply to `QualifiedWildcard` as to `Wildcard`.) - QualifiedWildcard(ObjectName), + QualifiedWildcard(ObjectName, AttachedToken), /// Some dialects support an older syntax for outer joins where columns are /// marked with the `(+)` operator in the WHERE clause, for example: /// @@ -929,10 +1068,19 @@ pub enum Expr { /// param -> expr | (param1, ...) -> expr /// ``` /// - /// See . + /// [ClickHouse](https://clickhouse.com/docs/en/sql-reference/functions#higher-order-functions---operator-and-lambdaparams-expr-function) + /// [Databricks](https://docs.databricks.com/en/sql/language-manual/sql-ref-lambda-functions.html) + /// [DuckDb](https://duckdb.org/docs/sql/functions/lambda.html) Lambda(LambdaFunction), } +impl Expr { + /// Creates a new [`Expr::Value`] + pub fn value(value: impl Into) -> Self { + Expr::Value(value.into()) + } +} + /// The contents inside the `[` and `]` in a subscript expression. #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] @@ -995,6 +1143,27 @@ impl fmt::Display for Subscript { } } +/// An element of a [`Expr::CompoundFieldAccess`]. +/// It can be an expression or a subscript. 
+#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub enum AccessExpr { + /// Accesses a field using dot notation, e.g. `foo.bar.baz`. + Dot(Expr), + /// Accesses a field or array element using bracket notation, e.g. `foo['bar']`. + Subscript(Subscript), +} + +impl fmt::Display for AccessExpr { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + AccessExpr::Dot(expr) => write!(f, ".{}", expr), + AccessExpr::Subscript(subscript) => write!(f, "[{}]", subscript), + } + } +} + /// A lambda function. #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] @@ -1018,7 +1187,7 @@ impl fmt::Display for LambdaFunction { /// `OneOrManyWithParens` implements `Deref` and `IntoIterator`, /// so you can call slice methods on it and iterate over items /// # Examples -/// Acessing as a slice: +/// Accessing as a slice: /// ``` /// # use sqlparser::ast::OneOrManyWithParens; /// let one = OneOrManyWithParens::One("a"); @@ -1187,15 +1356,20 @@ impl fmt::Display for CastFormat { } impl fmt::Display for Expr { + #[cfg_attr(feature = "recursive-protection", recursive::recursive)] fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { Expr::Identifier(s) => write!(f, "{s}"), - Expr::MapAccess { column, keys } => { - write!(f, "{column}{}", display_separated(keys, "")) - } - Expr::Wildcard => f.write_str("*"), - Expr::QualifiedWildcard(prefix) => write!(f, "{}.*", prefix), + Expr::Wildcard(_) => f.write_str("*"), + Expr::QualifiedWildcard(prefix, _) => write!(f, "{}.*", prefix), Expr::CompoundIdentifier(s) => write!(f, "{}", display_separated(s, ".")), + Expr::CompoundFieldAccess { root, access_chain } => { + write!(f, "{}", root)?; + for field in access_chain { + write!(f, "{}", field)?; + } + Ok(()) + } Expr::IsTrue(ast) => write!(f, "{ast} IS TRUE"), Expr::IsNotTrue(ast) => write!(f, "{ast} IS NOT TRUE"), Expr::IsFalse(ast) => write!(f, "{ast} IS FALSE"), @@ -1314,6 +1488,24 @@ impl fmt::Display for Expr { if *regexp { "REGEXP" } else { "RLIKE" }, pattern ), + Expr::IsNormalized { + expr, + form, + negated, + } => { + let not_ = if *negated { "NOT " } else { "" }; + if form.is_none() { + write!(f, "{} IS {}NORMALIZED", expr, not_) + } else { + write!( + f, + "{} IS {}{} NORMALIZED", + expr, + not_, + form.as_ref().unwrap() + ) + } + } Expr::SimilarTo { negated, expr, @@ -1367,7 +1559,15 @@ impl fmt::Display for Expr { Expr::UnaryOp { op, expr } => { if op == &UnaryOperator::PGPostfixFactorial { write!(f, "{expr}{op}") - } else if op == &UnaryOperator::Not { + } else if matches!( + op, + UnaryOperator::Not + | UnaryOperator::Hash + | UnaryOperator::AtDashAt + | UnaryOperator::DoubleAt + | UnaryOperator::QuestionDash + | UnaryOperator::QuestionPipe + ) { write!(f, "{op} {expr}") } else { write!(f, "{op}{expr}") @@ -1457,26 +1657,24 @@ impl fmt::Display for Expr { Expr::Collate { expr, collation } => write!(f, "{expr} COLLATE {collation}"), Expr::Nested(ast) => write!(f, "({ast})"), Expr::Value(v) => write!(f, "{v}"), - Expr::IntroducedString { introducer, value } => write!(f, "{introducer} {value}"), + Expr::Prefixed { prefix, value } => write!(f, "{prefix} {value}"), Expr::TypedString { data_type, value } => { write!(f, "{data_type}")?; - write!(f, " '{}'", &value::escape_single_quote_string(value)) + write!(f, " {value}") } Expr::Function(fun) => write!(f, "{fun}"), 
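Illustrative aside (not part of the patch): a sketch of the `CompoundFieldAccess`/`AccessExpr` chain documented above, showing how `a[1].name` could be represented and how it prints. It assumes the existing `Subscript::Index` variant and that `Value` converts into `ValueWithSpan`, as the `Expr::value` helper implies.

```rust
use sqlparser::ast::{AccessExpr, Expr, Ident, Subscript, Value};

fn main() {
    // `a[1].name` as a root expression plus an ordered access chain.
    let expr = Expr::CompoundFieldAccess {
        root: Box::new(Expr::Identifier(Ident::new("a"))),
        access_chain: vec![
            AccessExpr::Subscript(Subscript::Index {
                index: Expr::value(Value::Number("1".to_string(), false)),
            }),
            AccessExpr::Dot(Expr::Identifier(Ident::new("name"))),
        ],
    };
    assert_eq!(expr.to_string(), "a[1].name");
}
```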
Expr::Case { operand, conditions, - results, else_result, } => { write!(f, "CASE")?; if let Some(operand) = operand { write!(f, " {operand}")?; } - for (c, r) in conditions.iter().zip(results) { - write!(f, " WHEN {c} THEN {r}")?; + for when in conditions { + write!(f, " {when}")?; } - if let Some(else_result) = else_result { write!(f, " ELSE {else_result}")?; } @@ -1532,8 +1730,13 @@ impl fmt::Display for Expr { substring_from, substring_for, special, + shorthand, } => { - write!(f, "SUBSTRING({expr}")?; + f.write_str("SUBSTR")?; + if !*shorthand { + f.write_str("ING")?; + } + write!(f, "({expr}")?; if let Some(from_part) = substring_from { if *special { write!(f, ", {from_part}")?; @@ -1614,21 +1817,12 @@ impl fmt::Display for Expr { Expr::Map(map) => { write!(f, "{map}") } - Expr::Subscript { - expr, - subscript: key, - } => { - write!(f, "{expr}[{key}]") - } Expr::Array(set) => { write!(f, "{set}") } Expr::JsonAccess { value, path } => { write!(f, "{value}{path}") } - Expr::CompositeAccess { expr, key } => { - write!(f, "{expr}.{key}") - } Expr::AtTimeZone { timestamp, time_zone, @@ -1883,6 +2077,10 @@ pub enum CommentObject { Column, Table, Extension, + Schema, + Database, + User, + Role, } impl fmt::Display for CommentObject { @@ -1891,6 +2089,10 @@ impl fmt::Display for CommentObject { CommentObject::Column => f.write_str("COLUMN"), CommentObject::Table => f.write_str("TABLE"), CommentObject::Extension => f.write_str("EXTENSION"), + CommentObject::Schema => f.write_str("SCHEMA"), + CommentObject::Database => f.write_str("DATABASE"), + CommentObject::User => f.write_str("USER"), + CommentObject::Role => f.write_str("ROLE"), } } } @@ -1903,128 +2105,468 @@ pub enum Password { NullPassword, } -/// Represents an expression assignment within a variable `DECLARE` statement. +/// A `CASE` statement. /// /// Examples: /// ```sql -/// DECLARE variable_name := 42 -/// DECLARE variable_name DEFAULT 42 +/// CASE +/// WHEN EXISTS(SELECT 1) +/// THEN SELECT 1 FROM T; +/// WHEN EXISTS(SELECT 2) +/// THEN SELECT 1 FROM U; +/// ELSE +/// SELECT 1 FROM V; +/// END CASE; /// ``` +/// +/// [BigQuery](https://cloud.google.com/bigquery/docs/reference/standard-sql/procedural-language#case_search_expression) +/// [Snowflake](https://docs.snowflake.com/en/sql-reference/snowflake-scripting/case) #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub enum DeclareAssignment { - /// Plain expression specified. - Expr(Box), - - /// Expression assigned via the `DEFAULT` keyword - Default(Box), - - /// Expression assigned via the `:=` syntax - /// - /// Example: - /// ```sql - /// DECLARE variable_name := 42; - /// ``` - DuckAssignment(Box), - - /// Expression via the `FOR` keyword - /// - /// Example: - /// ```sql - /// DECLARE c1 CURSOR FOR res - /// ``` - For(Box), - - /// Expression via the `=` syntax. - /// - /// Example: - /// ```sql - /// DECLARE @variable AS INT = 100 - /// ``` - MsSqlAssignment(Box), +pub struct CaseStatement { + /// The `CASE` token that starts the statement. + pub case_token: AttachedToken, + pub match_expr: Option, + pub when_blocks: Vec, + pub else_block: Option, + /// The last token of the statement (`END` or `CASE`). 
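Illustrative aside (not part of the patch): a minimal sketch of the `CaseWhen` type that now backs `Expr::Case`, using the `Display` impl shown earlier. The `Value`-to-`ValueWithSpan` conversion is assumed, as implied by the `Expr::value` helper.

```rust
use sqlparser::ast::{CaseWhen, Expr, Ident, Value};

fn main() {
    // One WHEN/THEN pair, formatted exactly as it appears inside a CASE expression.
    let arm = CaseWhen {
        condition: Expr::Identifier(Ident::new("is_active")),
        result: Expr::value(Value::Number("1".to_string(), false)),
    };
    assert_eq!(arm.to_string(), "WHEN is_active THEN 1");
}
```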
+ pub end_case_token: AttachedToken, } -impl fmt::Display for DeclareAssignment { +impl fmt::Display for CaseStatement { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self { - DeclareAssignment::Expr(expr) => { - write!(f, "{expr}") - } - DeclareAssignment::Default(expr) => { - write!(f, "DEFAULT {expr}") - } - DeclareAssignment::DuckAssignment(expr) => { - write!(f, ":= {expr}") - } - DeclareAssignment::MsSqlAssignment(expr) => { - write!(f, "= {expr}") - } - DeclareAssignment::For(expr) => { - write!(f, "FOR {expr}") - } + let CaseStatement { + case_token: _, + match_expr, + when_blocks, + else_block, + end_case_token: AttachedToken(end), + } = self; + + write!(f, "CASE")?; + + if let Some(expr) = match_expr { + write!(f, " {expr}")?; } - } -} -/// Represents the type of a `DECLARE` statement. -#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub enum DeclareType { - /// Cursor variable type. e.g. [Snowflake] [Postgres] - /// - /// [Snowflake]: https://docs.snowflake.com/en/developer-guide/snowflake-scripting/cursors#declaring-a-cursor - /// [Postgres]: https://www.postgresql.org/docs/current/plpgsql-cursors.html - Cursor, + if !when_blocks.is_empty() { + write!(f, " {}", display_separated(when_blocks, " "))?; + } - /// Result set variable type. [Snowflake] - /// - /// Syntax: - /// ```text - /// RESULTSET [ { DEFAULT | := } ( ) ] ; - /// ``` - /// [Snowflake]: https://docs.snowflake.com/en/sql-reference/snowflake-scripting/declare#resultset-declaration-syntax - ResultSet, + if let Some(else_block) = else_block { + write!(f, " {else_block}")?; + } - /// Exception declaration syntax. [Snowflake] - /// - /// Syntax: - /// ```text - /// EXCEPTION [ ( , '' ) ] ; - /// ``` - /// [Snowflake]: https://docs.snowflake.com/en/sql-reference/snowflake-scripting/declare#exception-declaration-syntax - Exception, -} + write!(f, " END")?; -impl fmt::Display for DeclareType { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self { - DeclareType::Cursor => { - write!(f, "CURSOR") - } - DeclareType::ResultSet => { - write!(f, "RESULTSET") - } - DeclareType::Exception => { - write!(f, "EXCEPTION") + if let Token::Word(w) = &end.token { + if w.keyword == Keyword::CASE { + write!(f, " CASE")?; } } + + Ok(()) } } -/// A `DECLARE` statement. -/// [Postgres] [Snowflake] [BigQuery] +/// An `IF` statement. 
/// -/// Examples: +/// Example (BigQuery or Snowflake): /// ```sql -/// DECLARE variable_name := 42 -/// DECLARE liahona CURSOR FOR SELECT * FROM films; +/// IF TRUE THEN +/// SELECT 1; +/// SELECT 2; +/// ELSEIF TRUE THEN +/// SELECT 3; +/// ELSE +/// SELECT 4; +/// END IF /// ``` +/// [BigQuery](https://cloud.google.com/bigquery/docs/reference/standard-sql/procedural-language#if) +/// [Snowflake](https://docs.snowflake.com/en/sql-reference/snowflake-scripting/if) /// -/// [Postgres]: https://www.postgresql.org/docs/current/sql-declare.html -/// [Snowflake]: https://docs.snowflake.com/en/sql-reference/snowflake-scripting/declare -/// [BigQuery]: https://cloud.google.com/bigquery/docs/reference/standard-sql/procedural-language#declare +/// Example (MSSQL): +/// ```sql +/// IF 1=1 SELECT 1 ELSE SELECT 2 +/// ``` +/// [MSSQL](https://learn.microsoft.com/en-us/sql/t-sql/language-elements/if-else-transact-sql?view=sql-server-ver16) +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub struct IfStatement { + pub if_block: ConditionalStatementBlock, + pub elseif_blocks: Vec, + pub else_block: Option, + pub end_token: Option, +} + +impl fmt::Display for IfStatement { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + let IfStatement { + if_block, + elseif_blocks, + else_block, + end_token, + } = self; + + write!(f, "{if_block}")?; + + for elseif_block in elseif_blocks { + write!(f, " {elseif_block}")?; + } + + if let Some(else_block) = else_block { + write!(f, " {else_block}")?; + } + + if let Some(AttachedToken(end_token)) = end_token { + write!(f, " END {end_token}")?; + } + + Ok(()) + } +} + +/// A `WHILE` statement. 
+/// +/// Example: +/// ```sql +/// WHILE @@FETCH_STATUS = 0 +/// BEGIN +/// FETCH NEXT FROM c1 INTO @var1, @var2; +/// END +/// ``` +/// +/// [MsSql](https://learn.microsoft.com/en-us/sql/t-sql/language-elements/while-transact-sql) +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub struct WhileStatement { + pub while_block: ConditionalStatementBlock, +} + +impl fmt::Display for WhileStatement { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + let WhileStatement { while_block } = self; + write!(f, "{while_block}")?; + Ok(()) + } +} + +/// A block within a [Statement::Case] or [Statement::If] or [Statement::While]-like statement +/// +/// Example 1: +/// ```sql +/// WHEN EXISTS(SELECT 1) THEN SELECT 1; +/// ``` +/// +/// Example 2: +/// ```sql +/// IF TRUE THEN SELECT 1; SELECT 2; +/// ``` +/// +/// Example 3: +/// ```sql +/// ELSE SELECT 1; SELECT 2; +/// ``` +/// +/// Example 4: +/// ```sql +/// WHILE @@FETCH_STATUS = 0 +/// BEGIN +/// FETCH NEXT FROM c1 INTO @var1, @var2; +/// END +/// ``` +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub struct ConditionalStatementBlock { + pub start_token: AttachedToken, + pub condition: Option, + pub then_token: Option, + pub conditional_statements: ConditionalStatements, +} + +impl ConditionalStatementBlock { + pub fn statements(&self) -> &Vec { + self.conditional_statements.statements() + } +} + +impl fmt::Display for ConditionalStatementBlock { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + let ConditionalStatementBlock { + start_token: AttachedToken(start_token), + condition, + then_token, + conditional_statements, + } = self; + + write!(f, "{start_token}")?; + + if let Some(condition) = condition { + write!(f, " {condition}")?; + } + + if then_token.is_some() { + write!(f, " THEN")?; + } + + if !conditional_statements.statements().is_empty() { + write!(f, " {conditional_statements}")?; + } + + Ok(()) + } +} + +/// A list of statements in a [ConditionalStatementBlock]. +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub enum ConditionalStatements { + /// SELECT 1; SELECT 2; SELECT 3; ... + Sequence { statements: Vec }, + /// BEGIN SELECT 1; SELECT 2; SELECT 3; ... END + BeginEnd(BeginEndStatements), +} + +impl ConditionalStatements { + pub fn statements(&self) -> &Vec { + match self { + ConditionalStatements::Sequence { statements } => statements, + ConditionalStatements::BeginEnd(bes) => &bes.statements, + } + } +} + +impl fmt::Display for ConditionalStatements { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + ConditionalStatements::Sequence { statements } => { + if !statements.is_empty() { + format_statement_list(f, statements)?; + } + Ok(()) + } + ConditionalStatements::BeginEnd(bes) => write!(f, "{}", bes), + } + } +} + +/// Represents a list of statements enclosed within `BEGIN` and `END` keywords. 
+/// Example: +/// ```sql +/// BEGIN +/// SELECT 1; +/// SELECT 2; +/// END +/// ``` +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub struct BeginEndStatements { + pub begin_token: AttachedToken, + pub statements: Vec, + pub end_token: AttachedToken, +} + +impl fmt::Display for BeginEndStatements { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + let BeginEndStatements { + begin_token: AttachedToken(begin_token), + statements, + end_token: AttachedToken(end_token), + } = self; + + if begin_token.token != Token::EOF { + write!(f, "{begin_token} ")?; + } + if !statements.is_empty() { + format_statement_list(f, statements)?; + } + if end_token.token != Token::EOF { + write!(f, " {end_token}")?; + } + Ok(()) + } +} + +/// A `RAISE` statement. +/// +/// Examples: +/// ```sql +/// RAISE USING MESSAGE = 'error'; +/// +/// RAISE myerror; +/// ``` +/// +/// [BigQuery](https://cloud.google.com/bigquery/docs/reference/standard-sql/procedural-language#raise) +/// [Snowflake](https://docs.snowflake.com/en/sql-reference/snowflake-scripting/raise) +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub struct RaiseStatement { + pub value: Option, +} + +impl fmt::Display for RaiseStatement { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let RaiseStatement { value } = self; + + write!(f, "RAISE")?; + if let Some(value) = value { + write!(f, " {value}")?; + } + + Ok(()) + } +} + +/// Represents the error value of a [RaiseStatement]. +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub enum RaiseStatementValue { + /// `RAISE USING MESSAGE = 'error'` + UsingMessage(Expr), + /// `RAISE myerror` + Expr(Expr), +} + +impl fmt::Display for RaiseStatementValue { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + RaiseStatementValue::Expr(expr) => write!(f, "{expr}"), + RaiseStatementValue::UsingMessage(expr) => write!(f, "USING MESSAGE = {expr}"), + } + } +} + +/// Represents an expression assignment within a variable `DECLARE` statement. +/// +/// Examples: +/// ```sql +/// DECLARE variable_name := 42 +/// DECLARE variable_name DEFAULT 42 +/// ``` +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub enum DeclareAssignment { + /// Plain expression specified. + Expr(Box), + + /// Expression assigned via the `DEFAULT` keyword + Default(Box), + + /// Expression assigned via the `:=` syntax + /// + /// Example: + /// ```sql + /// DECLARE variable_name := 42; + /// ``` + DuckAssignment(Box), + + /// Expression via the `FOR` keyword + /// + /// Example: + /// ```sql + /// DECLARE c1 CURSOR FOR res + /// ``` + For(Box), + + /// Expression via the `=` syntax. 
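Illustrative aside (not part of the patch): a short sketch of the `RAISE` support added above, rendering the `USING MESSAGE` form with the `Display` impls shown. The `Value`-to-`ValueWithSpan` conversion is assumed, as implied by the `Expr::value` helper.

```rust
use sqlparser::ast::{Expr, RaiseStatement, RaiseStatementValue, Value};

fn main() {
    let raise = RaiseStatement {
        value: Some(RaiseStatementValue::UsingMessage(Expr::value(
            Value::SingleQuotedString("error".to_string()),
        ))),
    };
    assert_eq!(raise.to_string(), "RAISE USING MESSAGE = 'error'");
}
```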
+ /// + /// Example: + /// ```sql + /// DECLARE @variable AS INT = 100 + /// ``` + MsSqlAssignment(Box), +} + +impl fmt::Display for DeclareAssignment { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + DeclareAssignment::Expr(expr) => { + write!(f, "{expr}") + } + DeclareAssignment::Default(expr) => { + write!(f, "DEFAULT {expr}") + } + DeclareAssignment::DuckAssignment(expr) => { + write!(f, ":= {expr}") + } + DeclareAssignment::MsSqlAssignment(expr) => { + write!(f, "= {expr}") + } + DeclareAssignment::For(expr) => { + write!(f, "FOR {expr}") + } + } + } +} + +/// Represents the type of a `DECLARE` statement. +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub enum DeclareType { + /// Cursor variable type. e.g. [Snowflake] [PostgreSQL] [MsSql] + /// + /// [Snowflake]: https://docs.snowflake.com/en/developer-guide/snowflake-scripting/cursors#declaring-a-cursor + /// [PostgreSQL]: https://www.postgresql.org/docs/current/plpgsql-cursors.html + /// [MsSql]: https://learn.microsoft.com/en-us/sql/t-sql/language-elements/declare-cursor-transact-sql + Cursor, + + /// Result set variable type. [Snowflake] + /// + /// Syntax: + /// ```text + /// RESULTSET [ { DEFAULT | := } ( ) ] ; + /// ``` + /// [Snowflake]: https://docs.snowflake.com/en/sql-reference/snowflake-scripting/declare#resultset-declaration-syntax + ResultSet, + + /// Exception declaration syntax. [Snowflake] + /// + /// Syntax: + /// ```text + /// EXCEPTION [ ( , '' ) ] ; + /// ``` + /// [Snowflake]: https://docs.snowflake.com/en/sql-reference/snowflake-scripting/declare#exception-declaration-syntax + Exception, +} + +impl fmt::Display for DeclareType { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + DeclareType::Cursor => { + write!(f, "CURSOR") + } + DeclareType::ResultSet => { + write!(f, "RESULTSET") + } + DeclareType::Exception => { + write!(f, "EXCEPTION") + } + } + } +} + +/// A `DECLARE` statement. +/// [PostgreSQL] [Snowflake] [BigQuery] +/// +/// Examples: +/// ```sql +/// DECLARE variable_name := 42 +/// DECLARE liahona CURSOR FOR SELECT * FROM films; +/// ``` +/// +/// [PostgreSQL]: https://www.postgresql.org/docs/current/sql-declare.html +/// [Snowflake]: https://docs.snowflake.com/en/sql-reference/snowflake-scripting/declare +/// [BigQuery]: https://cloud.google.com/bigquery/docs/reference/standard-sql/procedural-language#declare #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] @@ -2144,6 +2686,18 @@ pub enum CreateTableOptions { /// /// Options(Vec), + + /// Plain options, options which are not part on any declerative statement e.g. WITH/OPTIONS/... 
+ /// + Plain(Vec), + + TableProperties(Vec), +} + +impl Default for CreateTableOptions { + fn default() -> Self { + Self::None + } } impl fmt::Display for CreateTableOptions { @@ -2155,6 +2709,12 @@ impl fmt::Display for CreateTableOptions { CreateTableOptions::Options(options) => { write!(f, "OPTIONS({})", display_comma_separated(options)) } + CreateTableOptions::TableProperties(options) => { + write!(f, "TBLPROPERTIES ({})", display_comma_separated(options)) + } + CreateTableOptions::Plain(options) => { + write!(f, "{}", display_separated(options, " ")) + } CreateTableOptions::None => Ok(()), } } @@ -2218,6 +2778,174 @@ pub enum CreatePolicyCommand { Delete, } +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub enum Set { + /// SQL Standard-style + /// SET a = 1; + SingleAssignment { + scope: Option, + hivevar: bool, + variable: ObjectName, + values: Vec, + }, + /// Snowflake-style + /// SET (a, b, ..) = (1, 2, ..); + ParenthesizedAssignments { + variables: Vec, + values: Vec, + }, + /// MySQL-style + /// SET a = 1, b = 2, ..; + MultipleAssignments { assignments: Vec }, + /// MS-SQL session + /// + /// See + SetSessionParam(SetSessionParamKind), + /// ```sql + /// SET [ SESSION | LOCAL ] ROLE role_name + /// ``` + /// + /// Sets session state. Examples: [ANSI][1], [Postgresql][2], [MySQL][3], and [Oracle][4] + /// + /// [1]: https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#set-role-statement + /// [2]: https://www.postgresql.org/docs/14/sql-set-role.html + /// [3]: https://dev.mysql.com/doc/refman/8.0/en/set-role.html + /// [4]: https://docs.oracle.com/cd/B19306_01/server.102/b14200/statements_10004.htm + SetRole { + /// Non-ANSI optional identifier to inform if the role is defined inside the current session (`SESSION`) or transaction (`LOCAL`). + context_modifier: Option, + /// Role name. If NONE is specified, then the current role name is removed. + role_name: Option, + }, + /// ```sql + /// SET TIME ZONE + /// ``` + /// + /// Note: this is a PostgreSQL-specific statements + /// `SET TIME ZONE ` is an alias for `SET timezone TO ` in PostgreSQL + /// However, we allow it for all dialects. + SetTimeZone { local: bool, value: Expr }, + /// ```sql + /// SET NAMES 'charset_name' [COLLATE 'collation_name'] + /// ``` + SetNames { + charset_name: Ident, + collation_name: Option, + }, + /// ```sql + /// SET NAMES DEFAULT + /// ``` + /// + /// Note: this is a MySQL-specific statement. + SetNamesDefault {}, + /// ```sql + /// SET TRANSACTION ... 
+ /// ``` + SetTransaction { + modes: Vec, + snapshot: Option, + session: bool, + }, +} + +impl Display for Set { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + Self::ParenthesizedAssignments { variables, values } => write!( + f, + "SET ({}) = ({})", + display_comma_separated(variables), + display_comma_separated(values) + ), + Self::MultipleAssignments { assignments } => { + write!(f, "SET {}", display_comma_separated(assignments)) + } + Self::SetRole { + context_modifier, + role_name, + } => { + let role_name = role_name.clone().unwrap_or_else(|| Ident::new("NONE")); + write!( + f, + "SET {modifier}ROLE {role_name}", + modifier = context_modifier + .map(|m| format!("{}", m)) + .unwrap_or_default() + ) + } + Self::SetSessionParam(kind) => write!(f, "SET {kind}"), + Self::SetTransaction { + modes, + snapshot, + session, + } => { + if *session { + write!(f, "SET SESSION CHARACTERISTICS AS TRANSACTION")?; + } else { + write!(f, "SET TRANSACTION")?; + } + if !modes.is_empty() { + write!(f, " {}", display_comma_separated(modes))?; + } + if let Some(snapshot_id) = snapshot { + write!(f, " SNAPSHOT {snapshot_id}")?; + } + Ok(()) + } + Self::SetTimeZone { local, value } => { + f.write_str("SET ")?; + if *local { + f.write_str("LOCAL ")?; + } + write!(f, "TIME ZONE {value}") + } + Self::SetNames { + charset_name, + collation_name, + } => { + write!(f, "SET NAMES {}", charset_name)?; + + if let Some(collation) = collation_name { + f.write_str(" COLLATE ")?; + f.write_str(collation)?; + }; + + Ok(()) + } + Self::SetNamesDefault {} => { + f.write_str("SET NAMES DEFAULT")?; + + Ok(()) + } + Set::SingleAssignment { + scope, + hivevar, + variable, + values, + } => { + write!( + f, + "SET {}{}{} = {}", + scope.map(|s| format!("{}", s)).unwrap_or_default(), + if *hivevar { "HIVEVAR:" } else { "" }, + variable, + display_comma_separated(values) + ) + } + } + } +} + +/// Convert a `Set` into a `Statement`. +/// Convenience function, instead of writing `Statement::Set(Set::Set...{...})` +impl From for Statement { + fn from(set: Set) -> Self { + Statement::Set(set) + } +} + /// A top-level statement (SELECT, INSERT, CREATE, etc.) #[allow(clippy::large_enum_variant)] #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] @@ -2241,7 +2969,9 @@ pub enum Statement { cache_metadata: bool, noscan: bool, compute_statistics: bool, + has_table_keyword: bool, }, + Set(Set), /// ```sql /// TRUNCATE /// ``` @@ -2259,7 +2989,7 @@ pub enum Statement { identity: Option, /// Postgres-specific option /// [ CASCADE | RESTRICT ] - cascade: Option, + cascade: Option, /// ClickHouse-specific option /// [ ON CLUSTER cluster_name ] /// @@ -2306,6 +3036,14 @@ pub enum Statement { file_format: Option, source: Box, }, + /// A `CASE` statement. + Case(CaseStatement), + /// An `IF` statement. + If(IfStatement), + /// A `WHILE` statement. + While(WhileStatement), + /// A `RAISE` statement. + Raise(RaiseStatement), /// ```sql /// CALL /// ``` @@ -2328,26 +3066,38 @@ pub enum Statement { values: Vec>, }, /// ```sql - /// COPY INTO + /// COPY INTO | /// ``` - /// See + /// See: + /// + /// + /// /// Copy Into syntax available for Snowflake is different than the one implemented in /// Postgres. Although they share common prefix, it is reasonable to implement them /// in different enums. This can be refactored later once custom dialects /// are allowed to have custom Statements. 
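Illustrative aside (not part of the patch): a sketch of the reworked `SET` representation above, combining the `From<Set> for Statement` conversion with the `SET TIME ZONE` formatting. It assumes `Statement::Set` delegates its display to the inner `Set`, and that `Value` converts into `ValueWithSpan`.

```rust
use sqlparser::ast::{Expr, Set, Statement, Value};

fn main() {
    let set = Set::SetTimeZone {
        local: false,
        value: Expr::value(Value::SingleQuotedString("UTC".to_string())),
    };

    // `impl From<Set> for Statement` wraps this as `Statement::Set(..)`.
    let stmt: Statement = set.into();
    assert_eq!(stmt.to_string(), "SET TIME ZONE 'UTC'");
}
```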
CopyIntoSnowflake { + kind: CopyIntoSnowflakeKind, into: ObjectName, - from_stage: ObjectName, - from_stage_alias: Option, + into_columns: Option>, + from_obj: Option, + from_obj_alias: Option, stage_params: StageParamsObject, - from_transformations: Option>, + from_transformations: Option>, + from_query: Option>, files: Option>, pattern: Option, - file_format: DataLoadingOptions, - copy_options: DataLoadingOptions, + file_format: KeyValueOptions, + copy_options: KeyValueOptions, validation_mode: Option, + partition: Option>, }, /// ```sql + /// OPEN cursor_name + /// ``` + /// Opens a cursor. + Open(OpenStatement), + /// ```sql /// CLOSE /// ``` /// Closes the portal underlying an open cursor. @@ -2364,11 +3114,13 @@ pub enum Statement { /// Column assignments assignments: Vec, /// Table which provide value to be set - from: Option, + from: Option, /// WHERE selection: Option, /// RETURNING returning: Option>, + /// SQLite-specific conflict resolution clause + or: Option, }, /// ```sql /// DELETE @@ -2378,6 +3130,10 @@ pub enum Statement { /// CREATE VIEW /// ``` CreateView { + /// True if this is a `CREATE OR ALTER VIEW` statement + /// + /// [MsSql](https://learn.microsoft.com/en-us/sql/t-sql/statements/create-view-transact-sql) + or_alter: bool, or_replace: bool, materialized: bool, /// View name @@ -2398,6 +3154,8 @@ pub enum Statement { /// if not None, has Clickhouse `TO` clause, specify the table into which to insert results /// to: Option, + /// MySQL: Optional parameters for the view algorithm, definer, and security context + params: Option, }, /// ```sql /// CREATE TABLE @@ -2421,7 +3179,7 @@ pub enum Statement { /// ```sql /// CREATE ROLE /// ``` - /// See [postgres](https://www.postgresql.org/docs/current/sql-createrole.html) + /// See [PostgreSQL](https://www.postgresql.org/docs/current/sql-createrole.html) CreateRole { names: Vec, if_not_exists: bool, @@ -2447,7 +3205,7 @@ pub enum Statement { /// ```sql /// CREATE SECRET /// ``` - /// See [duckdb](https://duckdb.org/docs/sql/statements/create_secret.html) + /// See [DuckDB](https://duckdb.org/docs/sql/statements/create_secret.html) CreateSecret { or_replace: bool, temporary: Option, @@ -2472,6 +3230,11 @@ pub enum Statement { with_check: Option, }, /// ```sql + /// CREATE CONNECTOR + /// ``` + /// See [Hive](https://cwiki.apache.org/confluence/pages/viewpage.action?pageId=27362034#LanguageManualDDL-CreateDataConnectorCreateConnector) + CreateConnector(CreateConnector), + /// ```sql /// ALTER TABLE /// ``` AlterTable { @@ -2506,6 +3269,11 @@ pub enum Statement { with_options: Vec, }, /// ```sql + /// ALTER TYPE + /// See [PostgreSQL](https://www.postgresql.org/docs/current/sql-altertype.html) + /// ``` + AlterType(AlterType), + /// ```sql /// ALTER ROLE /// ``` AlterRole { @@ -2523,6 +3291,31 @@ pub enum Statement { operation: AlterPolicyOperation, }, /// ```sql + /// ALTER CONNECTOR connector_name SET DCPROPERTIES(property_name=property_value, ...); + /// or + /// ALTER CONNECTOR connector_name SET URL new_url; + /// or + /// ALTER CONNECTOR connector_name SET OWNER [USER|ROLE] user_or_role; + /// ``` + /// (Hive-specific) + AlterConnector { + name: Ident, + properties: Option>, + url: Option, + owner: Option, + }, + /// ```sql + /// ALTER SESSION SET sessionParam + /// ALTER SESSION UNSET [ , , ... 
] + /// ``` + /// See + AlterSession { + /// true is to set for the session parameters, false is to unset + set: bool, + /// The session parameters to set or unset + session_params: KeyValueOptions, + }, + /// ```sql /// ATTACH DATABASE 'path/to/file' AS alias /// ``` /// (SQLite-specific) @@ -2589,9 +3382,17 @@ pub enum Statement { /// One or more function to drop func_desc: Vec, /// `CASCADE` or `RESTRICT` - option: Option, + drop_behavior: Option, }, /// ```sql + /// DROP DOMAIN + /// ``` + /// See [PostgreSQL](https://www.postgresql.org/docs/current/sql-dropdomain.html) + /// + /// DROP DOMAIN [ IF EXISTS ] name [, ...] [ CASCADE | RESTRICT ] + /// + DropDomain(DropDomain), + /// ```sql /// DROP PROCEDURE /// ``` DropProcedure { @@ -2599,7 +3400,7 @@ pub enum Statement { /// One or more function to drop proc_desc: Vec, /// `CASCADE` or `RESTRICT` - option: Option, + drop_behavior: Option, }, /// ```sql /// DROP SECRET @@ -2618,7 +3419,15 @@ pub enum Statement { if_exists: bool, name: Ident, table_name: ObjectName, - option: Option, + drop_behavior: Option, + }, + /// ```sql + /// DROP CONNECTOR + /// ``` + /// See [Hive](https://cwiki.apache.org/confluence/pages/viewpage.action?pageId=27362034#LanguageManualDDL-DropConnector) + DropConnector { + if_exists: bool, + name: Ident, }, /// ```sql /// DECLARE @@ -2627,7 +3436,9 @@ pub enum Statement { /// /// Note: this is a PostgreSQL-specific statement, /// but may also compatible with other SQL. - Declare { stmts: Vec }, + Declare { + stmts: Vec, + }, /// ```sql /// CREATE EXTENSION [ IF NOT EXISTS ] extension_name /// [ WITH ] [ SCHEMA schema_name ] @@ -2644,6 +3455,18 @@ pub enum Statement { version: Option, }, /// ```sql + /// DROP EXTENSION [ IF EXISTS ] name [, ...] [ CASCADE | RESTRICT ] + /// + /// Note: this is a PostgreSQL-specific statement. + /// https://www.postgresql.org/docs/current/sql-dropextension.html + /// ``` + DropExtension { + names: Vec, + if_exists: bool, + /// `CASCADE` or `RESTRICT` + cascade_or_restrict: Option, + }, + /// ```sql /// FETCH /// ``` /// Retrieve rows from a query using a cursor @@ -2654,6 +3477,7 @@ pub enum Statement { /// Cursor name name: Ident, direction: FetchDirection, + position: FetchPosition, /// Optional, It's possible to fetch rows form cursor to the table into: Option, }, @@ -2663,83 +3487,37 @@ pub enum Statement { /// /// Note: this is a Mysql-specific statement, /// but may also compatible with other SQL. - Flush { - object_type: FlushType, - location: Option, - channel: Option, - read_lock: bool, - export: bool, - tables: Vec, - }, - /// ```sql - /// DISCARD [ ALL | PLANS | SEQUENCES | TEMPORARY | TEMP ] - /// ``` - /// - /// Note: this is a PostgreSQL-specific statement, - /// but may also compatible with other SQL. - Discard { object_type: DiscardObject }, - /// ```sql - /// SET [ SESSION | LOCAL ] ROLE role_name - /// ``` - /// - /// Sets session state. Examples: [ANSI][1], [Postgresql][2], [MySQL][3], and [Oracle][4] - /// - /// [1]: https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#set-role-statement - /// [2]: https://www.postgresql.org/docs/14/sql-set-role.html - /// [3]: https://dev.mysql.com/doc/refman/8.0/en/set-role.html - /// [4]: https://docs.oracle.com/cd/B19306_01/server.102/b14200/statements_10004.htm - SetRole { - /// Non-ANSI optional identifier to inform if the role is defined inside the current session (`SESSION`) or transaction (`LOCAL`). - context_modifier: ContextModifier, - /// Role name. 
If NONE is specified, then the current role name is removed. - role_name: Option, - }, - /// ```sql - /// SET = expression; - /// SET (variable[, ...]) = (expression[, ...]); - /// ``` - /// - /// Note: this is not a standard SQL statement, but it is supported by at - /// least MySQL and PostgreSQL. Not all MySQL-specific syntactic forms are - /// supported yet. - SetVariable { - local: bool, - hivevar: bool, - variables: OneOrManyWithParens, - value: Vec, - }, - /// ```sql - /// SET TIME ZONE - /// ``` - /// - /// Note: this is a PostgreSQL-specific statements - /// `SET TIME ZONE ` is an alias for `SET timezone TO ` in PostgreSQL - SetTimeZone { local: bool, value: Expr }, - /// ```sql - /// SET NAMES 'charset_name' [COLLATE 'collation_name'] - /// ``` - /// - /// Note: this is a MySQL-specific statement. - SetNames { - charset_name: String, - collation_name: Option, + Flush { + object_type: FlushType, + location: Option, + channel: Option, + read_lock: bool, + export: bool, + tables: Vec, }, /// ```sql - /// SET NAMES DEFAULT + /// DISCARD [ ALL | PLANS | SEQUENCES | TEMPORARY | TEMP ] /// ``` /// - /// Note: this is a MySQL-specific statement. - SetNamesDefault {}, + /// Note: this is a PostgreSQL-specific statement, + /// but may also compatible with other SQL. + Discard { + object_type: DiscardObject, + }, /// `SHOW FUNCTIONS` /// /// Note: this is a Presto-specific statement. - ShowFunctions { filter: Option }, + ShowFunctions { + filter: Option, + }, /// ```sql /// SHOW /// ``` /// /// Note: this is a PostgreSQL-specific statement. - ShowVariable { variable: Vec }, + ShowVariable { + variable: Vec, + }, /// ```sql /// SHOW [GLOBAL | SESSION] STATUS [LIKE 'pattern' | WHERE expr] /// ``` @@ -2772,31 +3550,60 @@ pub enum Statement { /// ```sql /// SHOW COLUMNS /// ``` - /// - /// Note: this is a MySQL-specific statement. ShowColumns { extended: bool, full: bool, - #[cfg_attr(feature = "visitor", visit(with = "visit_relation"))] - table_name: ObjectName, - filter: Option, + show_options: ShowStatementOptions, + }, + /// ```sql + /// SHOW DATABASES + /// ``` + ShowDatabases { + terse: bool, + history: bool, + show_options: ShowStatementOptions, + }, + /// ```sql + /// SHOW SCHEMAS + /// ``` + ShowSchemas { + terse: bool, + history: bool, + show_options: ShowStatementOptions, }, /// ```sql + /// SHOW OBJECTS LIKE 'line%' IN mydb.public + /// ``` + /// Snowflake-specific statement + /// + ShowObjects(ShowObjects), + /// ```sql /// SHOW TABLES /// ``` - /// Note: this is a MySQL-specific statement. ShowTables { + terse: bool, + history: bool, extended: bool, full: bool, - db_name: Option, - filter: Option, + external: bool, + show_options: ShowStatementOptions, + }, + /// ```sql + /// SHOW VIEWS + /// ``` + ShowViews { + terse: bool, + materialized: bool, + show_options: ShowStatementOptions, }, /// ```sql /// SHOW COLLATION /// ``` /// /// Note: this is a MySQL-specific statement. - ShowCollation { filter: Option }, + ShowCollation { + filter: Option, + }, /// ```sql /// `USE ...` /// ``` @@ -2813,16 +3620,30 @@ pub enum Statement { StartTransaction { modes: Vec, begin: bool, - /// Only for SQLite + transaction: Option, modifier: Option, - }, - /// ```sql - /// SET TRANSACTION ... - /// ``` - SetTransaction { - modes: Vec, - snapshot: Option, - session: bool, + /// List of statements belonging to the `BEGIN` block. + /// Example: + /// ```sql + /// BEGIN + /// SELECT 1; + /// SELECT 2; + /// END; + /// ``` + statements: Vec, + /// Statements of an exception clause. 
+ /// Example: + /// ```sql + /// BEGIN + /// SELECT 1; + /// EXCEPTION WHEN ERROR THEN + /// SELECT 2; + /// SELECT 3; + /// END; + /// + exception_statements: Option>, + /// TRUE if the statement has an `END` keyword. + has_end_keyword: bool, }, /// ```sql /// COMMENT ON ... @@ -2840,7 +3661,17 @@ pub enum Statement { /// ```sql /// COMMIT [ TRANSACTION | WORK ] [ AND [ NO ] CHAIN ] /// ``` - Commit { chain: bool }, + /// If `end` is false + /// + /// ```sql + /// END [ TRY | CATCH ] + /// ``` + /// If `end` is true + Commit { + chain: bool, + end: bool, + modifier: Option, + }, /// ```sql /// ROLLBACK [ TRANSACTION | WORK ] [ AND [ NO ] CHAIN ] [ TO [ SAVEPOINT ] savepoint_name ] /// ``` @@ -2855,6 +3686,22 @@ pub enum Statement { /// ` | AUTHORIZATION | AUTHORIZATION ` schema_name: SchemaName, if_not_exists: bool, + /// Schema options. + /// + /// ```sql + /// CREATE SCHEMA myschema OPTIONS(key1='value1'); + /// ``` + /// + /// [BigQuery](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#create_schema_statement) + options: Option>, + /// Default collation specification for the schema. + /// + /// ```sql + /// CREATE SCHEMA myschema DEFAULT COLLATE 'und:ci'; + /// ``` + /// + /// [BigQuery](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#create_schema_statement) + default_collate_spec: Option, }, /// ```sql /// CREATE DATABASE @@ -2871,66 +3718,10 @@ pub enum Statement { /// /// Supported variants: /// 1. [Hive](https://cwiki.apache.org/confluence/display/hive/languagemanual+ddl#LanguageManualDDL-Create/Drop/ReloadFunction) - /// 2. [Postgres](https://www.postgresql.org/docs/15/sql-createfunction.html) + /// 2. [PostgreSQL](https://www.postgresql.org/docs/15/sql-createfunction.html) /// 3. [BigQuery](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#create_function_statement) - CreateFunction { - or_replace: bool, - temporary: bool, - if_not_exists: bool, - name: ObjectName, - args: Option>, - return_type: Option, - /// The expression that defines the function. - /// - /// Examples: - /// ```sql - /// AS ((SELECT 1)) - /// AS "console.log();" - /// ``` - function_body: Option, - /// Behavior attribute for the function - /// - /// IMMUTABLE | STABLE | VOLATILE - /// - /// [Postgres](https://www.postgresql.org/docs/current/sql-createfunction.html) - behavior: Option, - /// CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT - /// - /// [Postgres](https://www.postgresql.org/docs/current/sql-createfunction.html) - called_on_null: Option, - /// PARALLEL { UNSAFE | RESTRICTED | SAFE } - /// - /// [Postgres](https://www.postgresql.org/docs/current/sql-createfunction.html) - parallel: Option, - /// USING ... (Hive only) - using: Option, - /// Language used in a UDF definition. - /// - /// Example: - /// ```sql - /// CREATE FUNCTION foo() LANGUAGE js AS "console.log();" - /// ``` - /// [BigQuery](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#create_a_javascript_udf) - language: Option, - /// Determinism keyword used for non-sql UDF definitions. - /// - /// [BigQuery](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#syntax_11) - determinism_specifier: Option, - /// List of options for creating the function. - /// - /// [BigQuery](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#syntax_11) - options: Option>, - /// Connection resource for a remote function. 
- /// - /// Example: - /// ```sql - /// CREATE FUNCTION foo() - /// RETURNS FLOAT64 - /// REMOTE WITH CONNECTION us.myconnection - /// ``` - /// [BigQuery](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#create_a_remote_function) - remote_connection: Option, - }, + /// 4. [MsSql](https://learn.microsoft.com/en-us/sql/t-sql/statements/create-function-transact-sql) + CreateFunction(CreateFunction), /// CREATE TRIGGER /// /// Examples: @@ -2943,7 +3734,12 @@ pub enum Statement { /// ``` /// /// Postgres: + /// SQL Server: CreateTrigger { + /// True if this is a `CREATE OR ALTER TRIGGER` statement + /// + /// [MsSql](https://learn.microsoft.com/en-us/sql/t-sql/statements/create-trigger-transact-sql?view=sql-server-ver16#arguments) + or_alter: bool, /// The `OR REPLACE` clause is used to re-create the trigger if it already exists. /// /// Example: @@ -3004,7 +3800,9 @@ pub enum Statement { /// Triggering conditions condition: Option, /// Execute logic block - exec_body: TriggerExecBody, + exec_body: Option, + /// For SQL dialects with statement(s) for a body + statements: Option, /// The characteristic of the trigger, which include whether the trigger is `DEFERRABLE`, `INITIALLY DEFERRED`, or `INITIALLY IMMEDIATE`, characteristics: Option, }, @@ -3017,7 +3815,7 @@ pub enum Statement { DropTrigger { if_exists: bool, trigger_name: ObjectName, - table_name: ObjectName, + table_name: Option, /// `CASCADE` or `RESTRICT` option: Option, }, @@ -3028,7 +3826,7 @@ pub enum Statement { or_alter: bool, name: ObjectName, params: Option>, - body: Vec, + body: ConditionalStatements, }, /// ```sql /// CREATE MACRO @@ -3053,9 +3851,9 @@ pub enum Statement { if_not_exists: bool, name: ObjectName, stage_params: StageParamsObject, - directory_table_params: DataLoadingOptions, - file_format: DataLoadingOptions, - copy_options: DataLoadingOptions, + directory_table_params: KeyValueOptions, + file_format: KeyValueOptions, + copy_options: KeyValueOptions, comment: Option, }, /// ```sql @@ -3070,8 +3868,8 @@ pub enum Statement { /// ``` Grant { privileges: Privileges, - objects: GrantObjects, - grantees: Vec, + objects: Option, + grantees: Vec, with_grant_option: bool, granted_by: Option, }, @@ -3080,26 +3878,36 @@ pub enum Statement { /// ``` Revoke { privileges: Privileges, - objects: GrantObjects, - grantees: Vec, + objects: Option, + grantees: Vec, granted_by: Option, - cascade: bool, + cascade: Option, }, /// ```sql /// DEALLOCATE [ PREPARE ] { name | ALL } /// ``` /// /// Note: this is a PostgreSQL-specific statement. - Deallocate { name: Ident, prepare: bool }, + Deallocate { + name: Ident, + prepare: bool, + }, /// ```sql - /// EXECUTE name [ ( parameter [, ...] ) ] [USING ] + /// An `EXECUTE` statement /// ``` /// - /// Note: this is a PostgreSQL-specific statement. + /// Postgres: + /// MSSQL: + /// BigQuery: + /// Snowflake: Execute { - name: Ident, + name: Option, parameters: Vec, - using: Vec, + has_parentheses: bool, + /// Is this an `EXECUTE IMMEDIATE` + immediate: bool, + into: Vec, + using: Vec, }, /// ```sql /// PREPARE name [ ( data_type [, ...] 
) ] AS statement @@ -3115,7 +3923,7 @@ pub enum Statement { /// KILL [CONNECTION | QUERY | MUTATION] /// ``` /// - /// See + /// See /// See Kill { modifier: Option, @@ -3155,6 +3963,9 @@ pub enum Statement { /// /// [SQLite](https://sqlite.org/lang_explain.html) query_plan: bool, + /// `EXPLAIN ESTIMATE` + /// [Clickhouse](https://clickhouse.com/docs/en/sql-reference/statements/explain#explain-estimate) + estimate: bool, /// A SQL query that specifies what to explain statement: Box, /// Optional output format of explain @@ -3166,11 +3977,15 @@ pub enum Statement { /// SAVEPOINT /// ``` /// Define a new savepoint within the current transaction - Savepoint { name: Ident }, + Savepoint { + name: Ident, + }, /// ```sql /// RELEASE [ SAVEPOINT ] savepoint_name /// ``` - ReleaseSavepoint { name: Ident }, + ReleaseSavepoint { + name: Ident, + }, /// A `MERGE` statement. /// /// ```sql @@ -3178,6 +3993,7 @@ pub enum Statement { /// ``` /// [Snowflake](https://docs.snowflake.com/en/sql-reference/sql/merge) /// [BigQuery](https://cloud.google.com/bigquery/docs/reference/standard-sql/dml-syntax#merge_statement) + /// [MSSQL](https://learn.microsoft.com/en-us/sql/t-sql/statements/merge-transact-sql?view=sql-server-ver16) Merge { /// optional INTO keyword into: bool, @@ -3189,6 +4005,8 @@ pub enum Statement { on: Box, /// Specifies the actions to perform when values match or do not match. clauses: Vec, + // Specifies the output to save changes in MSSQL + output: Option, }, /// ```sql /// CACHE [ FLAG ] TABLE [ OPTIONS('K1' = 'V1', 'K2' = V2) ] [ AS ] [ ] @@ -3231,6 +4049,8 @@ pub enum Statement { sequence_options: Vec, owned_by: Option, }, + /// A `CREATE DOMAIN` statement. + CreateDomain(CreateDomain), /// ```sql /// CREATE TYPE /// ``` @@ -3250,7 +4070,9 @@ pub enum Statement { /// LOCK TABLES [READ [LOCAL] | [LOW_PRIORITY] WRITE] /// ``` /// Note: this is a MySQL-specific statement. See - LockTables { tables: Vec }, + LockTables { + tables: Vec, + }, /// ```sql /// UNLOCK TABLES /// ``` @@ -3278,6 +4100,107 @@ pub enum Statement { include_final: bool, deduplicate: Option, }, + /// ```sql + /// LISTEN + /// ``` + /// listen for a notification channel + /// + /// See Postgres + LISTEN { + channel: Ident, + }, + /// ```sql + /// UNLISTEN + /// ``` + /// stop listening for a notification + /// + /// See Postgres + UNLISTEN { + channel: Ident, + }, + /// ```sql + /// NOTIFY channel [ , payload ] + /// ``` + /// send a notification event together with an optional “payload” string to channel + /// + /// See Postgres + NOTIFY { + channel: Ident, + payload: Option, + }, + /// ```sql + /// LOAD DATA [LOCAL] INPATH 'filepath' [OVERWRITE] INTO TABLE tablename + /// [PARTITION (partcol1=val1, partcol2=val2 ...)] + /// [INPUTFORMAT 'inputformat' SERDE 'serde'] + /// ``` + /// Loading files into tables + /// + /// See Hive + LoadData { + local: bool, + inpath: String, + overwrite: bool, + table_name: ObjectName, + partitioned: Option>, + table_format: Option, + }, + /// ```sql + /// Rename TABLE tbl_name TO new_tbl_name[, tbl_name2 TO new_tbl_name2] ... 
+ /// ``` + /// Renames one or more tables + /// + /// See Mysql + RenameTable(Vec), + /// Snowflake `LIST` + /// See: + List(FileStagingCommand), + /// Snowflake `REMOVE` + /// See: + Remove(FileStagingCommand), + /// RaiseError (MSSQL) + /// RAISERROR ( { msg_id | msg_str | @local_variable } + /// { , severity , state } + /// [ , argument [ , ...n ] ] ) + /// [ WITH option [ , ...n ] ] + /// See + RaisError { + message: Box, + severity: Box, + state: Box, + arguments: Vec, + options: Vec, + }, + /// ```sql + /// PRINT msg_str | @local_variable | string_expr + /// ``` + /// + /// See: + Print(PrintStatement), + /// ```sql + /// RETURN [ expression ] + /// ``` + /// + /// See [ReturnStatement] + Return(ReturnStatement), +} + +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub enum RaisErrorOption { + Log, + NoWait, + SetError, +} + +impl fmt::Display for RaisErrorOption { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + RaisErrorOption::Log => write!(f, "LOG"), + RaisErrorOption::NoWait => write!(f, "NOWAIT"), + RaisErrorOption::SetError => write!(f, "SETERROR"), + } + } } impl fmt::Display for Statement { @@ -3347,6 +4270,7 @@ impl fmt::Display for Statement { verbose, analyze, query_plan, + estimate, statement, format, options, @@ -3359,6 +4283,9 @@ impl fmt::Display for Statement { if *analyze { write!(f, "ANALYZE ")?; } + if *estimate { + write!(f, "ESTIMATE ")?; + } if *verbose { write!(f, "VERBOSE ")?; @@ -3382,11 +4309,10 @@ impl fmt::Display for Statement { Statement::Fetch { name, direction, + position, into, } => { - write!(f, "FETCH {direction} ")?; - - write!(f, "IN {name}")?; + write!(f, "FETCH {direction} {position} {name}")?; if let Some(into) = into { write!(f, " INTO {into}")?; @@ -3455,8 +4381,8 @@ impl fmt::Display for Statement { } if let Some(cascade) = cascade { match cascade { - TruncateCascadeOption::Cascade => write!(f, " CASCADE")?, - TruncateCascadeOption::Restrict => write!(f, " RESTRICT")?, + CascadeOption::Cascade => write!(f, " CASCADE")?, + CascadeOption::Restrict => write!(f, " RESTRICT")?, } } @@ -3470,6 +4396,18 @@ impl fmt::Display for Statement { } Ok(()) } + Statement::Case(stmt) => { + write!(f, "{stmt}") + } + Statement::If(stmt) => { + write!(f, "{stmt}") + } + Statement::While(stmt) => { + write!(f, "{stmt}") + } + Statement::Raise(stmt) => { + write!(f, "{stmt}") + } Statement::AttachDatabase { schema_name, database_file_name, @@ -3520,8 +4458,13 @@ impl fmt::Display for Statement { cache_metadata, noscan, compute_statistics, + has_table_keyword, } => { - write!(f, "ANALYZE TABLE {table_name}")?; + write!( + f, + "ANALYZE{}{table_name}", + if *has_table_keyword { " TABLE " } else { " " } + )?; if let Some(ref parts) = partitions { if !parts.is_empty() { write!(f, " PARTITION ({})", display_comma_separated(parts))?; @@ -3606,13 +4549,21 @@ impl fmt::Display for Statement { from, selection, returning, + or, } => { - write!(f, "UPDATE {table}")?; + write!(f, "UPDATE ")?; + if let Some(or) = or { + write!(f, "{or} ")?; + } + write!(f, "{table}")?; + if let Some(UpdateTableFromKind::BeforeSet(from)) = from { + write!(f, " FROM {}", display_comma_separated(from))?; + } if !assignments.is_empty() { write!(f, " SET {}", display_comma_separated(assignments))?; } - if let Some(from) = from { - write!(f, " FROM {from}")?; + if let Some(UpdateTableFromKind::AfterSet(from)) = from { + write!(f, " 
FROM {}", display_comma_separated(from))?; } if let Some(selection) = selection { write!(f, " WHERE {selection}")?; @@ -3623,6 +4574,7 @@ impl fmt::Display for Statement { Ok(()) } Statement::Delete(delete) => write!(f, "{delete}"), + Statement::Open(open) => write!(f, "{open}"), Statement::Close { cursor } => { write!(f, "CLOSE {cursor}")?; @@ -3647,76 +4599,10 @@ impl fmt::Display for Statement { } Ok(()) } - Statement::CreateFunction { - or_replace, - temporary, - if_not_exists, - name, - args, - return_type, - function_body, - language, - behavior, - called_on_null, - parallel, - using, - determinism_specifier, - options, - remote_connection, - } => { - write!( - f, - "CREATE {or_replace}{temp}FUNCTION {if_not_exists}{name}", - temp = if *temporary { "TEMPORARY " } else { "" }, - or_replace = if *or_replace { "OR REPLACE " } else { "" }, - if_not_exists = if *if_not_exists { "IF NOT EXISTS " } else { "" }, - )?; - if let Some(args) = args { - write!(f, "({})", display_comma_separated(args))?; - } - if let Some(return_type) = return_type { - write!(f, " RETURNS {return_type}")?; - } - if let Some(determinism_specifier) = determinism_specifier { - write!(f, " {determinism_specifier}")?; - } - if let Some(language) = language { - write!(f, " LANGUAGE {language}")?; - } - if let Some(behavior) = behavior { - write!(f, " {behavior}")?; - } - if let Some(called_on_null) = called_on_null { - write!(f, " {called_on_null}")?; - } - if let Some(parallel) = parallel { - write!(f, " {parallel}")?; - } - if let Some(remote_connection) = remote_connection { - write!(f, " REMOTE WITH CONNECTION {remote_connection}")?; - } - if let Some(CreateFunctionBody::AsBeforeOptions(function_body)) = function_body { - write!(f, " AS {function_body}")?; - } - if let Some(CreateFunctionBody::Return(function_body)) = function_body { - write!(f, " RETURN {function_body}")?; - } - if let Some(using) = using { - write!(f, " {using}")?; - } - if let Some(options) = options { - write!( - f, - " OPTIONS({})", - display_comma_separated(options.as_slice()) - )?; - } - if let Some(CreateFunctionBody::AsAfterOptions(function_body)) = function_body { - write!(f, " AS {function_body}")?; - } - Ok(()) - } + Statement::CreateFunction(create_function) => create_function.fmt(f), + Statement::CreateDomain(create_domain) => create_domain.fmt(f), Statement::CreateTrigger { + or_alter, or_replace, is_constraint, name, @@ -3729,19 +4615,30 @@ impl fmt::Display for Statement { condition, include_each, exec_body, + statements, characteristics, } => { write!( f, - "CREATE {or_replace}{is_constraint}TRIGGER {name} {period}", + "CREATE {or_alter}{or_replace}{is_constraint}TRIGGER {name} ", + or_alter = if *or_alter { "OR ALTER " } else { "" }, or_replace = if *or_replace { "OR REPLACE " } else { "" }, is_constraint = if *is_constraint { "CONSTRAINT " } else { "" }, )?; - if !events.is_empty() { - write!(f, " {}", display_separated(events, " OR "))?; + if exec_body.is_some() { + write!(f, "{period}")?; + if !events.is_empty() { + write!(f, " {}", display_separated(events, " OR "))?; + } + write!(f, " ON {table_name}")?; + } else { + write!(f, "ON {table_name}")?; + write!(f, " {period}")?; + if !events.is_empty() { + write!(f, " {}", display_separated(events, ", "))?; + } } - write!(f, " ON {table_name}")?; if let Some(referenced_table_name) = referenced_table_name { write!(f, " FROM {referenced_table_name}")?; @@ -3757,13 +4654,19 @@ impl fmt::Display for Statement { if *include_each { write!(f, " FOR EACH {trigger_object}")?; - } else { + 
} else if exec_body.is_some() { write!(f, " FOR {trigger_object}")?; } if let Some(condition) = condition { write!(f, " WHEN {condition}")?; } - write!(f, " EXECUTE {exec_body}") + if let Some(exec_body) = exec_body { + write!(f, " EXECUTE {exec_body}")?; + } + if let Some(statements) = statements { + write!(f, " AS {statements}")?; + } + Ok(()) } Statement::DropTrigger { if_exists, @@ -3775,7 +4678,10 @@ impl fmt::Display for Statement { if *if_exists { write!(f, " IF EXISTS")?; } - write!(f, " {trigger_name} ON {table_name}")?; + match &table_name { + Some(table_name) => write!(f, " {trigger_name} ON {table_name}")?, + None => write!(f, " {trigger_name}")?, + }; if let Some(option) = option { write!(f, " {option}")?; } @@ -3799,11 +4705,8 @@ impl fmt::Display for Statement { write!(f, " ({})", display_comma_separated(p))?; } } - write!( - f, - " AS BEGIN {body} END", - body = display_separated(body, "; ") - ) + + write!(f, " AS {body}") } Statement::CreateMacro { or_replace, @@ -3828,6 +4731,7 @@ impl fmt::Display for Statement { Ok(()) } Statement::CreateView { + or_alter, name, or_replace, columns, @@ -3840,11 +4744,20 @@ impl fmt::Display for Statement { if_not_exists, temporary, to, + params, } => { write!( f, - "CREATE {or_replace}{materialized}{temporary}VIEW {if_not_exists}{name}{to}", + "CREATE {or_alter}{or_replace}", + or_alter = if *or_alter { "OR ALTER " } else { "" }, or_replace = if *or_replace { "OR REPLACE " } else { "" }, + )?; + if let Some(params) = params { + params.fmt(f)?; + } + write!( + f, + "{materialized}{temporary}VIEW {if_not_exists}{name}{to}", materialized = if *materialized { "MATERIALIZED " } else { "" }, name = name, temporary = if *temporary { "TEMPORARY " } else { "" }, @@ -3880,6 +4793,36 @@ impl fmt::Display for Statement { Ok(()) } Statement::CreateTable(create_table) => create_table.fmt(f), + Statement::LoadData { + local, + inpath, + overwrite, + table_name, + partitioned, + table_format, + } => { + write!( + f, + "LOAD DATA {local}INPATH '{inpath}' {overwrite}INTO TABLE {table_name}", + local = if *local { "LOCAL " } else { "" }, + inpath = inpath, + overwrite = if *overwrite { "OVERWRITE " } else { "" }, + table_name = table_name, + )?; + if let Some(ref parts) = &partitioned { + if !parts.is_empty() { + write!(f, " PARTITION ({})", display_comma_separated(parts))?; + } + } + if let Some(HiveLoadDataFormat { + serde, + input_format, + }) = &table_format + { + write!(f, " INPUTFORMAT {input_format} SERDE {serde}")?; + } + Ok(()) + } Statement::CreateVirtualTable { name, if_not_exists, @@ -3927,6 +4870,21 @@ impl fmt::Display for Statement { Ok(()) } + Statement::DropExtension { + names, + if_exists, + cascade_or_restrict, + } => { + write!(f, "DROP EXTENSION")?; + if *if_exists { + write!(f, " IF EXISTS")?; + } + write!(f, " {}", display_comma_separated(names))?; + if let Some(cascade_or_restrict) = cascade_or_restrict { + write!(f, " {cascade_or_restrict}")?; + } + Ok(()) + } Statement::CreateRole { names, if_not_exists, @@ -4096,6 +5054,7 @@ impl fmt::Display for Statement { Ok(()) } + Statement::CreateConnector(create_connector) => create_connector.fmt(f), Statement::AlterTable { name, if_exists, @@ -4141,17 +5100,65 @@ impl fmt::Display for Statement { if !columns.is_empty() { write!(f, " ({})", display_comma_separated(columns))?; } - write!(f, " AS {query}") - } - Statement::AlterRole { name, operation } => { - write!(f, "ALTER ROLE {name} {operation}") + write!(f, " AS {query}") + } + Statement::AlterType(AlterType { name, operation }) => { 
+ write!(f, "ALTER TYPE {name} {operation}") + } + Statement::AlterRole { name, operation } => { + write!(f, "ALTER ROLE {name} {operation}") + } + Statement::AlterPolicy { + name, + table_name, + operation, + } => { + write!(f, "ALTER POLICY {name} ON {table_name}{operation}") + } + Statement::AlterConnector { + name, + properties, + url, + owner, + } => { + write!(f, "ALTER CONNECTOR {name}")?; + if let Some(properties) = properties { + write!( + f, + " SET DCPROPERTIES({})", + display_comma_separated(properties) + )?; + } + if let Some(url) = url { + write!(f, " SET URL '{url}'")?; + } + if let Some(owner) = owner { + write!(f, " SET OWNER {owner}")?; + } + Ok(()) } - Statement::AlterPolicy { - name, - table_name, - operation, + Statement::AlterSession { + set, + session_params, } => { - write!(f, "ALTER POLICY {name} ON {table_name}{operation}") + write!( + f, + "ALTER SESSION {set}", + set = if *set { "SET" } else { "UNSET" } + )?; + if !session_params.options.is_empty() { + if *set { + write!(f, " {}", session_params)?; + } else { + let options = session_params + .options + .iter() + .map(|p| p.option_name.clone()) + .collect::>(); + write!(f, " {}", display_separated(&options, ", "))?; + } + } + Ok(()) } Statement::Drop { object_type, @@ -4175,7 +5182,7 @@ impl fmt::Display for Statement { Statement::DropFunction { if_exists, func_desc, - option, + drop_behavior, } => { write!( f, @@ -4183,7 +5190,22 @@ impl fmt::Display for Statement { if *if_exists { " IF EXISTS" } else { "" }, display_comma_separated(func_desc), )?; - if let Some(op) = option { + if let Some(op) = drop_behavior { + write!(f, " {op}")?; + } + Ok(()) + } + Statement::DropDomain(DropDomain { + if_exists, + name, + drop_behavior, + }) => { + write!( + f, + "DROP DOMAIN{} {name}", + if *if_exists { " IF EXISTS" } else { "" }, + )?; + if let Some(op) = drop_behavior { write!(f, " {op}")?; } Ok(()) @@ -4191,7 +5213,7 @@ impl fmt::Display for Statement { Statement::DropProcedure { if_exists, proc_desc, - option, + drop_behavior, } => { write!( f, @@ -4199,7 +5221,7 @@ impl fmt::Display for Statement { if *if_exists { " IF EXISTS" } else { "" }, display_comma_separated(proc_desc), )?; - if let Some(op) = option { + if let Some(op) = drop_behavior { write!(f, " {op}")?; } Ok(()) @@ -4228,76 +5250,31 @@ impl fmt::Display for Statement { if_exists, name, table_name, - option, + drop_behavior, } => { write!(f, "DROP POLICY")?; if *if_exists { write!(f, " IF EXISTS")?; } write!(f, " {name} ON {table_name}")?; - if let Some(option) = option { - write!(f, " {option}")?; + if let Some(drop_behavior) = drop_behavior { + write!(f, " {drop_behavior}")?; } Ok(()) } - Statement::Discard { object_type } => { - write!(f, "DISCARD {object_type}")?; - Ok(()) - } - Self::SetRole { - context_modifier, - role_name, - } => { - let role_name = role_name.clone().unwrap_or_else(|| Ident::new("NONE")); - write!(f, "SET{context_modifier} ROLE {role_name}") - } - Statement::SetVariable { - local, - variables, - hivevar, - value, - } => { - f.write_str("SET ")?; - if *local { - f.write_str("LOCAL ")?; - } - let parenthesized = matches!(variables, OneOrManyWithParens::Many(_)); + Statement::DropConnector { if_exists, name } => { write!( f, - "{hivevar}{name} = {l_paren}{value}{r_paren}", - hivevar = if *hivevar { "HIVEVAR:" } else { "" }, - name = variables, - l_paren = parenthesized.then_some("(").unwrap_or_default(), - value = display_comma_separated(value), - r_paren = parenthesized.then_some(")").unwrap_or_default(), - ) - } - 
Statement::SetTimeZone { local, value } => { - f.write_str("SET ")?; - if *local { - f.write_str("LOCAL ")?; - } - write!(f, "TIME ZONE {value}") - } - Statement::SetNames { - charset_name, - collation_name, - } => { - f.write_str("SET NAMES ")?; - f.write_str(charset_name)?; - - if let Some(collation) = collation_name { - f.write_str(" COLLATE ")?; - f.write_str(collation)?; - }; - + "DROP CONNECTOR {if_exists}{name}", + if_exists = if *if_exists { "IF EXISTS " } else { "" } + )?; Ok(()) } - Statement::SetNamesDefault {} => { - f.write_str("SET NAMES DEFAULT")?; - + Statement::Discard { object_type } => { + write!(f, "DISCARD {object_type}")?; Ok(()) } + Self::Set(set) => write!(f, "{set}"), Statement::ShowVariable { variable } => { write!(f, "SHOW")?; if !variable.is_empty() { @@ -4348,39 +5325,83 @@ impl fmt::Display for Statement { Statement::ShowColumns { extended, full, - table_name, - filter, + show_options, } => { write!( f, - "SHOW {extended}{full}COLUMNS FROM {table_name}", + "SHOW {extended}{full}COLUMNS{show_options}", extended = if *extended { "EXTENDED " } else { "" }, full = if *full { "FULL " } else { "" }, - table_name = table_name, )?; - if let Some(filter) = filter { - write!(f, " {filter}")?; - } + Ok(()) + } + Statement::ShowDatabases { + terse, + history, + show_options, + } => { + write!( + f, + "SHOW {terse}DATABASES{history}{show_options}", + terse = if *terse { "TERSE " } else { "" }, + history = if *history { " HISTORY" } else { "" }, + )?; + Ok(()) + } + Statement::ShowSchemas { + terse, + history, + show_options, + } => { + write!( + f, + "SHOW {terse}SCHEMAS{history}{show_options}", + terse = if *terse { "TERSE " } else { "" }, + history = if *history { " HISTORY" } else { "" }, + )?; + Ok(()) + } + Statement::ShowObjects(ShowObjects { + terse, + show_options, + }) => { + write!( + f, + "SHOW {terse}OBJECTS{show_options}", + terse = if *terse { "TERSE " } else { "" }, + )?; Ok(()) } Statement::ShowTables { + terse, + history, extended, full, - db_name, - filter, + external, + show_options, } => { write!( f, - "SHOW {extended}{full}TABLES", + "SHOW {terse}{extended}{full}{external}TABLES{history}{show_options}", + terse = if *terse { "TERSE " } else { "" }, extended = if *extended { "EXTENDED " } else { "" }, full = if *full { "FULL " } else { "" }, + external = if *external { "EXTERNAL " } else { "" }, + history = if *history { " HISTORY" } else { "" }, + )?; + Ok(()) + } + Statement::ShowViews { + terse, + materialized, + show_options, + } => { + write!( + f, + "SHOW {terse}{materialized}VIEWS{show_options}", + terse = if *terse { "TERSE " } else { "" }, + materialized = if *materialized { "MATERIALIZED " } else { "" } )?; - if let Some(db_name) = db_name { - write!(f, " FROM {db_name}")?; - } - if let Some(filter) = filter { - write!(f, " {filter}")?; - } Ok(()) } Statement::ShowFunctions { filter } => { @@ -4401,43 +5422,61 @@ impl fmt::Display for Statement { Statement::StartTransaction { modes, begin: syntax_begin, + transaction, modifier, + statements, + exception_statements, + has_end_keyword, } => { if *syntax_begin { if let Some(modifier) = *modifier { - write!(f, "BEGIN {} TRANSACTION", modifier)?; + write!(f, "BEGIN {}", modifier)?; } else { - write!(f, "BEGIN TRANSACTION")?; + write!(f, "BEGIN")?; } } else { - write!(f, "START TRANSACTION")?; + write!(f, "START")?; + } + if let Some(transaction) = transaction { + write!(f, " {transaction}")?; } if !modes.is_empty() { write!(f, " {}", display_comma_separated(modes))?; } + if !statements.is_empty() { 
+ write!(f, " ")?; + format_statement_list(f, statements)?; + } + if let Some(exception_statements) = exception_statements { + write!(f, " EXCEPTION WHEN ERROR THEN")?; + if !exception_statements.is_empty() { + write!(f, " ")?; + format_statement_list(f, exception_statements)?; + } + } + if *has_end_keyword { + write!(f, " END")?; + } Ok(()) } - Statement::SetTransaction { - modes, - snapshot, - session, + Statement::Commit { + chain, + end: end_syntax, + modifier, } => { - if *session { - write!(f, "SET SESSION CHARACTERISTICS AS TRANSACTION")?; + if *end_syntax { + write!(f, "END")?; + if let Some(modifier) = *modifier { + write!(f, " {}", modifier)?; + } + if *chain { + write!(f, " AND CHAIN")?; + } } else { - write!(f, "SET TRANSACTION")?; - } - if !modes.is_empty() { - write!(f, " {}", display_comma_separated(modes))?; - } - if let Some(snapshot_id) = snapshot { - write!(f, " SNAPSHOT {snapshot_id}")?; + write!(f, "COMMIT{}", if *chain { " AND CHAIN" } else { "" })?; } Ok(()) } - Statement::Commit { chain } => { - write!(f, "COMMIT{}", if *chain { " AND CHAIN" } else { "" },) - } Statement::Rollback { chain, savepoint } => { write!(f, "ROLLBACK")?; @@ -4454,12 +5493,26 @@ impl fmt::Display for Statement { Statement::CreateSchema { schema_name, if_not_exists, - } => write!( - f, - "CREATE SCHEMA {if_not_exists}{name}", - if_not_exists = if *if_not_exists { "IF NOT EXISTS " } else { "" }, - name = schema_name - ), + options, + default_collate_spec, + } => { + write!( + f, + "CREATE SCHEMA {if_not_exists}{name}", + if_not_exists = if *if_not_exists { "IF NOT EXISTS " } else { "" }, + name = schema_name + )?; + + if let Some(collate) = default_collate_spec { + write!(f, " DEFAULT COLLATE {collate}")?; + } + + if let Some(options) = options { + write!(f, " OPTIONS({})", display_comma_separated(options))?; + } + + Ok(()) + } Statement::Assert { condition, message } => { write!(f, "ASSERT {condition}")?; if let Some(m) = message { @@ -4475,7 +5528,9 @@ impl fmt::Display for Statement { granted_by, } => { write!(f, "GRANT {privileges} ")?; - write!(f, "ON {objects} ")?; + if let Some(objects) = objects { + write!(f, "ON {objects} ")?; + } write!(f, "TO {}", display_comma_separated(grantees))?; if *with_grant_option { write!(f, " WITH GRANT OPTION")?; @@ -4493,12 +5548,16 @@ impl fmt::Display for Statement { cascade, } => { write!(f, "REVOKE {privileges} ")?; - write!(f, "ON {objects} ")?; + if let Some(objects) = objects { + write!(f, "ON {objects} ")?; + } write!(f, "FROM {}", display_comma_separated(grantees))?; if let Some(grantor) = granted_by { write!(f, " GRANTED BY {grantor}")?; } - write!(f, " {}", if *cascade { "CASCADE" } else { "RESTRICT" })?; + if let Some(cascade) = cascade { + write!(f, " {}", cascade)?; + } Ok(()) } Statement::Deallocate { name, prepare } => write!( @@ -4510,11 +5569,26 @@ impl fmt::Display for Statement { Statement::Execute { name, parameters, + has_parentheses, + immediate, + into, using, } => { - write!(f, "EXECUTE {name}")?; - if !parameters.is_empty() { - write!(f, "({})", display_comma_separated(parameters))?; + let (open, close) = if *has_parentheses { + ("(", ")") + } else { + (if parameters.is_empty() { "" } else { " " }, "") + }; + write!(f, "EXECUTE")?; + if *immediate { + write!(f, " IMMEDIATE")?; + } + if let Some(name) = name { + write!(f, " {name}")?; + } + write!(f, "{open}{}{close}", display_comma_separated(parameters),)?; + if !into.is_empty() { + write!(f, " INTO {}", display_comma_separated(into))?; } if !using.is_empty() { write!(f, " USING 
{}", display_comma_separated(using))?; @@ -4562,6 +5636,7 @@ impl fmt::Display for Statement { source, on, clauses, + output, } => { write!( f, @@ -4569,7 +5644,11 @@ impl fmt::Display for Statement { int = if *into { " INTO" } else { "" } )?; write!(f, "ON {on} ")?; - write!(f, "{}", display_separated(clauses, " ")) + write!(f, "{}", display_separated(clauses, " "))?; + if let Some(output) = output { + write!(f, " {output}")?; + } + Ok(()) } Statement::Cache { table_name, @@ -4670,60 +5749,73 @@ impl fmt::Display for Statement { Ok(()) } Statement::CopyIntoSnowflake { + kind, into, - from_stage, - from_stage_alias, + into_columns, + from_obj, + from_obj_alias, stage_params, from_transformations, + from_query, files, pattern, file_format, copy_options, validation_mode, + partition, } => { write!(f, "COPY INTO {}", into)?; - if from_transformations.is_none() { - // Standard data load - write!(f, " FROM {}{}", from_stage, stage_params)?; - if from_stage_alias.as_ref().is_some() { - write!(f, " AS {}", from_stage_alias.as_ref().unwrap())?; - } - } else { + if let Some(into_columns) = into_columns { + write!(f, " ({})", display_comma_separated(into_columns))?; + } + if let Some(from_transformations) = from_transformations { // Data load with transformation - write!( - f, - " FROM (SELECT {} FROM {}{}", - display_separated(from_transformations.as_ref().unwrap(), ", "), - from_stage, - stage_params, - )?; - if from_stage_alias.as_ref().is_some() { - write!(f, " AS {}", from_stage_alias.as_ref().unwrap())?; + if let Some(from_stage) = from_obj { + write!( + f, + " FROM (SELECT {} FROM {}{}", + display_separated(from_transformations, ", "), + from_stage, + stage_params + )?; + } + if let Some(from_obj_alias) = from_obj_alias { + write!(f, " AS {}", from_obj_alias)?; } write!(f, ")")?; + } else if let Some(from_obj) = from_obj { + // Standard data load + write!(f, " FROM {}{}", from_obj, stage_params)?; + if let Some(from_obj_alias) = from_obj_alias { + write!(f, " AS {from_obj_alias}")?; + } + } else if let Some(from_query) = from_query { + // Data unload from query + write!(f, " FROM ({from_query})")?; } - if files.is_some() { - write!( - f, - " FILES = ('{}')", - display_separated(files.as_ref().unwrap(), "', '") - )?; + + if let Some(files) = files { + write!(f, " FILES = ('{}')", display_separated(files, "', '"))?; } - if pattern.is_some() { - write!(f, " PATTERN = '{}'", pattern.as_ref().unwrap())?; + if let Some(pattern) = pattern { + write!(f, " PATTERN = '{}'", pattern)?; + } + if let Some(partition) = partition { + write!(f, " PARTITION BY {partition}")?; } if !file_format.options.is_empty() { write!(f, " FILE_FORMAT=({})", file_format)?; } if !copy_options.options.is_empty() { - write!(f, " COPY_OPTIONS=({})", copy_options)?; + match kind { + CopyIntoSnowflakeKind::Table => { + write!(f, " COPY_OPTIONS=({})", copy_options)? 
+ } + CopyIntoSnowflakeKind::Location => write!(f, " {copy_options}")?, + } } - if validation_mode.is_some() { - write!( - f, - " VALIDATION_MODE = {}", - validation_mode.as_ref().unwrap() - )?; + if let Some(validation_mode) = validation_mode { + write!(f, " VALIDATION_MODE = {}", validation_mode)?; } Ok(()) } @@ -4782,6 +5874,45 @@ impl fmt::Display for Statement { } Ok(()) } + Statement::LISTEN { channel } => { + write!(f, "LISTEN {channel}")?; + Ok(()) + } + Statement::UNLISTEN { channel } => { + write!(f, "UNLISTEN {channel}")?; + Ok(()) + } + Statement::NOTIFY { channel, payload } => { + write!(f, "NOTIFY {channel}")?; + if let Some(payload) = payload { + write!(f, ", '{payload}'")?; + } + Ok(()) + } + Statement::RenameTable(rename_tables) => { + write!(f, "RENAME TABLE {}", display_comma_separated(rename_tables)) + } + Statement::RaisError { + message, + severity, + state, + arguments, + options, + } => { + write!(f, "RAISERROR({message}, {severity}, {state}")?; + if !arguments.is_empty() { + write!(f, ", {}", display_comma_separated(arguments))?; + } + write!(f, ")")?; + if !options.is_empty() { + write!(f, " WITH {}", display_comma_separated(options))?; + } + Ok(()) + } + Statement::Print(s) => write!(f, "{s}"), + Statement::Return(r) => write!(f, "{r}"), + Statement::List(command) => write!(f, "LIST {command}"), + Statement::Remove(command) => write!(f, "REMOVE {command}"), } } } @@ -4845,6 +5976,28 @@ impl fmt::Display for SequenceOptions { } } +/// Assignment for a `SET` statement (name [=|TO] value) +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub struct SetAssignment { + pub scope: Option, + pub name: ObjectName, + pub value: Expr, +} + +impl fmt::Display for SetAssignment { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!( + f, + "{}{} = {}", + self.scope.map(|s| format!("{}", s)).unwrap_or_default(), + self.name, + self.value + ) + } +} + /// Target of a `TRUNCATE TABLE` command /// /// Note this is its own struct because `visit_relation` requires an `ObjectName` (not a `Vec`) @@ -4873,16 +6026,43 @@ pub enum TruncateIdentityOption { Continue, } -/// PostgreSQL cascade option for TRUNCATE table +/// Cascade/restrict option for Postgres TRUNCATE table, MySQL GRANT/REVOKE, etc. 
/// [ CASCADE | RESTRICT ] #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub enum TruncateCascadeOption { +pub enum CascadeOption { Cascade, Restrict, } +impl Display for CascadeOption { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + CascadeOption::Cascade => write!(f, "CASCADE"), + CascadeOption::Restrict => write!(f, "RESTRICT"), + } + } +} + +/// Transaction started with [ TRANSACTION | WORK ] +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub enum BeginTransactionKind { + Transaction, + Work, +} + +impl Display for BeginTransactionKind { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + BeginTransactionKind::Transaction => write!(f, "TRANSACTION"), + BeginTransactionKind::Work => write!(f, "WORK"), + } + } +} + /// Can use to describe options in create sequence or table column type identity /// [ MINVALUE minvalue | NO MINVALUE ] [ MAXVALUE maxvalue | NO MAXVALUE ] #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] @@ -5100,34 +6280,152 @@ impl fmt::Display for FetchDirection { } } +/// The "position" for a FETCH statement. +/// +/// [MsSql](https://learn.microsoft.com/en-us/sql/t-sql/language-elements/fetch-transact-sql) +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub enum FetchPosition { + From, + In, +} + +impl fmt::Display for FetchPosition { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + FetchPosition::From => f.write_str("FROM")?, + FetchPosition::In => f.write_str("IN")?, + }; + + Ok(()) + } +} + /// A privilege on a database object (table, sequence, etc.). 
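+/// For example, `GRANT CREATE WAREHOUSE, MONITOR USAGE ON ACCOUNT TO ROLE r1`
+/// (an illustrative Snowflake statement; the role name is a placeholder) would
+/// typically be represented by two `Action` values, a `Create` and a `Monitor`.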
#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] pub enum Action { + AddSearchOptimization, + Apply { + apply_type: ActionApplyType, + }, + ApplyBudget, + AttachListing, + AttachPolicy, + Audit, + BindServiceEndpoint, Connect, - Create, + Create { + obj_type: Option, + }, + DatabaseRole { + role: ObjectName, + }, Delete, - Execute, - Insert { columns: Option> }, - References { columns: Option> }, - Select { columns: Option> }, + EvolveSchema, + Execute { + obj_type: Option, + }, + Failover, + ImportedPrivileges, + ImportShare, + Insert { + columns: Option>, + }, + Manage { + manage_type: ActionManageType, + }, + ManageReleases, + ManageVersions, + Modify { + modify_type: Option, + }, + Monitor { + monitor_type: Option, + }, + Operate, + OverrideShareRestrictions, + Ownership, + PurchaseDataExchangeListing, + Read, + ReadSession, + References { + columns: Option>, + }, + Replicate, + ResolveAll, + Role { + role: Ident, + }, + Select { + columns: Option>, + }, Temporary, Trigger, Truncate, - Update { columns: Option> }, + Update { + columns: Option>, + }, Usage, } impl fmt::Display for Action { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { + Action::AddSearchOptimization => f.write_str("ADD SEARCH OPTIMIZATION")?, + Action::Apply { apply_type } => write!(f, "APPLY {apply_type}")?, + Action::ApplyBudget => f.write_str("APPLYBUDGET")?, + Action::AttachListing => f.write_str("ATTACH LISTING")?, + Action::AttachPolicy => f.write_str("ATTACH POLICY")?, + Action::Audit => f.write_str("AUDIT")?, + Action::BindServiceEndpoint => f.write_str("BIND SERVICE ENDPOINT")?, Action::Connect => f.write_str("CONNECT")?, - Action::Create => f.write_str("CREATE")?, + Action::Create { obj_type } => { + f.write_str("CREATE")?; + if let Some(obj_type) = obj_type { + write!(f, " {obj_type}")? + } + } + Action::DatabaseRole { role } => write!(f, "DATABASE ROLE {role}")?, Action::Delete => f.write_str("DELETE")?, - Action::Execute => f.write_str("EXECUTE")?, + Action::EvolveSchema => f.write_str("EVOLVE SCHEMA")?, + Action::Execute { obj_type } => { + f.write_str("EXECUTE")?; + if let Some(obj_type) = obj_type { + write!(f, " {obj_type}")? + } + } + Action::Failover => f.write_str("FAILOVER")?, + Action::ImportedPrivileges => f.write_str("IMPORTED PRIVILEGES")?, + Action::ImportShare => f.write_str("IMPORT SHARE")?, Action::Insert { .. } => f.write_str("INSERT")?, + Action::Manage { manage_type } => write!(f, "MANAGE {manage_type}")?, + Action::ManageReleases => f.write_str("MANAGE RELEASES")?, + Action::ManageVersions => f.write_str("MANAGE VERSIONS")?, + Action::Modify { modify_type } => { + write!(f, "MODIFY")?; + if let Some(modify_type) = modify_type { + write!(f, " {modify_type}")?; + } + } + Action::Monitor { monitor_type } => { + write!(f, "MONITOR")?; + if let Some(monitor_type) = monitor_type { + write!(f, " {monitor_type}")? + } + } + Action::Operate => f.write_str("OPERATE")?, + Action::OverrideShareRestrictions => f.write_str("OVERRIDE SHARE RESTRICTIONS")?, + Action::Ownership => f.write_str("OWNERSHIP")?, + Action::PurchaseDataExchangeListing => f.write_str("PURCHASE DATA EXCHANGE LISTING")?, + Action::Read => f.write_str("READ")?, + Action::ReadSession => f.write_str("READ SESSION")?, Action::References { .. 
} => f.write_str("REFERENCES")?, + Action::Replicate => f.write_str("REPLICATE")?, + Action::ResolveAll => f.write_str("RESOLVE ALL")?, + Action::Role { role } => write!(f, "ROLE {role}")?, Action::Select { .. } => f.write_str("SELECT")?, Action::Temporary => f.write_str("TEMPORARY")?, Action::Trigger => f.write_str("TRIGGER")?, @@ -5150,6 +6448,268 @@ impl fmt::Display for Action { } } +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +/// See +/// under `globalPrivileges` in the `CREATE` privilege. +pub enum ActionCreateObjectType { + Account, + Application, + ApplicationPackage, + ComputePool, + DataExchangeListing, + Database, + ExternalVolume, + FailoverGroup, + Integration, + NetworkPolicy, + OrganiationListing, + ReplicationGroup, + Role, + Share, + User, + Warehouse, +} + +impl fmt::Display for ActionCreateObjectType { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + ActionCreateObjectType::Account => write!(f, "ACCOUNT"), + ActionCreateObjectType::Application => write!(f, "APPLICATION"), + ActionCreateObjectType::ApplicationPackage => write!(f, "APPLICATION PACKAGE"), + ActionCreateObjectType::ComputePool => write!(f, "COMPUTE POOL"), + ActionCreateObjectType::DataExchangeListing => write!(f, "DATA EXCHANGE LISTING"), + ActionCreateObjectType::Database => write!(f, "DATABASE"), + ActionCreateObjectType::ExternalVolume => write!(f, "EXTERNAL VOLUME"), + ActionCreateObjectType::FailoverGroup => write!(f, "FAILOVER GROUP"), + ActionCreateObjectType::Integration => write!(f, "INTEGRATION"), + ActionCreateObjectType::NetworkPolicy => write!(f, "NETWORK POLICY"), + ActionCreateObjectType::OrganiationListing => write!(f, "ORGANIZATION LISTING"), + ActionCreateObjectType::ReplicationGroup => write!(f, "REPLICATION GROUP"), + ActionCreateObjectType::Role => write!(f, "ROLE"), + ActionCreateObjectType::Share => write!(f, "SHARE"), + ActionCreateObjectType::User => write!(f, "USER"), + ActionCreateObjectType::Warehouse => write!(f, "WAREHOUSE"), + } + } +} + +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +/// See +/// under `globalPrivileges` in the `APPLY` privilege. 
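+/// Illustrative example (role and account scope are placeholders):
+/// ```sql
+/// GRANT APPLY MASKING POLICY ON ACCOUNT TO ROLE my_role
+/// ```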
+pub enum ActionApplyType { + AggregationPolicy, + AuthenticationPolicy, + JoinPolicy, + MaskingPolicy, + PackagesPolicy, + PasswordPolicy, + ProjectionPolicy, + RowAccessPolicy, + SessionPolicy, + Tag, +} + +impl fmt::Display for ActionApplyType { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + ActionApplyType::AggregationPolicy => write!(f, "AGGREGATION POLICY"), + ActionApplyType::AuthenticationPolicy => write!(f, "AUTHENTICATION POLICY"), + ActionApplyType::JoinPolicy => write!(f, "JOIN POLICY"), + ActionApplyType::MaskingPolicy => write!(f, "MASKING POLICY"), + ActionApplyType::PackagesPolicy => write!(f, "PACKAGES POLICY"), + ActionApplyType::PasswordPolicy => write!(f, "PASSWORD POLICY"), + ActionApplyType::ProjectionPolicy => write!(f, "PROJECTION POLICY"), + ActionApplyType::RowAccessPolicy => write!(f, "ROW ACCESS POLICY"), + ActionApplyType::SessionPolicy => write!(f, "SESSION POLICY"), + ActionApplyType::Tag => write!(f, "TAG"), + } + } +} + +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +/// See +/// under `globalPrivileges` in the `EXECUTE` privilege. +pub enum ActionExecuteObjectType { + Alert, + DataMetricFunction, + ManagedAlert, + ManagedTask, + Task, +} + +impl fmt::Display for ActionExecuteObjectType { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + ActionExecuteObjectType::Alert => write!(f, "ALERT"), + ActionExecuteObjectType::DataMetricFunction => write!(f, "DATA METRIC FUNCTION"), + ActionExecuteObjectType::ManagedAlert => write!(f, "MANAGED ALERT"), + ActionExecuteObjectType::ManagedTask => write!(f, "MANAGED TASK"), + ActionExecuteObjectType::Task => write!(f, "TASK"), + } + } +} + +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +/// See +/// under `globalPrivileges` in the `MANAGE` privilege. +pub enum ActionManageType { + AccountSupportCases, + EventSharing, + Grants, + ListingAutoFulfillment, + OrganizationSupportCases, + UserSupportCases, + Warehouses, +} + +impl fmt::Display for ActionManageType { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + ActionManageType::AccountSupportCases => write!(f, "ACCOUNT SUPPORT CASES"), + ActionManageType::EventSharing => write!(f, "EVENT SHARING"), + ActionManageType::Grants => write!(f, "GRANTS"), + ActionManageType::ListingAutoFulfillment => write!(f, "LISTING AUTO FULFILLMENT"), + ActionManageType::OrganizationSupportCases => write!(f, "ORGANIZATION SUPPORT CASES"), + ActionManageType::UserSupportCases => write!(f, "USER SUPPORT CASES"), + ActionManageType::Warehouses => write!(f, "WAREHOUSES"), + } + } +} + +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +/// See +/// under `globalPrivileges` in the `MODIFY` privilege. 
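+/// Illustrative example (the role name is a placeholder):
+/// ```sql
+/// GRANT MODIFY LOG LEVEL ON ACCOUNT TO ROLE my_role
+/// ```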
+pub enum ActionModifyType { + LogLevel, + TraceLevel, + SessionLogLevel, + SessionTraceLevel, +} + +impl fmt::Display for ActionModifyType { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + ActionModifyType::LogLevel => write!(f, "LOG LEVEL"), + ActionModifyType::TraceLevel => write!(f, "TRACE LEVEL"), + ActionModifyType::SessionLogLevel => write!(f, "SESSION LOG LEVEL"), + ActionModifyType::SessionTraceLevel => write!(f, "SESSION TRACE LEVEL"), + } + } +} + +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +/// See +/// under `globalPrivileges` in the `MONITOR` privilege. +pub enum ActionMonitorType { + Execution, + Security, + Usage, +} + +impl fmt::Display for ActionMonitorType { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + ActionMonitorType::Execution => write!(f, "EXECUTION"), + ActionMonitorType::Security => write!(f, "SECURITY"), + ActionMonitorType::Usage => write!(f, "USAGE"), + } + } +} + +/// The principal that receives the privileges +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub struct Grantee { + pub grantee_type: GranteesType, + pub name: Option, +} + +impl fmt::Display for Grantee { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self.grantee_type { + GranteesType::Role => { + write!(f, "ROLE ")?; + } + GranteesType::Share => { + write!(f, "SHARE ")?; + } + GranteesType::User => { + write!(f, "USER ")?; + } + GranteesType::Group => { + write!(f, "GROUP ")?; + } + GranteesType::Public => { + write!(f, "PUBLIC ")?; + } + GranteesType::DatabaseRole => { + write!(f, "DATABASE ROLE ")?; + } + GranteesType::Application => { + write!(f, "APPLICATION ")?; + } + GranteesType::ApplicationRole => { + write!(f, "APPLICATION ROLE ")?; + } + GranteesType::None => (), + } + if let Some(ref name) = self.name { + name.fmt(f)?; + } + Ok(()) + } +} + +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub enum GranteesType { + Role, + Share, + User, + Group, + Public, + DatabaseRole, + Application, + ApplicationRole, + None, +} + +/// Users/roles designated in a GRANT/REVOKE +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub enum GranteeName { + /// A bare identifier + ObjectName(ObjectName), + /// A MySQL user/host pair such as 'root'@'%' + UserHost { user: Ident, host: Ident }, +} + +impl fmt::Display for GranteeName { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + GranteeName::ObjectName(name) => name.fmt(f), + GranteeName::UserHost { user, host } => { + write!(f, "{}@{}", user, host) + } + } + } +} + /// Objects on which privileges are granted in a GRANT statement. 
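+/// For example, in the (illustrative) statement
+/// `GRANT USAGE ON WAREHOUSE wh1 TO ROLE analyst`, the warehouse list is
+/// represented by `GrantObjects::Warehouses`.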
#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] @@ -5159,12 +6719,34 @@ pub enum GrantObjects { AllSequencesInSchema { schemas: Vec }, /// Grant privileges on `ALL TABLES IN SCHEMA [, ...]` AllTablesInSchema { schemas: Vec }, + /// Grant privileges on specific databases + Databases(Vec), /// Grant privileges on specific schemas Schemas(Vec), /// Grant privileges on specific sequences Sequences(Vec), /// Grant privileges on specific tables Tables(Vec), + /// Grant privileges on specific views + Views(Vec), + /// Grant privileges on specific warehouses + Warehouses(Vec), + /// Grant privileges on specific integrations + Integrations(Vec), + /// Grant privileges on resource monitors + ResourceMonitors(Vec), + /// Grant privileges on users + Users(Vec), + /// Grant privileges on compute pools + ComputePools(Vec), + /// Grant privileges on connections + Connections(Vec), + /// Grant privileges on failover groups + FailoverGroup(Vec), + /// Grant privileges on replication group + ReplicationGroup(Vec), + /// Grant privileges on external volumes + ExternalVolumes(Vec), } impl fmt::Display for GrantObjects { @@ -5173,12 +6755,24 @@ impl fmt::Display for GrantObjects { GrantObjects::Sequences(sequences) => { write!(f, "SEQUENCE {}", display_comma_separated(sequences)) } + GrantObjects::Databases(databases) => { + write!(f, "DATABASE {}", display_comma_separated(databases)) + } GrantObjects::Schemas(schemas) => { write!(f, "SCHEMA {}", display_comma_separated(schemas)) } GrantObjects::Tables(tables) => { write!(f, "{}", display_comma_separated(tables)) } + GrantObjects::Views(views) => { + write!(f, "VIEW {}", display_comma_separated(views)) + } + GrantObjects::Warehouses(warehouses) => { + write!(f, "WAREHOUSE {}", display_comma_separated(warehouses)) + } + GrantObjects::Integrations(integrations) => { + write!(f, "INTEGRATION {}", display_comma_separated(integrations)) + } GrantObjects::AllSequencesInSchema { schemas } => { write!( f, @@ -5193,6 +6787,27 @@ impl fmt::Display for GrantObjects { display_comma_separated(schemas) ) } + GrantObjects::ResourceMonitors(objects) => { + write!(f, "RESOURCE MONITOR {}", display_comma_separated(objects)) + } + GrantObjects::Users(objects) => { + write!(f, "USER {}", display_comma_separated(objects)) + } + GrantObjects::ComputePools(objects) => { + write!(f, "COMPUTE POOL {}", display_comma_separated(objects)) + } + GrantObjects::Connections(objects) => { + write!(f, "CONNECTION {}", display_comma_separated(objects)) + } + GrantObjects::FailoverGroup(objects) => { + write!(f, "FAILOVER GROUP {}", display_comma_separated(objects)) + } + GrantObjects::ReplicationGroup(objects) => { + write!(f, "REPLICATION GROUP {}", display_comma_separated(objects)) + } + GrantObjects::ExternalVolumes(objects) => { + write!(f, "EXTERNAL VOLUME {}", display_comma_separated(objects)) + } } } } @@ -5248,8 +6863,8 @@ pub enum FunctionArgExpr { impl From for FunctionArgExpr { fn from(wildcard_expr: Expr) -> Self { match wildcard_expr { - Expr::QualifiedWildcard(prefix) => Self::QualifiedWildcard(prefix), - Expr::Wildcard => Self::Wildcard, + Expr::QualifiedWildcard(prefix, _) => Self::QualifiedWildcard(prefix), + Expr::Wildcard(_) => Self::Wildcard, expr => Self::Expr(expr), } } @@ -5276,6 +6891,10 @@ pub enum FunctionArgOperator { RightArrow, /// function(arg1 := value1) Assignment, + /// function(arg1 : value1) + Colon, + /// function(arg1 VALUE value1) + Value, } impl fmt::Display for 
FunctionArgOperator { @@ -5284,6 +6903,8 @@ impl fmt::Display for FunctionArgOperator { FunctionArgOperator::Equals => f.write_str("="), FunctionArgOperator::RightArrow => f.write_str("=>"), FunctionArgOperator::Assignment => f.write_str(":="), + FunctionArgOperator::Colon => f.write_str(":"), + FunctionArgOperator::Value => f.write_str("VALUE"), } } } @@ -5292,11 +6913,22 @@ impl fmt::Display for FunctionArgOperator { #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] pub enum FunctionArg { + /// `name` is identifier + /// + /// Enabled when `Dialect::supports_named_fn_args_with_expr_name` returns 'false' Named { name: Ident, arg: FunctionArgExpr, operator: FunctionArgOperator, }, + /// `name` is arbitrary expression + /// + /// Enabled when `Dialect::supports_named_fn_args_with_expr_name` returns 'true' + ExprNamed { + name: Expr, + arg: FunctionArgExpr, + operator: FunctionArgOperator, + }, Unnamed(FunctionArgExpr), } @@ -5308,6 +6940,11 @@ impl fmt::Display for FunctionArg { arg, operator, } => write!(f, "{name} {operator} {arg}"), + FunctionArg::ExprNamed { + name, + arg, + operator, + } => write!(f, "{name} {operator} {arg}"), FunctionArg::Unnamed(unnamed_arg) => write!(f, "{unnamed_arg}"), } } @@ -5330,12 +6967,34 @@ impl fmt::Display for CloseCursor { } } +/// A Drop Domain statement +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub struct DropDomain { + /// Whether to drop the domain if it exists + pub if_exists: bool, + /// The name of the domain to drop + pub name: ObjectName, + /// The behavior to apply when dropping the domain + pub drop_behavior: Option, +} + /// A function call #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] pub struct Function { pub name: ObjectName, + /// Flags whether this function call uses the [ODBC syntax]. + /// + /// Example: + /// ```sql + /// SELECT {fn CONCAT('foo', 'bar')} + /// ``` + /// + /// [ODBC syntax]: https://learn.microsoft.com/en-us/sql/odbc/reference/develop-app/scalar-function-calls?view=sql-server-2017 + pub uses_odbc_syntax: bool, /// The parameters to the function, including any options specified within the /// delimiting parentheses. /// @@ -5374,6 +7033,10 @@ pub struct Function { impl fmt::Display for Function { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + if self.uses_odbc_syntax { + write!(f, "{{fn ")?; + } + write!(f, "{}{}{}", self.name, self.parameters, self.args)?; if !self.within_group.is_empty() { @@ -5396,6 +7059,10 @@ impl fmt::Display for Function { write!(f, " OVER {o}")?; } + if self.uses_odbc_syntax { + write!(f, "}}")?; + } + Ok(()) } } @@ -5446,7 +7113,10 @@ impl fmt::Display for FunctionArgumentList { } write!(f, "{}", display_comma_separated(&self.args))?; if !self.clauses.is_empty() { - write!(f, " {}", display_separated(&self.clauses, " "))?; + if !self.args.is_empty() { + write!(f, " ")?; + } + write!(f, "{}", display_separated(&self.clauses, " "))?; } Ok(()) } @@ -5488,6 +7158,11 @@ pub enum FunctionArgumentClause { /// /// [`GROUP_CONCAT`]: https://dev.mysql.com/doc/refman/8.0/en/aggregate-functions.html#function_group-concat Separator(Value), + /// The json-null-clause to the [`JSON_ARRAY`]/[`JSON_OBJECT`] function in MSSQL. 
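+    /// Illustrative example: the `ABSENT ON NULL` in
+    /// ```sql
+    /// JSON_ARRAY(1, NULL, 2 ABSENT ON NULL)
+    /// ```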
+ /// + /// [`JSON_ARRAY`]: + /// [`JSON_OBJECT`]: + JsonNullClause(JsonNullClause), } impl fmt::Display for FunctionArgumentClause { @@ -5503,10 +7178,32 @@ impl fmt::Display for FunctionArgumentClause { FunctionArgumentClause::OnOverflow(on_overflow) => write!(f, "{on_overflow}"), FunctionArgumentClause::Having(bound) => write!(f, "{bound}"), FunctionArgumentClause::Separator(sep) => write!(f, "SEPARATOR {sep}"), + FunctionArgumentClause::JsonNullClause(null_clause) => write!(f, "{null_clause}"), } } } +/// A method call +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub struct Method { + pub expr: Box, + // always non-empty + pub method_chain: Vec, +} + +impl fmt::Display for Method { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!( + f, + "{}.{}", + self.expr, + display_separated(&self.method_chain, ".") + ) + } +} + #[derive(Debug, Copy, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] @@ -5645,6 +7342,7 @@ impl fmt::Display for HavingBoundKind { pub enum ObjectType { Table, View, + MaterializedView, Index, Schema, Database, @@ -5659,6 +7357,7 @@ impl fmt::Display for ObjectType { f.write_str(match self { ObjectType::Table => "TABLE", ObjectType::View => "VIEW", + ObjectType::MaterializedView => "MATERIALIZED VIEW", ObjectType::Index => "INDEX", ObjectType::Schema => "SCHEMA", ObjectType::Database => "DATABASE", @@ -5714,6 +7413,14 @@ pub enum HiveRowFormat { DELIMITED { delimiters: Vec }, } +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub struct HiveLoadDataFormat { + pub serde: Expr, + pub input_format: Expr, +} + #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] @@ -5901,6 +7608,18 @@ pub enum SqlOption { range_direction: Option, for_values: Vec, }, + /// Comment parameter (supports `=` and no `=` syntax) + Comment(CommentDef), + /// MySQL TableSpace option + /// + TableSpace(TablespaceOption), + /// An option representing a key value pair, where the value is a parenthesized list and with an optional name + /// e.g. + /// + /// UNION = (tbl_name\[,tbl_name\]...) 
+ /// ENGINE = ReplicatedMergeTree('/table_name','{replica}', ver) + /// ENGINE = SummingMergeTree(\[columns\]) + NamedParenthesizedList(NamedParenthesizedList), } impl fmt::Display for SqlOption { @@ -5932,10 +7651,54 @@ impl fmt::Display for SqlOption { display_comma_separated(for_values) ) } + SqlOption::TableSpace(tablespace_option) => { + write!(f, "TABLESPACE {}", tablespace_option.name)?; + match tablespace_option.storage { + Some(StorageType::Disk) => write!(f, " STORAGE DISK"), + Some(StorageType::Memory) => write!(f, " STORAGE MEMORY"), + None => Ok(()), + } + } + SqlOption::Comment(comment) => match comment { + CommentDef::WithEq(comment) => { + write!(f, "COMMENT = '{comment}'") + } + CommentDef::WithoutEq(comment) => { + write!(f, "COMMENT '{comment}'") + } + }, + SqlOption::NamedParenthesizedList(value) => { + write!(f, "{} = ", value.key)?; + if let Some(key) = &value.name { + write!(f, "{}", key)?; + } + if !value.values.is_empty() { + write!(f, "({})", display_comma_separated(&value.values))? + } + Ok(()) + } } } } +#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub enum StorageType { + Disk, + Memory, +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +/// MySql TableSpace option +/// +pub struct TablespaceOption { + pub name: String, + pub storage: Option, +} + #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] @@ -6013,6 +7776,7 @@ pub enum TransactionIsolationLevel { ReadCommitted, RepeatableRead, Serializable, + Snapshot, } impl fmt::Display for TransactionIsolationLevel { @@ -6023,13 +7787,15 @@ impl fmt::Display for TransactionIsolationLevel { ReadCommitted => "READ COMMITTED", RepeatableRead => "REPEATABLE READ", Serializable => "SERIALIZABLE", + Snapshot => "SNAPSHOT", }) } } -/// SQLite specific syntax +/// Modifier for the transaction in the `BEGIN` syntax /// -/// +/// SQLite: +/// MS-SQL: #[derive(Debug, Copy, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] @@ -6037,6 +7803,8 @@ pub enum TransactionModifier { Deferred, Immediate, Exclusive, + Try, + Catch, } impl fmt::Display for TransactionModifier { @@ -6046,6 +7814,8 @@ impl fmt::Display for TransactionModifier { Deferred => "DEFERRED", Immediate => "IMMEDIATE", Exclusive => "EXCLUSIVE", + Try => "TRY", + Catch => "CATCH", }) } } @@ -6057,6 +7827,7 @@ pub enum ShowStatementFilter { Like(String), ILike(String), Where(Expr), + NoKeyword(String), } impl fmt::Display for ShowStatementFilter { @@ -6066,6 +7837,25 @@ impl fmt::Display for ShowStatementFilter { Like(pattern) => write!(f, "LIKE '{}'", value::escape_single_quote_string(pattern)), ILike(pattern) => write!(f, "ILIKE {}", value::escape_single_quote_string(pattern)), Where(expr) => write!(f, "WHERE {expr}"), + NoKeyword(pattern) => write!(f, "'{}'", value::escape_single_quote_string(pattern)), + } + } +} + +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub enum ShowStatementInClause { + IN, + 
FROM, +} + +impl fmt::Display for ShowStatementInClause { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + use ShowStatementInClause::*; + match self { + FROM => write!(f, "FROM"), + IN => write!(f, "IN"), } } } @@ -6089,11 +7879,11 @@ impl fmt::Display for SqliteOnConflict { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { use SqliteOnConflict::*; match self { - Rollback => write!(f, "ROLLBACK"), - Abort => write!(f, "ABORT"), - Fail => write!(f, "FAIL"), - Ignore => write!(f, "IGNORE"), - Replace => write!(f, "REPLACE"), + Rollback => write!(f, "OR ROLLBACK"), + Abort => write!(f, "OR ABORT"), + Fail => write!(f, "OR FAIL"), + Ignore => write!(f, "OR IGNORE"), + Replace => write!(f, "OR REPLACE"), } } } @@ -6157,7 +7947,7 @@ impl fmt::Display for CopyTarget { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { use CopyTarget::*; match self { - Stdin { .. } => write!(f, "STDIN"), + Stdin => write!(f, "STDIN"), Stdout => write!(f, "STDOUT"), File { filename } => write!(f, "'{}'", value::escape_single_quote_string(filename)), Program { command } => write!( @@ -6485,6 +8275,35 @@ impl Display for MergeClause { } } +/// A Output Clause in the end of a 'MERGE' Statement +/// +/// Example: +/// OUTPUT $action, deleted.* INTO dbo.temp_products; +/// [mssql](https://learn.microsoft.com/en-us/sql/t-sql/queries/output-clause-transact-sql) +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub struct OutputClause { + pub select_items: Vec, + pub into_table: SelectInto, +} + +impl fmt::Display for OutputClause { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + let OutputClause { + select_items, + into_table, + } = self; + + write!( + f, + "OUTPUT {} {}", + display_comma_separated(select_items), + into_table + ) + } +} + #[derive(Debug, Copy, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] @@ -6562,30 +8381,30 @@ impl fmt::Display for FlushLocation { } } -/// Optional context modifier for statements that can be or `LOCAL`, or `SESSION`. +/// Optional context modifier for statements that can be or `LOCAL`, `GLOBAL`, or `SESSION`. #[derive(Debug, Copy, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] pub enum ContextModifier { - /// No context defined. Each dialect defines the default in this scenario. - None, /// `LOCAL` identifier, usually related to transactional states. Local, /// `SESSION` identifier Session, + /// `GLOBAL` identifier + Global, } impl fmt::Display for ContextModifier { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { - Self::None => { - write!(f, "") - } Self::Local => { - write!(f, " LOCAL") + write!(f, "LOCAL ") } Self::Session => { - write!(f, " SESSION") + write!(f, "SESSION ") + } + Self::Global => { + write!(f, "GLOBAL ") } } } @@ -6784,7 +8603,8 @@ impl fmt::Display for FunctionDeterminismSpecifier { /// where within the statement, the body shows up. 
/// /// [BigQuery]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#syntax_11 -/// [Postgres]: https://www.postgresql.org/docs/15/sql-createfunction.html +/// [PostgreSQL]: https://www.postgresql.org/docs/15/sql-createfunction.html +/// [MsSql]: https://learn.microsoft.com/en-us/sql/t-sql/statements/create-function-transact-sql #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] @@ -6813,6 +8633,22 @@ pub enum CreateFunctionBody { /// /// [BigQuery]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#syntax_11 AsAfterOptions(Expr), + /// Function body with statements before the `RETURN` keyword. + /// + /// Example: + /// ```sql + /// CREATE FUNCTION my_scalar_udf(a INT, b INT) + /// RETURNS INT + /// AS + /// BEGIN + /// DECLARE c INT; + /// SET c = a + b; + /// RETURN c; + /// END + /// ``` + /// + /// [MsSql]: https://learn.microsoft.com/en-us/sql/t-sql/statements/create-function-transact-sql + AsBeginEnd(BeginEndStatements), /// Function body expression using the 'RETURN' keyword. /// /// Example: @@ -6822,7 +8658,7 @@ pub enum CreateFunctionBody { /// RETURN a + b; /// ``` /// - /// [Postgres]: https://www.postgresql.org/docs/current/sql-createfunction.html + /// [PostgreSQL]: https://www.postgresql.org/docs/current/sql-createfunction.html Return(Expr), } @@ -7036,52 +8872,114 @@ impl fmt::Display for HiveSetLocation { write!(f, "LOCATION {}", self.location) } } - -/// MySQL `ALTER TABLE` only [FIRST | AFTER column_name] -#[allow(clippy::large_enum_variant)] + +/// MySQL `ALTER TABLE` only [FIRST | AFTER column_name] +#[allow(clippy::large_enum_variant)] +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub enum MySQLColumnPosition { + First, + After(Ident), +} + +impl Display for MySQLColumnPosition { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + MySQLColumnPosition::First => write!(f, "FIRST"), + MySQLColumnPosition::After(ident) => { + let column_name = &ident.value; + write!(f, "AFTER {column_name}") + } + } + } +} + +/// MySQL `CREATE VIEW` algorithm parameter: [ALGORITHM = {UNDEFINED | MERGE | TEMPTABLE}] +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub enum CreateViewAlgorithm { + Undefined, + Merge, + TempTable, +} + +impl Display for CreateViewAlgorithm { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + CreateViewAlgorithm::Undefined => write!(f, "UNDEFINED"), + CreateViewAlgorithm::Merge => write!(f, "MERGE"), + CreateViewAlgorithm::TempTable => write!(f, "TEMPTABLE"), + } + } +} +/// MySQL `CREATE VIEW` security parameter: [SQL SECURITY { DEFINER | INVOKER }] #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub enum MySQLColumnPosition { - First, - After(Ident), +pub enum CreateViewSecurity { + Definer, + Invoker, } -impl Display for MySQLColumnPosition { +impl Display for CreateViewSecurity { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { - MySQLColumnPosition::First => Ok(write!(f, 
"FIRST")?), - MySQLColumnPosition::After(ident) => { - let column_name = &ident.value; - Ok(write!(f, "AFTER {column_name}")?) - } + CreateViewSecurity::Definer => write!(f, "DEFINER"), + CreateViewSecurity::Invoker => write!(f, "INVOKER"), } } } -/// Engine of DB. Some warehouse has parameters of engine, e.g. [clickhouse] +/// [MySQL] `CREATE VIEW` additional parameters /// -/// [clickhouse]: https://clickhouse.com/docs/en/engines/table-engines +/// [MySQL]: https://dev.mysql.com/doc/refman/9.1/en/create-view.html #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub struct TableEngine { - pub name: String, - pub parameters: Option>, +pub struct CreateViewParams { + pub algorithm: Option, + pub definer: Option, + pub security: Option, } -impl Display for TableEngine { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "{}", self.name)?; - - if let Some(parameters) = self.parameters.as_ref() { - write!(f, "({})", display_comma_separated(parameters))?; +impl Display for CreateViewParams { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let CreateViewParams { + algorithm, + definer, + security, + } = self; + if let Some(algorithm) = algorithm { + write!(f, "ALGORITHM = {algorithm} ")?; + } + if let Some(definers) = definer { + write!(f, "DEFINER = {definers} ")?; + } + if let Some(security) = security { + write!(f, "SQL SECURITY {security} ")?; } - Ok(()) } } +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +/// Key/Value, where the value is a (optionally named) list of identifiers +/// +/// ```sql +/// UNION = (tbl_name[,tbl_name]...) +/// ENGINE = ReplicatedMergeTree('/table_name','{replica}', ver) +/// ENGINE = SummingMergeTree([columns]) +/// ``` +pub struct NamedParenthesizedList { + pub key: Ident, + pub name: Option, + pub values: Vec, +} + /// Snowflake `WITH ROW ACCESS POLICY policy_name ON (identifier, ...)` /// /// @@ -7143,18 +9041,12 @@ pub enum CommentDef { /// Does not include `=` when printing the comment, as `COMMENT 'comment'` WithEq(String), WithoutEq(String), - // For Hive dialect, the table comment is after the column definitions without `=`, - // so we need to add an extra variant to allow to identify this case when displaying. - // [Hive](https://cwiki.apache.org/confluence/display/Hive/LanguageManual+DDL#LanguageManualDDL-CreateTable) - AfterColumnDefsWithoutEq(String), } impl Display for CommentDef { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { - CommentDef::WithEq(comment) - | CommentDef::WithoutEq(comment) - | CommentDef::AfterColumnDefsWithoutEq(comment) => write!(f, "{comment}"), + CommentDef::WithEq(comment) | CommentDef::WithoutEq(comment) => write!(f, "{comment}"), } } } @@ -7220,7 +9112,7 @@ where /// ```sql /// EXPLAIN (ANALYZE, VERBOSE TRUE, FORMAT TEXT) SELECT * FROM my_table; /// -/// VACCUM (VERBOSE, ANALYZE ON, PARALLEL 10) my_table; +/// VACUUM (VERBOSE, ANALYZE ON, PARALLEL 10) my_table; /// ``` #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] @@ -7240,6 +9132,401 @@ impl Display for UtilityOption { } } +/// Represents the different options available for `SHOW` +/// statements to filter the results. 
Example from Snowflake: +/// +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub struct ShowStatementOptions { + pub show_in: Option, + pub starts_with: Option, + pub limit: Option, + pub limit_from: Option, + pub filter_position: Option, +} + +impl Display for ShowStatementOptions { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let (like_in_infix, like_in_suffix) = match &self.filter_position { + Some(ShowStatementFilterPosition::Infix(filter)) => { + (format!(" {filter}"), "".to_string()) + } + Some(ShowStatementFilterPosition::Suffix(filter)) => { + ("".to_string(), format!(" {filter}")) + } + None => ("".to_string(), "".to_string()), + }; + write!( + f, + "{like_in_infix}{show_in}{starts_with}{limit}{from}{like_in_suffix}", + show_in = match &self.show_in { + Some(i) => format!(" {i}"), + None => String::new(), + }, + starts_with = match &self.starts_with { + Some(s) => format!(" STARTS WITH {s}"), + None => String::new(), + }, + limit = match &self.limit { + Some(l) => format!(" LIMIT {l}"), + None => String::new(), + }, + from = match &self.limit_from { + Some(f) => format!(" FROM {f}"), + None => String::new(), + } + )?; + Ok(()) + } +} + +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub enum ShowStatementFilterPosition { + Infix(ShowStatementFilter), // For example: SHOW COLUMNS LIKE '%name%' IN TABLE tbl + Suffix(ShowStatementFilter), // For example: SHOW COLUMNS IN tbl LIKE '%name%' +} + +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub enum ShowStatementInParentType { + Account, + Database, + Schema, + Table, + View, +} + +impl fmt::Display for ShowStatementInParentType { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + ShowStatementInParentType::Account => write!(f, "ACCOUNT"), + ShowStatementInParentType::Database => write!(f, "DATABASE"), + ShowStatementInParentType::Schema => write!(f, "SCHEMA"), + ShowStatementInParentType::Table => write!(f, "TABLE"), + ShowStatementInParentType::View => write!(f, "VIEW"), + } + } +} + +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub struct ShowStatementIn { + pub clause: ShowStatementInClause, + pub parent_type: Option, + #[cfg_attr(feature = "visitor", visit(with = "visit_relation"))] + pub parent_name: Option, +} + +impl fmt::Display for ShowStatementIn { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "{}", self.clause)?; + if let Some(parent_type) = &self.parent_type { + write!(f, " {}", parent_type)?; + } + if let Some(parent_name) = &self.parent_name { + write!(f, " {}", parent_name)?; + } + Ok(()) + } +} + +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub struct ShowObjects { + pub terse: bool, + pub show_options: ShowStatementOptions, +} + +/// MSSQL's json null clause +/// +/// ```plaintext +/// ::= +/// NULL ON NULL +/// | ABSENT ON NULL +/// ``` +/// +/// +#[derive(Debug, 
Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub enum JsonNullClause { + NullOnNull, + AbsentOnNull, +} + +impl Display for JsonNullClause { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + JsonNullClause::NullOnNull => write!(f, "NULL ON NULL"), + JsonNullClause::AbsentOnNull => write!(f, "ABSENT ON NULL"), + } + } +} + +/// rename object definition +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub struct RenameTable { + pub old_name: ObjectName, + pub new_name: ObjectName, +} + +impl fmt::Display for RenameTable { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "{} TO {}", self.old_name, self.new_name)?; + Ok(()) + } +} + +/// Represents the referenced table in an `INSERT INTO` statement +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub enum TableObject { + /// Table specified by name. + /// Example: + /// ```sql + /// INSERT INTO my_table + /// ``` + TableName(#[cfg_attr(feature = "visitor", visit(with = "visit_relation"))] ObjectName), + + /// Table specified as a function. + /// Example: + /// ```sql + /// INSERT INTO TABLE FUNCTION remote('localhost', default.simple_table) + /// ``` + /// [Clickhouse](https://clickhouse.com/docs/en/sql-reference/table-functions) + TableFunction(Function), +} + +impl fmt::Display for TableObject { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + Self::TableName(table_name) => write!(f, "{table_name}"), + Self::TableFunction(func) => write!(f, "FUNCTION {}", func), + } + } +} + +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub enum SetSessionParamKind { + Generic(SetSessionParamGeneric), + IdentityInsert(SetSessionParamIdentityInsert), + Offsets(SetSessionParamOffsets), + Statistics(SetSessionParamStatistics), +} + +impl fmt::Display for SetSessionParamKind { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + SetSessionParamKind::Generic(x) => write!(f, "{x}"), + SetSessionParamKind::IdentityInsert(x) => write!(f, "{x}"), + SetSessionParamKind::Offsets(x) => write!(f, "{x}"), + SetSessionParamKind::Statistics(x) => write!(f, "{x}"), + } + } +} + +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub struct SetSessionParamGeneric { + pub names: Vec, + pub value: String, +} + +impl fmt::Display for SetSessionParamGeneric { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "{} {}", display_comma_separated(&self.names), self.value) + } +} + +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub struct SetSessionParamIdentityInsert { + pub obj: ObjectName, + pub value: SessionParamValue, +} + +impl fmt::Display for SetSessionParamIdentityInsert { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "IDENTITY_INSERT {} {}", 
self.obj, self.value) + } +} + +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub struct SetSessionParamOffsets { + pub keywords: Vec, + pub value: SessionParamValue, +} + +impl fmt::Display for SetSessionParamOffsets { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!( + f, + "OFFSETS {} {}", + display_comma_separated(&self.keywords), + self.value + ) + } +} + +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub struct SetSessionParamStatistics { + pub topic: SessionParamStatsTopic, + pub value: SessionParamValue, +} + +impl fmt::Display for SetSessionParamStatistics { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "STATISTICS {} {}", self.topic, self.value) + } +} + +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub enum SessionParamStatsTopic { + IO, + Profile, + Time, + Xml, +} + +impl fmt::Display for SessionParamStatsTopic { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + SessionParamStatsTopic::IO => write!(f, "IO"), + SessionParamStatsTopic::Profile => write!(f, "PROFILE"), + SessionParamStatsTopic::Time => write!(f, "TIME"), + SessionParamStatsTopic::Xml => write!(f, "XML"), + } + } +} + +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub enum SessionParamValue { + On, + Off, +} + +impl fmt::Display for SessionParamValue { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + SessionParamValue::On => write!(f, "ON"), + SessionParamValue::Off => write!(f, "OFF"), + } + } +} + +/// Snowflake StorageSerializationPolicy for Iceberg Tables +/// ```sql +/// [ STORAGE_SERIALIZATION_POLICY = { COMPATIBLE | OPTIMIZED } ] +/// ``` +/// +/// +#[derive(Debug, Copy, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub enum StorageSerializationPolicy { + Compatible, + Optimized, +} + +impl Display for StorageSerializationPolicy { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + StorageSerializationPolicy::Compatible => write!(f, "COMPATIBLE"), + StorageSerializationPolicy::Optimized => write!(f, "OPTIMIZED"), + } + } +} + +/// Variants of the Snowflake `COPY INTO` statement +#[derive(Debug, Copy, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub enum CopyIntoSnowflakeKind { + /// Loads data from files to a table + /// See: + Table, + /// Unloads data from a table or query to external files + /// See: + Location, +} + +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub struct PrintStatement { + pub message: Box, +} + +impl fmt::Display for PrintStatement { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "PRINT {}", self.message) + } +} + 
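A minimal usage sketch for the new `PrintStatement` node introduced above, assuming `PrintStatement`, `Expr`, and `Value` are re-exported from `sqlparser::ast` as is conventional in this crate, and using the spanned-value constructor (`with_empty_span`) that the updated interval tests later in this diff also rely on:

```rust
use sqlparser::ast::{Expr, PrintStatement, Value};

fn main() {
    // Hand-built AST for `PRINT 'hello'`; in practice the parser produces this
    // node when it encounters the MSSQL PRINT statement.
    let print_stmt = PrintStatement {
        message: Box::new(Expr::Value(
            Value::SingleQuotedString("hello".to_string()).with_empty_span(),
        )),
    };

    // The Display impl added above renders the node back to SQL text.
    assert_eq!(print_stmt.to_string(), "PRINT 'hello'");
}
```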
+/// Represents a `Return` statement. +/// +/// [MsSql triggers](https://learn.microsoft.com/en-us/sql/t-sql/statements/create-trigger-transact-sql) +/// [MsSql functions](https://learn.microsoft.com/en-us/sql/t-sql/statements/create-function-transact-sql) +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub struct ReturnStatement { + pub value: Option, +} + +impl fmt::Display for ReturnStatement { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match &self.value { + Some(ReturnStatementValue::Expr(expr)) => write!(f, "RETURN {}", expr), + None => write!(f, "RETURN"), + } + } +} + +/// Variants of a `RETURN` statement +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub enum ReturnStatementValue { + Expr(Expr), +} + +/// Represents an `OPEN` statement. +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub struct OpenStatement { + /// Cursor name + pub cursor_name: Ident, +} + +impl fmt::Display for OpenStatement { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "OPEN {}", self.cursor_name) + } +} + #[cfg(test)] mod tests { use super::*; @@ -7339,9 +9626,9 @@ mod tests { #[test] fn test_interval_display() { let interval = Expr::Interval(Interval { - value: Box::new(Expr::Value(Value::SingleQuotedString(String::from( - "123:45.67", - )))), + value: Box::new(Expr::Value( + Value::SingleQuotedString(String::from("123:45.67")).with_empty_span(), + )), leading_field: Some(DateTimeField::Minute), leading_precision: Some(10), last_field: Some(DateTimeField::Second), @@ -7353,7 +9640,9 @@ mod tests { ); let interval = Expr::Interval(Interval { - value: Box::new(Expr::Value(Value::SingleQuotedString(String::from("5")))), + value: Box::new(Expr::Value( + Value::SingleQuotedString(String::from("5")).with_empty_span(), + )), leading_field: Some(DateTimeField::Second), leading_precision: Some(1), last_field: None, diff --git a/src/ast/operator.rs b/src/ast/operator.rs index c3bb379d6..d0bb05e3c 100644 --- a/src/ast/operator.rs +++ b/src/ast/operator.rs @@ -51,6 +51,23 @@ pub enum UnaryOperator { PGPrefixFactorial, /// Absolute value, e.g. `@ -9` (PostgreSQL-specific) PGAbs, + /// Unary logical not operator: e.g. `! false` (Hive-specific) + BangNot, + /// `#` Number of points in path or polygon (PostgreSQL/Redshift geometric operator) + /// see + Hash, + /// `@-@` Length or circumference (PostgreSQL/Redshift geometric operator) + /// see + AtDashAt, + /// `@@` Center (PostgreSQL/Redshift geometric operator) + /// see + DoubleAt, + /// `?-` Is horizontal? (PostgreSQL/Redshift geometric operator) + /// see + QuestionDash, + /// `?|` Is vertical? 
(PostgreSQL/Redshift geometric operator) + /// see + QuestionPipe, } impl fmt::Display for UnaryOperator { @@ -65,6 +82,12 @@ impl fmt::Display for UnaryOperator { UnaryOperator::PGPostfixFactorial => "!", UnaryOperator::PGPrefixFactorial => "!!", UnaryOperator::PGAbs => "@", + UnaryOperator::BangNot => "!", + UnaryOperator::Hash => "#", + UnaryOperator::AtDashAt => "@-@", + UnaryOperator::DoubleAt => "@@", + UnaryOperator::QuestionDash => "?-", + UnaryOperator::QuestionPipe => "?|", }) } } @@ -116,6 +139,11 @@ pub enum BinaryOperator { DuckIntegerDivide, /// MySQL [`DIV`](https://dev.mysql.com/doc/refman/8.0/en/arithmetic-functions.html) integer division MyIntegerDivide, + /// MATCH operator, e.g. `a MATCH b` (SQLite-specific) + /// See + Match, + /// REGEXP operator, e.g. `a REGEXP b` (SQLite-specific) + Regexp, /// Support for custom operators (such as Postgres custom operators) Custom(String), /// Bitwise XOR, e.g. `a # b` (PostgreSQL-specific) @@ -245,6 +273,62 @@ pub enum BinaryOperator { /// See [CREATE OPERATOR](https://www.postgresql.org/docs/current/sql-createoperator.html) /// for more information. PGCustomBinaryOperator(Vec), + /// The `OVERLAPS` operator + /// + /// Specifies a test for an overlap between two datetime periods: + /// + Overlaps, + /// `##` Point of closest proximity (PostgreSQL/Redshift geometric operator) + /// See + DoubleHash, + /// `<->` Distance between (PostgreSQL/Redshift geometric operator) + /// See + LtDashGt, + /// `&<` Overlaps to left? (PostgreSQL/Redshift geometric operator) + /// See + AndLt, + /// `&>` Overlaps to right? (PostgreSQL/Redshift geometric operator) + /// See + AndGt, + /// `<<|` Is strictly below? (PostgreSQL/Redshift geometric operator) + /// See + LtLtPipe, + /// `|>>` Is strictly above? (PostgreSQL/Redshift geometric operator) + /// See + PipeGtGt, + /// `&<|` Does not extend above? (PostgreSQL/Redshift geometric operator) + /// See + AndLtPipe, + /// `|&>` Does not extend below? (PostgreSQL/Redshift geometric operator) + /// See + PipeAndGt, + /// `<^` Is below? (PostgreSQL/Redshift geometric operator) + /// See + LtCaret, + /// `>^` Is above? (PostgreSQL/Redshift geometric operator) + /// See + GtCaret, + /// `?#` Intersects? (PostgreSQL/Redshift geometric operator) + /// See + QuestionHash, + /// `?-` Is horizontal? (PostgreSQL/Redshift geometric operator) + /// See + QuestionDash, + /// `?-|` Is perpendicular? (PostgreSQL/Redshift geometric operator) + /// See + QuestionDashPipe, + /// `?||` Are Parallel? (PostgreSQL/Redshift geometric operator) + /// See + QuestionDoublePipe, + /// `@` Contained or on? (PostgreSQL/Redshift geometric operator) + /// See + At, + /// `~=` Same as? 
(PostgreSQL/Redshift geometric operator) + /// See + TildeEq, + /// ':=' Assignment Operator + /// See + Assignment, } impl fmt::Display for BinaryOperator { @@ -271,6 +355,8 @@ impl fmt::Display for BinaryOperator { BinaryOperator::BitwiseXor => f.write_str("^"), BinaryOperator::DuckIntegerDivide => f.write_str("//"), BinaryOperator::MyIntegerDivide => f.write_str("DIV"), + BinaryOperator::Match => f.write_str("MATCH"), + BinaryOperator::Regexp => f.write_str("REGEXP"), BinaryOperator::Custom(s) => f.write_str(s), BinaryOperator::PGBitwiseXor => f.write_str("#"), BinaryOperator::PGBitwiseShiftLeft => f.write_str("<<"), @@ -301,6 +387,24 @@ impl fmt::Display for BinaryOperator { BinaryOperator::PGCustomBinaryOperator(idents) => { write!(f, "OPERATOR({})", display_separated(idents, ".")) } + BinaryOperator::Overlaps => f.write_str("OVERLAPS"), + BinaryOperator::DoubleHash => f.write_str("##"), + BinaryOperator::LtDashGt => f.write_str("<->"), + BinaryOperator::AndLt => f.write_str("&<"), + BinaryOperator::AndGt => f.write_str("&>"), + BinaryOperator::LtLtPipe => f.write_str("<<|"), + BinaryOperator::PipeGtGt => f.write_str("|>>"), + BinaryOperator::AndLtPipe => f.write_str("&<|"), + BinaryOperator::PipeAndGt => f.write_str("|&>"), + BinaryOperator::LtCaret => f.write_str("<^"), + BinaryOperator::GtCaret => f.write_str(">^"), + BinaryOperator::QuestionHash => f.write_str("?#"), + BinaryOperator::QuestionDash => f.write_str("?-"), + BinaryOperator::QuestionDashPipe => f.write_str("?-|"), + BinaryOperator::QuestionDoublePipe => f.write_str("?||"), + BinaryOperator::At => f.write_str("@"), + BinaryOperator::TildeEq => f.write_str("~="), + BinaryOperator::Assignment => f.write_str(":="), } } } diff --git a/src/ast/query.rs b/src/ast/query.rs index dc5966e5e..a90b61668 100644 --- a/src/ast/query.rs +++ b/src/ast/query.rs @@ -18,13 +18,17 @@ #[cfg(not(feature = "std"))] use alloc::{boxed::Box, vec::Vec}; +use helpers::attached_token::AttachedToken; #[cfg(feature = "serde")] use serde::{Deserialize, Serialize}; #[cfg(feature = "visitor")] use sqlparser_derive::{Visit, VisitMut}; -use crate::ast::*; +use crate::{ + ast::*, + tokenizer::{Token, TokenWithSpan}, +}; /// The most complete variant of a `SELECT` query expression, optionally /// including `WITH`, `UNION` / other set operations, and `ORDER BY`. @@ -39,14 +43,8 @@ pub struct Query { pub body: Box, /// ORDER BY pub order_by: Option, - /// `LIMIT { | ALL }` - pub limit: Option, - - /// `LIMIT { } BY { ,,... } }` - pub limit_by: Vec, - - /// `OFFSET [ { ROW | ROWS } ]` - pub offset: Option, + /// `LIMIT ... OFFSET ... 
| LIMIT , ` + pub limit_clause: Option, /// `FETCH { FIRST | NEXT } [ PERCENT ] { ROW | ROWS } | { ONLY | WITH TIES }` pub fetch: Option, /// `FOR { UPDATE | SHARE } [ OF table_name ] [ SKIP LOCKED | NOWAIT ]` @@ -64,6 +62,9 @@ pub struct Query { /// [ClickHouse](https://clickhouse.com/docs/en/sql-reference/statements/select/format) /// (ClickHouse-specific) pub format_clause: Option, + + /// Pipe operator + pub pipe_operators: Vec, } impl fmt::Display for Query { @@ -75,14 +76,9 @@ impl fmt::Display for Query { if let Some(ref order_by) = self.order_by { write!(f, " {order_by}")?; } - if let Some(ref limit) = self.limit { - write!(f, " LIMIT {limit}")?; - } - if let Some(ref offset) = self.offset { - write!(f, " {offset}")?; - } - if !self.limit_by.is_empty() { - write!(f, " BY {}", display_separated(&self.limit_by, ", "))?; + + if let Some(ref limit_clause) = self.limit_clause { + limit_clause.fmt(f)?; } if let Some(ref settings) = self.settings { write!(f, " SETTINGS {}", display_comma_separated(settings))?; @@ -99,6 +95,9 @@ impl fmt::Display for Query { if let Some(ref format) = self.format_clause { write!(f, " {}", format)?; } + for pipe_operator in &self.pipe_operators { + write!(f, " |> {}", pipe_operator)?; + } Ok(()) } } @@ -152,6 +151,7 @@ pub enum SetExpr { Values(Values), Insert(Statement), Update(Statement), + Delete(Statement), Table(Box
), } @@ -174,6 +174,7 @@ impl fmt::Display for SetExpr { SetExpr::Values(v) => write!(f, "{v}"), SetExpr::Insert(v) => write!(f, "{v}"), SetExpr::Update(v) => write!(f, "{v}"), + SetExpr::Delete(v) => write!(f, "{v}"), SetExpr::Table(t) => write!(f, "{t}"), SetExpr::SetOperation { left, @@ -204,6 +205,7 @@ pub enum SetOperator { Union, Except, Intersect, + Minus, } impl fmt::Display for SetOperator { @@ -212,6 +214,7 @@ impl fmt::Display for SetOperator { SetOperator::Union => "UNION", SetOperator::Except => "EXCEPT", SetOperator::Intersect => "INTERSECT", + SetOperator::Minus => "MINUS", }) } } @@ -269,6 +272,19 @@ impl fmt::Display for Table { } } +/// What did this select look like? +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub enum SelectFlavor { + /// `SELECT *` + Standard, + /// `FROM ... SELECT *` + FromFirst, + /// `FROM *` + FromFirstNoSelect, +} + /// A restricted variant of `SELECT` (without CTEs/`ORDER BY`), which may /// appear either as the only body item of a `Query`, or as an operand /// to a set operation like `UNION`. @@ -276,9 +292,14 @@ impl fmt::Display for Table { #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] pub struct Select { + /// Token for the `SELECT` keyword + pub select_token: AttachedToken, + /// `SELECT [DISTINCT] ...` pub distinct: Option, /// MSSQL syntax: `TOP () [ PERCENT ] [ WITH TIES ]` pub top: Option, + /// Whether the top was located before `ALL`/`DISTINCT` + pub top_before_distinct: bool, /// projection expressions pub projection: Vec, /// INTO @@ -317,29 +338,51 @@ pub struct Select { pub value_table_mode: Option, /// STARTING WITH .. CONNECT BY pub connect_by: Option, + /// Was this a FROM-first query? 
+ pub flavor: SelectFlavor, } impl fmt::Display for Select { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "SELECT")?; + match self.flavor { + SelectFlavor::Standard => { + write!(f, "SELECT")?; + } + SelectFlavor::FromFirst => { + write!(f, "FROM {} SELECT", display_comma_separated(&self.from))?; + } + SelectFlavor::FromFirstNoSelect => { + write!(f, "FROM {}", display_comma_separated(&self.from))?; + } + } if let Some(value_table_mode) = self.value_table_mode { write!(f, " {value_table_mode}")?; } + if let Some(ref top) = self.top { + if self.top_before_distinct { + write!(f, " {top}")?; + } + } if let Some(ref distinct) = self.distinct { write!(f, " {distinct}")?; } if let Some(ref top) = self.top { - write!(f, " {top}")?; + if !self.top_before_distinct { + write!(f, " {top}")?; + } + } + + if !self.projection.is_empty() { + write!(f, " {}", display_comma_separated(&self.projection))?; } - write!(f, " {}", display_comma_separated(&self.projection))?; if let Some(ref into) = self.into { write!(f, " {into}")?; } - if !self.from.is_empty() { + if self.flavor == SelectFlavor::Standard && !self.from.is_empty() { write!(f, " FROM {}", display_comma_separated(&self.from))?; } if !self.lateral_views.is_empty() { @@ -495,6 +538,8 @@ impl fmt::Display for NamedWindowDefinition { #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] pub struct With { + /// Token for the "WITH" keyword + pub with_token: AttachedToken, pub recursive: bool, pub cte_tables: Vec, } @@ -546,6 +591,8 @@ pub struct Cte { pub query: Box, pub from: Option, pub materialized: Option, + /// Token for the closing parenthesis + pub closing_paren_token: AttachedToken, } impl fmt::Display for Cte { @@ -561,6 +608,20 @@ impl fmt::Display for Cte { } } +/// Represents an expression behind a wildcard expansion in a projection. +/// `SELECT T.* FROM T; +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub enum SelectItemQualifiedWildcardKind { + /// Expression is an object name. + /// e.g. `alias.*` or even `schema.table.*` + ObjectName(ObjectName), + /// Select star on an arbitrary expression. + /// e.g. `STRUCT('foo').*` + Expr(Expr), +} + /// One item of the comma-separated list following `SELECT` #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] @@ -570,12 +631,24 @@ pub enum SelectItem { UnnamedExpr(Expr), /// An expression, followed by `[ AS ] alias` ExprWithAlias { expr: Expr, alias: Ident }, - /// `alias.*` or even `schema.table.*` - QualifiedWildcard(ObjectName, WildcardAdditionalOptions), + /// An expression, followed by a wildcard expansion. + /// e.g. `alias.*`, `STRUCT('foo').*` + QualifiedWildcard(SelectItemQualifiedWildcardKind, WildcardAdditionalOptions), /// An unqualified `*` Wildcard(WildcardAdditionalOptions), } +impl fmt::Display for SelectItemQualifiedWildcardKind { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match &self { + SelectItemQualifiedWildcardKind::ObjectName(object_name) => { + write!(f, "{object_name}.*") + } + SelectItemQualifiedWildcardKind::Expr(expr) => write!(f, "{expr}.*"), + } + } +} + /// Single aliased identifier /// /// # Syntax @@ -597,10 +670,12 @@ impl fmt::Display for IdentWithAlias { } /// Additional options for wildcards, e.g. Snowflake `EXCLUDE`/`RENAME` and Bigquery `EXCEPT`. 
-#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash, Default)] +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] pub struct WildcardAdditionalOptions { + /// The wildcard token `*` + pub wildcard_token: AttachedToken, /// `[ILIKE...]`. /// Snowflake syntax: pub opt_ilike: Option, @@ -618,6 +693,19 @@ pub struct WildcardAdditionalOptions { pub opt_rename: Option, } +impl Default for WildcardAdditionalOptions { + fn default() -> Self { + Self { + wildcard_token: TokenWithSpan::wrap(Token::Mul).into(), + opt_ilike: None, + opt_exclude: None, + opt_except: None, + opt_replace: None, + opt_rename: None, + } + } +} + impl fmt::Display for WildcardAdditionalOptions { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { if let Some(ilike) = &self.opt_ilike { @@ -827,8 +915,8 @@ impl fmt::Display for SelectItem { match &self { SelectItem::UnnamedExpr(expr) => write!(f, "{expr}"), SelectItem::ExprWithAlias { expr, alias } => write!(f, "{expr} AS {alias}"), - SelectItem::QualifiedWildcard(prefix, additional_options) => { - write!(f, "{prefix}.*")?; + SelectItem::QualifiedWildcard(kind, additional_options) => { + write!(f, "{kind}")?; write!(f, "{additional_options}")?; Ok(()) } @@ -922,6 +1010,26 @@ impl fmt::Display for ExprWithAlias { } } +/// An expression optionally followed by an alias and order by options. +/// +/// Example: +/// ```sql +/// 42 AS myint ASC +/// ``` +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub struct ExprWithAliasAndOrderBy { + pub expr: ExprWithAlias, + pub order_by: OrderByOptions, +} + +impl fmt::Display for ExprWithAliasAndOrderBy { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "{}{}", self.expr, self.order_by) + } +} + /// Arguments to a table-valued function #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] @@ -935,6 +1043,81 @@ pub struct TableFunctionArgs { pub settings: Option>, } +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub enum TableIndexHintType { + Use, + Ignore, + Force, +} + +impl fmt::Display for TableIndexHintType { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.write_str(match self { + TableIndexHintType::Use => "USE", + TableIndexHintType::Ignore => "IGNORE", + TableIndexHintType::Force => "FORCE", + }) + } +} + +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub enum TableIndexType { + Index, + Key, +} + +impl fmt::Display for TableIndexType { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.write_str(match self { + TableIndexType::Index => "INDEX", + TableIndexType::Key => "KEY", + }) + } +} + +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub enum TableIndexHintForClause { + Join, + OrderBy, + GroupBy, +} + +impl fmt::Display for TableIndexHintForClause { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.write_str(match 
self { + TableIndexHintForClause::Join => "JOIN", + TableIndexHintForClause::OrderBy => "ORDER BY", + TableIndexHintForClause::GroupBy => "GROUP BY", + }) + } +} + +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub struct TableIndexHints { + pub hint_type: TableIndexHintType, + pub index_type: TableIndexType, + pub for_clause: Option, + pub index_names: Vec, +} + +impl fmt::Display for TableIndexHints { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "{} {} ", self.hint_type, self.index_type)?; + if let Some(for_clause) = &self.for_clause { + write!(f, "FOR {} ", for_clause)?; + } + write!(f, "({})", display_comma_separated(&self.index_names)) + } +} + /// A table name or a parenthesized subquery with an optional alias #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] @@ -964,6 +1147,14 @@ pub enum TableFactor { with_ordinality: bool, /// [Partition selection](https://dev.mysql.com/doc/refman/8.0/en/partitioning-selection.html), supported by MySQL. partitions: Vec, + /// Optional PartiQL JsonPath: + json_path: Option, + /// Optional table sample modifier + /// See: + sample: Option, + /// Optional index hints(mysql) + /// See: + index_hints: Vec, }, Derived { lateral: bool, @@ -1026,6 +1217,27 @@ pub enum TableFactor { /// The alias for the table. alias: Option, }, + /// The MSSQL's `OPENJSON` table-valued function. + /// + /// ```sql + /// OPENJSON( jsonExpression [ , path ] ) [ ] + /// + /// ::= WITH ( { colName type [ column_path ] [ AS JSON ] } [ ,...n ] ) + /// ```` + /// + /// Reference: + OpenJsonTable { + /// The JSON expression to be evaluated. It must evaluate to a json string + json_expr: Expr, + /// The path to the array or object to be iterated over. + /// It must evaluate to a json array or object. + json_path: Option, + /// The columns to be extracted from each element of the array or object. + /// Each column must have a name and a type. + columns: Vec, + /// The alias for the table. + alias: Option, + }, /// Represents a parenthesized table factor. The SQL spec only allows a /// join expression (`(foo bar [ baz ... ])`) to be nested, /// possibly several times. @@ -1085,6 +1297,215 @@ pub enum TableFactor { symbols: Vec, alias: Option, }, + /// The `XMLTABLE` table-valued function. + /// Part of the SQL standard, supported by PostgreSQL, Oracle, and DB2. + /// + /// + /// + /// ```sql + /// SELECT xmltable.* + /// FROM xmldata, + /// XMLTABLE('//ROWS/ROW' + /// PASSING data + /// COLUMNS id int PATH '@id', + /// ordinality FOR ORDINALITY, + /// "COUNTRY_NAME" text, + /// country_id text PATH 'COUNTRY_ID', + /// size_sq_km float PATH 'SIZE[@unit = "sq_km"]', + /// size_other text PATH 'concat(SIZE[@unit!="sq_km"], " ", SIZE[@unit!="sq_km"]/@unit)', + /// premier_name text PATH 'PREMIER_NAME' DEFAULT 'not specified' + /// ); + /// ```` + XmlTable { + /// Optional XMLNAMESPACES clause (empty if not present) + namespaces: Vec, + /// The row-generating XPath expression. + row_expression: Expr, + /// The PASSING clause specifying the document expression. + passing: XmlPassingClause, + /// The columns to be extracted from each generated row. + columns: Vec, + /// The alias for the table. 
+ alias: Option, + }, +} + +/// The table sample modifier options +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] + +pub enum TableSampleKind { + /// Table sample located before the table alias option + BeforeTableAlias(Box), + /// Table sample located after the table alias option + AfterTableAlias(Box), +} + +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub struct TableSample { + pub modifier: TableSampleModifier, + pub name: Option, + pub quantity: Option, + pub seed: Option, + pub bucket: Option, + pub offset: Option, +} + +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub enum TableSampleModifier { + Sample, + TableSample, +} + +impl fmt::Display for TableSampleModifier { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + TableSampleModifier::Sample => write!(f, "SAMPLE")?, + TableSampleModifier::TableSample => write!(f, "TABLESAMPLE")?, + } + Ok(()) + } +} + +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub struct TableSampleQuantity { + pub parenthesized: bool, + pub value: Expr, + pub unit: Option, +} + +impl fmt::Display for TableSampleQuantity { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + if self.parenthesized { + write!(f, "(")?; + } + write!(f, "{}", self.value)?; + if let Some(unit) = &self.unit { + write!(f, " {}", unit)?; + } + if self.parenthesized { + write!(f, ")")?; + } + Ok(()) + } +} + +/// The table sample method names +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub enum TableSampleMethod { + Row, + Bernoulli, + System, + Block, +} + +impl fmt::Display for TableSampleMethod { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + TableSampleMethod::Bernoulli => write!(f, "BERNOULLI"), + TableSampleMethod::Row => write!(f, "ROW"), + TableSampleMethod::System => write!(f, "SYSTEM"), + TableSampleMethod::Block => write!(f, "BLOCK"), + } + } +} + +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub struct TableSampleSeed { + pub modifier: TableSampleSeedModifier, + pub value: Value, +} + +impl fmt::Display for TableSampleSeed { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{} ({})", self.modifier, self.value)?; + Ok(()) + } +} + +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub enum TableSampleSeedModifier { + Repeatable, + Seed, +} + +impl fmt::Display for TableSampleSeedModifier { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + TableSampleSeedModifier::Repeatable => write!(f, "REPEATABLE"), + TableSampleSeedModifier::Seed => write!(f, "SEED"), + } + } +} + +#[derive(Debug, Clone, 
PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub enum TableSampleUnit { + Rows, + Percent, +} + +impl fmt::Display for TableSampleUnit { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + TableSampleUnit::Percent => write!(f, "PERCENT"), + TableSampleUnit::Rows => write!(f, "ROWS"), + } + } +} + +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub struct TableSampleBucket { + pub bucket: Value, + pub total: Value, + pub on: Option, +} + +impl fmt::Display for TableSampleBucket { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "BUCKET {} OUT OF {}", self.bucket, self.total)?; + if let Some(on) = &self.on { + write!(f, " ON {}", on)?; + } + Ok(()) + } +} +impl fmt::Display for TableSample { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, " {}", self.modifier)?; + if let Some(name) = &self.name { + write!(f, " {}", name)?; + } + if let Some(quantity) = &self.quantity { + write!(f, " {}", quantity)?; + } + if let Some(seed) = &self.seed { + write!(f, " {}", seed)?; + } + if let Some(bucket) = &self.bucket { + write!(f, " ({})", bucket)?; + } + if let Some(offset) = &self.offset { + write!(f, " OFFSET {}", offset)?; + } + Ok(()) + } } /// The source of values in a `PIVOT` operation. @@ -1344,8 +1765,14 @@ impl fmt::Display for TableFactor { version, partitions, with_ordinality, + json_path, + sample, + index_hints, } => { write!(f, "{name}")?; + if let Some(json_path) = json_path { + write!(f, "{json_path}")?; + } if !partitions.is_empty() { write!(f, "PARTITION ({})", display_comma_separated(partitions))?; } @@ -1363,15 +1790,24 @@ impl fmt::Display for TableFactor { if *with_ordinality { write!(f, " WITH ORDINALITY")?; } + if let Some(TableSampleKind::BeforeTableAlias(sample)) = sample { + write!(f, "{sample}")?; + } if let Some(alias) = alias { write!(f, " AS {alias}")?; } + if !index_hints.is_empty() { + write!(f, " {}", display_separated(index_hints, " "))?; + } if !with_hints.is_empty() { write!(f, " WITH ({})", display_comma_separated(with_hints))?; } if let Some(version) = version { write!(f, "{version}")?; } + if let Some(TableSampleKind::AfterTableAlias(sample)) = sample { + write!(f, "{sample}")?; + } Ok(()) } TableFactor::Derived { @@ -1451,6 +1887,25 @@ impl fmt::Display for TableFactor { } Ok(()) } + TableFactor::OpenJsonTable { + json_expr, + json_path, + columns, + alias, + } => { + write!(f, "OPENJSON({json_expr}")?; + if let Some(json_path) = json_path { + write!(f, ", {json_path}")?; + } + write!(f, ")")?; + if !columns.is_empty() { + write!(f, " WITH ({})", display_comma_separated(columns))?; + } + if let Some(alias) = alias { + write!(f, " AS {alias}")?; + } + Ok(()) + } TableFactor::NestedJoin { table_with_joins, alias, @@ -1538,6 +1993,31 @@ impl fmt::Display for TableFactor { } Ok(()) } + TableFactor::XmlTable { + row_expression, + passing, + columns, + alias, + namespaces, + } => { + write!(f, "XMLTABLE(")?; + if !namespaces.is_empty() { + write!( + f, + "XMLNAMESPACES({}), ", + display_comma_separated(namespaces) + )?; + } + write!( + f, + "{row_expression}{passing} COLUMNS {columns})", + columns = display_comma_separated(columns) + )?; + if let Some(alias) = alias { + write!(f, " AS {alias}")?; + } + Ok(()) + } } } } @@ -1547,7 +2027,7 @@ 
impl fmt::Display for TableFactor { #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] pub struct TableAlias { pub name: Ident, - pub columns: Vec, + pub columns: Vec, } impl fmt::Display for TableAlias { @@ -1560,17 +2040,58 @@ impl fmt::Display for TableAlias { } } +/// SQL column definition in a table expression alias. +/// Most of the time, the data type is not specified. +/// But some table-valued functions do require specifying the data type. +/// +/// See #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] -pub enum TableVersion { - ForSystemTimeAsOf(Expr), +pub struct TableAliasColumnDef { + /// Column name alias + pub name: Ident, + /// Some table-valued functions require specifying the data type in the alias. + pub data_type: Option, } -impl Display for TableVersion { +impl TableAliasColumnDef { + /// Create a new table alias column definition with only a name and no type + pub fn from_name>(name: S) -> Self { + TableAliasColumnDef { + name: Ident::new(name), + data_type: None, + } + } +} + +impl fmt::Display for TableAliasColumnDef { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "{}", self.name)?; + if let Some(ref data_type) = self.data_type { + write!(f, " {}", data_type)?; + } + Ok(()) + } +} + +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub enum TableVersion { + /// When the table version is defined using `FOR SYSTEM_TIME AS OF`. + /// For example: `SELECT * FROM tbl FOR SYSTEM_TIME AS OF TIMESTAMP_SUB(CURRENT_TIMESTAMP(), INTERVAL 1 HOUR)` + ForSystemTimeAsOf(Expr), + /// When the table version is defined using a function. 
+ /// For example: `SELECT * FROM tbl AT(TIMESTAMP => '2020-08-14 09:30:00')` + Function(Expr), +} + +impl Display for TableVersion { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { TableVersion::ForSystemTimeAsOf(e) => write!(f, " FOR SYSTEM_TIME AS OF {e}")?, + TableVersion::Function(func) => write!(f, " {func}")?, } Ok(()) } @@ -1597,7 +2118,7 @@ impl fmt::Display for Join { } fn suffix(constraint: &'_ JoinConstraint) -> impl fmt::Display + '_ { struct Suffix<'a>(&'a JoinConstraint); - impl<'a> fmt::Display for Suffix<'a> { + impl fmt::Display for Suffix<'_> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self.0 { JoinConstraint::On(expr) => write!(f, " ON {expr}"), @@ -1615,27 +2136,48 @@ impl fmt::Display for Join { } match &self.join_operator { - JoinOperator::Inner(constraint) => write!( + JoinOperator::Join(constraint) => write!( f, " {}JOIN {}{}", prefix(constraint), self.relation, suffix(constraint) ), - JoinOperator::LeftOuter(constraint) => write!( + JoinOperator::Inner(constraint) => write!( + f, + " {}INNER JOIN {}{}", + prefix(constraint), + self.relation, + suffix(constraint) + ), + JoinOperator::Left(constraint) => write!( f, " {}LEFT JOIN {}{}", prefix(constraint), self.relation, suffix(constraint) ), - JoinOperator::RightOuter(constraint) => write!( + JoinOperator::LeftOuter(constraint) => write!( + f, + " {}LEFT OUTER JOIN {}{}", + prefix(constraint), + self.relation, + suffix(constraint) + ), + JoinOperator::Right(constraint) => write!( f, " {}RIGHT JOIN {}{}", prefix(constraint), self.relation, suffix(constraint) ), + JoinOperator::RightOuter(constraint) => write!( + f, + " {}RIGHT OUTER JOIN {}{}", + prefix(constraint), + self.relation, + suffix(constraint) + ), JoinOperator::FullOuter(constraint) => write!( f, " {}FULL JOIN {}{}", @@ -1644,6 +2186,13 @@ impl fmt::Display for Join { suffix(constraint) ), JoinOperator::CrossJoin => write!(f, " CROSS JOIN {}", self.relation), + JoinOperator::Semi(constraint) => write!( + f, + " {}SEMI JOIN {}{}", + prefix(constraint), + self.relation, + suffix(constraint) + ), JoinOperator::LeftSemi(constraint) => write!( f, " {}LEFT SEMI JOIN {}{}", @@ -1658,6 +2207,13 @@ impl fmt::Display for Join { self.relation, suffix(constraint) ), + JoinOperator::Anti(constraint) => write!( + f, + " {}ANTI JOIN {}{}", + prefix(constraint), + self.relation, + suffix(constraint) + ), JoinOperator::LeftAnti(constraint) => write!( f, " {}LEFT ANTI JOIN {}{}", @@ -1683,6 +2239,9 @@ impl fmt::Display for Join { self.relation, suffix(constraint) ), + JoinOperator::StraightJoin(constraint) => { + write!(f, " STRAIGHT_JOIN {}{}", self.relation, suffix(constraint)) + } } } } @@ -1691,15 +2250,22 @@ impl fmt::Display for Join { #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] pub enum JoinOperator { + Join(JoinConstraint), Inner(JoinConstraint), + Left(JoinConstraint), LeftOuter(JoinConstraint), + Right(JoinConstraint), RightOuter(JoinConstraint), FullOuter(JoinConstraint), CrossJoin, + /// SEMI (non-standard) + Semi(JoinConstraint), /// LEFT SEMI (non-standard) LeftSemi(JoinConstraint), /// RIGHT SEMI (non-standard) RightSemi(JoinConstraint), + /// ANTI (non-standard) + Anti(JoinConstraint), /// LEFT ANTI (non-standard) LeftAnti(JoinConstraint), /// RIGHT ANTI (non-standard) @@ -1716,6 +2282,10 @@ pub enum JoinOperator { match_condition: Expr, constraint: JoinConstraint, }, + /// STRAIGHT_JOIN (non-standard) + /// + /// See . 
+ StraightJoin(JoinConstraint), } #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] @@ -1723,35 +2293,55 @@ pub enum JoinOperator { #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] pub enum JoinConstraint { On(Expr), - Using(Vec), + Using(Vec), Natural, None, } +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub enum OrderByKind { + /// ALL syntax of [DuckDB] and [ClickHouse]. + /// + /// [DuckDB]: + /// [ClickHouse]: + All(OrderByOptions), + + /// Expressions + Expressions(Vec), +} + #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] pub struct OrderBy { - pub exprs: Vec, + pub kind: OrderByKind, + /// Optional: `INTERPOLATE` /// Supported by [ClickHouse syntax] - /// - /// [ClickHouse syntax]: pub interpolate: Option, } impl fmt::Display for OrderBy { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "ORDER BY")?; - if !self.exprs.is_empty() { - write!(f, " {}", display_comma_separated(&self.exprs))?; + match &self.kind { + OrderByKind::Expressions(exprs) => { + write!(f, " {}", display_comma_separated(exprs))?; + } + OrderByKind::All(all) => { + write!(f, " ALL{}", all)?; + } } + if let Some(ref interpolate) = self.interpolate { match &interpolate.exprs { Some(exprs) => write!(f, " INTERPOLATE ({})", display_comma_separated(exprs))?, None => write!(f, " INTERPOLATE")?, } } + Ok(()) } } @@ -1762,10 +2352,7 @@ impl fmt::Display for OrderBy { #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] pub struct OrderByExpr { pub expr: Expr, - /// Optional `ASC` or `DESC` - pub asc: Option, - /// Optional `NULLS FIRST` or `NULLS LAST` - pub nulls_first: Option, + pub options: OrderByOptions, /// Optional: `WITH FILL` /// Supported by [ClickHouse syntax]: pub with_fill: Option, @@ -1773,17 +2360,7 @@ pub struct OrderByExpr { impl fmt::Display for OrderByExpr { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "{}", self.expr)?; - match self.asc { - Some(true) => write!(f, " ASC")?, - Some(false) => write!(f, " DESC")?, - None => (), - } - match self.nulls_first { - Some(true) => write!(f, " NULLS FIRST")?, - Some(false) => write!(f, " NULLS LAST")?, - None => (), - } + write!(f, "{}{}", self.expr, self.options)?; if let Some(ref with_fill) = self.with_fill { write!(f, " {}", with_fill)? 
} @@ -1849,6 +2426,84 @@ impl fmt::Display for InterpolateExpr { } } +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub struct OrderByOptions { + /// Optional `ASC` or `DESC` + pub asc: Option, + /// Optional `NULLS FIRST` or `NULLS LAST` + pub nulls_first: Option, +} + +impl fmt::Display for OrderByOptions { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self.asc { + Some(true) => write!(f, " ASC")?, + Some(false) => write!(f, " DESC")?, + None => (), + } + match self.nulls_first { + Some(true) => write!(f, " NULLS FIRST")?, + Some(false) => write!(f, " NULLS LAST")?, + None => (), + } + Ok(()) + } +} + +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub enum LimitClause { + /// Standard SQL syntax + /// + /// `LIMIT [BY ,,...] [OFFSET ]` + LimitOffset { + /// `LIMIT { | ALL }` + limit: Option, + /// `OFFSET [ { ROW | ROWS } ]` + offset: Option, + /// `BY { ,,... } }` + /// + /// [ClickHouse](https://clickhouse.com/docs/sql-reference/statements/select/limit-by) + limit_by: Vec, + }, + /// [MySQL]-specific syntax; the order of expressions is reversed. + /// + /// `LIMIT , ` + /// + /// [MySQL]: https://dev.mysql.com/doc/refman/8.4/en/select.html + OffsetCommaLimit { offset: Expr, limit: Expr }, +} + +impl fmt::Display for LimitClause { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + LimitClause::LimitOffset { + limit, + limit_by, + offset, + } => { + if let Some(ref limit) = limit { + write!(f, " LIMIT {limit}")?; + } + if let Some(ref offset) = offset { + write!(f, " {offset}")?; + } + if !limit_by.is_empty() { + debug_assert!(limit.is_some()); + write!(f, " BY {}", display_separated(limit_by, ", "))?; + } + Ok(()) + } + LimitClause::OffsetCommaLimit { offset, limit } => { + write!(f, " LIMIT {}, {}", offset, limit) + } + } + } +} + #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] @@ -1884,6 +2539,135 @@ impl fmt::Display for OffsetRows { } } +/// Pipe syntax, first introduced in Google BigQuery. +/// Example: +/// +/// ```sql +/// FROM Produce +/// |> WHERE sales > 0 +/// |> AGGREGATE SUM(sales) AS total_sales, COUNT(*) AS num_sales +/// GROUP BY item; +/// ``` +/// +/// See +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub enum PipeOperator { + /// Limits the number of rows to return in a query, with an optional OFFSET clause to skip over rows. + /// + /// Syntax: `|> LIMIT [OFFSET ]` + /// + /// See more at + Limit { expr: Expr, offset: Option }, + /// Filters the results of the input table. + /// + /// Syntax: `|> WHERE ` + /// + /// See more at + Where { expr: Expr }, + /// `ORDER BY [ASC|DESC], ...` + OrderBy { exprs: Vec }, + /// Produces a new table with the listed columns, similar to the outermost SELECT clause in a table subquery in standard syntax. + /// + /// Syntax `|> SELECT [[AS] alias], ...` + /// + /// See more at + Select { exprs: Vec }, + /// Propagates the existing table and adds computed columns, similar to SELECT *, new_column in standard syntax. 
+ /// + /// Syntax: `|> EXTEND [[AS] alias], ...` + /// + /// See more at + Extend { exprs: Vec }, + /// Replaces the value of a column in the current table, similar to SELECT * REPLACE (expression AS column) in standard syntax. + /// + /// Syntax: `|> SET = , ...` + /// + /// See more at + Set { assignments: Vec }, + /// Removes listed columns from the current table, similar to SELECT * EXCEPT (column) in standard syntax. + /// + /// Syntax: `|> DROP , ...` + /// + /// See more at + Drop { columns: Vec }, + /// Introduces a table alias for the input table, similar to applying the AS alias clause on a table subquery in standard syntax. + /// + /// Syntax: `|> AS ` + /// + /// See more at + As { alias: Ident }, + /// Performs aggregation on data across grouped rows or an entire table. + /// + /// Syntax: `|> AGGREGATE [[AS] alias], ...` + /// + /// Syntax: + /// ```norust + /// |> AGGREGATE [ [[AS] alias], ...] + /// GROUP BY [AS alias], ... + /// ``` + /// + /// See more at + Aggregate { + full_table_exprs: Vec, + group_by_expr: Vec, + }, +} + +impl fmt::Display for PipeOperator { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + PipeOperator::Select { exprs } => { + write!(f, "SELECT {}", display_comma_separated(exprs.as_slice())) + } + PipeOperator::Extend { exprs } => { + write!(f, "EXTEND {}", display_comma_separated(exprs.as_slice())) + } + PipeOperator::Set { assignments } => { + write!(f, "SET {}", display_comma_separated(assignments.as_slice())) + } + PipeOperator::Drop { columns } => { + write!(f, "DROP {}", display_comma_separated(columns.as_slice())) + } + PipeOperator::As { alias } => { + write!(f, "AS {}", alias) + } + PipeOperator::Limit { expr, offset } => { + write!(f, "LIMIT {}", expr)?; + if let Some(offset) = offset { + write!(f, " OFFSET {}", offset)?; + } + Ok(()) + } + PipeOperator::Aggregate { + full_table_exprs, + group_by_expr, + } => { + write!(f, "AGGREGATE")?; + if !full_table_exprs.is_empty() { + write!( + f, + " {}", + display_comma_separated(full_table_exprs.as_slice()) + )?; + } + if !group_by_expr.is_empty() { + write!(f, " GROUP BY {}", display_comma_separated(group_by_expr))?; + } + Ok(()) + } + + PipeOperator::Where { expr } => { + write!(f, "WHERE {}", expr) + } + PipeOperator::OrderBy { exprs } => { + write!(f, "ORDER BY {}", display_comma_separated(exprs.as_slice())) + } + } + } +} + #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] @@ -2073,13 +2857,18 @@ impl fmt::Display for SelectInto { /// e.g. GROUP BY year WITH ROLLUP WITH TOTALS /// /// [ClickHouse]: -#[derive(Debug, Copy, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] pub enum GroupByWithModifier { Rollup, Cube, Totals, + /// Hive supports GROUP BY GROUPING SETS syntax. + /// e.g. 
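As a rough illustration of the pipe-operator rendering above (not part of the patch): the `Display` impl prints each operator without the leading `|>`, which is presumably added where the whole query is rendered. The column and alias names are invented, and `Drop` is assumed to hold plain `Ident`s as the enum above suggests.

```rust
use sqlparser::ast::{Ident, PipeOperator};

// `|> AS produce`
let as_op = PipeOperator::As {
    alias: Ident::new("produce"),
};
assert_eq!(as_op.to_string(), "AS produce");

// `|> DROP sales, cost`
let drop_op = PipeOperator::Drop {
    columns: vec![Ident::new("sales"), Ident::new("cost")],
};
assert_eq!(drop_op.to_string(), "DROP sales, cost");
```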
GROUP BY year , month GROUPING SETS((year,month),(year),(month)) + /// + /// [Hive]: + GroupingSets(Expr), } impl fmt::Display for GroupByWithModifier { @@ -2088,6 +2877,9 @@ impl fmt::Display for GroupByWithModifier { GroupByWithModifier::Rollup => write!(f, "WITH ROLLUP"), GroupByWithModifier::Cube => write!(f, "WITH CUBE"), GroupByWithModifier::Totals => write!(f, "WITH TOTALS"), + GroupByWithModifier::GroupingSets(expr) => { + write!(f, "{expr}") + } } } } @@ -2153,6 +2945,29 @@ impl fmt::Display for FormatClause { } } +/// FORMAT identifier in input context, specific to ClickHouse. +/// +/// [ClickHouse]: +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub struct InputFormatClause { + pub ident: Ident, + pub values: Vec, +} + +impl fmt::Display for InputFormatClause { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "FORMAT {}", self.ident)?; + + if !self.values.is_empty() { + write!(f, " {}", display_comma_separated(self.values.as_slice()))?; + } + + Ok(()) + } +} + /// FOR XML or FOR JSON clause, specific to MSSQL /// (formats the output of a query as XML or JSON) #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] @@ -2276,19 +3091,84 @@ impl fmt::Display for ForJson { } /// A single column definition in MySQL's `JSON_TABLE` table valued function. +/// +/// See +/// - [MySQL's JSON_TABLE documentation](https://dev.mysql.com/doc/refman/8.0/en/json-table-functions.html#function_json-table) +/// - [Oracle's JSON_TABLE documentation](https://docs.oracle.com/en/database/oracle/oracle-database/21/sqlrf/JSON_TABLE.html) +/// - [MariaDB's JSON_TABLE documentation](https://mariadb.com/kb/en/json_table/) +/// /// ```sql /// SELECT * /// FROM JSON_TABLE( /// '["a", "b"]', /// '$[*]' COLUMNS ( -/// value VARCHAR(20) PATH '$' +/// name FOR ORDINALITY, +/// value VARCHAR(20) PATH '$', +/// NESTED PATH '$[*]' COLUMNS ( +/// value VARCHAR(20) PATH '$' +/// ) /// ) /// ) AS jt; /// ``` #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -pub struct JsonTableColumn { +pub enum JsonTableColumn { + /// A named column with a JSON path + Named(JsonTableNamedColumn), + /// The FOR ORDINALITY column, which is a special column that returns the index of the current row in a JSON array. + ForOrdinality(Ident), + /// A set of nested columns, which extracts data from a nested JSON array. 
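A quick, illustrative check of the new ClickHouse input `FORMAT` clause above (not part of the patch); the format name is just an example, and an empty `values` list is used so nothing beyond the identifier is printed.

```rust
use sqlparser::ast::{Ident, InputFormatClause};

// ClickHouse `INSERT INTO t FORMAT JSONEachRow ...` carries the format name
// plus any inline values; with no values only `FORMAT <ident>` is rendered.
let clause = InputFormatClause {
    ident: Ident::new("JSONEachRow"),
    values: vec![],
};
assert_eq!(clause.to_string(), "FORMAT JSONEachRow");
```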
+ Nested(JsonTableNestedColumn), +} + +impl fmt::Display for JsonTableColumn { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + JsonTableColumn::Named(json_table_named_column) => { + write!(f, "{json_table_named_column}") + } + JsonTableColumn::ForOrdinality(ident) => write!(f, "{} FOR ORDINALITY", ident), + JsonTableColumn::Nested(json_table_nested_column) => { + write!(f, "{json_table_nested_column}") + } + } + } +} + +/// A nested column in a JSON_TABLE column list +/// +/// See +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +pub struct JsonTableNestedColumn { + pub path: Value, + pub columns: Vec, +} + +impl fmt::Display for JsonTableNestedColumn { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!( + f, + "NESTED PATH {} COLUMNS ({})", + self.path, + display_comma_separated(&self.columns) + ) + } +} + +/// A single column definition in MySQL's `JSON_TABLE` table valued function. +/// +/// See +/// +/// ```sql +/// value VARCHAR(20) PATH '$' +/// ``` +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +pub struct JsonTableNamedColumn { /// The name of the column to be extracted. pub name: Ident, /// The type of the column to be extracted. @@ -2303,7 +3183,7 @@ pub struct JsonTableColumn { pub on_error: Option, } -impl fmt::Display for JsonTableColumn { +impl fmt::Display for JsonTableNamedColumn { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!( f, @@ -2346,6 +3226,40 @@ impl fmt::Display for JsonTableColumnErrorHandling { } } +/// A single column definition in MSSQL's `OPENJSON WITH` clause. +/// +/// ```sql +/// colName type [ column_path ] [ AS JSON ] +/// ``` +/// +/// Reference: +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +pub struct OpenJsonTableColumn { + /// The name of the column to be extracted. + pub name: Ident, + /// The type of the column to be extracted. + pub r#type: DataType, + /// The path to the column to be extracted. Must be a literal string. + pub path: Option, + /// The `AS JSON` option. 
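An illustrative sketch of the reworked `JSON_TABLE` column variants (not part of the patch); the identifiers and the path literal are made up, and `JsonTableNestedColumn.path` is assumed to be a `Value` literal as the struct above indicates.

```rust
use sqlparser::ast::{Ident, JsonTableColumn, JsonTableNestedColumn, Value};

// `seq FOR ORDINALITY`
let ord = JsonTableColumn::ForOrdinality(Ident::new("seq"));
assert_eq!(ord.to_string(), "seq FOR ORDINALITY");

// `NESTED PATH '$[*]' COLUMNS (n FOR ORDINALITY)`
let nested = JsonTableColumn::Nested(JsonTableNestedColumn {
    path: Value::SingleQuotedString("$[*]".to_string()),
    columns: vec![JsonTableColumn::ForOrdinality(Ident::new("n"))],
});
assert_eq!(
    nested.to_string(),
    "NESTED PATH '$[*]' COLUMNS (n FOR ORDINALITY)"
);
```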
+ pub as_json: bool, +} + +impl fmt::Display for OpenJsonTableColumn { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "{} {}", self.name, self.r#type)?; + if let Some(path) = &self.path { + write!(f, " '{}'", value::escape_single_quote_string(path))?; + } + if self.as_json { + write!(f, " AS JSON")?; + } + Ok(()) + } +} + /// BigQuery supports ValueTables which have 2 modes: /// `SELECT AS STRUCT` /// `SELECT AS VALUE` @@ -2366,3 +3280,146 @@ impl fmt::Display for ValueTableMode { } } } + +/// The `FROM` clause of an `UPDATE TABLE` statement +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub enum UpdateTableFromKind { + /// Update Statement where the 'FROM' clause is before the 'SET' keyword (Supported by Snowflake) + /// For Example: `UPDATE FROM t1 SET t1.name='aaa'` + BeforeSet(Vec), + /// Update Statement where the 'FROM' clause is after the 'SET' keyword (Which is the standard way) + /// For Example: `UPDATE SET t1.name='aaa' FROM t1` + AfterSet(Vec), +} + +/// Defines the options for an XmlTable column: Named or ForOrdinality +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +pub enum XmlTableColumnOption { + /// A named column with a type, optional path, and default value. + NamedInfo { + /// The type of the column to be extracted. + r#type: DataType, + /// The path to the column to be extracted. If None, defaults to the column name. + path: Option, + /// Default value if path does not match + default: Option, + /// Whether the column is nullable (NULL=true, NOT NULL=false) + nullable: bool, + }, + /// The FOR ORDINALITY marker + ForOrdinality, +} + +/// A single column definition in XMLTABLE +/// +/// ```sql +/// COLUMNS +/// id int PATH '@id', +/// ordinality FOR ORDINALITY, +/// "COUNTRY_NAME" text, +/// country_id text PATH 'COUNTRY_ID', +/// size_sq_km float PATH 'SIZE[@unit = "sq_km"]', +/// size_other text PATH 'concat(SIZE[@unit!="sq_km"], " ", SIZE[@unit!="sq_km"]/@unit)', +/// premier_name text PATH 'PREMIER_NAME' DEFAULT 'not specified' +/// ``` +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +pub struct XmlTableColumn { + /// The name of the column. 
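A small sketch of the MSSQL `OPENJSON WITH` column added above (not part of the patch). The names are placeholders, and `path` is assumed to be an optional plain string since the `Display` impl escapes and single-quotes it.

```rust
use sqlparser::ast::{DataType, Ident, OpenJsonTableColumn};

let col = OpenJsonTableColumn {
    name: Ident::new("price"),
    r#type: DataType::Int(None),
    path: Some("$.price".to_string()),
    as_json: false,
};

// `price INT '$.price'`; with `as_json: true` it would end in ` AS JSON`.
assert_eq!(col.to_string(), "price INT '$.price'");
```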
+ pub name: Ident, + /// Column options: type/path/default or FOR ORDINALITY + pub option: XmlTableColumnOption, +} + +impl fmt::Display for XmlTableColumn { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.name)?; + match &self.option { + XmlTableColumnOption::NamedInfo { + r#type, + path, + default, + nullable, + } => { + write!(f, " {}", r#type)?; + if let Some(p) = path { + write!(f, " PATH {}", p)?; + } + if let Some(d) = default { + write!(f, " DEFAULT {}", d)?; + } + if !*nullable { + write!(f, " NOT NULL")?; + } + Ok(()) + } + XmlTableColumnOption::ForOrdinality => { + write!(f, " FOR ORDINALITY") + } + } + } +} + +/// Argument passed in the XMLTABLE PASSING clause +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +pub struct XmlPassingArgument { + pub expr: Expr, + pub alias: Option, + pub by_value: bool, // True if BY VALUE is specified +} + +impl fmt::Display for XmlPassingArgument { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + if self.by_value { + write!(f, "BY VALUE ")?; + } + write!(f, "{}", self.expr)?; + if let Some(alias) = &self.alias { + write!(f, " AS {}", alias)?; + } + Ok(()) + } +} + +/// The PASSING clause for XMLTABLE +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +pub struct XmlPassingClause { + pub arguments: Vec, +} + +impl fmt::Display for XmlPassingClause { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + if !self.arguments.is_empty() { + write!(f, " PASSING {}", display_comma_separated(&self.arguments))?; + } + Ok(()) + } +} + +/// Represents a single XML namespace definition in the XMLNAMESPACES clause. +/// +/// `namespace_uri AS namespace_name` +#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +pub struct XmlNamespaceDefinition { + /// The namespace URI (a text expression). + pub uri: Expr, + /// The alias for the namespace (a simple identifier). + pub name: Ident, +} + +impl fmt::Display for XmlNamespaceDefinition { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{} AS {}", self.uri, self.name) + } +} diff --git a/src/ast/spans.rs b/src/ast/spans.rs new file mode 100644 index 000000000..ff2a61cf3 --- /dev/null +++ b/src/ast/spans.rs @@ -0,0 +1,2460 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
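Before the new `spans.rs` module starts in earnest, a short illustrative sketch of the XMLTABLE helpers added to `query.rs` above (not part of the patch); the identifiers are invented.

```rust
use sqlparser::ast::{Expr, Ident, XmlPassingArgument, XmlTableColumn, XmlTableColumnOption};

// `ordinality FOR ORDINALITY`
let col = XmlTableColumn {
    name: Ident::new("ordinality"),
    option: XmlTableColumnOption::ForOrdinality,
};
assert_eq!(col.to_string(), "ordinality FOR ORDINALITY");

// `BY VALUE xml_input AS doc` in the PASSING clause
let arg = XmlPassingArgument {
    expr: Expr::Identifier(Ident::new("xml_input")),
    alias: Some(Ident::new("doc")),
    by_value: true,
};
assert_eq!(arg.to_string(), "BY VALUE xml_input AS doc");
```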
+ +use crate::ast::query::SelectItemQualifiedWildcardKind; +use core::iter; + +use crate::tokenizer::Span; + +use super::{ + dcl::SecondaryRoles, value::ValueWithSpan, AccessExpr, AlterColumnOperation, + AlterIndexOperation, AlterTableOperation, Array, Assignment, AssignmentTarget, AttachedToken, + BeginEndStatements, CaseStatement, CloseCursor, ClusteredIndex, ColumnDef, ColumnOption, + ColumnOptionDef, ConditionalStatementBlock, ConditionalStatements, ConflictTarget, ConnectBy, + ConstraintCharacteristics, CopySource, CreateIndex, CreateTable, CreateTableOptions, Cte, + Delete, DoUpdate, ExceptSelectItem, ExcludeSelectItem, Expr, ExprWithAlias, Fetch, FromTable, + Function, FunctionArg, FunctionArgExpr, FunctionArgumentClause, FunctionArgumentList, + FunctionArguments, GroupByExpr, HavingBound, IfStatement, IlikeSelectItem, Insert, Interpolate, + InterpolateExpr, Join, JoinConstraint, JoinOperator, JsonPath, JsonPathElem, LateralView, + LimitClause, MatchRecognizePattern, Measure, NamedParenthesizedList, NamedWindowDefinition, + ObjectName, ObjectNamePart, Offset, OnConflict, OnConflictAction, OnInsert, OpenStatement, + OrderBy, OrderByExpr, OrderByKind, Partition, PivotValueSource, ProjectionSelect, Query, + RaiseStatement, RaiseStatementValue, ReferentialAction, RenameSelectItem, ReplaceSelectElement, + ReplaceSelectItem, Select, SelectInto, SelectItem, SetExpr, SqlOption, Statement, Subscript, + SymbolDefinition, TableAlias, TableAliasColumnDef, TableConstraint, TableFactor, TableObject, + TableOptionsClustered, TableWithJoins, UpdateTableFromKind, Use, Value, Values, ViewColumnDef, + WhileStatement, WildcardAdditionalOptions, With, WithFill, +}; + +/// Given an iterator of spans, return the [Span::union] of all spans. +fn union_spans>(iter: I) -> Span { + Span::union_iter(iter) +} + +/// Trait for AST nodes that have a source location information. +/// +/// # Notes: +/// +/// Source [`Span`] are not yet complete. They may be missing: +/// +/// 1. keywords or other tokens +/// 2. span information entirely, in which case they return [`Span::empty()`]. +/// +/// Note Some impl blocks (rendered below) are annotated with which nodes are +/// missing spans. See [this ticket] for additional information and status. +/// +/// [this ticket]: https://github.com/apache/datafusion-sqlparser-rs/issues/1548 +/// +/// # Example +/// ``` +/// # use sqlparser::parser::{Parser, ParserError}; +/// # use sqlparser::ast::Spanned; +/// # use sqlparser::dialect::GenericDialect; +/// # use sqlparser::tokenizer::Location; +/// # fn main() -> Result<(), ParserError> { +/// let dialect = GenericDialect {}; +/// let sql = r#"SELECT * +/// FROM table_1"#; +/// let statements = Parser::new(&dialect) +/// .try_with_sql(sql)? +/// .parse_statements()?; +/// // Get the span of the first statement (SELECT) +/// let span = statements[0].span(); +/// // statement starts at line 1, column 1 (1 based, not 0 based) +/// assert_eq!(span.start, Location::new(1, 1)); +/// // statement ends on line 2, column 15 +/// assert_eq!(span.end, Location::new(2, 15)); +/// # Ok(()) +/// # } +/// ``` +/// +pub trait Spanned { + /// Return the [`Span`] (the minimum and maximum [`Location`]) for this AST + /// node, by recursively combining the spans of its children. 
+ /// + /// [`Location`]: crate::tokenizer::Location + fn span(&self) -> Span; +} + +impl Spanned for Query { + fn span(&self) -> Span { + let Query { + with, + body, + order_by, + limit_clause, + fetch, + locks: _, // todo + for_clause: _, // todo, mssql specific + settings: _, // todo, clickhouse specific + format_clause: _, // todo, clickhouse specific + pipe_operators: _, // todo bigquery specific + } = self; + + union_spans( + with.iter() + .map(|i| i.span()) + .chain(core::iter::once(body.span())) + .chain(order_by.as_ref().map(|i| i.span())) + .chain(limit_clause.as_ref().map(|i| i.span())) + .chain(fetch.as_ref().map(|i| i.span())), + ) + } +} + +impl Spanned for LimitClause { + fn span(&self) -> Span { + match self { + LimitClause::LimitOffset { + limit, + offset, + limit_by, + } => union_spans( + limit + .iter() + .map(|i| i.span()) + .chain(offset.as_ref().map(|i| i.span())) + .chain(limit_by.iter().map(|i| i.span())), + ), + LimitClause::OffsetCommaLimit { offset, limit } => offset.span().union(&limit.span()), + } + } +} + +impl Spanned for Offset { + fn span(&self) -> Span { + let Offset { + value, + rows: _, // enum + } = self; + + value.span() + } +} + +impl Spanned for Fetch { + fn span(&self) -> Span { + let Fetch { + with_ties: _, // bool + percent: _, // bool + quantity, + } = self; + + quantity.as_ref().map_or(Span::empty(), |i| i.span()) + } +} + +impl Spanned for With { + fn span(&self) -> Span { + let With { + with_token, + recursive: _, // bool + cte_tables, + } = self; + + union_spans( + core::iter::once(with_token.0.span).chain(cte_tables.iter().map(|item| item.span())), + ) + } +} + +impl Spanned for Cte { + fn span(&self) -> Span { + let Cte { + alias, + query, + from, + materialized: _, // enum + closing_paren_token, + } = self; + + union_spans( + core::iter::once(alias.span()) + .chain(core::iter::once(query.span())) + .chain(from.iter().map(|item| item.span)) + .chain(core::iter::once(closing_paren_token.0.span)), + ) + } +} + +/// # partial span +/// +/// [SetExpr::Table] is not implemented. 
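Building on the trait-level doc example above, here is a hedged sketch of drilling down to the span of a single projection expression; the SQL is arbitrary and only the 1-based start location is asserted, since exact end positions depend on tokenizer details.

```rust
use sqlparser::ast::{SelectItem, SetExpr, Spanned, Statement};
use sqlparser::dialect::GenericDialect;
use sqlparser::parser::Parser;
use sqlparser::tokenizer::Location;

let sql = "SELECT a + b FROM t";
let statements = Parser::parse_sql(&GenericDialect {}, sql).unwrap();

if let Statement::Query(query) = &statements[0] {
    if let SetExpr::Select(select) = query.body.as_ref() {
        if let SelectItem::UnnamedExpr(expr) = &select.projection[0] {
            // `a + b` starts right after `SELECT `, i.e. line 1, column 8.
            assert_eq!(expr.span().start, Location::new(1, 8));
        }
    }
}
```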
+impl Spanned for SetExpr { + fn span(&self) -> Span { + match self { + SetExpr::Select(select) => select.span(), + SetExpr::Query(query) => query.span(), + SetExpr::SetOperation { + op: _, + set_quantifier: _, + left, + right, + } => left.span().union(&right.span()), + SetExpr::Values(values) => values.span(), + SetExpr::Insert(statement) => statement.span(), + SetExpr::Table(_) => Span::empty(), + SetExpr::Update(statement) => statement.span(), + SetExpr::Delete(statement) => statement.span(), + } + } +} + +impl Spanned for Values { + fn span(&self) -> Span { + let Values { + explicit_row: _, // bool, + rows, + } = self; + + union_spans( + rows.iter() + .map(|row| union_spans(row.iter().map(|expr| expr.span()))), + ) + } +} + +/// # partial span +/// +/// Missing spans: +/// - [Statement::CopyIntoSnowflake] +/// - [Statement::CreateSecret] +/// - [Statement::CreateRole] +/// - [Statement::AlterType] +/// - [Statement::AlterRole] +/// - [Statement::AttachDatabase] +/// - [Statement::AttachDuckDBDatabase] +/// - [Statement::DetachDuckDBDatabase] +/// - [Statement::Drop] +/// - [Statement::DropFunction] +/// - [Statement::DropProcedure] +/// - [Statement::DropSecret] +/// - [Statement::Declare] +/// - [Statement::CreateExtension] +/// - [Statement::Fetch] +/// - [Statement::Flush] +/// - [Statement::Discard] +/// - [Statement::Set] +/// - [Statement::ShowFunctions] +/// - [Statement::ShowVariable] +/// - [Statement::ShowStatus] +/// - [Statement::ShowVariables] +/// - [Statement::ShowCreate] +/// - [Statement::ShowColumns] +/// - [Statement::ShowTables] +/// - [Statement::ShowCollation] +/// - [Statement::StartTransaction] +/// - [Statement::Comment] +/// - [Statement::Commit] +/// - [Statement::Rollback] +/// - [Statement::CreateSchema] +/// - [Statement::CreateDatabase] +/// - [Statement::CreateFunction] +/// - [Statement::CreateTrigger] +/// - [Statement::DropTrigger] +/// - [Statement::CreateProcedure] +/// - [Statement::CreateMacro] +/// - [Statement::CreateStage] +/// - [Statement::Assert] +/// - [Statement::Grant] +/// - [Statement::Revoke] +/// - [Statement::Deallocate] +/// - [Statement::Execute] +/// - [Statement::Prepare] +/// - [Statement::Kill] +/// - [Statement::ExplainTable] +/// - [Statement::Explain] +/// - [Statement::Savepoint] +/// - [Statement::ReleaseSavepoint] +/// - [Statement::Merge] +/// - [Statement::Cache] +/// - [Statement::UNCache] +/// - [Statement::CreateSequence] +/// - [Statement::CreateType] +/// - [Statement::Pragma] +/// - [Statement::LockTables] +/// - [Statement::UnlockTables] +/// - [Statement::Unload] +/// - [Statement::OptimizeTable] +impl Spanned for Statement { + fn span(&self) -> Span { + match self { + Statement::Analyze { + table_name, + partitions, + for_columns: _, + columns, + cache_metadata: _, + noscan: _, + compute_statistics: _, + has_table_keyword: _, + } => union_spans( + core::iter::once(table_name.span()) + .chain(partitions.iter().flat_map(|i| i.iter().map(|k| k.span()))) + .chain(columns.iter().map(|i| i.span)), + ), + Statement::Truncate { + table_names, + partitions, + table: _, + only: _, + identity: _, + cascade: _, + on_cluster: _, + } => union_spans( + table_names + .iter() + .map(|i| i.name.span()) + .chain(partitions.iter().flat_map(|i| i.iter().map(|k| k.span()))), + ), + Statement::Msck { + table_name, + repair: _, + partition_action: _, + } => table_name.span(), + Statement::Query(query) => query.span(), + Statement::Insert(insert) => insert.span(), + Statement::Install { extension_name } => extension_name.span, + 
Statement::Load { extension_name } => extension_name.span, + Statement::Directory { + overwrite: _, + local: _, + path: _, + file_format: _, + source, + } => source.span(), + Statement::Case(stmt) => stmt.span(), + Statement::If(stmt) => stmt.span(), + Statement::While(stmt) => stmt.span(), + Statement::Raise(stmt) => stmt.span(), + Statement::Call(function) => function.span(), + Statement::Copy { + source, + to: _, + target: _, + options: _, + legacy_options: _, + values: _, + } => source.span(), + Statement::CopyIntoSnowflake { + into: _, + into_columns: _, + from_obj: _, + from_obj_alias: _, + stage_params: _, + from_transformations: _, + files: _, + pattern: _, + file_format: _, + copy_options: _, + validation_mode: _, + kind: _, + from_query: _, + partition: _, + } => Span::empty(), + Statement::Open(open) => open.span(), + Statement::Close { cursor } => match cursor { + CloseCursor::All => Span::empty(), + CloseCursor::Specific { name } => name.span, + }, + Statement::Update { + table, + assignments, + from, + selection, + returning, + or: _, + } => union_spans( + core::iter::once(table.span()) + .chain(assignments.iter().map(|i| i.span())) + .chain(from.iter().map(|i| i.span())) + .chain(selection.iter().map(|i| i.span())) + .chain(returning.iter().flat_map(|i| i.iter().map(|k| k.span()))), + ), + Statement::Delete(delete) => delete.span(), + Statement::CreateView { + or_alter: _, + or_replace: _, + materialized: _, + name, + columns, + query, + options, + cluster_by, + comment: _, + with_no_schema_binding: _, + if_not_exists: _, + temporary: _, + to, + params: _, + } => union_spans( + core::iter::once(name.span()) + .chain(columns.iter().map(|i| i.span())) + .chain(core::iter::once(query.span())) + .chain(core::iter::once(options.span())) + .chain(cluster_by.iter().map(|i| i.span)) + .chain(to.iter().map(|i| i.span())), + ), + Statement::CreateTable(create_table) => create_table.span(), + Statement::CreateVirtualTable { + name, + if_not_exists: _, + module_name, + module_args, + } => union_spans( + core::iter::once(name.span()) + .chain(core::iter::once(module_name.span)) + .chain(module_args.iter().map(|i| i.span)), + ), + Statement::CreateIndex(create_index) => create_index.span(), + Statement::CreateRole { .. } => Span::empty(), + Statement::CreateSecret { .. } => Span::empty(), + Statement::CreateConnector { .. } => Span::empty(), + Statement::AlterTable { + name, + if_exists: _, + only: _, + operations, + location: _, + on_cluster, + } => union_spans( + core::iter::once(name.span()) + .chain(operations.iter().map(|i| i.span())) + .chain(on_cluster.iter().map(|i| i.span)), + ), + Statement::AlterIndex { name, operation } => name.span().union(&operation.span()), + Statement::AlterView { + name, + columns, + query, + with_options, + } => union_spans( + core::iter::once(name.span()) + .chain(columns.iter().map(|i| i.span)) + .chain(core::iter::once(query.span())) + .chain(with_options.iter().map(|i| i.span())), + ), + // These statements need to be implemented + Statement::AlterType { .. } => Span::empty(), + Statement::AlterRole { .. } => Span::empty(), + Statement::AlterSession { .. } => Span::empty(), + Statement::AttachDatabase { .. } => Span::empty(), + Statement::AttachDuckDBDatabase { .. } => Span::empty(), + Statement::DetachDuckDBDatabase { .. } => Span::empty(), + Statement::Drop { .. } => Span::empty(), + Statement::DropFunction { .. } => Span::empty(), + Statement::DropDomain { .. } => Span::empty(), + Statement::DropProcedure { .. 
} => Span::empty(), + Statement::DropSecret { .. } => Span::empty(), + Statement::Declare { .. } => Span::empty(), + Statement::CreateExtension { .. } => Span::empty(), + Statement::DropExtension { .. } => Span::empty(), + Statement::Fetch { .. } => Span::empty(), + Statement::Flush { .. } => Span::empty(), + Statement::Discard { .. } => Span::empty(), + Statement::Set(_) => Span::empty(), + Statement::ShowFunctions { .. } => Span::empty(), + Statement::ShowVariable { .. } => Span::empty(), + Statement::ShowStatus { .. } => Span::empty(), + Statement::ShowVariables { .. } => Span::empty(), + Statement::ShowCreate { .. } => Span::empty(), + Statement::ShowColumns { .. } => Span::empty(), + Statement::ShowTables { .. } => Span::empty(), + Statement::ShowCollation { .. } => Span::empty(), + Statement::Use(u) => u.span(), + Statement::StartTransaction { .. } => Span::empty(), + Statement::Comment { .. } => Span::empty(), + Statement::Commit { .. } => Span::empty(), + Statement::Rollback { .. } => Span::empty(), + Statement::CreateSchema { .. } => Span::empty(), + Statement::CreateDatabase { .. } => Span::empty(), + Statement::CreateFunction { .. } => Span::empty(), + Statement::CreateDomain { .. } => Span::empty(), + Statement::CreateTrigger { .. } => Span::empty(), + Statement::DropTrigger { .. } => Span::empty(), + Statement::CreateProcedure { .. } => Span::empty(), + Statement::CreateMacro { .. } => Span::empty(), + Statement::CreateStage { .. } => Span::empty(), + Statement::Assert { .. } => Span::empty(), + Statement::Grant { .. } => Span::empty(), + Statement::Revoke { .. } => Span::empty(), + Statement::Deallocate { .. } => Span::empty(), + Statement::Execute { .. } => Span::empty(), + Statement::Prepare { .. } => Span::empty(), + Statement::Kill { .. } => Span::empty(), + Statement::ExplainTable { .. } => Span::empty(), + Statement::Explain { .. } => Span::empty(), + Statement::Savepoint { .. } => Span::empty(), + Statement::ReleaseSavepoint { .. } => Span::empty(), + Statement::Merge { .. } => Span::empty(), + Statement::Cache { .. } => Span::empty(), + Statement::UNCache { .. } => Span::empty(), + Statement::CreateSequence { .. } => Span::empty(), + Statement::CreateType { .. } => Span::empty(), + Statement::Pragma { .. } => Span::empty(), + Statement::LockTables { .. } => Span::empty(), + Statement::UnlockTables => Span::empty(), + Statement::Unload { .. } => Span::empty(), + Statement::OptimizeTable { .. } => Span::empty(), + Statement::CreatePolicy { .. } => Span::empty(), + Statement::AlterPolicy { .. } => Span::empty(), + Statement::AlterConnector { .. } => Span::empty(), + Statement::DropPolicy { .. } => Span::empty(), + Statement::DropConnector { .. } => Span::empty(), + Statement::ShowDatabases { .. } => Span::empty(), + Statement::ShowSchemas { .. } => Span::empty(), + Statement::ShowObjects { .. } => Span::empty(), + Statement::ShowViews { .. } => Span::empty(), + Statement::LISTEN { .. } => Span::empty(), + Statement::NOTIFY { .. } => Span::empty(), + Statement::LoadData { .. } => Span::empty(), + Statement::UNLISTEN { .. } => Span::empty(), + Statement::RenameTable { .. } => Span::empty(), + Statement::RaisError { .. } => Span::empty(), + Statement::Print { .. } => Span::empty(), + Statement::Return { .. } => Span::empty(), + Statement::List(..) | Statement::Remove(..) 
=> Span::empty(), + } + } +} + +impl Spanned for Use { + fn span(&self) -> Span { + match self { + Use::Catalog(object_name) => object_name.span(), + Use::Schema(object_name) => object_name.span(), + Use::Database(object_name) => object_name.span(), + Use::Warehouse(object_name) => object_name.span(), + Use::Role(object_name) => object_name.span(), + Use::SecondaryRoles(secondary_roles) => { + if let SecondaryRoles::List(roles) = secondary_roles { + return union_spans(roles.iter().map(|i| i.span)); + } + Span::empty() + } + Use::Object(object_name) => object_name.span(), + Use::Default => Span::empty(), + } + } +} + +impl Spanned for CreateTable { + fn span(&self) -> Span { + let CreateTable { + or_replace: _, // bool + temporary: _, // bool + external: _, // bool + global: _, // bool + if_not_exists: _, // bool + transient: _, // bool + volatile: _, // bool + iceberg: _, // bool, Snowflake specific + name, + columns, + constraints, + hive_distribution: _, // hive specific + hive_formats: _, // hive specific + file_format: _, // enum + location: _, // string, no span + query, + without_rowid: _, // bool + like, + clone, + comment: _, // todo, no span + on_commit: _, + on_cluster: _, // todo, clickhouse specific + primary_key: _, // todo, clickhouse specific + order_by: _, // todo, clickhouse specific + partition_by: _, // todo, BigQuery specific + cluster_by: _, // todo, BigQuery specific + clustered_by: _, // todo, Hive specific + inherits: _, // todo, PostgreSQL specific + strict: _, // bool + copy_grants: _, // bool + enable_schema_evolution: _, // bool + change_tracking: _, // bool + data_retention_time_in_days: _, // u64, no span + max_data_extension_time_in_days: _, // u64, no span + default_ddl_collation: _, // string, no span + with_aggregation_policy: _, // todo, Snowflake specific + with_row_access_policy: _, // todo, Snowflake specific + with_tags: _, // todo, Snowflake specific + external_volume: _, // todo, Snowflake specific + base_location: _, // todo, Snowflake specific + catalog: _, // todo, Snowflake specific + catalog_sync: _, // todo, Snowflake specific + storage_serialization_policy: _, + table_options, + } = self; + + union_spans( + core::iter::once(name.span()) + .chain(core::iter::once(table_options.span())) + .chain(columns.iter().map(|i| i.span())) + .chain(constraints.iter().map(|i| i.span())) + .chain(query.iter().map(|i| i.span())) + .chain(like.iter().map(|i| i.span())) + .chain(clone.iter().map(|i| i.span())), + ) + } +} + +impl Spanned for ColumnDef { + fn span(&self) -> Span { + let ColumnDef { + name, + data_type: _, // enum + options, + } = self; + + union_spans(core::iter::once(name.span).chain(options.iter().map(|i| i.span()))) + } +} + +impl Spanned for ColumnOptionDef { + fn span(&self) -> Span { + let ColumnOptionDef { name, option } = self; + + option.span().union_opt(&name.as_ref().map(|i| i.span)) + } +} + +impl Spanned for TableConstraint { + fn span(&self) -> Span { + match self { + TableConstraint::Unique { + name, + index_name, + index_type_display: _, + index_type: _, + columns, + index_options: _, + characteristics, + nulls_distinct: _, + } => union_spans( + name.iter() + .map(|i| i.span) + .chain(index_name.iter().map(|i| i.span)) + .chain(columns.iter().map(|i| i.span)) + .chain(characteristics.iter().map(|i| i.span())), + ), + TableConstraint::PrimaryKey { + name, + index_name, + index_type: _, + columns, + index_options: _, + characteristics, + } => union_spans( + name.iter() + .map(|i| i.span) + .chain(index_name.iter().map(|i| i.span)) 
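As the long list of `Span::empty()` arms above makes explicit, span coverage is still partial; a defensive caller can compare against the empty span before relying on it. A rough sketch (the statement choices are arbitrary):

```rust
use sqlparser::ast::Spanned;
use sqlparser::dialect::GenericDialect;
use sqlparser::parser::Parser;
use sqlparser::tokenizer::Span;

// `COMMIT` is one of the variants listed above without span information yet.
let commit = Parser::parse_sql(&GenericDialect {}, "COMMIT").unwrap();
assert_eq!(commit[0].span(), Span::empty());

// A plain query, by contrast, reports a usable span.
let select = Parser::parse_sql(&GenericDialect {}, "SELECT 1").unwrap();
assert_ne!(select[0].span(), Span::empty());
```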
+ .chain(columns.iter().map(|i| i.span)) + .chain(characteristics.iter().map(|i| i.span())), + ), + TableConstraint::ForeignKey { + name, + columns, + foreign_table, + referred_columns, + on_delete, + on_update, + characteristics, + } => union_spans( + name.iter() + .map(|i| i.span) + .chain(columns.iter().map(|i| i.span)) + .chain(core::iter::once(foreign_table.span())) + .chain(referred_columns.iter().map(|i| i.span)) + .chain(on_delete.iter().map(|i| i.span())) + .chain(on_update.iter().map(|i| i.span())) + .chain(characteristics.iter().map(|i| i.span())), + ), + TableConstraint::Check { name, expr } => { + expr.span().union_opt(&name.as_ref().map(|i| i.span)) + } + TableConstraint::Index { + display_as_key: _, + name, + index_type: _, + columns, + } => union_spans( + name.iter() + .map(|i| i.span) + .chain(columns.iter().map(|i| i.span)), + ), + TableConstraint::FulltextOrSpatial { + fulltext: _, + index_type_display: _, + opt_index_name, + columns, + } => union_spans( + opt_index_name + .iter() + .map(|i| i.span) + .chain(columns.iter().map(|i| i.span)), + ), + } + } +} + +impl Spanned for CreateIndex { + fn span(&self) -> Span { + let CreateIndex { + name, + table_name, + using: _, + columns, + unique: _, // bool + concurrently: _, // bool + if_not_exists: _, // bool + include, + nulls_distinct: _, // bool + with, + predicate, + } = self; + + union_spans( + name.iter() + .map(|i| i.span()) + .chain(core::iter::once(table_name.span())) + .chain(columns.iter().map(|i| i.column.span())) + .chain(include.iter().map(|i| i.span)) + .chain(with.iter().map(|i| i.span())) + .chain(predicate.iter().map(|i| i.span())), + ) + } +} + +impl Spanned for CaseStatement { + fn span(&self) -> Span { + let CaseStatement { + case_token: AttachedToken(start), + match_expr: _, + when_blocks: _, + else_block: _, + end_case_token: AttachedToken(end), + } = self; + + union_spans([start.span, end.span].into_iter()) + } +} + +impl Spanned for IfStatement { + fn span(&self) -> Span { + let IfStatement { + if_block, + elseif_blocks, + else_block, + end_token, + } = self; + + union_spans( + iter::once(if_block.span()) + .chain(elseif_blocks.iter().map(|b| b.span())) + .chain(else_block.as_ref().map(|b| b.span())) + .chain(end_token.as_ref().map(|AttachedToken(t)| t.span)), + ) + } +} + +impl Spanned for WhileStatement { + fn span(&self) -> Span { + let WhileStatement { while_block } = self; + + while_block.span() + } +} + +impl Spanned for ConditionalStatements { + fn span(&self) -> Span { + match self { + ConditionalStatements::Sequence { statements } => { + union_spans(statements.iter().map(|s| s.span())) + } + ConditionalStatements::BeginEnd(bes) => bes.span(), + } + } +} + +impl Spanned for ConditionalStatementBlock { + fn span(&self) -> Span { + let ConditionalStatementBlock { + start_token: AttachedToken(start_token), + condition, + then_token, + conditional_statements, + } = self; + + union_spans( + iter::once(start_token.span) + .chain(condition.as_ref().map(|c| c.span())) + .chain(then_token.as_ref().map(|AttachedToken(t)| t.span)) + .chain(iter::once(conditional_statements.span())), + ) + } +} + +impl Spanned for RaiseStatement { + fn span(&self) -> Span { + let RaiseStatement { value } = self; + + union_spans(value.iter().map(|value| value.span())) + } +} + +impl Spanned for RaiseStatementValue { + fn span(&self) -> Span { + match self { + RaiseStatementValue::UsingMessage(expr) => expr.span(), + RaiseStatementValue::Expr(expr) => expr.span(), + } + } +} + +/// # partial span +/// +/// Missing spans: 
+/// - [ColumnOption::Null] +/// - [ColumnOption::NotNull] +/// - [ColumnOption::Comment] +/// - [ColumnOption::Unique]¨ +/// - [ColumnOption::DialectSpecific] +/// - [ColumnOption::Generated] +impl Spanned for ColumnOption { + fn span(&self) -> Span { + match self { + ColumnOption::Null => Span::empty(), + ColumnOption::NotNull => Span::empty(), + ColumnOption::Default(expr) => expr.span(), + ColumnOption::Materialized(expr) => expr.span(), + ColumnOption::Ephemeral(expr) => expr.as_ref().map_or(Span::empty(), |e| e.span()), + ColumnOption::Alias(expr) => expr.span(), + ColumnOption::Unique { .. } => Span::empty(), + ColumnOption::ForeignKey { + foreign_table, + referred_columns, + on_delete, + on_update, + characteristics, + } => union_spans( + core::iter::once(foreign_table.span()) + .chain(referred_columns.iter().map(|i| i.span)) + .chain(on_delete.iter().map(|i| i.span())) + .chain(on_update.iter().map(|i| i.span())) + .chain(characteristics.iter().map(|i| i.span())), + ), + ColumnOption::Check(expr) => expr.span(), + ColumnOption::DialectSpecific(_) => Span::empty(), + ColumnOption::CharacterSet(object_name) => object_name.span(), + ColumnOption::Collation(object_name) => object_name.span(), + ColumnOption::Comment(_) => Span::empty(), + ColumnOption::OnUpdate(expr) => expr.span(), + ColumnOption::Generated { .. } => Span::empty(), + ColumnOption::Options(vec) => union_spans(vec.iter().map(|i| i.span())), + ColumnOption::Identity(..) => Span::empty(), + ColumnOption::OnConflict(..) => Span::empty(), + ColumnOption::Policy(..) => Span::empty(), + ColumnOption::Tags(..) => Span::empty(), + } + } +} + +/// # missing span +impl Spanned for ReferentialAction { + fn span(&self) -> Span { + Span::empty() + } +} + +/// # missing span +impl Spanned for ConstraintCharacteristics { + fn span(&self) -> Span { + let ConstraintCharacteristics { + deferrable: _, // bool + initially: _, // enum + enforced: _, // bool + } = self; + + Span::empty() + } +} + +/// # partial span +/// +/// Missing spans: +/// - [AlterColumnOperation::SetNotNull] +/// - [AlterColumnOperation::DropNotNull] +/// - [AlterColumnOperation::DropDefault] +/// - [AlterColumnOperation::AddGenerated] +impl Spanned for AlterColumnOperation { + fn span(&self) -> Span { + match self { + AlterColumnOperation::SetNotNull => Span::empty(), + AlterColumnOperation::DropNotNull => Span::empty(), + AlterColumnOperation::SetDefault { value } => value.span(), + AlterColumnOperation::DropDefault => Span::empty(), + AlterColumnOperation::SetDataType { + data_type: _, + using, + } => using.as_ref().map_or(Span::empty(), |u| u.span()), + AlterColumnOperation::AddGenerated { .. 
} => Span::empty(), + } + } +} + +impl Spanned for CopySource { + fn span(&self) -> Span { + match self { + CopySource::Table { + table_name, + columns, + } => union_spans( + core::iter::once(table_name.span()).chain(columns.iter().map(|i| i.span)), + ), + CopySource::Query(query) => query.span(), + } + } +} + +impl Spanned for Delete { + fn span(&self) -> Span { + let Delete { + tables, + from, + using, + selection, + returning, + order_by, + limit, + } = self; + + union_spans( + tables + .iter() + .map(|i| i.span()) + .chain(core::iter::once(from.span())) + .chain( + using + .iter() + .map(|u| union_spans(u.iter().map(|i| i.span()))), + ) + .chain(selection.iter().map(|i| i.span())) + .chain(returning.iter().flat_map(|i| i.iter().map(|k| k.span()))) + .chain(order_by.iter().map(|i| i.span())) + .chain(limit.iter().map(|i| i.span())), + ) + } +} + +impl Spanned for FromTable { + fn span(&self) -> Span { + match self { + FromTable::WithFromKeyword(vec) => union_spans(vec.iter().map(|i| i.span())), + FromTable::WithoutKeyword(vec) => union_spans(vec.iter().map(|i| i.span())), + } + } +} + +impl Spanned for ViewColumnDef { + fn span(&self) -> Span { + let ViewColumnDef { + name, + data_type: _, // todo, DataType + options, + } = self; + + union_spans( + core::iter::once(name.span) + .chain(options.iter().flat_map(|i| i.iter().map(|k| k.span()))), + ) + } +} + +impl Spanned for SqlOption { + fn span(&self) -> Span { + match self { + SqlOption::Clustered(table_options_clustered) => table_options_clustered.span(), + SqlOption::Ident(ident) => ident.span, + SqlOption::KeyValue { key, value } => key.span.union(&value.span()), + SqlOption::Partition { + column_name, + range_direction: _, + for_values, + } => union_spans( + core::iter::once(column_name.span).chain(for_values.iter().map(|i| i.span())), + ), + SqlOption::TableSpace(_) => Span::empty(), + SqlOption::Comment(_) => Span::empty(), + SqlOption::NamedParenthesizedList(NamedParenthesizedList { + key: name, + name: value, + values, + }) => union_spans(core::iter::once(name.span).chain(values.iter().map(|i| i.span))) + .union_opt(&value.as_ref().map(|i| i.span)), + } + } +} + +/// # partial span +/// +/// Missing spans: +/// - [TableOptionsClustered::ColumnstoreIndex] +impl Spanned for TableOptionsClustered { + fn span(&self) -> Span { + match self { + TableOptionsClustered::ColumnstoreIndex => Span::empty(), + TableOptionsClustered::ColumnstoreIndexOrder(vec) => { + union_spans(vec.iter().map(|i| i.span)) + } + TableOptionsClustered::Index(vec) => union_spans(vec.iter().map(|i| i.span())), + } + } +} + +impl Spanned for ClusteredIndex { + fn span(&self) -> Span { + let ClusteredIndex { + name, + asc: _, // bool + } = self; + + name.span + } +} + +impl Spanned for CreateTableOptions { + fn span(&self) -> Span { + match self { + CreateTableOptions::None => Span::empty(), + CreateTableOptions::With(vec) => union_spans(vec.iter().map(|i| i.span())), + CreateTableOptions::Options(vec) => union_spans(vec.iter().map(|i| i.span())), + CreateTableOptions::Plain(vec) => union_spans(vec.iter().map(|i| i.span())), + CreateTableOptions::TableProperties(vec) => union_spans(vec.iter().map(|i| i.span())), + } + } +} + +/// # partial span +/// +/// Missing spans: +/// - [AlterTableOperation::OwnerTo] +impl Spanned for AlterTableOperation { + fn span(&self) -> Span { + match self { + AlterTableOperation::AddConstraint(table_constraint) => table_constraint.span(), + AlterTableOperation::AddColumn { + column_keyword: _, + if_not_exists: _, + column_def, + 
column_position: _, + } => column_def.span(), + AlterTableOperation::AddProjection { + if_not_exists: _, + name, + select, + } => name.span.union(&select.span()), + AlterTableOperation::DropProjection { if_exists: _, name } => name.span, + AlterTableOperation::MaterializeProjection { + if_exists: _, + name, + partition, + } => name.span.union_opt(&partition.as_ref().map(|i| i.span)), + AlterTableOperation::ClearProjection { + if_exists: _, + name, + partition, + } => name.span.union_opt(&partition.as_ref().map(|i| i.span)), + AlterTableOperation::DisableRowLevelSecurity => Span::empty(), + AlterTableOperation::DisableRule { name } => name.span, + AlterTableOperation::DisableTrigger { name } => name.span, + AlterTableOperation::DropConstraint { + if_exists: _, + name, + drop_behavior: _, + } => name.span, + AlterTableOperation::DropColumn { + column_name, + if_exists: _, + drop_behavior: _, + } => column_name.span, + AlterTableOperation::AttachPartition { partition } => partition.span(), + AlterTableOperation::DetachPartition { partition } => partition.span(), + AlterTableOperation::FreezePartition { + partition, + with_name, + } => partition + .span() + .union_opt(&with_name.as_ref().map(|n| n.span)), + AlterTableOperation::UnfreezePartition { + partition, + with_name, + } => partition + .span() + .union_opt(&with_name.as_ref().map(|n| n.span)), + AlterTableOperation::DropPrimaryKey => Span::empty(), + AlterTableOperation::DropForeignKey { name } => name.span, + AlterTableOperation::EnableAlwaysRule { name } => name.span, + AlterTableOperation::EnableAlwaysTrigger { name } => name.span, + AlterTableOperation::EnableReplicaRule { name } => name.span, + AlterTableOperation::EnableReplicaTrigger { name } => name.span, + AlterTableOperation::EnableRowLevelSecurity => Span::empty(), + AlterTableOperation::EnableRule { name } => name.span, + AlterTableOperation::EnableTrigger { name } => name.span, + AlterTableOperation::RenamePartitions { + old_partitions, + new_partitions, + } => union_spans( + old_partitions + .iter() + .map(|i| i.span()) + .chain(new_partitions.iter().map(|i| i.span())), + ), + AlterTableOperation::AddPartitions { + if_not_exists: _, + new_partitions, + } => union_spans(new_partitions.iter().map(|i| i.span())), + AlterTableOperation::DropPartitions { + partitions, + if_exists: _, + } => union_spans(partitions.iter().map(|i| i.span())), + AlterTableOperation::RenameColumn { + old_column_name, + new_column_name, + } => old_column_name.span.union(&new_column_name.span), + AlterTableOperation::RenameTable { table_name } => table_name.span(), + AlterTableOperation::ChangeColumn { + old_name, + new_name, + data_type: _, + options, + column_position: _, + } => union_spans( + core::iter::once(old_name.span) + .chain(core::iter::once(new_name.span)) + .chain(options.iter().map(|i| i.span())), + ), + AlterTableOperation::ModifyColumn { + col_name, + data_type: _, + options, + column_position: _, + } => { + union_spans(core::iter::once(col_name.span).chain(options.iter().map(|i| i.span()))) + } + AlterTableOperation::RenameConstraint { old_name, new_name } => { + old_name.span.union(&new_name.span) + } + AlterTableOperation::AlterColumn { column_name, op } => { + column_name.span.union(&op.span()) + } + AlterTableOperation::SwapWith { table_name } => table_name.span(), + AlterTableOperation::SetTblProperties { table_properties } => { + union_spans(table_properties.iter().map(|i| i.span())) + } + AlterTableOperation::OwnerTo { .. 
} => Span::empty(), + AlterTableOperation::ClusterBy { exprs } => union_spans(exprs.iter().map(|e| e.span())), + AlterTableOperation::DropClusteringKey => Span::empty(), + AlterTableOperation::SuspendRecluster => Span::empty(), + AlterTableOperation::ResumeRecluster => Span::empty(), + AlterTableOperation::Algorithm { .. } => Span::empty(), + AlterTableOperation::AutoIncrement { value, .. } => value.span(), + AlterTableOperation::Lock { .. } => Span::empty(), + } + } +} + +impl Spanned for Partition { + fn span(&self) -> Span { + match self { + Partition::Identifier(ident) => ident.span, + Partition::Expr(expr) => expr.span(), + Partition::Part(expr) => expr.span(), + Partition::Partitions(vec) => union_spans(vec.iter().map(|i| i.span())), + } + } +} + +impl Spanned for ProjectionSelect { + fn span(&self) -> Span { + let ProjectionSelect { + projection, + order_by, + group_by, + } = self; + + union_spans( + projection + .iter() + .map(|i| i.span()) + .chain(order_by.iter().map(|i| i.span())) + .chain(group_by.iter().map(|i| i.span())), + ) + } +} + +/// # partial span +/// +/// Missing spans: +/// - [OrderByKind::All] +impl Spanned for OrderBy { + fn span(&self) -> Span { + match &self.kind { + OrderByKind::All(_) => Span::empty(), + OrderByKind::Expressions(exprs) => union_spans( + exprs + .iter() + .map(|i| i.span()) + .chain(self.interpolate.iter().map(|i| i.span())), + ), + } + } +} + +/// # partial span +/// +/// Missing spans: +/// - [GroupByExpr::All] +impl Spanned for GroupByExpr { + fn span(&self) -> Span { + match self { + GroupByExpr::All(_) => Span::empty(), + GroupByExpr::Expressions(exprs, _modifiers) => { + union_spans(exprs.iter().map(|i| i.span())) + } + } + } +} + +impl Spanned for Interpolate { + fn span(&self) -> Span { + let Interpolate { exprs } = self; + + union_spans(exprs.iter().flat_map(|i| i.iter().map(|e| e.span()))) + } +} + +impl Spanned for InterpolateExpr { + fn span(&self) -> Span { + let InterpolateExpr { column, expr } = self; + + column.span.union_opt(&expr.as_ref().map(|e| e.span())) + } +} + +impl Spanned for AlterIndexOperation { + fn span(&self) -> Span { + match self { + AlterIndexOperation::RenameIndex { index_name } => index_name.span(), + } + } +} + +/// # partial span +/// +/// Missing spans:ever +/// - [Insert::insert_alias] +impl Spanned for Insert { + fn span(&self) -> Span { + let Insert { + or: _, // enum, sqlite specific + ignore: _, // bool + into: _, // bool + table, + table_alias, + columns, + overwrite: _, // bool + source, + partitioned, + after_columns, + has_table_keyword: _, // bool + on, + returning, + replace_into: _, // bool + priority: _, // todo, mysql specific + insert_alias: _, // todo, mysql specific + assignments, + settings: _, // todo, clickhouse specific + format_clause: _, // todo, clickhouse specific + } = self; + + union_spans( + core::iter::once(table.span()) + .chain(table_alias.as_ref().map(|i| i.span)) + .chain(columns.iter().map(|i| i.span)) + .chain(source.as_ref().map(|q| q.span())) + .chain(assignments.iter().map(|i| i.span())) + .chain(partitioned.iter().flat_map(|i| i.iter().map(|k| k.span()))) + .chain(after_columns.iter().map(|i| i.span)) + .chain(on.as_ref().map(|i| i.span())) + .chain(returning.iter().flat_map(|i| i.iter().map(|k| k.span()))), + ) + } +} + +impl Spanned for OnInsert { + fn span(&self) -> Span { + match self { + OnInsert::DuplicateKeyUpdate(vec) => union_spans(vec.iter().map(|i| i.span())), + OnInsert::OnConflict(on_conflict) => on_conflict.span(), + } + } +} + +impl Spanned for 
OnConflict { + fn span(&self) -> Span { + let OnConflict { + conflict_target, + action, + } = self; + + action + .span() + .union_opt(&conflict_target.as_ref().map(|i| i.span())) + } +} + +impl Spanned for ConflictTarget { + fn span(&self) -> Span { + match self { + ConflictTarget::Columns(vec) => union_spans(vec.iter().map(|i| i.span)), + ConflictTarget::OnConstraint(object_name) => object_name.span(), + } + } +} + +/// # partial span +/// +/// Missing spans: +/// - [OnConflictAction::DoNothing] +impl Spanned for OnConflictAction { + fn span(&self) -> Span { + match self { + OnConflictAction::DoNothing => Span::empty(), + OnConflictAction::DoUpdate(do_update) => do_update.span(), + } + } +} + +impl Spanned for DoUpdate { + fn span(&self) -> Span { + let DoUpdate { + assignments, + selection, + } = self; + + union_spans( + assignments + .iter() + .map(|i| i.span()) + .chain(selection.iter().map(|i| i.span())), + ) + } +} + +impl Spanned for Assignment { + fn span(&self) -> Span { + let Assignment { target, value } = self; + + target.span().union(&value.span()) + } +} + +impl Spanned for AssignmentTarget { + fn span(&self) -> Span { + match self { + AssignmentTarget::ColumnName(object_name) => object_name.span(), + AssignmentTarget::Tuple(vec) => union_spans(vec.iter().map(|i| i.span())), + } + } +} + +/// # partial span +/// +/// Most expressions are missing keywords in their spans. +/// f.e. `IS NULL ` reports as `::span`. +/// +/// Missing spans: +/// - [Expr::TypedString] # missing span for data_type +/// - [Expr::MatchAgainst] # MySQL specific +/// - [Expr::RLike] # MySQL specific +/// - [Expr::Struct] # BigQuery specific +/// - [Expr::Named] # BigQuery specific +/// - [Expr::Dictionary] # DuckDB specific +/// - [Expr::Map] # DuckDB specific +/// - [Expr::Lambda] +impl Spanned for Expr { + fn span(&self) -> Span { + match self { + Expr::Identifier(ident) => ident.span, + Expr::CompoundIdentifier(vec) => union_spans(vec.iter().map(|i| i.span)), + Expr::CompoundFieldAccess { root, access_chain } => { + union_spans(iter::once(root.span()).chain(access_chain.iter().map(|i| i.span()))) + } + Expr::IsFalse(expr) => expr.span(), + Expr::IsNotFalse(expr) => expr.span(), + Expr::IsTrue(expr) => expr.span(), + Expr::IsNotTrue(expr) => expr.span(), + Expr::IsNull(expr) => expr.span(), + Expr::IsNotNull(expr) => expr.span(), + Expr::IsUnknown(expr) => expr.span(), + Expr::IsNotUnknown(expr) => expr.span(), + Expr::IsDistinctFrom(lhs, rhs) => lhs.span().union(&rhs.span()), + Expr::IsNotDistinctFrom(lhs, rhs) => lhs.span().union(&rhs.span()), + Expr::InList { + expr, + list, + negated: _, + } => union_spans( + core::iter::once(expr.span()).chain(list.iter().map(|item| item.span())), + ), + Expr::InSubquery { + expr, + subquery, + negated: _, + } => expr.span().union(&subquery.span()), + Expr::InUnnest { + expr, + array_expr, + negated: _, + } => expr.span().union(&array_expr.span()), + Expr::Between { + expr, + negated: _, + low, + high, + } => expr.span().union(&low.span()).union(&high.span()), + + Expr::BinaryOp { left, op: _, right } => left.span().union(&right.span()), + Expr::Like { + negated: _, + expr, + pattern, + escape_char: _, + any: _, + } => expr.span().union(&pattern.span()), + Expr::ILike { + negated: _, + expr, + pattern, + escape_char: _, + any: _, + } => expr.span().union(&pattern.span()), + Expr::RLike { .. 
} => Span::empty(), + Expr::IsNormalized { + expr, + form: _, + negated: _, + } => expr.span(), + Expr::SimilarTo { + negated: _, + expr, + pattern, + escape_char: _, + } => expr.span().union(&pattern.span()), + Expr::Ceil { expr, field: _ } => expr.span(), + Expr::Floor { expr, field: _ } => expr.span(), + Expr::Position { expr, r#in } => expr.span().union(&r#in.span()), + Expr::Overlay { + expr, + overlay_what, + overlay_from, + overlay_for, + } => expr + .span() + .union(&overlay_what.span()) + .union(&overlay_from.span()) + .union_opt(&overlay_for.as_ref().map(|i| i.span())), + Expr::Collate { expr, collation } => expr + .span() + .union(&union_spans(collation.0.iter().map(|i| i.span()))), + Expr::Nested(expr) => expr.span(), + Expr::Value(value) => value.span(), + Expr::TypedString { value, .. } => value.span(), + Expr::Function(function) => function.span(), + Expr::GroupingSets(vec) => { + union_spans(vec.iter().flat_map(|i| i.iter().map(|k| k.span()))) + } + Expr::Cube(vec) => union_spans(vec.iter().flat_map(|i| i.iter().map(|k| k.span()))), + Expr::Rollup(vec) => union_spans(vec.iter().flat_map(|i| i.iter().map(|k| k.span()))), + Expr::Tuple(vec) => union_spans(vec.iter().map(|i| i.span())), + Expr::Array(array) => array.span(), + Expr::MatchAgainst { .. } => Span::empty(), + Expr::JsonAccess { value, path } => value.span().union(&path.span()), + Expr::AnyOp { + left, + compare_op: _, + right, + is_some: _, + } => left.span().union(&right.span()), + Expr::AllOp { + left, + compare_op: _, + right, + } => left.span().union(&right.span()), + Expr::UnaryOp { op: _, expr } => expr.span(), + Expr::Convert { + expr, + data_type: _, + charset, + target_before_value: _, + styles, + is_try: _, + } => union_spans( + core::iter::once(expr.span()) + .chain(charset.as_ref().map(|i| i.span())) + .chain(styles.iter().map(|i| i.span())), + ), + Expr::Cast { + kind: _, + expr, + data_type: _, + format: _, + } => expr.span(), + Expr::AtTimeZone { + timestamp, + time_zone, + } => timestamp.span().union(&time_zone.span()), + Expr::Extract { + field: _, + syntax: _, + expr, + } => expr.span(), + Expr::Substring { + expr, + substring_from, + substring_for, + special: _, + shorthand: _, + } => union_spans( + core::iter::once(expr.span()) + .chain(substring_from.as_ref().map(|i| i.span())) + .chain(substring_for.as_ref().map(|i| i.span())), + ), + Expr::Trim { + expr, + trim_where: _, + trim_what, + trim_characters, + } => union_spans( + core::iter::once(expr.span()) + .chain(trim_what.as_ref().map(|i| i.span())) + .chain( + trim_characters + .as_ref() + .map(|items| union_spans(items.iter().map(|i| i.span()))), + ), + ), + Expr::Prefixed { value, .. } => value.span(), + Expr::Case { + operand, + conditions, + else_result, + } => union_spans( + operand + .as_ref() + .map(|i| i.span()) + .into_iter() + .chain(conditions.iter().flat_map(|case_when| { + [case_when.condition.span(), case_when.result.span()] + })) + .chain(else_result.as_ref().map(|i| i.span())), + ), + Expr::Exists { subquery, .. } => subquery.span(), + Expr::Subquery(query) => query.span(), + Expr::Struct { .. } => Span::empty(), + Expr::Named { .. 
} => Span::empty(), + Expr::Dictionary(_) => Span::empty(), + Expr::Map(_) => Span::empty(), + Expr::Interval(interval) => interval.value.span(), + Expr::Wildcard(token) => token.0.span, + Expr::QualifiedWildcard(object_name, token) => union_spans( + object_name + .0 + .iter() + .map(|i| i.span()) + .chain(iter::once(token.0.span)), + ), + Expr::OuterJoin(expr) => expr.span(), + Expr::Prior(expr) => expr.span(), + Expr::Lambda(_) => Span::empty(), + } + } +} + +impl Spanned for Subscript { + fn span(&self) -> Span { + match self { + Subscript::Index { index } => index.span(), + Subscript::Slice { + lower_bound, + upper_bound, + stride, + } => union_spans( + [ + lower_bound.as_ref().map(|i| i.span()), + upper_bound.as_ref().map(|i| i.span()), + stride.as_ref().map(|i| i.span()), + ] + .into_iter() + .flatten(), + ), + } + } +} + +impl Spanned for AccessExpr { + fn span(&self) -> Span { + match self { + AccessExpr::Dot(ident) => ident.span(), + AccessExpr::Subscript(subscript) => subscript.span(), + } + } +} + +impl Spanned for ObjectName { + fn span(&self) -> Span { + let ObjectName(segments) = self; + + union_spans(segments.iter().map(|i| i.span())) + } +} + +impl Spanned for ObjectNamePart { + fn span(&self) -> Span { + match self { + ObjectNamePart::Identifier(ident) => ident.span, + } + } +} + +impl Spanned for Array { + fn span(&self) -> Span { + let Array { + elem, + named: _, // bool + } = self; + + union_spans(elem.iter().map(|i| i.span())) + } +} + +impl Spanned for Function { + fn span(&self) -> Span { + let Function { + name, + uses_odbc_syntax: _, + parameters, + args, + filter, + null_treatment: _, // enum + over: _, // todo + within_group, + } = self; + + union_spans( + name.0 + .iter() + .map(|i| i.span()) + .chain(iter::once(args.span())) + .chain(iter::once(parameters.span())) + .chain(filter.iter().map(|i| i.span())) + .chain(within_group.iter().map(|i| i.span())), + ) + } +} + +/// # partial span +/// +/// The span of [FunctionArguments::None] is empty. +impl Spanned for FunctionArguments { + fn span(&self) -> Span { + match self { + FunctionArguments::None => Span::empty(), + FunctionArguments::Subquery(query) => query.span(), + FunctionArguments::List(list) => list.span(), + } + } +} + +impl Spanned for FunctionArgumentList { + fn span(&self) -> Span { + let FunctionArgumentList { + duplicate_treatment: _, // enum + args, + clauses, + } = self; + + union_spans( + // # todo: duplicate-treatment span + args.iter() + .map(|i| i.span()) + .chain(clauses.iter().map(|i| i.span())), + ) + } +} + +impl Spanned for FunctionArgumentClause { + fn span(&self) -> Span { + match self { + FunctionArgumentClause::IgnoreOrRespectNulls(_) => Span::empty(), + FunctionArgumentClause::OrderBy(vec) => union_spans(vec.iter().map(|i| i.expr.span())), + FunctionArgumentClause::Limit(expr) => expr.span(), + FunctionArgumentClause::OnOverflow(_) => Span::empty(), + FunctionArgumentClause::Having(HavingBound(_kind, expr)) => expr.span(), + FunctionArgumentClause::Separator(value) => value.span(), + FunctionArgumentClause::JsonNullClause(_) => Span::empty(), + } + } +} + +/// # partial span +/// +/// see Spanned impl for JsonPathElem for more information +impl Spanned for JsonPath { + fn span(&self) -> Span { + let JsonPath { path } = self; + + union_spans(path.iter().map(|i| i.span())) + } +} + +/// # partial span +/// +/// Missing spans: +/// - [JsonPathElem::Dot] +impl Spanned for JsonPathElem { + fn span(&self) -> Span { + match self { + JsonPathElem::Dot { .. 
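To illustrate the caveat noted above the `Expr` impl, namely that many operators report only their operand's span, here is a hedged sketch using an arbitrary query.

```rust
use sqlparser::ast::{Expr, SelectItem, SetExpr, Spanned, Statement};
use sqlparser::dialect::GenericDialect;
use sqlparser::parser::Parser;

let statements = Parser::parse_sql(&GenericDialect {}, "SELECT x IS NULL").unwrap();
if let Statement::Query(query) = &statements[0] {
    if let SetExpr::Select(select) = query.body.as_ref() {
        if let SelectItem::UnnamedExpr(Expr::IsNull(inner)) = &select.projection[0] {
            // The `IS NULL` keywords are not included: the reported span is
            // exactly the span of the inner identifier `x`.
            assert_eq!(select.projection[0].span(), inner.span());
        }
    }
}
```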
} => Span::empty(), + JsonPathElem::Bracket { key } => key.span(), + } + } +} + +impl Spanned for SelectItemQualifiedWildcardKind { + fn span(&self) -> Span { + match self { + SelectItemQualifiedWildcardKind::ObjectName(object_name) => object_name.span(), + SelectItemQualifiedWildcardKind::Expr(expr) => expr.span(), + } + } +} + +impl Spanned for SelectItem { + fn span(&self) -> Span { + match self { + SelectItem::UnnamedExpr(expr) => expr.span(), + SelectItem::ExprWithAlias { expr, alias } => expr.span().union(&alias.span), + SelectItem::QualifiedWildcard(kind, wildcard_additional_options) => union_spans( + [kind.span()] + .into_iter() + .chain(iter::once(wildcard_additional_options.span())), + ), + SelectItem::Wildcard(wildcard_additional_options) => wildcard_additional_options.span(), + } + } +} + +impl Spanned for WildcardAdditionalOptions { + fn span(&self) -> Span { + let WildcardAdditionalOptions { + wildcard_token, + opt_ilike, + opt_exclude, + opt_except, + opt_replace, + opt_rename, + } = self; + + union_spans( + core::iter::once(wildcard_token.0.span) + .chain(opt_ilike.as_ref().map(|i| i.span())) + .chain(opt_exclude.as_ref().map(|i| i.span())) + .chain(opt_rename.as_ref().map(|i| i.span())) + .chain(opt_replace.as_ref().map(|i| i.span())) + .chain(opt_except.as_ref().map(|i| i.span())), + ) + } +} + +/// # missing span +impl Spanned for IlikeSelectItem { + fn span(&self) -> Span { + Span::empty() + } +} + +impl Spanned for ExcludeSelectItem { + fn span(&self) -> Span { + match self { + ExcludeSelectItem::Single(ident) => ident.span, + ExcludeSelectItem::Multiple(vec) => union_spans(vec.iter().map(|i| i.span)), + } + } +} + +impl Spanned for RenameSelectItem { + fn span(&self) -> Span { + match self { + RenameSelectItem::Single(ident) => ident.ident.span.union(&ident.alias.span), + RenameSelectItem::Multiple(vec) => { + union_spans(vec.iter().map(|i| i.ident.span.union(&i.alias.span))) + } + } + } +} + +impl Spanned for ExceptSelectItem { + fn span(&self) -> Span { + let ExceptSelectItem { + first_element, + additional_elements, + } = self; + + union_spans( + iter::once(first_element.span).chain(additional_elements.iter().map(|i| i.span)), + ) + } +} + +impl Spanned for ReplaceSelectItem { + fn span(&self) -> Span { + let ReplaceSelectItem { items } = self; + + union_spans(items.iter().map(|i| i.span())) + } +} + +impl Spanned for ReplaceSelectElement { + fn span(&self) -> Span { + let ReplaceSelectElement { + expr, + column_name, + as_keyword: _, // bool + } = self; + + expr.span().union(&column_name.span) + } +} + +/// # partial span +/// +/// Missing spans: +/// - [TableFactor::JsonTable] +impl Spanned for TableFactor { + fn span(&self) -> Span { + match self { + TableFactor::Table { + name, + alias, + args: _, + with_hints: _, + version: _, + with_ordinality: _, + partitions: _, + json_path: _, + sample: _, + index_hints: _, + } => union_spans( + name.0 + .iter() + .map(|i| i.span()) + .chain(alias.as_ref().map(|alias| { + union_spans( + iter::once(alias.name.span) + .chain(alias.columns.iter().map(|i| i.span())), + ) + })), + ), + TableFactor::Derived { + lateral: _, + subquery, + alias, + } => subquery + .span() + .union_opt(&alias.as_ref().map(|alias| alias.span())), + TableFactor::TableFunction { expr, alias } => expr + .span() + .union_opt(&alias.as_ref().map(|alias| alias.span())), + TableFactor::UNNEST { + alias, + with_offset: _, + with_offset_alias, + array_exprs, + with_ordinality: _, + } => union_spans( + alias + .iter() + .map(|i| i.span()) + 
.chain(array_exprs.iter().map(|i| i.span())) + .chain(with_offset_alias.as_ref().map(|i| i.span)), + ), + TableFactor::NestedJoin { + table_with_joins, + alias, + } => table_with_joins + .span() + .union_opt(&alias.as_ref().map(|alias| alias.span())), + TableFactor::Function { + lateral: _, + name, + args, + alias, + } => union_spans( + name.0 + .iter() + .map(|i| i.span()) + .chain(args.iter().map(|i| i.span())) + .chain(alias.as_ref().map(|alias| alias.span())), + ), + TableFactor::JsonTable { .. } => Span::empty(), + TableFactor::XmlTable { .. } => Span::empty(), + TableFactor::Pivot { + table, + aggregate_functions, + value_column, + value_source, + default_on_null, + alias, + } => union_spans( + core::iter::once(table.span()) + .chain(aggregate_functions.iter().map(|i| i.span())) + .chain(value_column.iter().map(|i| i.span)) + .chain(core::iter::once(value_source.span())) + .chain(default_on_null.as_ref().map(|i| i.span())) + .chain(alias.as_ref().map(|i| i.span())), + ), + TableFactor::Unpivot { + table, + value, + name, + columns, + alias, + } => union_spans( + core::iter::once(table.span()) + .chain(core::iter::once(value.span)) + .chain(core::iter::once(name.span)) + .chain(columns.iter().map(|i| i.span)) + .chain(alias.as_ref().map(|alias| alias.span())), + ), + TableFactor::MatchRecognize { + table, + partition_by, + order_by, + measures, + rows_per_match: _, + after_match_skip: _, + pattern, + symbols, + alias, + } => union_spans( + core::iter::once(table.span()) + .chain(partition_by.iter().map(|i| i.span())) + .chain(order_by.iter().map(|i| i.span())) + .chain(measures.iter().map(|i| i.span())) + .chain(core::iter::once(pattern.span())) + .chain(symbols.iter().map(|i| i.span())) + .chain(alias.as_ref().map(|i| i.span())), + ), + TableFactor::OpenJsonTable { .. 
} => Span::empty(), + } + } +} + +impl Spanned for PivotValueSource { + fn span(&self) -> Span { + match self { + PivotValueSource::List(vec) => union_spans(vec.iter().map(|i| i.span())), + PivotValueSource::Any(vec) => union_spans(vec.iter().map(|i| i.span())), + PivotValueSource::Subquery(query) => query.span(), + } + } +} + +impl Spanned for ExprWithAlias { + fn span(&self) -> Span { + let ExprWithAlias { expr, alias } = self; + + expr.span().union_opt(&alias.as_ref().map(|i| i.span)) + } +} + +/// # missing span +impl Spanned for MatchRecognizePattern { + fn span(&self) -> Span { + Span::empty() + } +} + +impl Spanned for SymbolDefinition { + fn span(&self) -> Span { + let SymbolDefinition { symbol, definition } = self; + + symbol.span.union(&definition.span()) + } +} + +impl Spanned for Measure { + fn span(&self) -> Span { + let Measure { expr, alias } = self; + + expr.span().union(&alias.span) + } +} + +impl Spanned for OrderByExpr { + fn span(&self) -> Span { + let OrderByExpr { + expr, + options: _, + with_fill, + } = self; + + expr.span().union_opt(&with_fill.as_ref().map(|f| f.span())) + } +} + +impl Spanned for WithFill { + fn span(&self) -> Span { + let WithFill { from, to, step } = self; + + union_spans( + from.iter() + .map(|f| f.span()) + .chain(to.iter().map(|t| t.span())) + .chain(step.iter().map(|s| s.span())), + ) + } +} + +impl Spanned for FunctionArg { + fn span(&self) -> Span { + match self { + FunctionArg::Named { + name, + arg, + operator: _, + } => name.span.union(&arg.span()), + FunctionArg::Unnamed(arg) => arg.span(), + FunctionArg::ExprNamed { + name, + arg, + operator: _, + } => name.span().union(&arg.span()), + } + } +} + +/// # partial span +/// +/// Missing spans: +/// - [FunctionArgExpr::Wildcard] +impl Spanned for FunctionArgExpr { + fn span(&self) -> Span { + match self { + FunctionArgExpr::Expr(expr) => expr.span(), + FunctionArgExpr::QualifiedWildcard(object_name) => { + union_spans(object_name.0.iter().map(|i| i.span())) + } + FunctionArgExpr::Wildcard => Span::empty(), + } + } +} + +impl Spanned for TableAlias { + fn span(&self) -> Span { + let TableAlias { name, columns } = self; + + union_spans(iter::once(name.span).chain(columns.iter().map(|i| i.span()))) + } +} + +impl Spanned for TableAliasColumnDef { + fn span(&self) -> Span { + let TableAliasColumnDef { name, data_type: _ } = self; + + name.span + } +} + +impl Spanned for ValueWithSpan { + fn span(&self) -> Span { + self.span + } +} + +/// The span is stored in the `ValueWrapper` struct +impl Spanned for Value { + fn span(&self) -> Span { + Span::empty() // # todo: Value needs to store spans before this is possible + } +} + +impl Spanned for Join { + fn span(&self) -> Span { + let Join { + relation, + global: _, // bool + join_operator, + } = self; + + relation.span().union(&join_operator.span()) + } +} + +/// # partial span +/// +/// Missing spans: +/// - [JoinOperator::CrossJoin] +/// - [JoinOperator::CrossApply] +/// - [JoinOperator::OuterApply] +impl Spanned for JoinOperator { + fn span(&self) -> Span { + match self { + JoinOperator::Join(join_constraint) => join_constraint.span(), + JoinOperator::Inner(join_constraint) => join_constraint.span(), + JoinOperator::Left(join_constraint) => join_constraint.span(), + JoinOperator::LeftOuter(join_constraint) => join_constraint.span(), + JoinOperator::Right(join_constraint) => join_constraint.span(), + JoinOperator::RightOuter(join_constraint) => join_constraint.span(), + JoinOperator::FullOuter(join_constraint) => join_constraint.span(), + 
JoinOperator::CrossJoin => Span::empty(), + JoinOperator::LeftSemi(join_constraint) => join_constraint.span(), + JoinOperator::RightSemi(join_constraint) => join_constraint.span(), + JoinOperator::LeftAnti(join_constraint) => join_constraint.span(), + JoinOperator::RightAnti(join_constraint) => join_constraint.span(), + JoinOperator::CrossApply => Span::empty(), + JoinOperator::OuterApply => Span::empty(), + JoinOperator::AsOf { + match_condition, + constraint, + } => match_condition.span().union(&constraint.span()), + JoinOperator::Anti(join_constraint) => join_constraint.span(), + JoinOperator::Semi(join_constraint) => join_constraint.span(), + JoinOperator::StraightJoin(join_constraint) => join_constraint.span(), + } + } +} + +/// # partial span +/// +/// Missing spans: +/// - [JoinConstraint::Natural] +/// - [JoinConstraint::None] +impl Spanned for JoinConstraint { + fn span(&self) -> Span { + match self { + JoinConstraint::On(expr) => expr.span(), + JoinConstraint::Using(vec) => union_spans(vec.iter().map(|i| i.span())), + JoinConstraint::Natural => Span::empty(), + JoinConstraint::None => Span::empty(), + } + } +} + +impl Spanned for TableWithJoins { + fn span(&self) -> Span { + let TableWithJoins { relation, joins } = self; + + union_spans(core::iter::once(relation.span()).chain(joins.iter().map(|item| item.span()))) + } +} + +impl Spanned for Select { + fn span(&self) -> Span { + let Select { + select_token, + distinct: _, // todo + top: _, // todo, mysql specific + projection, + into, + from, + lateral_views, + prewhere, + selection, + group_by, + cluster_by, + distribute_by, + sort_by, + having, + named_window, + qualify, + window_before_qualify: _, // bool + value_table_mode: _, // todo, BigQuery specific + connect_by, + top_before_distinct: _, + flavor: _, + } = self; + + union_spans( + core::iter::once(select_token.0.span) + .chain(projection.iter().map(|item| item.span())) + .chain(into.iter().map(|item| item.span())) + .chain(from.iter().map(|item| item.span())) + .chain(lateral_views.iter().map(|item| item.span())) + .chain(prewhere.iter().map(|item| item.span())) + .chain(selection.iter().map(|item| item.span())) + .chain(core::iter::once(group_by.span())) + .chain(cluster_by.iter().map(|item| item.span())) + .chain(distribute_by.iter().map(|item| item.span())) + .chain(sort_by.iter().map(|item| item.span())) + .chain(having.iter().map(|item| item.span())) + .chain(named_window.iter().map(|item| item.span())) + .chain(qualify.iter().map(|item| item.span())) + .chain(connect_by.iter().map(|item| item.span())), + ) + } +} + +impl Spanned for ConnectBy { + fn span(&self) -> Span { + let ConnectBy { + condition, + relationships, + } = self; + + union_spans( + core::iter::once(condition.span()).chain(relationships.iter().map(|item| item.span())), + ) + } +} + +impl Spanned for NamedWindowDefinition { + fn span(&self) -> Span { + let NamedWindowDefinition( + ident, + _, // todo: NamedWindowExpr + ) = self; + + ident.span + } +} + +impl Spanned for LateralView { + fn span(&self) -> Span { + let LateralView { + lateral_view, + lateral_view_name, + lateral_col_alias, + outer: _, // bool + } = self; + + union_spans( + core::iter::once(lateral_view.span()) + .chain(core::iter::once(lateral_view_name.span())) + .chain(lateral_col_alias.iter().map(|i| i.span)), + ) + } +} + +impl Spanned for SelectInto { + fn span(&self) -> Span { + let SelectInto { + temporary: _, // bool + unlogged: _, // bool + table: _, // bool + name, + } = self; + + name.span() + } +} + +impl Spanned for 
UpdateTableFromKind { + fn span(&self) -> Span { + let from = match self { + UpdateTableFromKind::BeforeSet(from) => from, + UpdateTableFromKind::AfterSet(from) => from, + }; + union_spans(from.iter().map(|t| t.span())) + } +} + +impl Spanned for TableObject { + fn span(&self) -> Span { + match self { + TableObject::TableName(ObjectName(segments)) => { + union_spans(segments.iter().map(|i| i.span())) + } + TableObject::TableFunction(func) => func.span(), + } + } +} + +impl Spanned for BeginEndStatements { + fn span(&self) -> Span { + let BeginEndStatements { + begin_token, + statements, + end_token, + } = self; + union_spans( + core::iter::once(begin_token.0.span) + .chain(statements.iter().map(|i| i.span())) + .chain(core::iter::once(end_token.0.span)), + ) + } +} + +impl Spanned for OpenStatement { + fn span(&self) -> Span { + let OpenStatement { cursor_name } = self; + cursor_name.span + } +} + +#[cfg(test)] +pub mod tests { + use crate::dialect::{Dialect, GenericDialect, SnowflakeDialect}; + use crate::parser::Parser; + use crate::tokenizer::Span; + + use super::*; + + struct SpanTest<'a>(Parser<'a>, &'a str); + + impl<'a> SpanTest<'a> { + fn new(dialect: &'a dyn Dialect, sql: &'a str) -> Self { + Self(Parser::new(dialect).try_with_sql(sql).unwrap(), sql) + } + + // get the subsection of the source string that corresponds to the span + // only works on single-line strings + fn get_source(&self, span: Span) -> &'a str { + // lines in spans are 1-indexed + &self.1[(span.start.column as usize - 1)..(span.end.column - 1) as usize] + } + } + + #[test] + fn test_join() { + let dialect = &GenericDialect; + let mut test = SpanTest::new( + dialect, + "SELECT id, name FROM users LEFT JOIN companies ON users.company_id = companies.id", + ); + + let query = test.0.parse_select().unwrap(); + let select_span = query.span(); + + assert_eq!( + test.get_source(select_span), + "SELECT id, name FROM users LEFT JOIN companies ON users.company_id = companies.id" + ); + + let join_span = query.from[0].joins[0].span(); + + // 'LEFT JOIN' missing + assert_eq!( + test.get_source(join_span), + "companies ON users.company_id = companies.id" + ); + } + + #[test] + pub fn test_union() { + let dialect = &GenericDialect; + let mut test = SpanTest::new( + dialect, + "SELECT a FROM postgres.public.source UNION SELECT a FROM postgres.public.source", + ); + + let query = test.0.parse_query().unwrap(); + let select_span = query.span(); + + assert_eq!( + test.get_source(select_span), + "SELECT a FROM postgres.public.source UNION SELECT a FROM postgres.public.source" + ); + } + + #[test] + pub fn test_subquery() { + let dialect = &GenericDialect; + let mut test = SpanTest::new( + dialect, + "SELECT a FROM (SELECT a FROM postgres.public.source) AS b", + ); + + let query = test.0.parse_select().unwrap(); + let select_span = query.span(); + + assert_eq!( + test.get_source(select_span), + "SELECT a FROM (SELECT a FROM postgres.public.source) AS b" + ); + + let subquery_span = query.from[0].span(); + + // left paren missing + assert_eq!( + test.get_source(subquery_span), + "SELECT a FROM postgres.public.source) AS b" + ); + } + + #[test] + pub fn test_cte() { + let dialect = &GenericDialect; + let mut test = SpanTest::new(dialect, "WITH cte_outer AS (SELECT a FROM postgres.public.source), cte_ignored AS (SELECT a FROM cte_outer), cte_inner AS (SELECT a FROM cte_outer) SELECT a FROM cte_inner"); + + let query = test.0.parse_query().unwrap(); + + let select_span = query.span(); + + assert_eq!(test.get_source(select_span), "WITH 
cte_outer AS (SELECT a FROM postgres.public.source), cte_ignored AS (SELECT a FROM cte_outer), cte_inner AS (SELECT a FROM cte_outer) SELECT a FROM cte_inner"); + } + + #[test] + pub fn test_snowflake_lateral_flatten() { + let dialect = &SnowflakeDialect; + let mut test = SpanTest::new(dialect, "SELECT FLATTENED.VALUE:field::TEXT AS FIELD FROM SNOWFLAKE.SCHEMA.SOURCE AS S, LATERAL FLATTEN(INPUT => S.JSON_ARRAY) AS FLATTENED"); + + let query = test.0.parse_select().unwrap(); + + let select_span = query.span(); + + assert_eq!(test.get_source(select_span), "SELECT FLATTENED.VALUE:field::TEXT AS FIELD FROM SNOWFLAKE.SCHEMA.SOURCE AS S, LATERAL FLATTEN(INPUT => S.JSON_ARRAY) AS FLATTENED"); + } + + #[test] + pub fn test_wildcard_from_cte() { + let dialect = &GenericDialect; + let mut test = SpanTest::new( + dialect, + "WITH cte AS (SELECT a FROM postgres.public.source) SELECT cte.* FROM cte", + ); + + let query = test.0.parse_query().unwrap(); + let cte_span = query.clone().with.unwrap().cte_tables[0].span(); + let cte_query_span = query.clone().with.unwrap().cte_tables[0].query.span(); + let body_span = query.body.span(); + + // the WITH keyword is part of the query + assert_eq!( + test.get_source(cte_span), + "cte AS (SELECT a FROM postgres.public.source)" + ); + assert_eq!( + test.get_source(cte_query_span), + "SELECT a FROM postgres.public.source" + ); + + assert_eq!(test.get_source(body_span), "SELECT cte.* FROM cte"); + } +} diff --git a/src/ast/trigger.rs b/src/ast/trigger.rs index cf1c8c466..2c64e4239 100644 --- a/src/ast/trigger.rs +++ b/src/ast/trigger.rs @@ -110,6 +110,7 @@ impl fmt::Display for TriggerEvent { #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] pub enum TriggerPeriod { + For, After, Before, InsteadOf, @@ -118,6 +119,7 @@ impl fmt::Display for TriggerPeriod { impl fmt::Display for TriggerPeriod { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { + TriggerPeriod::For => write!(f, "FOR"), TriggerPeriod::After => write!(f, "AFTER"), TriggerPeriod::Before => write!(f, "BEFORE"), TriggerPeriod::InsteadOf => write!(f, "INSTEAD OF"), diff --git a/src/ast/value.rs b/src/ast/value.rs index 30d956a07..77e2e0e81 100644 --- a/src/ast/value.rs +++ b/src/ast/value.rs @@ -26,14 +26,97 @@ use bigdecimal::BigDecimal; #[cfg(feature = "serde")] use serde::{Deserialize, Serialize}; -use crate::ast::Ident; +use crate::{ast::Ident, tokenizer::Span}; #[cfg(feature = "visitor")] use sqlparser_derive::{Visit, VisitMut}; +/// Wraps a primitive SQL [`Value`] with its [`Span`] location +/// +/// # Example: create a `ValueWithSpan` from a `Value` +/// ``` +/// # use sqlparser::ast::{Value, ValueWithSpan}; +/// # use sqlparser::tokenizer::{Location, Span}; +/// let value = Value::SingleQuotedString(String::from("endpoint")); +/// // from line 1, column 1 to line 1, column 7 +/// let span = Span::new(Location::new(1, 1), Location::new(1, 7)); +/// let value_with_span = value.with_span(span); +/// ``` +/// +/// # Example: create a `ValueWithSpan` from a `Value` with an empty span +/// +/// You can call [`Value::with_empty_span`] to create a `ValueWithSpan` with an empty span +/// ``` +/// # use sqlparser::ast::{Value, ValueWithSpan}; +/// # use sqlparser::tokenizer::{Location, Span}; +/// let value = Value::SingleQuotedString(String::from("endpoint")); +/// let value_with_span = value.with_empty_span(); +/// assert_eq!(value_with_span.span, Span::empty()); +/// ``` +/// +/// You can also use the [`From`] trait to convert
`ValueWithSpan` to/from `Value`s +/// ``` +/// # use sqlparser::ast::{Value, ValueWithSpan}; +/// # use sqlparser::tokenizer::{Location, Span}; +/// let value = Value::SingleQuotedString(String::from("endpoint")); +/// // converting `Value` to `ValueWithSpan` results in an empty span +/// let value_with_span: ValueWithSpan = value.into(); +/// assert_eq!(value_with_span.span, Span::empty()); +/// // convert back to `Value` +/// let value: Value = value_with_span.into(); +/// ``` +#[derive(Debug, Clone, Eq)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub struct ValueWithSpan { + pub value: Value, + pub span: Span, +} + +impl PartialEq for ValueWithSpan { + fn eq(&self, other: &Self) -> bool { + self.value == other.value + } +} + +impl Ord for ValueWithSpan { + fn cmp(&self, other: &Self) -> core::cmp::Ordering { + self.value.cmp(&other.value) + } +} + +impl PartialOrd for ValueWithSpan { + fn partial_cmp(&self, other: &Self) -> Option<core::cmp::Ordering> { + Some(Ord::cmp(self, other)) + } +} + +impl core::hash::Hash for ValueWithSpan { + fn hash<H: core::hash::Hasher>(&self, state: &mut H) { + self.value.hash(state); + } +} + +impl From<Value> for ValueWithSpan { + fn from(value: Value) -> Self { + value.with_empty_span() + } +} + +impl From<ValueWithSpan> for Value { + fn from(value: ValueWithSpan) -> Self { + value.value + } +} + /// Primitive SQL values such as number and string #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +#[cfg_attr( + feature = "visitor", + derive(Visit, VisitMut), + visit(with = "visit_value") +)] + pub enum Value { /// Numeric literal #[cfg(not(feature = "bigdecimal"))] @@ -97,6 +180,53 @@ pub enum Value { Placeholder(String), } +impl ValueWithSpan { + /// If the underlying literal is a string, regardless of quote style, returns the associated string value + pub fn into_string(self) -> Option<String> { + self.value.into_string() + } +} + +impl Value { + /// If the underlying literal is a string, regardless of quote style, returns the associated string value + pub fn into_string(self) -> Option<String> { + match self { + Value::SingleQuotedString(s) + | Value::DoubleQuotedString(s) + | Value::TripleSingleQuotedString(s) + | Value::TripleDoubleQuotedString(s) + | Value::SingleQuotedByteStringLiteral(s) + | Value::DoubleQuotedByteStringLiteral(s) + | Value::TripleSingleQuotedByteStringLiteral(s) + | Value::TripleDoubleQuotedByteStringLiteral(s) + | Value::SingleQuotedRawStringLiteral(s) + | Value::DoubleQuotedRawStringLiteral(s) + | Value::TripleSingleQuotedRawStringLiteral(s) + | Value::TripleDoubleQuotedRawStringLiteral(s) + | Value::EscapedStringLiteral(s) + | Value::UnicodeStringLiteral(s) + | Value::NationalStringLiteral(s) + | Value::HexStringLiteral(s) => Some(s), + Value::DollarQuotedString(s) => Some(s.value), + _ => None, + } + } + + pub fn with_span(self, span: Span) -> ValueWithSpan { + ValueWithSpan { value: self, span } + } + + pub fn with_empty_span(self) -> ValueWithSpan { + self.with_span(Span::empty()) + } +} + +impl fmt::Display for ValueWithSpan { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "{}", self.value) + } +} + impl fmt::Display for Value { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { @@ -155,7 +285,9 @@ impl fmt::Display for DollarQuotedString { #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] pub enum DateTimeField { Year, + Years, Month, + Months,
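The `ValueWithSpan` wrapper introduced above deliberately defines equality, ordering, and hashing on the inner `Value` only, so span information never affects comparisons. A minimal usage sketch (not part of the patch), assuming the API exactly as added in this diff; the `demo` function name is purely illustrative:

```rust
use sqlparser::ast::{Value, ValueWithSpan};
use sqlparser::tokenizer::{Location, Span};

fn demo() {
    let value = Value::SingleQuotedString(String::from("endpoint"));

    // Wrapping without location information yields an empty span.
    let wrapped: ValueWithSpan = value.clone().with_empty_span();
    assert_eq!(wrapped.span, Span::empty());

    // Equality is defined on the value alone, so differing spans still compare equal.
    let located = value
        .clone()
        .with_span(Span::new(Location::new(1, 8), Location::new(1, 18)));
    assert_eq!(wrapped, located);

    // Converting to `ValueWithSpan` and back returns the original value.
    let round_trip: Value = ValueWithSpan::from(value.clone()).into();
    assert_eq!(round_trip, value);

    // `into_string` extracts the inner string regardless of quote style.
    assert_eq!(value.into_string(), Some(String::from("endpoint")));
}
```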
/// Week optionally followed by a WEEKDAY. /// /// ```sql @@ -164,14 +296,19 @@ pub enum DateTimeField { /// /// [BigQuery](https://cloud.google.com/bigquery/docs/reference/standard-sql/date_functions#extract) Week(Option), + Weeks, Day, DayOfWeek, DayOfYear, + Days, Date, Datetime, Hour, + Hours, Minute, + Minutes, Second, + Seconds, Century, Decade, Dow, @@ -210,7 +347,9 @@ impl fmt::Display for DateTimeField { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { DateTimeField::Year => write!(f, "YEAR"), + DateTimeField::Years => write!(f, "YEARS"), DateTimeField::Month => write!(f, "MONTH"), + DateTimeField::Months => write!(f, "MONTHS"), DateTimeField::Week(week_day) => { write!(f, "WEEK")?; if let Some(week_day) = week_day { @@ -218,14 +357,19 @@ impl fmt::Display for DateTimeField { } Ok(()) } + DateTimeField::Weeks => write!(f, "WEEKS"), DateTimeField::Day => write!(f, "DAY"), DateTimeField::DayOfWeek => write!(f, "DAYOFWEEK"), DateTimeField::DayOfYear => write!(f, "DAYOFYEAR"), + DateTimeField::Days => write!(f, "DAYS"), DateTimeField::Date => write!(f, "DATE"), DateTimeField::Datetime => write!(f, "DATETIME"), DateTimeField::Hour => write!(f, "HOUR"), + DateTimeField::Hours => write!(f, "HOURS"), DateTimeField::Minute => write!(f, "MINUTE"), + DateTimeField::Minutes => write!(f, "MINUTES"), DateTimeField::Second => write!(f, "SECOND"), + DateTimeField::Seconds => write!(f, "SECONDS"), DateTimeField::Century => write!(f, "CENTURY"), DateTimeField::Decade => write!(f, "DECADE"), DateTimeField::Dow => write!(f, "DOW"), @@ -256,12 +400,41 @@ impl fmt::Display for DateTimeField { } } +#[derive(Debug, Clone, PartialEq, Eq, Ord, PartialOrd, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +/// The Unicode Standard defines four normalization forms, which are intended to eliminate +/// certain distinctions between visually or functionally identical characters. +/// +/// See [Unicode Normalization Forms](https://unicode.org/reports/tr15/) for details. +pub enum NormalizationForm { + /// Canonical Decomposition, followed by Canonical Composition. + NFC, + /// Canonical Decomposition. + NFD, + /// Compatibility Decomposition, followed by Canonical Composition. + NFKC, + /// Compatibility Decomposition. + NFKD, +} + +impl fmt::Display for NormalizationForm { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + NormalizationForm::NFC => write!(f, "NFC"), + NormalizationForm::NFD => write!(f, "NFD"), + NormalizationForm::NFKC => write!(f, "NFKC"), + NormalizationForm::NFKD => write!(f, "NFKD"), + } + } +} + pub struct EscapeQuotedString<'a> { string: &'a str, quote: char, } -impl<'a> fmt::Display for EscapeQuotedString<'a> { +impl fmt::Display for EscapeQuotedString<'_> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { // EscapeQuotedString doesn't know which mode of escape was // chosen by the user. 
So this code must correctly display @@ -325,7 +498,7 @@ pub fn escape_double_quote_string(s: &str) -> EscapeQuotedString<'_> { pub struct EscapeEscapedStringLiteral<'a>(&'a str); -impl<'a> fmt::Display for EscapeEscapedStringLiteral<'a> { +impl fmt::Display for EscapeEscapedStringLiteral<'_> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { for c in self.0.chars() { match c { @@ -359,7 +532,7 @@ pub fn escape_escaped_string(s: &str) -> EscapeEscapedStringLiteral<'_> { pub struct EscapeUnicodeStringLiteral<'a>(&'a str); -impl<'a> fmt::Display for EscapeUnicodeStringLiteral<'a> { +impl fmt::Display for EscapeUnicodeStringLiteral<'_> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { for c in self.0.chars() { match c { diff --git a/src/ast/visitor.rs b/src/ast/visitor.rs index 418e0a299..50985a3e7 100644 --- a/src/ast/visitor.rs +++ b/src/ast/visitor.rs @@ -17,7 +17,7 @@ //! Recursive visitors for ast Nodes. See [`Visitor`] for more details. -use crate::ast::{Expr, ObjectName, Query, Statement, TableFactor}; +use crate::ast::{Expr, ObjectName, Query, Statement, TableFactor, Value}; use core::ops::ControlFlow; /// A type that can be visited by a [`Visitor`]. See [`Visitor`] for @@ -233,6 +233,16 @@ pub trait Visitor { fn post_visit_statement(&mut self, _statement: &Statement) -> ControlFlow<Self::Break> { ControlFlow::Continue(()) } + + /// Invoked for any Value that appears in the AST before visiting children + fn pre_visit_value(&mut self, _value: &Value) -> ControlFlow<Self::Break> { + ControlFlow::Continue(()) + } + + /// Invoked for any Value that appears in the AST after visiting children + fn post_visit_value(&mut self, _value: &Value) -> ControlFlow<Self::Break> { + ControlFlow::Continue(()) + } } /// A visitor that can be used to mutate an AST tree. @@ -337,6 +347,16 @@ pub trait VisitorMut { fn post_visit_statement(&mut self, _statement: &mut Statement) -> ControlFlow<Self::Break> { ControlFlow::Continue(()) } + + /// Invoked for any value that appears in the AST before visiting children + fn pre_visit_value(&mut self, _value: &mut Value) -> ControlFlow<Self::Break> { + ControlFlow::Continue(()) + } + + /// Invoked for any value that appears in the AST after visiting children + fn post_visit_value(&mut self, _value: &mut Value) -> ControlFlow<Self::Break> { + ControlFlow::Continue(()) + } } struct RelationVisitor<F>(F); @@ -403,7 +423,7 @@ where /// ``` /// # use sqlparser::parser::Parser; /// # use sqlparser::dialect::GenericDialect; -/// # use sqlparser::ast::{ObjectName, visit_relations_mut}; +/// # use sqlparser::ast::{ObjectName, ObjectNamePart, Ident, visit_relations_mut}; /// # use core::ops::ControlFlow; /// let sql = "SELECT a FROM foo"; /// let mut statements = Parser::parse_sql(&GenericDialect{}, sql) @@ -411,7 +431,7 @@ where /// /// // visit statements, renaming table foo to bar /// visit_relations_mut(&mut statements, |table| { -/// table.0[0].value = table.0[0].value.replace("foo", "bar"); +/// table.0[0] = ObjectNamePart::Identifier(Ident::new("bar")); /// ControlFlow::<()>::Continue(()) /// }); /// @@ -503,7 +523,7 @@ where /// // Remove all select limits in sub-queries /// visit_expressions_mut(&mut statements, |expr| { /// if let Expr::Subquery(q) = expr { -/// q.limit = None +/// q.limit_clause = None; /// } /// ControlFlow::<()>::Continue(()) /// }); @@ -527,9 +547,10 @@ where /// /// visit_expressions_mut(&mut statements, |expr| { /// if matches!(expr, Expr::Identifier(col_name) if col_name.value == "x") { -/// let old_expr = std::mem::replace(expr, Expr::Value(Value::Null)); +/// let old_expr = std::mem::replace(expr,
Expr::value(Value::Null)); /// *expr = Expr::Function(Function { -/// name: ObjectName(vec![Ident::new("f")]), +/// name: ObjectName::from(vec![Ident::new("f")]), +/// uses_odbc_syntax: false, /// args: FunctionArguments::List(FunctionArgumentList { /// duplicate_treatment: None, /// args: vec![FunctionArg::Unnamed(FunctionArgExpr::Expr(old_expr))], @@ -626,7 +647,7 @@ where /// // Remove all select limits in outer statements (not in sub-queries) /// visit_statements_mut(&mut statements, |stmt| { /// if let Statement::Query(q) = stmt { -/// q.limit = None +/// q.limit_clause = None; /// } /// ControlFlow::<()>::Continue(()) /// }); @@ -646,6 +667,7 @@ where #[cfg(test)] mod tests { use super::*; + use crate::ast::Statement; use crate::dialect::GenericDialect; use crate::parser::Parser; use crate::tokenizer::Tokenizer; @@ -719,7 +741,7 @@ mod tests { } } - fn do_visit(sql: &str) -> Vec { + fn do_visit(sql: &str, visitor: &mut V) -> Statement { let dialect = GenericDialect {}; let tokens = Tokenizer::new(&dialect, sql).tokenize().unwrap(); let s = Parser::new(&dialect) @@ -727,9 +749,8 @@ mod tests { .parse_statement() .unwrap(); - let mut visitor = TestVisitor::default(); - s.visit(&mut visitor); - visitor.visited + s.visit(visitor); + s } #[test] @@ -876,12 +897,111 @@ mod tests { "POST: QUERY: SELECT * FROM monthly_sales PIVOT(SUM(a.amount) FOR a.MONTH IN ('JAN', 'FEB', 'MAR', 'APR')) AS p (c, d) ORDER BY EMPID", "POST: STATEMENT: SELECT * FROM monthly_sales PIVOT(SUM(a.amount) FOR a.MONTH IN ('JAN', 'FEB', 'MAR', 'APR')) AS p (c, d) ORDER BY EMPID", ] - ) + ), + ( + "SHOW COLUMNS FROM t1", + vec![ + "PRE: STATEMENT: SHOW COLUMNS FROM t1", + "PRE: RELATION: t1", + "POST: RELATION: t1", + "POST: STATEMENT: SHOW COLUMNS FROM t1", + ], + ), ]; for (sql, expected) in tests { - let actual = do_visit(sql); - let actual: Vec<_> = actual.iter().map(|x| x.as_str()).collect(); + let mut visitor = TestVisitor::default(); + let _ = do_visit(sql, &mut visitor); + let actual: Vec<_> = visitor.visited.iter().map(|x| x.as_str()).collect(); assert_eq!(actual, expected) } } + + struct QuickVisitor; // [`TestVisitor`] is too slow to iterate over thousands of nodes + + impl Visitor for QuickVisitor { + type Break = (); + } + + #[test] + fn overflow() { + let cond = (0..1000) + .map(|n| format!("X = {}", n)) + .collect::>() + .join(" OR "); + let sql = format!("SELECT x where {0}", cond); + + let dialect = GenericDialect {}; + let tokens = Tokenizer::new(&dialect, sql.as_str()).tokenize().unwrap(); + let s = Parser::new(&dialect) + .with_tokens(tokens) + .parse_statement() + .unwrap(); + + let mut visitor = QuickVisitor {}; + s.visit(&mut visitor); + } +} + +#[cfg(test)] +mod visit_mut_tests { + use crate::ast::{Statement, Value, VisitMut, VisitorMut}; + use crate::dialect::GenericDialect; + use crate::parser::Parser; + use crate::tokenizer::Tokenizer; + use core::ops::ControlFlow; + + #[derive(Default)] + struct MutatorVisitor { + index: u64, + } + + impl VisitorMut for MutatorVisitor { + type Break = (); + + fn pre_visit_value(&mut self, value: &mut Value) -> ControlFlow { + self.index += 1; + *value = Value::SingleQuotedString(format!("REDACTED_{}", self.index)); + ControlFlow::Continue(()) + } + + fn post_visit_value(&mut self, _value: &mut Value) -> ControlFlow { + ControlFlow::Continue(()) + } + } + + fn do_visit_mut(sql: &str, visitor: &mut V) -> Statement { + let dialect = GenericDialect {}; + let tokens = Tokenizer::new(&dialect, sql).tokenize().unwrap(); + let mut s = Parser::new(&dialect) + 
.with_tokens(tokens) + .parse_statement() + .unwrap(); + + s.visit(visitor); + s + } + + #[test] + fn test_value_redact() { + let tests = vec![ + ( + concat!( + "SELECT * FROM monthly_sales ", + "PIVOT(SUM(a.amount) FOR a.MONTH IN ('JAN', 'FEB', 'MAR', 'APR')) AS p (c, d) ", + "ORDER BY EMPID" + ), + concat!( + "SELECT * FROM monthly_sales ", + "PIVOT(SUM(a.amount) FOR a.MONTH IN ('REDACTED_1', 'REDACTED_2', 'REDACTED_3', 'REDACTED_4')) AS p (c, d) ", + "ORDER BY EMPID" + ), + ), + ]; + + for (sql, expected) in tests { + let mut visitor = MutatorVisitor::default(); + let mutated = do_visit_mut(sql, &mut visitor); + assert_eq!(mutated.to_string(), expected) + } + } } diff --git a/src/dialect/bigquery.rs b/src/dialect/bigquery.rs index 96633552b..68ca1390a 100644 --- a/src/dialect/bigquery.rs +++ b/src/dialect/bigquery.rs @@ -15,14 +15,41 @@ // specific language governing permissions and limitations // under the License. +use crate::ast::Statement; use crate::dialect::Dialect; +use crate::keywords::Keyword; +use crate::parser::{Parser, ParserError}; + +/// These keywords are disallowed as column identifiers. Such that +/// `SELECT 5 AS FROM T` is rejected by BigQuery. +const RESERVED_FOR_COLUMN_ALIAS: &[Keyword] = &[ + Keyword::WITH, + Keyword::SELECT, + Keyword::WHERE, + Keyword::GROUP, + Keyword::HAVING, + Keyword::ORDER, + Keyword::LATERAL, + Keyword::LIMIT, + Keyword::FETCH, + Keyword::UNION, + Keyword::EXCEPT, + Keyword::INTERSECT, + Keyword::FROM, + Keyword::INTO, + Keyword::END, +]; /// A [`Dialect`] for [Google Bigquery](https://cloud.google.com/bigquery/) #[derive(Debug, Default)] pub struct BigQueryDialect; impl Dialect for BigQueryDialect { - // See https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#identifiers + fn parse_statement(&self, parser: &mut Parser) -> Option> { + self.maybe_parse_statement(parser) + } + + /// See fn is_delimited_identifier_start(&self, ch: char) -> bool { ch == '`' } @@ -31,8 +58,16 @@ impl Dialect for BigQueryDialect { true } + /// See + fn supports_column_definition_trailing_commas(&self) -> bool { + true + } + fn is_identifier_start(&self, ch: char) -> bool { ch.is_ascii_lowercase() || ch.is_ascii_uppercase() || ch == '_' + // BigQuery supports `@@foo.bar` variable syntax in its procedural language. + // https://cloud.google.com/bigquery/docs/reference/standard-sql/procedural-language#beginexceptionend + || ch == '@' } fn is_identifier_part(&self, ch: char) -> bool { @@ -72,4 +107,82 @@ impl Dialect for BigQueryDialect { fn require_interval_qualifier(&self) -> bool { true } + + // See https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#constructing_a_struct + fn supports_struct_literal(&self) -> bool { + true + } + + /// See + fn supports_select_expr_star(&self) -> bool { + true + } + + /// See + fn supports_execute_immediate(&self) -> bool { + true + } + + // See + fn supports_timestamp_versioning(&self) -> bool { + true + } + + // See + fn supports_group_by_expr(&self) -> bool { + true + } + + fn is_column_alias(&self, kw: &Keyword, _parser: &mut Parser) -> bool { + !RESERVED_FOR_COLUMN_ALIAS.contains(kw) + } + + fn supports_pipe_operator(&self) -> bool { + true + } +} + +impl BigQueryDialect { + fn maybe_parse_statement(&self, parser: &mut Parser) -> Option> { + if parser.peek_keyword(Keyword::BEGIN) { + return Some(self.parse_begin(parser)); + } + None + } + + /// Parse a `BEGIN` statement. 
+ /// + fn parse_begin(&self, parser: &mut Parser) -> Result { + parser.expect_keyword(Keyword::BEGIN)?; + + let statements = parser.parse_statement_list(&[Keyword::EXCEPTION, Keyword::END])?; + + let has_exception_when_clause = parser.parse_keywords(&[ + Keyword::EXCEPTION, + Keyword::WHEN, + Keyword::ERROR, + Keyword::THEN, + ]); + let exception_statements = if has_exception_when_clause { + if !parser.peek_keyword(Keyword::END) { + Some(parser.parse_statement_list(&[Keyword::END])?) + } else { + Some(Default::default()) + } + } else { + None + }; + + parser.expect_keyword(Keyword::END)?; + + Ok(Statement::StartTransaction { + begin: true, + statements, + exception_statements, + has_end_keyword: true, + transaction: None, + modifier: None, + modes: Default::default(), + }) + } } diff --git a/src/dialect/clickhouse.rs b/src/dialect/clickhouse.rs index 0c8f08040..f5e70c309 100644 --- a/src/dialect/clickhouse.rs +++ b/src/dialect/clickhouse.rs @@ -50,4 +50,48 @@ impl Dialect for ClickHouseDialect { fn supports_limit_comma(&self) -> bool { true } + + fn supports_insert_table_function(&self) -> bool { + true + } + + fn supports_insert_format(&self) -> bool { + true + } + + fn supports_numeric_literal_underscores(&self) -> bool { + true + } + + // ClickHouse uses this for some FORMAT expressions in `INSERT` context, e.g. when inserting + // with FORMAT JSONEachRow a raw JSON key-value expression is valid and expected. + // + // [ClickHouse formats](https://clickhouse.com/docs/en/interfaces/formats) + fn supports_dictionary_syntax(&self) -> bool { + true + } + + /// See + fn supports_lambda_functions(&self) -> bool { + true + } + + fn supports_from_first_select(&self) -> bool { + true + } + + /// See + fn supports_order_by_all(&self) -> bool { + true + } + + // See + fn supports_group_by_expr(&self) -> bool { + true + } + + /// See + fn supports_group_by_with_modifier(&self) -> bool { + true + } } diff --git a/src/dialect/databricks.rs b/src/dialect/databricks.rs index 4924e8077..a3476b1b8 100644 --- a/src/dialect/databricks.rs +++ b/src/dialect/databricks.rs @@ -59,4 +59,9 @@ impl Dialect for DatabricksDialect { fn require_interval_qualifier(&self) -> bool { true } + + // See https://docs.databricks.com/en/sql/language-manual/functions/struct.html + fn supports_struct_literal(&self) -> bool { + true + } } diff --git a/src/dialect/duckdb.rs b/src/dialect/duckdb.rs index e1b8db118..3366c6705 100644 --- a/src/dialect/duckdb.rs +++ b/src/dialect/duckdb.rs @@ -47,6 +47,10 @@ impl Dialect for DuckDbDialect { true } + fn supports_named_fn_args_with_assignment_operator(&self) -> bool { + true + } + // DuckDB uses this syntax for `STRUCT`s. // // https://duckdb.org/docs/sql/data_types/struct.html#creating-structs @@ -61,9 +65,33 @@ impl Dialect for DuckDbDialect { true } + /// See + fn supports_lambda_functions(&self) -> bool { + true + } + // DuckDB is compatible with PostgreSQL syntax for this statement, // although not all features may be implemented. 
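For context on the `BigQueryDialect::parse_begin` override added earlier in this diff: a procedural `BEGIN ... EXCEPTION WHEN ERROR THEN ... END` block is parsed into `Statement::StartTransaction`, with the handler body stored in `exception_statements`. A rough sketch of how that could be exercised (an assumption based on the code above, not an official test from this patch):

```rust
use sqlparser::ast::Statement;
use sqlparser::dialect::BigQueryDialect;
use sqlparser::parser::Parser;

fn demo() {
    let sql = "BEGIN SELECT 1; EXCEPTION WHEN ERROR THEN SELECT 2; END";
    let parsed = Parser::parse_sql(&BigQueryDialect {}, sql).unwrap();

    match &parsed[0] {
        Statement::StartTransaction {
            statements,
            exception_statements,
            ..
        } => {
            // One statement in the main block, one in the exception handler.
            assert_eq!(statements.len(), 1);
            assert_eq!(exception_statements.as_ref().map(|s| s.len()), Some(1));
        }
        other => panic!("unexpected statement: {:?}", other),
    }
}
```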
fn supports_explain_with_utility_options(&self) -> bool { true } + + /// See DuckDB + fn supports_load_extension(&self) -> bool { + true + } + + // See DuckDB + fn supports_array_typedef_with_brackets(&self) -> bool { + true + } + + fn supports_from_first_select(&self) -> bool { + true + } + + /// See DuckDB + fn supports_order_by_all(&self) -> bool { + true + } } diff --git a/src/dialect/generic.rs b/src/dialect/generic.rs index 0a5464c9c..8f57e487f 100644 --- a/src/dialect/generic.rs +++ b/src/dialect/generic.rs @@ -48,6 +48,10 @@ impl Dialect for GenericDialect { true } + fn supports_group_by_with_modifier(&self) -> bool { + true + } + fn supports_connect_by(&self) -> bool { true } @@ -111,4 +115,56 @@ impl Dialect for GenericDialect { fn supports_try_convert(&self) -> bool { true } + + fn supports_comment_on(&self) -> bool { + true + } + + fn supports_load_extension(&self) -> bool { + true + } + + fn supports_named_fn_args_with_assignment_operator(&self) -> bool { + true + } + + fn supports_struct_literal(&self) -> bool { + true + } + + fn supports_empty_projections(&self) -> bool { + true + } + + fn supports_nested_comments(&self) -> bool { + true + } + + fn supports_user_host_grantee(&self) -> bool { + true + } + + fn supports_string_escape_constant(&self) -> bool { + true + } + + fn supports_array_typedef_with_brackets(&self) -> bool { + true + } + + fn supports_match_against(&self) -> bool { + true + } + + fn supports_set_names(&self) -> bool { + true + } + + fn supports_comma_separated_set_assignments(&self) -> bool { + true + } + + fn supports_filter_during_aggregation(&self) -> bool { + true + } } diff --git a/src/dialect/hive.rs b/src/dialect/hive.rs index 63642b33c..3e15d395b 100644 --- a/src/dialect/hive.rs +++ b/src/dialect/hive.rs @@ -51,4 +51,24 @@ impl Dialect for HiveDialect { fn require_interval_qualifier(&self) -> bool { true } + + /// See + fn supports_bang_not_operator(&self) -> bool { + true + } + + /// See + fn supports_load_data(&self) -> bool { + true + } + + /// See + fn supports_table_sample_before_alias(&self) -> bool { + true + } + + /// See + fn supports_group_by_with_modifier(&self) -> bool { + true + } } diff --git a/src/dialect/mod.rs b/src/dialect/mod.rs index 28e7ac7d1..6fbbc7a23 100644 --- a/src/dialect/mod.rs +++ b/src/dialect/mod.rs @@ -75,6 +75,15 @@ macro_rules! dialect_of { }; } +// Similar to above, but for applying directly against an instance of dialect +// instead of a struct member named dialect. This avoids lifetime issues when +// mixing match guards and token references. +macro_rules! dialect_is { + ($dialect:ident is $($dialect_type:ty)|+) => { + ($($dialect.is::<$dialect_type>())||+) + } +} + /// Encapsulates the differences between SQL implementations. /// /// # SQL Dialects @@ -128,14 +137,39 @@ pub trait Dialect: Debug + Any { ch == '"' || ch == '`' } - /// Return the character used to quote identifiers. - fn identifier_quote_style(&self, _identifier: &str) -> Option { + /// Determine if a character starts a potential nested quoted identifier. 
+ /// Example: RedShift supports the following quote styles to all mean the same thing: + /// ```sql + /// SELECT 1 AS foo; + /// SELECT 1 AS "foo"; + /// SELECT 1 AS [foo]; + /// SELECT 1 AS ["foo"]; + /// ``` + fn is_nested_delimited_identifier_start(&self, _ch: char) -> bool { + false + } + + /// Only applicable whenever [`Self::is_nested_delimited_identifier_start`] returns true + /// If the next sequence of tokens potentially represent a nested identifier, then this method + /// returns a tuple containing the outer quote style, and if present, the inner (nested) quote style. + /// + /// Example (Redshift): + /// ```text + /// `["foo"]` => Some(`[`, Some(`"`)) + /// `[foo]` => Some(`[`, None) + /// `[0]` => None + /// `"foo"` => None + /// ``` + fn peek_nested_delimited_identifier_quotes( + &self, + mut _chars: Peekable>, + ) -> Option<(char, Option)> { None } - /// Determine if quoted characters are proper for identifier - fn is_proper_identifier_inside_quotes(&self, mut _chars: Peekable>) -> bool { - true + /// Return the character used to quote identifiers. + fn identifier_quote_style(&self, _identifier: &str) -> Option { + None } /// Determine if a character is a valid start character for an unquoted identifier @@ -167,6 +201,33 @@ pub trait Dialect: Debug + Any { false } + /// Determine whether the dialect strips the backslash when escaping LIKE wildcards (%, _). + /// + /// [MySQL] has a special case when escaping single quoted strings which leaves these unescaped + /// so they can be used in LIKE patterns without double-escaping (as is necessary in other + /// escaping dialects, such as [Snowflake]). Generally, special characters have escaping rules + /// causing them to be replaced with a different byte sequences (e.g. `'\0'` becoming the zero + /// byte), and the default if an escaped character does not have a specific escaping rule is to + /// strip the backslash (e.g. there is no rule for `h`, so `'\h' = 'h'`). MySQL's special case + /// for ignoring LIKE wildcard escapes is to *not* strip the backslash, so that `'\%' = '\\%'`. + /// This applies to all string literals though, not just those used in LIKE patterns. + /// + /// ```text + /// mysql> select '\_', hex('\\'), hex('_'), hex('\_'); + /// +----+-----------+----------+-----------+ + /// | \_ | hex('\\') | hex('_') | hex('\_') | + /// +----+-----------+----------+-----------+ + /// | \_ | 5C | 5F | 5C5F | + /// +----+-----------+----------+-----------+ + /// 1 row in set (0.00 sec) + /// ``` + /// + /// [MySQL]: https://dev.mysql.com/doc/refman/8.4/en/string-literals.html + /// [Snowflake]: https://docs.snowflake.com/en/sql-reference/functions/like#usage-notes + fn ignores_wildcard_escapes(&self) -> bool { + false + } + /// Determine if the dialect supports string literals with `U&` prefix. /// This is used to specify Unicode code points in string literals. /// For example, in PostgreSQL, the following is a valid string literal: @@ -211,11 +272,27 @@ pub trait Dialect: Debug + Any { false } + /// Returns true if the dialects supports `GROUP BY` modifiers prefixed by a `WITH` keyword. + /// Example: `GROUP BY value WITH ROLLUP`. + fn supports_group_by_with_modifier(&self) -> bool { + false + } + + /// Returns true if the dialect supports the `(+)` syntax for OUTER JOIN. + fn supports_outer_join_operator(&self) -> bool { + false + } + /// Returns true if the dialect supports CONNECT BY. fn supports_connect_by(&self) -> bool { false } + /// Returns true if the dialect supports `EXECUTE IMMEDIATE` statements. 
+ fn supports_execute_immediate(&self) -> bool { + false + } + /// Returns true if the dialect supports the MATCH_RECOGNIZE operation. fn supports_match_recognize(&self) -> bool { false @@ -226,22 +303,55 @@ pub trait Dialect: Debug + Any { false } - /// Returns true if the dialect supports `BEGIN {DEFERRED | IMMEDIATE | EXCLUSIVE} [TRANSACTION]` statements + /// Returns true if the dialect supports `BEGIN {DEFERRED | IMMEDIATE | EXCLUSIVE | TRY | CATCH} [TRANSACTION]` statements fn supports_start_transaction_modifier(&self) -> bool { false } - /// Returns true if the dialect supports named arguments of the form FUN(a = '1', b = '2'). + /// Returns true if the dialect supports `END {TRY | CATCH}` statements + fn supports_end_transaction_modifier(&self) -> bool { + false + } + + /// Returns true if the dialect supports named arguments of the form `FUN(a = '1', b = '2')`. fn supports_named_fn_args_with_eq_operator(&self) -> bool { false } + /// Returns true if the dialect supports named arguments of the form `FUN(a : '1', b : '2')`. + fn supports_named_fn_args_with_colon_operator(&self) -> bool { + false + } + + /// Returns true if the dialect supports named arguments of the form `FUN(a := '1', b := '2')`. + fn supports_named_fn_args_with_assignment_operator(&self) -> bool { + false + } + + /// Returns true if the dialect supports named arguments of the form `FUN(a => '1', b => '2')`. + fn supports_named_fn_args_with_rarrow_operator(&self) -> bool { + true + } + + /// Returns true if dialect supports argument name as arbitrary expression. + /// e.g. `FUN(LOWER('a'):'1', b:'2')` + /// Such function arguments are represented in the AST by the `FunctionArg::ExprNamed` variant, + /// otherwise use the `FunctionArg::Named` variant (compatible reason). + fn supports_named_fn_args_with_expr_name(&self) -> bool { + false + } + /// Returns true if the dialect supports identifiers starting with a numeric /// prefix such as tables named `59901_user_login` fn supports_numeric_prefix(&self) -> bool { false } + /// Returns true if the dialect supports numbers containing underscores, e.g. `10_000_000` + fn supports_numeric_literal_underscores(&self) -> bool { + false + } + /// Returns true if the dialects supports specifying null treatment /// as part of a window function's parameter list as opposed /// to after the parameter list. @@ -289,6 +399,16 @@ pub trait Dialect: Debug + Any { false } + /// Returns true if the dialect supports multiple `SET` statements + /// in a single statement. + /// + /// ```sql + /// SET variable = expression [, variable = expression]; + /// ``` + fn supports_comma_separated_set_assignments(&self) -> bool { + false + } + /// Returns true if the dialect supports an `EXCEPT` clause following a /// wildcard in a select list. /// @@ -333,6 +453,95 @@ pub trait Dialect: Debug + Any { self.supports_trailing_commas() } + /// Returns true if the dialect supports trailing commas in the `FROM` clause of a `SELECT` statement. + /// Example: `SELECT 1 FROM T, U, LIMIT 1` + fn supports_from_trailing_commas(&self) -> bool { + false + } + + /// Returns true if the dialect supports trailing commas in the + /// column definitions list of a `CREATE` statement. 
+ /// Example: `CREATE TABLE T (x INT, y TEXT,)` + fn supports_column_definition_trailing_commas(&self) -> bool { + false + } + + /// Returns true if the dialect supports double dot notation for object names + /// + /// Example + /// ```sql + /// SELECT * FROM db_name..table_name + /// ``` + fn supports_object_name_double_dot_notation(&self) -> bool { + false + } + + /// Return true if the dialect supports the STRUCT literal + /// + /// Example + /// ```sql + /// SELECT STRUCT(1 as one, 'foo' as foo, false) + /// ``` + fn supports_struct_literal(&self) -> bool { + false + } + + /// Return true if the dialect supports empty projections in SELECT statements + /// + /// Example + /// ```sql + /// SELECT from table_name + /// ``` + fn supports_empty_projections(&self) -> bool { + false + } + + /// Return true if the dialect supports wildcard expansion on + /// arbitrary expressions in projections. + /// + /// Example: + /// ```sql + /// SELECT STRUCT('foo').* FROM T + /// ``` + fn supports_select_expr_star(&self) -> bool { + false + } + + /// Return true if the dialect supports "FROM-first" selects. + /// + /// Example: + /// ```sql + /// FROM table + /// SELECT * + /// ``` + fn supports_from_first_select(&self) -> bool { + false + } + + /// Return true if the dialect supports pipe operator. + /// + /// Example: + /// ```sql + /// SELECT * + /// FROM table + /// |> limit 1 + /// ``` + /// + /// See + fn supports_pipe_operator(&self) -> bool { + false + } + + /// Does the dialect support MySQL-style `'user'@'host'` grantee syntax? + fn supports_user_host_grantee(&self) -> bool { + false + } + + /// Does the dialect support the `MATCH() AGAINST()` syntax? + fn supports_match_against(&self) -> bool { + false + } + /// Dialect-specific infix parser override /// /// This method is called to parse the next infix expression. 
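Most of the `supports_*` methods documented above are opt-in capability flags with a `false` default; individual dialects override only the ones they need, as the dialect files later in this diff do. A hypothetical minimal dialect illustrating the pattern (a sketch only; `MyDialect` is not part of the patch):

```rust
use sqlparser::dialect::Dialect;

#[derive(Debug)]
struct MyDialect;

impl Dialect for MyDialect {
    // The two methods without default implementations.
    fn is_identifier_start(&self, ch: char) -> bool {
        ch.is_ascii_alphabetic() || ch == '_'
    }

    fn is_identifier_part(&self, ch: char) -> bool {
        ch.is_ascii_alphanumeric() || ch == '_'
    }

    // Opt in to capabilities that default to `false` on the trait.
    fn supports_struct_literal(&self) -> bool {
        true
    }

    fn supports_bang_not_operator(&self) -> bool {
        true
    }
}
```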
@@ -410,20 +619,25 @@ pub trait Dialect: Debug + Any { Token::Word(w) if w.keyword == Keyword::ILIKE => Ok(p!(Like)), Token::Word(w) if w.keyword == Keyword::RLIKE => Ok(p!(Like)), Token::Word(w) if w.keyword == Keyword::REGEXP => Ok(p!(Like)), + Token::Word(w) if w.keyword == Keyword::MATCH => Ok(p!(Like)), Token::Word(w) if w.keyword == Keyword::SIMILAR => Ok(p!(Like)), _ => Ok(self.prec_unknown()), }, Token::Word(w) if w.keyword == Keyword::IS => Ok(p!(Is)), Token::Word(w) if w.keyword == Keyword::IN => Ok(p!(Between)), Token::Word(w) if w.keyword == Keyword::BETWEEN => Ok(p!(Between)), + Token::Word(w) if w.keyword == Keyword::OVERLAPS => Ok(p!(Between)), Token::Word(w) if w.keyword == Keyword::LIKE => Ok(p!(Like)), Token::Word(w) if w.keyword == Keyword::ILIKE => Ok(p!(Like)), Token::Word(w) if w.keyword == Keyword::RLIKE => Ok(p!(Like)), Token::Word(w) if w.keyword == Keyword::REGEXP => Ok(p!(Like)), + Token::Word(w) if w.keyword == Keyword::MATCH => Ok(p!(Like)), Token::Word(w) if w.keyword == Keyword::SIMILAR => Ok(p!(Like)), Token::Word(w) if w.keyword == Keyword::OPERATOR => Ok(p!(Between)), Token::Word(w) if w.keyword == Keyword::DIV => Ok(p!(MulDivModOp)), - Token::Eq + Token::Period => Ok(p!(Period)), + Token::Assignment + | Token::Eq | Token::Lt | Token::LtEq | Token::Neq @@ -439,18 +653,34 @@ pub trait Dialect: Debug + Any { | Token::ExclamationMarkDoubleTilde | Token::ExclamationMarkDoubleTildeAsterisk | Token::Spaceship => Ok(p!(Eq)), - Token::Pipe => Ok(p!(Pipe)), + Token::Pipe + | Token::QuestionMarkDash + | Token::DoubleSharp + | Token::Overlap + | Token::AmpersandLeftAngleBracket + | Token::AmpersandRightAngleBracket + | Token::QuestionMarkDashVerticalBar + | Token::AmpersandLeftAngleBracketVerticalBar + | Token::VerticalBarAmpersandRightAngleBracket + | Token::TwoWayArrow + | Token::LeftAngleBracketCaret + | Token::RightAngleBracketCaret + | Token::QuestionMarkSharp + | Token::QuestionMarkDoubleVerticalBar + | Token::QuestionPipe + | Token::TildeEqual + | Token::AtSign + | Token::ShiftLeftVerticalBar + | Token::VerticalBarShiftRight => Ok(p!(Pipe)), Token::Caret | Token::Sharp | Token::ShiftRight | Token::ShiftLeft => Ok(p!(Caret)), Token::Ampersand => Ok(p!(Ampersand)), Token::Plus | Token::Minus => Ok(p!(PlusMinus)), Token::Mul | Token::Div | Token::DuckIntDiv | Token::Mod | Token::StringConcat => { Ok(p!(MulDivModOp)) } - Token::DoubleColon - | Token::ExclamationMark - | Token::LBracket - | Token::Overlap - | Token::CaretAt => Ok(p!(DoubleColon)), + Token::DoubleColon | Token::ExclamationMark | Token::LBracket | Token::CaretAt => { + Ok(p!(DoubleColon)) + } Token::Arrow | Token::LongArrow | Token::HashArrow @@ -462,7 +692,6 @@ pub trait Dialect: Debug + Any { | Token::AtAt | Token::Question | Token::QuestionAnd - | Token::QuestionPipe | Token::CustomBinaryOperator(_) => Ok(p!(PgOther)), _ => Ok(self.prec_unknown()), } @@ -496,6 +725,7 @@ pub trait Dialect: Debug + Any { /// Uses (APPROXIMATELY) as a reference fn prec_value(&self, prec: Precedence) -> u8 { match prec { + Precedence::Period => 100, Precedence::DoubleColon => 50, Precedence::AtTz => 41, Precedence::MulDivModOp => 40, @@ -540,6 +770,12 @@ pub trait Dialect: Debug + Any { false } + /// Returns true if this dialect allows dollar placeholders + /// e.g. `SELECT $var` (SQLite) + fn supports_dollar_placeholder(&self) -> bool { + false + } + /// Does the dialect support with clause in create index statement? /// e.g. 
`CREATE INDEX idx ON t WITH (key = value, key2)` fn supports_create_index_with_clause(&self) -> bool { @@ -575,6 +811,17 @@ pub trait Dialect: Debug + Any { false } + /// Returns true if the dialect supports `a!` expressions + fn supports_factorial_operator(&self) -> bool { + false + } + + /// Returns true if the dialect supports nested comments + /// e.g. `/* /* nested */ */` + fn supports_nested_comments(&self) -> bool { + false + } + /// Returns true if this dialect supports treating the equals operator `=` within a `SelectItem` /// as an alias assignment operator, rather than a boolean expression. /// For example: the following statements are equivalent for such a dialect: @@ -582,7 +829,7 @@ pub trait Dialect: Debug + Any { /// SELECT col_alias = col FROM tbl; /// SELECT col_alias AS col FROM tbl; /// ``` - fn supports_eq_alias_assigment(&self) -> bool { + fn supports_eq_alias_assignment(&self) -> bool { false } @@ -590,6 +837,192 @@ pub trait Dialect: Debug + Any { fn supports_try_convert(&self) -> bool { false } + + /// Returns true if the dialect supports `!a` syntax for boolean `NOT` expressions. + fn supports_bang_not_operator(&self) -> bool { + false + } + + /// Returns true if the dialect supports the `LISTEN`, `UNLISTEN` and `NOTIFY` statements + fn supports_listen_notify(&self) -> bool { + false + } + + /// Returns true if the dialect supports the `LOAD DATA` statement + fn supports_load_data(&self) -> bool { + false + } + + /// Returns true if the dialect supports the `LOAD extension` statement + fn supports_load_extension(&self) -> bool { + false + } + + /// Returns true if this dialect expects the `TOP` option + /// before the `ALL`/`DISTINCT` options in a `SELECT` statement. + fn supports_top_before_distinct(&self) -> bool { + false + } + + /// Returns true if the dialect supports boolean literals (`true` and `false`). + /// For example, in MSSQL these are treated as identifiers rather than boolean literals. + fn supports_boolean_literals(&self) -> bool { + true + } + + /// Returns true if this dialect supports the `LIKE 'pattern'` option in + /// a `SHOW` statement before the `IN` option + fn supports_show_like_before_in(&self) -> bool { + false + } + + /// Returns true if this dialect supports the `COMMENT` statement + fn supports_comment_on(&self) -> bool { + false + } + + /// Returns true if the dialect supports the `CREATE TABLE SELECT` statement + fn supports_create_table_select(&self) -> bool { + false + } + + /// Returns true if the dialect supports PartiQL for querying semi-structured data + /// + fn supports_partiql(&self) -> bool { + false + } + + /// Returns true if the specified keyword is reserved and cannot be + /// used as an identifier without special handling like quoting. + fn is_reserved_for_identifier(&self, kw: Keyword) -> bool { + keywords::RESERVED_FOR_IDENTIFIER.contains(&kw) + } + + /// Returns reserved keywords when looking to parse a `TableFactor`. + /// See [Self::supports_from_trailing_commas] + fn get_reserved_keywords_for_table_factor(&self) -> &[Keyword] { + keywords::RESERVED_FOR_TABLE_FACTOR + } + + /// Returns reserved keywords that may prefix a select item expression + /// e.g. `SELECT CONNECT_BY_ROOT name FROM Tbl2` (Snowflake) + fn get_reserved_keywords_for_select_item_operator(&self) -> &[Keyword] { + &[] + } + + /// Returns true if this dialect supports the `TABLESAMPLE` option + /// before the table alias option. 
For example: + /// + /// Table sample before alias: `SELECT * FROM tbl AS t TABLESAMPLE (10)` + /// Table sample after alias: `SELECT * FROM tbl TABLESAMPLE (10) AS t` + /// + /// + fn supports_table_sample_before_alias(&self) -> bool { + false + } + + /// Returns true if this dialect supports the `INSERT INTO ... SET col1 = 1, ...` syntax. + /// + /// MySQL: + fn supports_insert_set(&self) -> bool { + false + } + + /// Does the dialect support table function in insertion? + fn supports_insert_table_function(&self) -> bool { + false + } + + /// Does the dialect support insert formats, e.g. `INSERT INTO ... FORMAT ` + fn supports_insert_format(&self) -> bool { + false + } + + /// Returns true if this dialect supports `SET` statements without an explicit + /// assignment operator such as `=`. For example: `SET SHOWPLAN_XML ON`. + fn supports_set_stmt_without_operator(&self) -> bool { + false + } + + /// Returns true if the specified keyword should be parsed as a column identifier. + /// See [keywords::RESERVED_FOR_COLUMN_ALIAS] + fn is_column_alias(&self, kw: &Keyword, _parser: &mut Parser) -> bool { + !keywords::RESERVED_FOR_COLUMN_ALIAS.contains(kw) + } + + /// Returns true if the specified keyword should be parsed as a select item alias. + /// When explicit is true, the keyword is preceded by an `AS` word. Parser is provided + /// to enable looking ahead if needed. + fn is_select_item_alias(&self, explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool { + explicit || self.is_column_alias(kw, parser) + } + + /// Returns true if the specified keyword should be parsed as a table factor alias. + /// When explicit is true, the keyword is preceded by an `AS` word. Parser is provided + /// to enable looking ahead if needed. + fn is_table_factor_alias(&self, explicit: bool, kw: &Keyword, _parser: &mut Parser) -> bool { + explicit || !keywords::RESERVED_FOR_TABLE_ALIAS.contains(kw) + } + + /// Returns true if this dialect supports querying historical table data + /// by specifying which version of the data to query. + fn supports_timestamp_versioning(&self) -> bool { + false + } + + /// Returns true if this dialect supports the E'...' syntax for string literals + /// + /// Postgres: + fn supports_string_escape_constant(&self) -> bool { + false + } + + /// Returns true if the dialect supports the table hints in the `FROM` clause. + fn supports_table_hints(&self) -> bool { + false + } + + /// Returns true if this dialect requires a whitespace character after `--` to start a single line comment. + /// + /// MySQL: + /// e.g. UPDATE account SET balance=balance--1 + // WHERE account_id=5752 ^^^ will be interpreted as two minus signs instead of a comment + fn requires_single_line_comment_whitespace(&self) -> bool { + false + } + + /// Returns true if the dialect supports array type definition with brackets with + /// an optional size. For example: + /// ```CREATE TABLE my_table (arr1 INT[], arr2 INT[3])``` + /// ```SELECT x::INT[]``` + fn supports_array_typedef_with_brackets(&self) -> bool { + false + } + /// Returns true if the dialect supports geometric types. + /// + /// Postgres: + /// e.g. @@ circle '((0,0),10)' + fn supports_geometric_types(&self) -> bool { + false + } + + /// Returns true if the dialect supports `ORDER BY ALL`. + /// `ALL` which means all columns of the SELECT clause. + /// + /// For example: ```SELECT * FROM addresses ORDER BY ALL;```. + fn supports_order_by_all(&self) -> bool { + false + } + + /// Returns true if the dialect supports `SET NAMES [COLLATE ]`. 
+ /// + /// - [MySQL](https://dev.mysql.com/doc/refman/8.4/en/set-names.html) + /// - [PostgreSQL](https://www.postgresql.org/docs/17/sql-set.html) + /// + /// Note: Postgres doesn't support the `COLLATE` clause, but we permissively parse it anyway. + fn supports_set_names(&self) -> bool { + false + } } /// This represents the operators for which precedence must be defined @@ -597,6 +1030,7 @@ pub trait Dialect: Debug + Any { /// higher number -> higher precedence #[derive(Debug, Clone, Copy)] pub enum Precedence { + Period, DoubleColon, AtTz, MulDivModOp, @@ -741,6 +1175,17 @@ mod tests { self.0.is_delimited_identifier_start(ch) } + fn is_nested_delimited_identifier_start(&self, ch: char) -> bool { + self.0.is_nested_delimited_identifier_start(ch) + } + + fn peek_nested_delimited_identifier_quotes( + &self, + chars: std::iter::Peekable>, + ) -> Option<(char, Option)> { + self.0.peek_nested_delimited_identifier_quotes(chars) + } + fn identifier_quote_style(&self, identifier: &str) -> Option { self.0.identifier_quote_style(identifier) } @@ -749,13 +1194,6 @@ mod tests { self.0.supports_string_literal_backslash_escape() } - fn is_proper_identifier_inside_quotes( - &self, - chars: std::iter::Peekable>, - ) -> bool { - self.0.is_proper_identifier_inside_quotes(chars) - } - fn supports_filter_during_aggregation(&self) -> bool { self.0.supports_filter_during_aggregation() } diff --git a/src/dialect/mssql.rs b/src/dialect/mssql.rs index 78ec621ed..647e82a2a 100644 --- a/src/dialect/mssql.rs +++ b/src/dialect/mssql.rs @@ -15,7 +15,19 @@ // specific language governing permissions and limitations // under the License. +use crate::ast::helpers::attached_token::AttachedToken; +use crate::ast::{ + BeginEndStatements, ConditionalStatementBlock, ConditionalStatements, IfStatement, Statement, + TriggerObject, +}; use crate::dialect::Dialect; +use crate::keywords::{self, Keyword}; +use crate::parser::{Parser, ParserError}; +use crate::tokenizer::Token; +#[cfg(not(feature = "std"))] +use alloc::{vec, vec::Vec}; + +const RESERVED_FOR_COLUMN_ALIAS: &[Keyword] = &[Keyword::IF, Keyword::ELSE]; /// A [`Dialect`] for [Microsoft SQL Server](https://www.microsoft.com/en-us/sql-server/) #[derive(Debug)] @@ -46,15 +58,232 @@ impl Dialect for MsSqlDialect { true } + fn supports_outer_join_operator(&self) -> bool { + true + } + fn supports_connect_by(&self) -> bool { true } - fn supports_eq_alias_assigment(&self) -> bool { + fn supports_eq_alias_assignment(&self) -> bool { true } fn supports_try_convert(&self) -> bool { true } + + /// In MSSQL, there is no boolean type, and `true` and `false` are valid column names + fn supports_boolean_literals(&self) -> bool { + false + } + + fn supports_named_fn_args_with_colon_operator(&self) -> bool { + true + } + + fn supports_named_fn_args_with_expr_name(&self) -> bool { + true + } + + fn supports_named_fn_args_with_rarrow_operator(&self) -> bool { + false + } + + fn supports_start_transaction_modifier(&self) -> bool { + true + } + + fn supports_end_transaction_modifier(&self) -> bool { + true + } + + /// See: + fn supports_set_stmt_without_operator(&self) -> bool { + true + } + + /// See: + fn supports_timestamp_versioning(&self) -> bool { + true + } + + /// See + fn supports_nested_comments(&self) -> bool { + true + } + + /// See + fn supports_object_name_double_dot_notation(&self) -> bool { + true + } + + fn is_column_alias(&self, kw: &Keyword, _parser: &mut Parser) -> bool { + !keywords::RESERVED_FOR_COLUMN_ALIAS.contains(kw) && !RESERVED_FOR_COLUMN_ALIAS.contains(kw) + } + 
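The capability hooks added to `Dialect` above are all default methods, so existing dialects keep their behavior and downstream crates opt in selectively. A minimal sketch of that usage (not part of this patch; `MyDialect` and the sample SQL are hypothetical), assuming only the two required identifier methods plus two of the new hooks:

```rust
use sqlparser::dialect::Dialect;
use sqlparser::parser::Parser;

/// Hypothetical dialect that opts in to two of the new hooks.
#[derive(Debug)]
struct MyDialect;

impl Dialect for MyDialect {
    // The only methods without default implementations.
    fn is_identifier_start(&self, ch: char) -> bool {
        ch.is_ascii_alphabetic() || ch == '_'
    }

    fn is_identifier_part(&self, ch: char) -> bool {
        ch.is_ascii_alphanumeric() || ch == '_'
    }

    // Accept `!expr` as a boolean NOT.
    fn supports_bang_not_operator(&self) -> bool {
        true
    }

    // Accept `/* outer /* inner */ */` comments.
    fn supports_nested_comments(&self) -> bool {
        true
    }
}

fn main() {
    let sql = "SELECT !is_done /* note /* nested */ ok */ FROM tasks";
    let statements = Parser::parse_sql(&MyDialect, sql).unwrap();
    println!("{statements:?}");
}
```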
+ fn parse_statement(&self, parser: &mut Parser) -> Option> { + if parser.peek_keyword(Keyword::IF) { + Some(self.parse_if_stmt(parser)) + } else if parser.parse_keywords(&[Keyword::CREATE, Keyword::TRIGGER]) { + Some(self.parse_create_trigger(parser, false)) + } else if parser.parse_keywords(&[ + Keyword::CREATE, + Keyword::OR, + Keyword::ALTER, + Keyword::TRIGGER, + ]) { + Some(self.parse_create_trigger(parser, true)) + } else { + None + } + } +} + +impl MsSqlDialect { + /// ```sql + /// IF boolean_expression + /// { sql_statement | statement_block } + /// [ ELSE + /// { sql_statement | statement_block } ] + /// ``` + fn parse_if_stmt(&self, parser: &mut Parser) -> Result { + let if_token = parser.expect_keyword(Keyword::IF)?; + + let condition = parser.parse_expr()?; + + let if_block = if parser.peek_keyword(Keyword::BEGIN) { + let begin_token = parser.expect_keyword(Keyword::BEGIN)?; + let statements = self.parse_statement_list(parser, Some(Keyword::END))?; + let end_token = parser.expect_keyword(Keyword::END)?; + ConditionalStatementBlock { + start_token: AttachedToken(if_token), + condition: Some(condition), + then_token: None, + conditional_statements: ConditionalStatements::BeginEnd(BeginEndStatements { + begin_token: AttachedToken(begin_token), + statements, + end_token: AttachedToken(end_token), + }), + } + } else { + let stmt = parser.parse_statement()?; + ConditionalStatementBlock { + start_token: AttachedToken(if_token), + condition: Some(condition), + then_token: None, + conditional_statements: ConditionalStatements::Sequence { + statements: vec![stmt], + }, + } + }; + + let mut prior_statement_ended_with_semi_colon = false; + while let Token::SemiColon = parser.peek_token_ref().token { + parser.advance_token(); + prior_statement_ended_with_semi_colon = true; + } + + let mut else_block = None; + if parser.peek_keyword(Keyword::ELSE) { + let else_token = parser.expect_keyword(Keyword::ELSE)?; + if parser.peek_keyword(Keyword::BEGIN) { + let begin_token = parser.expect_keyword(Keyword::BEGIN)?; + let statements = self.parse_statement_list(parser, Some(Keyword::END))?; + let end_token = parser.expect_keyword(Keyword::END)?; + else_block = Some(ConditionalStatementBlock { + start_token: AttachedToken(else_token), + condition: None, + then_token: None, + conditional_statements: ConditionalStatements::BeginEnd(BeginEndStatements { + begin_token: AttachedToken(begin_token), + statements, + end_token: AttachedToken(end_token), + }), + }); + } else { + let stmt = parser.parse_statement()?; + else_block = Some(ConditionalStatementBlock { + start_token: AttachedToken(else_token), + condition: None, + then_token: None, + conditional_statements: ConditionalStatements::Sequence { + statements: vec![stmt], + }, + }); + } + } else if prior_statement_ended_with_semi_colon { + parser.prev_token(); + } + + Ok(Statement::If(IfStatement { + if_block, + else_block, + elseif_blocks: Vec::new(), + end_token: None, + })) + } + + /// Parse `CREATE TRIGGER` for [MsSql] + /// + /// [MsSql]: https://learn.microsoft.com/en-us/sql/t-sql/statements/create-trigger-transact-sql + fn parse_create_trigger( + &self, + parser: &mut Parser, + or_alter: bool, + ) -> Result { + let name = parser.parse_object_name(false)?; + parser.expect_keyword_is(Keyword::ON)?; + let table_name = parser.parse_object_name(false)?; + let period = parser.parse_trigger_period()?; + let events = parser.parse_comma_separated(Parser::parse_trigger_event)?; + + parser.expect_keyword_is(Keyword::AS)?; + let statements = 
Some(parser.parse_conditional_statements(&[Keyword::END])?); + + Ok(Statement::CreateTrigger { + or_alter, + or_replace: false, + is_constraint: false, + name, + period, + events, + table_name, + referenced_table_name: None, + referencing: Vec::new(), + trigger_object: TriggerObject::Statement, + include_each: false, + condition: None, + exec_body: None, + statements, + characteristics: None, + }) + } + + /// Parse a sequence of statements, optionally separated by semicolon. + /// + /// Stops parsing when reaching EOF or the given keyword. + fn parse_statement_list( + &self, + parser: &mut Parser, + terminal_keyword: Option, + ) -> Result, ParserError> { + let mut stmts = Vec::new(); + loop { + if let Token::EOF = parser.peek_token_ref().token { + break; + } + if let Some(term) = terminal_keyword { + if parser.peek_keyword(term) { + break; + } + } + stmts.push(parser.parse_statement()?); + while let Token::SemiColon = parser.peek_token_ref().token { + parser.advance_token(); + } + } + Ok(stmts) + } } diff --git a/src/dialect/mysql.rs b/src/dialect/mysql.rs index d1bf33345..f69e42436 100644 --- a/src/dialect/mysql.rs +++ b/src/dialect/mysql.rs @@ -25,6 +25,15 @@ use crate::{ parser::{Parser, ParserError}, }; +use super::keywords; + +const RESERVED_FOR_TABLE_ALIAS_MYSQL: &[Keyword] = &[ + Keyword::USE, + Keyword::IGNORE, + Keyword::FORCE, + Keyword::STRAIGHT_JOIN, +]; + /// A [`Dialect`] for [MySQL](https://www.mysql.com/) #[derive(Debug)] pub struct MySqlDialect {} @@ -58,6 +67,10 @@ impl Dialect for MySqlDialect { true } + fn ignores_wildcard_escapes(&self) -> bool { + true + } + fn supports_numeric_prefix(&self) -> bool { true } @@ -97,6 +110,46 @@ impl Dialect for MySqlDialect { fn supports_limit_comma(&self) -> bool { true } + + /// See: + fn supports_create_table_select(&self) -> bool { + true + } + + /// See: + fn supports_insert_set(&self) -> bool { + true + } + + fn supports_user_host_grantee(&self) -> bool { + true + } + + fn is_table_factor_alias(&self, explicit: bool, kw: &Keyword, _parser: &mut Parser) -> bool { + explicit + || (!keywords::RESERVED_FOR_TABLE_ALIAS.contains(kw) + && !RESERVED_FOR_TABLE_ALIAS_MYSQL.contains(kw)) + } + + fn supports_table_hints(&self) -> bool { + true + } + + fn requires_single_line_comment_whitespace(&self) -> bool { + true + } + + fn supports_match_against(&self) -> bool { + true + } + + fn supports_set_names(&self) -> bool { + true + } + + fn supports_comma_separated_set_assignments(&self) -> bool { + true + } } /// `LOCK TABLES` @@ -108,7 +161,7 @@ fn parse_lock_tables(parser: &mut Parser) -> Result { // tbl_name [[AS] alias] lock_type fn parse_lock_table(parser: &mut Parser) -> Result { - let table = parser.parse_identifier(false)?; + let table = parser.parse_identifier()?; let alias = parser.parse_optional_alias(&[Keyword::READ, Keyword::WRITE, Keyword::LOW_PRIORITY])?; let lock_type = parse_lock_tables_type(parser)?; diff --git a/src/dialect/postgresql.rs b/src/dialect/postgresql.rs index 51dc49849..9b08b8f32 100644 --- a/src/dialect/postgresql.rs +++ b/src/dialect/postgresql.rs @@ -28,7 +28,6 @@ // limitations under the License. 
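Two of the MySQL behaviors enabled above, shown end to end (a sketch, not part of this patch; the statements are taken from the doc comments in this changeset):

```rust
use sqlparser::dialect::MySqlDialect;
use sqlparser::parser::Parser;

fn main() {
    // Accepted because `supports_insert_set()` now returns true for MySQL.
    let insert = "INSERT INTO t SET a = 1, b = 2";
    Parser::parse_sql(&MySqlDialect {}, insert).unwrap();

    // With `requires_single_line_comment_whitespace()` returning true, `--1` is
    // tokenized as two minus signs rather than the start of a comment.
    let update = "UPDATE account SET balance = balance--1 WHERE account_id = 5752";
    Parser::parse_sql(&MySqlDialect {}, update).unwrap();
}
```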
use log::debug; -use crate::ast::{CommentObject, ObjectName, Statement, UserDefinedTypeRepresentation}; use crate::dialect::{Dialect, Precedence}; use crate::keywords::Keyword; use crate::parser::{Parser, ParserError}; @@ -38,6 +37,7 @@ use crate::tokenizer::Token; #[derive(Debug)] pub struct PostgreSqlDialect {} +const PERIOD_PREC: u8 = 200; const DOUBLE_COLON_PREC: u8 = 140; const BRACKET_PREC: u8 = 130; const COLLATE_PREC: u8 = 120; @@ -135,17 +135,6 @@ impl Dialect for PostgreSqlDialect { } } - fn parse_statement(&self, parser: &mut Parser) -> Option> { - if parser.parse_keyword(Keyword::COMMENT) { - Some(parse_comment(parser)) - } else if parser.parse_keyword(Keyword::CREATE) { - parser.prev_token(); // unconsume the CREATE in case we don't end up parsing anything - parse_create(parser) - } else { - None - } - } - fn supports_filter_during_aggregation(&self) -> bool { true } @@ -156,6 +145,7 @@ impl Dialect for PostgreSqlDialect { fn prec_value(&self, prec: Precedence) -> u8 { match prec { + Precedence::Period => PERIOD_PREC, Precedence::DoubleColon => DOUBLE_COLON_PREC, Precedence::AtTz => AT_TZ_PREC, Precedence::MulDivModOp => MUL_DIV_MOD_OP_PREC, @@ -191,70 +181,81 @@ impl Dialect for PostgreSqlDialect { fn supports_explain_with_utility_options(&self) -> bool { true } -} -pub fn parse_comment(parser: &mut Parser) -> Result { - let if_exists = parser.parse_keywords(&[Keyword::IF, Keyword::EXISTS]); + /// see + /// see + /// see + fn supports_listen_notify(&self) -> bool { + true + } - parser.expect_keyword(Keyword::ON)?; - let token = parser.next_token(); + /// see + fn supports_factorial_operator(&self) -> bool { + true + } - let (object_type, object_name) = match token.token { - Token::Word(w) if w.keyword == Keyword::COLUMN => { - let object_name = parser.parse_object_name(false)?; - (CommentObject::Column, object_name) - } - Token::Word(w) if w.keyword == Keyword::TABLE => { - let object_name = parser.parse_object_name(false)?; - (CommentObject::Table, object_name) - } - Token::Word(w) if w.keyword == Keyword::EXTENSION => { - let object_name = parser.parse_object_name(false)?; - (CommentObject::Extension, object_name) - } - _ => parser.expected("comment object_type", token)?, - }; - - parser.expect_keyword(Keyword::IS)?; - let comment = if parser.parse_keyword(Keyword::NULL) { - None - } else { - Some(parser.parse_literal_string()?) 
- }; - Ok(Statement::Comment { - object_type, - object_name, - comment, - if_exists, - }) -} + /// see + fn supports_comment_on(&self) -> bool { + true + } -pub fn parse_create(parser: &mut Parser) -> Option> { - let name = parser.maybe_parse(|parser| -> Result { - parser.expect_keyword(Keyword::CREATE)?; - parser.expect_keyword(Keyword::TYPE)?; - let name = parser.parse_object_name(false)?; - parser.expect_keyword(Keyword::AS)?; - parser.expect_keyword(Keyword::ENUM)?; - Ok(name) - }); - name.map(|name| parse_create_type_as_enum(parser, name)) -} + /// See + fn supports_load_extension(&self) -> bool { + true + } + + /// See + /// + /// Required to support the colon in: + /// ```sql + /// SELECT json_object('a': 'b') + /// ``` + fn supports_named_fn_args_with_colon_operator(&self) -> bool { + true + } -// https://www.postgresql.org/docs/current/sql-createtype.html -pub fn parse_create_type_as_enum( - parser: &mut Parser, - name: ObjectName, -) -> Result { - if !parser.consume_token(&Token::LParen) { - return parser.expected("'(' after CREATE TYPE AS ENUM", parser.peek_token()); + /// See + /// + /// Required to support the label in: + /// ```sql + /// SELECT json_object('label': 'value') + /// ``` + fn supports_named_fn_args_with_expr_name(&self) -> bool { + true + } + + /// Return true if the dialect supports empty projections in SELECT statements + /// + /// Example + /// ```sql + /// SELECT from table_name + /// ``` + fn supports_empty_projections(&self) -> bool { + true + } + + fn supports_nested_comments(&self) -> bool { + true + } + + fn supports_string_escape_constant(&self) -> bool { + true } - let labels = parser.parse_comma_separated0(|p| p.parse_identifier(false), Token::RParen)?; - parser.expect_token(&Token::RParen)?; + fn supports_numeric_literal_underscores(&self) -> bool { + true + } + + /// See: + fn supports_array_typedef_with_brackets(&self) -> bool { + true + } - Ok(Statement::CreateType { - name, - representation: UserDefinedTypeRepresentation::Enum { labels }, - }) + fn supports_geometric_types(&self) -> bool { + true + } + + fn supports_set_names(&self) -> bool { + true + } } diff --git a/src/dialect/redshift.rs b/src/dialect/redshift.rs index 3bfdec3b0..feccca5dd 100644 --- a/src/dialect/redshift.rs +++ b/src/dialect/redshift.rs @@ -32,21 +32,51 @@ pub struct RedshiftSqlDialect {} // in the Postgres dialect, the query will be parsed as an array, while in the Redshift dialect it will // be a json path impl Dialect for RedshiftSqlDialect { - fn is_delimited_identifier_start(&self, ch: char) -> bool { - ch == '"' || ch == '[' + /// Determine if a character starts a potential nested quoted identifier. + /// Example: RedShift supports the following quote styles to all mean the same thing: + /// ```sql + /// SELECT 1 AS foo; + /// SELECT 1 AS "foo"; + /// SELECT 1 AS [foo]; + /// SELECT 1 AS ["foo"]; + /// ``` + fn is_nested_delimited_identifier_start(&self, ch: char) -> bool { + ch == '[' } - /// Determine if quoted characters are proper for identifier - /// It's needed to distinguish treating square brackets as quotes from - /// treating them as json path. If there is identifier then we assume - /// there is no json path. 
- fn is_proper_identifier_inside_quotes(&self, mut chars: Peekable>) -> bool { + /// Only applicable whenever [`Self::is_nested_delimited_identifier_start`] returns true + /// If the next sequence of tokens potentially represent a nested identifier, then this method + /// returns a tuple containing the outer quote style, and if present, the inner (nested) quote style. + /// + /// Example (Redshift): + /// ```text + /// `["foo"]` => Some(`[`, Some(`"`)) + /// `[foo]` => Some(`[`, None) + /// `[0]` => None + /// `"foo"` => None + /// ``` + fn peek_nested_delimited_identifier_quotes( + &self, + mut chars: Peekable>, + ) -> Option<(char, Option)> { + if chars.peek() != Some(&'[') { + return None; + } + chars.next(); + let mut not_white_chars = chars.skip_while(|ch| ch.is_whitespace()).peekable(); + if let Some(&ch) = not_white_chars.peek() { - return self.is_identifier_start(ch); + if ch == '"' { + return Some(('[', Some('"'))); + } + if self.is_identifier_start(ch) { + return Some(('[', None)); + } } - false + + None } fn is_identifier_start(&self, ch: char) -> bool { @@ -68,4 +98,35 @@ impl Dialect for RedshiftSqlDialect { fn supports_connect_by(&self) -> bool { true } + + /// Redshift expects the `TOP` option before the `ALL/DISTINCT` option: + /// + fn supports_top_before_distinct(&self) -> bool { + true + } + + /// Redshift supports PartiQL: + fn supports_partiql(&self) -> bool { + true + } + + fn supports_string_escape_constant(&self) -> bool { + true + } + + fn supports_geometric_types(&self) -> bool { + true + } + + fn supports_array_typedef_with_brackets(&self) -> bool { + true + } + + fn allow_extract_single_quotes(&self) -> bool { + true + } + + fn supports_string_literal_backslash_escape(&self) -> bool { + true + } } diff --git a/src/dialect/snowflake.rs b/src/dialect/snowflake.rs index d9331d952..ccce16198 100644 --- a/src/dialect/snowflake.rs +++ b/src/dialect/snowflake.rs @@ -17,20 +17,23 @@ #[cfg(not(feature = "std"))] use crate::alloc::string::ToString; +use crate::ast::helpers::key_value_options::{KeyValueOption, KeyValueOptionType, KeyValueOptions}; use crate::ast::helpers::stmt_create_table::CreateTableBuilder; use crate::ast::helpers::stmt_data_loading::{ - DataLoadingOption, DataLoadingOptionType, DataLoadingOptions, StageLoadSelectItem, - StageParamsObject, + FileStagingCommand, StageLoadSelectItem, StageLoadSelectItemKind, StageParamsObject, }; use crate::ast::{ - ColumnOption, ColumnPolicy, ColumnPolicyProperty, Ident, IdentityParameters, IdentityProperty, - IdentityPropertyFormatKind, IdentityPropertyKind, IdentityPropertyOrder, ObjectName, - RowAccessPolicy, Statement, TagsColumnOption, WrappedCollection, + ColumnOption, ColumnPolicy, ColumnPolicyProperty, CopyIntoSnowflakeKind, Ident, + IdentityParameters, IdentityProperty, IdentityPropertyFormatKind, IdentityPropertyKind, + IdentityPropertyOrder, ObjectName, RowAccessPolicy, ShowObjects, SqlOption, Statement, + TagsColumnOption, WrappedCollection, }; use crate::dialect::{Dialect, Precedence}; use crate::keywords::Keyword; -use crate::parser::{Parser, ParserError}; -use crate::tokenizer::Token; +use crate::parser::{IsOptional, Parser, ParserError}; +use crate::tokenizer::{Token, Word}; +#[cfg(not(feature = "std"))] +use alloc::boxed::Box; #[cfg(not(feature = "std"))] use alloc::string::String; #[cfg(not(feature = "std"))] @@ -38,6 +41,10 @@ use alloc::vec::Vec; #[cfg(not(feature = "std"))] use alloc::{format, vec}; +use super::keywords::RESERVED_FOR_IDENTIFIER; +use sqlparser::ast::StorageSerializationPolicy; + 
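The four quoting styles that `RedshiftSqlDialect` now treats as equivalent through `is_nested_delimited_identifier_start` and `peek_nested_delimited_identifier_quotes`, exercised in a small sketch (not part of this patch):

```rust
use sqlparser::dialect::RedshiftSqlDialect;
use sqlparser::parser::Parser;

fn main() {
    for sql in [
        r#"SELECT 1 AS foo"#,
        r#"SELECT 1 AS "foo""#,
        r#"SELECT 1 AS [foo]"#,
        r#"SELECT 1 AS ["foo"]"#,
    ] {
        // Each spelling should yield one statement with a single aliased projection item.
        let statements = Parser::parse_sql(&RedshiftSqlDialect {}, sql).unwrap();
        assert_eq!(statements.len(), 1);
    }
}
```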
+const RESERVED_KEYWORDS_FOR_SELECT_ITEM_OPERATOR: [Keyword; 1] = [Keyword::CONNECT_BY_ROOT]; /// A [`Dialect`] for [Snowflake](https://www.snowflake.com/) #[derive(Debug, Default)] pub struct SnowflakeDialect; @@ -52,6 +59,18 @@ impl Dialect for SnowflakeDialect { true } + fn supports_from_trailing_commas(&self) -> bool { + true + } + + // Snowflake supports double-dot notation when the schema name is not specified + // In this case the default PUBLIC schema is used + // + // see https://docs.snowflake.com/en/sql-reference/name-resolution#resolution-when-schema-omitted-double-dot-notation + fn supports_object_name_double_dot_notation(&self) -> bool { + true + } + fn is_identifier_part(&self, ch: char) -> bool { ch.is_ascii_lowercase() || ch.is_ascii_uppercase() @@ -69,10 +88,20 @@ impl Dialect for SnowflakeDialect { true } + /// See + fn supports_outer_join_operator(&self) -> bool { + true + } + fn supports_connect_by(&self) -> bool { true } + /// See + fn supports_execute_immediate(&self) -> bool { + true + } + fn supports_match_recognize(&self) -> bool { true } @@ -96,7 +125,22 @@ impl Dialect for SnowflakeDialect { true } + /// See [doc](https://docs.snowflake.com/en/sql-reference/sql/comment) + fn supports_comment_on(&self) -> bool { + true + } + fn parse_statement(&self, parser: &mut Parser) -> Option> { + if parser.parse_keywords(&[Keyword::ALTER, Keyword::SESSION]) { + // ALTER SESSION + let set = match parser.parse_one_of_keywords(&[Keyword::SET, Keyword::UNSET]) { + Some(Keyword::SET) => true, + Some(Keyword::UNSET) => false, + _ => return Some(parser.expected("SET or UNSET", parser.peek_token())), + }; + return Some(parse_alter_session(parser, set)); + } + if parser.parse_keyword(Keyword::CREATE) { // possibly CREATE STAGE //[ OR REPLACE ] @@ -111,16 +155,19 @@ impl Dialect for SnowflakeDialect { let mut temporary = false; let mut volatile = false; let mut transient = false; + let mut iceberg = false; match parser.parse_one_of_keywords(&[ Keyword::TEMP, Keyword::TEMPORARY, Keyword::VOLATILE, Keyword::TRANSIENT, + Keyword::ICEBERG, ]) { Some(Keyword::TEMP | Keyword::TEMPORARY) => temporary = true, Some(Keyword::VOLATILE) => volatile = true, Some(Keyword::TRANSIENT) => transient = true, + Some(Keyword::ICEBERG) => iceberg = true, _ => {} } @@ -129,7 +176,7 @@ impl Dialect for SnowflakeDialect { return Some(parse_create_stage(or_replace, temporary, parser)); } else if parser.parse_keyword(Keyword::TABLE) { return Some(parse_create_table( - or_replace, global, temporary, volatile, transient, parser, + or_replace, global, temporary, volatile, transient, iceberg, parser, )); } else { // need to go back with the cursor @@ -150,6 +197,28 @@ impl Dialect for SnowflakeDialect { return Some(parse_copy_into(parser)); } + if let Some(kw) = parser.parse_one_of_keywords(&[ + Keyword::LIST, + Keyword::LS, + Keyword::REMOVE, + Keyword::RM, + ]) { + return Some(parse_file_staging_command(kw, parser)); + } + + if parser.parse_keyword(Keyword::SHOW) { + let terse = parser.parse_keyword(Keyword::TERSE); + if parser.parse_keyword(Keyword::OBJECTS) { + return Some(parse_show_objects(terse, parser)); + } + //Give back Keyword::TERSE + if terse { + parser.prev_token(); + } + //Give back Keyword::SHOW + parser.prev_token(); + } + None } @@ -203,16 +272,130 @@ impl Dialect for SnowflakeDialect { fn allow_extract_single_quotes(&self) -> bool { true } + + /// Snowflake expects the `LIKE` option before the `IN` option, + /// for example: + fn supports_show_like_before_in(&self) -> bool { + true + } + + fn 
is_reserved_for_identifier(&self, kw: Keyword) -> bool { + // Unreserve some keywords that Snowflake accepts as identifiers + // See: https://docs.snowflake.com/en/sql-reference/reserved-keywords + if matches!(kw, Keyword::INTERVAL) { + false + } else { + RESERVED_FOR_IDENTIFIER.contains(&kw) + } + } + + fn supports_partiql(&self) -> bool { + true + } + + fn is_select_item_alias(&self, explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool { + explicit + || match kw { + // The following keywords can be considered an alias as long as + // they are not followed by other tokens that may change their meaning + // e.g. `SELECT * EXCEPT (col1) FROM tbl` + Keyword::EXCEPT + // e.g. `SELECT 1 LIMIT 5` + | Keyword::LIMIT + // e.g. `SELECT 1 OFFSET 5 ROWS` + | Keyword::OFFSET + // e.g. `INSERT INTO t SELECT 1 RETURNING *` + | Keyword::RETURNING if !matches!(parser.peek_token_ref().token, Token::Comma | Token::EOF) => + { + false + } + + // `FETCH` can be considered an alias as long as it's not followed by `FIRST`` or `NEXT` + // which would give it a different meanings, for example: `SELECT 1 FETCH FIRST 10 ROWS` - not an alias + Keyword::FETCH + if parser.peek_keyword(Keyword::FIRST) || parser.peek_keyword(Keyword::NEXT) => + { + false + } + + // Reserved keywords by the Snowflake dialect, which seem to be less strictive + // than what is listed in `keywords::RESERVED_FOR_COLUMN_ALIAS`. The following + // keywords were tested with the this statement: `SELECT 1 `. + Keyword::FROM + | Keyword::GROUP + | Keyword::HAVING + | Keyword::INTERSECT + | Keyword::INTO + | Keyword::MINUS + | Keyword::ORDER + | Keyword::SELECT + | Keyword::UNION + | Keyword::WHERE + | Keyword::WITH => false, + + // Any other word is considered an alias + _ => true, + } + } + + /// See: + fn supports_timestamp_versioning(&self) -> bool { + true + } + + /// See: + fn supports_group_by_expr(&self) -> bool { + true + } + + /// See: + fn get_reserved_keywords_for_select_item_operator(&self) -> &[Keyword] { + &RESERVED_KEYWORDS_FOR_SELECT_ITEM_OPERATOR + } +} + +fn parse_file_staging_command(kw: Keyword, parser: &mut Parser) -> Result { + let stage = parse_snowflake_stage_name(parser)?; + let pattern = if parser.parse_keyword(Keyword::PATTERN) { + parser.expect_token(&Token::Eq)?; + Some(parser.parse_literal_string()?) + } else { + None + }; + + match kw { + Keyword::LIST | Keyword::LS => Ok(Statement::List(FileStagingCommand { stage, pattern })), + Keyword::REMOVE | Keyword::RM => { + Ok(Statement::Remove(FileStagingCommand { stage, pattern })) + } + _ => Err(ParserError::ParserError( + "unexpected stage command, expecting LIST, LS, REMOVE or RM".to_string(), + )), + } +} + +/// Parse snowflake alter session. +/// +fn parse_alter_session(parser: &mut Parser, set: bool) -> Result { + let session_options = parse_session_options(parser, set)?; + Ok(Statement::AlterSession { + set, + session_params: KeyValueOptions { + options: session_options, + }, + }) } /// Parse snowflake create table statement. 
/// +/// pub fn parse_create_table( or_replace: bool, global: Option, temporary: bool, volatile: bool, transient: bool, + iceberg: bool, parser: &mut Parser, ) -> Result { let if_not_exists = parser.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]); @@ -224,6 +407,7 @@ pub fn parse_create_table( .temporary(temporary) .transient(transient) .volatile(volatile) + .iceberg(iceberg) .global(global) .hive_formats(Some(Default::default())); @@ -233,18 +417,22 @@ pub fn parse_create_table( // "CREATE TABLE x COPY GRANTS (c INT)" and "CREATE TABLE x (c INT) COPY GRANTS" are both // accepted by Snowflake + let mut plain_options = vec![]; + loop { let next_token = parser.next_token(); match &next_token.token { Token::Word(word) => match word.keyword { Keyword::COPY => { - parser.expect_keyword(Keyword::GRANTS)?; + parser.expect_keyword_is(Keyword::GRANTS)?; builder = builder.copy_grants(true); } Keyword::COMMENT => { // Rewind the COMMENT keyword parser.prev_token(); - builder = builder.comment(parser.parse_optional_inline_comment()?); + if let Some(comment_def) = parser.parse_optional_inline_comment()? { + plain_options.push(SqlOption::Comment(comment_def)) + } } Keyword::AS => { let query = parser.parse_query()?; @@ -262,10 +450,10 @@ pub fn parse_create_table( break; } Keyword::CLUSTER => { - parser.expect_keyword(Keyword::BY)?; + parser.expect_keyword_is(Keyword::BY)?; parser.expect_token(&Token::LParen)?; let cluster_by = Some(WrappedCollection::Parentheses( - parser.parse_comma_separated(|p| p.parse_identifier(false))?, + parser.parse_comma_separated(|p| p.parse_identifier())?, )); parser.expect_token(&Token::RParen)?; @@ -325,16 +513,16 @@ pub fn parse_create_table( parser.prev_token(); } Keyword::AGGREGATION => { - parser.expect_keyword(Keyword::POLICY)?; + parser.expect_keyword_is(Keyword::POLICY)?; let aggregation_policy = parser.parse_object_name(false)?; builder = builder.with_aggregation_policy(Some(aggregation_policy)); } Keyword::ROW => { parser.expect_keywords(&[Keyword::ACCESS, Keyword::POLICY])?; let policy = parser.parse_object_name(false)?; - parser.expect_keyword(Keyword::ON)?; + parser.expect_keyword_is(Keyword::ON)?; parser.expect_token(&Token::LParen)?; - let columns = parser.parse_comma_separated(|p| p.parse_identifier(false))?; + let columns = parser.parse_comma_separated(|p| p.parse_identifier())?; parser.expect_token(&Token::RParen)?; builder = @@ -346,6 +534,32 @@ pub fn parse_create_table( parser.expect_token(&Token::RParen)?; builder = builder.with_tags(Some(tags)); } + Keyword::ON if parser.parse_keyword(Keyword::COMMIT) => { + let on_commit = Some(parser.parse_create_table_on_commit()?); + builder = builder.on_commit(on_commit); + } + Keyword::EXTERNAL_VOLUME => { + parser.expect_token(&Token::Eq)?; + builder.external_volume = Some(parser.parse_literal_string()?); + } + Keyword::CATALOG => { + parser.expect_token(&Token::Eq)?; + builder.catalog = Some(parser.parse_literal_string()?); + } + Keyword::BASE_LOCATION => { + parser.expect_token(&Token::Eq)?; + builder.base_location = Some(parser.parse_literal_string()?); + } + Keyword::CATALOG_SYNC => { + parser.expect_token(&Token::Eq)?; + builder.catalog_sync = Some(parser.parse_literal_string()?); + } + Keyword::STORAGE_SERIALIZATION_POLICY => { + parser.expect_token(&Token::Eq)?; + + builder.storage_serialization_policy = + Some(parse_storage_serialization_policy(parser)?); + } _ => { return parser.expected("end of statement", next_token); } @@ -379,10 +593,37 @@ pub fn parse_create_table( } } } + let 
table_options = if !plain_options.is_empty() { + crate::ast::CreateTableOptions::Plain(plain_options) + } else { + crate::ast::CreateTableOptions::None + }; + + builder = builder.table_options(table_options); + + if iceberg && builder.base_location.is_none() { + return Err(ParserError::ParserError( + "BASE_LOCATION is required for ICEBERG tables".to_string(), + )); + } Ok(builder.build()) } +pub fn parse_storage_serialization_policy( + parser: &mut Parser, +) -> Result { + let next_token = parser.next_token(); + match &next_token.token { + Token::Word(w) => match w.keyword { + Keyword::COMPATIBLE => Ok(StorageSerializationPolicy::Compatible), + Keyword::OPTIMIZED => Ok(StorageSerializationPolicy::Optimized), + _ => parser.expected("storage_serialization_policy", next_token), + }, + _ => parser.expected("storage_serialization_policy", next_token), + } +} + pub fn parse_create_stage( or_replace: bool, temporary: bool, @@ -420,10 +661,7 @@ pub fn parse_create_stage( // [ comment ] if parser.parse_keyword(Keyword::COMMENT) { parser.expect_token(&Token::Eq)?; - comment = Some(match parser.next_token().token { - Token::SingleQuotedString(word) => Ok(word), - _ => parser.expected("a comment statement", parser.peek_token()), - }?) + comment = Some(parser.parse_comment_value()?); } Ok(Statement::CreateStage { @@ -432,13 +670,13 @@ pub fn parse_create_stage( if_not_exists, name, stage_params, - directory_table_params: DataLoadingOptions { + directory_table_params: KeyValueOptions { options: directory_table_params, }, - file_format: DataLoadingOptions { + file_format: KeyValueOptions { options: file_format, }, - copy_options: DataLoadingOptions { + copy_options: KeyValueOptions { options: copy_options, }, comment, @@ -449,7 +687,7 @@ pub fn parse_stage_name_identifier(parser: &mut Parser) -> Result break, + Token::Whitespace(_) | Token::SemiColon => break, Token::Period => { parser.prev_token(); break; @@ -462,7 +700,7 @@ pub fn parse_stage_name_identifier(parser: &mut Parser) -> Result ident.push('~'), Token::Mod => ident.push('%'), Token::Div => ident.push('/'), - Token::Word(w) => ident.push_str(&w.value), + Token::Word(w) => ident.push_str(&w.to_string()), _ => return parser.expected("stage name identifier", parser.peek_token()), } } @@ -480,7 +718,7 @@ pub fn parse_snowflake_stage_name(parser: &mut Parser) -> Result { parser.prev_token(); @@ -489,40 +727,70 @@ pub fn parse_snowflake_stage_name(parser: &mut Parser) -> Result` +/// and `COPY INTO ` which have different syntax. pub fn parse_copy_into(parser: &mut Parser) -> Result { - let into: ObjectName = parse_snowflake_stage_name(parser)?; + let kind = match parser.peek_token().token { + // Indicates an internal stage + Token::AtSign => CopyIntoSnowflakeKind::Location, + // Indicates an external stage, i.e. 
s3://, gcs:// or azure:// + Token::SingleQuotedString(s) if s.contains("://") => CopyIntoSnowflakeKind::Location, + _ => CopyIntoSnowflakeKind::Table, + }; + let mut files: Vec = vec![]; - let mut from_transformations: Option> = None; - let from_stage_alias; - let from_stage: ObjectName; - let stage_params: StageParamsObject; + let mut from_transformations: Option> = None; + let mut from_stage_alias = None; + let mut from_stage = None; + let mut stage_params = StageParamsObject { + url: None, + encryption: KeyValueOptions { options: vec![] }, + endpoint: None, + storage_integration: None, + credentials: KeyValueOptions { options: vec![] }, + }; + let mut from_query = None; + let mut partition = None; + let mut file_format = Vec::new(); + let mut pattern = None; + let mut validation_mode = None; + let mut copy_options = Vec::new(); - parser.expect_keyword(Keyword::FROM)?; - // check if data load transformations are present + let into: ObjectName = parse_snowflake_stage_name(parser)?; + if kind == CopyIntoSnowflakeKind::Location { + stage_params = parse_stage_params(parser)?; + } + + let into_columns = match &parser.peek_token().token { + Token::LParen => Some(parser.parse_parenthesized_column_list(IsOptional::Optional, true)?), + _ => None, + }; + + parser.expect_keyword_is(Keyword::FROM)?; match parser.next_token().token { - Token::LParen => { - // data load with transformations - parser.expect_keyword(Keyword::SELECT)?; + Token::LParen if kind == CopyIntoSnowflakeKind::Table => { + // Data load with transformations + parser.expect_keyword_is(Keyword::SELECT)?; from_transformations = parse_select_items_for_data_load(parser)?; - parser.expect_keyword(Keyword::FROM)?; - from_stage = parse_snowflake_stage_name(parser)?; + parser.expect_keyword_is(Keyword::FROM)?; + from_stage = Some(parse_snowflake_stage_name(parser)?); stage_params = parse_stage_params(parser)?; - // as - from_stage_alias = if parser.parse_keyword(Keyword::AS) { - Some(match parser.next_token().token { - Token::Word(w) => Ok(Ident::new(w.value)), - _ => parser.expected("stage alias", parser.peek_token()), - }?) - } else { - None - }; + // Parse an optional alias + from_stage_alias = parser + .maybe_parse_table_alias()? 
+ .map(|table_alias| table_alias.name); + parser.expect_token(&Token::RParen)?; + } + Token::LParen if kind == CopyIntoSnowflakeKind::Location => { + // Data unload with a query + from_query = Some(parser.parse_query()?); parser.expect_token(&Token::RParen)?; } _ => { parser.prev_token(); - from_stage = parse_snowflake_stage_name(parser)?; + from_stage = Some(parse_snowflake_stage_name(parser)?); stage_params = parse_stage_params(parser)?; // as @@ -535,167 +803,180 @@ pub fn parse_copy_into(parser: &mut Parser) -> Result { None }; } - }; + } - // [ files ] - if parser.parse_keyword(Keyword::FILES) { - parser.expect_token(&Token::Eq)?; - parser.expect_token(&Token::LParen)?; - let mut continue_loop = true; - while continue_loop { - continue_loop = false; + loop { + // FILE_FORMAT + if parser.parse_keyword(Keyword::FILE_FORMAT) { + parser.expect_token(&Token::Eq)?; + file_format = parse_parentheses_options(parser)?; + // PARTITION BY + } else if parser.parse_keywords(&[Keyword::PARTITION, Keyword::BY]) { + partition = Some(Box::new(parser.parse_expr()?)) + // FILES + } else if parser.parse_keyword(Keyword::FILES) { + parser.expect_token(&Token::Eq)?; + parser.expect_token(&Token::LParen)?; + let mut continue_loop = true; + while continue_loop { + continue_loop = false; + let next_token = parser.next_token(); + match next_token.token { + Token::SingleQuotedString(s) => files.push(s), + _ => parser.expected("file token", next_token)?, + }; + if parser.next_token().token.eq(&Token::Comma) { + continue_loop = true; + } else { + parser.prev_token(); // not a comma, need to go back + } + } + parser.expect_token(&Token::RParen)?; + // PATTERN + } else if parser.parse_keyword(Keyword::PATTERN) { + parser.expect_token(&Token::Eq)?; let next_token = parser.next_token(); - match next_token.token { - Token::SingleQuotedString(s) => files.push(s), - _ => parser.expected("file token", next_token)?, - }; - if parser.next_token().token.eq(&Token::Comma) { - continue_loop = true; - } else { - parser.prev_token(); // not a comma, need to go back + pattern = Some(match next_token.token { + Token::SingleQuotedString(s) => s, + _ => parser.expected("pattern", next_token)?, + }); + // VALIDATION MODE + } else if parser.parse_keyword(Keyword::VALIDATION_MODE) { + parser.expect_token(&Token::Eq)?; + validation_mode = Some(parser.next_token().token.to_string()); + // COPY OPTIONS + } else if parser.parse_keyword(Keyword::COPY_OPTIONS) { + parser.expect_token(&Token::Eq)?; + copy_options = parse_parentheses_options(parser)?; + } else { + match parser.next_token().token { + Token::SemiColon | Token::EOF => break, + Token::Comma => continue, + // In `COPY INTO ` the copy options do not have a shared key + // like in `COPY INTO
` + Token::Word(key) => copy_options.push(parse_option(parser, key)?), + _ => return parser.expected("another copy option, ; or EOF'", parser.peek_token()), } } - parser.expect_token(&Token::RParen)?; - } - - // [ pattern ] - let mut pattern = None; - if parser.parse_keyword(Keyword::PATTERN) { - parser.expect_token(&Token::Eq)?; - let next_token = parser.next_token(); - pattern = Some(match next_token.token { - Token::SingleQuotedString(s) => s, - _ => parser.expected("pattern", next_token)?, - }); - } - - // [ file_format] - let mut file_format = Vec::new(); - if parser.parse_keyword(Keyword::FILE_FORMAT) { - parser.expect_token(&Token::Eq)?; - file_format = parse_parentheses_options(parser)?; - } - - // [ copy_options ] - let mut copy_options = Vec::new(); - if parser.parse_keyword(Keyword::COPY_OPTIONS) { - parser.expect_token(&Token::Eq)?; - copy_options = parse_parentheses_options(parser)?; - } - - // [ VALIDATION_MODE ] - let mut validation_mode = None; - if parser.parse_keyword(Keyword::VALIDATION_MODE) { - parser.expect_token(&Token::Eq)?; - validation_mode = Some(parser.next_token().token.to_string()); } Ok(Statement::CopyIntoSnowflake { + kind, into, - from_stage, - from_stage_alias, + into_columns, + from_obj: from_stage, + from_obj_alias: from_stage_alias, stage_params, from_transformations, + from_query, files: if files.is_empty() { None } else { Some(files) }, pattern, - file_format: DataLoadingOptions { + file_format: KeyValueOptions { options: file_format, }, - copy_options: DataLoadingOptions { + copy_options: KeyValueOptions { options: copy_options, }, validation_mode, + partition, }) } fn parse_select_items_for_data_load( parser: &mut Parser, -) -> Result>, ParserError> { - // [.]$[.] [ , [.]$[.] ... ] - let mut select_items: Vec = vec![]; +) -> Result>, ParserError> { + let mut select_items: Vec = vec![]; loop { - let mut alias: Option = None; - let mut file_col_num: i32 = 0; - let mut element: Option = None; - let mut item_as: Option = None; + match parser.maybe_parse(parse_select_item_for_data_load)? { + // [.]$[.] [ , [.]$[.] ... 
] + Some(item) => select_items.push(StageLoadSelectItemKind::StageLoadSelectItem(item)), + // Fallback, try to parse a standard SQL select item + None => select_items.push(StageLoadSelectItemKind::SelectItem( + parser.parse_select_item()?, + )), + } + if matches!(parser.peek_token_ref().token, Token::Comma) { + parser.advance_token(); + } else { + break; + } + } + Ok(Some(select_items)) +} - let next_token = parser.next_token(); - match next_token.token { +fn parse_select_item_for_data_load( + parser: &mut Parser, +) -> Result { + let mut alias: Option = None; + let mut file_col_num: i32 = 0; + let mut element: Option = None; + let mut item_as: Option = None; + + let next_token = parser.next_token(); + match next_token.token { + Token::Placeholder(w) => { + file_col_num = w.to_string().split_off(1).parse::().map_err(|e| { + ParserError::ParserError(format!("Could not parse '{w}' as i32: {e}")) + })?; + Ok(()) + } + Token::Word(w) => { + alias = Some(Ident::new(w.value)); + Ok(()) + } + _ => parser.expected("alias or file_col_num", next_token), + }?; + + if alias.is_some() { + parser.expect_token(&Token::Period)?; + // now we get col_num token + let col_num_token = parser.next_token(); + match col_num_token.token { Token::Placeholder(w) => { file_col_num = w.to_string().split_off(1).parse::().map_err(|e| { ParserError::ParserError(format!("Could not parse '{w}' as i32: {e}")) })?; Ok(()) } - Token::Word(w) => { - alias = Some(Ident::new(w.value)); - Ok(()) - } - _ => parser.expected("alias or file_col_num", next_token), + _ => parser.expected("file_col_num", col_num_token), }?; + } - if alias.is_some() { - parser.expect_token(&Token::Period)?; - // now we get col_num token - let col_num_token = parser.next_token(); - match col_num_token.token { - Token::Placeholder(w) => { - file_col_num = w.to_string().split_off(1).parse::().map_err(|e| { - ParserError::ParserError(format!("Could not parse '{w}' as i32: {e}")) - })?; - Ok(()) - } - _ => parser.expected("file_col_num", col_num_token), - }?; - } - - // try extracting optional element - match parser.next_token().token { - Token::Colon => { - // parse element - element = Some(Ident::new(match parser.next_token().token { - Token::Word(w) => Ok(w.value), - _ => parser.expected("file_col_num", parser.peek_token()), - }?)); - } - _ => { - // element not present move back - parser.prev_token(); - } + // try extracting optional element + match parser.next_token().token { + Token::Colon => { + // parse element + element = Some(Ident::new(match parser.next_token().token { + Token::Word(w) => Ok(w.value), + _ => parser.expected("file_col_num", parser.peek_token()), + }?)); } - - // as - if parser.parse_keyword(Keyword::AS) { - item_as = Some(match parser.next_token().token { - Token::Word(w) => Ok(Ident::new(w.value)), - _ => parser.expected("column item alias", parser.peek_token()), - }?); + _ => { + // element not present move back + parser.prev_token(); } + } - select_items.push(StageLoadSelectItem { - alias, - file_col_num, - element, - item_as, - }); - - match parser.next_token().token { - Token::Comma => { - // continue - } - _ => { - parser.prev_token(); // need to move back - break; - } - } + // as + if parser.parse_keyword(Keyword::AS) { + item_as = Some(match parser.next_token().token { + Token::Word(w) => Ok(Ident::new(w.value)), + _ => parser.expected("column item alias", parser.peek_token()), + }?); } - Ok(Some(select_items)) + + Ok(StageLoadSelectItem { + alias, + file_col_num, + element, + item_as, + }) } fn parse_stage_params(parser: 
&mut Parser) -> Result { let (mut url, mut storage_integration, mut endpoint) = (None, None, None); - let mut encryption: DataLoadingOptions = DataLoadingOptions { options: vec![] }; - let mut credentials: DataLoadingOptions = DataLoadingOptions { options: vec![] }; + let mut encryption: KeyValueOptions = KeyValueOptions { options: vec![] }; + let mut credentials: KeyValueOptions = KeyValueOptions { options: vec![] }; // URL if parser.parse_keyword(Keyword::URL) { @@ -724,7 +1005,7 @@ fn parse_stage_params(parser: &mut Parser) -> Result Result Result Result, ParserError> { + let mut options: Vec = Vec::new(); + let empty = String::new; + loop { + let next_token = parser.peek_token(); + match next_token.token { + Token::SemiColon | Token::EOF => break, + Token::Comma => { + parser.advance_token(); + continue; + } + Token::Word(key) => { + parser.advance_token(); + if set { + let option = parse_option(parser, key)?; + options.push(option); + } else { + options.push(KeyValueOption { + option_name: key.value, + option_type: KeyValueOptionType::STRING, + value: empty(), + }); + } + } + _ => { + return parser.expected("another option or end of statement", next_token); + } + } + } + if options.is_empty() { + Err(ParserError::ParserError( + "expected at least one option".to_string(), + )) + } else { + Ok(options) + } +} + /// Parses options provided within parentheses like: /// ( ENABLE = { TRUE | FALSE } /// [ AUTO_REFRESH = { TRUE | FALSE } ] /// [ REFRESH_ON_CREATE = { TRUE | FALSE } ] /// [ NOTIFICATION_INTEGRATION = '' ] ) /// -fn parse_parentheses_options(parser: &mut Parser) -> Result, ParserError> { - let mut options: Vec = Vec::new(); - +fn parse_parentheses_options(parser: &mut Parser) -> Result, ParserError> { + let mut options: Vec = Vec::new(); parser.expect_token(&Token::LParen)?; loop { match parser.next_token().token { Token::RParen => break, - Token::Word(key) => { - parser.expect_token(&Token::Eq)?; - if parser.parse_keyword(Keyword::TRUE) { - options.push(DataLoadingOption { - option_name: key.value, - option_type: DataLoadingOptionType::BOOLEAN, - value: "TRUE".to_string(), - }); - Ok(()) - } else if parser.parse_keyword(Keyword::FALSE) { - options.push(DataLoadingOption { - option_name: key.value, - option_type: DataLoadingOptionType::BOOLEAN, - value: "FALSE".to_string(), - }); - Ok(()) - } else { - match parser.next_token().token { - Token::SingleQuotedString(value) => { - options.push(DataLoadingOption { - option_name: key.value, - option_type: DataLoadingOptionType::STRING, - value, - }); - Ok(()) - } - Token::Word(word) => { - options.push(DataLoadingOption { - option_name: key.value, - option_type: DataLoadingOptionType::ENUM, - value: word.value, - }); - Ok(()) - } - _ => parser.expected("expected option value", parser.peek_token()), - } - } - } - _ => parser.expected("another option or ')'", parser.peek_token()), - }?; + Token::Comma => continue, + Token::Word(key) => options.push(parse_option(parser, key)?), + _ => return parser.expected("another option or ')'", parser.peek_token()), + }; } Ok(options) } +/// Parses a `KEY = VALUE` construct based on the specified key +fn parse_option(parser: &mut Parser, key: Word) -> Result { + parser.expect_token(&Token::Eq)?; + if parser.parse_keyword(Keyword::TRUE) { + Ok(KeyValueOption { + option_name: key.value, + option_type: KeyValueOptionType::BOOLEAN, + value: "TRUE".to_string(), + }) + } else if parser.parse_keyword(Keyword::FALSE) { + Ok(KeyValueOption { + option_name: key.value, + option_type: 
KeyValueOptionType::BOOLEAN, + value: "FALSE".to_string(), + }) + } else { + match parser.next_token().token { + Token::SingleQuotedString(value) => Ok(KeyValueOption { + option_name: key.value, + option_type: KeyValueOptionType::STRING, + value, + }), + Token::Word(word) => Ok(KeyValueOption { + option_name: key.value, + option_type: KeyValueOptionType::ENUM, + value: word.value, + }), + Token::Number(n, _) => Ok(KeyValueOption { + option_name: key.value, + option_type: KeyValueOptionType::NUMBER, + value: n, + }), + _ => parser.expected("expected option value", parser.peek_token()), + } + } +} + /// Parsing a property of identity or autoincrement column option /// Syntax: /// ```sql @@ -821,7 +1147,7 @@ fn parse_identity_property(parser: &mut Parser) -> Result Result { - let policy_name = parser.parse_identifier(false)?; + let policy_name = parser.parse_identifier()?; let using_columns = if parser.parse_keyword(Keyword::USING) { parser.expect_token(&Token::LParen)?; - let columns = parser.parse_comma_separated(|p| p.parse_identifier(false))?; + let columns = parser.parse_comma_separated(|p| p.parse_identifier())?; parser.expect_token(&Token::RParen)?; Some(columns) } else { @@ -878,3 +1204,13 @@ fn parse_column_tags(parser: &mut Parser, with: bool) -> Result +fn parse_show_objects(terse: bool, parser: &mut Parser) -> Result { + let show_options = parser.parse_show_stmt_options()?; + Ok(Statement::ShowObjects(ShowObjects { + terse, + show_options, + })) +} diff --git a/src/dialect/sqlite.rs b/src/dialect/sqlite.rs index 95717f9fd..847e0d135 100644 --- a/src/dialect/sqlite.rs +++ b/src/dialect/sqlite.rs @@ -15,7 +15,11 @@ // specific language governing permissions and limitations // under the License. -use crate::ast::Statement; +#[cfg(not(feature = "std"))] +use alloc::boxed::Box; + +use crate::ast::BinaryOperator; +use crate::ast::{Expr, Statement}; use crate::dialect::Dialect; use crate::keywords::Keyword; use crate::parser::{Parser, ParserError}; @@ -70,6 +74,27 @@ impl Dialect for SQLiteDialect { } } + fn parse_infix( + &self, + parser: &mut crate::parser::Parser, + expr: &crate::ast::Expr, + _precedence: u8, + ) -> Option> { + // Parse MATCH and REGEXP as operators + // See + for (keyword, op) in [ + (Keyword::REGEXP, BinaryOperator::Regexp), + (Keyword::MATCH, BinaryOperator::Match), + ] { + if parser.parse_keyword(keyword) { + let left = Box::new(expr.clone()); + let right = Box::new(parser.parse_expr().unwrap()); + return Some(Ok(Expr::BinaryOp { left, op, right })); + } + } + None + } + fn supports_in_empty_list(&self) -> bool { true } @@ -81,4 +106,8 @@ impl Dialect for SQLiteDialect { fn supports_asc_desc_in_column_definition(&self) -> bool { true } + + fn supports_dollar_placeholder(&self) -> bool { + true + } } diff --git a/src/keywords.rs b/src/keywords.rs index 6182ae176..ddb786650 100644 --- a/src/keywords.rs +++ b/src/keywords.rs @@ -18,14 +18,14 @@ //! This module defines //! 1) a list of constants for every keyword //! 2) an `ALL_KEYWORDS` array with every keyword in it -//! This is not a list of *reserved* keywords: some of these can be -//! parsed as identifiers if the parser decides so. This means that -//! new keywords can be added here without affecting the parse result. +//! This is not a list of *reserved* keywords: some of these can be +//! parsed as identifiers if the parser decides so. This means that +//! new keywords can be added here without affecting the parse result. //! -//! As a matter of fact, most of these keywords are not used at all -//! 
and could be removed. +//! As a matter of fact, most of these keywords are not used at all +//! and could be removed. //! 3) a `RESERVED_FOR_TABLE_ALIAS` array with keywords reserved in a -//! "table alias" context. +//! "table alias" context. #[cfg(feature = "serde")] use serde::{Deserialize, Serialize}; @@ -74,14 +74,19 @@ macro_rules! define_keywords { define_keywords!( ABORT, ABS, + ABSENT, ABSOLUTE, ACCESS, + ACCOUNT, ACTION, ADD, ADMIN, AFTER, AGAINST, + AGGREGATE, AGGREGATION, + ALERT, + ALGORITHM, ALIAS, ALL, ALLOCATE, @@ -91,7 +96,9 @@ define_keywords!( AND, ANTI, ANY, + APPLICATION, APPLY, + APPLYBUDGET, ARCHIVE, ARE, ARRAY, @@ -105,31 +112,44 @@ define_keywords!( AT, ATOMIC, ATTACH, + AUDIT, + AUTHENTICATION, AUTHORIZATION, AUTO, + AUTOEXTEND_SIZE, AUTOINCREMENT, AUTO_INCREMENT, AVG, + AVG_ROW_LENGTH, AVRO, BACKWARD, BASE64, + BASE_LOCATION, BEFORE, BEGIN, BEGIN_FRAME, BEGIN_PARTITION, + BERNOULLI, BETWEEN, BIGDECIMAL, BIGINT, BIGNUMERIC, BINARY, + BIND, BINDING, + BIT, BLOB, + BLOCK, + BLOOM, BLOOMFILTER, BOOL, BOOLEAN, BOTH, + BOX, + BRIN, BROWSE, BTREE, + BUCKET, BUCKETS, BY, BYPASSRLS, @@ -142,8 +162,11 @@ define_keywords!( CASCADE, CASCADED, CASE, + CASES, CAST, CATALOG, + CATALOG_SYNC, + CATCH, CEIL, CEILING, CENTURY, @@ -153,17 +176,21 @@ define_keywords!( CHANNEL, CHAR, CHARACTER, + CHARACTERISTICS, CHARACTERS, CHARACTER_LENGTH, CHARSET, CHAR_LENGTH, CHECK, + CHECKSUM, + CIRCLE, CLEAR, CLOB, CLONE, CLOSE, CLUSTER, CLUSTERED, + CLUSTERING, COALESCE, COLLATE, COLLATION, @@ -175,6 +202,7 @@ define_keywords!( COMMENT, COMMIT, COMMITTED, + COMPATIBLE, COMPRESSION, COMPUTE, CONCURRENTLY, @@ -182,6 +210,8 @@ define_keywords!( CONFLICT, CONNECT, CONNECTION, + CONNECTOR, + CONNECT_BY_ROOT, CONSTRAINT, CONTAINS, CONTINUE, @@ -217,6 +247,7 @@ define_keywords!( CYCLE, DATA, DATABASE, + DATABASES, DATA_RETENTION_TIME_IN_DAYS, DATE, DATE32, @@ -225,6 +256,8 @@ define_keywords!( DAY, DAYOFWEEK, DAYOFYEAR, + DAYS, + DCPROPERTIES, DEALLOCATE, DEC, DECADE, @@ -237,7 +270,9 @@ define_keywords!( DEFERRED, DEFINE, DEFINED, + DEFINER, DELAYED, + DELAY_KEY_WRITE, DELETE, DELIMITED, DELIMITER, @@ -257,6 +292,7 @@ define_keywords!( DISTRIBUTE, DIV, DO, + DOMAIN, DOUBLE, DOW, DOY, @@ -268,6 +304,7 @@ define_keywords!( ELEMENT, ELEMENTS, ELSE, + ELSEIF, EMPTY, ENABLE, ENABLE_SCHEMA_EVOLUTION, @@ -280,32 +317,42 @@ define_keywords!( END_PARTITION, ENFORCED, ENGINE, + ENGINE_ATTRIBUTE, ENUM, + ENUM16, + ENUM8, EPHEMERAL, EPOCH, EQUALS, ERROR, ESCAPE, ESCAPED, + ESTIMATE, EVENT, EVERY, + EVOLVE, EXCEPT, EXCEPTION, + EXCHANGE, EXCLUDE, EXCLUSIVE, EXEC, EXECUTE, + EXECUTION, EXISTS, EXP, EXPANSION, EXPLAIN, EXPLICIT, EXPORT, + EXTEND, EXTENDED, EXTENSION, EXTERNAL, + EXTERNAL_VOLUME, EXTRACT, FAIL, + FAILOVER, FALSE, FETCH, FIELDS, @@ -325,6 +372,7 @@ define_keywords!( FLOAT8, FLOOR, FLUSH, + FN, FOLLOWING, FOR, FORCE, @@ -340,6 +388,7 @@ define_keywords!( FREEZE, FROM, FSCK, + FULFILLMENT, FULL, FULLTEXT, FUNCTION, @@ -350,6 +399,8 @@ define_keywords!( GENERATED, GEOGRAPHY, GET, + GIN, + GIST, GLOBAL, GRANT, GRANTED, @@ -369,13 +420,18 @@ define_keywords!( HOSTS, HOUR, HOURS, + HUGEINT, + ICEBERG, ID, IDENTITY, + IDENTITY_INSERT, IF, IGNORE, ILIKE, IMMEDIATE, IMMUTABLE, + IMPORT, + IMPORTED, IN, INCLUDE, INCLUDE_NULL_VALUES, @@ -383,14 +439,19 @@ define_keywords!( INDEX, INDICATOR, INHERIT, + INHERITS, INITIALLY, INNER, INOUT, + INPATH, + INPLACE, INPUT, INPUTFORMAT, INSENSITIVE, INSERT, + INSERT_METHOD, INSTALL, + INSTANT, INSTEAD, INT, INT128, @@ -402,11 +463,14 @@ define_keywords!( INT64, INT8, 
INTEGER, + INTEGRATION, INTERPOLATE, INTERSECT, INTERSECTION, INTERVAL, INTO, + INVOKER, + IO, IS, ISODOW, ISOLATION, @@ -422,6 +486,7 @@ define_keywords!( JULIAN, KEY, KEYS, + KEY_BLOCK_SIZE, KILL, LAG, LANGUAGE, @@ -436,7 +501,11 @@ define_keywords!( LIKE, LIKE_REGEX, LIMIT, + LINE, LINES, + LIST, + LISTEN, + LISTING, LN, LOAD, LOCAL, @@ -445,12 +514,19 @@ define_keywords!( LOCATION, LOCK, LOCKED, + LOG, LOGIN, LOGS, + LONGBLOB, + LONGTEXT, LOWCARDINALITY, LOWER, LOW_PRIORITY, + LS, + LSEG, MACRO, + MANAGE, + MANAGED, MANAGEDLOCATION, MAP, MASKING, @@ -464,12 +540,17 @@ define_keywords!( MAX, MAXVALUE, MAX_DATA_EXTENSION_TIME_IN_DAYS, + MAX_ROWS, MEASURES, + MEDIUMBLOB, MEDIUMINT, + MEDIUMTEXT, MEMBER, MERGE, + MESSAGE, METADATA, METHOD, + METRIC, MICROSECOND, MICROSECONDS, MILLENIUM, @@ -477,18 +558,24 @@ define_keywords!( MILLISECOND, MILLISECONDS, MIN, + MINUS, MINUTE, + MINUTES, MINVALUE, + MIN_ROWS, MOD, MODE, MODIFIES, MODIFY, MODULE, + MONITOR, MONTH, + MONTHS, MSCK, MULTISET, MUTATION, NAME, + NAMES, NANOSECOND, NANOSECONDS, NATIONAL, @@ -496,8 +583,13 @@ define_keywords!( NCHAR, NCLOB, NESTED, + NETWORK, NEW, NEXT, + NFC, + NFD, + NFKC, + NFKD, NO, NOBYPASSRLS, NOCREATEDB, @@ -508,10 +600,12 @@ define_keywords!( NOORDER, NOREPLICATION, NORMALIZE, + NORMALIZED, NOSCAN, NOSUPERUSER, NOT, NOTHING, + NOTIFY, NOWAIT, NO_WRITE_TO_BINLOG, NTH_VALUE, @@ -523,19 +617,26 @@ define_keywords!( NUMERIC, NVARCHAR, OBJECT, + OBJECTS, OCCURRENCES_REGEX, OCTETS, OCTET_LENGTH, OF, + OFF, OFFSET, + OFFSETS, OLD, OMIT, ON, ONE, ONLY, OPEN, + OPENJSON, + OPERATE, OPERATOR, + OPTIMIZATION, OPTIMIZE, + OPTIMIZED, OPTIMIZER_COSTS, OPTION, OPTIONS, @@ -543,16 +644,23 @@ define_keywords!( ORC, ORDER, ORDINALITY, + ORGANIZATION, OUT, OUTER, + OUTPUT, OUTPUTFORMAT, OVER, OVERFLOW, OVERLAPS, OVERLAY, + OVERRIDE, OVERWRITE, OWNED, OWNER, + OWNERSHIP, + PACKAGE, + PACKAGES, + PACK_KEYS, PARALLEL, PARAMETER, PARQUET, @@ -560,6 +668,7 @@ define_keywords!( PARTITION, PARTITIONED, PARTITIONS, + PASSING, PASSWORD, PAST, PATH, @@ -576,7 +685,10 @@ define_keywords!( PLACING, PLAN, PLANS, + POINT, POLICY, + POLYGON, + POOL, PORTION, POSITION, POSITION_REGEX, @@ -589,16 +701,22 @@ define_keywords!( PRESERVE, PREWHERE, PRIMARY, + PRINT, PRIOR, PRIVILEGES, PROCEDURE, + PROFILE, PROGRAM, PROJECTION, + PUBLIC, + PURCHASE, PURGE, QUALIFY, QUARTER, QUERY, QUOTE, + RAISE, + RAISERROR, RANGE, RANK, RAW, @@ -607,6 +725,7 @@ define_keywords!( READS, READ_ONLY, REAL, + RECLUSTER, RECURSIVE, REF, REFERENCES, @@ -625,22 +744,29 @@ define_keywords!( RELATIVE, RELAY, RELEASE, + RELEASES, REMOTE, + REMOVE, RENAME, REORG, REPAIR, REPEATABLE, REPLACE, REPLICA, + REPLICATE, REPLICATION, RESET, + RESOLVE, + RESOURCE, RESPECT, RESTART, RESTRICT, RESTRICTED, + RESTRICTIONS, RESTRICTIVE, RESULT, RESULTSET, + RESUME, RETAIN, RETURN, RETURNING, @@ -648,26 +774,35 @@ define_keywords!( REVOKE, RIGHT, RLIKE, + RM, ROLE, + ROLES, ROLLBACK, ROLLUP, ROOT, ROW, ROWID, ROWS, + ROW_FORMAT, ROW_NUMBER, RULE, RUN, SAFE, SAFE_CAST, + SAMPLE, SAVEPOINT, SCHEMA, + SCHEMAS, SCOPE, SCROLL, SEARCH, SECOND, + SECONDARY, + SECONDARY_ENGINE_ATTRIBUTE, + SECONDS, SECRET, SECURITY, + SEED, SELECT, SEMI, SENSITIVE, @@ -678,13 +813,18 @@ define_keywords!( SERDE, SERDEPROPERTIES, SERIALIZABLE, + SERVICE, SESSION, SESSION_USER, SET, + SETERROR, SETS, SETTINGS, SHARE, + SHARED, + SHARING, SHOW, + SIGNED, SIMILAR, SKIP, SLOW, @@ -697,6 +837,7 @@ define_keywords!( SPATIAL, SPECIFIC, SPECIFICTYPE, + SPGIST, SQL, SQLEXCEPTION, SQLSTATE, @@ -705,27 +846,37 @@ 
define_keywords!( STABLE, STAGE, START, + STARTS, STATEMENT, STATIC, STATISTICS, + STATS_AUTO_RECALC, + STATS_PERSISTENT, + STATS_SAMPLE_PAGES, STATUS, STDDEV_POP, STDDEV_SAMP, STDIN, STDOUT, STEP, + STORAGE, STORAGE_INTEGRATION, + STORAGE_SERIALIZATION_POLICY, STORED, + STRAIGHT_JOIN, STRICT, STRING, STRUCT, SUBMULTISET, + SUBSTR, SUBSTRING, SUBSTRING_REGEX, SUCCEEDS, SUM, SUPER, SUPERUSER, + SUPPORT, + SUSPEND, SWAP, SYMMETRIC, SYNC, @@ -735,12 +886,16 @@ define_keywords!( TABLE, TABLES, TABLESAMPLE, + TABLESPACE, TAG, TARGET, + TASK, TBLPROPERTIES, TEMP, TEMPORARY, + TEMPTABLE, TERMINATED, + TERSE, TEXT, TEXTFILE, THEN, @@ -748,16 +903,20 @@ define_keywords!( TIME, TIMESTAMP, TIMESTAMPTZ, + TIMESTAMP_NTZ, TIMETZ, TIMEZONE, TIMEZONE_ABBR, TIMEZONE_HOUR, TIMEZONE_MINUTE, TIMEZONE_REGION, + TINYBLOB, TINYINT, + TINYTEXT, TO, TOP, TOTALS, + TRACE, TRAILING, TRANSACTION, TRANSIENT, @@ -770,11 +929,14 @@ define_keywords!( TRIM_ARRAY, TRUE, TRUNCATE, + TRY, TRY_CAST, TRY_CONVERT, TUPLE, TYPE, + UBIGINT, UESCAPE, + UHUGEINT, UINT128, UINT16, UINT256, @@ -784,10 +946,12 @@ define_keywords!( UNBOUNDED, UNCACHE, UNCOMMITTED, + UNDEFINED, UNFREEZE, UNION, UNIQUE, UNKNOWN, + UNLISTEN, UNLOAD, UNLOCK, UNLOGGED, @@ -795,6 +959,7 @@ define_keywords!( UNNEST, UNPIVOT, UNSAFE, + UNSET, UNSIGNED, UNTIL, UPDATE, @@ -805,6 +970,8 @@ define_keywords!( USER, USER_RESOURCES, USING, + USMALLINT, + UTINYINT, UUID, VACUUM, VALID, @@ -813,6 +980,7 @@ define_keywords!( VALUES, VALUE_OF, VARBINARY, + VARBIT, VARCHAR, VARIABLES, VARYING, @@ -821,14 +989,20 @@ define_keywords!( VERBOSE, VERSION, VERSIONING, + VERSIONS, VIEW, + VIEWS, VIRTUAL, VOLATILE, + VOLUME, WAREHOUSE, + WAREHOUSES, WEEK, + WEEKS, WHEN, WHENEVER, WHERE, + WHILE, WIDTH_BUCKET, WINDOW, WITH, @@ -838,8 +1012,11 @@ define_keywords!( WORK, WRITE, XML, + XMLNAMESPACES, + XMLTABLE, XOR, YEAR, + YEARS, ZONE, ZORDER ); @@ -868,6 +1045,7 @@ pub const RESERVED_FOR_TABLE_ALIAS: &[Keyword] = &[ Keyword::UNION, Keyword::EXCEPT, Keyword::INTERSECT, + Keyword::MINUS, // Reserved only as a table alias in the `FROM`/`JOIN` clauses: Keyword::ON, Keyword::JOIN, @@ -881,6 +1059,11 @@ pub const RESERVED_FOR_TABLE_ALIAS: &[Keyword] = &[ Keyword::CLUSTER, Keyword::DISTRIBUTE, Keyword::GLOBAL, + Keyword::ANTI, + Keyword::SEMI, + Keyword::RETURNING, + Keyword::ASOF, + Keyword::MATCH_CONDITION, // for MSSQL-specific OUTER APPLY (seems reserved in most dialects) Keyword::OUTER, Keyword::SET, @@ -892,15 +1075,18 @@ pub const RESERVED_FOR_TABLE_ALIAS: &[Keyword] = &[ Keyword::PARTITION, // for Clickhouse PREWHERE Keyword::PREWHERE, - // for ClickHouse SELECT * FROM t SETTINGS ... Keyword::SETTINGS, - // for ClickHouse SELECT * FROM t FORMAT... Keyword::FORMAT, // for Snowflake START WITH .. CONNECT BY Keyword::START, Keyword::CONNECT, // Reserved for snowflake MATCH_RECOGNIZE Keyword::MATCH_RECOGNIZE, + // Reserved for Snowflake table sample + Keyword::SAMPLE, + Keyword::TABLESAMPLE, + Keyword::FROM, + Keyword::OPEN, ]; /// Can't be used as a column alias, so that `SELECT alias` @@ -925,6 +1111,7 @@ pub const RESERVED_FOR_COLUMN_ALIAS: &[Keyword] = &[ Keyword::UNION, Keyword::EXCEPT, Keyword::INTERSECT, + Keyword::MINUS, Keyword::CLUSTER, Keyword::DISTRIBUTE, Keyword::RETURNING, @@ -933,3 +1120,23 @@ pub const RESERVED_FOR_COLUMN_ALIAS: &[Keyword] = &[ Keyword::INTO, Keyword::END, ]; + +// Global list of reserved keywords allowed after FROM. +// Parser should call Dialect::get_reserved_keyword_after_from +// to allow for each dialect to customize the list. 
+pub const RESERVED_FOR_TABLE_FACTOR: &[Keyword] = &[ + Keyword::INTO, + Keyword::LIMIT, + Keyword::HAVING, + Keyword::WHERE, +]; + +/// Global list of reserved keywords that cannot be parsed as identifiers +/// without special handling like quoting. Parser should call `Dialect::is_reserved_for_identifier` +/// to allow for each dialect to customize the list. +pub const RESERVED_FOR_IDENTIFIER: &[Keyword] = &[ + Keyword::EXISTS, + Keyword::INTERVAL, + Keyword::STRUCT, + Keyword::TRIM, +]; diff --git a/src/lib.rs b/src/lib.rs index 6c8987b63..5d72f9f0e 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -25,6 +25,9 @@ //! 1. [`Parser::parse_sql`] and [`Parser::new`] for the Parsing API //! 2. [`ast`] for the AST structure //! 3. [`Dialect`] for supported SQL dialects +//! 4. [`Spanned`] for source text locations (see "Source Spans" below for details) +//! +//! [`Spanned`]: ast::Spanned //! //! # Example parsing SQL text //! @@ -61,13 +64,67 @@ //! // The original SQL text can be generated from the AST //! assert_eq!(ast[0].to_string(), sql); //! ``` -//! //! [sqlparser crates.io page]: https://crates.io/crates/sqlparser //! [`Parser::parse_sql`]: crate::parser::Parser::parse_sql //! [`Parser::new`]: crate::parser::Parser::new //! [`AST`]: crate::ast //! [`ast`]: crate::ast //! [`Dialect`]: crate::dialect::Dialect +//! +//! # Source Spans +//! +//! Starting with version `0.53.0` sqlparser introduced source spans to the +//! AST. This feature provides source information for syntax errors, enabling +//! better error messages. See [issue #1548] for more information and the +//! [`Spanned`] trait to access the spans. +//! +//! [issue #1548]: https://github.com/apache/datafusion-sqlparser-rs/issues/1548 +//! [`Spanned`]: ast::Spanned +//! +//! ## Migration Guide +//! +//! For the next few releases, we will be incrementally adding source spans to the +//! AST nodes, trying to minimize the impact on existing users. Some breaking +//! changes are inevitable, and the following is a summary of the changes: +//! +//! #### New fields for spans (must be added to any existing pattern matches) +//! +//! The primary change is that new fields will be added to AST nodes to store the source `Span` or `TokenWithLocation`. +//! +//! This will require +//! 1. Adding new fields to existing pattern matches. +//! 2. Filling in the proper span information when constructing AST nodes. +//! +//! For example, since `Ident` now stores a `Span`, to construct an `Ident` you +//! must provide now provide one: +//! +//! Previously: +//! ```text +//! # use sqlparser::ast::Ident; +//! Ident { +//! value: "name".into(), +//! quote_style: None, +//! } +//! ``` +//! Now +//! ```rust +//! # use sqlparser::ast::Ident; +//! # use sqlparser::tokenizer::Span; +//! Ident { +//! value: "name".into(), +//! quote_style: None, +//! span: Span::empty(), +//! }; +//! ``` +//! +//! Similarly, when pattern matching on `Ident`, you must now account for the +//! `span` field. +//! +//! #### Misc. +//! - [`TokenWithLocation`] stores a full `Span`, rather than just a source location. +//! Users relying on `token.location` should use `token.location.start` instead. +//! 
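As a complement to the migration notes above, a minimal sketch of reading a span back out of a parsed statement (it assumes only what the docs state: AST nodes implement `ast::Spanned` and a `Span` carries `start`/`end` locations):

```rust
use sqlparser::ast::Spanned;
use sqlparser::dialect::GenericDialect;
use sqlparser::parser::Parser;

let sql = "SELECT a FROM table_1";
let statement = Parser::parse_sql(&GenericDialect {}, sql).unwrap().remove(0);
// The span points back into the original SQL text, which is what enables
// better error messages and tooling.
println!("statement source span: {:?}", statement.span());
```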
+//![`TokenWithLocation`]: tokenizer::TokenWithLocation #![cfg_attr(not(feature = "std"), no_std)] #![allow(clippy::upper_case_acronyms)] diff --git a/src/parser/alter.rs b/src/parser/alter.rs index 534105790..bff462ee0 100644 --- a/src/parser/alter.rs +++ b/src/parser/alter.rs @@ -18,15 +18,15 @@ use alloc::vec; use super::{Parser, ParserError}; use crate::{ ast::{ - AlterPolicyOperation, AlterRoleOperation, Expr, Password, ResetConfig, RoleOption, - SetConfigValue, Statement, + AlterConnectorOwner, AlterPolicyOperation, AlterRoleOperation, Expr, Password, ResetConfig, + RoleOption, SetConfigValue, Statement, }, dialect::{MsSqlDialect, PostgreSqlDialect}, keywords::Keyword, tokenizer::Token, }; -impl<'a> Parser<'a> { +impl Parser<'_> { pub fn parse_alter_role(&mut self) -> Result { if dialect_of!(self is PostgreSqlDialect) { return self.parse_pg_alter_role(); @@ -51,13 +51,13 @@ impl<'a> Parser<'a> { /// /// [PostgreSQL](https://www.postgresql.org/docs/current/sql-alterpolicy.html) pub fn parse_alter_policy(&mut self) -> Result { - let name = self.parse_identifier(false)?; - self.expect_keyword(Keyword::ON)?; + let name = self.parse_identifier()?; + self.expect_keyword_is(Keyword::ON)?; let table_name = self.parse_object_name(false)?; if self.parse_keyword(Keyword::RENAME) { - self.expect_keyword(Keyword::TO)?; - let new_name = self.parse_identifier(false)?; + self.expect_keyword_is(Keyword::TO)?; + let new_name = self.parse_identifier()?; Ok(Statement::AlterPolicy { name, table_name, @@ -99,18 +99,59 @@ impl<'a> Parser<'a> { } } + /// Parse an `ALTER CONNECTOR` statement + /// ```sql + /// ALTER CONNECTOR connector_name SET DCPROPERTIES(property_name=property_value, ...); + /// + /// ALTER CONNECTOR connector_name SET URL new_url; + /// + /// ALTER CONNECTOR connector_name SET OWNER [USER|ROLE] user_or_role; + /// ``` + pub fn parse_alter_connector(&mut self) -> Result { + let name = self.parse_identifier()?; + self.expect_keyword_is(Keyword::SET)?; + + let properties = match self.parse_options_with_keywords(&[Keyword::DCPROPERTIES])? { + properties if !properties.is_empty() => Some(properties), + _ => None, + }; + + let url = if self.parse_keyword(Keyword::URL) { + Some(self.parse_literal_string()?) 
+ } else { + None + }; + + let owner = if self.parse_keywords(&[Keyword::OWNER, Keyword::USER]) { + let owner = self.parse_identifier()?; + Some(AlterConnectorOwner::User(owner)) + } else if self.parse_keywords(&[Keyword::OWNER, Keyword::ROLE]) { + let owner = self.parse_identifier()?; + Some(AlterConnectorOwner::Role(owner)) + } else { + None + }; + + Ok(Statement::AlterConnector { + name, + properties, + url, + owner, + }) + } + fn parse_mssql_alter_role(&mut self) -> Result { - let role_name = self.parse_identifier(false)?; + let role_name = self.parse_identifier()?; let operation = if self.parse_keywords(&[Keyword::ADD, Keyword::MEMBER]) { - let member_name = self.parse_identifier(false)?; + let member_name = self.parse_identifier()?; AlterRoleOperation::AddMember { member_name } } else if self.parse_keywords(&[Keyword::DROP, Keyword::MEMBER]) { - let member_name = self.parse_identifier(false)?; + let member_name = self.parse_identifier()?; AlterRoleOperation::DropMember { member_name } } else if self.parse_keywords(&[Keyword::WITH, Keyword::NAME]) { if self.consume_token(&Token::Eq) { - let role_name = self.parse_identifier(false)?; + let role_name = self.parse_identifier()?; AlterRoleOperation::RenameRole { role_name } } else { return self.expected("= after WITH NAME ", self.peek_token()); @@ -126,7 +167,7 @@ impl<'a> Parser<'a> { } fn parse_pg_alter_role(&mut self) -> Result { - let role_name = self.parse_identifier(false)?; + let role_name = self.parse_identifier()?; // [ IN DATABASE _`database_name`_ ] let in_database = if self.parse_keywords(&[Keyword::IN, Keyword::DATABASE]) { @@ -137,7 +178,7 @@ impl<'a> Parser<'a> { let operation = if self.parse_keyword(Keyword::RENAME) { if self.parse_keyword(Keyword::TO) { - let role_name = self.parse_identifier(false)?; + let role_name = self.parse_identifier()?; AlterRoleOperation::RenameRole { role_name } } else { return self.expected("TO after RENAME", self.peek_token()); @@ -232,7 +273,7 @@ impl<'a> Parser<'a> { Some(Keyword::BYPASSRLS) => RoleOption::BypassRLS(true), Some(Keyword::NOBYPASSRLS) => RoleOption::BypassRLS(false), Some(Keyword::CONNECTION) => { - self.expect_keyword(Keyword::LIMIT)?; + self.expect_keyword_is(Keyword::LIMIT)?; RoleOption::ConnectionLimit(Expr::Value(self.parse_number_value()?)) } Some(Keyword::CREATEDB) => RoleOption::CreateDB(true), @@ -256,7 +297,7 @@ impl<'a> Parser<'a> { Some(Keyword::SUPERUSER) => RoleOption::SuperUser(true), Some(Keyword::NOSUPERUSER) => RoleOption::SuperUser(false), Some(Keyword::VALID) => { - self.expect_keyword(Keyword::UNTIL)?; + self.expect_keyword_is(Keyword::UNTIL)?; RoleOption::ValidUntil(Expr::Value(self.parse_value()?)) } _ => self.expected("option", self.peek_token())?, diff --git a/src/parser/mod.rs b/src/parser/mod.rs index a9a5b1df4..d18c7f694 100644 --- a/src/parser/mod.rs +++ b/src/parser/mod.rs @@ -24,6 +24,7 @@ use core::{ fmt::{self, Display}, str::FromStr, }; +use helpers::attached_token::AttachedToken; use log::debug; @@ -47,9 +48,6 @@ pub enum ParserError { RecursionLimitExceeded, } -// avoid clippy type_complexity warnings -type ParsedAction = (Keyword, Option>); - // Use `Parser::expected` instead, if possible macro_rules! parser_err { ($MSG:expr, $loc:expr) => { @@ -72,6 +70,9 @@ mod recursion { /// Note: Uses an [`std::rc::Rc`] and [`std::cell::Cell`] in order to satisfy the Rust /// borrow checker so the automatic [`DepthGuard`] decrement a /// reference to the counter. 
+ /// + /// Note: when "recursive-protection" feature is enabled, this crate uses additional stack overflow protection + /// for some of its recursive methods. See [`recursive::recursive`] for more information. pub(crate) struct RecursionCounter { remaining_depth: Rc>, } @@ -185,6 +186,15 @@ impl std::error::Error for ParserError {} // By default, allow expressions up to this deep before erroring const DEFAULT_REMAINING_DEPTH: usize = 50; +// A constant EOF token that can be referenced. +const EOF_TOKEN: TokenWithSpan = TokenWithSpan { + token: Token::EOF, + span: Span { + start: Location { line: 0, column: 0 }, + end: Location { line: 0, column: 0 }, + }, +}; + /// Composite types declarations using angle brackets syntax can be arbitrary /// nested such that the following declaration is possible: /// `ARRAY>` @@ -263,19 +273,58 @@ enum ParserState { ConnectBy, } +/// A SQL Parser +/// +/// This struct is the main entry point for parsing SQL queries. +/// +/// # Functionality: +/// * Parsing SQL: see examples on [`Parser::new`] and [`Parser::parse_sql`] +/// * Controlling recursion: See [`Parser::with_recursion_limit`] +/// * Controlling parser options: See [`Parser::with_options`] +/// * Providing your own tokens: See [`Parser::with_tokens`] +/// +/// # Internals +/// +/// The parser uses a [`Tokenizer`] to tokenize the input SQL string into a +/// `Vec` of [`TokenWithSpan`]s and maintains an `index` to the current token +/// being processed. The token vec may contain multiple SQL statements. +/// +/// * The "current" token is the token at `index - 1` +/// * The "next" token is the token at `index` +/// * The "previous" token is the token at `index - 2` +/// +/// If `index` is equal to the length of the token stream, the 'next' token is +/// [`Token::EOF`]. +/// +/// For example, the SQL string "SELECT * FROM foo" will be tokenized into +/// following tokens: +/// ```text +/// [ +/// "SELECT", // token index 0 +/// " ", // whitespace +/// "*", +/// " ", +/// "FROM", +/// " ", +/// "foo" +/// ] +/// ``` +/// +/// pub struct Parser<'a> { - tokens: Vec, + /// The tokens + tokens: Vec, /// The index of the first unprocessed token in [`Parser::tokens`]. index: usize, /// The current state of the parser. state: ParserState, - /// The current dialect to use. + /// The SQL dialect to use. dialect: &'a dyn Dialect, /// Additional options that allow you to mix & match behavior /// otherwise constrained to certain dialects (e.g. trailing /// commas) and/or format of parse (e.g. unescaping). options: ParserOptions, - /// Ensure the stack does not overflow by limiting recursion depth. + /// Ensures the stack does not overflow by limiting recursion depth. recursion_counter: RecursionCounter, } @@ -325,6 +374,9 @@ impl<'a> Parser<'a> { /// # Ok(()) /// # } /// ``` + /// + /// Note: when "recursive-protection" feature is enabled, this crate uses additional stack overflow protection + // for some of its recursive methods. See [`recursive::recursive`] for more information. 
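A short sketch of the builder-style API this doc comment refers to, combining a custom recursion limit with the crate's existing `try_with_sql`/`parse_statements` methods (the nesting depth is an illustrative assumption, not a prescribed value):

```rust
use sqlparser::dialect::GenericDialect;
use sqlparser::parser::Parser;

let dialect = GenericDialect {};
// With a small limit, deeply nested input fails fast with
// ParserError::RecursionLimitExceeded instead of risking a stack overflow.
let result = Parser::new(&dialect)
    .with_recursion_limit(10)
    .try_with_sql("SELECT ((((((((((((((1))))))))))))))")
    .and_then(|mut parser| parser.parse_statements());
assert!(result.is_err());
```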
pub fn with_recursion_limit(mut self, recursion_limit: usize) -> Self { self.recursion_counter = RecursionCounter::new(recursion_limit); self @@ -358,7 +410,7 @@ impl<'a> Parser<'a> { } /// Reset this parser to parse the specified token stream - pub fn with_tokens_with_locations(mut self, tokens: Vec) -> Self { + pub fn with_tokens_with_locations(mut self, tokens: Vec) -> Self { self.tokens = tokens; self.index = 0; self @@ -367,11 +419,11 @@ impl<'a> Parser<'a> { /// Reset this parser state to parse the specified tokens pub fn with_tokens(self, tokens: Vec) -> Self { // Put in dummy locations - let tokens_with_locations: Vec = tokens + let tokens_with_locations: Vec = tokens .into_iter() - .map(|token| TokenWithLocation { + .map(|token| TokenWithSpan { token, - location: Location { line: 0, column: 0 }, + span: Span::empty(), }) .collect(); self.with_tokens_with_locations(tokens_with_locations) @@ -476,7 +528,23 @@ impl<'a> Parser<'a> { Keyword::DESCRIBE => self.parse_explain(DescribeAlias::Describe), Keyword::EXPLAIN => self.parse_explain(DescribeAlias::Explain), Keyword::ANALYZE => self.parse_analyze(), - Keyword::SELECT | Keyword::WITH | Keyword::VALUES => { + Keyword::CASE => { + self.prev_token(); + self.parse_case_stmt() + } + Keyword::IF => { + self.prev_token(); + self.parse_if_stmt() + } + Keyword::WHILE => { + self.prev_token(); + self.parse_while() + } + Keyword::RAISE => { + self.prev_token(); + self.parse_raise_stmt() + } + Keyword::SELECT | Keyword::WITH | Keyword::VALUES | Keyword::FROM => { self.prev_token(); self.parse_query().map(Statement::Query) } @@ -506,6 +574,10 @@ impl<'a> Parser<'a> { Keyword::ALTER => self.parse_alter(), Keyword::CALL => self.parse_call(), Keyword::COPY => self.parse_copy(), + Keyword::OPEN => { + self.prev_token(); + self.parse_open() + } Keyword::CLOSE => self.parse_close(), Keyword::SET => self.parse_set(), Keyword::SHOW => self.parse_show(), @@ -513,40 +585,42 @@ impl<'a> Parser<'a> { Keyword::GRANT => self.parse_grant(), Keyword::REVOKE => self.parse_revoke(), Keyword::START => self.parse_start_transaction(), - // `BEGIN` is a nonstandard but common alias for the - // standard `START TRANSACTION` statement. It is supported - // by at least PostgreSQL and MySQL. Keyword::BEGIN => self.parse_begin(), - // `END` is a nonstandard but common alias for the - // standard `COMMIT TRANSACTION` statement. It is supported - // by PostgreSQL. Keyword::END => self.parse_end(), Keyword::SAVEPOINT => self.parse_savepoint(), Keyword::RELEASE => self.parse_release(), Keyword::COMMIT => self.parse_commit(), + Keyword::RAISERROR => Ok(self.parse_raiserror()?), Keyword::ROLLBACK => self.parse_rollback(), Keyword::ASSERT => self.parse_assert(), // `PREPARE`, `EXECUTE` and `DEALLOCATE` are Postgres-specific // syntaxes. They are used for Postgres prepared statement. Keyword::DEALLOCATE => self.parse_deallocate(), - Keyword::EXECUTE => self.parse_execute(), + Keyword::EXECUTE | Keyword::EXEC => self.parse_execute(), Keyword::PREPARE => self.parse_prepare(), Keyword::MERGE => self.parse_merge(), + // `LISTEN`, `UNLISTEN` and `NOTIFY` are Postgres-specific + // syntaxes. They are used for Postgres statement. 
+ Keyword::LISTEN if self.dialect.supports_listen_notify() => self.parse_listen(), + Keyword::UNLISTEN if self.dialect.supports_listen_notify() => self.parse_unlisten(), + Keyword::NOTIFY if self.dialect.supports_listen_notify() => self.parse_notify(), // `PRAGMA` is sqlite specific https://www.sqlite.org/pragma.html Keyword::PRAGMA => self.parse_pragma(), Keyword::UNLOAD => self.parse_unload(), + Keyword::RENAME => self.parse_rename(), // `INSTALL` is duckdb specific https://duckdb.org/docs/extensions/overview Keyword::INSTALL if dialect_of!(self is DuckDbDialect | GenericDialect) => { self.parse_install() } - // `LOAD` is duckdb specific https://duckdb.org/docs/extensions/overview - Keyword::LOAD if dialect_of!(self is DuckDbDialect | GenericDialect) => { - self.parse_load() - } + Keyword::LOAD => self.parse_load(), // `OPTIMIZE` is clickhouse specific https://clickhouse.tech/docs/en/sql-reference/statements/optimize/ Keyword::OPTIMIZE if dialect_of!(self is ClickHouseDialect | GenericDialect) => { self.parse_optimize_table() } + // `COMMENT` is snowflake specific https://docs.snowflake.com/en/sql-reference/sql/comment + Keyword::COMMENT if self.dialect.supports_comment_on() => self.parse_comment(), + Keyword::PRINT => self.parse_print(), + Keyword::RETURN => self.parse_return(), _ => self.expected("an SQL statement", next_token), }, Token::LParen => { @@ -557,6 +631,215 @@ impl<'a> Parser<'a> { } } + /// Parse a `CASE` statement. + /// + /// See [Statement::Case] + pub fn parse_case_stmt(&mut self) -> Result { + let case_token = self.expect_keyword(Keyword::CASE)?; + + let match_expr = if self.peek_keyword(Keyword::WHEN) { + None + } else { + Some(self.parse_expr()?) + }; + + self.expect_keyword_is(Keyword::WHEN)?; + let when_blocks = self.parse_keyword_separated(Keyword::WHEN, |parser| { + parser.parse_conditional_statement_block(&[Keyword::WHEN, Keyword::ELSE, Keyword::END]) + })?; + + let else_block = if self.parse_keyword(Keyword::ELSE) { + Some(self.parse_conditional_statement_block(&[Keyword::END])?) + } else { + None + }; + + let mut end_case_token = self.expect_keyword(Keyword::END)?; + if self.peek_keyword(Keyword::CASE) { + end_case_token = self.expect_keyword(Keyword::CASE)?; + } + + Ok(Statement::Case(CaseStatement { + case_token: AttachedToken(case_token), + match_expr, + when_blocks, + else_block, + end_case_token: AttachedToken(end_case_token), + })) + } + + /// Parse an `IF` statement. + /// + /// See [Statement::If] + pub fn parse_if_stmt(&mut self) -> Result { + self.expect_keyword_is(Keyword::IF)?; + let if_block = self.parse_conditional_statement_block(&[ + Keyword::ELSE, + Keyword::ELSEIF, + Keyword::END, + ])?; + + let elseif_blocks = if self.parse_keyword(Keyword::ELSEIF) { + self.parse_keyword_separated(Keyword::ELSEIF, |parser| { + parser.parse_conditional_statement_block(&[ + Keyword::ELSEIF, + Keyword::ELSE, + Keyword::END, + ]) + })? + } else { + vec![] + }; + + let else_block = if self.parse_keyword(Keyword::ELSE) { + Some(self.parse_conditional_statement_block(&[Keyword::END])?) + } else { + None + }; + + self.expect_keyword_is(Keyword::END)?; + let end_token = self.expect_keyword(Keyword::IF)?; + + Ok(Statement::If(IfStatement { + if_block, + elseif_blocks, + else_block, + end_token: Some(AttachedToken(end_token)), + })) + } + + /// Parse a `WHILE` statement. 
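A hedged sketch of the new control-flow parsing wired up in the dispatch above; it assumes the generic dialect routes a leading `IF` to `parse_if_stmt` and that the `END IF` terminator is required, as the implementation above suggests:

```rust
use sqlparser::ast::Statement;
use sqlparser::dialect::GenericDialect;
use sqlparser::parser::Parser;

let sql = "IF 1 = 1 THEN SELECT 1; ELSE SELECT 2; END IF";
let statements = Parser::parse_sql(&GenericDialect {}, sql).unwrap();
// The whole conditional becomes a single Statement::If node rather than a
// sequence of unrelated statements.
assert!(matches!(statements[0], Statement::If(_)));
```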
+ /// + /// See [Statement::While] + fn parse_while(&mut self) -> Result { + self.expect_keyword_is(Keyword::WHILE)?; + let while_block = self.parse_conditional_statement_block(&[Keyword::END])?; + + Ok(Statement::While(WhileStatement { while_block })) + } + + /// Parses an expression and associated list of statements + /// belonging to a conditional statement like `IF` or `WHEN` or `WHILE`. + /// + /// Example: + /// ```sql + /// IF condition THEN statement1; statement2; + /// ``` + fn parse_conditional_statement_block( + &mut self, + terminal_keywords: &[Keyword], + ) -> Result { + let start_token = self.get_current_token().clone(); // self.expect_keyword(keyword)?; + let mut then_token = None; + + let condition = match &start_token.token { + Token::Word(w) if w.keyword == Keyword::ELSE => None, + Token::Word(w) if w.keyword == Keyword::WHILE => { + let expr = self.parse_expr()?; + Some(expr) + } + _ => { + let expr = self.parse_expr()?; + then_token = Some(AttachedToken(self.expect_keyword(Keyword::THEN)?)); + Some(expr) + } + }; + + let conditional_statements = self.parse_conditional_statements(terminal_keywords)?; + + Ok(ConditionalStatementBlock { + start_token: AttachedToken(start_token), + condition, + then_token, + conditional_statements, + }) + } + + /// Parse a BEGIN/END block or a sequence of statements + /// This could be inside of a conditional (IF, CASE, WHILE etc.) or an object body defined optionally BEGIN/END and one or more statements. + pub(crate) fn parse_conditional_statements( + &mut self, + terminal_keywords: &[Keyword], + ) -> Result { + let conditional_statements = if self.peek_keyword(Keyword::BEGIN) { + let begin_token = self.expect_keyword(Keyword::BEGIN)?; + let statements = self.parse_statement_list(terminal_keywords)?; + let end_token = self.expect_keyword(Keyword::END)?; + + ConditionalStatements::BeginEnd(BeginEndStatements { + begin_token: AttachedToken(begin_token), + statements, + end_token: AttachedToken(end_token), + }) + } else { + ConditionalStatements::Sequence { + statements: self.parse_statement_list(terminal_keywords)?, + } + }; + Ok(conditional_statements) + } + + /// Parse a `RAISE` statement. + /// + /// See [Statement::Raise] + pub fn parse_raise_stmt(&mut self) -> Result { + self.expect_keyword_is(Keyword::RAISE)?; + + let value = if self.parse_keywords(&[Keyword::USING, Keyword::MESSAGE]) { + self.expect_token(&Token::Eq)?; + Some(RaiseStatementValue::UsingMessage(self.parse_expr()?)) + } else { + self.maybe_parse(|parser| parser.parse_expr().map(RaiseStatementValue::Expr))? + }; + + Ok(Statement::Raise(RaiseStatement { value })) + } + + pub fn parse_comment(&mut self) -> Result { + let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]); + + self.expect_keyword_is(Keyword::ON)?; + let token = self.next_token(); + + let (object_type, object_name) = match token.token { + Token::Word(w) if w.keyword == Keyword::COLUMN => { + (CommentObject::Column, self.parse_object_name(false)?) + } + Token::Word(w) if w.keyword == Keyword::TABLE => { + (CommentObject::Table, self.parse_object_name(false)?) + } + Token::Word(w) if w.keyword == Keyword::EXTENSION => { + (CommentObject::Extension, self.parse_object_name(false)?) + } + Token::Word(w) if w.keyword == Keyword::SCHEMA => { + (CommentObject::Schema, self.parse_object_name(false)?) + } + Token::Word(w) if w.keyword == Keyword::DATABASE => { + (CommentObject::Database, self.parse_object_name(false)?) 
+ } + Token::Word(w) if w.keyword == Keyword::USER => { + (CommentObject::User, self.parse_object_name(false)?) + } + Token::Word(w) if w.keyword == Keyword::ROLE => { + (CommentObject::Role, self.parse_object_name(false)?) + } + _ => self.expected("comment object_type", token)?, + }; + + self.expect_keyword_is(Keyword::IS)?; + let comment = if self.parse_keyword(Keyword::NULL) { + None + } else { + Some(self.parse_literal_string()?) + }; + Ok(Statement::Comment { + object_type, + object_name, + comment, + if_exists, + }) + } + pub fn parse_flush(&mut self) -> Result { let mut channel = None; let mut tables: Vec = vec![]; @@ -564,7 +847,7 @@ impl<'a> Parser<'a> { let mut export = false; if !dialect_of!(self is MySqlDialect | GenericDialect) { - return parser_err!("Unsupported statement FLUSH", self.peek_token().location); + return parser_err!("Unsupported statement FLUSH", self.peek_token().span.start); } let location = if self.parse_keyword(Keyword::NO_WRITE_TO_BINLOG) { @@ -646,7 +929,7 @@ impl<'a> Parser<'a> { pub fn parse_msck(&mut self) -> Result { let repair = self.parse_keyword(Keyword::REPAIR); - self.expect_keyword(Keyword::TABLE)?; + self.expect_keyword_is(Keyword::TABLE)?; let table_name = self.parse_object_name(false)?; let partition_action = self .maybe_parse(|parser| { @@ -660,7 +943,7 @@ impl<'a> Parser<'a> { Some(Keyword::SYNC) => Some(AddDropSync::SYNC), _ => None, }; - parser.expect_keyword(Keyword::PARTITIONS)?; + parser.expect_keyword_is(Keyword::PARTITIONS)?; Ok(pa) })? .unwrap_or_default(); @@ -700,13 +983,7 @@ impl<'a> Parser<'a> { None }; - cascade = if self.parse_keyword(Keyword::CASCADE) { - Some(TruncateCascadeOption::Cascade) - } else if self.parse_keyword(Keyword::RESTRICT) { - Some(TruncateCascadeOption::Restrict) - } else { - None - }; + cascade = self.parse_cascade_option(); }; let on_cluster = self.parse_optional_on_cluster()?; @@ -722,6 +999,16 @@ impl<'a> Parser<'a> { }) } + fn parse_cascade_option(&mut self) -> Option { + if self.parse_keyword(Keyword::CASCADE) { + Some(CascadeOption::Cascade) + } else if self.parse_keyword(Keyword::RESTRICT) { + Some(CascadeOption::Restrict) + } else { + None + } + } + pub fn parse_attach_duckdb_database_options( &mut self, ) -> Result, ParserError> { @@ -741,7 +1028,7 @@ impl<'a> Parser<'a> { }; options.push(AttachDuckDBDatabaseOption::ReadOnly(boolean)); } else if self.parse_keyword(Keyword::TYPE) { - let ident = self.parse_identifier(false)?; + let ident = self.parse_identifier()?; options.push(AttachDuckDBDatabaseOption::Type(ident)); } else { return self.expected("expected one of: ), READ_ONLY, TYPE", self.peek_token()); @@ -760,9 +1047,9 @@ impl<'a> Parser<'a> { pub fn parse_attach_duckdb_database(&mut self) -> Result { let database = self.parse_keyword(Keyword::DATABASE); let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]); - let database_path = self.parse_identifier(false)?; + let database_path = self.parse_identifier()?; let database_alias = if self.parse_keyword(Keyword::AS) { - Some(self.parse_identifier(false)?) + Some(self.parse_identifier()?) 
} else { None }; @@ -780,7 +1067,7 @@ impl<'a> Parser<'a> { pub fn parse_detach_duckdb_database(&mut self) -> Result { let database = self.parse_keyword(Keyword::DATABASE); let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]); - let database_alias = self.parse_identifier(false)?; + let database_alias = self.parse_identifier()?; Ok(Statement::DetachDuckDBDatabase { if_exists, database, @@ -791,8 +1078,8 @@ impl<'a> Parser<'a> { pub fn parse_attach_database(&mut self) -> Result { let database = self.parse_keyword(Keyword::DATABASE); let database_file_name = self.parse_expr()?; - self.expect_keyword(Keyword::AS)?; - let schema_name = self.parse_identifier(false)?; + self.expect_keyword_is(Keyword::AS)?; + let schema_name = self.parse_identifier()?; Ok(Statement::AttachDatabase { database, schema_name, @@ -801,7 +1088,7 @@ impl<'a> Parser<'a> { } pub fn parse_analyze(&mut self) -> Result { - self.expect_keyword(Keyword::TABLE)?; + let has_table_keyword = self.parse_keyword(Keyword::TABLE); let table_name = self.parse_object_name(false)?; let mut for_columns = false; let mut cache_metadata = false; @@ -824,21 +1111,21 @@ impl<'a> Parser<'a> { } Some(Keyword::NOSCAN) => noscan = true, Some(Keyword::FOR) => { - self.expect_keyword(Keyword::COLUMNS)?; + self.expect_keyword_is(Keyword::COLUMNS)?; columns = self .maybe_parse(|parser| { - parser.parse_comma_separated(|p| p.parse_identifier(false)) + parser.parse_comma_separated(|p| p.parse_identifier()) })? .unwrap_or_default(); for_columns = true } Some(Keyword::CACHE) => { - self.expect_keyword(Keyword::METADATA)?; + self.expect_keyword_is(Keyword::METADATA)?; cache_metadata = true } Some(Keyword::COMPUTE) => { - self.expect_keyword(Keyword::STATISTICS)?; + self.expect_keyword_is(Keyword::STATISTICS)?; compute_statistics = true } _ => break, @@ -846,6 +1133,7 @@ impl<'a> Parser<'a> { } Ok(Statement::Analyze { + has_table_keyword, table_name, for_columns, columns, @@ -865,7 +1153,7 @@ impl<'a> Parser<'a> { t @ (Token::Word(_) | Token::SingleQuotedString(_)) => { if self.peek_token().token == Token::Period { let mut id_parts: Vec = vec![match t { - Token::Word(w) => w.to_ident(), + Token::Word(w) => w.into_ident(next_token.span), Token::SingleQuotedString(s) => Ident::with_quote('\'', s), _ => unreachable!(), // We matched above }]; @@ -873,13 +1161,16 @@ impl<'a> Parser<'a> { while self.consume_token(&Token::Period) { let next_token = self.next_token(); match next_token.token { - Token::Word(w) => id_parts.push(w.to_ident()), + Token::Word(w) => id_parts.push(w.into_ident(next_token.span)), Token::SingleQuotedString(s) => { // SQLite has single-quoted identifiers id_parts.push(Ident::with_quote('\'', s)) } Token::Mul => { - return Ok(Expr::QualifiedWildcard(ObjectName(id_parts))); + return Ok(Expr::QualifiedWildcard( + ObjectName::from(id_parts), + AttachedToken(next_token), + )); } _ => { return self @@ -890,7 +1181,7 @@ impl<'a> Parser<'a> { } } Token::Mul => { - return Ok(Expr::Wildcard); + return Ok(Expr::Wildcard(AttachedToken(next_token))); } _ => (), }; @@ -904,11 +1195,33 @@ impl<'a> Parser<'a> { self.parse_subexpr(self.dialect.prec_unknown()) } + pub fn parse_expr_with_alias_and_order_by( + &mut self, + ) -> Result { + let expr = self.parse_expr()?; + + fn validator(explicit: bool, kw: &Keyword, _parser: &mut Parser) -> bool { + explicit || !&[Keyword::ASC, Keyword::DESC, Keyword::GROUP].contains(kw) + } + let alias = self.parse_optional_alias_inner(None, validator)?; + let order_by = OrderByOptions { + asc: 
self.parse_asc_desc(), + nulls_first: None, + }; + Ok(ExprWithAliasAndOrderBy { + expr: ExprWithAlias { expr, alias }, + order_by, + }) + } + /// Parse tokens until the precedence changes. pub fn parse_subexpr(&mut self, precedence: u8) -> Result { let _guard = self.recursion_counter.try_decrease()?; debug!("parsing expr"); let mut expr = self.parse_prefix()?; + + expr = self.parse_compound_expr(expr, vec![])?; + debug!("prefix: {:?}", expr); loop { let next_precedence = self.get_next_precedence()?; @@ -918,6 +1231,12 @@ impl<'a> Parser<'a> { break; } + // The period operator is handled exclusively by the + // compound field access parsing. + if Token::Period == self.peek_token_ref().token { + break; + } + expr = self.parse_infix(expr, next_precedence)?; } Ok(expr) @@ -935,17 +1254,232 @@ impl<'a> Parser<'a> { } pub fn parse_savepoint(&mut self) -> Result { - let name = self.parse_identifier(false)?; + let name = self.parse_identifier()?; Ok(Statement::Savepoint { name }) } pub fn parse_release(&mut self) -> Result { let _ = self.parse_keyword(Keyword::SAVEPOINT); - let name = self.parse_identifier(false)?; + let name = self.parse_identifier()?; Ok(Statement::ReleaseSavepoint { name }) } + pub fn parse_listen(&mut self) -> Result { + let channel = self.parse_identifier()?; + Ok(Statement::LISTEN { channel }) + } + + pub fn parse_unlisten(&mut self) -> Result { + let channel = if self.consume_token(&Token::Mul) { + Ident::new(Expr::Wildcard(AttachedToken::empty()).to_string()) + } else { + match self.parse_identifier() { + Ok(expr) => expr, + _ => { + self.prev_token(); + return self.expected("wildcard or identifier", self.peek_token()); + } + } + }; + Ok(Statement::UNLISTEN { channel }) + } + + pub fn parse_notify(&mut self) -> Result { + let channel = self.parse_identifier()?; + let payload = if self.consume_token(&Token::Comma) { + Some(self.parse_literal_string()?) + } else { + None + }; + Ok(Statement::NOTIFY { channel, payload }) + } + + /// Parses a `RENAME TABLE` statement. See [Statement::RenameTable] + pub fn parse_rename(&mut self) -> Result { + if self.peek_keyword(Keyword::TABLE) { + self.expect_keyword(Keyword::TABLE)?; + let rename_tables = self.parse_comma_separated(|parser| { + let old_name = parser.parse_object_name(false)?; + parser.expect_keyword(Keyword::TO)?; + let new_name = parser.parse_object_name(false)?; + + Ok(RenameTable { old_name, new_name }) + })?; + Ok(Statement::RenameTable(rename_tables)) + } else { + self.expected("KEYWORD `TABLE` after RENAME", self.peek_token()) + } + } + + /// Tries to parse an expression by matching the specified word to known keywords that have a special meaning in the dialect. + /// Returns `None if no match is found. 
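For the Postgres-specific statements handled a little above, a usage sketch (assuming `PostgreSqlDialect` opts in via `supports_listen_notify`, which is how the dispatch is gated):

```rust
use sqlparser::dialect::PostgreSqlDialect;
use sqlparser::parser::Parser;

let sql = "LISTEN job_queue; NOTIFY job_queue, 'job 42 finished'";
let statements = Parser::parse_sql(&PostgreSqlDialect {}, sql).unwrap();
// One Statement::LISTEN and one Statement::NOTIFY carrying the optional payload.
assert_eq!(statements.len(), 2);
```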
+ fn parse_expr_prefix_by_reserved_word( + &mut self, + w: &Word, + w_span: Span, + ) -> Result, ParserError> { + match w.keyword { + Keyword::TRUE | Keyword::FALSE if self.dialect.supports_boolean_literals() => { + self.prev_token(); + Ok(Some(Expr::Value(self.parse_value()?))) + } + Keyword::NULL => { + self.prev_token(); + Ok(Some(Expr::Value(self.parse_value()?))) + } + Keyword::CURRENT_CATALOG + | Keyword::CURRENT_USER + | Keyword::SESSION_USER + | Keyword::USER + if dialect_of!(self is PostgreSqlDialect | GenericDialect) => + { + Ok(Some(Expr::Function(Function { + name: ObjectName::from(vec![w.clone().into_ident(w_span)]), + uses_odbc_syntax: false, + parameters: FunctionArguments::None, + args: FunctionArguments::None, + null_treatment: None, + filter: None, + over: None, + within_group: vec![], + }))) + } + Keyword::CURRENT_TIMESTAMP + | Keyword::CURRENT_TIME + | Keyword::CURRENT_DATE + | Keyword::LOCALTIME + | Keyword::LOCALTIMESTAMP => { + Ok(Some(self.parse_time_functions(ObjectName::from(vec![w.clone().into_ident(w_span)]))?)) + } + Keyword::CASE => Ok(Some(self.parse_case_expr()?)), + Keyword::CONVERT => Ok(Some(self.parse_convert_expr(false)?)), + Keyword::TRY_CONVERT if self.dialect.supports_try_convert() => Ok(Some(self.parse_convert_expr(true)?)), + Keyword::CAST => Ok(Some(self.parse_cast_expr(CastKind::Cast)?)), + Keyword::TRY_CAST => Ok(Some(self.parse_cast_expr(CastKind::TryCast)?)), + Keyword::SAFE_CAST => Ok(Some(self.parse_cast_expr(CastKind::SafeCast)?)), + Keyword::EXISTS + // Support parsing Databricks has a function named `exists`. + if !dialect_of!(self is DatabricksDialect) + || matches!( + self.peek_nth_token_ref(1).token, + Token::Word(Word { + keyword: Keyword::SELECT | Keyword::WITH, + .. + }) + ) => + { + Ok(Some(self.parse_exists_expr(false)?)) + } + Keyword::EXTRACT => Ok(Some(self.parse_extract_expr()?)), + Keyword::CEIL => Ok(Some(self.parse_ceil_floor_expr(true)?)), + Keyword::FLOOR => Ok(Some(self.parse_ceil_floor_expr(false)?)), + Keyword::POSITION if self.peek_token_ref().token == Token::LParen => { + Ok(Some(self.parse_position_expr(w.clone().into_ident(w_span))?)) + } + Keyword::SUBSTR | Keyword::SUBSTRING => { + self.prev_token(); + Ok(Some(self.parse_substring()?)) + } + Keyword::OVERLAY => Ok(Some(self.parse_overlay_expr()?)), + Keyword::TRIM => Ok(Some(self.parse_trim_expr()?)), + Keyword::INTERVAL => Ok(Some(self.parse_interval()?)), + // Treat ARRAY[1,2,3] as an array [1,2,3], otherwise try as subquery or a function call + Keyword::ARRAY if *self.peek_token_ref() == Token::LBracket => { + self.expect_token(&Token::LBracket)?; + Ok(Some(self.parse_array_expr(true)?)) + } + Keyword::ARRAY + if self.peek_token() == Token::LParen + && !dialect_of!(self is ClickHouseDialect | DatabricksDialect) => + { + self.expect_token(&Token::LParen)?; + let query = self.parse_query()?; + self.expect_token(&Token::RParen)?; + Ok(Some(Expr::Function(Function { + name: ObjectName::from(vec![w.clone().into_ident(w_span)]), + uses_odbc_syntax: false, + parameters: FunctionArguments::None, + args: FunctionArguments::Subquery(query), + filter: None, + null_treatment: None, + over: None, + within_group: vec![], + }))) + } + Keyword::NOT => Ok(Some(self.parse_not()?)), + Keyword::MATCH if self.dialect.supports_match_against() => { + Ok(Some(self.parse_match_against()?)) + } + Keyword::STRUCT if self.dialect.supports_struct_literal() => { + let struct_expr = self.parse_struct_literal()?; + Ok(Some(struct_expr)) + } + Keyword::PRIOR if matches!(self.state, 
ParserState::ConnectBy) => { + let expr = self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?; + Ok(Some(Expr::Prior(Box::new(expr)))) + } + Keyword::MAP if *self.peek_token_ref() == Token::LBrace && self.dialect.support_map_literal_syntax() => { + Ok(Some(self.parse_duckdb_map_literal()?)) + } + _ if self.dialect.supports_geometric_types() => match w.keyword { + Keyword::CIRCLE => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Circle)?)), + Keyword::BOX => Ok(Some(self.parse_geometric_type(GeometricTypeKind::GeometricBox)?)), + Keyword::PATH => Ok(Some(self.parse_geometric_type(GeometricTypeKind::GeometricPath)?)), + Keyword::LINE => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Line)?)), + Keyword::LSEG => Ok(Some(self.parse_geometric_type(GeometricTypeKind::LineSegment)?)), + Keyword::POINT => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Point)?)), + Keyword::POLYGON => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Polygon)?)), + _ => Ok(None), + }, + _ => Ok(None), + } + } + + /// Tries to parse an expression by a word that is not known to have a special meaning in the dialect. + fn parse_expr_prefix_by_unreserved_word( + &mut self, + w: &Word, + w_span: Span, + ) -> Result { + match self.peek_token().token { + Token::LParen if !self.peek_outer_join_operator() => { + let id_parts = vec![w.clone().into_ident(w_span)]; + self.parse_function(ObjectName::from(id_parts)) + } + // string introducer https://dev.mysql.com/doc/refman/8.0/en/charset-introducer.html + Token::SingleQuotedString(_) + | Token::DoubleQuotedString(_) + | Token::HexStringLiteral(_) + if w.value.starts_with('_') => + { + Ok(Expr::Prefixed { + prefix: w.clone().into_ident(w_span), + value: self.parse_introduced_string_expr()?.into(), + }) + } + // string introducer https://dev.mysql.com/doc/refman/8.0/en/charset-introducer.html + Token::SingleQuotedString(_) + | Token::DoubleQuotedString(_) + | Token::HexStringLiteral(_) + if w.value.starts_with('_') => + { + Ok(Expr::Prefixed { + prefix: w.clone().into_ident(w_span), + value: self.parse_introduced_string_expr()?.into(), + }) + } + Token::Arrow if self.dialect.supports_lambda_functions() => { + self.expect_token(&Token::Arrow)?; + Ok(Expr::Lambda(LambdaFunction { + params: OneOrManyWithParens::One(w.clone().into_ident(w_span)), + body: Box::new(self.parse_expr()?), + })) + } + _ => Ok(Expr::Identifier(w.clone().into_ident(w_span))), + } + } + /// Parse an expression prefix. pub fn parse_prefix(&mut self) -> Result { // allow the dialect to override prefix parsing @@ -969,7 +1503,7 @@ impl<'a> Parser<'a> { // Note also that naively `SELECT date` looks like a syntax error because the `date` type // name is not followed by a string literal, but in fact in PostgreSQL it is a valid // expression that should parse as the column name "date". - let loc = self.peek_token().location; + let loc = self.peek_token_ref().span.start; let opt_expr = self.maybe_parse(|parser| { match parser.parse_data_type()? { DataType::Interval => parser.parse_interval(), @@ -983,7 +1517,7 @@ impl<'a> Parser<'a> { DataType::Custom(..) 
=> parser_err!("dummy", loc), data_type => Ok(Expr::TypedString { data_type, - value: parser.parse_literal_string()?, + value: parser.parse_value()?.value, }), } })?; @@ -992,178 +1526,55 @@ impl<'a> Parser<'a> { return Ok(expr); } - let next_token = self.next_token(); - let expr = match next_token.token { - Token::Word(w) => match w.keyword { - Keyword::TRUE | Keyword::FALSE | Keyword::NULL => { - self.prev_token(); - Ok(Expr::Value(self.parse_value()?)) - } - Keyword::CURRENT_CATALOG - | Keyword::CURRENT_USER - | Keyword::SESSION_USER - | Keyword::USER - if dialect_of!(self is PostgreSqlDialect | GenericDialect) => - { - Ok(Expr::Function(Function { - name: ObjectName(vec![w.to_ident()]), - parameters: FunctionArguments::None, - args: FunctionArguments::None, - null_treatment: None, - filter: None, - over: None, - within_group: vec![], - })) - } - Keyword::CURRENT_TIMESTAMP - | Keyword::CURRENT_TIME - | Keyword::CURRENT_DATE - | Keyword::LOCALTIME - | Keyword::LOCALTIMESTAMP => { - self.parse_time_functions(ObjectName(vec![w.to_ident()])) - } - Keyword::CASE => self.parse_case_expr(), - Keyword::CONVERT => self.parse_convert_expr(false), - Keyword::TRY_CONVERT if self.dialect.supports_try_convert() => self.parse_convert_expr(true), - Keyword::CAST => self.parse_cast_expr(CastKind::Cast), - Keyword::TRY_CAST => self.parse_cast_expr(CastKind::TryCast), - Keyword::SAFE_CAST => self.parse_cast_expr(CastKind::SafeCast), - Keyword::EXISTS - // Support parsing Databricks has a function named `exists`. - if !dialect_of!(self is DatabricksDialect) - || matches!( - self.peek_nth_token(1).token, - Token::Word(Word { - keyword: Keyword::SELECT | Keyword::WITH, - .. - }) - ) => - { - self.parse_exists_expr(false) - } - Keyword::EXTRACT => self.parse_extract_expr(), - Keyword::CEIL => self.parse_ceil_floor_expr(true), - Keyword::FLOOR => self.parse_ceil_floor_expr(false), - Keyword::POSITION if self.peek_token().token == Token::LParen => { - self.parse_position_expr(w.to_ident()) - } - Keyword::SUBSTRING => self.parse_substring_expr(), - Keyword::OVERLAY => self.parse_overlay_expr(), - Keyword::TRIM => self.parse_trim_expr(), - Keyword::INTERVAL => self.parse_interval(), - // Treat ARRAY[1,2,3] as an array [1,2,3], otherwise try as subquery or a function call - Keyword::ARRAY if self.peek_token() == Token::LBracket => { - self.expect_token(&Token::LBracket)?; - self.parse_array_expr(true) - } - Keyword::ARRAY - if self.peek_token() == Token::LParen - && !dialect_of!(self is ClickHouseDialect | DatabricksDialect) => - { - self.expect_token(&Token::LParen)?; - let query = self.parse_query()?; - self.expect_token(&Token::RParen)?; - Ok(Expr::Function(Function { - name: ObjectName(vec![w.to_ident()]), - parameters: FunctionArguments::None, - args: FunctionArguments::Subquery(query), - filter: None, - null_treatment: None, - over: None, - within_group: vec![], - })) - } - Keyword::NOT => self.parse_not(), - Keyword::MATCH if dialect_of!(self is MySqlDialect | GenericDialect) => { - self.parse_match_against() - } - Keyword::STRUCT if dialect_of!(self is BigQueryDialect | GenericDialect) => { - self.prev_token(); - self.parse_bigquery_struct_literal() - } - Keyword::PRIOR if matches!(self.state, ParserState::ConnectBy) => { - let expr = self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?; - Ok(Expr::Prior(Box::new(expr))) - } - Keyword::MAP if self.peek_token() == Token::LBrace && self.dialect.support_map_literal_syntax() => { - self.parse_duckdb_map_literal() - } - // Here `w` is a word, 
check if it's a part of a multipart - // identifier, a function call, or a simple identifier: - _ => match self.peek_token().token { - Token::LParen | Token::Period => { - let mut id_parts: Vec = vec![w.to_ident()]; - let mut ends_with_wildcard = false; - while self.consume_token(&Token::Period) { - let next_token = self.next_token(); - match next_token.token { - Token::Word(w) => id_parts.push(w.to_ident()), - Token::Mul => { - // Postgres explicitly allows funcnm(tablenm.*) and the - // function array_agg traverses this control flow - if dialect_of!(self is PostgreSqlDialect) { - ends_with_wildcard = true; - break; - } else { - return self - .expected("an identifier after '.'", next_token); - } - } - Token::SingleQuotedString(s) => { - id_parts.push(Ident::with_quote('\'', s)) - } - _ => { - return self - .expected("an identifier or a '*' after '.'", next_token); - } - } - } + // Cache some dialect properties to avoid lifetime issues with the + // next_token reference. - if ends_with_wildcard { - Ok(Expr::QualifiedWildcard(ObjectName(id_parts))) - } else if self.consume_token(&Token::LParen) { - if dialect_of!(self is SnowflakeDialect | MsSqlDialect) - && self.consume_tokens(&[Token::Plus, Token::RParen]) - { - Ok(Expr::OuterJoin(Box::new( - match <[Ident; 1]>::try_from(id_parts) { - Ok([ident]) => Expr::Identifier(ident), - Err(parts) => Expr::CompoundIdentifier(parts), - }, - ))) - } else { - self.prev_token(); - self.parse_function(ObjectName(id_parts)) + let dialect = self.dialect; + + self.advance_token(); + let next_token_index = self.get_current_index(); + let next_token = self.get_current_token(); + let span = next_token.span; + let expr = match &next_token.token { + Token::Word(w) => { + // The word we consumed may fall into one of two cases: it has a special meaning, or not. + // For example, in Snowflake, the word `interval` may have two meanings depending on the context: + // `SELECT CURRENT_DATE() + INTERVAL '1 DAY', MAX(interval) FROM tbl;` + // ^^^^^^^^^^^^^^^^ ^^^^^^^^ + // interval expression identifier + // + // We first try to parse the word and following tokens as a special expression, and if that fails, + // we rollback and try to parse it as an identifier. + let w = w.clone(); + match self.try_parse(|parser| parser.parse_expr_prefix_by_reserved_word(&w, span)) { + // This word indicated an expression prefix and parsing was successful + Ok(Some(expr)) => Ok(expr), + + // No expression prefix associated with this word + Ok(None) => Ok(self.parse_expr_prefix_by_unreserved_word(&w, span)?), + + // If parsing of the word as a special expression failed, we are facing two options: + // 1. The statement is malformed, e.g. `SELECT INTERVAL '1 DAI` (`DAI` instead of `DAY`) + // 2. The word is used as an identifier, e.g. `SELECT MAX(interval) FROM tbl` + // We first try to parse the word as an identifier and if that fails + // we rollback and return the parsing error we got from trying to parse a + // special expression (to maintain backwards compatibility of parsing errors). 
+ Err(e) => { + if !self.dialect.is_reserved_for_identifier(w.keyword) { + if let Ok(Some(expr)) = self.maybe_parse(|parser| { + parser.parse_expr_prefix_by_unreserved_word(&w, span) + }) { + return Ok(expr); } - } else { - Ok(Expr::CompoundIdentifier(id_parts)) } + return Err(e); } - // string introducer https://dev.mysql.com/doc/refman/8.0/en/charset-introducer.html - Token::SingleQuotedString(_) - | Token::DoubleQuotedString(_) - | Token::HexStringLiteral(_) - if w.value.starts_with('_') => - { - Ok(Expr::IntroducedString { - introducer: w.value, - value: self.parse_introduced_string_value()?, - }) - } - Token::Arrow if self.dialect.supports_lambda_functions() => { - self.expect_token(&Token::Arrow)?; - return Ok(Expr::Lambda(LambdaFunction { - params: OneOrManyWithParens::One(w.to_ident()), - body: Box::new(self.parse_expr()?), - })); - } - _ => Ok(Expr::Identifier(w.to_ident())), - }, - }, // End of Token::Word + } + } // End of Token::Word // array `[1, 2, 3]` Token::LBracket => self.parse_array_expr(false), tok @ Token::Minus | tok @ Token::Plus => { - let op = if tok == Token::Plus { + let op = if *tok == Token::Plus { UnaryOperator::Plus } else { UnaryOperator::Minus @@ -1175,12 +1586,16 @@ impl<'a> Parser<'a> { ), }) } + Token::ExclamationMark if dialect.supports_bang_not_operator() => Ok(Expr::UnaryOp { + op: UnaryOperator::BangNot, + expr: Box::new(self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?), + }), tok @ Token::DoubleExclamationMark | tok @ Token::PGSquareRoot | tok @ Token::PGCubeRoot | tok @ Token::AtSign | tok @ Token::Tilde - if dialect_of!(self is PostgreSqlDialect) => + if dialect_is!(dialect is PostgreSqlDialect) => { let op = match tok { Token::DoubleExclamationMark => UnaryOperator::PGPrefixFactorial, @@ -1197,7 +1612,34 @@ impl<'a> Parser<'a> { ), }) } - Token::EscapedStringLiteral(_) if dialect_of!(self is PostgreSqlDialect | GenericDialect) => + tok @ Token::Sharp + | tok @ Token::AtDashAt + | tok @ Token::AtAt + | tok @ Token::QuestionMarkDash + | tok @ Token::QuestionPipe + if self.dialect.supports_geometric_types() => + { + let op = match tok { + Token::Sharp => UnaryOperator::Hash, + Token::AtDashAt => UnaryOperator::AtDashAt, + Token::AtAt => UnaryOperator::DoubleAt, + Token::QuestionMarkDash => UnaryOperator::QuestionDash, + Token::QuestionPipe => UnaryOperator::QuestionPipe, + _ => { + return Err(ParserError::ParserError(format!( + "Unexpected token in unary operator parsing: {:?}", + tok + ))) + } + }; + Ok(Expr::UnaryOp { + op, + expr: Box::new( + self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?, + ), + }) + } + Token::EscapedStringLiteral(_) if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => { self.prev_token(); Ok(Expr::Value(self.parse_value()?)) @@ -1239,34 +1681,17 @@ impl<'a> Parser<'a> { } }; self.expect_token(&Token::RParen)?; - if !self.consume_token(&Token::Period) { - Ok(expr) - } else { - let tok = self.next_token(); - let key = match tok.token { - Token::Word(word) => word.to_ident(), - _ => { - return parser_err!( - format!("Expected identifier, found: {tok}"), - tok.location - ) - } - }; - Ok(Expr::CompositeAccess { - expr: Box::new(expr), - key, - }) - } + Ok(expr) } Token::Placeholder(_) | Token::Colon | Token::AtSign => { self.prev_token(); Ok(Expr::Value(self.parse_value()?)) } - Token::LBrace if self.dialect.supports_dictionary_syntax() => { + Token::LBrace => { self.prev_token(); - self.parse_duckdb_struct_literal() + self.parse_lbrace_expr() } - _ => self.expected("an 
expression", next_token), + _ => self.expected_at("an expression", next_token_index), }?; if self.parse_keyword(Keyword::COLLATE) { @@ -1279,6 +1704,261 @@ impl<'a> Parser<'a> { } } + fn parse_geometric_type(&mut self, kind: GeometricTypeKind) -> Result { + let value: Value = self.parse_value()?.value; + Ok(Expr::TypedString { + data_type: DataType::GeometricType(kind), + value, + }) + } + + /// Try to parse an [Expr::CompoundFieldAccess] like `a.b.c` or `a.b[1].c`. + /// If all the fields are `Expr::Identifier`s, return an [Expr::CompoundIdentifier] instead. + /// If only the root exists, return the root. + /// Parses compound expressions which may be delimited by period + /// or bracket notation. + /// For example: `a.b.c`, `a.b[1]`. + pub fn parse_compound_expr( + &mut self, + root: Expr, + mut chain: Vec, + ) -> Result { + let mut ending_wildcard: Option = None; + loop { + if self.consume_token(&Token::Period) { + let next_token = self.peek_token_ref(); + match &next_token.token { + Token::Mul => { + // Postgres explicitly allows funcnm(tablenm.*) and the + // function array_agg traverses this control flow + if dialect_of!(self is PostgreSqlDialect) { + ending_wildcard = Some(self.next_token()); + } else { + // Put back the consumed `.` tokens before exiting. + // If this expression is being parsed in the + // context of a projection, then the `.*` could imply + // a wildcard expansion. For example: + // `SELECT STRUCT('foo').* FROM T` + self.prev_token(); // . + } + + break; + } + Token::SingleQuotedString(s) => { + let expr = + Expr::Identifier(Ident::with_quote_and_span('\'', next_token.span, s)); + chain.push(AccessExpr::Dot(expr)); + self.advance_token(); // The consumed string + } + // Fallback to parsing an arbitrary expression. + _ => match self.parse_subexpr(self.dialect.prec_value(Precedence::Period))? { + // If we get back a compound field access or identifier, + // we flatten the nested expression. + // For example if the current root is `foo` + // and we get back a compound identifier expression `bar.baz` + // The full expression should be `foo.bar.baz` (i.e. + // a root with an access chain with 2 entries) and not + // `foo.(bar.baz)` (i.e. a root with an access chain with + // 1 entry`). + Expr::CompoundFieldAccess { root, access_chain } => { + chain.push(AccessExpr::Dot(*root)); + chain.extend(access_chain); + } + Expr::CompoundIdentifier(parts) => chain + .extend(parts.into_iter().map(Expr::Identifier).map(AccessExpr::Dot)), + expr => { + chain.push(AccessExpr::Dot(expr)); + } + }, + } + } else if !self.dialect.supports_partiql() + && self.peek_token_ref().token == Token::LBracket + { + self.parse_multi_dim_subscript(&mut chain)?; + } else { + break; + } + } + + let tok_index = self.get_current_index(); + if let Some(wildcard_token) = ending_wildcard { + if !Self::is_all_ident(&root, &chain) { + return self.expected("an identifier or a '*' after '.'", self.peek_token()); + }; + Ok(Expr::QualifiedWildcard( + ObjectName::from(Self::exprs_to_idents(root, chain)?), + AttachedToken(wildcard_token), + )) + } else if self.maybe_parse_outer_join_operator() { + if !Self::is_all_ident(&root, &chain) { + return self.expected_at("column identifier before (+)", tok_index); + }; + let expr = if chain.is_empty() { + root + } else { + Expr::CompoundIdentifier(Self::exprs_to_idents(root, chain)?) + }; + Ok(Expr::OuterJoin(expr.into())) + } else { + Self::build_compound_expr(root, chain) + } + } + + /// Combines a root expression and access chain to form + /// a compound expression. 
Which may be a [Expr::CompoundFieldAccess] + /// or other special cased expressions like [Expr::CompoundIdentifier], + /// [Expr::OuterJoin]. + fn build_compound_expr( + root: Expr, + mut access_chain: Vec, + ) -> Result { + if access_chain.is_empty() { + return Ok(root); + } + + if Self::is_all_ident(&root, &access_chain) { + return Ok(Expr::CompoundIdentifier(Self::exprs_to_idents( + root, + access_chain, + )?)); + } + + // Flatten qualified function calls. + // For example, the expression `a.b.c.foo(1,2,3)` should + // represent a function called `a.b.c.foo`, rather than + // a composite expression. + if matches!(root, Expr::Identifier(_)) + && matches!( + access_chain.last(), + Some(AccessExpr::Dot(Expr::Function(_))) + ) + && access_chain + .iter() + .rev() + .skip(1) // All except the Function + .all(|access| matches!(access, AccessExpr::Dot(Expr::Identifier(_)))) + { + let Some(AccessExpr::Dot(Expr::Function(mut func))) = access_chain.pop() else { + return parser_err!("expected function expression", root.span().start); + }; + + let compound_func_name = [root] + .into_iter() + .chain(access_chain.into_iter().flat_map(|access| match access { + AccessExpr::Dot(expr) => Some(expr), + _ => None, + })) + .flat_map(|expr| match expr { + Expr::Identifier(ident) => Some(ident), + _ => None, + }) + .map(ObjectNamePart::Identifier) + .chain(func.name.0) + .collect::>(); + func.name = ObjectName(compound_func_name); + + return Ok(Expr::Function(func)); + } + + // Flatten qualified outer join expressions. + // For example, the expression `T.foo(+)` should + // represent an outer join on the column name `T.foo` + // rather than a composite expression. + if access_chain.len() == 1 + && matches!( + access_chain.last(), + Some(AccessExpr::Dot(Expr::OuterJoin(_))) + ) + { + let Some(AccessExpr::Dot(Expr::OuterJoin(inner_expr))) = access_chain.pop() else { + return parser_err!("expected (+) expression", root.span().start); + }; + + if !Self::is_all_ident(&root, &[]) { + return parser_err!("column identifier before (+)", root.span().start); + }; + + let token_start = root.span().start; + let mut idents = Self::exprs_to_idents(root, vec![])?; + match *inner_expr { + Expr::CompoundIdentifier(suffix) => idents.extend(suffix), + Expr::Identifier(suffix) => idents.push(suffix), + _ => { + return parser_err!("column identifier before (+)", token_start); + } + } + + return Ok(Expr::OuterJoin(Expr::CompoundIdentifier(idents).into())); + } + + Ok(Expr::CompoundFieldAccess { + root: Box::new(root), + access_chain, + }) + } + + fn keyword_to_modifier(k: Keyword) -> Option { + match k { + Keyword::LOCAL => Some(ContextModifier::Local), + Keyword::GLOBAL => Some(ContextModifier::Global), + Keyword::SESSION => Some(ContextModifier::Session), + _ => None, + } + } + + /// Check if the root is an identifier and all fields are identifiers. + fn is_all_ident(root: &Expr, fields: &[AccessExpr]) -> bool { + if !matches!(root, Expr::Identifier(_)) { + return false; + } + fields + .iter() + .all(|x| matches!(x, AccessExpr::Dot(Expr::Identifier(_)))) + } + + /// Convert a root and a list of fields to a list of identifiers. 
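(Aside, not part of the patch: a minimal usage sketch of the compound-expression handling introduced above. The SQL strings and the choice of `GenericDialect` are only illustrative, and it assumes a sqlparser release that contains `parse_compound_expr` and friends.)

```rust
use sqlparser::dialect::GenericDialect;
use sqlparser::parser::{Parser, ParserError};

fn main() -> Result<(), ParserError> {
    let dialect = GenericDialect {};

    // An all-identifier chain like `a.b.c` is collapsed into a
    // compound identifier rather than a field-access chain.
    let stmts = Parser::parse_sql(&dialect, "SELECT a.b.c FROM t")?;
    println!("{:#?}", stmts[0]);

    // A mixed chain (identifiers plus a subscript) is kept as a root
    // expression with an access chain.
    let stmts = Parser::parse_sql(&dialect, "SELECT a.b[1].c FROM t")?;
    println!("{:#?}", stmts[0]);
    Ok(())
}
```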
+ fn exprs_to_idents(root: Expr, fields: Vec) -> Result, ParserError> { + let mut idents = vec![]; + if let Expr::Identifier(root) = root { + idents.push(root); + for x in fields { + if let AccessExpr::Dot(Expr::Identifier(ident)) = x { + idents.push(ident); + } else { + return parser_err!( + format!("Expected identifier, found: {}", x), + x.span().start + ); + } + } + Ok(idents) + } else { + parser_err!( + format!("Expected identifier, found: {}", root), + root.span().start + ) + } + } + + /// Returns true if the next tokens indicate the outer join operator `(+)`. + fn peek_outer_join_operator(&mut self) -> bool { + if !self.dialect.supports_outer_join_operator() { + return false; + } + + let [maybe_lparen, maybe_plus, maybe_rparen] = self.peek_tokens_ref(); + Token::LParen == maybe_lparen.token + && Token::Plus == maybe_plus.token + && Token::RParen == maybe_rparen.token + } + + /// If the next tokens indicates the outer join operator `(+)`, consume + /// the tokens and return true. + fn maybe_parse_outer_join_operator(&mut self) -> bool { + self.dialect.supports_outer_join_operator() + && self.consume_tokens(&[Token::LParen, Token::Plus, Token::RParen]) + } + pub fn parse_utility_options(&mut self) -> Result, ParserError> { self.expect_token(&Token::LParen)?; let options = self.parse_comma_separated(Self::parse_utility_option)?; @@ -1288,7 +1968,7 @@ impl<'a> Parser<'a> { } fn parse_utility_option(&mut self) -> Result { - let name = self.parse_identifier(false)?; + let name = self.parse_identifier()?; let next_token = self.peek_token(); if next_token == Token::Comma || next_token == Token::RParen { @@ -1315,7 +1995,7 @@ impl<'a> Parser<'a> { return Ok(None); } self.maybe_parse(|p| { - let params = p.parse_comma_separated(|p| p.parse_identifier(false))?; + let params = p.parse_comma_separated(|p| p.parse_identifier())?; p.expect_token(&Token::RParen)?; p.expect_token(&Token::Arrow)?; let expr = p.parse_expr()?; @@ -1326,7 +2006,29 @@ impl<'a> Parser<'a> { }) } + /// Tries to parse the body of an [ODBC function] call. + /// i.e. 
without the enclosing braces + /// + /// ```sql + /// fn myfunc(1,2,3) + /// ``` + /// + /// [ODBC function]: https://learn.microsoft.com/en-us/sql/odbc/reference/develop-app/scalar-function-calls?view=sql-server-2017 + fn maybe_parse_odbc_fn_body(&mut self) -> Result, ParserError> { + self.maybe_parse(|p| { + p.expect_keyword(Keyword::FN)?; + let fn_name = p.parse_object_name(false)?; + let mut fn_call = p.parse_function_call(fn_name)?; + fn_call.uses_odbc_syntax = true; + Ok(Expr::Function(fn_call)) + }) + } + pub fn parse_function(&mut self, name: ObjectName) -> Result { + self.parse_function_call(name).map(Expr::Function) + } + + fn parse_function_call(&mut self, name: ObjectName) -> Result { self.expect_token(&Token::LParen)?; // Snowflake permits a subquery to be passed as an argument without @@ -1334,15 +2036,16 @@ impl<'a> Parser<'a> { if dialect_of!(self is SnowflakeDialect) && self.peek_sub_query() { let subquery = self.parse_query()?; self.expect_token(&Token::RParen)?; - return Ok(Expr::Function(Function { + return Ok(Function { name, + uses_odbc_syntax: false, parameters: FunctionArguments::None, args: FunctionArguments::Subquery(subquery), filter: None, null_treatment: None, over: None, within_group: vec![], - })); + }); } let mut args = self.parse_function_argument_list()?; @@ -1395,28 +2098,29 @@ impl<'a> Parser<'a> { let window_spec = self.parse_window_spec()?; Some(WindowType::WindowSpec(window_spec)) } else { - Some(WindowType::NamedWindow(self.parse_identifier(false)?)) + Some(WindowType::NamedWindow(self.parse_identifier()?)) } } else { None }; - Ok(Expr::Function(Function { + Ok(Function { name, + uses_odbc_syntax: false, parameters, args: FunctionArguments::List(args), null_treatment, filter, over, within_group, - })) + }) } /// Optionally parses a null treatment clause. 
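(Aside: a hedged sketch of the ODBC `{ fn ... }` call syntax that `maybe_parse_odbc_fn_body` above enables; the function name and the use of `GenericDialect` are only examples, not taken from the patch's tests.)

```rust
use sqlparser::dialect::GenericDialect;
use sqlparser::parser::{Parser, ParserError};

fn main() -> Result<(), ParserError> {
    // ODBC escape syntax for a scalar function call.
    let sql = "SELECT { fn CONCAT('foo', 'bar') }";
    let stmts = Parser::parse_sql(&GenericDialect {}, sql)?;
    println!("{:#?}", stmts[0]);
    Ok(())
}
```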
fn parse_null_treatment(&mut self) -> Result, ParserError> { match self.parse_one_of_keywords(&[Keyword::RESPECT, Keyword::IGNORE]) { Some(keyword) => { - self.expect_keyword(Keyword::NULLS)?; + self.expect_keyword_is(Keyword::NULLS)?; Ok(match keyword { Keyword::RESPECT => Some(NullTreatment::RespectNulls), @@ -1436,6 +2140,7 @@ impl<'a> Parser<'a> { }; Ok(Expr::Function(Function { name, + uses_odbc_syntax: false, parameters: FunctionArguments::None, args, filter: None, @@ -1462,7 +2167,7 @@ impl<'a> Parser<'a> { let units = self.parse_window_frame_units()?; let (start_bound, end_bound) = if self.parse_keyword(Keyword::BETWEEN) { let start_bound = self.parse_window_frame_bound()?; - self.expect_keyword(Keyword::AND)?; + self.expect_keyword_is(Keyword::AND)?; let end_bound = Some(self.parse_window_frame_bound()?); (start_bound, end_bound) } else { @@ -1568,14 +2273,14 @@ impl<'a> Parser<'a> { let mut operand = None; if !self.parse_keyword(Keyword::WHEN) { operand = Some(Box::new(self.parse_expr()?)); - self.expect_keyword(Keyword::WHEN)?; + self.expect_keyword_is(Keyword::WHEN)?; } let mut conditions = vec![]; - let mut results = vec![]; loop { - conditions.push(self.parse_expr()?); - self.expect_keyword(Keyword::THEN)?; - results.push(self.parse_expr()?); + let condition = self.parse_expr()?; + self.expect_keyword_is(Keyword::THEN)?; + let result = self.parse_expr()?; + conditions.push(CaseWhen { condition, result }); if !self.parse_keyword(Keyword::WHEN) { break; } @@ -1585,18 +2290,17 @@ impl<'a> Parser<'a> { } else { None }; - self.expect_keyword(Keyword::END)?; + self.expect_keyword_is(Keyword::END)?; Ok(Expr::Case { operand, conditions, - results, else_result, }) } pub fn parse_optional_cast_format(&mut self) -> Result, ParserError> { if self.parse_keyword(Keyword::FORMAT) { - let value = self.parse_value()?; + let value = self.parse_value()?.value; match self.parse_optional_time_zone()? { Some(tz) => Ok(Some(CastFormat::ValueAtTimeZone(value, tz))), None => Ok(Some(CastFormat::Value(value))), @@ -1608,7 +2312,7 @@ impl<'a> Parser<'a> { pub fn parse_optional_time_zone(&mut self) -> Result, ParserError> { if self.parse_keywords(&[Keyword::AT, Keyword::TIME, Keyword::ZONE]) { - self.parse_value().map(Some) + self.parse_value().map(|v| Some(v.value)) } else { Ok(None) } @@ -1680,7 +2384,7 @@ impl<'a> Parser<'a> { pub fn parse_cast_expr(&mut self, kind: CastKind) -> Result { self.expect_token(&Token::LParen)?; let expr = self.parse_expr()?; - self.expect_keyword(Keyword::AS)?; + self.expect_keyword_is(Keyword::AS)?; let data_type = self.parse_data_type()?; let format = self.parse_optional_cast_format()?; self.expect_token(&Token::RParen)?; @@ -1737,7 +2441,7 @@ impl<'a> Parser<'a> { CeilFloorKind::DateTimeField(self.parse_date_time_field()?) } else if self.consume_token(&Token::Comma) { // Parse `CEIL/FLOOR(expr, scale)` - match self.parse_value()? { + match self.parse_value()?.value { Value::Number(n, s) => CeilFloorKind::Scale(Value::Number(n, s)), _ => { return Err(ParserError::ParserError( @@ -1770,7 +2474,7 @@ impl<'a> Parser<'a> { // Parse the subexpr till the IN keyword let expr = p.parse_subexpr(between_prec)?; - p.expect_keyword(Keyword::IN)?; + p.expect_keyword_is(Keyword::IN)?; let from = p.parse_expr()?; p.expect_token(&Token::RParen)?; Ok(Expr::Position { @@ -1782,12 +2486,20 @@ impl<'a> Parser<'a> { Some(expr) => Ok(expr), // Snowflake supports `position` as an ordinary function call // without the special `IN` syntax. 
- None => self.parse_function(ObjectName(vec![ident])), + None => self.parse_function(ObjectName::from(vec![ident])), } } - pub fn parse_substring_expr(&mut self) -> Result { - // PARSE SUBSTRING (EXPR [FROM 1] [FOR 3]) + // { SUBSTRING | SUBSTR } ( [FROM 1] [FOR 3]) + pub fn parse_substring(&mut self) -> Result { + let shorthand = match self.expect_one_of_keywords(&[Keyword::SUBSTR, Keyword::SUBSTRING])? { + Keyword::SUBSTR => true, + Keyword::SUBSTRING => false, + _ => { + self.prev_token(); + return self.expected("SUBSTR or SUBSTRING", self.peek_token()); + } + }; self.expect_token(&Token::LParen)?; let expr = self.parse_expr()?; let mut from_expr = None; @@ -1807,6 +2519,7 @@ impl<'a> Parser<'a> { substring_from: from_expr.map(Box::new), substring_for: to_expr.map(Box::new), special, + shorthand, }) } @@ -1814,9 +2527,9 @@ impl<'a> Parser<'a> { // PARSE OVERLAY (EXPR PLACING EXPR FROM 1 [FOR 3]) self.expect_token(&Token::LParen)?; let expr = self.parse_expr()?; - self.expect_keyword(Keyword::PLACING)?; + self.expect_keyword_is(Keyword::PLACING)?; let what_expr = self.parse_expr()?; - self.expect_keyword(Keyword::FROM)?; + self.expect_keyword_is(Keyword::FROM)?; let from_expr = self.parse_expr()?; let mut for_expr = None; if self.parse_keyword(Keyword::FOR) { @@ -1907,7 +2620,7 @@ impl<'a> Parser<'a> { if self.parse_keyword(Keyword::ERROR) { Ok(Some(ListAggOnOverflow::Error)) } else { - self.expect_keyword(Keyword::TRUNCATE)?; + self.expect_keyword_is(Keyword::TRUNCATE)?; let filler = match self.peek_token().token { Token::Word(w) if w.keyword == Keyword::WITH || w.keyword == Keyword::WITHOUT => @@ -1928,7 +2641,7 @@ impl<'a> Parser<'a> { if !with_count && !self.parse_keyword(Keyword::WITHOUT) { self.expected("either WITH or WITHOUT in LISTAGG", self.peek_token())?; } - self.expect_keyword(Keyword::COUNT)?; + self.expect_keyword_is(Keyword::COUNT)?; Ok(Some(ListAggOnOverflow::Truncate { filler, with_count })) } } else { @@ -1945,12 +2658,14 @@ impl<'a> Parser<'a> { match &next_token.token { Token::Word(w) => match w.keyword { Keyword::YEAR => Ok(DateTimeField::Year), + Keyword::YEARS => Ok(DateTimeField::Years), Keyword::MONTH => Ok(DateTimeField::Month), + Keyword::MONTHS => Ok(DateTimeField::Months), Keyword::WEEK => { let week_day = if dialect_of!(self is BigQueryDialect | GenericDialect) && self.consume_token(&Token::LParen) { - let week_day = self.parse_identifier(false)?; + let week_day = self.parse_identifier()?; self.expect_token(&Token::RParen)?; Some(week_day) } else { @@ -1958,14 +2673,19 @@ impl<'a> Parser<'a> { }; Ok(DateTimeField::Week(week_day)) } + Keyword::WEEKS => Ok(DateTimeField::Weeks), Keyword::DAY => Ok(DateTimeField::Day), Keyword::DAYOFWEEK => Ok(DateTimeField::DayOfWeek), Keyword::DAYOFYEAR => Ok(DateTimeField::DayOfYear), + Keyword::DAYS => Ok(DateTimeField::Days), Keyword::DATE => Ok(DateTimeField::Date), Keyword::DATETIME => Ok(DateTimeField::Datetime), Keyword::HOUR => Ok(DateTimeField::Hour), + Keyword::HOURS => Ok(DateTimeField::Hours), Keyword::MINUTE => Ok(DateTimeField::Minute), + Keyword::MINUTES => Ok(DateTimeField::Minutes), Keyword::SECOND => Ok(DateTimeField::Second), + Keyword::SECONDS => Ok(DateTimeField::Seconds), Keyword::CENTURY => Ok(DateTimeField::Century), Keyword::DECADE => Ok(DateTimeField::Decade), Keyword::DOY => Ok(DateTimeField::Doy), @@ -1992,14 +2712,14 @@ impl<'a> Parser<'a> { Keyword::TIMEZONE_REGION => Ok(DateTimeField::TimezoneRegion), _ if self.dialect.allow_extract_custom() => { self.prev_token(); - let custom = 
self.parse_identifier(false)?; + let custom = self.parse_identifier()?; Ok(DateTimeField::Custom(custom)) } _ => self.expected("date/time field", next_token), }, Token::SingleQuotedString(_) if self.dialect.allow_extract_single_quotes() => { self.prev_token(); - let custom = self.parse_identifier(false)?; + let custom = self.parse_identifier()?; Ok(DateTimeField::Custom(custom)) } _ => self.expected("date/time field", next_token), @@ -2028,20 +2748,45 @@ impl<'a> Parser<'a> { } } + /// Parse expression types that start with a left brace '{'. + /// Examples: + /// ```sql + /// -- Dictionary expr. + /// {'key1': 'value1', 'key2': 'value2'} + /// + /// -- Function call using the ODBC syntax. + /// { fn CONCAT('foo', 'bar') } + /// ``` + fn parse_lbrace_expr(&mut self) -> Result { + let token = self.expect_token(&Token::LBrace)?; + + if let Some(fn_expr) = self.maybe_parse_odbc_fn_body()? { + self.expect_token(&Token::RBrace)?; + return Ok(fn_expr); + } + + if self.dialect.supports_dictionary_syntax() { + self.prev_token(); // Put back the '{' + return self.parse_duckdb_struct_literal(); + } + + self.expected("an expression", token) + } + /// Parses fulltext expressions [`sqlparser::ast::Expr::MatchAgainst`] /// /// # Errors /// This method will raise an error if the column list is empty or with invalid identifiers, /// the match expression is not a literal string, or if the search modifier is not valid. pub fn parse_match_against(&mut self) -> Result { - let columns = self.parse_parenthesized_column_list(Mandatory, false)?; + let columns = self.parse_parenthesized_qualified_column_list(Mandatory, false)?; - self.expect_keyword(Keyword::AGAINST)?; + self.expect_keyword_is(Keyword::AGAINST)?; self.expect_token(&Token::LParen)?; // MySQL is too permissive about the value, IMO we can't validate it perfectly on syntax level. - let match_value = self.parse_value()?; + let match_value = self.parse_value()?.value; let in_natural_language_mode_keywords = &[ Keyword::IN, @@ -2120,7 +2865,7 @@ impl<'a> Parser<'a> { } else if self.dialect.require_interval_qualifier() { return parser_err!( "INTERVAL requires a unit after the literal value", - self.peek_token().location + self.peek_token().span.start ); } else { None @@ -2167,12 +2912,19 @@ impl<'a> Parser<'a> { matches!( word.keyword, Keyword::YEAR + | Keyword::YEARS | Keyword::MONTH + | Keyword::MONTHS | Keyword::WEEK + | Keyword::WEEKS | Keyword::DAY + | Keyword::DAYS | Keyword::HOUR + | Keyword::HOURS | Keyword::MINUTE + | Keyword::MINUTES | Keyword::SECOND + | Keyword::SECONDS | Keyword::CENTURY | Keyword::DECADE | Keyword::DOW @@ -2199,7 +2951,6 @@ impl<'a> Parser<'a> { } } - /// Bigquery specific: Parse a struct literal /// Syntax /// ```sql /// -- typed @@ -2207,13 +2958,19 @@ impl<'a> Parser<'a> { /// -- typeless /// STRUCT( expr1 [AS field_name] [, ... ]) /// ``` - fn parse_bigquery_struct_literal(&mut self) -> Result { + fn parse_struct_literal(&mut self) -> Result { + // Parse the fields definition if exist `<[field_name] field_type, ...>` + self.prev_token(); let (fields, trailing_bracket) = self.parse_struct_type_def(Self::parse_struct_field_def)?; if trailing_bracket.0 { - return parser_err!("unmatched > in STRUCT literal", self.peek_token().location); + return parser_err!( + "unmatched > in STRUCT literal", + self.peek_token().span.start + ); } + // Parse the struct values `(expr1 [, ... 
])` self.expect_token(&Token::LParen)?; let values = self .parse_comma_separated(|parser| parser.parse_struct_field_expr(!fields.is_empty()))?; @@ -2222,13 +2979,13 @@ impl<'a> Parser<'a> { Ok(Expr::Struct { values, fields }) } - /// Parse an expression value for a bigquery struct [1] + /// Parse an expression value for a struct literal /// Syntax /// ```sql /// expr [AS name] /// ``` /// - /// Parameter typed_syntax is set to true if the expression + /// For biquery [1], Parameter typed_syntax is set to true if the expression /// is to be parsed as a field expression declared using typed /// struct syntax [2], and false if using typeless struct syntax [3]. /// @@ -2241,10 +2998,10 @@ impl<'a> Parser<'a> { if typed_syntax { return parser_err!("Typed syntax does not allow AS", { self.prev_token(); - self.peek_token().location + self.peek_token().span.start }); } - let field_name = self.parse_identifier(false)?; + let field_name = self.parse_identifier()?; Ok(Expr::Named { expr: expr.into(), name: field_name, @@ -2274,7 +3031,7 @@ impl<'a> Parser<'a> { F: FnMut(&mut Parser<'a>) -> Result<(StructField, MatchedTrailingBracket), ParserError>, { let start_token = self.peek_token(); - self.expect_keyword(Keyword::STRUCT)?; + self.expect_keyword_is(Keyword::STRUCT)?; // Nothing to do if we have no type information. if Token::Lt != self.peek_token() { @@ -2294,7 +3051,7 @@ impl<'a> Parser<'a> { // we've matched all field types for the current struct. // e.g. this is invalid syntax `STRUCT>>, INT>(NULL)` if trailing_bracket.0 { - return parser_err!("unmatched > in STRUCT definition", start_token.location); + return parser_err!("unmatched > in STRUCT definition", start_token.span.start); } }; @@ -2306,10 +3063,10 @@ impl<'a> Parser<'a> { /// Duckdb Struct Data Type fn parse_duckdb_struct_type_def(&mut self) -> Result, ParserError> { - self.expect_keyword(Keyword::STRUCT)?; + self.expect_keyword_is(Keyword::STRUCT)?; self.expect_token(&Token::LParen)?; let struct_body = self.parse_comma_separated(|parser| { - let field_name = parser.parse_identifier(false)?; + let field_name = parser.parse_identifier()?; let field_type = parser.parse_data_type()?; Ok(StructField { @@ -2343,7 +3100,7 @@ impl<'a> Parser<'a> { let field_name = if is_anonymous_field { None } else { - Some(self.parse_identifier(false)?) + Some(self.parse_identifier()?) 
}; let (field_type, trailing_bracket) = self.parse_data_type_helper()?; @@ -2367,13 +3124,13 @@ impl<'a> Parser<'a> { /// /// [1]: https://duckdb.org/docs/sql/data_types/union.html fn parse_union_type_def(&mut self) -> Result, ParserError> { - self.expect_keyword(Keyword::UNION)?; + self.expect_keyword_is(Keyword::UNION)?; self.expect_token(&Token::LParen)?; let fields = self.parse_comma_separated(|p| { Ok(UnionField { - field_name: p.parse_identifier(false)?, + field_name: p.parse_identifier()?, field_type: p.parse_data_type()?, }) })?; @@ -2395,7 +3152,8 @@ impl<'a> Parser<'a> { fn parse_duckdb_struct_literal(&mut self) -> Result { self.expect_token(&Token::LBrace)?; - let fields = self.parse_comma_separated(Self::parse_duckdb_dictionary_field)?; + let fields = + self.parse_comma_separated0(Self::parse_duckdb_dictionary_field, Token::RBrace)?; self.expect_token(&Token::RBrace)?; @@ -2412,7 +3170,7 @@ impl<'a> Parser<'a> { /// /// [dictionary]: https://duckdb.org/docs/sql/data_types/struct#creating-structs fn parse_duckdb_dictionary_field(&mut self) -> Result { - let key = self.parse_identifier(false)?; + let key = self.parse_identifier()?; self.expect_token(&Token::Colon)?; @@ -2472,7 +3230,7 @@ impl<'a> Parser<'a> { /// /// [map]: https://clickhouse.com/docs/en/sql-reference/data-types/map fn parse_click_house_map_def(&mut self) -> Result<(DataType, DataType), ParserError> { - self.expect_keyword(Keyword::MAP)?; + self.expect_keyword_is(Keyword::MAP)?; self.expect_token(&Token::LParen)?; let key_data_type = self.parse_data_type()?; self.expect_token(&Token::Comma)?; @@ -2492,7 +3250,7 @@ impl<'a> Parser<'a> { /// /// [tuple]: https://clickhouse.com/docs/en/sql-reference/data-types/tuple fn parse_click_house_tuple_def(&mut self) -> Result, ParserError> { - self.expect_keyword(Keyword::TUPLE)?; + self.expect_keyword_is(Keyword::TUPLE)?; self.expect_token(&Token::LParen)?; let mut field_defs = vec![]; loop { @@ -2541,10 +3299,16 @@ impl<'a> Parser<'a> { return infix; } - let mut tok = self.next_token(); - let regular_binary_operator = match &mut tok.token { + let dialect = self.dialect; + + self.advance_token(); + let tok = self.get_current_token(); + let tok_index = self.get_current_index(); + let span = tok.span; + let regular_binary_operator = match &tok.token { Token::Spaceship => Some(BinaryOperator::Spaceship), Token::DoubleEq => Some(BinaryOperator::Eq), + Token::Assignment => Some(BinaryOperator::Assignment), Token::Eq => Some(BinaryOperator::Eq), Token::Neq => Some(BinaryOperator::NotEq), Token::Gt => Some(BinaryOperator::Gt), @@ -2560,7 +3324,7 @@ impl<'a> Parser<'a> { Token::Caret => { // In PostgreSQL, ^ stands for the exponentiation operation, // and # stands for XOR. 
See https://www.postgresql.org/docs/current/functions-math.html - if dialect_of!(self is PostgreSqlDialect) { + if dialect_is!(dialect is PostgreSqlDialect) { Some(BinaryOperator::PGExp) } else { Some(BinaryOperator::BitwiseXor) @@ -2568,22 +3332,25 @@ impl<'a> Parser<'a> { } Token::Ampersand => Some(BinaryOperator::BitwiseAnd), Token::Div => Some(BinaryOperator::Divide), - Token::DuckIntDiv if dialect_of!(self is DuckDbDialect | GenericDialect) => { + Token::DuckIntDiv if dialect_is!(dialect is DuckDbDialect | GenericDialect) => { Some(BinaryOperator::DuckIntegerDivide) } - Token::ShiftLeft if dialect_of!(self is PostgreSqlDialect | DuckDbDialect | GenericDialect) => { + Token::ShiftLeft if dialect_is!(dialect is PostgreSqlDialect | DuckDbDialect | GenericDialect | RedshiftSqlDialect) => { Some(BinaryOperator::PGBitwiseShiftLeft) } - Token::ShiftRight if dialect_of!(self is PostgreSqlDialect | DuckDbDialect | GenericDialect) => { + Token::ShiftRight if dialect_is!(dialect is PostgreSqlDialect | DuckDbDialect | GenericDialect | RedshiftSqlDialect) => { Some(BinaryOperator::PGBitwiseShiftRight) } - Token::Sharp if dialect_of!(self is PostgreSqlDialect) => { + Token::Sharp if dialect_is!(dialect is PostgreSqlDialect | RedshiftSqlDialect) => { Some(BinaryOperator::PGBitwiseXor) } - Token::Overlap if dialect_of!(self is PostgreSqlDialect | GenericDialect) => { + Token::Overlap if dialect_is!(dialect is PostgreSqlDialect | RedshiftSqlDialect) => { Some(BinaryOperator::PGOverlap) } - Token::CaretAt if dialect_of!(self is PostgreSqlDialect | GenericDialect) => { + Token::Overlap if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => { + Some(BinaryOperator::PGOverlap) + } + Token::CaretAt if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => { Some(BinaryOperator::PGStartsWith) } Token::Tilde => Some(BinaryOperator::PGRegexMatch), @@ -2606,13 +3373,65 @@ impl<'a> Parser<'a> { Token::Question => Some(BinaryOperator::Question), Token::QuestionAnd => Some(BinaryOperator::QuestionAnd), Token::QuestionPipe => Some(BinaryOperator::QuestionPipe), - Token::CustomBinaryOperator(s) => Some(BinaryOperator::Custom(core::mem::take(s))), + Token::CustomBinaryOperator(s) => Some(BinaryOperator::Custom(s.clone())), + Token::DoubleSharp if self.dialect.supports_geometric_types() => { + Some(BinaryOperator::DoubleHash) + } + + Token::AmpersandLeftAngleBracket if self.dialect.supports_geometric_types() => { + Some(BinaryOperator::AndLt) + } + Token::AmpersandRightAngleBracket if self.dialect.supports_geometric_types() => { + Some(BinaryOperator::AndGt) + } + Token::QuestionMarkDash if self.dialect.supports_geometric_types() => { + Some(BinaryOperator::QuestionDash) + } + Token::AmpersandLeftAngleBracketVerticalBar + if self.dialect.supports_geometric_types() => + { + Some(BinaryOperator::AndLtPipe) + } + Token::VerticalBarAmpersandRightAngleBracket + if self.dialect.supports_geometric_types() => + { + Some(BinaryOperator::PipeAndGt) + } + Token::TwoWayArrow if self.dialect.supports_geometric_types() => { + Some(BinaryOperator::LtDashGt) + } + Token::LeftAngleBracketCaret if self.dialect.supports_geometric_types() => { + Some(BinaryOperator::LtCaret) + } + Token::RightAngleBracketCaret if self.dialect.supports_geometric_types() => { + Some(BinaryOperator::GtCaret) + } + Token::QuestionMarkSharp if self.dialect.supports_geometric_types() => { + Some(BinaryOperator::QuestionHash) + } + Token::QuestionMarkDoubleVerticalBar if self.dialect.supports_geometric_types() => { + 
Some(BinaryOperator::QuestionDoublePipe) + } + Token::QuestionMarkDashVerticalBar if self.dialect.supports_geometric_types() => { + Some(BinaryOperator::QuestionDashPipe) + } + Token::TildeEqual if self.dialect.supports_geometric_types() => { + Some(BinaryOperator::TildeEq) + } + Token::ShiftLeftVerticalBar if self.dialect.supports_geometric_types() => { + Some(BinaryOperator::LtLtPipe) + } + Token::VerticalBarShiftRight if self.dialect.supports_geometric_types() => { + Some(BinaryOperator::PipeGtGt) + } + Token::AtSign if self.dialect.supports_geometric_types() => Some(BinaryOperator::At), Token::Word(w) => match w.keyword { Keyword::AND => Some(BinaryOperator::And), Keyword::OR => Some(BinaryOperator::Or), Keyword::XOR => Some(BinaryOperator::Xor), - Keyword::OPERATOR if dialect_of!(self is PostgreSqlDialect | GenericDialect) => { + Keyword::OVERLAPS => Some(BinaryOperator::Overlaps), + Keyword::OPERATOR if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => { self.expect_token(&Token::LParen)?; // there are special rules for operator names in // postgres so we can not use 'parse_object' @@ -2620,7 +3439,8 @@ impl<'a> Parser<'a> { // See https://www.postgresql.org/docs/current/sql-createoperator.html let mut idents = vec![]; loop { - idents.push(self.next_token().to_string()); + self.advance_token(); + idents.push(self.get_current_token().to_string()); if !self.consume_token(&Token::Period) { break; } @@ -2633,6 +3453,7 @@ impl<'a> Parser<'a> { _ => None, }; + let tok = self.token_at(tok_index); if let Some(op) = regular_binary_operator { if let Some(keyword) = self.parse_one_of_keywords(&[Keyword::ANY, Keyword::ALL, Keyword::SOME]) @@ -2663,7 +3484,7 @@ impl<'a> Parser<'a> { format!( "Expected one of [=, >, <, =>, =<, !=] as comparison operator, found: {op}" ), - tok.location + span.start ); }; @@ -2714,9 +3535,11 @@ impl<'a> Parser<'a> { { let expr2 = self.parse_expr()?; Ok(Expr::IsNotDistinctFrom(Box::new(expr), Box::new(expr2))) + } else if let Ok(is_normalized) = self.parse_unicode_is_normalized(expr) { + Ok(is_normalized) } else { self.expected( - "[NOT] NULL or TRUE|FALSE or [NOT] DISTINCT FROM after IS", + "[NOT] NULL | TRUE | FALSE | DISTINCT | [form] NORMALIZED FROM after IS", self.peek_token(), ) } @@ -2789,39 +3612,31 @@ impl<'a> Parser<'a> { // Can only happen if `get_next_precedence` got out of sync with this function _ => parser_err!( format!("No infix parser for token {:?}", tok.token), - tok.location + tok.span.start ), } - } else if Token::DoubleColon == tok { + } else if Token::DoubleColon == *tok { Ok(Expr::Cast { kind: CastKind::DoubleColon, expr: Box::new(expr), data_type: self.parse_data_type()?, format: None, }) - } else if Token::ExclamationMark == tok { - // PostgreSQL factorial operation + } else if Token::ExclamationMark == *tok && self.dialect.supports_factorial_operator() { Ok(Expr::UnaryOp { op: UnaryOperator::PGPostfixFactorial, expr: Box::new(expr), }) - } else if Token::LBracket == tok { - if dialect_of!(self is PostgreSqlDialect | DuckDbDialect | GenericDialect) { - self.parse_subscript(expr) - } else if dialect_of!(self is SnowflakeDialect) { - self.prev_token(); - self.parse_json_access(expr) - } else { - self.parse_map_access(expr) - } - } else if dialect_of!(self is SnowflakeDialect | GenericDialect) && Token::Colon == tok { + } else if Token::LBracket == *tok && self.dialect.supports_partiql() + || (dialect_of!(self is SnowflakeDialect | GenericDialect) && Token::Colon == *tok) + { self.prev_token(); self.parse_json_access(expr) } else { 
// Can only happen if `get_next_precedence` got out of sync with this function parser_err!( format!("No infix parser for token {:?}", tok.token), - tok.location + tok.span.start ) } } @@ -2908,15 +3723,24 @@ impl<'a> Parser<'a> { }) } + /// Parse a multi-dimension array accessing like `[1:3][1][1]` + pub fn parse_multi_dim_subscript( + &mut self, + chain: &mut Vec, + ) -> Result<(), ParserError> { + while self.consume_token(&Token::LBracket) { + self.parse_subscript(chain)?; + } + Ok(()) + } + /// Parses an array subscript like `[1:3]` /// /// Parser is right after `[` - pub fn parse_subscript(&mut self, expr: Expr) -> Result { + fn parse_subscript(&mut self, chain: &mut Vec) -> Result<(), ParserError> { let subscript = self.parse_subscript_inner()?; - Ok(Expr::Subscript { - expr: Box::new(expr), - subscript: Box::new(subscript), - }) + chain.push(AccessExpr::Subscript(subscript)); + Ok(()) } fn parse_json_path_object_key(&mut self) -> Result { @@ -2944,6 +3768,14 @@ impl<'a> Parser<'a> { } fn parse_json_access(&mut self, expr: Expr) -> Result { + let path = self.parse_json_path()?; + Ok(Expr::JsonAccess { + value: Box::new(expr), + path, + }) + } + + fn parse_json_path(&mut self) -> Result { let mut path = Vec::new(); loop { match self.next_token().token { @@ -2967,50 +3799,7 @@ impl<'a> Parser<'a> { } debug_assert!(!path.is_empty()); - Ok(Expr::JsonAccess { - value: Box::new(expr), - path: JsonPath { path }, - }) - } - - pub fn parse_map_access(&mut self, expr: Expr) -> Result { - let key = self.parse_expr()?; - self.expect_token(&Token::RBracket)?; - - let mut keys = vec![MapAccessKey { - key, - syntax: MapAccessSyntax::Bracket, - }]; - loop { - let key = match self.peek_token().token { - Token::LBracket => { - self.next_token(); // consume `[` - let key = self.parse_expr()?; - self.expect_token(&Token::RBracket)?; - MapAccessKey { - key, - syntax: MapAccessSyntax::Bracket, - } - } - // Access on BigQuery nested and repeated expressions can - // mix notations in the same expression. - // https://cloud.google.com/bigquery/docs/nested-repeated#query_nested_and_repeated_columns - Token::Period if dialect_of!(self is BigQueryDialect) => { - self.next_token(); // consume `.` - MapAccessKey { - key: self.parse_expr()?, - syntax: MapAccessSyntax::Period, - } - } - _ => break, - }; - keys.push(key); - } - - Ok(Expr::MapAccess { - column: Box::new(expr), - keys, - }) + Ok(JsonPath { path }) } /// Parses the parens following the `[ NOT ] IN` operator. @@ -3028,15 +3817,13 @@ impl<'a> Parser<'a> { }); } self.expect_token(&Token::LParen)?; - let in_op = if self.parse_keyword(Keyword::SELECT) || self.parse_keyword(Keyword::WITH) { - self.prev_token(); - Expr::InSubquery { + let in_op = match self.maybe_parse(|p| p.parse_query_body(p.dialect.prec_unknown()))? { + Some(subquery) => Expr::InSubquery { expr: Box::new(expr), - subquery: self.parse_query()?, + subquery, negated, - } - } else { - Expr::InList { + }, + None => Expr::InList { expr: Box::new(expr), list: if self.dialect.supports_in_empty_list() { self.parse_comma_separated0(Parser::parse_expr, Token::RParen)? @@ -3044,7 +3831,7 @@ impl<'a> Parser<'a> { self.parse_comma_separated(Parser::parse_expr)? }, negated, - } + }, }; self.expect_token(&Token::RParen)?; Ok(in_op) @@ -3055,7 +3842,7 @@ impl<'a> Parser<'a> { // Stop parsing subexpressions for and on tokens with // precedence lower than that of `BETWEEN`, such as `AND`, `IS`, etc. 
let low = self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?; - self.expect_keyword(Keyword::AND)?; + self.expect_keyword_is(Keyword::AND)?; let high = self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?; Ok(Expr::Between { expr: Box::new(expr), @@ -3065,7 +3852,7 @@ impl<'a> Parser<'a> { }) } - /// Parse a postgresql casting style which is in the form of `expr::datatype`. + /// Parse a PostgreSQL casting style which is in the form of `expr::datatype`. pub fn parse_pg_cast(&mut self, expr: Expr) -> Result { Ok(Expr::Cast { kind: CastKind::DoubleColon, @@ -3080,12 +3867,26 @@ impl<'a> Parser<'a> { self.dialect.get_next_precedence_default(self) } + /// Return the token at the given location, or EOF if the index is beyond + /// the length of the current set of tokens. + pub fn token_at(&self, index: usize) -> &TokenWithSpan { + self.tokens.get(index).unwrap_or(&EOF_TOKEN) + } + /// Return the first non-whitespace token that has not yet been processed - /// (or None if reached end-of-file) - pub fn peek_token(&self) -> TokenWithLocation { + /// or Token::EOF + /// + /// See [`Self::peek_token_ref`] to avoid the copy. + pub fn peek_token(&self) -> TokenWithSpan { self.peek_nth_token(0) } + /// Return a reference to the first non-whitespace token that has not yet + /// been processed or Token::EOF + pub fn peek_token_ref(&self) -> &TokenWithSpan { + self.peek_nth_token_ref(0) + } + /// Returns the `N` next non-whitespace tokens that have not yet been /// processed. /// @@ -3117,41 +3918,63 @@ impl<'a> Parser<'a> { /// yet been processed. /// /// See [`Self::peek_token`] for an example. - pub fn peek_tokens_with_location(&self) -> [TokenWithLocation; N] { + pub fn peek_tokens_with_location(&self) -> [TokenWithSpan; N] { let mut index = self.index; core::array::from_fn(|_| loop { let token = self.tokens.get(index); index += 1; - if let Some(TokenWithLocation { + if let Some(TokenWithSpan { token: Token::Whitespace(_), - location: _, + span: _, }) = token { continue; } - break token.cloned().unwrap_or(TokenWithLocation { + break token.cloned().unwrap_or(TokenWithSpan { token: Token::EOF, - location: Location { line: 0, column: 0 }, + span: Span::empty(), }); }) } + /// Returns references to the `N` next non-whitespace tokens + /// that have not yet been processed. + /// + /// See [`Self::peek_tokens`] for an example. 
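(Aside: a small sketch of the multi-dimensional subscript parsing (`parse_multi_dim_subscript`) from this hunk; `DuckDbDialect`, the column name, and the slice bounds are assumptions chosen for illustration.)

```rust
use sqlparser::dialect::DuckDbDialect;
use sqlparser::parser::{Parser, ParserError};

fn main() -> Result<(), ParserError> {
    // `[1:3][1]` is parsed into a single access chain on `arr`.
    let stmts = Parser::parse_sql(&DuckDbDialect {}, "SELECT arr[1:3][1] FROM t")?;
    println!("{:#?}", stmts[0]);
    Ok(())
}
```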
+ pub fn peek_tokens_ref(&self) -> [&TokenWithSpan; N] { + let mut index = self.index; + core::array::from_fn(|_| loop { + let token = self.tokens.get(index); + index += 1; + if let Some(TokenWithSpan { + token: Token::Whitespace(_), + span: _, + }) = token + { + continue; + } + break token.unwrap_or(&EOF_TOKEN); + }) + } + + /// Return nth non-whitespace token that has not yet been processed + pub fn peek_nth_token(&self, n: usize) -> TokenWithSpan { + self.peek_nth_token_ref(n).clone() + } + /// Return nth non-whitespace token that has not yet been processed - pub fn peek_nth_token(&self, mut n: usize) -> TokenWithLocation { + pub fn peek_nth_token_ref(&self, mut n: usize) -> &TokenWithSpan { let mut index = self.index; loop { index += 1; match self.tokens.get(index - 1) { - Some(TokenWithLocation { + Some(TokenWithSpan { token: Token::Whitespace(_), - location: _, + span: _, }) => continue, non_whitespace => { if n == 0 { - return non_whitespace.cloned().unwrap_or(TokenWithLocation { - token: Token::EOF, - location: Location { line: 0, column: 0 }, - }); + return non_whitespace.unwrap_or(&EOF_TOKEN); } n -= 1; } @@ -3161,57 +3984,104 @@ impl<'a> Parser<'a> { /// Return the first token, possibly whitespace, that has not yet been processed /// (or None if reached end-of-file). - pub fn peek_token_no_skip(&self) -> TokenWithLocation { + pub fn peek_token_no_skip(&self) -> TokenWithSpan { self.peek_nth_token_no_skip(0) } /// Return nth token, possibly whitespace, that has not yet been processed. - pub fn peek_nth_token_no_skip(&self, n: usize) -> TokenWithLocation { + pub fn peek_nth_token_no_skip(&self, n: usize) -> TokenWithSpan { self.tokens .get(self.index + n) .cloned() - .unwrap_or(TokenWithLocation { + .unwrap_or(TokenWithSpan { token: Token::EOF, - location: Location { line: 0, column: 0 }, + span: Span::empty(), }) } - /// Return the first non-whitespace token that has not yet been processed - /// (or None if reached end-of-file) and mark it as processed. OK to call - /// repeatedly after reaching EOF. - pub fn next_token(&mut self) -> TokenWithLocation { - loop { - self.index += 1; - match self.tokens.get(self.index - 1) { - Some(TokenWithLocation { - token: Token::Whitespace(_), - location: _, - }) => continue, - token => { - return token - .cloned() - .unwrap_or_else(|| TokenWithLocation::wrap(Token::EOF)) - } - } - } + /// Return true if the next tokens exactly `expected` + /// + /// Does not advance the current token. + fn peek_keywords(&mut self, expected: &[Keyword]) -> bool { + let index = self.index; + let matched = self.parse_keywords(expected); + self.index = index; + matched + } + + /// Advances to the next non-whitespace token and returns a copy. + /// + /// Please use [`Self::advance_token`] and [`Self::get_current_token`] to + /// avoid the copy. + pub fn next_token(&mut self) -> TokenWithSpan { + self.advance_token(); + self.get_current_token().clone() + } + + /// Returns the index of the current token + /// + /// This can be used with APIs that expect an index, such as + /// [`Self::token_at`] + pub fn get_current_index(&self) -> usize { + self.index.saturating_sub(1) } - /// Return the first unprocessed token, possibly whitespace. - pub fn next_token_no_skip(&mut self) -> Option<&TokenWithLocation> { + /// Return the next unprocessed token, possibly whitespace. + pub fn next_token_no_skip(&mut self) -> Option<&TokenWithSpan> { self.index += 1; self.tokens.get(self.index - 1) } - /// Push back the last one non-whitespace token. 
Must be called after - /// `next_token()`, otherwise might panic. OK to call after - /// `next_token()` indicates an EOF. + /// Advances the current token to the next non-whitespace token + /// + /// See [`Self::get_current_token`] to get the current token after advancing + pub fn advance_token(&mut self) { + loop { + self.index += 1; + match self.tokens.get(self.index - 1) { + Some(TokenWithSpan { + token: Token::Whitespace(_), + span: _, + }) => continue, + _ => break, + } + } + } + + /// Returns a reference to the current token + /// + /// Does not advance the current token. + pub fn get_current_token(&self) -> &TokenWithSpan { + self.token_at(self.index.saturating_sub(1)) + } + + /// Returns a reference to the previous token + /// + /// Does not advance the current token. + pub fn get_previous_token(&self) -> &TokenWithSpan { + self.token_at(self.index.saturating_sub(2)) + } + + /// Returns a reference to the next token + /// + /// Does not advance the current token. + pub fn get_next_token(&self) -> &TokenWithSpan { + self.token_at(self.index) + } + + /// Seek back the last one non-whitespace token. + /// + /// Must be called after `next_token()`, otherwise might panic. OK to call + /// after `next_token()` indicates an EOF. + /// + // TODO rename to backup_token and deprecate prev_token? pub fn prev_token(&mut self) { loop { assert!(self.index > 0); self.index -= 1; - if let Some(TokenWithLocation { + if let Some(TokenWithSpan { token: Token::Whitespace(_), - location: _, + span: _, }) = self.tokens.get(self.index) { continue; @@ -3221,10 +4091,27 @@ impl<'a> Parser<'a> { } /// Report `found` was encountered instead of `expected` - pub fn expected(&self, expected: &str, found: TokenWithLocation) -> Result { + pub fn expected(&self, expected: &str, found: TokenWithSpan) -> Result { + parser_err!( + format!("Expected: {expected}, found: {found}"), + found.span.start + ) + } + + /// report `found` was encountered instead of `expected` + pub fn expected_ref(&self, expected: &str, found: &TokenWithSpan) -> Result { + parser_err!( + format!("Expected: {expected}, found: {found}"), + found.span.start + ) + } + + /// Report that the token at `index` was found instead of `expected`. + pub fn expected_at(&self, expected: &str, index: usize) -> Result { + let found = self.tokens.get(index).unwrap_or(&EOF_TOKEN); parser_err!( format!("Expected: {expected}, found: {found}"), - found.location + found.span.start ) } @@ -3232,15 +4119,19 @@ impl<'a> Parser<'a> { /// true. Otherwise, no tokens are consumed and returns false. #[must_use] pub fn parse_keyword(&mut self, expected: Keyword) -> bool { - match self.peek_token().token { - Token::Word(w) if expected == w.keyword => { - self.next_token(); - true - } - _ => false, + if self.peek_keyword(expected) { + self.advance_token(); + true + } else { + false } } + #[must_use] + pub fn peek_keyword(&self, expected: Keyword) -> bool { + matches!(&self.peek_token_ref().token, Token::Word(w) if expected == w.keyword) + } + /// If the current token is the `expected` keyword followed by /// specified tokens, consume them and returns true. /// Otherwise, no tokens are consumed and returns false. @@ -3249,16 +4140,16 @@ impl<'a> Parser<'a> { /// not be efficient as it does a loop on the tokens with `peek_nth_token` /// each time. 
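(Aside: a sketch of driving the token stream with the reference-based accessors introduced above (`peek_token_ref`, `advance_token`, `get_current_token`), which avoid cloning each `TokenWithSpan`; the input SQL is arbitrary.)

```rust
use sqlparser::dialect::GenericDialect;
use sqlparser::parser::{Parser, ParserError};
use sqlparser::tokenizer::Token;

fn main() -> Result<(), ParserError> {
    let dialect = GenericDialect {};
    let mut parser = Parser::new(&dialect).try_with_sql("SELECT 1 + 2")?;

    // Walk every non-whitespace token without cloning, until EOF.
    while parser.peek_token_ref().token != Token::EOF {
        parser.advance_token();
        println!("{:?}", parser.get_current_token().token);
    }
    Ok(())
}
```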
pub fn parse_keyword_with_tokens(&mut self, expected: Keyword, tokens: &[Token]) -> bool { - match self.peek_token().token { + match &self.peek_token_ref().token { Token::Word(w) if expected == w.keyword => { for (idx, token) in tokens.iter().enumerate() { - if self.peek_nth_token(idx + 1).token != *token { + if self.peek_nth_token_ref(idx + 1).token != *token { return false; } } // consume all tokens for _ in 0..(tokens.len() + 1) { - self.next_token(); + self.advance_token(); } true } @@ -3283,18 +4174,30 @@ impl<'a> Parser<'a> { true } + /// If the current token is one of the given `keywords`, returns the keyword + /// that matches, without consuming the token. Otherwise, returns [`None`]. + #[must_use] + pub fn peek_one_of_keywords(&self, keywords: &[Keyword]) -> Option { + for keyword in keywords { + if self.peek_keyword(*keyword) { + return Some(*keyword); + } + } + None + } + /// If the current token is one of the given `keywords`, consume the token /// and return the keyword that matches. Otherwise, no tokens are consumed /// and returns [`None`]. #[must_use] pub fn parse_one_of_keywords(&mut self, keywords: &[Keyword]) -> Option { - match self.peek_token().token { + match &self.peek_token_ref().token { Token::Word(w) => { keywords .iter() .find(|keyword| **keyword == w.keyword) .map(|keyword| { - self.next_token(); + self.advance_token(); *keyword }) } @@ -3309,20 +4212,35 @@ impl<'a> Parser<'a> { Ok(keyword) } else { let keywords: Vec = keywords.iter().map(|x| format!("{x:?}")).collect(); - self.expected( + self.expected_ref( &format!("one of {}", keywords.join(" or ")), - self.peek_token(), + self.peek_token_ref(), ) } } /// If the current token is the `expected` keyword, consume the token. /// Otherwise, return an error. - pub fn expect_keyword(&mut self, expected: Keyword) -> Result<(), ParserError> { + /// + // todo deprecate in favor of expected_keyword_is + pub fn expect_keyword(&mut self, expected: Keyword) -> Result { + if self.parse_keyword(expected) { + Ok(self.get_current_token().clone()) + } else { + self.expected_ref(format!("{:?}", &expected).as_str(), self.peek_token_ref()) + } + } + + /// If the current token is the `expected` keyword, consume the token. + /// Otherwise, return an error. + /// + /// This differs from expect_keyword only in that the matched keyword + /// token is not returned. + pub fn expect_keyword_is(&mut self, expected: Keyword) -> Result<(), ParserError> { if self.parse_keyword(expected) { Ok(()) } else { - self.expected(format!("{:?}", &expected).as_str(), self.peek_token()) + self.expected_ref(format!("{:?}", &expected).as_str(), self.peek_token_ref()) } } @@ -3330,16 +4248,18 @@ impl<'a> Parser<'a> { /// sequence, consume them and returns Ok. Otherwise, return an Error. 
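(Aside: the split between `expect_keyword`, which returns the matched token with its span, and `expect_keyword_is`, which returns `()`, shown as a hedged sketch against a hand-constructed parser; the SQL fragment is arbitrary and assumes a release where both methods are public.)

```rust
use sqlparser::dialect::GenericDialect;
use sqlparser::keywords::Keyword;
use sqlparser::parser::{Parser, ParserError};

fn main() -> Result<(), ParserError> {
    let dialect = GenericDialect {};
    let mut parser = Parser::new(&dialect).try_with_sql("SELECT FROM")?;

    // Keep the matched token when the caller needs its span...
    let select_tok = parser.expect_keyword(Keyword::SELECT)?;
    println!("matched {:?} at {:?}", select_tok.token, select_tok.span.start);

    // ...or just assert that the keyword is present.
    parser.expect_keyword_is(Keyword::FROM)?;
    Ok(())
}
```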
pub fn expect_keywords(&mut self, expected: &[Keyword]) -> Result<(), ParserError> { for &kw in expected { - self.expect_keyword(kw)?; + self.expect_keyword_is(kw)?; } Ok(()) } /// Consume the next token if it matches the expected token, otherwise return false + /// + /// See [Self::advance_token] to consume the token unconditionally #[must_use] pub fn consume_token(&mut self, expected: &Token) -> bool { - if self.peek_token() == *expected { - self.next_token(); + if self.peek_token_ref() == expected { + self.advance_token(); true } else { false @@ -3362,11 +4282,11 @@ impl<'a> Parser<'a> { } /// Bail out if the current token is not an expected keyword, or consume it if it is - pub fn expect_token(&mut self, expected: &Token) -> Result<(), ParserError> { - if self.consume_token(expected) { - Ok(()) + pub fn expect_token(&mut self, expected: &Token) -> Result { + if self.peek_token_ref() == expected { + Ok(self.next_token()) } else { - self.expected(&expected.to_string(), self.peek_token()) + self.expected_ref(&expected.to_string(), self.peek_token_ref()) } } @@ -3388,19 +4308,18 @@ impl<'a> Parser<'a> { // e.g. `SELECT 1, 2, FROM t` // https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#trailing_commas // https://docs.snowflake.com/en/release-notes/2024/8_11#select-supports-trailing-commas - // - // This pattern could be captured better with RAII type semantics, but it's quite a bit of - // code to add for just one case, so we'll just do it manually here. - let old_value = self.options.trailing_commas; - self.options.trailing_commas |= self.dialect.supports_projection_trailing_commas(); - let ret = self.parse_comma_separated(|p| p.parse_select_item()); - self.options.trailing_commas = old_value; + let trailing_commas = + self.options.trailing_commas | self.dialect.supports_projection_trailing_commas(); - ret + self.parse_comma_separated_with_trailing_commas( + |p| p.parse_select_item(), + trailing_commas, + Self::is_reserved_for_column_alias, + ) } - pub fn parse_actions_list(&mut self) -> Result, ParserError> { + pub fn parse_actions_list(&mut self) -> Result, ParserError> { let mut values = vec![]; loop { values.push(self.parse_grant_permission()?); @@ -3423,38 +4342,111 @@ impl<'a> Parser<'a> { Ok(values) } + /// Parse a list of [TableWithJoins] + fn parse_table_with_joins(&mut self) -> Result, ParserError> { + let trailing_commas = self.dialect.supports_from_trailing_commas(); + + self.parse_comma_separated_with_trailing_commas( + Parser::parse_table_and_joins, + trailing_commas, + |kw, _parser| { + self.dialect + .get_reserved_keywords_for_table_factor() + .contains(kw) + }, + ) + } + /// Parse the comma of a comma-separated syntax element. + /// `R` is a predicate that should return true if the next + /// keyword is a reserved keyword. 
+ /// Allows for control over trailing commas + /// /// Returns true if there is a next element - fn is_parse_comma_separated_end(&mut self) -> bool { + fn is_parse_comma_separated_end_with_trailing_commas( + &mut self, + trailing_commas: bool, + is_reserved_keyword: &R, + ) -> bool + where + R: Fn(&Keyword, &mut Parser) -> bool, + { if !self.consume_token(&Token::Comma) { true - } else if self.options.trailing_commas { - let token = self.peek_token().token; - match token { - Token::Word(ref kw) - if keywords::RESERVED_FOR_COLUMN_ALIAS.contains(&kw.keyword) => - { - true - } + } else if trailing_commas { + let token = self.next_token().token; + let is_end = match token { + Token::Word(ref kw) if is_reserved_keyword(&kw.keyword, self) => true, Token::RParen | Token::SemiColon | Token::EOF | Token::RBracket | Token::RBrace => { true } _ => false, - } + }; + self.prev_token(); + + is_end } else { false } } + /// Parse the comma of a comma-separated syntax element. + /// Returns true if there is a next element + fn is_parse_comma_separated_end(&mut self) -> bool { + self.is_parse_comma_separated_end_with_trailing_commas( + self.options.trailing_commas, + &Self::is_reserved_for_column_alias, + ) + } + /// Parse a comma-separated list of 1+ items accepted by `F` - pub fn parse_comma_separated(&mut self, mut f: F) -> Result, ParserError> + pub fn parse_comma_separated(&mut self, f: F) -> Result, ParserError> + where + F: FnMut(&mut Parser<'a>) -> Result, + { + self.parse_comma_separated_with_trailing_commas( + f, + self.options.trailing_commas, + Self::is_reserved_for_column_alias, + ) + } + + /// Parse a comma-separated list of 1+ items accepted by `F`. + /// `R` is a predicate that should return true if the next + /// keyword is a reserved keyword. + /// Allows for control over trailing commas. + fn parse_comma_separated_with_trailing_commas( + &mut self, + mut f: F, + trailing_commas: bool, + is_reserved_keyword: R, + ) -> Result, ParserError> where F: FnMut(&mut Parser<'a>) -> Result, + R: Fn(&Keyword, &mut Parser) -> bool, { let mut values = vec![]; loop { values.push(f(self)?); - if self.is_parse_comma_separated_end() { + if self.is_parse_comma_separated_end_with_trailing_commas( + trailing_commas, + &is_reserved_keyword, + ) { + break; + } + } + Ok(values) + } + + /// Parse a period-separated list of 1+ items accepted by `F` + fn parse_period_separated(&mut self, mut f: F) -> Result, ParserError> + where + F: FnMut(&mut Parser<'a>) -> Result, + { + let mut values = vec![]; + loop { + values.push(f(self)?); + if !self.consume_token(&Token::Period) { break; } } @@ -3512,19 +4504,64 @@ impl<'a> Parser<'a> { self.parse_comma_separated(f) } + /// Parses 0 or more statements, each followed by a semicolon. + /// If the next token is any of `terminal_keywords` then no more + /// statements will be parsed. + pub(crate) fn parse_statement_list( + &mut self, + terminal_keywords: &[Keyword], + ) -> Result, ParserError> { + let mut values = vec![]; + loop { + match &self.peek_nth_token_ref(0).token { + Token::EOF => break, + Token::Word(w) => { + if w.quote_style.is_none() && terminal_keywords.contains(&w.keyword) { + break; + } + } + _ => {} + } + + values.push(self.parse_statement()?); + self.expect_token(&Token::SemiColon)?; + } + Ok(values) + } + + /// Default implementation of a predicate that returns true if + /// the specified keyword is reserved for column alias. 
+ /// See [Dialect::is_column_alias] + fn is_reserved_for_column_alias(kw: &Keyword, parser: &mut Parser) -> bool { + !parser.dialect.is_column_alias(kw, parser) + } + /// Run a parser method `f`, reverting back to the current position if unsuccessful. - pub fn maybe_parse(&mut self, mut f: F) -> Result, ParserError> + /// Returns `ParserError::RecursionLimitExceeded` if `f` returns a `RecursionLimitExceeded`. + /// Returns `Ok(None)` if `f` returns any other error. + pub fn maybe_parse(&mut self, f: F) -> Result, ParserError> where F: FnMut(&mut Parser) -> Result, { - let index = self.index; - match f(self) { + match self.try_parse(f) { Ok(t) => Ok(Some(t)), - // Unwind stack if limit exceeded Err(ParserError::RecursionLimitExceeded) => Err(ParserError::RecursionLimitExceeded), - Err(_) => { + _ => Ok(None), + } + } + + /// Run a parser method `f`, reverting back to the current position if unsuccessful. + pub fn try_parse(&mut self, mut f: F) -> Result + where + F: FnMut(&mut Parser) -> Result, + { + let index = self.index; + match f(self) { + Ok(t) => Ok(t), + Err(e) => { + // Unwind stack if limit exceeded self.index = index; - Ok(None) + Err(e) } } } @@ -3532,7 +4569,7 @@ impl<'a> Parser<'a> { /// Parse either `ALL`, `DISTINCT` or `DISTINCT ON (...)`. Returns [`None`] if `ALL` is parsed /// and results in a [`ParserError`] if both `ALL` and `DISTINCT` are found. pub fn parse_all_or_distinct(&mut self) -> Result, ParserError> { - let loc = self.peek_token().location; + let loc = self.peek_token().span.start; let all = self.parse_keyword(Keyword::ALL); let distinct = self.parse_keyword(Keyword::DISTINCT); if !distinct { @@ -3576,21 +4613,24 @@ impl<'a> Parser<'a> { .is_some(); let persistent = dialect_of!(self is DuckDbDialect) && self.parse_one_of_keywords(&[Keyword::PERSISTENT]).is_some(); + let create_view_params = self.parse_create_view_params()?; if self.parse_keyword(Keyword::TABLE) { self.parse_create_table(or_replace, temporary, global, transient) } else if self.parse_keyword(Keyword::MATERIALIZED) || self.parse_keyword(Keyword::VIEW) { self.prev_token(); - self.parse_create_view(or_replace, temporary) + self.parse_create_view(or_alter, or_replace, temporary, create_view_params) } else if self.parse_keyword(Keyword::POLICY) { self.parse_create_policy() } else if self.parse_keyword(Keyword::EXTERNAL) { self.parse_create_external_table(or_replace) } else if self.parse_keyword(Keyword::FUNCTION) { - self.parse_create_function(or_replace, temporary) + self.parse_create_function(or_alter, or_replace, temporary) + } else if self.parse_keyword(Keyword::DOMAIN) { + self.parse_create_domain() } else if self.parse_keyword(Keyword::TRIGGER) { - self.parse_create_trigger(or_replace, false) + self.parse_create_trigger(or_alter, or_replace, false) } else if self.parse_keywords(&[Keyword::CONSTRAINT, Keyword::TRIGGER]) { - self.parse_create_trigger(or_replace, true) + self.parse_create_trigger(or_alter, or_replace, true) } else if self.parse_keyword(Keyword::MACRO) { self.parse_create_macro(or_replace, temporary) } else if self.parse_keyword(Keyword::SECRET) { @@ -3620,6 +4660,8 @@ impl<'a> Parser<'a> { self.parse_create_type() } else if self.parse_keyword(Keyword::PROCEDURE) { self.parse_create_procedure(or_alter) + } else if self.parse_keyword(Keyword::CONNECTOR) { + self.parse_create_connector() } else { self.expected("an object type after CREATE", self.peek_token()) } @@ -3638,9 +4680,9 @@ impl<'a> Parser<'a> { let mut name = None; if self.peek_token() != Token::LParen { if 
self.parse_keyword(Keyword::IN) { - storage_specifier = self.parse_identifier(false).ok() + storage_specifier = self.parse_identifier().ok() } else { - name = self.parse_identifier(false).ok(); + name = self.parse_identifier().ok(); } // Storage specifier may follow the name @@ -3648,19 +4690,19 @@ impl<'a> Parser<'a> { && self.peek_token() != Token::LParen && self.parse_keyword(Keyword::IN) { - storage_specifier = self.parse_identifier(false).ok(); + storage_specifier = self.parse_identifier().ok(); } } self.expect_token(&Token::LParen)?; - self.expect_keyword(Keyword::TYPE)?; - let secret_type = self.parse_identifier(false)?; + self.expect_keyword_is(Keyword::TYPE)?; + let secret_type = self.parse_identifier()?; let mut options = Vec::new(); if self.consume_token(&Token::Comma) { options.append(&mut self.parse_comma_separated(|p| { - let key = p.parse_identifier(false)?; - let value = p.parse_identifier(false)?; + let key = p.parse_identifier()?; + let value = p.parse_identifier()?; Ok(SecretOption { key, value }) })?); } @@ -3776,7 +4818,7 @@ impl<'a> Parser<'a> { /// Parse a UNCACHE TABLE statement pub fn parse_uncache_table(&mut self) -> Result { - self.expect_keyword(Keyword::TABLE)?; + self.expect_keyword_is(Keyword::TABLE)?; let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]); let table_name = self.parse_object_name(false)?; Ok(Statement::UNCache { @@ -3787,11 +4829,11 @@ impl<'a> Parser<'a> { /// SQLite-specific `CREATE VIRTUAL TABLE` pub fn parse_create_virtual_table(&mut self) -> Result { - self.expect_keyword(Keyword::TABLE)?; + self.expect_keyword_is(Keyword::TABLE)?; let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]); let table_name = self.parse_object_name(false)?; - self.expect_keyword(Keyword::USING)?; - let module_name = self.parse_identifier(false)?; + self.expect_keyword_is(Keyword::USING)?; + let module_name = self.parse_identifier()?; // SQLite docs note that module "arguments syntax is sufficiently // general that the arguments can be made to appear as column // definitions in a traditional CREATE TABLE statement", but @@ -3810,24 +4852,36 @@ impl<'a> Parser<'a> { let schema_name = self.parse_schema_name()?; + let default_collate_spec = if self.parse_keywords(&[Keyword::DEFAULT, Keyword::COLLATE]) { + Some(self.parse_expr()?) + } else { + None + }; + + let options = if self.peek_keyword(Keyword::OPTIONS) { + Some(self.parse_options(Keyword::OPTIONS)?) 
+ } else { + None + }; + Ok(Statement::CreateSchema { schema_name, if_not_exists, + options, + default_collate_spec, }) } fn parse_schema_name(&mut self) -> Result { if self.parse_keyword(Keyword::AUTHORIZATION) { - Ok(SchemaName::UnnamedAuthorization( - self.parse_identifier(false)?, - )) + Ok(SchemaName::UnnamedAuthorization(self.parse_identifier()?)) } else { let name = self.parse_object_name(false)?; if self.parse_keyword(Keyword::AUTHORIZATION) { Ok(SchemaName::NamedAuthorization( name, - self.parse_identifier(false)?, + self.parse_identifier()?, )) } else { Ok(SchemaName::Simple(name)) @@ -3874,13 +4928,14 @@ impl<'a> Parser<'a> { Keyword::ARCHIVE => Ok(Some(CreateFunctionUsing::Archive(uri))), _ => self.expected( "JAR, FILE or ARCHIVE, got {:?}", - TokenWithLocation::wrap(Token::make_keyword(format!("{keyword:?}").as_str())), + TokenWithSpan::wrap(Token::make_keyword(format!("{keyword:?}").as_str())), ), } } pub fn parse_create_function( &mut self, + or_alter: bool, or_replace: bool, temporary: bool, ) -> Result { @@ -3892,29 +4947,30 @@ impl<'a> Parser<'a> { self.parse_create_macro(or_replace, temporary) } else if dialect_of!(self is BigQueryDialect) { self.parse_bigquery_create_function(or_replace, temporary) + } else if dialect_of!(self is MsSqlDialect) { + self.parse_mssql_create_function(or_alter, or_replace, temporary) } else { self.prev_token(); self.expected("an object type after CREATE", self.peek_token()) } } - /// Parse `CREATE FUNCTION` for [Postgres] + /// Parse `CREATE FUNCTION` for [PostgreSQL] /// - /// [Postgres]: https://www.postgresql.org/docs/15/sql-createfunction.html + /// [PostgreSQL]: https://www.postgresql.org/docs/15/sql-createfunction.html fn parse_postgres_create_function( &mut self, or_replace: bool, temporary: bool, ) -> Result { let name = self.parse_object_name(false)?; + self.expect_token(&Token::LParen)?; - let args = if self.consume_token(&Token::RParen) { - self.prev_token(); - None + let args = if Token::RParen != self.peek_token_ref().token { + self.parse_comma_separated(Parser::parse_function_arg)? } else { - Some(self.parse_comma_separated(Parser::parse_function_arg)?) 
+ vec![] }; - self.expect_token(&Token::RParen)?; let return_type = if self.parse_keyword(Keyword::RETURNS) { @@ -3948,7 +5004,7 @@ impl<'a> Parser<'a> { )); } else if self.parse_keyword(Keyword::LANGUAGE) { ensure_not_set(&body.language, "LANGUAGE")?; - body.language = Some(self.parse_identifier(false)?); + body.language = Some(self.parse_identifier()?); } else if self.parse_keyword(Keyword::IMMUTABLE) { ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?; body.behavior = Some(FunctionBehavior::Immutable); @@ -4006,11 +5062,12 @@ impl<'a> Parser<'a> { } } - Ok(Statement::CreateFunction { + Ok(Statement::CreateFunction(CreateFunction { + or_alter: false, or_replace, temporary, name, - args, + args: Some(args), return_type, behavior: body.behavior, called_on_null: body.called_on_null, @@ -4022,7 +5079,7 @@ impl<'a> Parser<'a> { determinism_specifier: None, options: None, remote_connection: None, - }) + })) } /// Parse `CREATE FUNCTION` for [Hive] @@ -4034,12 +5091,13 @@ impl<'a> Parser<'a> { temporary: bool, ) -> Result { let name = self.parse_object_name(false)?; - self.expect_keyword(Keyword::AS)?; + self.expect_keyword_is(Keyword::AS)?; let as_ = self.parse_create_function_body_string()?; let using = self.parse_optional_create_function_using()?; - Ok(Statement::CreateFunction { + Ok(Statement::CreateFunction(CreateFunction { + or_alter: false, or_replace, temporary, name, @@ -4055,7 +5113,7 @@ impl<'a> Parser<'a> { determinism_specifier: None, options: None, remote_connection: None, - }) + })) } /// Parse `CREATE FUNCTION` for [BigQuery] @@ -4067,22 +5125,7 @@ impl<'a> Parser<'a> { temporary: bool, ) -> Result { let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]); - let name = self.parse_object_name(false)?; - - let parse_function_param = - |parser: &mut Parser| -> Result { - let name = parser.parse_identifier(false)?; - let data_type = parser.parse_data_type()?; - Ok(OperateFunctionArg { - mode: None, - name: Some(name), - data_type, - default_expr: None, - }) - }; - self.expect_token(&Token::LParen)?; - let args = self.parse_comma_separated0(parse_function_param, Token::RParen)?; - self.expect_token(&Token::RParen)?; + let (name, args) = self.parse_create_function_name_and_params()?; let return_type = if self.parse_keyword(Keyword::RETURNS) { Some(self.parse_data_type()?) @@ -4099,7 +5142,7 @@ impl<'a> Parser<'a> { }; let language = if self.parse_keyword(Keyword::LANGUAGE) { - Some(self.parse_identifier(false)?) + Some(self.parse_identifier()?) 
} else { None }; @@ -4116,7 +5159,7 @@ impl<'a> Parser<'a> { let mut options = self.maybe_parse_options(Keyword::OPTIONS)?; let function_body = if remote_connection.is_none() { - self.expect_keyword(Keyword::AS)?; + self.expect_keyword_is(Keyword::AS)?; let expr = self.parse_expr()?; if options.is_none() { options = self.maybe_parse_options(Keyword::OPTIONS)?; @@ -4128,7 +5171,8 @@ impl<'a> Parser<'a> { None }; - Ok(Statement::CreateFunction { + Ok(Statement::CreateFunction(CreateFunction { + or_alter: false, or_replace, temporary, if_not_exists, @@ -4144,7 +5188,74 @@ impl<'a> Parser<'a> { behavior: None, called_on_null: None, parallel: None, - }) + })) + } + + /// Parse `CREATE FUNCTION` for [MsSql] + /// + /// [MsSql]: https://learn.microsoft.com/en-us/sql/t-sql/statements/create-function-transact-sql + fn parse_mssql_create_function( + &mut self, + or_alter: bool, + or_replace: bool, + temporary: bool, + ) -> Result { + let (name, args) = self.parse_create_function_name_and_params()?; + + self.expect_keyword(Keyword::RETURNS)?; + let return_type = Some(self.parse_data_type()?); + + self.expect_keyword_is(Keyword::AS)?; + + let begin_token = self.expect_keyword(Keyword::BEGIN)?; + let statements = self.parse_statement_list(&[Keyword::END])?; + let end_token = self.expect_keyword(Keyword::END)?; + + let function_body = Some(CreateFunctionBody::AsBeginEnd(BeginEndStatements { + begin_token: AttachedToken(begin_token), + statements, + end_token: AttachedToken(end_token), + })); + + Ok(Statement::CreateFunction(CreateFunction { + or_alter, + or_replace, + temporary, + if_not_exists: false, + name, + args: Some(args), + return_type, + function_body, + language: None, + determinism_specifier: None, + options: None, + remote_connection: None, + using: None, + behavior: None, + called_on_null: None, + parallel: None, + })) + } + + fn parse_create_function_name_and_params( + &mut self, + ) -> Result<(ObjectName, Vec), ParserError> { + let name = self.parse_object_name(false)?; + let parse_function_param = + |parser: &mut Parser| -> Result { + let name = parser.parse_identifier()?; + let data_type = parser.parse_data_type()?; + Ok(OperateFunctionArg { + mode: None, + name: Some(name), + data_type, + default_expr: None, + }) + }; + self.expect_token(&Token::LParen)?; + let args = self.parse_comma_separated0(parse_function_param, Token::RParen)?; + self.expect_token(&Token::RParen)?; + Ok((name, args)) } fn parse_function_arg(&mut self) -> Result { @@ -4163,7 +5274,9 @@ impl<'a> Parser<'a> { let mut data_type = self.parse_data_type()?; if let DataType::Custom(n, _) = &data_type { // the first token is actually a name - name = Some(n.0[0].clone()); + match n.0[0].clone() { + ObjectNamePart::Identifier(ident) => name = Some(ident), + } data_type = self.parse_data_type()?; } @@ -4187,14 +5300,17 @@ impl<'a> Parser<'a> { /// DROP TRIGGER [ IF EXISTS ] name ON table_name [ CASCADE | RESTRICT ] /// ``` pub fn parse_drop_trigger(&mut self) -> Result { - if !dialect_of!(self is PostgreSqlDialect | GenericDialect) { + if !dialect_of!(self is PostgreSqlDialect | GenericDialect | MySqlDialect | MsSqlDialect) { self.prev_token(); return self.expected("an object type after DROP", self.peek_token()); } let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]); let trigger_name = self.parse_object_name(false)?; - self.expect_keyword(Keyword::ON)?; - let table_name = self.parse_object_name(false)?; + let table_name = if self.parse_keyword(Keyword::ON) { + Some(self.parse_object_name(false)?) 
+ } else { + None + }; let option = self .parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]) .map(|keyword| match keyword { @@ -4212,10 +5328,11 @@ impl<'a> Parser<'a> { pub fn parse_create_trigger( &mut self, + or_alter: bool, or_replace: bool, is_constraint: bool, ) -> Result { - if !dialect_of!(self is PostgreSqlDialect | GenericDialect) { + if !dialect_of!(self is PostgreSqlDialect | GenericDialect | MySqlDialect | MsSqlDialect) { self.prev_token(); return self.expected("an object type after CREATE", self.peek_token()); } @@ -4224,7 +5341,7 @@ impl<'a> Parser<'a> { let period = self.parse_trigger_period()?; let events = self.parse_keyword_separated(Keyword::OR, Parser::parse_trigger_event)?; - self.expect_keyword(Keyword::ON)?; + self.expect_keyword_is(Keyword::ON)?; let table_name = self.parse_object_name(false)?; let referenced_table_name = if self.parse_keyword(Keyword::FROM) { @@ -4242,7 +5359,7 @@ impl<'a> Parser<'a> { } } - self.expect_keyword(Keyword::FOR)?; + self.expect_keyword_is(Keyword::FOR)?; let include_each = self.parse_keyword(Keyword::EACH); let trigger_object = match self.expect_one_of_keywords(&[Keyword::ROW, Keyword::STATEMENT])? { @@ -4256,11 +5373,12 @@ impl<'a> Parser<'a> { .then(|| self.parse_expr()) .transpose()?; - self.expect_keyword(Keyword::EXECUTE)?; + self.expect_keyword_is(Keyword::EXECUTE)?; let exec_body = self.parse_trigger_exec_body()?; Ok(Statement::CreateTrigger { + or_alter, or_replace, is_constraint, name, @@ -4272,7 +5390,8 @@ impl<'a> Parser<'a> { trigger_object, include_each, condition, - exec_body, + exec_body: Some(exec_body), + statements: None, characteristics, }) } @@ -4280,14 +5399,16 @@ impl<'a> Parser<'a> { pub fn parse_trigger_period(&mut self) -> Result { Ok( match self.expect_one_of_keywords(&[ + Keyword::FOR, Keyword::BEFORE, Keyword::AFTER, Keyword::INSTEAD, ])? 
{ + Keyword::FOR => TriggerPeriod::For, Keyword::BEFORE => TriggerPeriod::Before, Keyword::AFTER => TriggerPeriod::After, Keyword::INSTEAD => self - .expect_keyword(Keyword::OF) + .expect_keyword_is(Keyword::OF) .map(|_| TriggerPeriod::InsteadOf)?, _ => unreachable!(), }, @@ -4305,9 +5426,7 @@ impl<'a> Parser<'a> { Keyword::INSERT => TriggerEvent::Insert, Keyword::UPDATE => { if self.parse_keyword(Keyword::OF) { - let cols = self.parse_comma_separated(|ident| { - Parser::parse_identifier(ident, false) - })?; + let cols = self.parse_comma_separated(Parser::parse_identifier)?; TriggerEvent::Update(cols) } else { TriggerEvent::Update(vec![]) @@ -4371,7 +5490,7 @@ impl<'a> Parser<'a> { }; self.expect_token(&Token::RParen)?; - self.expect_keyword(Keyword::AS)?; + self.expect_keyword_is(Keyword::AS)?; Ok(Statement::CreateMacro { or_replace, @@ -4391,7 +5510,7 @@ impl<'a> Parser<'a> { } fn parse_macro_arg(&mut self) -> Result { - let name = self.parse_identifier(false)?; + let name = self.parse_identifier()?; let default_expr = if self.consume_token(&Token::Assignment) || self.consume_token(&Token::RArrow) { @@ -4406,7 +5525,7 @@ impl<'a> Parser<'a> { &mut self, or_replace: bool, ) -> Result { - self.expect_keyword(Keyword::TABLE)?; + self.expect_keyword_is(Keyword::TABLE)?; let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]); let table_name = self.parse_object_name(false)?; let (columns, constraints) = self.parse_columns()?; @@ -4424,12 +5543,17 @@ impl<'a> Parser<'a> { }; let location = hive_formats.location.clone(); let table_properties = self.parse_options(Keyword::TBLPROPERTIES)?; + let table_options = if !table_properties.is_empty() { + CreateTableOptions::TableProperties(table_properties) + } else { + CreateTableOptions::None + }; Ok(CreateTableBuilder::new(table_name) .columns(columns) .constraints(constraints) .hive_distribution(hive_distribution) .hive_formats(Some(hive_formats)) - .table_properties(table_properties) + .table_options(table_options) .or_replace(or_replace) .if_not_exists(if_not_exists) .external(true) @@ -4470,11 +5594,13 @@ impl<'a> Parser<'a> { pub fn parse_create_view( &mut self, + or_alter: bool, or_replace: bool, temporary: bool, + create_view_params: Option, ) -> Result { let materialized = self.parse_keyword(Keyword::MATERIALIZED); - self.expect_keyword(Keyword::VIEW)?; + self.expect_keyword_is(Keyword::VIEW)?; let if_not_exists = dialect_of!(self is BigQueryDialect|SQLiteDialect|GenericDialect) && self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]); // Many dialects support `OR ALTER` right after `CREATE`, but we don't (yet). @@ -4489,7 +5615,7 @@ impl<'a> Parser<'a> { } let cluster_by = if self.parse_keyword(Keyword::CLUSTER) { - self.expect_keyword(Keyword::BY)?; + self.expect_keyword_is(Keyword::BY)?; self.parse_parenthesized_column_list(Optional, false)? } else { vec![] @@ -4515,16 +5641,12 @@ impl<'a> Parser<'a> { && self.parse_keyword(Keyword::COMMENT) { self.expect_token(&Token::Eq)?; - let next_token = self.next_token(); - match next_token.token { - Token::SingleQuotedString(str) => Some(str), - _ => self.expected("string literal", next_token)?, - } + Some(self.parse_comment_value()?) } else { None }; - self.expect_keyword(Keyword::AS)?; + self.expect_keyword_is(Keyword::AS)?; let query = self.parse_query()?; // Optional `WITH [ CASCADED | LOCAL ] CHECK OPTION` is widely supported here. 
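Note on the `CREATE VIEW` changes above: the hunk that follows threads the new MySQL-only view options (`ALGORITHM`, `DEFINER`, `SQL SECURITY`) from `parse_create_view_params` into `Statement::CreateView` as `params`. A minimal sketch of exercising this through the crate's public `Parser::parse_sql` entry point; the example statement and the choice of `MySqlDialect` are illustrative assumptions, not part of this patch:

```rust
use sqlparser::dialect::MySqlDialect;
use sqlparser::parser::{Parser, ParserError};

fn main() -> Result<(), ParserError> {
    // ALGORITHM and SQL SECURITY are consumed by parse_create_view_params
    // before the VIEW keyword, then carried on the CreateView statement.
    let sql = "CREATE ALGORITHM = MERGE SQL SECURITY INVOKER VIEW v AS SELECT 1";
    let statements = Parser::parse_sql(&MySqlDialect {}, sql)?;
    println!("{:?}", statements[0]);
    Ok(())
}
```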
@@ -4537,6 +5659,7 @@ impl<'a> Parser<'a> { ]); Ok(Statement::CreateView { + or_alter, name, columns, query, @@ -4549,9 +5672,68 @@ impl<'a> Parser<'a> { if_not_exists, temporary, to, + params: create_view_params, }) } + /// Parse optional parameters for the `CREATE VIEW` statement supported by [MySQL]. + /// + /// [MySQL]: https://dev.mysql.com/doc/refman/9.1/en/create-view.html + fn parse_create_view_params(&mut self) -> Result, ParserError> { + let algorithm = if self.parse_keyword(Keyword::ALGORITHM) { + self.expect_token(&Token::Eq)?; + Some( + match self.expect_one_of_keywords(&[ + Keyword::UNDEFINED, + Keyword::MERGE, + Keyword::TEMPTABLE, + ])? { + Keyword::UNDEFINED => CreateViewAlgorithm::Undefined, + Keyword::MERGE => CreateViewAlgorithm::Merge, + Keyword::TEMPTABLE => CreateViewAlgorithm::TempTable, + _ => { + self.prev_token(); + let found = self.next_token(); + return self + .expected("UNDEFINED or MERGE or TEMPTABLE after ALGORITHM =", found); + } + }, + ) + } else { + None + }; + let definer = if self.parse_keyword(Keyword::DEFINER) { + self.expect_token(&Token::Eq)?; + Some(self.parse_grantee_name()?) + } else { + None + }; + let security = if self.parse_keywords(&[Keyword::SQL, Keyword::SECURITY]) { + Some( + match self.expect_one_of_keywords(&[Keyword::DEFINER, Keyword::INVOKER])? { + Keyword::DEFINER => CreateViewSecurity::Definer, + Keyword::INVOKER => CreateViewSecurity::Invoker, + _ => { + self.prev_token(); + let found = self.next_token(); + return self.expected("DEFINER or INVOKER after SQL SECURITY", found); + } + }, + ) + } else { + None + }; + if algorithm.is_some() || definer.is_some() || security.is_some() { + Ok(Some(CreateViewParams { + algorithm, + definer, + security, + })) + } else { + Ok(None) + } + } + pub fn parse_create_role(&mut self) -> Result { let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]); let names = self.parse_comma_separated(|p| p.parse_object_name(false))?; @@ -4611,7 +5793,7 @@ impl<'a> Parser<'a> { let loc = self .tokens .get(self.index - 1) - .map_or(Location { line: 0, column: 0 }, |t| t.location); + .map_or(Location { line: 0, column: 0 }, |t| t.span.start); match keyword { Keyword::AUTHORIZATION => { if authorization_owner.is_some() { @@ -4690,7 +5872,7 @@ impl<'a> Parser<'a> { } } Keyword::CONNECTION => { - self.expect_keyword(Keyword::LIMIT)?; + self.expect_keyword_is(Keyword::LIMIT)?; if connection_limit.is_some() { parser_err!("Found multiple CONNECTION LIMIT", loc) } else { @@ -4699,7 +5881,7 @@ impl<'a> Parser<'a> { } } Keyword::VALID => { - self.expect_keyword(Keyword::UNTIL)?; + self.expect_keyword_is(Keyword::UNTIL)?; if valid_until.is_some() { parser_err!("Found multiple VALID UNTIL", loc) } else { @@ -4712,14 +5894,14 @@ impl<'a> Parser<'a> { if !in_role.is_empty() { parser_err!("Found multiple IN ROLE", loc) } else { - in_role = self.parse_comma_separated(|p| p.parse_identifier(false))?; + in_role = self.parse_comma_separated(|p| p.parse_identifier())?; Ok(()) } } else if self.parse_keyword(Keyword::GROUP) { if !in_group.is_empty() { parser_err!("Found multiple IN GROUP", loc) } else { - in_group = self.parse_comma_separated(|p| p.parse_identifier(false))?; + in_group = self.parse_comma_separated(|p| p.parse_identifier())?; Ok(()) } } else { @@ -4730,7 +5912,7 @@ impl<'a> Parser<'a> { if !role.is_empty() { parser_err!("Found multiple ROLE", loc) } else { - role = self.parse_comma_separated(|p| p.parse_identifier(false))?; + role = self.parse_comma_separated(|p| p.parse_identifier())?; 
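The `CREATE ROLE` option loop above rejects duplicated options and reports the error at the offending keyword's location. A small sketch, assuming `PostgreSqlDialect` routes `CREATE ROLE` through this code path and that the sample options below are among those the loop accepts:

```rust
use sqlparser::dialect::PostgreSqlDialect;
use sqlparser::parser::{Parser, ParserError};

fn main() -> Result<(), ParserError> {
    // CONNECTION LIMIT and VALID UNTIL are handled by the option loop above;
    // repeating either of them produces a "Found multiple ..." parser error.
    let sql = "CREATE ROLE reporting CONNECTION LIMIT 10 VALID UNTIL '2030-01-01'";
    let statements = Parser::parse_sql(&PostgreSqlDialect {}, sql)?;
    println!("{:?}", statements[0]);
    Ok(())
}
```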
Ok(()) } } @@ -4738,7 +5920,7 @@ impl<'a> Parser<'a> { if !user.is_empty() { parser_err!("Found multiple USER", loc) } else { - user = self.parse_comma_separated(|p| p.parse_identifier(false))?; + user = self.parse_comma_separated(|p| p.parse_identifier())?; Ok(()) } } @@ -4746,7 +5928,7 @@ impl<'a> Parser<'a> { if !admin.is_empty() { parser_err!("Found multiple ADMIN", loc) } else { - admin = self.parse_comma_separated(|p| p.parse_identifier(false))?; + admin = self.parse_comma_separated(|p| p.parse_identifier())?; Ok(()) } } @@ -4783,17 +5965,46 @@ impl<'a> Parser<'a> { Some(Keyword::SESSION_USER) => Owner::SessionUser, Some(_) => unreachable!(), None => { - match self.parse_identifier(false) { + match self.parse_identifier() { Ok(ident) => Owner::Ident(ident), Err(e) => { return Err(ParserError::ParserError(format!("Expected: CURRENT_USER, CURRENT_ROLE, SESSION_USER or identifier after OWNER TO. {e}"))) } } - }, + } }; Ok(owner) } + /// Parses a [Statement::CreateDomain] statement. + fn parse_create_domain(&mut self) -> Result { + let name = self.parse_object_name(false)?; + self.expect_keyword_is(Keyword::AS)?; + let data_type = self.parse_data_type()?; + let collation = if self.parse_keyword(Keyword::COLLATE) { + Some(self.parse_identifier()?) + } else { + None + }; + let default = if self.parse_keyword(Keyword::DEFAULT) { + Some(self.parse_expr()?) + } else { + None + }; + let mut constraints = Vec::new(); + while let Some(constraint) = self.parse_optional_table_constraint()? { + constraints.push(constraint); + } + + Ok(Statement::CreateDomain(CreateDomain { + name, + data_type, + collation, + default, + constraints, + })) + } + /// ```sql /// CREATE POLICY name ON table_name [ AS { PERMISSIVE | RESTRICTIVE } ] /// [ FOR { ALL | SELECT | INSERT | UPDATE | DELETE } ] @@ -4804,8 +6015,8 @@ impl<'a> Parser<'a> { /// /// [PostgreSQL Documentation](https://www.postgresql.org/docs/current/sql-createpolicy.html) pub fn parse_create_policy(&mut self) -> Result { - let name = self.parse_identifier(false)?; - self.expect_keyword(Keyword::ON)?; + let name = self.parse_identifier()?; + self.expect_keyword_is(Keyword::ON)?; let table_name = self.parse_object_name(false)?; let policy_type = if self.parse_keyword(Keyword::AS) { @@ -4875,6 +6086,49 @@ impl<'a> Parser<'a> { }) } + /// ```sql + /// CREATE CONNECTOR [IF NOT EXISTS] connector_name + /// [TYPE datasource_type] + /// [URL datasource_url] + /// [COMMENT connector_comment] + /// [WITH DCPROPERTIES(property_name=property_value, ...)] + /// ``` + /// + /// [Hive Documentation](https://cwiki.apache.org/confluence/pages/viewpage.action?pageId=27362034#LanguageManualDDL-CreateDataConnectorCreateConnector) + pub fn parse_create_connector(&mut self) -> Result { + let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]); + let name = self.parse_identifier()?; + + let connector_type = if self.parse_keyword(Keyword::TYPE) { + Some(self.parse_literal_string()?) + } else { + None + }; + + let url = if self.parse_keyword(Keyword::URL) { + Some(self.parse_literal_string()?) + } else { + None + }; + + let comment = self.parse_optional_inline_comment()?; + + let with_dcproperties = + match self.parse_options_with_keywords(&[Keyword::WITH, Keyword::DCPROPERTIES])? 
{ + properties if !properties.is_empty() => Some(properties), + _ => None, + }; + + Ok(Statement::CreateConnector(CreateConnector { + name, + if_not_exists, + connector_type, + url, + comment, + with_dcproperties, + })) + } + pub fn parse_drop(&mut self) -> Result { // MySQL dialect supports `TEMPORARY` let temporary = dialect_of!(self is MySqlDialect | GenericDialect | DuckDbDialect) @@ -4886,6 +6140,8 @@ impl<'a> Parser<'a> { ObjectType::Table } else if self.parse_keyword(Keyword::VIEW) { ObjectType::View + } else if self.parse_keywords(&[Keyword::MATERIALIZED, Keyword::VIEW]) { + ObjectType::MaterializedView } else if self.parse_keyword(Keyword::INDEX) { ObjectType::Index } else if self.parse_keyword(Keyword::ROLE) { @@ -4904,15 +6160,21 @@ impl<'a> Parser<'a> { return self.parse_drop_function(); } else if self.parse_keyword(Keyword::POLICY) { return self.parse_drop_policy(); + } else if self.parse_keyword(Keyword::CONNECTOR) { + return self.parse_drop_connector(); + } else if self.parse_keyword(Keyword::DOMAIN) { + return self.parse_drop_domain(); } else if self.parse_keyword(Keyword::PROCEDURE) { return self.parse_drop_procedure(); } else if self.parse_keyword(Keyword::SECRET) { return self.parse_drop_secret(temporary, persistent); } else if self.parse_keyword(Keyword::TRIGGER) { return self.parse_drop_trigger(); + } else if self.parse_keyword(Keyword::EXTENSION) { + return self.parse_drop_extension(); } else { return self.expected( - "TABLE, VIEW, INDEX, ROLE, SCHEMA, DATABASE, FUNCTION, PROCEDURE, STAGE, TRIGGER, SECRET, SEQUENCE, or TYPE after DROP", + "CONNECTOR, DATABASE, EXTENSION, FUNCTION, INDEX, POLICY, PROCEDURE, ROLE, SCHEMA, SECRET, SEQUENCE, STAGE, TABLE, TRIGGER, TYPE, VIEW, or MATERIALIZED VIEW after DROP", self.peek_token(), ); }; @@ -4921,7 +6183,7 @@ impl<'a> Parser<'a> { let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]); let names = self.parse_comma_separated(|p| p.parse_object_name(false))?; - let loc = self.peek_token().location; + let loc = self.peek_token().span.start; let cascade = self.parse_keyword(Keyword::CASCADE); let restrict = self.parse_keyword(Keyword::RESTRICT); let purge = self.parse_keyword(Keyword::PURGE); @@ -4945,10 +6207,10 @@ impl<'a> Parser<'a> { }) } - fn parse_optional_referential_action(&mut self) -> Option { + fn parse_optional_drop_behavior(&mut self) -> Option { match self.parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]) { - Some(Keyword::CASCADE) => Some(ReferentialAction::Cascade), - Some(Keyword::RESTRICT) => Some(ReferentialAction::Restrict), + Some(Keyword::CASCADE) => Some(DropBehavior::Cascade), + Some(Keyword::RESTRICT) => Some(DropBehavior::Restrict), _ => None, } } @@ -4960,11 +6222,11 @@ impl<'a> Parser<'a> { fn parse_drop_function(&mut self) -> Result { let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]); let func_desc = self.parse_comma_separated(Parser::parse_function_desc)?; - let option = self.parse_optional_referential_action(); + let drop_behavior = self.parse_optional_drop_behavior(); Ok(Statement::DropFunction { if_exists, func_desc, - option, + drop_behavior, }) } @@ -4975,17 +6237,41 @@ impl<'a> Parser<'a> { /// [PostgreSQL Documentation](https://www.postgresql.org/docs/current/sql-droppolicy.html) fn parse_drop_policy(&mut self) -> Result { let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]); - let name = self.parse_identifier(false)?; - self.expect_keyword(Keyword::ON)?; + let name = self.parse_identifier()?; + self.expect_keyword_is(Keyword::ON)?; 
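Note on the Hive connector support added above: `parse_create_connector` and the new `CONNECTOR` arm in `parse_drop` follow the grammar quoted in the doc comment. A minimal sketch, assuming `GenericDialect` reaches both branches and that TYPE/URL/COMMENT take plain string literals as parsed by `parse_literal_string` and `parse_comment_value`:

```rust
use sqlparser::dialect::GenericDialect;
use sqlparser::parser::{Parser, ParserError};

fn main() -> Result<(), ParserError> {
    let dialect = GenericDialect {};
    // TYPE, URL and COMMENT are each optional string-literal clauses.
    let create = "CREATE CONNECTOR IF NOT EXISTS mysql_local TYPE 'mysql' \
                  URL 'jdbc:mysql://localhost:3306' COMMENT 'local test connector'";
    // DROP CONNECTOR mirrors the IF EXISTS handling of the other DROP arms.
    let drop = "DROP CONNECTOR IF EXISTS mysql_local";
    for sql in [create, drop] {
        println!("{:?}", Parser::parse_sql(&dialect, sql)?);
    }
    Ok(())
}
```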
let table_name = self.parse_object_name(false)?; - let option = self.parse_optional_referential_action(); + let drop_behavior = self.parse_optional_drop_behavior(); Ok(Statement::DropPolicy { if_exists, name, table_name, - option, + drop_behavior, }) } + /// ```sql + /// DROP CONNECTOR [IF EXISTS] name + /// ``` + /// + /// See [Hive](https://cwiki.apache.org/confluence/pages/viewpage.action?pageId=27362034#LanguageManualDDL-DropConnector) + fn parse_drop_connector(&mut self) -> Result { + let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]); + let name = self.parse_identifier()?; + Ok(Statement::DropConnector { if_exists, name }) + } + + /// ```sql + /// DROP DOMAIN [ IF EXISTS ] name [ CASCADE | RESTRICT ] + /// ``` + fn parse_drop_domain(&mut self) -> Result { + let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]); + let name = self.parse_object_name(false)?; + let drop_behavior = self.parse_optional_drop_behavior(); + Ok(Statement::DropDomain(DropDomain { + if_exists, + name, + drop_behavior, + })) + } /// ```sql /// DROP PROCEDURE [ IF EXISTS ] name [ ( [ [ argmode ] [ argname ] argtype [, ...] ] ) ] [, ...] @@ -4994,11 +6280,11 @@ impl<'a> Parser<'a> { fn parse_drop_procedure(&mut self) -> Result { let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]); let proc_desc = self.parse_comma_separated(Parser::parse_function_desc)?; - let option = self.parse_optional_referential_action(); + let drop_behavior = self.parse_optional_drop_behavior(); Ok(Statement::DropProcedure { if_exists, proc_desc, - option, + drop_behavior, }) } @@ -5027,9 +6313,9 @@ impl<'a> Parser<'a> { persistent: bool, ) -> Result { let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]); - let name = self.parse_identifier(false)?; + let name = self.parse_identifier()?; let storage_specifier = if self.parse_keyword(Keyword::FROM) { - self.parse_identifier(false).ok() + self.parse_identifier().ok() } else { None }; @@ -5068,7 +6354,7 @@ impl<'a> Parser<'a> { return self.parse_mssql_declare(); } - let name = self.parse_identifier(false)?; + let name = self.parse_identifier()?; let binary = Some(self.parse_keyword(Keyword::BINARY)); let sensitive = if self.parse_keyword(Keyword::INSENSITIVE) { @@ -5086,12 +6372,12 @@ impl<'a> Parser<'a> { None }; - self.expect_keyword(Keyword::CURSOR)?; + self.expect_keyword_is(Keyword::CURSOR)?; let declare_type = Some(DeclareType::Cursor); let hold = match self.parse_one_of_keywords(&[Keyword::WITH, Keyword::WITHOUT]) { Some(keyword) => { - self.expect_keyword(Keyword::HOLD)?; + self.expect_keyword_is(Keyword::HOLD)?; match keyword { Keyword::WITH => Some(true), @@ -5102,7 +6388,7 @@ impl<'a> Parser<'a> { None => None, }; - self.expect_keyword(Keyword::FOR)?; + self.expect_keyword_is(Keyword::FOR)?; let query = Some(self.parse_query()?); @@ -5129,7 +6415,7 @@ impl<'a> Parser<'a> { /// ``` /// [BigQuery]: https://cloud.google.com/bigquery/docs/reference/standard-sql/procedural-language#declare pub fn parse_big_query_declare(&mut self) -> Result { - let names = self.parse_comma_separated(|parser| Parser::parse_identifier(parser, false))?; + let names = self.parse_comma_separated(Parser::parse_identifier)?; let data_type = match self.peek_token().token { Token::Word(w) if w.keyword == Keyword::DEFAULT => None, @@ -5145,7 +6431,7 @@ impl<'a> Parser<'a> { } else { // If no variable type - default expression must be specified, per BQ docs. // i.e `DECLARE foo;` is invalid. 
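Circling back to the `DROP DOMAIN` arm introduced earlier in this hunk: it reuses the new `parse_optional_drop_behavior` helper, so a trailing CASCADE/RESTRICT lands in `DropBehavior` rather than `ReferentialAction`. A short sketch, with the statement text and the choice of `PostgreSqlDialect` as illustrative assumptions:

```rust
use sqlparser::dialect::PostgreSqlDialect;
use sqlparser::parser::{Parser, ParserError};

fn main() -> Result<(), ParserError> {
    // IF EXISTS and the trailing behavior are both optional; CASCADE is
    // captured by parse_optional_drop_behavior as DropBehavior::Cascade.
    let sql = "DROP DOMAIN IF EXISTS us_postal_code CASCADE";
    let statements = Parser::parse_sql(&PostgreSqlDialect {}, sql)?;
    println!("{:?}", statements[0]);
    Ok(())
}
```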
- self.expect_keyword(Keyword::DEFAULT)?; + self.expect_keyword_is(Keyword::DEFAULT)?; Some(self.parse_expr()?) }; @@ -5191,10 +6477,10 @@ impl<'a> Parser<'a> { pub fn parse_snowflake_declare(&mut self) -> Result { let mut stmts = vec![]; loop { - let name = self.parse_identifier(false)?; + let name = self.parse_identifier()?; let (declare_type, for_query, assigned_expr, data_type) = if self.parse_keyword(Keyword::CURSOR) { - self.expect_keyword(Keyword::FOR)?; + self.expect_keyword_is(Keyword::FOR)?; match self.peek_token().token { Token::Word(w) if w.keyword == Keyword::SELECT => ( Some(DeclareType::Cursor), @@ -5287,60 +6573,78 @@ impl<'a> Parser<'a> { /// DECLARE // { // { @local_variable [AS] data_type [ = value ] } - // | { @cursor_variable_name CURSOR } + // | { @cursor_variable_name CURSOR [ FOR ] } // } [ ,...n ] /// ``` /// [MsSql]: https://learn.microsoft.com/en-us/sql/t-sql/language-elements/declare-local-variable-transact-sql?view=sql-server-ver16 pub fn parse_mssql_declare(&mut self) -> Result { - let mut stmts = vec![]; + let stmts = self.parse_comma_separated(Parser::parse_mssql_declare_stmt)?; - loop { - let name = { - let ident = self.parse_identifier(false)?; - if !ident.value.starts_with('@') { - Err(ParserError::TokenizerError( - "Invalid MsSql variable declaration.".to_string(), - )) - } else { - Ok(ident) - } - }?; + Ok(Statement::Declare { stmts }) + } - let (declare_type, data_type) = match self.peek_token().token { - Token::Word(w) => match w.keyword { - Keyword::CURSOR => { - self.next_token(); - (Some(DeclareType::Cursor), None) - } - Keyword::AS => { - self.next_token(); - (None, Some(self.parse_data_type()?)) - } - _ => (None, Some(self.parse_data_type()?)), - }, + /// Parse the body of a [MsSql] `DECLARE`statement. + /// + /// Syntax: + /// ```text + // { + // { @local_variable [AS] data_type [ = value ] } + // | { @cursor_variable_name CURSOR [ FOR ]} + // } [ ,...n ] + /// ``` + /// [MsSql]: https://learn.microsoft.com/en-us/sql/t-sql/language-elements/declare-local-variable-transact-sql?view=sql-server-ver16 + pub fn parse_mssql_declare_stmt(&mut self) -> Result { + let name = { + let ident = self.parse_identifier()?; + if !ident.value.starts_with('@') + && !matches!( + self.peek_token().token, + Token::Word(w) if w.keyword == Keyword::CURSOR + ) + { + Err(ParserError::TokenizerError( + "Invalid MsSql variable declaration.".to_string(), + )) + } else { + Ok(ident) + } + }?; + + let (declare_type, data_type) = match self.peek_token().token { + Token::Word(w) => match w.keyword { + Keyword::CURSOR => { + self.next_token(); + (Some(DeclareType::Cursor), None) + } + Keyword::AS => { + self.next_token(); + (None, Some(self.parse_data_type()?)) + } _ => (None, Some(self.parse_data_type()?)), - }; + }, + _ => (None, Some(self.parse_data_type()?)), + }; + let (for_query, assignment) = if self.peek_keyword(Keyword::FOR) { + self.next_token(); + let query = Some(self.parse_query()?); + (query, None) + } else { let assignment = self.parse_mssql_variable_declaration_expression()?; + (None, assignment) + }; - stmts.push(Declare { - names: vec![name], - data_type, - assignment, - declare_type, - binary: None, - sensitive: None, - scroll: None, - hold: None, - for_query: None, - }); - - if self.next_token() != Token::Comma { - break; - } - } - - Ok(Statement::Declare { stmts }) + Ok(Declare { + names: vec![name], + data_type, + assignment, + declare_type, + binary: None, + sensitive: None, + scroll: None, + hold: None, + for_query, + }) } /// Parses the assigned 
expression in a variable declaration. @@ -5400,11 +6704,11 @@ impl<'a> Parser<'a> { FetchDirection::Last } else if self.parse_keyword(Keyword::ABSOLUTE) { FetchDirection::Absolute { - limit: self.parse_number_value()?, + limit: self.parse_number_value()?.value, } } else if self.parse_keyword(Keyword::RELATIVE) { FetchDirection::Relative { - limit: self.parse_number_value()?, + limit: self.parse_number_value()?.value, } } else if self.parse_keyword(Keyword::FORWARD) { if self.parse_keyword(Keyword::ALL) { @@ -5412,7 +6716,7 @@ impl<'a> Parser<'a> { } else { FetchDirection::Forward { // TODO: Support optional - limit: Some(self.parse_number_value()?), + limit: Some(self.parse_number_value()?.value), } } } else if self.parse_keyword(Keyword::BACKWARD) { @@ -5421,20 +6725,28 @@ impl<'a> Parser<'a> { } else { FetchDirection::Backward { // TODO: Support optional - limit: Some(self.parse_number_value()?), + limit: Some(self.parse_number_value()?.value), } } } else if self.parse_keyword(Keyword::ALL) { FetchDirection::All } else { FetchDirection::Count { - limit: self.parse_number_value()?, + limit: self.parse_number_value()?.value, } }; - self.expect_one_of_keywords(&[Keyword::FROM, Keyword::IN])?; + let position = if self.peek_keyword(Keyword::FROM) { + self.expect_keyword(Keyword::FROM)?; + FetchPosition::From + } else if self.peek_keyword(Keyword::IN) { + self.expect_keyword(Keyword::IN)?; + FetchPosition::In + } else { + return parser_err!("Expected FROM or IN", self.peek_token().span.start); + }; - let name = self.parse_identifier(false)?; + let name = self.parse_identifier()?; let into = if self.parse_keyword(Keyword::INTO) { Some(self.parse_object_name(false)?) @@ -5445,6 +6757,7 @@ impl<'a> Parser<'a> { Ok(Statement::Fetch { name, direction, + position, into, }) } @@ -5472,24 +6785,25 @@ impl<'a> Parser<'a> { let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]); let index_name = if if_not_exists || !self.parse_keyword(Keyword::ON) { let index_name = self.parse_object_name(false)?; - self.expect_keyword(Keyword::ON)?; + self.expect_keyword_is(Keyword::ON)?; Some(index_name) } else { None }; let table_name = self.parse_object_name(false)?; let using = if self.parse_keyword(Keyword::USING) { - Some(self.parse_identifier(false)?) + Some(self.parse_index_type()?) 
} else { None }; + self.expect_token(&Token::LParen)?; - let columns = self.parse_comma_separated(Parser::parse_order_by_expr)?; + let columns = self.parse_comma_separated(Parser::parse_create_index_expr)?; self.expect_token(&Token::RParen)?; let include = if self.parse_keyword(Keyword::INCLUDE) { self.expect_token(&Token::LParen)?; - let columns = self.parse_comma_separated(|p| p.parse_identifier(false))?; + let columns = self.parse_comma_separated(|p| p.parse_identifier())?; self.expect_token(&Token::RParen)?; columns } else { @@ -5498,7 +6812,7 @@ impl<'a> Parser<'a> { let nulls_distinct = if self.parse_keyword(Keyword::NULLS) { let not = self.parse_keyword(Keyword::NOT); - self.expect_keyword(Keyword::DISTINCT)?; + self.expect_keyword_is(Keyword::DISTINCT)?; Some(!not) } else { None @@ -5538,17 +6852,17 @@ impl<'a> Parser<'a> { pub fn parse_create_extension(&mut self) -> Result { let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]); - let name = self.parse_identifier(false)?; + let name = self.parse_identifier()?; let (schema, version, cascade) = if self.parse_keyword(Keyword::WITH) { let schema = if self.parse_keyword(Keyword::SCHEMA) { - Some(self.parse_identifier(false)?) + Some(self.parse_identifier()?) } else { None }; let version = if self.parse_keyword(Keyword::VERSION) { - Some(self.parse_identifier(false)?) + Some(self.parse_identifier()?) } else { None }; @@ -5569,6 +6883,25 @@ impl<'a> Parser<'a> { }) } + /// Parse a PostgreSQL-specific [Statement::DropExtension] statement. + pub fn parse_drop_extension(&mut self) -> Result { + let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]); + let names = self.parse_comma_separated(|p| p.parse_identifier())?; + let cascade_or_restrict = + self.parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]); + Ok(Statement::DropExtension { + names, + if_exists, + cascade_or_restrict: cascade_or_restrict + .map(|k| match k { + Keyword::CASCADE => Ok(ReferentialAction::Cascade), + Keyword::RESTRICT => Ok(ReferentialAction::Restrict), + _ => self.expected("CASCADE or RESTRICT", self.peek_token()), + }) + .transpose()?, + }) + } + //TODO: Implement parsing for Skewed pub fn parse_hive_distribution(&mut self) -> Result { if self.parse_keywords(&[Keyword::PARTITIONED, Keyword::BY]) { @@ -5594,10 +6927,10 @@ impl<'a> Parser<'a> { hive_format.row_format = Some(self.parse_row_format()?); } Some(Keyword::STORED) => { - self.expect_keyword(Keyword::AS)?; + self.expect_keyword_is(Keyword::AS)?; if self.parse_keyword(Keyword::INPUTFORMAT) { let input_format = self.parse_expr()?; - self.expect_keyword(Keyword::OUTPUTFORMAT)?; + self.expect_keyword_is(Keyword::OUTPUTFORMAT)?; let output_format = self.parse_expr()?; hive_format.storage = Some(HiveIOFormat::IOF { input_format, @@ -5630,7 +6963,7 @@ impl<'a> Parser<'a> { } pub fn parse_row_format(&mut self) -> Result { - self.expect_keyword(Keyword::FORMAT)?; + self.expect_keyword_is(Keyword::FORMAT)?; match self.parse_one_of_keywords(&[Keyword::SERDE, Keyword::DELIMITED]) { Some(Keyword::SERDE) => { let class = self.parse_literal_string()?; @@ -5651,13 +6984,13 @@ impl<'a> Parser<'a> { if self.parse_keywords(&[Keyword::TERMINATED, Keyword::BY]) { row_delimiters.push(HiveRowDelimiter { delimiter: HiveDelimiter::FieldsTerminatedBy, - char: self.parse_identifier(false)?, + char: self.parse_identifier()?, }); if self.parse_keywords(&[Keyword::ESCAPED, Keyword::BY]) { row_delimiters.push(HiveRowDelimiter { delimiter: HiveDelimiter::FieldsEscapedBy, - char: 
self.parse_identifier(false)?, + char: self.parse_identifier()?, }); } } else { @@ -5672,7 +7005,7 @@ impl<'a> Parser<'a> { ]) { row_delimiters.push(HiveRowDelimiter { delimiter: HiveDelimiter::CollectionItemsTerminatedBy, - char: self.parse_identifier(false)?, + char: self.parse_identifier()?, }); } else { break; @@ -5686,7 +7019,7 @@ impl<'a> Parser<'a> { ]) { row_delimiters.push(HiveRowDelimiter { delimiter: HiveDelimiter::MapKeysTerminatedBy, - char: self.parse_identifier(false)?, + char: self.parse_identifier()?, }); } else { break; @@ -5696,7 +7029,7 @@ impl<'a> Parser<'a> { if self.parse_keywords(&[Keyword::TERMINATED, Keyword::BY]) { row_delimiters.push(HiveRowDelimiter { delimiter: HiveDelimiter::LinesTerminatedBy, - char: self.parse_identifier(false)?, + char: self.parse_identifier()?, }); } else { break; @@ -5706,7 +7039,7 @@ impl<'a> Parser<'a> { if self.parse_keywords(&[Keyword::DEFINED, Keyword::AS]) { row_delimiters.push(HiveRowDelimiter { delimiter: HiveDelimiter::NullDefinedAs, - char: self.parse_identifier(false)?, + char: self.parse_identifier()?, }); } else { break; @@ -5727,7 +7060,7 @@ impl<'a> Parser<'a> { fn parse_optional_on_cluster(&mut self) -> Result, ParserError> { if self.parse_keywords(&[Keyword::ON, Keyword::CLUSTER]) { - Ok(Some(self.parse_identifier(false)?)) + Ok(Some(self.parse_identifier()?)) } else { Ok(None) } @@ -5761,17 +7094,16 @@ impl<'a> Parser<'a> { // parse optional column list (schema) let (columns, constraints) = self.parse_columns()?; - let mut comment = if dialect_of!(self is HiveDialect) - && self.parse_keyword(Keyword::COMMENT) - { - let next_token = self.next_token(); - match next_token.token { - Token::SingleQuotedString(str) => Some(CommentDef::AfterColumnDefsWithoutEq(str)), - _ => self.expected("comment", next_token)?, - } - } else { - None - }; + let comment_after_column_def = + if dialect_of!(self is HiveDialect) && self.parse_keyword(Keyword::COMMENT) { + let next_token = self.next_token(); + match next_token.token { + Token::SingleQuotedString(str) => Some(CommentDef::WithoutEq(str)), + _ => self.expected("comment", next_token)?, + } + } else { + None + }; // SQLite supports `WITHOUT ROWID` at the end of `CREATE TABLE` let without_rowid = self.parse_keywords(&[Keyword::WITHOUT, Keyword::ROWID]); @@ -5779,39 +7111,8 @@ impl<'a> Parser<'a> { let hive_distribution = self.parse_hive_distribution()?; let clustered_by = self.parse_optional_clustered_by()?; let hive_formats = self.parse_hive_formats()?; - // PostgreSQL supports `WITH ( options )`, before `AS` - let with_options = self.parse_options(Keyword::WITH)?; - let table_properties = self.parse_options(Keyword::TBLPROPERTIES)?; - let engine = if self.parse_keyword(Keyword::ENGINE) { - self.expect_token(&Token::Eq)?; - let next_token = self.next_token(); - match next_token.token { - Token::Word(w) => { - let name = w.value; - let parameters = if self.peek_token() == Token::LParen { - Some(self.parse_parenthesized_identifiers()?) 
- } else { - None - }; - Some(TableEngine { name, parameters }) - } - _ => self.expected("identifier", next_token)?, - } - } else { - None - }; - - let auto_increment_offset = if self.parse_keyword(Keyword::AUTO_INCREMENT) { - let _ = self.consume_token(&Token::Eq); - let next_token = self.next_token(); - match next_token.token { - Token::Number(s, _) => Some(Self::parse::(s, next_token.location)?), - _ => self.expected("literal int", next_token)?, - } - } else { - None - }; + let create_table_config = self.parse_optional_create_table_config()?; // ClickHouse supports `PRIMARY KEY`, before `ORDER BY` // https://clickhouse.com/docs/en/sql-reference/statements/create/table#primary-key @@ -5839,59 +7140,22 @@ impl<'a> Parser<'a> { None }; - let create_table_config = self.parse_optional_create_table_config()?; - - let default_charset = if self.parse_keywords(&[Keyword::DEFAULT, Keyword::CHARSET]) { - self.expect_token(&Token::Eq)?; - let next_token = self.next_token(); - match next_token.token { - Token::Word(w) => Some(w.value), - _ => self.expected("identifier", next_token)?, - } - } else { - None - }; - - let collation = if self.parse_keywords(&[Keyword::COLLATE]) { - self.expect_token(&Token::Eq)?; - let next_token = self.next_token(); - match next_token.token { - Token::Word(w) => Some(w.value), - _ => self.expected("identifier", next_token)?, - } + let on_commit = if self.parse_keywords(&[Keyword::ON, Keyword::COMMIT]) { + Some(self.parse_create_table_on_commit()?) } else { None }; - let on_commit: Option = - if self.parse_keywords(&[Keyword::ON, Keyword::COMMIT, Keyword::DELETE, Keyword::ROWS]) - { - Some(OnCommit::DeleteRows) - } else if self.parse_keywords(&[ - Keyword::ON, - Keyword::COMMIT, - Keyword::PRESERVE, - Keyword::ROWS, - ]) { - Some(OnCommit::PreserveRows) - } else if self.parse_keywords(&[Keyword::ON, Keyword::COMMIT, Keyword::DROP]) { - Some(OnCommit::Drop) - } else { - None - }; - let strict = self.parse_keyword(Keyword::STRICT); - // Excludes Hive dialect here since it has been handled after table column definitions. - if !dialect_of!(self is HiveDialect) && self.parse_keyword(Keyword::COMMENT) { - // rewind the COMMENT keyword - self.prev_token(); - comment = self.parse_optional_inline_comment()? - }; - // Parse optional `AS ( query )` let query = if self.parse_keyword(Keyword::AS) { Some(self.parse_query()?) + } else if self.dialect.supports_create_table_select() && self.parse_keyword(Keyword::SELECT) + { + // rewind the SELECT keyword + self.prev_token(); + Some(self.parse_query()?) 
} else { None }; @@ -5900,8 +7164,6 @@ impl<'a> Parser<'a> { .temporary(temporary) .columns(columns) .constraints(constraints) - .with_options(with_options) - .table_properties(table_properties) .or_replace(or_replace) .if_not_exists(if_not_exists) .transient(transient) @@ -5912,30 +7174,61 @@ impl<'a> Parser<'a> { .without_rowid(without_rowid) .like(like) .clone_clause(clone) - .engine(engine) - .comment(comment) - .auto_increment_offset(auto_increment_offset) + .comment_after_column_def(comment_after_column_def) .order_by(order_by) - .default_charset(default_charset) - .collation(collation) .on_commit(on_commit) .on_cluster(on_cluster) .clustered_by(clustered_by) .partition_by(create_table_config.partition_by) .cluster_by(create_table_config.cluster_by) - .options(create_table_config.options) + .inherits(create_table_config.inherits) + .table_options(create_table_config.table_options) .primary_key(primary_key) .strict(strict) .build()) } - /// Parse configuration like partitioning, clustering information during the table creation. + pub(crate) fn parse_create_table_on_commit(&mut self) -> Result { + if self.parse_keywords(&[Keyword::DELETE, Keyword::ROWS]) { + Ok(OnCommit::DeleteRows) + } else if self.parse_keywords(&[Keyword::PRESERVE, Keyword::ROWS]) { + Ok(OnCommit::PreserveRows) + } else if self.parse_keywords(&[Keyword::DROP]) { + Ok(OnCommit::Drop) + } else { + parser_err!( + "Expecting DELETE ROWS, PRESERVE ROWS or DROP", + self.peek_token() + ) + } + } + + /// Parse configuration like inheritance, partitioning, clustering information during the table creation. /// /// [BigQuery](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#syntax_2) /// [PostgreSQL](https://www.postgresql.org/docs/current/ddl-partitioning.html) + /// [MySql](https://dev.mysql.com/doc/refman/8.4/en/create-table.html) fn parse_optional_create_table_config( &mut self, ) -> Result { + let mut table_options = CreateTableOptions::None; + + let inherits = if self.parse_keyword(Keyword::INHERITS) { + Some(self.parse_parenthesized_qualified_column_list(IsOptional::Mandatory, false)?) + } else { + None + }; + + // PostgreSQL supports `WITH ( options )`, before `AS` + let with_options = self.parse_options(Keyword::WITH)?; + if !with_options.is_empty() { + table_options = CreateTableOptions::With(with_options) + } + + let table_properties = self.parse_options(Keyword::TBLPROPERTIES)?; + if !table_properties.is_empty() { + table_options = CreateTableOptions::TableProperties(table_properties); + } let partition_by = if dialect_of!(self is BigQueryDialect | PostgreSqlDialect | GenericDialect) && self.parse_keywords(&[Keyword::PARTITION, Keyword::BY]) { @@ -5945,46 +7238,264 @@ impl<'a> Parser<'a> { }; let mut cluster_by = None; - let mut options = None; if dialect_of!(self is BigQueryDialect | GenericDialect) { if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) { cluster_by = Some(WrappedCollection::NoWrapping( - self.parse_comma_separated(|p| p.parse_identifier(false))?, + self.parse_comma_separated(|p| p.parse_identifier())?, )); }; if let Token::Word(word) = self.peek_token().token { if word.keyword == Keyword::OPTIONS { - options = Some(self.parse_options(Keyword::OPTIONS)?); + table_options = + CreateTableOptions::Options(self.parse_options(Keyword::OPTIONS)?) 
} }; } + if !dialect_of!(self is HiveDialect) && table_options == CreateTableOptions::None { + let plain_options = self.parse_plain_options()?; + if !plain_options.is_empty() { + table_options = CreateTableOptions::Plain(plain_options) + } + }; + Ok(CreateTableConfiguration { partition_by, cluster_by, - options, + inherits, + table_options, }) } - pub fn parse_optional_inline_comment(&mut self) -> Result, ParserError> { - let comment = if self.parse_keyword(Keyword::COMMENT) { + fn parse_plain_option(&mut self) -> Result, ParserError> { + // Single parameter option + // + if self.parse_keywords(&[Keyword::START, Keyword::TRANSACTION]) { + return Ok(Some(SqlOption::Ident(Ident::new("START TRANSACTION")))); + } + + // Custom option + // + if self.parse_keywords(&[Keyword::COMMENT]) { let has_eq = self.consume_token(&Token::Eq); - let next_token = self.next_token(); - match next_token.token { - Token::SingleQuotedString(str) => Some(if has_eq { - CommentDef::WithEq(str) - } else { - CommentDef::WithoutEq(str) - }), - _ => self.expected("comment", next_token)?, + let value = self.next_token(); + + let comment = match (has_eq, value.token) { + (true, Token::SingleQuotedString(s)) => { + Ok(Some(SqlOption::Comment(CommentDef::WithEq(s)))) + } + (false, Token::SingleQuotedString(s)) => { + Ok(Some(SqlOption::Comment(CommentDef::WithoutEq(s)))) + } + (_, token) => { + self.expected("Token::SingleQuotedString", TokenWithSpan::wrap(token)) + } + }; + return comment; + } + + // + // + if self.parse_keywords(&[Keyword::ENGINE]) { + let _ = self.consume_token(&Token::Eq); + let value = self.next_token(); + + let engine = match value.token { + Token::Word(w) => { + let parameters = if self.peek_token() == Token::LParen { + self.parse_parenthesized_identifiers()? + } else { + vec![] + }; + + Ok(Some(SqlOption::NamedParenthesizedList( + NamedParenthesizedList { + key: Ident::new("ENGINE"), + name: Some(Ident::new(w.value)), + values: parameters, + }, + ))) + } + _ => { + return self.expected("Token::Word", value)?; + } + }; + + return engine; + } + + // + if self.parse_keywords(&[Keyword::TABLESPACE]) { + let _ = self.consume_token(&Token::Eq); + let value = self.next_token(); + + let tablespace = match value.token { + Token::Word(Word { value: name, .. 
}) | Token::SingleQuotedString(name) => { + let storage = match self.parse_keyword(Keyword::STORAGE) { + true => { + let _ = self.consume_token(&Token::Eq); + let storage_token = self.next_token(); + match &storage_token.token { + Token::Word(w) => match w.value.to_uppercase().as_str() { + "DISK" => Some(StorageType::Disk), + "MEMORY" => Some(StorageType::Memory), + _ => self + .expected("Storage type (DISK or MEMORY)", storage_token)?, + }, + _ => self.expected("Token::Word", storage_token)?, + } + } + false => None, + }; + + Ok(Some(SqlOption::TableSpace(TablespaceOption { + name, + storage, + }))) + } + _ => { + return self.expected("Token::Word", value)?; + } + }; + + return tablespace; + } + + // + if self.parse_keyword(Keyword::UNION) { + let _ = self.consume_token(&Token::Eq); + let value = self.next_token(); + + match value.token { + Token::LParen => { + let tables: Vec = + self.parse_comma_separated0(Parser::parse_identifier, Token::RParen)?; + self.expect_token(&Token::RParen)?; + + return Ok(Some(SqlOption::NamedParenthesizedList( + NamedParenthesizedList { + key: Ident::new("UNION"), + name: None, + values: tables, + }, + ))); + } + _ => { + return self.expected("Token::LParen", value)?; + } } + } + + // Key/Value parameter option + let key = if self.parse_keywords(&[Keyword::DEFAULT, Keyword::CHARSET]) { + Ident::new("DEFAULT CHARSET") + } else if self.parse_keyword(Keyword::CHARSET) { + Ident::new("CHARSET") + } else if self.parse_keywords(&[Keyword::DEFAULT, Keyword::CHARACTER, Keyword::SET]) { + Ident::new("DEFAULT CHARACTER SET") + } else if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) { + Ident::new("CHARACTER SET") + } else if self.parse_keywords(&[Keyword::DEFAULT, Keyword::COLLATE]) { + Ident::new("DEFAULT COLLATE") + } else if self.parse_keyword(Keyword::COLLATE) { + Ident::new("COLLATE") + } else if self.parse_keywords(&[Keyword::DATA, Keyword::DIRECTORY]) { + Ident::new("DATA DIRECTORY") + } else if self.parse_keywords(&[Keyword::INDEX, Keyword::DIRECTORY]) { + Ident::new("INDEX DIRECTORY") + } else if self.parse_keyword(Keyword::KEY_BLOCK_SIZE) { + Ident::new("KEY_BLOCK_SIZE") + } else if self.parse_keyword(Keyword::ROW_FORMAT) { + Ident::new("ROW_FORMAT") + } else if self.parse_keyword(Keyword::PACK_KEYS) { + Ident::new("PACK_KEYS") + } else if self.parse_keyword(Keyword::STATS_AUTO_RECALC) { + Ident::new("STATS_AUTO_RECALC") + } else if self.parse_keyword(Keyword::STATS_PERSISTENT) { + Ident::new("STATS_PERSISTENT") + } else if self.parse_keyword(Keyword::STATS_SAMPLE_PAGES) { + Ident::new("STATS_SAMPLE_PAGES") + } else if self.parse_keyword(Keyword::DELAY_KEY_WRITE) { + Ident::new("DELAY_KEY_WRITE") + } else if self.parse_keyword(Keyword::COMPRESSION) { + Ident::new("COMPRESSION") + } else if self.parse_keyword(Keyword::ENCRYPTION) { + Ident::new("ENCRYPTION") + } else if self.parse_keyword(Keyword::MAX_ROWS) { + Ident::new("MAX_ROWS") + } else if self.parse_keyword(Keyword::MIN_ROWS) { + Ident::new("MIN_ROWS") + } else if self.parse_keyword(Keyword::AUTOEXTEND_SIZE) { + Ident::new("AUTOEXTEND_SIZE") + } else if self.parse_keyword(Keyword::AVG_ROW_LENGTH) { + Ident::new("AVG_ROW_LENGTH") + } else if self.parse_keyword(Keyword::CHECKSUM) { + Ident::new("CHECKSUM") + } else if self.parse_keyword(Keyword::CONNECTION) { + Ident::new("CONNECTION") + } else if self.parse_keyword(Keyword::ENGINE_ATTRIBUTE) { + Ident::new("ENGINE_ATTRIBUTE") + } else if self.parse_keyword(Keyword::PASSWORD) { + Ident::new("PASSWORD") + } else if 
self.parse_keyword(Keyword::SECONDARY_ENGINE_ATTRIBUTE) { + Ident::new("SECONDARY_ENGINE_ATTRIBUTE") + } else if self.parse_keyword(Keyword::INSERT_METHOD) { + Ident::new("INSERT_METHOD") + } else if self.parse_keyword(Keyword::AUTO_INCREMENT) { + Ident::new("AUTO_INCREMENT") + } else { + return Ok(None); + }; + + let _ = self.consume_token(&Token::Eq); + + let value = match self + .maybe_parse(|parser| parser.parse_value())? + .map(Expr::Value) + { + Some(expr) => expr, + None => Expr::Identifier(self.parse_identifier()?), + }; + + Ok(Some(SqlOption::KeyValue { key, value })) + } + + pub fn parse_plain_options(&mut self) -> Result, ParserError> { + let mut options = Vec::new(); + + while let Some(option) = self.parse_plain_option()? { + options.push(option); + } + + Ok(options) + } + + pub fn parse_optional_inline_comment(&mut self) -> Result, ParserError> { + let comment = if self.parse_keyword(Keyword::COMMENT) { + let has_eq = self.consume_token(&Token::Eq); + let comment = self.parse_comment_value()?; + Some(if has_eq { + CommentDef::WithEq(comment) + } else { + CommentDef::WithoutEq(comment) + }) } else { None }; Ok(comment) } + pub fn parse_comment_value(&mut self) -> Result { + let next_token = self.next_token(); + let value = match next_token.token { + Token::SingleQuotedString(str) => str, + Token::DollarQuotedString(str) => str.value, + _ => self.expected("string literal", next_token)?, + }; + Ok(value) + } + pub fn parse_optional_procedure_parameters( &mut self, ) -> Result>, ParserError> { @@ -6030,7 +7541,11 @@ impl<'a> Parser<'a> { return self.expected("',' or ')' after column definition", self.peek_token()); }; - if rparen && (!comma || self.options.trailing_commas) { + if rparen + && (!comma + || self.dialect.supports_column_definition_trailing_commas() + || self.options.trailing_commas) + { let _ = self.consume_token(&Token::RParen); break; } @@ -6040,27 +7555,22 @@ impl<'a> Parser<'a> { } pub fn parse_procedure_param(&mut self) -> Result { - let name = self.parse_identifier(false)?; + let name = self.parse_identifier()?; let data_type = self.parse_data_type()?; Ok(ProcedureParam { name, data_type }) } pub fn parse_column_def(&mut self) -> Result { - let name = self.parse_identifier(false)?; + let name = self.parse_identifier()?; let data_type = if self.is_column_type_sqlite_unspecified() { DataType::Unspecified } else { self.parse_data_type()? }; - let mut collation = if self.parse_keyword(Keyword::COLLATE) { - Some(self.parse_object_name(false)?) - } else { - None - }; let mut options = vec![]; loop { if self.parse_keyword(Keyword::CONSTRAINT) { - let name = Some(self.parse_identifier(false)?); + let name = Some(self.parse_identifier()?); if let Some(option) = self.parse_optional_column_option()? { options.push(ColumnOptionDef { name, option }); } else { @@ -6071,10 +7581,6 @@ impl<'a> Parser<'a> { } } else if let Some(option) = self.parse_optional_column_option()? 
{ options.push(ColumnOptionDef { name: None, option }); - } else if dialect_of!(self is MySqlDialect | SnowflakeDialect | GenericDialect) - && self.parse_keyword(Keyword::COLLATE) - { - collation = Some(self.parse_object_name(false)?); } else { break; }; @@ -6082,7 +7588,6 @@ impl<'a> Parser<'a> { Ok(ColumnDef { name, data_type, - collation, options, }) } @@ -6119,14 +7624,14 @@ impl<'a> Parser<'a> { Ok(Some(ColumnOption::CharacterSet( self.parse_object_name(false)?, ))) + } else if self.parse_keywords(&[Keyword::COLLATE]) { + Ok(Some(ColumnOption::Collation( + self.parse_object_name(false)?, + ))) } else if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) { Ok(Some(ColumnOption::NotNull)) } else if self.parse_keywords(&[Keyword::COMMENT]) { - let next_token = self.next_token(); - match next_token.token { - Token::SingleQuotedString(value, ..) => Ok(Some(ColumnOption::Comment(value))), - _ => self.expected("string", next_token), - } + Ok(Some(ColumnOption::Comment(self.parse_comment_value()?))) } else if self.parse_keyword(Keyword::NULL) { Ok(Some(ColumnOption::Null)) } else if self.parse_keyword(Keyword::DEFAULT) { @@ -6279,7 +7784,7 @@ impl<'a> Parser<'a> { } pub(crate) fn parse_tag(&mut self) -> Result { - let name = self.parse_identifier(false)?; + let name = self.parse_identifier()?; self.expect_token(&Token::Eq)?; let value = self.parse_literal_string()?; @@ -6394,9 +7899,9 @@ impl<'a> Parser<'a> { None }; - self.expect_keyword(Keyword::INTO)?; - let num_buckets = self.parse_number_value()?; - self.expect_keyword(Keyword::BUCKETS)?; + self.expect_keyword_is(Keyword::INTO)?; + let num_buckets = self.parse_number_value()?.value; + self.expect_keyword_is(Keyword::BUCKETS)?; Some(ClusteredBy { columns, sorted_by, @@ -6468,7 +7973,7 @@ impl<'a> Parser<'a> { &mut self, ) -> Result, ParserError> { let name = if self.parse_keyword(Keyword::CONSTRAINT) { - Some(self.parse_identifier(false)?) + Some(self.parse_identifier()?) 
} else { None }; @@ -6484,6 +7989,8 @@ impl<'a> Parser<'a> { .expected("`index_name` or `(column_name [, ...])`", self.peek_token()); } + let nulls_distinct = self.parse_optional_nulls_distinct()?; + // optional index name let index_name = self.parse_optional_indent()?; let index_type = self.parse_optional_using_then_index_type()?; @@ -6499,11 +8006,12 @@ impl<'a> Parser<'a> { columns, index_options, characteristics, + nulls_distinct, })) } Token::Word(w) if w.keyword == Keyword::PRIMARY => { // after `PRIMARY` always stay `KEY` - self.expect_keyword(Keyword::KEY)?; + self.expect_keyword_is(Keyword::KEY)?; // optional index name let index_name = self.parse_optional_indent()?; @@ -6522,11 +8030,11 @@ impl<'a> Parser<'a> { })) } Token::Word(w) if w.keyword == Keyword::FOREIGN => { - self.expect_keyword(Keyword::KEY)?; + self.expect_keyword_is(Keyword::KEY)?; let columns = self.parse_parenthesized_column_list(Mandatory, false)?; - self.expect_keyword(Keyword::REFERENCES)?; + self.expect_keyword_is(Keyword::REFERENCES)?; let foreign_table = self.parse_object_name(false)?; - let referred_columns = self.parse_parenthesized_column_list(Mandatory, false)?; + let referred_columns = self.parse_parenthesized_column_list(Optional, false)?; let mut on_delete = None; let mut on_update = None; loop { @@ -6588,9 +8096,9 @@ impl<'a> Parser<'a> { if let Some(name) = name { return self.expected( "FULLTEXT or SPATIAL option without constraint name", - TokenWithLocation { + TokenWithSpan { token: Token::make_keyword(&name.to_string()), - location: next_token.location, + span: next_token.span, }, ); } @@ -6621,6 +8129,20 @@ impl<'a> Parser<'a> { } } + fn parse_optional_nulls_distinct(&mut self) -> Result { + Ok(if self.parse_keyword(Keyword::NULLS) { + let not = self.parse_keyword(Keyword::NOT); + self.expect_keyword_is(Keyword::DISTINCT)?; + if not { + NullsDistinctOption::NotDistinct + } else { + NullsDistinctOption::Distinct + } + } else { + NullsDistinctOption::None + }) + } + pub fn maybe_parse_options( &mut self, keyword: Keyword, @@ -6636,7 +8158,7 @@ impl<'a> Parser<'a> { pub fn parse_options(&mut self, keyword: Keyword) -> Result, ParserError> { if self.parse_keyword(keyword) { self.expect_token(&Token::LParen)?; - let options = self.parse_comma_separated(Parser::parse_sql_option)?; + let options = self.parse_comma_separated0(Parser::parse_sql_option, Token::RParen)?; self.expect_token(&Token::RParen)?; Ok(options) } else { @@ -6659,16 +8181,30 @@ impl<'a> Parser<'a> { } pub fn parse_index_type(&mut self) -> Result { - if self.parse_keyword(Keyword::BTREE) { - Ok(IndexType::BTree) + Ok(if self.parse_keyword(Keyword::BTREE) { + IndexType::BTree } else if self.parse_keyword(Keyword::HASH) { - Ok(IndexType::Hash) + IndexType::Hash + } else if self.parse_keyword(Keyword::GIN) { + IndexType::GIN + } else if self.parse_keyword(Keyword::GIST) { + IndexType::GiST + } else if self.parse_keyword(Keyword::SPGIST) { + IndexType::SPGiST + } else if self.parse_keyword(Keyword::BRIN) { + IndexType::BRIN + } else if self.parse_keyword(Keyword::BLOOM) { + IndexType::Bloom } else { - self.expected("index type {BTREE | HASH}", self.peek_token()) - } + IndexType::Custom(self.parse_identifier()?) 
+ }) } - /// Parse [USING {BTREE | HASH}] + /// Optionally parse the `USING` keyword, followed by an [IndexType] + /// Example: + /// ```sql + //// USING BTREE (name, age DESC) + /// ``` pub fn parse_optional_using_then_index_type( &mut self, ) -> Result, ParserError> { @@ -6682,7 +8218,7 @@ impl<'a> Parser<'a> { /// Parse `[ident]`, mostly `ident` is name, like: /// `window_name`, `index_name`, ... pub fn parse_optional_indent(&mut self) -> Result, ParserError> { - self.maybe_parse(|parser| parser.parse_identifier(false)) + self.maybe_parse(|parser| parser.parse_identifier()) } #[must_use] @@ -6723,7 +8259,7 @@ impl<'a> Parser<'a> { match self.peek_token().token { Token::Word(w) if w.keyword == Keyword::HEAP && is_mssql => { - Ok(SqlOption::Ident(self.parse_identifier(false)?)) + Ok(SqlOption::Ident(self.parse_identifier()?)) } Token::Word(w) if w.keyword == Keyword::PARTITION && is_mssql => { self.parse_option_partition() @@ -6732,7 +8268,7 @@ impl<'a> Parser<'a> { self.parse_option_clustered() } _ => { - let name = self.parse_identifier(false)?; + let name = self.parse_identifier()?; self.expect_token(&Token::Eq)?; let value = self.parse_expr()?; @@ -6761,7 +8297,7 @@ impl<'a> Parser<'a> { self.expect_token(&Token::LParen)?; let columns = self.parse_comma_separated(|p| { - let name = p.parse_identifier(false)?; + let name = p.parse_identifier()?; let asc = p.parse_asc_desc(); Ok(ClusteredIndex { name, asc }) @@ -6778,11 +8314,11 @@ impl<'a> Parser<'a> { } pub fn parse_option_partition(&mut self) -> Result { - self.expect_keyword(Keyword::PARTITION)?; + self.expect_keyword_is(Keyword::PARTITION)?; self.expect_token(&Token::LParen)?; - let column_name = self.parse_identifier(false)?; + let column_name = self.parse_identifier()?; - self.expect_keyword(Keyword::RANGE)?; + self.expect_keyword_is(Keyword::RANGE)?; let range_direction = if self.parse_keyword(Keyword::LEFT) { Some(PartitionRangeDirection::Left) } else if self.parse_keyword(Keyword::RIGHT) { @@ -6815,7 +8351,7 @@ impl<'a> Parser<'a> { pub fn parse_projection_select(&mut self) -> Result { self.expect_token(&Token::LParen)?; - self.expect_keyword(Keyword::SELECT)?; + self.expect_keyword_is(Keyword::SELECT)?; let projection = self.parse_projection()?; let group_by = self.parse_optional_group_by()?; let order_by = self.parse_optional_order_by()?; @@ -6828,7 +8364,7 @@ impl<'a> Parser<'a> { } pub fn parse_alter_table_add_projection(&mut self) -> Result { let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]); - let name = self.parse_identifier(false)?; + let name = self.parse_identifier()?; let query = self.parse_projection_select()?; Ok(AlterTableOperation::AddProjection { if_not_exists, @@ -6886,18 +8422,18 @@ impl<'a> Parser<'a> { } } else if self.parse_keyword(Keyword::RENAME) { if dialect_of!(self is PostgreSqlDialect) && self.parse_keyword(Keyword::CONSTRAINT) { - let old_name = self.parse_identifier(false)?; - self.expect_keyword(Keyword::TO)?; - let new_name = self.parse_identifier(false)?; + let old_name = self.parse_identifier()?; + self.expect_keyword_is(Keyword::TO)?; + let new_name = self.parse_identifier()?; AlterTableOperation::RenameConstraint { old_name, new_name } } else if self.parse_keyword(Keyword::TO) { let table_name = self.parse_object_name(false)?; AlterTableOperation::RenameTable { table_name } } else { let _ = self.parse_keyword(Keyword::COLUMN); // [ COLUMN ] - let old_column_name = self.parse_identifier(false)?; - self.expect_keyword(Keyword::TO)?; - let new_column_name = 
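`parse_projection_select` and `parse_alter_table_add_projection` above implement ClickHouse's `ALTER TABLE ... ADD PROJECTION`, where the projection body is a parenthesized `SELECT` with optional `GROUP BY` and `ORDER BY`. A minimal sketch, assuming the ClickHouse dialect routes `ADD PROJECTION` here:

```rust
use sqlparser::dialect::ClickHouseDialect;
use sqlparser::parser::Parser;

#[test]
fn alter_table_add_projection() {
    let sql = "ALTER TABLE t ADD PROJECTION IF NOT EXISTS p1 (SELECT a, COUNT(*) GROUP BY a)";
    let stmts = Parser::parse_sql(&ClickHouseDialect {}, sql).unwrap();
    assert_eq!(stmts.len(), 1);
    println!("{:#?}", stmts[0]);
}
```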
self.parse_identifier(false)?; + let old_column_name = self.parse_identifier()?; + self.expect_keyword_is(Keyword::TO)?; + let new_column_name = self.parse_identifier()?; AlterTableOperation::RenameColumn { old_column_name, new_column_name, @@ -6907,10 +8443,10 @@ impl<'a> Parser<'a> { if self.parse_keywords(&[Keyword::ROW, Keyword::LEVEL, Keyword::SECURITY]) { AlterTableOperation::DisableRowLevelSecurity {} } else if self.parse_keyword(Keyword::RULE) { - let name = self.parse_identifier(false)?; + let name = self.parse_identifier()?; AlterTableOperation::DisableRule { name } } else if self.parse_keyword(Keyword::TRIGGER) { - let name = self.parse_identifier(false)?; + let name = self.parse_identifier()?; AlterTableOperation::DisableTrigger { name } } else { return self.expected( @@ -6920,24 +8456,24 @@ impl<'a> Parser<'a> { } } else if self.parse_keyword(Keyword::ENABLE) { if self.parse_keywords(&[Keyword::ALWAYS, Keyword::RULE]) { - let name = self.parse_identifier(false)?; + let name = self.parse_identifier()?; AlterTableOperation::EnableAlwaysRule { name } } else if self.parse_keywords(&[Keyword::ALWAYS, Keyword::TRIGGER]) { - let name = self.parse_identifier(false)?; + let name = self.parse_identifier()?; AlterTableOperation::EnableAlwaysTrigger { name } } else if self.parse_keywords(&[Keyword::ROW, Keyword::LEVEL, Keyword::SECURITY]) { AlterTableOperation::EnableRowLevelSecurity {} } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::RULE]) { - let name = self.parse_identifier(false)?; + let name = self.parse_identifier()?; AlterTableOperation::EnableReplicaRule { name } } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::TRIGGER]) { - let name = self.parse_identifier(false)?; + let name = self.parse_identifier()?; AlterTableOperation::EnableReplicaTrigger { name } } else if self.parse_keyword(Keyword::RULE) { - let name = self.parse_identifier(false)?; + let name = self.parse_identifier()?; AlterTableOperation::EnableRule { name } } else if self.parse_keyword(Keyword::TRIGGER) { - let name = self.parse_identifier(false)?; + let name = self.parse_identifier()?; AlterTableOperation::EnableTrigger { name } } else { return self.expected( @@ -6949,9 +8485,9 @@ impl<'a> Parser<'a> { && dialect_of!(self is ClickHouseDialect|GenericDialect) { let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]); - let name = self.parse_identifier(false)?; + let name = self.parse_identifier()?; let partition = if self.parse_keywords(&[Keyword::IN, Keyword::PARTITION]) { - Some(self.parse_identifier(false)?) + Some(self.parse_identifier()?) } else { None }; @@ -6964,9 +8500,9 @@ impl<'a> Parser<'a> { && dialect_of!(self is ClickHouseDialect|GenericDialect) { let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]); - let name = self.parse_identifier(false)?; + let name = self.parse_identifier()?; let partition = if self.parse_keywords(&[Keyword::IN, Keyword::PARTITION]) { - Some(self.parse_identifier(false)?) + Some(self.parse_identifier()?) 
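The `ENABLE`/`DISABLE` branches above cover PostgreSQL's trigger, rule, and row-level-security toggles, while the ClickHouse-gated branches handle `MATERIALIZE`/`CLEAR COLUMN ... [IN PARTITION ...]`. A short sketch of the PostgreSQL side (dialect choice assumed):

```rust
use sqlparser::dialect::PostgreSqlDialect;
use sqlparser::parser::Parser;

#[test]
fn alter_table_toggle_triggers_and_rules() {
    for sql in [
        "ALTER TABLE t ENABLE ROW LEVEL SECURITY",
        "ALTER TABLE t ENABLE REPLICA TRIGGER trg",
        "ALTER TABLE t DISABLE RULE r",
    ] {
        let stmts = Parser::parse_sql(&PostgreSqlDialect {}, sql).unwrap();
        assert_eq!(stmts.len(), 1);
    }
}
```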
} else { None }; @@ -6994,39 +8530,42 @@ impl<'a> Parser<'a> { } } else if self.parse_keyword(Keyword::CONSTRAINT) { let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]); - let name = self.parse_identifier(false)?; - let cascade = self.parse_keyword(Keyword::CASCADE); + let name = self.parse_identifier()?; + let drop_behavior = self.parse_optional_drop_behavior(); AlterTableOperation::DropConstraint { if_exists, name, - cascade, + drop_behavior, } - } else if self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY]) - && dialect_of!(self is MySqlDialect | GenericDialect) - { + } else if self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY]) { AlterTableOperation::DropPrimaryKey + } else if self.parse_keywords(&[Keyword::FOREIGN, Keyword::KEY]) { + let name = self.parse_identifier()?; + AlterTableOperation::DropForeignKey { name } } else if self.parse_keyword(Keyword::PROJECTION) && dialect_of!(self is ClickHouseDialect|GenericDialect) { let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]); - let name = self.parse_identifier(false)?; + let name = self.parse_identifier()?; AlterTableOperation::DropProjection { if_exists, name } + } else if self.parse_keywords(&[Keyword::CLUSTERING, Keyword::KEY]) { + AlterTableOperation::DropClusteringKey } else { let _ = self.parse_keyword(Keyword::COLUMN); // [ COLUMN ] let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]); - let column_name = self.parse_identifier(false)?; - let cascade = self.parse_keyword(Keyword::CASCADE); + let column_name = self.parse_identifier()?; + let drop_behavior = self.parse_optional_drop_behavior(); AlterTableOperation::DropColumn { column_name, if_exists, - cascade, + drop_behavior, } } } else if self.parse_keyword(Keyword::PARTITION) { self.expect_token(&Token::LParen)?; let before = self.parse_comma_separated(Parser::parse_expr)?; self.expect_token(&Token::RParen)?; - self.expect_keyword(Keyword::RENAME)?; + self.expect_keyword_is(Keyword::RENAME)?; self.expect_keywords(&[Keyword::TO, Keyword::PARTITION])?; self.expect_token(&Token::LParen)?; let renames = self.parse_comma_separated(Parser::parse_expr)?; @@ -7037,8 +8576,8 @@ impl<'a> Parser<'a> { } } else if self.parse_keyword(Keyword::CHANGE) { let _ = self.parse_keyword(Keyword::COLUMN); // [ COLUMN ] - let old_name = self.parse_identifier(false)?; - let new_name = self.parse_identifier(false)?; + let old_name = self.parse_identifier()?; + let new_name = self.parse_identifier()?; let data_type = self.parse_data_type()?; let mut options = vec![]; while let Some(option) = self.parse_optional_column_option()? { @@ -7056,7 +8595,7 @@ impl<'a> Parser<'a> { } } else if self.parse_keyword(Keyword::MODIFY) { let _ = self.parse_keyword(Keyword::COLUMN); // [ COLUMN ] - let col_name = self.parse_identifier(false)?; + let col_name = self.parse_identifier()?; let data_type = self.parse_data_type()?; let mut options = vec![]; while let Some(option) = self.parse_optional_column_option()? 
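`DROP CONSTRAINT` and `DROP COLUMN` now record an optional `CASCADE`/`RESTRICT` through `parse_optional_drop_behavior` instead of a bare `cascade` boolean, and `DROP FOREIGN KEY` / `DROP CLUSTERING KEY` gain their own operation variants. A sketch (dialect choice assumed):

```rust
use sqlparser::dialect::GenericDialect;
use sqlparser::parser::Parser;

#[test]
fn drop_column_with_drop_behavior() {
    // Per the hunk, `drop_behavior` should be Some(Cascade) / Some(Restrict).
    for sql in [
        "ALTER TABLE t DROP COLUMN IF EXISTS c CASCADE",
        "ALTER TABLE t DROP COLUMN c RESTRICT",
    ] {
        let stmts = Parser::parse_sql(&GenericDialect {}, sql).unwrap();
        assert_eq!(stmts.len(), 1);
    }
}
```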
{ @@ -7073,7 +8612,7 @@ impl<'a> Parser<'a> { } } else if self.parse_keyword(Keyword::ALTER) { let _ = self.parse_keyword(Keyword::COLUMN); // [ COLUMN ] - let column_name = self.parse_identifier(false)?; + let column_name = self.parse_identifier()?; let is_postgresql = dialect_of!(self is PostgreSqlDialect); let op: AlterColumnOperation = if self.parse_keywords(&[ @@ -7134,7 +8673,7 @@ impl<'a> Parser<'a> { }; AlterTableOperation::AlterColumn { column_name, op } } else if self.parse_keyword(Keyword::SWAP) { - self.expect_keyword(Keyword::WITH)?; + self.expect_keyword_is(Keyword::WITH)?; let table_name = self.parse_object_name(false)?; AlterTableOperation::SwapWith { table_name } } else if dialect_of!(self is PostgreSqlDialect | GenericDialect) @@ -7159,8 +8698,8 @@ impl<'a> Parser<'a> { { let partition = self.parse_part_or_partition()?; let with_name = if self.parse_keyword(Keyword::WITH) { - self.expect_keyword(Keyword::NAME)?; - Some(self.parse_identifier(false)?) + self.expect_keyword_is(Keyword::NAME)?; + Some(self.parse_identifier()?) } else { None }; @@ -7173,8 +8712,8 @@ impl<'a> Parser<'a> { { let partition = self.parse_part_or_partition()?; let with_name = if self.parse_keyword(Keyword::WITH) { - self.expect_keyword(Keyword::NAME)?; - Some(self.parse_identifier(false)?) + self.expect_keyword_is(Keyword::NAME)?; + Some(self.parse_identifier()?) } else { None }; @@ -7182,6 +8721,55 @@ impl<'a> Parser<'a> { partition, with_name, } + } else if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) { + self.expect_token(&Token::LParen)?; + let exprs = self.parse_comma_separated(|parser| parser.parse_expr())?; + self.expect_token(&Token::RParen)?; + AlterTableOperation::ClusterBy { exprs } + } else if self.parse_keywords(&[Keyword::SUSPEND, Keyword::RECLUSTER]) { + AlterTableOperation::SuspendRecluster + } else if self.parse_keywords(&[Keyword::RESUME, Keyword::RECLUSTER]) { + AlterTableOperation::ResumeRecluster + } else if self.parse_keyword(Keyword::LOCK) { + let equals = self.consume_token(&Token::Eq); + let lock = match self.parse_one_of_keywords(&[ + Keyword::DEFAULT, + Keyword::EXCLUSIVE, + Keyword::NONE, + Keyword::SHARED, + ]) { + Some(Keyword::DEFAULT) => AlterTableLock::Default, + Some(Keyword::EXCLUSIVE) => AlterTableLock::Exclusive, + Some(Keyword::NONE) => AlterTableLock::None, + Some(Keyword::SHARED) => AlterTableLock::Shared, + _ => self.expected( + "DEFAULT, EXCLUSIVE, NONE or SHARED after LOCK [=]", + self.peek_token(), + )?, + }; + AlterTableOperation::Lock { equals, lock } + } else if self.parse_keyword(Keyword::ALGORITHM) { + let equals = self.consume_token(&Token::Eq); + let algorithm = match self.parse_one_of_keywords(&[ + Keyword::DEFAULT, + Keyword::INSTANT, + Keyword::INPLACE, + Keyword::COPY, + ]) { + Some(Keyword::DEFAULT) => AlterTableAlgorithm::Default, + Some(Keyword::INSTANT) => AlterTableAlgorithm::Instant, + Some(Keyword::INPLACE) => AlterTableAlgorithm::Inplace, + Some(Keyword::COPY) => AlterTableAlgorithm::Copy, + _ => self.expected( + "DEFAULT, INSTANT, INPLACE, or COPY after ALGORITHM [=]", + self.peek_token(), + )?, + }; + AlterTableOperation::Algorithm { equals, algorithm } + } else if self.parse_keyword(Keyword::AUTO_INCREMENT) { + let equals = self.consume_token(&Token::Eq); + let value = self.parse_number_value()?; + AlterTableOperation::AutoIncrement { equals, value } } else { let options: Vec = self.parse_options_with_keywords(&[Keyword::SET, Keyword::TBLPROPERTIES])?; @@ -7212,13 +8800,16 @@ impl<'a> Parser<'a> { pub fn parse_alter(&mut 
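The new branches above add MySQL's online-DDL clauses (`ALGORITHM [=] ...`, `LOCK [=] ...`) and `AUTO_INCREMENT [=] n` as `AlterTableOperation` variants, each recording whether the optional `=` was present, alongside Snowflake's `CLUSTER BY` and `SUSPEND`/`RESUME RECLUSTER`. A sketch with an assumed dialect:

```rust
use sqlparser::dialect::MySqlDialect;
use sqlparser::parser::Parser;

#[test]
fn mysql_online_ddl_clauses() {
    // Expected operations per the hunk: Algorithm { equals: true, .. },
    // Lock { equals: true, .. }, AutoIncrement { equals: true, .. }.
    let sql = "ALTER TABLE t ALGORITHM = INPLACE, LOCK = NONE, AUTO_INCREMENT = 100";
    let stmts = Parser::parse_sql(&MySqlDialect {}, sql).unwrap();
    assert_eq!(stmts.len(), 1);
    println!("{:#?}", stmts[0]);
}
```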
self) -> Result { let object_type = self.expect_one_of_keywords(&[ Keyword::VIEW, + Keyword::TYPE, Keyword::TABLE, Keyword::INDEX, Keyword::ROLE, Keyword::POLICY, + Keyword::CONNECTOR, ])?; match object_type { Keyword::VIEW => self.parse_alter_view(), + Keyword::TYPE => self.parse_alter_type(), Keyword::TABLE => { let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]); let only = self.parse_keyword(Keyword::ONLY); // [ ONLY ] @@ -7230,12 +8821,12 @@ impl<'a> Parser<'a> { if self.parse_keyword(Keyword::LOCATION) { location = Some(HiveSetLocation { has_set: false, - location: self.parse_identifier(false)?, + location: self.parse_identifier()?, }); } else if self.parse_keywords(&[Keyword::SET, Keyword::LOCATION]) { location = Some(HiveSetLocation { has_set: true, - location: self.parse_identifier(false)?, + location: self.parse_identifier()?, }); } @@ -7268,6 +8859,7 @@ impl<'a> Parser<'a> { } Keyword::ROLE => self.parse_alter_role(), Keyword::POLICY => self.parse_alter_policy(), + Keyword::CONNECTOR => self.parse_alter_connector(), // unreachable because expect_one_of_keywords used above _ => unreachable!(), } @@ -7279,7 +8871,7 @@ impl<'a> Parser<'a> { let with_options = self.parse_options(Keyword::WITH)?; - self.expect_keyword(Keyword::AS)?; + self.expect_keyword_is(Keyword::AS)?; let query = self.parse_query()?; Ok(Statement::AlterView { @@ -7290,6 +8882,55 @@ impl<'a> Parser<'a> { }) } + /// Parse a [Statement::AlterType] + pub fn parse_alter_type(&mut self) -> Result { + let name = self.parse_object_name(false)?; + + if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) { + let new_name = self.parse_identifier()?; + Ok(Statement::AlterType(AlterType { + name, + operation: AlterTypeOperation::Rename(AlterTypeRename { new_name }), + })) + } else if self.parse_keywords(&[Keyword::ADD, Keyword::VALUE]) { + let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]); + let new_enum_value = self.parse_identifier()?; + let position = if self.parse_keyword(Keyword::BEFORE) { + Some(AlterTypeAddValuePosition::Before(self.parse_identifier()?)) + } else if self.parse_keyword(Keyword::AFTER) { + Some(AlterTypeAddValuePosition::After(self.parse_identifier()?)) + } else { + None + }; + + Ok(Statement::AlterType(AlterType { + name, + operation: AlterTypeOperation::AddValue(AlterTypeAddValue { + if_not_exists, + value: new_enum_value, + position, + }), + })) + } else if self.parse_keywords(&[Keyword::RENAME, Keyword::VALUE]) { + let existing_enum_value = self.parse_identifier()?; + self.expect_keyword(Keyword::TO)?; + let new_enum_value = self.parse_identifier()?; + + Ok(Statement::AlterType(AlterType { + name, + operation: AlterTypeOperation::RenameValue(AlterTypeRenameValue { + from: existing_enum_value, + to: new_enum_value, + }), + })) + } else { + return self.expected_ref( + "{RENAME TO | { RENAME | ADD } VALUE}", + self.peek_token_ref(), + ); + } + } + /// Parse a `CALL procedure_name(arg1, arg2, ...)` /// or `CALL procedure_name` statement pub fn parse_call(&mut self) -> Result { @@ -7299,12 +8940,13 @@ impl<'a> Parser<'a> { Expr::Function(f) => Ok(Statement::Call(f)), other => parser_err!( format!("Expected a simple procedure call but found: {other}"), - self.peek_token().location + self.peek_token().span.start ), } } else { Ok(Statement::Call(Function { name: object_name, + uses_odbc_syntax: false, parameters: FunctionArguments::None, args: FunctionArguments::None, over: None, @@ -7382,11 +9024,19 @@ impl<'a> Parser<'a> { }) } + /// Parse 
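`parse_alter_type` above handles PostgreSQL's `ALTER TYPE` forms: `RENAME TO`, `ADD VALUE [IF NOT EXISTS] ... [BEFORE | AFTER ...]`, and `RENAME VALUE ... TO ...`, each mapped to an `AlterTypeOperation`. A minimal sketch (dialect choice assumed):

```rust
use sqlparser::dialect::PostgreSqlDialect;
use sqlparser::parser::Parser;

#[test]
fn alter_type_enum_values() {
    for sql in [
        "ALTER TYPE mood RENAME TO sentiment",
        "ALTER TYPE mood ADD VALUE IF NOT EXISTS 'sad' BEFORE 'ok'",
        "ALTER TYPE mood RENAME VALUE 'ok' TO 'fine'",
    ] {
        let stmts = Parser::parse_sql(&PostgreSqlDialect {}, sql).unwrap();
        assert_eq!(stmts.len(), 1);
    }
}
```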
[Statement::Open] + fn parse_open(&mut self) -> Result { + self.expect_keyword(Keyword::OPEN)?; + Ok(Statement::Open(OpenStatement { + cursor_name: self.parse_identifier()?, + })) + } + pub fn parse_close(&mut self) -> Result { let cursor = if self.parse_keyword(Keyword::ALL) { CloseCursor::All } else { - let name = self.parse_identifier(false)?; + let name = self.parse_identifier()?; CloseCursor::Specific { name } }; @@ -7408,7 +9058,7 @@ impl<'a> Parser<'a> { Keyword::FORCE_NULL, Keyword::ENCODING, ]) { - Some(Keyword::FORMAT) => CopyOption::Format(self.parse_identifier(false)?), + Some(Keyword::FORMAT) => CopyOption::Format(self.parse_identifier()?), Some(Keyword::FREEZE) => CopyOption::Freeze(!matches!( self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]), Some(Keyword::FALSE) @@ -7484,12 +9134,12 @@ impl<'a> Parser<'a> { } Some(Keyword::FORCE) if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) => { CopyLegacyCsvOption::ForceNotNull( - self.parse_comma_separated(|p| p.parse_identifier(false))?, + self.parse_comma_separated(|p| p.parse_identifier())?, ) } Some(Keyword::FORCE) if self.parse_keywords(&[Keyword::QUOTE]) => { CopyLegacyCsvOption::ForceQuote( - self.parse_comma_separated(|p| p.parse_identifier(false))?, + self.parse_comma_separated(|p| p.parse_identifier())?, ) } _ => self.expected("csv option", self.peek_token())?, @@ -7503,7 +9153,7 @@ impl<'a> Parser<'a> { let loc = self .tokens .get(self.index - 1) - .map_or(Location { line: 0, column: 0 }, |t| t.location); + .map_or(Location { line: 0, column: 0 }, |t| t.span.start); return parser_err!(format!("Expect a char, found {s:?}"), loc); } Ok(s.chars().next().unwrap()) @@ -7547,102 +9197,114 @@ impl<'a> Parser<'a> { } /// Parse a literal value (numbers, strings, date/time, booleans) - pub fn parse_value(&mut self) -> Result { + pub fn parse_value(&mut self) -> Result { let next_token = self.next_token(); - let location = next_token.location; + let span = next_token.span; + let ok_value = |value: Value| Ok(value.with_span(span)); match next_token.token { Token::Word(w) => match w.keyword { - Keyword::TRUE => Ok(Value::Boolean(true)), - Keyword::FALSE => Ok(Value::Boolean(false)), - Keyword::NULL => Ok(Value::Null), + Keyword::TRUE if self.dialect.supports_boolean_literals() => { + ok_value(Value::Boolean(true)) + } + Keyword::FALSE if self.dialect.supports_boolean_literals() => { + ok_value(Value::Boolean(false)) + } + Keyword::NULL => ok_value(Value::Null), Keyword::NoKeyword if w.quote_style.is_some() => match w.quote_style { - Some('"') => Ok(Value::DoubleQuotedString(w.value)), - Some('\'') => Ok(Value::SingleQuotedString(w.value)), + Some('"') => ok_value(Value::DoubleQuotedString(w.value)), + Some('\'') => ok_value(Value::SingleQuotedString(w.value)), _ => self.expected( "A value?", - TokenWithLocation { + TokenWithSpan { token: Token::Word(w), - location, + span, }, )?, }, _ => self.expected( "a concrete value", - TokenWithLocation { + TokenWithSpan { token: Token::Word(w), - location, + span, }, ), }, // The call to n.parse() returns a bigdecimal when the // bigdecimal feature is enabled, and is otherwise a no-op // (i.e., it returns the input string). 
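`parse_open` above introduces a dedicated `Statement::Open` for `OPEN cursor_name`, complementing the existing `parse_close`. A hedged sketch, assuming the top-level statement dispatcher (changed earlier in this diff) routes the `OPEN` keyword here:

```rust
use sqlparser::dialect::GenericDialect;
use sqlparser::parser::Parser;

#[test]
fn open_and_close_cursor() {
    let stmts = Parser::parse_sql(&GenericDialect {}, "OPEN my_cursor; CLOSE my_cursor").unwrap();
    // One Statement::Open and one Statement::Close are expected.
    assert_eq!(stmts.len(), 2);
    println!("{:#?}", stmts);
}
```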
- Token::Number(n, l) => Ok(Value::Number(Self::parse(n, location)?, l)), - Token::SingleQuotedString(ref s) => Ok(Value::SingleQuotedString(s.to_string())), - Token::DoubleQuotedString(ref s) => Ok(Value::DoubleQuotedString(s.to_string())), + Token::Number(n, l) => ok_value(Value::Number(Self::parse(n, span.start)?, l)), + Token::SingleQuotedString(ref s) => ok_value(Value::SingleQuotedString(s.to_string())), + Token::DoubleQuotedString(ref s) => ok_value(Value::DoubleQuotedString(s.to_string())), Token::TripleSingleQuotedString(ref s) => { - Ok(Value::TripleSingleQuotedString(s.to_string())) + ok_value(Value::TripleSingleQuotedString(s.to_string())) } Token::TripleDoubleQuotedString(ref s) => { - Ok(Value::TripleDoubleQuotedString(s.to_string())) + ok_value(Value::TripleDoubleQuotedString(s.to_string())) } - Token::DollarQuotedString(ref s) => Ok(Value::DollarQuotedString(s.clone())), + Token::DollarQuotedString(ref s) => ok_value(Value::DollarQuotedString(s.clone())), Token::SingleQuotedByteStringLiteral(ref s) => { - Ok(Value::SingleQuotedByteStringLiteral(s.clone())) + ok_value(Value::SingleQuotedByteStringLiteral(s.clone())) } Token::DoubleQuotedByteStringLiteral(ref s) => { - Ok(Value::DoubleQuotedByteStringLiteral(s.clone())) + ok_value(Value::DoubleQuotedByteStringLiteral(s.clone())) } Token::TripleSingleQuotedByteStringLiteral(ref s) => { - Ok(Value::TripleSingleQuotedByteStringLiteral(s.clone())) + ok_value(Value::TripleSingleQuotedByteStringLiteral(s.clone())) } Token::TripleDoubleQuotedByteStringLiteral(ref s) => { - Ok(Value::TripleDoubleQuotedByteStringLiteral(s.clone())) + ok_value(Value::TripleDoubleQuotedByteStringLiteral(s.clone())) } Token::SingleQuotedRawStringLiteral(ref s) => { - Ok(Value::SingleQuotedRawStringLiteral(s.clone())) + ok_value(Value::SingleQuotedRawStringLiteral(s.clone())) } Token::DoubleQuotedRawStringLiteral(ref s) => { - Ok(Value::DoubleQuotedRawStringLiteral(s.clone())) + ok_value(Value::DoubleQuotedRawStringLiteral(s.clone())) } Token::TripleSingleQuotedRawStringLiteral(ref s) => { - Ok(Value::TripleSingleQuotedRawStringLiteral(s.clone())) + ok_value(Value::TripleSingleQuotedRawStringLiteral(s.clone())) } Token::TripleDoubleQuotedRawStringLiteral(ref s) => { - Ok(Value::TripleDoubleQuotedRawStringLiteral(s.clone())) + ok_value(Value::TripleDoubleQuotedRawStringLiteral(s.clone())) + } + Token::NationalStringLiteral(ref s) => { + ok_value(Value::NationalStringLiteral(s.to_string())) } - Token::NationalStringLiteral(ref s) => Ok(Value::NationalStringLiteral(s.to_string())), - Token::EscapedStringLiteral(ref s) => Ok(Value::EscapedStringLiteral(s.to_string())), - Token::UnicodeStringLiteral(ref s) => Ok(Value::UnicodeStringLiteral(s.to_string())), - Token::HexStringLiteral(ref s) => Ok(Value::HexStringLiteral(s.to_string())), - Token::Placeholder(ref s) => Ok(Value::Placeholder(s.to_string())), + Token::EscapedStringLiteral(ref s) => { + ok_value(Value::EscapedStringLiteral(s.to_string())) + } + Token::UnicodeStringLiteral(ref s) => { + ok_value(Value::UnicodeStringLiteral(s.to_string())) + } + Token::HexStringLiteral(ref s) => ok_value(Value::HexStringLiteral(s.to_string())), + Token::Placeholder(ref s) => ok_value(Value::Placeholder(s.to_string())), tok @ Token::Colon | tok @ Token::AtSign => { // Not calling self.parse_identifier(false)? 
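Throughout `parse_value` above, each literal is now wrapped with its source span via `Value::with_span`, so callers receive a span-carrying wrapper instead of a bare `Value` (the next hunks show `parse_number_value` unwrapping it with `.value`). A hedged sketch of calling the parser method directly; the wrapper's exact shape is an assumption based on these hunks:

```rust
use sqlparser::dialect::GenericDialect;
use sqlparser::parser::Parser;

#[test]
fn parse_value_carries_a_span() {
    let dialect = GenericDialect {};
    let mut parser = Parser::new(&dialect).try_with_sql("42").unwrap();
    // Per the hunks, the result bundles the literal value with the span of its token.
    let v = parser.parse_value().unwrap();
    println!("{v:?}");
}
```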
because only in placeholder we want to check numbers as idfentifies // This because snowflake allows numbers as placeholders let next_token = self.next_token(); let ident = match next_token.token { - Token::Word(w) => Ok(w.to_ident()), + Token::Word(w) => Ok(w.into_ident(next_token.span)), Token::Number(w, false) => Ok(Ident::new(w)), _ => self.expected("placeholder", next_token), }?; let placeholder = tok.to_string() + &ident.value; - Ok(Value::Placeholder(placeholder)) + ok_value(Value::Placeholder(placeholder)) } unexpected => self.expected( "a value", - TokenWithLocation { + TokenWithSpan { token: unexpected, - location, + span, }, ), } } /// Parse an unsigned numeric literal - pub fn parse_number_value(&mut self) -> Result { - match self.parse_value()? { - v @ Value::Number(_, _) => Ok(v), - v @ Value::Placeholder(_) => Ok(v), + pub fn parse_number_value(&mut self) -> Result { + let value_wrapper = self.parse_value()?; + match &value_wrapper.value { + Value::Number(_, _) => Ok(value_wrapper), + Value::Placeholder(_) => Ok(value_wrapper), _ => { self.prev_token(); self.expected("literal number", self.peek_token()) @@ -7670,18 +9332,24 @@ impl<'a> Parser<'a> { } } - fn parse_introduced_string_value(&mut self) -> Result { + fn parse_introduced_string_expr(&mut self) -> Result { let next_token = self.next_token(); - let location = next_token.location; + let span = next_token.span; match next_token.token { - Token::SingleQuotedString(ref s) => Ok(Value::SingleQuotedString(s.to_string())), - Token::DoubleQuotedString(ref s) => Ok(Value::DoubleQuotedString(s.to_string())), - Token::HexStringLiteral(ref s) => Ok(Value::HexStringLiteral(s.to_string())), + Token::SingleQuotedString(ref s) => Ok(Expr::Value( + Value::SingleQuotedString(s.to_string()).with_span(span), + )), + Token::DoubleQuotedString(ref s) => Ok(Expr::Value( + Value::DoubleQuotedString(s.to_string()).with_span(span), + )), + Token::HexStringLiteral(ref s) => Ok(Expr::Value( + Value::HexStringLiteral(s.to_string()).with_span(span), + )), unexpected => self.expected( "a string value", - TokenWithLocation { + TokenWithSpan { token: unexpected, - location, + span, }, ), } @@ -7691,7 +9359,7 @@ impl<'a> Parser<'a> { pub fn parse_literal_uint(&mut self) -> Result { let next_token = self.next_token(); match next_token.token { - Token::Number(s, _) => Self::parse::(s, next_token.location), + Token::Number(s, _) => Self::parse::(s, next_token.span.start), _ => self.expected("literal int", next_token), } } @@ -7700,15 +9368,16 @@ impl<'a> Parser<'a> { /// e.g. `CREATE FUNCTION ... AS $$ body $$`. 
fn parse_create_function_body_string(&mut self) -> Result { let peek_token = self.peek_token(); + let span = peek_token.span; match peek_token.token { Token::DollarQuotedString(s) if dialect_of!(self is PostgreSqlDialect | GenericDialect) => { self.next_token(); - Ok(Expr::Value(Value::DollarQuotedString(s))) + Ok(Expr::Value(Value::DollarQuotedString(s).with_span(span))) } - _ => Ok(Expr::Value(Value::SingleQuotedString( - self.parse_literal_string()?, - ))), + _ => Ok(Expr::Value( + Value::SingleQuotedString(self.parse_literal_string()?).with_span(span), + )), } } @@ -7731,6 +9400,50 @@ impl<'a> Parser<'a> { } } + /// Parse a literal unicode normalization clause + pub fn parse_unicode_is_normalized(&mut self, expr: Expr) -> Result { + let neg = self.parse_keyword(Keyword::NOT); + let normalized_form = self.maybe_parse(|parser| { + match parser.parse_one_of_keywords(&[ + Keyword::NFC, + Keyword::NFD, + Keyword::NFKC, + Keyword::NFKD, + ]) { + Some(Keyword::NFC) => Ok(NormalizationForm::NFC), + Some(Keyword::NFD) => Ok(NormalizationForm::NFD), + Some(Keyword::NFKC) => Ok(NormalizationForm::NFKC), + Some(Keyword::NFKD) => Ok(NormalizationForm::NFKD), + _ => parser.expected("unicode normalization form", parser.peek_token()), + } + })?; + if self.parse_keyword(Keyword::NORMALIZED) { + return Ok(Expr::IsNormalized { + expr: Box::new(expr), + form: normalized_form, + negated: neg, + }); + } + self.expected("unicode normalization form", self.peek_token()) + } + + pub fn parse_enum_values(&mut self) -> Result, ParserError> { + self.expect_token(&Token::LParen)?; + let values = self.parse_comma_separated(|parser| { + let name = parser.parse_literal_string()?; + let e = if parser.consume_token(&Token::Eq) { + let value = parser.parse_number()?; + EnumMember::NamedValue(name, value) + } else { + EnumMember::Name(name) + }; + Ok(e) + })?; + self.expect_token(&Token::RParen)?; + + Ok(values) + } + /// Parse a SQL datatype (in the context of a CREATE TABLE statement for example) pub fn parse_data_type(&mut self) -> Result { let (ty, trailing_bracket) = self.parse_data_type_helper()?; @@ -7747,9 +9460,13 @@ impl<'a> Parser<'a> { fn parse_data_type_helper( &mut self, ) -> Result<(DataType, MatchedTrailingBracket), ParserError> { - let next_token = self.next_token(); + let dialect = self.dialect; + self.advance_token(); + let next_token = self.get_current_token(); + let next_token_index = self.get_current_index(); + let mut trailing_bracket: MatchedTrailingBracket = false.into(); - let mut data = match next_token.token { + let mut data = match &next_token.token { Token::Word(w) => match w.keyword { Keyword::BOOLEAN => Ok(DataType::Boolean), Keyword::BOOL => Ok(DataType::Bool), @@ -7763,13 +9480,15 @@ impl<'a> Parser<'a> { if self.parse_keyword(Keyword::PRECISION) { Ok(DataType::DoublePrecision) } else { - Ok(DataType::Double) + Ok(DataType::Double( + self.parse_exact_number_optional_precision_scale()?, + )) } } Keyword::TINYINT => { let optional_precision = self.parse_optional_precision(); if self.parse_keyword(Keyword::UNSIGNED) { - Ok(DataType::UnsignedTinyInt(optional_precision?)) + Ok(DataType::TinyIntUnsigned(optional_precision?)) } else { Ok(DataType::TinyInt(optional_precision?)) } @@ -7777,7 +9496,7 @@ impl<'a> Parser<'a> { Keyword::INT2 => { let optional_precision = self.parse_optional_precision(); if self.parse_keyword(Keyword::UNSIGNED) { - Ok(DataType::UnsignedInt2(optional_precision?)) + Ok(DataType::Int2Unsigned(optional_precision?)) } else { Ok(DataType::Int2(optional_precision?)) } @@ 
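`parse_unicode_is_normalized` above adds the standard `IS [NOT] [NFC | NFD | NFKC | NFKD] NORMALIZED` predicate as `Expr::IsNormalized`, and `parse_enum_values` lets enum members carry explicit numeric values (`'a' = 1`). A sketch of the predicate, assuming the `IS` infix parsing routes these keywords here:

```rust
use sqlparser::dialect::GenericDialect;
use sqlparser::parser::Parser;

#[test]
fn is_normalized_predicate() {
    for sql in [
        "SELECT name IS NORMALIZED FROM t",
        "SELECT name IS NOT NFKC NORMALIZED FROM t",
    ] {
        let stmts = Parser::parse_sql(&GenericDialect {}, sql).unwrap();
        assert_eq!(stmts.len(), 1);
    }
}
```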
-7785,7 +9504,7 @@ impl<'a> Parser<'a> { Keyword::SMALLINT => { let optional_precision = self.parse_optional_precision(); if self.parse_keyword(Keyword::UNSIGNED) { - Ok(DataType::UnsignedSmallInt(optional_precision?)) + Ok(DataType::SmallIntUnsigned(optional_precision?)) } else { Ok(DataType::SmallInt(optional_precision?)) } @@ -7793,7 +9512,7 @@ impl<'a> Parser<'a> { Keyword::MEDIUMINT => { let optional_precision = self.parse_optional_precision(); if self.parse_keyword(Keyword::UNSIGNED) { - Ok(DataType::UnsignedMediumInt(optional_precision?)) + Ok(DataType::MediumIntUnsigned(optional_precision?)) } else { Ok(DataType::MediumInt(optional_precision?)) } @@ -7801,7 +9520,7 @@ impl<'a> Parser<'a> { Keyword::INT => { let optional_precision = self.parse_optional_precision(); if self.parse_keyword(Keyword::UNSIGNED) { - Ok(DataType::UnsignedInt(optional_precision?)) + Ok(DataType::IntUnsigned(optional_precision?)) } else { Ok(DataType::Int(optional_precision?)) } @@ -7809,7 +9528,7 @@ impl<'a> Parser<'a> { Keyword::INT4 => { let optional_precision = self.parse_optional_precision(); if self.parse_keyword(Keyword::UNSIGNED) { - Ok(DataType::UnsignedInt4(optional_precision?)) + Ok(DataType::Int4Unsigned(optional_precision?)) } else { Ok(DataType::Int4(optional_precision?)) } @@ -7817,7 +9536,7 @@ impl<'a> Parser<'a> { Keyword::INT8 => { let optional_precision = self.parse_optional_precision(); if self.parse_keyword(Keyword::UNSIGNED) { - Ok(DataType::UnsignedInt8(optional_precision?)) + Ok(DataType::Int8Unsigned(optional_precision?)) } else { Ok(DataType::Int8(optional_precision?)) } @@ -7830,7 +9549,7 @@ impl<'a> Parser<'a> { Keyword::INTEGER => { let optional_precision = self.parse_optional_precision(); if self.parse_keyword(Keyword::UNSIGNED) { - Ok(DataType::UnsignedInteger(optional_precision?)) + Ok(DataType::IntegerUnsigned(optional_precision?)) } else { Ok(DataType::Integer(optional_precision?)) } @@ -7838,11 +9557,16 @@ impl<'a> Parser<'a> { Keyword::BIGINT => { let optional_precision = self.parse_optional_precision(); if self.parse_keyword(Keyword::UNSIGNED) { - Ok(DataType::UnsignedBigInt(optional_precision?)) + Ok(DataType::BigIntUnsigned(optional_precision?)) } else { Ok(DataType::BigInt(optional_precision?)) } } + Keyword::HUGEINT => Ok(DataType::HugeInt), + Keyword::UBIGINT => Ok(DataType::UBigInt), + Keyword::UHUGEINT => Ok(DataType::UHugeInt), + Keyword::USMALLINT => Ok(DataType::USmallInt), + Keyword::UTINYINT => Ok(DataType::UTinyInt), Keyword::UINT8 => Ok(DataType::UInt8), Keyword::UINT16 => Ok(DataType::UInt16), Keyword::UINT32 => Ok(DataType::UInt32), @@ -7879,9 +9603,20 @@ impl<'a> Parser<'a> { } Keyword::CLOB => Ok(DataType::Clob(self.parse_optional_precision()?)), Keyword::BINARY => Ok(DataType::Binary(self.parse_optional_precision()?)), - Keyword::VARBINARY => Ok(DataType::Varbinary(self.parse_optional_precision()?)), + Keyword::VARBINARY => Ok(DataType::Varbinary(self.parse_optional_binary_length()?)), Keyword::BLOB => Ok(DataType::Blob(self.parse_optional_precision()?)), + Keyword::TINYBLOB => Ok(DataType::TinyBlob), + Keyword::MEDIUMBLOB => Ok(DataType::MediumBlob), + Keyword::LONGBLOB => Ok(DataType::LongBlob), Keyword::BYTES => Ok(DataType::Bytes(self.parse_optional_precision()?)), + Keyword::BIT => { + if self.parse_keyword(Keyword::VARYING) { + Ok(DataType::BitVarying(self.parse_optional_precision()?)) + } else { + Ok(DataType::Bit(self.parse_optional_precision()?)) + } + } + Keyword::VARBIT => Ok(DataType::VarBit(self.parse_optional_precision()?)), Keyword::UUID 
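The data-type hunks above rename the unsigned variants (`DataType::TinyIntUnsigned`, `IntUnsigned`, ...) and add DuckDB's `HUGEINT`/`UHUGEINT`/`UTINYINT`/`USMALLINT`/`UBIGINT`, plus `BIT [VARYING]`/`VARBIT` and the MySQL blob family. A sketch of the DuckDB integer types (dialect choice assumed):

```rust
use sqlparser::dialect::DuckDbDialect;
use sqlparser::parser::Parser;

#[test]
fn duckdb_integer_types() {
    let sql = "CREATE TABLE t (a UTINYINT, b USMALLINT, c UBIGINT, d HUGEINT, e UHUGEINT)";
    let stmts = Parser::parse_sql(&DuckDbDialect {}, sql).unwrap();
    assert_eq!(stmts.len(), 1);
    println!("{:#?}", stmts[0]);
}
```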
=> Ok(DataType::Uuid), Keyword::DATE => Ok(DataType::Date), Keyword::DATE32 => Ok(DataType::Date32), @@ -7908,6 +9643,7 @@ impl<'a> Parser<'a> { self.parse_optional_precision()?, TimezoneInfo::Tz, )), + Keyword::TIMESTAMP_NTZ => Ok(DataType::TimestampNtz), Keyword::TIME => { let precision = self.parse_optional_precision()?; let tz = if self.parse_keyword(Keyword::WITH) { @@ -7940,6 +9676,9 @@ impl<'a> Parser<'a> { Ok(DataType::FixedString(character_length)) } Keyword::TEXT => Ok(DataType::Text), + Keyword::TINYTEXT => Ok(DataType::TinyText), + Keyword::MEDIUMTEXT => Ok(DataType::MediumText), + Keyword::LONGTEXT => Ok(DataType::LongText), Keyword::BYTEA => Ok(DataType::Bytea), Keyword::NUMERIC => Ok(DataType::Numeric( self.parse_exact_number_optional_precision_scale()?, @@ -7956,7 +9695,9 @@ impl<'a> Parser<'a> { Keyword::BIGDECIMAL => Ok(DataType::BigDecimal( self.parse_exact_number_optional_precision_scale()?, )), - Keyword::ENUM => Ok(DataType::Enum(self.parse_string_values()?)), + Keyword::ENUM => Ok(DataType::Enum(self.parse_enum_values()?, None)), + Keyword::ENUM8 => Ok(DataType::Enum(self.parse_enum_values()?, Some(8))), + Keyword::ENUM16 => Ok(DataType::Enum(self.parse_enum_values()?, Some(16))), Keyword::SET => Ok(DataType::Set(self.parse_string_values()?)), Keyword::ARRAY => { if dialect_of!(self is SnowflakeDialect) { @@ -7974,12 +9715,12 @@ impl<'a> Parser<'a> { )))) } } - Keyword::STRUCT if dialect_of!(self is DuckDbDialect) => { + Keyword::STRUCT if dialect_is!(dialect is DuckDbDialect) => { self.prev_token(); let field_defs = self.parse_duckdb_struct_type_def()?; Ok(DataType::Struct(field_defs, StructBracketKind::Parentheses)) } - Keyword::STRUCT if dialect_of!(self is BigQueryDialect | GenericDialect) => { + Keyword::STRUCT if dialect_is!(dialect is BigQueryDialect | GenericDialect) => { self.prev_token(); let (field_defs, _trailing_bracket) = self.parse_struct_type_def(Self::parse_struct_field_def)?; @@ -7989,18 +9730,18 @@ impl<'a> Parser<'a> { StructBracketKind::AngleBrackets, )) } - Keyword::UNION if dialect_of!(self is DuckDbDialect | GenericDialect) => { + Keyword::UNION if dialect_is!(dialect is DuckDbDialect | GenericDialect) => { self.prev_token(); let fields = self.parse_union_type_def()?; Ok(DataType::Union(fields)) } - Keyword::NULLABLE if dialect_of!(self is ClickHouseDialect | GenericDialect) => { + Keyword::NULLABLE if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => { Ok(self.parse_sub_type(DataType::Nullable)?) } - Keyword::LOWCARDINALITY if dialect_of!(self is ClickHouseDialect | GenericDialect) => { + Keyword::LOWCARDINALITY if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => { Ok(self.parse_sub_type(DataType::LowCardinality)?) 
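`ENUM` now collects its members through `parse_enum_values` (with optional explicit numeric values), and ClickHouse's `ENUM8`/`ENUM16` record their bit width; the MySQL text family (`TINYTEXT`, `MEDIUMTEXT`, `LONGTEXT`) and `TIMESTAMP_NTZ` are also added. A sketch of the ClickHouse enum form (dialect choice assumed):

```rust
use sqlparser::dialect::ClickHouseDialect;
use sqlparser::parser::Parser;

#[test]
fn clickhouse_enum8_with_explicit_values() {
    // Per the hunks, members become EnumMember::NamedValue and the width is Some(8).
    let sql = "CREATE TABLE t (status Enum8('ok' = 1, 'err' = 2))";
    let stmts = Parser::parse_sql(&ClickHouseDialect {}, sql).unwrap();
    assert_eq!(stmts.len(), 1);
}
```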
} - Keyword::MAP if dialect_of!(self is ClickHouseDialect | GenericDialect) => { + Keyword::MAP if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => { self.prev_token(); let (key_data_type, value_data_type) = self.parse_click_house_map_def()?; Ok(DataType::Map( @@ -8008,18 +9749,40 @@ impl<'a> Parser<'a> { Box::new(value_data_type), )) } - Keyword::NESTED if dialect_of!(self is ClickHouseDialect | GenericDialect) => { + Keyword::NESTED if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => { self.expect_token(&Token::LParen)?; let field_defs = self.parse_comma_separated(Parser::parse_column_def)?; self.expect_token(&Token::RParen)?; Ok(DataType::Nested(field_defs)) } - Keyword::TUPLE if dialect_of!(self is ClickHouseDialect | GenericDialect) => { + Keyword::TUPLE if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => { self.prev_token(); let field_defs = self.parse_click_house_tuple_def()?; Ok(DataType::Tuple(field_defs)) } Keyword::TRIGGER => Ok(DataType::Trigger), + Keyword::ANY if self.peek_keyword(Keyword::TYPE) => { + let _ = self.parse_keyword(Keyword::TYPE); + Ok(DataType::AnyType) + } + Keyword::TABLE => { + let columns = self.parse_returns_table_columns()?; + Ok(DataType::Table(columns)) + } + Keyword::SIGNED => { + if self.parse_keyword(Keyword::INTEGER) { + Ok(DataType::SignedInteger) + } else { + Ok(DataType::Signed) + } + } + Keyword::UNSIGNED => { + if self.parse_keyword(Keyword::INTEGER) { + Ok(DataType::UnsignedInteger) + } else { + Ok(DataType::Unsigned) + } + } _ => { self.prev_token(); let type_name = self.parse_object_name(false)?; @@ -8030,23 +9793,37 @@ impl<'a> Parser<'a> { } } }, - _ => self.expected("a data type name", next_token), + _ => self.expected_at("a data type name", next_token_index), }?; - // Parse array data types. Note: this is postgresql-specific and different from - // Keyword::ARRAY syntax from above - while self.consume_token(&Token::LBracket) { - let size = if dialect_of!(self is GenericDialect | DuckDbDialect | PostgreSqlDialect) { - self.maybe_parse(|p| p.parse_literal_uint())? 
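The final data-type arms add `ANY TYPE`, `TABLE(...)` return types, and MySQL's bare `SIGNED`/`UNSIGNED` (with optional `INTEGER`) as used in `CAST`. A sketch of the cast form (dialect routing assumed):

```rust
use sqlparser::dialect::MySqlDialect;
use sqlparser::parser::Parser;

#[test]
fn cast_to_signed_and_unsigned() {
    for sql in [
        "SELECT CAST(c AS SIGNED) FROM t",
        "SELECT CAST(c AS UNSIGNED INTEGER) FROM t",
    ] {
        let stmts = Parser::parse_sql(&MySqlDialect {}, sql).unwrap();
        assert_eq!(stmts.len(), 1);
    }
}
```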
- } else { - None - }; - self.expect_token(&Token::RBracket)?; - data = DataType::Array(ArrayElemTypeDef::SquareBracket(Box::new(data), size)) + if self.dialect.supports_array_typedef_with_brackets() { + while self.consume_token(&Token::LBracket) { + // Parse optional array data type size + let size = self.maybe_parse(|p| p.parse_literal_uint())?; + self.expect_token(&Token::RBracket)?; + data = DataType::Array(ArrayElemTypeDef::SquareBracket(Box::new(data), size)) + } } Ok((data, trailing_bracket)) } + fn parse_returns_table_column(&mut self) -> Result { + let name = self.parse_identifier()?; + let data_type = self.parse_data_type()?; + Ok(ColumnDef { + name, + data_type, + options: Vec::new(), // No constraints expected here + }) + } + + fn parse_returns_table_columns(&mut self) -> Result, ParserError> { + self.expect_token(&Token::LParen)?; + let columns = self.parse_comma_separated(Parser::parse_returns_table_column)?; + self.expect_token(&Token::RParen)?; + Ok(columns) + } + pub fn parse_string_values(&mut self) -> Result, ParserError> { self.expect_token(&Token::LParen)?; let mut values = Vec::new(); @@ -8068,44 +9845,140 @@ impl<'a> Parser<'a> { /// Strictly parse `identifier AS identifier` pub fn parse_identifier_with_alias(&mut self) -> Result { - let ident = self.parse_identifier(false)?; - self.expect_keyword(Keyword::AS)?; - let alias = self.parse_identifier(false)?; + let ident = self.parse_identifier()?; + self.expect_keyword_is(Keyword::AS)?; + let alias = self.parse_identifier()?; Ok(IdentWithAlias { ident, alias }) } - /// Parse `AS identifier` (or simply `identifier` if it's not a reserved keyword) - /// Some examples with aliases: `SELECT 1 foo`, `SELECT COUNT(*) AS cnt`, - /// `SELECT ... FROM t1 foo, t2 bar`, `SELECT ... FROM (...) AS bar` + /// Optionally parses an alias for a select list item + fn maybe_parse_select_item_alias(&mut self) -> Result, ParserError> { + fn validator(explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool { + parser.dialect.is_select_item_alias(explicit, kw, parser) + } + self.parse_optional_alias_inner(None, validator) + } + + /// Optionally parses an alias for a table like in `... FROM generate_series(1, 10) AS t (col)`. + /// In this case, the alias is allowed to optionally name the columns in the table, in + /// addition to the table itself. + pub fn maybe_parse_table_alias(&mut self) -> Result, ParserError> { + fn validator(explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool { + parser.dialect.is_table_factor_alias(explicit, kw, parser) + } + match self.parse_optional_alias_inner(None, validator)? 
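Square-bracket array types are now gated behind a `supports_array_typedef_with_brackets` dialect hook rather than a hard-coded dialect list, with the optional size parsed uniformly. A sketch, assuming PostgreSQL keeps bracket support under the new hook:

```rust
use sqlparser::dialect::PostgreSqlDialect;
use sqlparser::parser::Parser;

#[test]
fn postgres_array_typedef_brackets() {
    let sql = "CREATE TABLE t (xs INT[], fixed INT[3])";
    let stmts = Parser::parse_sql(&PostgreSqlDialect {}, sql).unwrap();
    assert_eq!(stmts.len(), 1);
}
```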
{ + Some(name) => { + let columns = self.parse_table_alias_column_defs()?; + Ok(Some(TableAlias { name, columns })) + } + None => Ok(None), + } + } + + fn parse_table_index_hints(&mut self) -> Result, ParserError> { + let mut hints = vec![]; + while let Some(hint_type) = + self.parse_one_of_keywords(&[Keyword::USE, Keyword::IGNORE, Keyword::FORCE]) + { + let hint_type = match hint_type { + Keyword::USE => TableIndexHintType::Use, + Keyword::IGNORE => TableIndexHintType::Ignore, + Keyword::FORCE => TableIndexHintType::Force, + _ => { + return self.expected( + "expected to match USE/IGNORE/FORCE keyword", + self.peek_token(), + ) + } + }; + let index_type = match self.parse_one_of_keywords(&[Keyword::INDEX, Keyword::KEY]) { + Some(Keyword::INDEX) => TableIndexType::Index, + Some(Keyword::KEY) => TableIndexType::Key, + _ => { + return self.expected("expected to match INDEX/KEY keyword", self.peek_token()) + } + }; + let for_clause = if self.parse_keyword(Keyword::FOR) { + let clause = if self.parse_keyword(Keyword::JOIN) { + TableIndexHintForClause::Join + } else if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) { + TableIndexHintForClause::OrderBy + } else if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) { + TableIndexHintForClause::GroupBy + } else { + return self.expected( + "expected to match FOR/ORDER BY/GROUP BY table hint in for clause", + self.peek_token(), + ); + }; + Some(clause) + } else { + None + }; + + self.expect_token(&Token::LParen)?; + let index_names = if self.peek_token().token != Token::RParen { + self.parse_comma_separated(Parser::parse_identifier)? + } else { + vec![] + }; + self.expect_token(&Token::RParen)?; + hints.push(TableIndexHints { + hint_type, + index_type, + for_clause, + index_names, + }); + } + Ok(hints) + } + + /// Wrapper for parse_optional_alias_inner, left for backwards-compatibility + /// but new flows should use the context-specific methods such as `maybe_parse_select_item_alias` + /// and `maybe_parse_table_alias`. pub fn parse_optional_alias( &mut self, reserved_kwds: &[Keyword], ) -> Result, ParserError> { + fn validator(_explicit: bool, _kw: &Keyword, _parser: &mut Parser) -> bool { + false + } + self.parse_optional_alias_inner(Some(reserved_kwds), validator) + } + + /// Parses an optional alias after a SQL element such as a select list item + /// or a table name. + /// + /// This method accepts an optional list of reserved keywords or a function + /// to call to validate if a keyword should be parsed as an alias, to allow + /// callers to customize the parsing logic based on their context. + fn parse_optional_alias_inner( + &mut self, + reserved_kwds: Option<&[Keyword]>, + validator: F, + ) -> Result, ParserError> + where + F: Fn(bool, &Keyword, &mut Parser) -> bool, + { let after_as = self.parse_keyword(Keyword::AS); + let next_token = self.next_token(); match next_token.token { - // Accept any identifier after `AS` (though many dialects have restrictions on - // keywords that may appear here). If there's no `AS`: don't parse keywords, - // which may start a construct allowed in this position, to be parsed as aliases. - // (For example, in `FROM t1 JOIN` the `JOIN` will always be parsed as a keyword, - // not an alias.) - Token::Word(w) if after_as || !reserved_kwds.contains(&w.keyword) => { - Ok(Some(w.to_ident())) - } - // MSSQL supports single-quoted strings as aliases for columns - // We accept them as table aliases too, although MSSQL does not. 
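`parse_table_index_hints` above covers MySQL's `USE | IGNORE | FORCE INDEX | KEY [FOR JOIN | ORDER BY | GROUP BY] (index, ...)` table hints, and `maybe_parse_table_alias` lets table aliases carry column definitions. A hedged sketch of the hints, assuming the MySQL dialect wires them into table-factor parsing:

```rust
use sqlparser::dialect::MySqlDialect;
use sqlparser::parser::Parser;

#[test]
fn mysql_table_index_hints() {
    let sql = "SELECT * FROM t USE INDEX (idx_a, idx_b) WHERE a = 1";
    let stmts = Parser::parse_sql(&MySqlDialect {}, sql).unwrap();
    assert_eq!(stmts.len(), 1);
    println!("{:#?}", stmts[0]);
}
```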
- // - // Note, that this conflicts with an obscure rule from the SQL - // standard, which we don't implement: - // https://crate.io/docs/sql-99/en/latest/chapters/07.html#character-string-literal-s - // "[Obscure Rule] SQL allows you to break a long up into two or more smaller s, split by a that includes a newline - // character. When it sees such a , your DBMS will - // ignore the and treat the multiple strings as - // a single ." + // By default, if a word is located after the `AS` keyword we consider it an alias + // as long as it's not reserved. + Token::Word(w) + if after_as || reserved_kwds.is_some_and(|x| !x.contains(&w.keyword)) => + { + Ok(Some(w.into_ident(next_token.span))) + } + // This pattern allows for customizing the acceptance of words as aliases based on the caller's + // context, such as to what SQL element this word is a potential alias of (select item alias, table name + // alias, etc.) or dialect-specific logic that goes beyond a simple list of reserved keywords. + Token::Word(w) if validator(after_as, &w.keyword, self) => { + Ok(Some(w.into_ident(next_token.span))) + } + // For backwards-compatibility, we accept quoted strings as aliases regardless of the context. Token::SingleQuotedString(s) => Ok(Some(Ident::with_quote('\'', s))), - // Support for MySql dialect double-quoted string, `AS "HOUR"` for example Token::DoubleQuotedString(s) => Ok(Some(Ident::with_quote('\"', s))), _ => { if after_as { @@ -8117,23 +9990,6 @@ impl<'a> Parser<'a> { } } - /// Parse `AS identifier` when the AS is describing a table-valued object, - /// like in `... FROM generate_series(1, 10) AS t (col)`. In this case - /// the alias is allowed to optionally name the columns in the table, in - /// addition to the table itself. - pub fn parse_optional_table_alias( - &mut self, - reserved_kwds: &[Keyword], - ) -> Result, ParserError> { - match self.parse_optional_alias(reserved_kwds)? { - Some(name) => { - let columns = self.parse_parenthesized_column_list(Optional, false)?; - Ok(Some(TableAlias { name, columns })) - } - None => Ok(None), - } - } - pub fn parse_optional_group_by(&mut self) -> Result, ParserError> { if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) { let expressions = if self.parse_keyword(Keyword::ALL) { @@ -8143,7 +9999,7 @@ impl<'a> Parser<'a> { }; let mut modifiers = vec![]; - if dialect_of!(self is ClickHouseDialect | GenericDialect) { + if self.dialect.supports_group_by_with_modifier() { loop { if !self.parse_keyword(Keyword::WITH) { break; @@ -8160,12 +10016,20 @@ impl<'a> Parser<'a> { _ => { return parser_err!( "BUG: expected to match GroupBy modifier keyword", - self.peek_token().location + self.peek_token().span.start ) } }); } } + if self.parse_keywords(&[Keyword::GROUPING, Keyword::SETS]) { + self.expect_token(&Token::LParen)?; + let result = self.parse_comma_separated(|p| p.parse_tuple(true, true))?; + self.expect_token(&Token::RParen)?; + modifiers.push(GroupByWithModifier::GroupingSets(Expr::GroupingSets( + result, + ))); + }; let group_by = match expressions { None => GroupByExpr::All(modifiers), Some(exprs) => GroupByExpr::Expressions(exprs, modifiers), @@ -8178,22 +10042,142 @@ impl<'a> Parser<'a> { pub fn parse_optional_order_by(&mut self) -> Result, ParserError> { if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) { - let order_by_exprs = self.parse_comma_separated(Parser::parse_order_by_expr)?; - let interpolate = if dialect_of!(self is ClickHouseDialect | GenericDialect) { - self.parse_interpolations()? 
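`parse_optional_group_by` now asks the dialect via `supports_group_by_with_modifier` and additionally accepts a trailing `GROUPING SETS (...)` modifier. A sketch of a ClickHouse-style modifier (dialect support assumed):

```rust
use sqlparser::dialect::ClickHouseDialect;
use sqlparser::parser::Parser;

#[test]
fn group_by_with_modifier() {
    // Per the hunk, WITH TOTALS should be recorded as a GroupByWithModifier.
    let sql = "SELECT a, SUM(b) FROM t GROUP BY a WITH TOTALS";
    let stmts = Parser::parse_sql(&ClickHouseDialect {}, sql).unwrap();
    assert_eq!(stmts.len(), 1);
}
```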
+ let order_by = + if self.dialect.supports_order_by_all() && self.parse_keyword(Keyword::ALL) { + let order_by_options = self.parse_order_by_options()?; + OrderBy { + kind: OrderByKind::All(order_by_options), + interpolate: None, + } + } else { + let exprs = self.parse_comma_separated(Parser::parse_order_by_expr)?; + let interpolate = if dialect_of!(self is ClickHouseDialect | GenericDialect) { + self.parse_interpolations()? + } else { + None + }; + OrderBy { + kind: OrderByKind::Expressions(exprs), + interpolate, + } + }; + Ok(Some(order_by)) + } else { + Ok(None) + } + } + + fn parse_optional_limit_clause(&mut self) -> Result, ParserError> { + let mut offset = if self.parse_keyword(Keyword::OFFSET) { + Some(self.parse_offset()?) + } else { + None + }; + + let (limit, limit_by) = if self.parse_keyword(Keyword::LIMIT) { + let expr = self.parse_limit()?; + + if self.dialect.supports_limit_comma() + && offset.is_none() + && expr.is_some() // ALL not supported with comma + && self.consume_token(&Token::Comma) + { + let offset = expr.ok_or_else(|| { + ParserError::ParserError( + "Missing offset for LIMIT , ".to_string(), + ) + })?; + return Ok(Some(LimitClause::OffsetCommaLimit { + offset, + limit: self.parse_expr()?, + })); + } + + let limit_by = if dialect_of!(self is ClickHouseDialect | GenericDialect) + && self.parse_keyword(Keyword::BY) + { + Some(self.parse_comma_separated(Parser::parse_expr)?) } else { None }; - Ok(Some(OrderBy { - exprs: order_by_exprs, - interpolate, + (Some(expr), limit_by) + } else { + (None, None) + }; + + if offset.is_none() && limit.is_some() && self.parse_keyword(Keyword::OFFSET) { + offset = Some(self.parse_offset()?); + } + + if offset.is_some() || (limit.is_some() && limit != Some(None)) || limit_by.is_some() { + Ok(Some(LimitClause::LimitOffset { + limit: limit.unwrap_or_default(), + offset, + limit_by: limit_by.unwrap_or_default(), })) } else { Ok(None) } } + /// Parse a table object for insertion + /// e.g. `some_database.some_table` or `FUNCTION some_table_func(...)` + pub fn parse_table_object(&mut self) -> Result { + if self.dialect.supports_insert_table_function() && self.parse_keyword(Keyword::FUNCTION) { + let fn_name = self.parse_object_name(false)?; + self.parse_function_call(fn_name) + .map(TableObject::TableFunction) + } else { + self.parse_object_name(false).map(TableObject::TableName) + } + } + + /// Parse a possibly qualified, possibly quoted identifier, optionally allowing for wildcards, + /// e.g. *, *.*, `foo`.*, or "foo"."bar" + fn parse_object_name_with_wildcards( + &mut self, + in_table_clause: bool, + allow_wildcards: bool, + ) -> Result { + let mut idents = vec![]; + + if dialect_of!(self is BigQueryDialect) && in_table_clause { + loop { + let (ident, end_with_period) = self.parse_unquoted_hyphenated_identifier()?; + idents.push(ident); + if !self.consume_token(&Token::Period) && !end_with_period { + break; + } + } + } else { + loop { + let ident = if allow_wildcards && self.peek_token().token == Token::Mul { + let span = self.next_token().span; + Ident { + value: Token::Mul.to_string(), + quote_style: None, + span, + } + } else { + if self.dialect.supports_object_name_double_dot_notation() + && idents.len() == 1 + && self.consume_token(&Token::Period) + { + // Empty string here means default schema + idents.push(Ident::new("")); + } + self.parse_identifier()? 
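`parse_optional_limit_clause` consolidates `LIMIT`, `OFFSET`, and ClickHouse's `LIMIT ... BY` into a single `LimitClause`, including the `LIMIT offset, limit` shorthand where the dialect opts in via `supports_limit_comma`, while `parse_optional_order_by` gains `ORDER BY ALL` behind `supports_order_by_all`. A sketch of the comma form (dialect choice assumed):

```rust
use sqlparser::dialect::MySqlDialect;
use sqlparser::parser::Parser;

#[test]
fn limit_offset_comma_shorthand() {
    // Per the hunk, this should become LimitClause::OffsetCommaLimit { offset: 5, limit: 10 }.
    let sql = "SELECT * FROM t LIMIT 5, 10";
    let stmts = Parser::parse_sql(&MySqlDialect {}, sql).unwrap();
    assert_eq!(stmts.len(), 1);
}
```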
+ }; + idents.push(ident); + if !self.consume_token(&Token::Period) { + break; + } + } + } + Ok(ObjectName::from(idents)) + } + /// Parse a possibly qualified, possibly quoted identifier, e.g. /// `foo` or `myschema."table" /// @@ -8201,30 +10185,32 @@ impl<'a> Parser<'a> { /// or similar table clause. Currently, this is used only to support unquoted hyphenated identifiers /// in this context on BigQuery. pub fn parse_object_name(&mut self, in_table_clause: bool) -> Result { - let mut idents = vec![]; - loop { - idents.push(self.parse_identifier(in_table_clause)?); - if !self.consume_token(&Token::Period) { - break; - } - } + let ObjectName(mut idents) = + self.parse_object_name_with_wildcards(in_table_clause, false)?; // BigQuery accepts any number of quoted identifiers of a table name. // https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#quoted_identifiers if dialect_of!(self is BigQueryDialect) - && idents.iter().any(|ident| ident.value.contains('.')) + && idents.iter().any(|part| { + part.as_ident() + .is_some_and(|ident| ident.value.contains('.')) + }) { idents = idents .into_iter() - .flat_map(|ident| { - ident + .flat_map(|part| match part.as_ident() { + Some(ident) => ident .value .split('.') - .map(|value| Ident { - value: value.into(), - quote_style: ident.quote_style, + .map(|value| { + ObjectNamePart::Identifier(Ident { + value: value.into(), + quote_style: ident.quote_style, + span: ident.span, + }) }) - .collect::>() + .collect::>(), + None => vec![part], }) .collect() } @@ -8236,14 +10222,14 @@ impl<'a> Parser<'a> { pub fn parse_identifiers(&mut self) -> Result, ParserError> { let mut idents = vec![]; loop { - match self.peek_token().token { + match &self.peek_token_ref().token { Token::Word(w) => { - idents.push(w.to_ident()); + idents.push(w.clone().into_ident(self.peek_token_ref().span)); } Token::EOF | Token::Eq => break, _ => {} } - self.next_token(); + self.advance_token(); } Ok(idents) } @@ -8291,8 +10277,9 @@ impl<'a> Parser<'a> { let mut idents = vec![]; // expecting at least one word for identifier - match self.next_token().token { - Token::Word(w) => idents.push(w.to_ident()), + let next_token = self.next_token(); + match next_token.token { + Token::Word(w) => idents.push(w.into_ident(next_token.span)), Token::EOF => { return Err(ParserError::ParserError( "Empty input when parsing identifier".to_string(), @@ -8309,19 +10296,22 @@ impl<'a> Parser<'a> { loop { match self.next_token().token { // ensure that optional period is succeeded by another identifier - Token::Period => match self.next_token().token { - Token::Word(w) => idents.push(w.to_ident()), - Token::EOF => { - return Err(ParserError::ParserError( - "Trailing period in identifier".to_string(), - ))? - } - token => { - return Err(ParserError::ParserError(format!( - "Unexpected token following period in identifier: {token}" - )))? + Token::Period => { + let next_token = self.next_token(); + match next_token.token { + Token::Word(w) => idents.push(w.into_ident(next_token.span)), + Token::EOF => { + return Err(ParserError::ParserError( + "Trailing period in identifier".to_string(), + ))? + } + token => { + return Err(ParserError::ParserError(format!( + "Unexpected token following period in identifier: {token}" + )))? 
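`parse_object_name_with_wildcards` centralizes qualified-name parsing, optionally accepting `*` segments and, when the dialect opts in via `supports_object_name_double_dot_notation`, the `db..table` shorthand where the empty middle segment stands in for the default schema. A hedged sketch of the double-dot form (dialect support assumed):

```rust
use sqlparser::dialect::SnowflakeDialect;
use sqlparser::parser::Parser;

#[test]
fn snowflake_double_dot_object_name() {
    // Per the hunk, the empty segment between the dots is kept as an empty identifier.
    let sql = "SELECT * FROM my_db..my_table";
    let stmts = Parser::parse_sql(&SnowflakeDialect {}, sql).unwrap();
    assert_eq!(stmts.len(), 1);
}
```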
+ } } - }, + } Token::EOF => break, token => { return Err(ParserError::ParserError(format!( @@ -8335,29 +10325,33 @@ impl<'a> Parser<'a> { } /// Parse a simple one-word identifier (possibly quoted, possibly a keyword) - /// - /// The `in_table_clause` parameter indicates whether the identifier is a table in a FROM, JOIN, or - /// similar table clause. Currently, this is used only to support unquoted hyphenated identifiers in - // this context on BigQuery. - pub fn parse_identifier(&mut self, in_table_clause: bool) -> Result { + pub fn parse_identifier(&mut self) -> Result { let next_token = self.next_token(); match next_token.token { - Token::Word(w) => { - let mut ident = w.to_ident(); + Token::Word(w) => Ok(w.into_ident(next_token.span)), + Token::SingleQuotedString(s) => Ok(Ident::with_quote('\'', s)), + Token::DoubleQuotedString(s) => Ok(Ident::with_quote('\"', s)), + _ => self.expected("identifier", next_token), + } + } - // On BigQuery, hyphens are permitted in unquoted identifiers inside of a FROM or - // TABLE clause [0]. - // - // The first segment must be an ordinary unquoted identifier, e.g. it must not start - // with a digit. Subsequent segments are either must either be valid identifiers or - // integers, e.g. foo-123 is allowed, but foo-123a is not. - // - // [0] https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical - if dialect_of!(self is BigQueryDialect) - && w.quote_style.is_none() - && in_table_clause - { - let mut requires_whitespace = false; + /// On BigQuery, hyphens are permitted in unquoted identifiers inside of a FROM or + /// TABLE clause. + /// + /// The first segment must be an ordinary unquoted identifier, e.g. it must not start + /// with a digit. Subsequent segments are either must either be valid identifiers or + /// integers, e.g. foo-123 is allowed, but foo-123a is not. + /// + /// [BigQuery-lexical](https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical) + /// + /// Return a tuple of the identifier and a boolean indicating it ends with a period. + fn parse_unquoted_hyphenated_identifier(&mut self) -> Result<(Ident, bool), ParserError> { + match self.peek_token().token { + Token::Word(w) => { + let quote_style_is_none = w.quote_style.is_none(); + let mut requires_whitespace = false; + let mut ident = w.into_ident(self.next_token().span); + if quote_style_is_none { while matches!(self.peek_token_no_skip().token, Token::Minus) { self.next_token(); ident.value.push('-'); @@ -8365,15 +10359,36 @@ impl<'a> Parser<'a> { let token = self .next_token_no_skip() .cloned() - .unwrap_or(TokenWithLocation::wrap(Token::EOF)); + .unwrap_or(TokenWithSpan::wrap(Token::EOF)); requires_whitespace = match token.token { Token::Word(next_word) if next_word.quote_style.is_none() => { ident.value.push_str(&next_word.value); false } - Token::Number(s, false) if s.chars().all(|c| c.is_ascii_digit()) => { - ident.value.push_str(&s); - true + Token::Number(s, false) => { + // A number token can represent a decimal value ending with a period, e.g., `Number('123.')`. + // However, for an [ObjectName], it is part of a hyphenated identifier, e.g., `foo-123.bar`. + // + // If a number token is followed by a period, it is part of an [ObjectName]. + // Return the identifier with `true` if the number token is followed by a period, indicating that + // parsing should continue for the next part of the hyphenated identifier. 
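`parse_unquoted_hyphenated_identifier` above factors BigQuery's hyphenated table names out of `parse_identifier`; the trailing-period bookkeeping below handles segments such as `foo-123.bar`, where the number token absorbs the dot. A sketch (dialect and table name are illustrative):

```rust
use sqlparser::dialect::BigQueryDialect;
use sqlparser::parser::Parser;

#[test]
fn bigquery_hyphenated_table_names() {
    // The first segment ends in a digit-only part followed by a period,
    // which is the case the hunk below special-cases.
    let sql = "SELECT * FROM my-project-123.my_dataset.my_table";
    let stmts = Parser::parse_sql(&BigQueryDialect {}, sql).unwrap();
    assert_eq!(stmts.len(), 1);
}
```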
+ if s.ends_with('.') { + let Some(s) = s.split('.').next().filter(|s| { + !s.is_empty() && s.chars().all(|c| c.is_ascii_digit()) + }) else { + return self.expected( + "continuation of hyphenated identifier", + TokenWithSpan::new(Token::Number(s, false), token.span), + ); + }; + ident.value.push_str(s); + return Ok((ident, true)); + } else { + ident.value.push_str(&s); + } + // If next token is period, then it is part of an ObjectName and we don't expect whitespace + // after the number. + !matches!(self.peek_token().token, Token::Period) } _ => { return self @@ -8392,11 +10407,9 @@ impl<'a> Parser<'a> { } } } - Ok(ident) + Ok((ident, false)) } - Token::SingleQuotedString(s) => Ok(Ident::with_quote('\'', s)), - Token::DoubleQuotedString(s) => Ok(Ident::with_quote('\"', s)), - _ => self.expected("identifier", next_token), + _ => Ok((self.parse_identifier()?, false)), } } @@ -8407,7 +10420,11 @@ impl<'a> Parser<'a> { self.next_token(); Ok(vec![]) } else { - let cols = self.parse_comma_separated(Parser::parse_view_column)?; + let cols = self.parse_comma_separated_with_trailing_commas( + Parser::parse_view_column, + self.dialect.supports_column_definition_trailing_commas(), + Self::is_reserved_for_column_alias, + )?; self.expect_token(&Token::RParen)?; Ok(cols) } @@ -8418,7 +10435,7 @@ impl<'a> Parser<'a> { /// Parses a column definition within a view. fn parse_view_column(&mut self) -> Result { - let name = self.parse_identifier(false)?; + let name = self.parse_identifier()?; let options = if (dialect_of!(self is BigQueryDialect | GenericDialect) && self.parse_keyword(Keyword::OPTIONS)) || (dialect_of!(self is SnowflakeDialect | GenericDialect) @@ -8442,18 +10459,45 @@ impl<'a> Parser<'a> { }) } - /// Parse a parenthesized comma-separated list of unqualified, possibly quoted identifiers + /// Parses a parenthesized comma-separated list of unqualified, possibly quoted identifiers. + /// For example: `(col1, "col 2", ...)` pub fn parse_parenthesized_column_list( &mut self, optional: IsOptional, allow_empty: bool, ) -> Result, ParserError> { + self.parse_parenthesized_column_list_inner(optional, allow_empty, |p| p.parse_identifier()) + } + + /// Parses a parenthesized comma-separated list of qualified, possibly quoted identifiers. + /// For example: `(db1.sc1.tbl1.col1, db1.sc1.tbl1."col 2", ...)` + pub fn parse_parenthesized_qualified_column_list( + &mut self, + optional: IsOptional, + allow_empty: bool, + ) -> Result, ParserError> { + self.parse_parenthesized_column_list_inner(optional, allow_empty, |p| { + p.parse_object_name(true) + }) + } + + /// Parses a parenthesized comma-separated list of columns using + /// the provided function to parse each element. + fn parse_parenthesized_column_list_inner( + &mut self, + optional: IsOptional, + allow_empty: bool, + mut f: F, + ) -> Result, ParserError> + where + F: FnMut(&mut Parser) -> Result, + { if self.consume_token(&Token::LParen) { if allow_empty && self.peek_token().token == Token::RParen { self.next_token(); Ok(vec![]) } else { - let cols = self.parse_comma_separated(|p| p.parse_identifier(false))?; + let cols = self.parse_comma_separated(|p| f(p))?; self.expect_token(&Token::RParen)?; Ok(cols) } @@ -8464,6 +10508,21 @@ impl<'a> Parser<'a> { } } + /// Parses a parenthesized comma-separated list of table alias column definitions. 
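// Illustrative sketch (not part of this change): exercising the hyphenated-identifier
// handling implemented by `parse_unquoted_hyphenated_identifier` above. The project,
// dataset and table names are placeholders; per the rules above, a trailing numeric
// segment such as `foo-123` would also be accepted.
use sqlparser::dialect::BigQueryDialect;
use sqlparser::parser::Parser;

fn main() -> Result<(), sqlparser::parser::ParserError> {
    let sql = "SELECT * FROM my-project.mydataset.mytable";
    // On BigQuery each hyphen-joined segment becomes part of a single identifier,
    // so the table name parses as a three-part ObjectName.
    let statements = Parser::parse_sql(&BigQueryDialect {}, sql)?;
    println!("{statements:?}");
    Ok(())
}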
+ fn parse_table_alias_column_defs(&mut self) -> Result, ParserError> { + if self.consume_token(&Token::LParen) { + let cols = self.parse_comma_separated(|p| { + let name = p.parse_identifier()?; + let data_type = p.maybe_parse(|p| p.parse_data_type())?; + Ok(TableAliasColumnDef { name, data_type }) + })?; + self.expect_token(&Token::RParen)?; + Ok(cols) + } else { + Ok(vec![]) + } + } + pub fn parse_precision(&mut self) -> Result { self.expect_token(&Token::LParen)?; let n = self.parse_literal_uint()?; @@ -8489,7 +10548,7 @@ impl<'a> Parser<'a> { /// /// [1]: https://clickhouse.com/docs/en/sql-reference/data-types/datetime64 pub fn parse_datetime_64(&mut self) -> Result<(u64, Option), ParserError> { - self.expect_keyword(Keyword::DATETIME64)?; + self.expect_keyword_is(Keyword::DATETIME64)?; self.expect_token(&Token::LParen)?; let precision = self.parse_literal_uint()?; let time_zone = if self.consume_token(&Token::Comma) { @@ -8513,6 +10572,16 @@ impl<'a> Parser<'a> { } } + pub fn parse_optional_binary_length(&mut self) -> Result, ParserError> { + if self.consume_token(&Token::LParen) { + let binary_length = self.parse_binary_length()?; + self.expect_token(&Token::RParen)?; + Ok(Some(binary_length)) + } else { + Ok(None) + } + } + pub fn parse_character_length(&mut self) -> Result { if self.parse_keyword(Keyword::MAX) { return Ok(CharacterLength::Max); @@ -8528,6 +10597,14 @@ impl<'a> Parser<'a> { Ok(CharacterLength::IntegerLength { length, unit }) } + pub fn parse_binary_length(&mut self) -> Result { + if self.parse_keyword(Keyword::MAX) { + return Ok(BinaryLength::Max); + } + let length = self.parse_literal_uint()?; + Ok(BinaryLength::IntegerLength { length }) + } + pub fn parse_optional_precision_scale( &mut self, ) -> Result<(Option, Option), ParserError> { @@ -8604,6 +10681,13 @@ impl<'a> Parser<'a> { Ok(parent_type(inside_type.into())) } + /// Parse a DELETE statement, returning a `Box`ed SetExpr + /// + /// This is used to reduce the size of the stack frames in debug builds + fn parse_delete_setexpr_boxed(&mut self) -> Result, ParserError> { + Ok(Box::new(SetExpr::Delete(self.parse_delete()?))) + } + pub fn parse_delete(&mut self) -> Result { let (tables, with_from_keyword) = if !self.parse_keyword(Keyword::FROM) { // `FROM` keyword is optional in BigQuery SQL. @@ -8612,7 +10696,7 @@ impl<'a> Parser<'a> { (vec![], false) } else { let tables = self.parse_comma_separated(|p| p.parse_object_name(false))?; - self.expect_keyword(Keyword::FROM)?; + self.expect_keyword_is(Keyword::FROM)?; (tables, true) } } else { @@ -8694,6 +10778,7 @@ impl<'a> Parser<'a> { let mut analyze = false; let mut verbose = false; let mut query_plan = false; + let mut estimate = false; let mut format = None; let mut options = None; @@ -8706,6 +10791,8 @@ impl<'a> Parser<'a> { options = Some(self.parse_utility_options()?) 
} else if self.parse_keywords(&[Keyword::QUERY, Keyword::PLAN]) { query_plan = true; + } else if self.parse_keyword(Keyword::ESTIMATE) { + estimate = true; } else { analyze = self.parse_keyword(Keyword::ANALYZE); verbose = self.parse_keyword(Keyword::VERBOSE); @@ -8723,6 +10810,7 @@ impl<'a> Parser<'a> { analyze, verbose, query_plan, + estimate, statement: Box::new(statement), format, options, @@ -8760,7 +10848,9 @@ impl<'a> Parser<'a> { pub fn parse_query(&mut self) -> Result, ParserError> { let _guard = self.recursion_counter.try_decrease()?; let with = if self.parse_keyword(Keyword::WITH) { + let with_token = self.get_current_token(); Some(With { + with_token: with_token.clone().into(), recursive: self.parse_keyword(Keyword::RECURSIVE), cte_tables: self.parse_comma_separated(Parser::parse_cte)?, }) @@ -8771,30 +10861,42 @@ impl<'a> Parser<'a> { Ok(Query { with, body: self.parse_insert_setexpr_boxed()?, - limit: None, - limit_by: vec![], order_by: None, - offset: None, + limit_clause: None, fetch: None, locks: vec![], for_clause: None, settings: None, format_clause: None, + pipe_operators: vec![], } .into()) } else if self.parse_keyword(Keyword::UPDATE) { Ok(Query { with, body: self.parse_update_setexpr_boxed()?, - limit: None, - limit_by: vec![], order_by: None, - offset: None, + limit_clause: None, + fetch: None, + locks: vec![], + for_clause: None, + settings: None, + format_clause: None, + pipe_operators: vec![], + } + .into()) + } else if self.parse_keyword(Keyword::DELETE) { + Ok(Query { + with, + body: self.parse_delete_setexpr_boxed()?, + limit_clause: None, + order_by: None, fetch: None, locks: vec![], for_clause: None, settings: None, format_clause: None, + pipe_operators: vec![], } .into()) } else { @@ -8802,40 +10904,7 @@ impl<'a> Parser<'a> { let order_by = self.parse_optional_order_by()?; - let mut limit = None; - let mut offset = None; - - for _x in 0..2 { - if limit.is_none() && self.parse_keyword(Keyword::LIMIT) { - limit = self.parse_limit()? - } - - if offset.is_none() && self.parse_keyword(Keyword::OFFSET) { - offset = Some(self.parse_offset()?) - } - - if self.dialect.supports_limit_comma() - && limit.is_some() - && offset.is_none() - && self.consume_token(&Token::Comma) - { - // MySQL style LIMIT x,y => LIMIT y OFFSET x. - // Check for more details. - offset = Some(Offset { - value: limit.unwrap(), - rows: OffsetRows::None, - }); - limit = Some(self.parse_expr()?); - } - } - - let limit_by = if dialect_of!(self is ClickHouseDialect | GenericDialect) - && self.parse_keyword(Keyword::BY) - { - self.parse_comma_separated(Parser::parse_expr)? - } else { - vec![] - }; + let limit_clause = self.parse_optional_limit_clause()?; let settings = self.parse_settings()?; @@ -8861,38 +10930,129 @@ impl<'a> Parser<'a> { if self.parse_keyword(Keyword::NULL) { Some(FormatClause::Null) } else { - let ident = self.parse_identifier(false)?; + let ident = self.parse_identifier()?; Some(FormatClause::Identifier(ident)) } } else { None }; + let pipe_operators = if self.dialect.supports_pipe_operator() { + self.parse_pipe_operators()? 
+ } else { + Vec::new() + }; + Ok(Query { with, body, order_by, - limit, - limit_by, - offset, + limit_clause, fetch, locks, for_clause, settings, format_clause, + pipe_operators, } .into()) } } + fn parse_pipe_operators(&mut self) -> Result, ParserError> { + let mut pipe_operators = Vec::new(); + + while self.consume_token(&Token::VerticalBarRightAngleBracket) { + let kw = self.expect_one_of_keywords(&[ + Keyword::SELECT, + Keyword::EXTEND, + Keyword::SET, + Keyword::DROP, + Keyword::AS, + Keyword::WHERE, + Keyword::LIMIT, + Keyword::AGGREGATE, + Keyword::ORDER, + ])?; + match kw { + Keyword::SELECT => { + let exprs = self.parse_comma_separated(Parser::parse_select_item)?; + pipe_operators.push(PipeOperator::Select { exprs }) + } + Keyword::EXTEND => { + let exprs = self.parse_comma_separated(Parser::parse_select_item)?; + pipe_operators.push(PipeOperator::Extend { exprs }) + } + Keyword::SET => { + let assignments = self.parse_comma_separated(Parser::parse_assignment)?; + pipe_operators.push(PipeOperator::Set { assignments }) + } + Keyword::DROP => { + let columns = self.parse_identifiers()?; + pipe_operators.push(PipeOperator::Drop { columns }) + } + Keyword::AS => { + let alias = self.parse_identifier()?; + pipe_operators.push(PipeOperator::As { alias }) + } + Keyword::WHERE => { + let expr = self.parse_expr()?; + pipe_operators.push(PipeOperator::Where { expr }) + } + Keyword::LIMIT => { + let expr = self.parse_expr()?; + let offset = if self.parse_keyword(Keyword::OFFSET) { + Some(self.parse_expr()?) + } else { + None + }; + pipe_operators.push(PipeOperator::Limit { expr, offset }) + } + Keyword::AGGREGATE => { + let full_table_exprs = if self.peek_keyword(Keyword::GROUP) { + vec![] + } else { + self.parse_comma_separated(|parser| { + parser.parse_expr_with_alias_and_order_by() + })? + }; + + let group_by_expr = if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) { + self.parse_comma_separated(|parser| { + parser.parse_expr_with_alias_and_order_by() + })? + } else { + vec![] + }; + + pipe_operators.push(PipeOperator::Aggregate { + full_table_exprs, + group_by_expr, + }) + } + Keyword::ORDER => { + self.expect_one_of_keywords(&[Keyword::BY])?; + let exprs = self.parse_comma_separated(Parser::parse_order_by_expr)?; + pipe_operators.push(PipeOperator::OrderBy { exprs }) + } + unhandled => { + return Err(ParserError::ParserError(format!( + "`expect_one_of_keywords` further up allowed unhandled keyword: {unhandled:?}" + ))) + } + } + } + Ok(pipe_operators) + } + fn parse_settings(&mut self) -> Result>, ParserError> { let settings = if dialect_of!(self is ClickHouseDialect|GenericDialect) && self.parse_keyword(Keyword::SETTINGS) { let key_values = self.parse_comma_separated(|p| { - let key = p.parse_identifier(false)?; + let key = p.parse_identifier()?; p.expect_token(&Token::Eq)?; - let value = p.parse_value()?; + let value = p.parse_value()?.value; Ok(Setting { key, value }) })?; Some(key_values) @@ -8951,7 +11111,7 @@ impl<'a> Parser<'a> { if self.parse_keyword(Keyword::ELEMENTS) { elements = true; } else if self.parse_keyword(Keyword::BINARY) { - self.expect_keyword(Keyword::BASE64)?; + self.expect_keyword_is(Keyword::BASE64)?; binary_base64 = true; } else if self.parse_keyword(Keyword::ROOT) { self.expect_token(&Token::LParen)?; @@ -9006,7 +11166,7 @@ impl<'a> Parser<'a> { /// Parse a CTE (`alias [( col1, col2, ... 
)] AS (subquery)`) pub fn parse_cte(&mut self) -> Result { - let name = self.parse_identifier(false)?; + let name = self.parse_identifier()?; let mut cte = if self.parse_keyword(Keyword::AS) { let mut is_materialized = None; @@ -9018,8 +11178,10 @@ impl<'a> Parser<'a> { } } self.expect_token(&Token::LParen)?; + let query = self.parse_query()?; - self.expect_token(&Token::RParen)?; + let closing_paren_token = self.expect_token(&Token::RParen)?; + let alias = TableAlias { name, columns: vec![], @@ -9029,10 +11191,11 @@ impl<'a> Parser<'a> { query, from: None, materialized: is_materialized, + closing_paren_token: closing_paren_token.into(), } } else { - let columns = self.parse_parenthesized_column_list(Optional, false)?; - self.expect_keyword(Keyword::AS)?; + let columns = self.parse_table_alias_column_defs()?; + self.expect_keyword_is(Keyword::AS)?; let mut is_materialized = None; if dialect_of!(self is PostgreSqlDialect) { if self.parse_keyword(Keyword::MATERIALIZED) { @@ -9042,18 +11205,21 @@ impl<'a> Parser<'a> { } } self.expect_token(&Token::LParen)?; + let query = self.parse_query()?; - self.expect_token(&Token::RParen)?; + let closing_paren_token = self.expect_token(&Token::RParen)?; + let alias = TableAlias { name, columns }; Cte { alias, query, from: None, materialized: is_materialized, + closing_paren_token: closing_paren_token.into(), } }; if self.parse_keyword(Keyword::FROM) { - cte.from = Some(self.parse_identifier(false)?); + cte.from = Some(self.parse_identifier()?); } Ok(cte) } @@ -9069,7 +11235,9 @@ impl<'a> Parser<'a> { pub fn parse_query_body(&mut self, precedence: u8) -> Result, ParserError> { // We parse the expression using a Pratt parser, as in `parse_expr()`. // Start by parsing a restricted SELECT or a `(subquery)`: - let expr = if self.parse_keyword(Keyword::SELECT) { + let expr = if self.peek_keyword(Keyword::SELECT) + || (self.peek_keyword(Keyword::FROM) && self.dialect.supports_from_first_select()) + { SetExpr::Select(self.parse_select().map(Box::new)?) 
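// Illustrative sketch (not part of this change) for the `|>` stages handled by
// `parse_pipe_operators` above. Assumption: the chosen dialect opts into
// `supports_pipe_operator()` (BigQuery's pipe syntax is the motivating case); table
// and column names are placeholders.
use sqlparser::dialect::BigQueryDialect;
use sqlparser::parser::Parser;

fn main() {
    let sql = "SELECT * FROM orders |> WHERE amount > 100 |> ORDER BY amount";
    match Parser::parse_sql(&BigQueryDialect {}, sql) {
        // Each `|>` stage ends up as one entry in `Query::pipe_operators`.
        Ok(statements) => println!("{statements:?}"),
        Err(e) => println!("pipe syntax not enabled for this dialect: {e}"),
    }
}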
} else if self.consume_token(&Token::LParen) { // CTEs are not allowed here, but the parser currently accepts them @@ -9104,7 +11272,9 @@ impl<'a> Parser<'a> { let op = self.parse_set_operator(&self.peek_token().token); let next_precedence = match op { // UNION and EXCEPT have the same binding power and evaluate left-to-right - Some(SetOperator::Union) | Some(SetOperator::Except) => 10, + Some(SetOperator::Union) | Some(SetOperator::Except) | Some(SetOperator::Minus) => { + 10 + } // INTERSECT has higher precedence than UNION/EXCEPT Some(SetOperator::Intersect) => 20, // Unexpected token or EOF => stop parsing the query body @@ -9131,13 +11301,19 @@ impl<'a> Parser<'a> { Token::Word(w) if w.keyword == Keyword::UNION => Some(SetOperator::Union), Token::Word(w) if w.keyword == Keyword::EXCEPT => Some(SetOperator::Except), Token::Word(w) if w.keyword == Keyword::INTERSECT => Some(SetOperator::Intersect), + Token::Word(w) if w.keyword == Keyword::MINUS => Some(SetOperator::Minus), _ => None, } } pub fn parse_set_quantifier(&mut self, op: &Option) -> SetQuantifier { match op { - Some(SetOperator::Except | SetOperator::Intersect | SetOperator::Union) => { + Some( + SetOperator::Except + | SetOperator::Intersect + | SetOperator::Union + | SetOperator::Minus, + ) => { if self.parse_keywords(&[Keyword::DISTINCT, Keyword::BY, Keyword::NAME]) { SetQuantifier::DistinctByName } else if self.parse_keywords(&[Keyword::BY, Keyword::NAME]) { @@ -9158,9 +11334,42 @@ impl<'a> Parser<'a> { } } - /// Parse a restricted `SELECT` statement (no CTEs / `UNION` / `ORDER BY`), - /// assuming the initial `SELECT` was already consumed + /// Parse a restricted `SELECT` statement (no CTEs / `UNION` / `ORDER BY`) pub fn parse_select(&mut self) -> Result { + let mut from_first = None; + + if self.dialect.supports_from_first_select() && self.peek_keyword(Keyword::FROM) { + let from_token = self.expect_keyword(Keyword::FROM)?; + let from = self.parse_table_with_joins()?; + if !self.peek_keyword(Keyword::SELECT) { + return Ok(Select { + select_token: AttachedToken(from_token), + distinct: None, + top: None, + top_before_distinct: false, + projection: vec![], + into: None, + from, + lateral_views: vec![], + prewhere: None, + selection: None, + group_by: GroupByExpr::Expressions(vec![], vec![]), + cluster_by: vec![], + distribute_by: vec![], + sort_by: vec![], + having: None, + named_window: vec![], + window_before_qualify: false, + qualify: None, + value_table_mode: None, + connect_by: None, + flavor: SelectFlavor::FromFirstNoSelect, + }); + } + from_first = Some(from); + } + + let select_token = self.expect_keyword(Keyword::SELECT)?; let value_table_mode = if dialect_of!(self is BigQueryDialect) && self.parse_keyword(Keyword::AS) { if self.parse_keyword(Keyword::VALUE) { @@ -9174,29 +11383,26 @@ impl<'a> Parser<'a> { None }; + let mut top_before_distinct = false; + let mut top = None; + if self.dialect.supports_top_before_distinct() && self.parse_keyword(Keyword::TOP) { + top = Some(self.parse_top()?); + top_before_distinct = true; + } let distinct = self.parse_all_or_distinct()?; + if !self.dialect.supports_top_before_distinct() && self.parse_keyword(Keyword::TOP) { + top = Some(self.parse_top()?); + } - let top = if self.parse_keyword(Keyword::TOP) { - Some(self.parse_top()?) - } else { - None - }; - - let projection = self.parse_projection()?; + let projection = + if self.dialect.supports_empty_projections() && self.peek_keyword(Keyword::FROM) { + vec![] + } else { + self.parse_projection()? 
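// Illustrative sketch (not part of this change) for the FROM-first and empty-projection
// paths added to `parse_select` above. Assumptions: DuckDbDialect is used only because
// FROM-first queries are a DuckDB-style feature; whether a dialect actually enables
// `supports_from_first_select()` or `supports_empty_projections()` is a per-dialect flag
// and is not asserted here.
use sqlparser::dialect::DuckDbDialect;
use sqlparser::parser::Parser;

fn main() {
    for sql in ["FROM users SELECT id, name", "FROM users"] {
        match Parser::parse_sql(&DuckDbDialect {}, sql) {
            Ok(statements) => println!("{sql} -> {statements:?}"),
            Err(e) => println!("{sql} -> rejected: {e}"),
        }
    }
}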
+ }; let into = if self.parse_keyword(Keyword::INTO) { - let temporary = self - .parse_one_of_keywords(&[Keyword::TEMP, Keyword::TEMPORARY]) - .is_some(); - let unlogged = self.parse_keyword(Keyword::UNLOGGED); - let table = self.parse_keyword(Keyword::TABLE); - let name = self.parse_object_name(false)?; - Some(SelectInto { - temporary, - unlogged, - table, - name, - }) + Some(self.parse_select_into()?) } else { None }; @@ -9206,10 +11412,12 @@ impl<'a> Parser<'a> { // otherwise they may be parsed as an alias as part of the `projection` // or `from`. - let from = if self.parse_keyword(Keyword::FROM) { - self.parse_comma_separated(Parser::parse_table_and_joins)? + let (from, from_first) = if let Some(from) = from_first.take() { + (from, true) + } else if self.parse_keyword(Keyword::FROM) { + (self.parse_table_with_joins()?, false) } else { - vec![] + (vec![], false) }; let mut lateral_views = vec![]; @@ -9321,8 +11529,10 @@ impl<'a> Parser<'a> { }; Ok(Select { + select_token: AttachedToken(select_token), distinct, top, + top_before_distinct, projection, into, from, @@ -9339,6 +11549,11 @@ impl<'a> Parser<'a> { qualify, value_table_mode, connect_by, + flavor: if from_first { + SelectFlavor::FromFirst + } else { + SelectFlavor::Standard + }, }) } @@ -9425,160 +11640,326 @@ impl<'a> Parser<'a> { } /// Parse a `SET ROLE` statement. Expects SET to be consumed already. - fn parse_set_role(&mut self, modifier: Option) -> Result { - self.expect_keyword(Keyword::ROLE)?; - let context_modifier = match modifier { - Some(Keyword::LOCAL) => ContextModifier::Local, - Some(Keyword::SESSION) => ContextModifier::Session, - _ => ContextModifier::None, - }; + fn parse_set_role( + &mut self, + modifier: Option, + ) -> Result { + self.expect_keyword_is(Keyword::ROLE)?; let role_name = if self.parse_keyword(Keyword::NONE) { None } else { - Some(self.parse_identifier(false)?) + Some(self.parse_identifier()?) }; - Ok(Statement::SetRole { - context_modifier, + Ok(Statement::Set(Set::SetRole { + context_modifier: modifier, role_name, - }) + })) } - pub fn parse_set(&mut self) -> Result { - let modifier = - self.parse_one_of_keywords(&[Keyword::SESSION, Keyword::LOCAL, Keyword::HIVEVAR]); - if let Some(Keyword::HIVEVAR) = modifier { - self.expect_token(&Token::Colon)?; - } else if let Some(set_role_stmt) = - self.maybe_parse(|parser| parser.parse_set_role(modifier))? - { - return Ok(set_role_stmt); + fn parse_set_values( + &mut self, + parenthesized_assignment: bool, + ) -> Result, ParserError> { + let mut values = vec![]; + + if parenthesized_assignment { + self.expect_token(&Token::LParen)?; } - let variables = if self.parse_keywords(&[Keyword::TIME, Keyword::ZONE]) { - OneOrManyWithParens::One(ObjectName(vec!["TIMEZONE".into()])) - } else if self.dialect.supports_parenthesized_set_variables() + loop { + let value = if let Some(expr) = self.try_parse_expr_sub_query()? { + expr + } else if let Ok(expr) = self.parse_expr() { + expr + } else { + self.expected("variable value", self.peek_token())? + }; + + values.push(value); + if self.consume_token(&Token::Comma) { + continue; + } + + if parenthesized_assignment { + self.expect_token(&Token::RParen)?; + } + return Ok(values); + } + } + + fn parse_context_modifier(&mut self) -> Option { + let modifier = + self.parse_one_of_keywords(&[Keyword::SESSION, Keyword::LOCAL, Keyword::GLOBAL])?; + + Self::keyword_to_modifier(modifier) + } + + /// Parse a single SET statement assignment `var = expr`. 
+ fn parse_set_assignment(&mut self) -> Result { + let scope = self.parse_context_modifier(); + + let name = if self.dialect.supports_parenthesized_set_variables() && self.consume_token(&Token::LParen) { - let variables = OneOrManyWithParens::Many( - self.parse_comma_separated(|parser: &mut Parser<'a>| { - parser.parse_identifier(false) - })? - .into_iter() - .map(|ident| ObjectName(vec![ident])) - .collect(), - ); - self.expect_token(&Token::RParen)?; - variables + // Parenthesized assignments are handled in the `parse_set` function after + // trying to parse list of assignments using this function. + // If a dialect supports both, and we find a LParen, we early exit from this function. + self.expected("Unparenthesized assignment", self.peek_token())? } else { - OneOrManyWithParens::One(self.parse_object_name(false)?) + self.parse_object_name(false)? + }; + + if !(self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO)) { + return self.expected("assignment operator", self.peek_token()); + } + + let value = self.parse_expr()?; + + Ok(SetAssignment { scope, name, value }) + } + + fn parse_set(&mut self) -> Result { + let hivevar = self.parse_keyword(Keyword::HIVEVAR); + + // Modifier is either HIVEVAR: or a ContextModifier (LOCAL, SESSION, etc), not both + let scope = if !hivevar { + self.parse_context_modifier() + } else { + None }; - if matches!(&variables, OneOrManyWithParens::One(variable) if variable.to_string().eq_ignore_ascii_case("NAMES") - && dialect_of!(self is MySqlDialect | GenericDialect)) + if hivevar { + self.expect_token(&Token::Colon)?; + } + + if let Some(set_role_stmt) = self.maybe_parse(|parser| parser.parse_set_role(scope))? { + return Ok(set_role_stmt); + } + + // Handle special cases first + if self.parse_keywords(&[Keyword::TIME, Keyword::ZONE]) + || self.parse_keyword(Keyword::TIMEZONE) { + if self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO) { + return Ok(Set::SingleAssignment { + scope, + hivevar, + variable: ObjectName::from(vec!["TIMEZONE".into()]), + values: self.parse_set_values(false)?, + } + .into()); + } else { + // A shorthand alias for SET TIME ZONE that doesn't require + // the assignment operator. It's originally PostgreSQL specific, + // but we allow it for all the dialects + return Ok(Set::SetTimeZone { + local: scope == Some(ContextModifier::Local), + value: self.parse_expr()?, + } + .into()); + } + } else if self.dialect.supports_set_names() && self.parse_keyword(Keyword::NAMES) { if self.parse_keyword(Keyword::DEFAULT) { - return Ok(Statement::SetNamesDefault {}); + return Ok(Set::SetNamesDefault {}.into()); } - - let charset_name = self.parse_literal_string()?; + let charset_name = self.parse_identifier()?; let collation_name = if self.parse_one_of_keywords(&[Keyword::COLLATE]).is_some() { Some(self.parse_literal_string()?) } else { None }; - return Ok(Statement::SetNames { + return Ok(Set::SetNames { charset_name, collation_name, - }); - } - - let parenthesized_assignment = matches!(&variables, OneOrManyWithParens::Many(_)); - - if self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO) { - if parenthesized_assignment { - self.expect_token(&Token::LParen)?; } - - let mut values = vec![]; - loop { - let value = if let Some(expr) = self.try_parse_expr_sub_query()? { - expr - } else if let Ok(expr) = self.parse_expr() { - expr - } else { - self.expected("variable value", self.peek_token())? 
- }; - - values.push(value); - if self.consume_token(&Token::Comma) { - continue; - } - - if parenthesized_assignment { - self.expect_token(&Token::RParen)?; - } - return Ok(Statement::SetVariable { - local: modifier == Some(Keyword::LOCAL), - hivevar: Some(Keyword::HIVEVAR) == modifier, - variables, - value: values, - }); - } - } - - let OneOrManyWithParens::One(variable) = variables else { - return self.expected("set variable", self.peek_token()); - }; - - if variable.to_string().eq_ignore_ascii_case("TIMEZONE") { - // for some db (e.g. postgresql), SET TIME ZONE is an alias for SET TIMEZONE [TO|=] - match self.parse_expr() { - Ok(expr) => Ok(Statement::SetTimeZone { - local: modifier == Some(Keyword::LOCAL), - value: expr, - }), - _ => self.expected("timezone value", self.peek_token())?, - } - } else if variable.to_string() == "CHARACTERISTICS" { + .into()); + } else if self.parse_keyword(Keyword::CHARACTERISTICS) { self.expect_keywords(&[Keyword::AS, Keyword::TRANSACTION])?; - Ok(Statement::SetTransaction { + return Ok(Set::SetTransaction { modes: self.parse_transaction_modes()?, snapshot: None, session: true, - }) - } else if variable.to_string() == "TRANSACTION" && modifier.is_none() { + } + .into()); + } else if self.parse_keyword(Keyword::TRANSACTION) { if self.parse_keyword(Keyword::SNAPSHOT) { - let snapshot_id = self.parse_value()?; - return Ok(Statement::SetTransaction { + let snapshot_id = self.parse_value()?.value; + return Ok(Set::SetTransaction { modes: vec![], snapshot: Some(snapshot_id), session: false, - }); + } + .into()); } - Ok(Statement::SetTransaction { + return Ok(Set::SetTransaction { modes: self.parse_transaction_modes()?, snapshot: None, session: false, - }) + } + .into()); + } + + if self.dialect.supports_comma_separated_set_assignments() { + if scope.is_some() { + self.prev_token(); + } + + if let Some(assignments) = self + .maybe_parse(|parser| parser.parse_comma_separated(Parser::parse_set_assignment))? + { + return if assignments.len() > 1 { + Ok(Set::MultipleAssignments { assignments }.into()) + } else { + let SetAssignment { scope, name, value } = + assignments.into_iter().next().ok_or_else(|| { + ParserError::ParserError("Expected at least one assignment".to_string()) + })?; + + Ok(Set::SingleAssignment { + scope, + hivevar, + variable: name, + values: vec![value], + } + .into()) + }; + } + } + + let variables = if self.dialect.supports_parenthesized_set_variables() + && self.consume_token(&Token::LParen) + { + let vars = OneOrManyWithParens::Many( + self.parse_comma_separated(|parser: &mut Parser<'a>| parser.parse_identifier())? + .into_iter() + .map(|ident| ObjectName::from(vec![ident])) + .collect(), + ); + self.expect_token(&Token::RParen)?; + vars } else { - self.expected("equals sign or TO", self.peek_token()) + OneOrManyWithParens::One(self.parse_object_name(false)?) 
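// Illustrative sketch (not part of this change) for the reworked SET parsing above:
// `SET TIME ZONE` keeps its operator-free shorthand and becomes `Set::SetTimeZone`,
// while a plain `name = value` pair becomes a single-variable assignment. The variable
// name is a placeholder.
use sqlparser::dialect::GenericDialect;
use sqlparser::parser::Parser;

fn main() -> Result<(), sqlparser::parser::ParserError> {
    let dialect = GenericDialect {};
    println!("{:?}", Parser::parse_sql(&dialect, "SET TIME ZONE 'UTC'")?);
    println!("{:?}", Parser::parse_sql(&dialect, "SET autocommit = 1")?);
    Ok(())
}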
+ }; + + if self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO) { + let stmt = match variables { + OneOrManyWithParens::One(var) => Set::SingleAssignment { + scope, + hivevar, + variable: var, + values: self.parse_set_values(false)?, + }, + OneOrManyWithParens::Many(vars) => Set::ParenthesizedAssignments { + variables: vars, + values: self.parse_set_values(true)?, + }, + }; + + return Ok(stmt.into()); + } + + if self.dialect.supports_set_stmt_without_operator() { + self.prev_token(); + return self.parse_set_session_params(); + }; + + self.expected("equals sign or TO", self.peek_token()) + } + + pub fn parse_set_session_params(&mut self) -> Result { + if self.parse_keyword(Keyword::STATISTICS) { + let topic = match self.parse_one_of_keywords(&[ + Keyword::IO, + Keyword::PROFILE, + Keyword::TIME, + Keyword::XML, + ]) { + Some(Keyword::IO) => SessionParamStatsTopic::IO, + Some(Keyword::PROFILE) => SessionParamStatsTopic::Profile, + Some(Keyword::TIME) => SessionParamStatsTopic::Time, + Some(Keyword::XML) => SessionParamStatsTopic::Xml, + _ => return self.expected("IO, PROFILE, TIME or XML", self.peek_token()), + }; + let value = self.parse_session_param_value()?; + Ok( + Set::SetSessionParam(SetSessionParamKind::Statistics(SetSessionParamStatistics { + topic, + value, + })) + .into(), + ) + } else if self.parse_keyword(Keyword::IDENTITY_INSERT) { + let obj = self.parse_object_name(false)?; + let value = self.parse_session_param_value()?; + Ok(Set::SetSessionParam(SetSessionParamKind::IdentityInsert( + SetSessionParamIdentityInsert { obj, value }, + )) + .into()) + } else if self.parse_keyword(Keyword::OFFSETS) { + let keywords = self.parse_comma_separated(|parser| { + let next_token = parser.next_token(); + match &next_token.token { + Token::Word(w) => Ok(w.to_string()), + _ => parser.expected("SQL keyword", next_token), + } + })?; + let value = self.parse_session_param_value()?; + Ok( + Set::SetSessionParam(SetSessionParamKind::Offsets(SetSessionParamOffsets { + keywords, + value, + })) + .into(), + ) + } else { + let names = self.parse_comma_separated(|parser| { + let next_token = parser.next_token(); + match next_token.token { + Token::Word(w) => Ok(w.to_string()), + _ => parser.expected("Session param name", next_token), + } + })?; + let value = self.parse_expr()?.to_string(); + Ok( + Set::SetSessionParam(SetSessionParamKind::Generic(SetSessionParamGeneric { + names, + value, + })) + .into(), + ) + } + } + + fn parse_session_param_value(&mut self) -> Result { + if self.parse_keyword(Keyword::ON) { + Ok(SessionParamValue::On) + } else if self.parse_keyword(Keyword::OFF) { + Ok(SessionParamValue::Off) + } else { + self.expected("ON or OFF", self.peek_token()) } } pub fn parse_show(&mut self) -> Result { + let terse = self.parse_keyword(Keyword::TERSE); let extended = self.parse_keyword(Keyword::EXTENDED); let full = self.parse_keyword(Keyword::FULL); let session = self.parse_keyword(Keyword::SESSION); let global = self.parse_keyword(Keyword::GLOBAL); + let external = self.parse_keyword(Keyword::EXTERNAL); if self .parse_one_of_keywords(&[Keyword::COLUMNS, Keyword::FIELDS]) .is_some() { Ok(self.parse_show_columns(extended, full)?) } else if self.parse_keyword(Keyword::TABLES) { - Ok(self.parse_show_tables(extended, full)?) + Ok(self.parse_show_tables(terse, extended, full, external)?) + } else if self.parse_keywords(&[Keyword::MATERIALIZED, Keyword::VIEWS]) { + Ok(self.parse_show_views(terse, true)?) 
+ } else if self.parse_keyword(Keyword::VIEWS) { + Ok(self.parse_show_views(terse, false)?) } else if self.parse_keyword(Keyword::FUNCTIONS) { Ok(self.parse_show_functions()?) } else if extended || full { @@ -9605,6 +11986,10 @@ impl<'a> Parser<'a> { session, global, }) + } else if self.parse_keyword(Keyword::DATABASES) { + self.parse_show_databases(terse) + } else if self.parse_keyword(Keyword::SCHEMAS) { + self.parse_show_schemas(terse) } else { Ok(Statement::ShowVariable { variable: self.parse_identifiers()?, @@ -9612,6 +11997,26 @@ impl<'a> Parser<'a> { } } + fn parse_show_databases(&mut self, terse: bool) -> Result { + let history = self.parse_keyword(Keyword::HISTORY); + let show_options = self.parse_show_stmt_options()?; + Ok(Statement::ShowDatabases { + terse, + history, + show_options, + }) + } + + fn parse_show_schemas(&mut self, terse: bool) -> Result { + let history = self.parse_keyword(Keyword::HISTORY); + let show_options = self.parse_show_stmt_options()?; + Ok(Statement::ShowSchemas { + terse, + history, + show_options, + }) + } + pub fn parse_show_create(&mut self) -> Result { let obj_type = match self.expect_one_of_keywords(&[ Keyword::TABLE, @@ -9642,41 +12047,43 @@ impl<'a> Parser<'a> { extended: bool, full: bool, ) -> Result { - self.expect_one_of_keywords(&[Keyword::FROM, Keyword::IN])?; - let object_name = self.parse_object_name(false)?; - let table_name = match self.parse_one_of_keywords(&[Keyword::FROM, Keyword::IN]) { - Some(_) => { - let db_name = vec![self.parse_identifier(false)?]; - let ObjectName(table_name) = object_name; - let object_name = db_name.into_iter().chain(table_name).collect(); - ObjectName(object_name) - } - None => object_name, - }; - let filter = self.parse_show_statement_filter()?; + let show_options = self.parse_show_stmt_options()?; Ok(Statement::ShowColumns { extended, full, - table_name, - filter, + show_options, }) } - pub fn parse_show_tables( + fn parse_show_tables( &mut self, + terse: bool, extended: bool, full: bool, + external: bool, ) -> Result { - let db_name = match self.parse_one_of_keywords(&[Keyword::FROM, Keyword::IN]) { - Some(_) => Some(self.parse_identifier(false)?), - None => None, - }; - let filter = self.parse_show_statement_filter()?; + let history = !external && self.parse_keyword(Keyword::HISTORY); + let show_options = self.parse_show_stmt_options()?; Ok(Statement::ShowTables { + terse, + history, extended, full, - db_name, - filter, + external, + show_options, + }) + } + + fn parse_show_views( + &mut self, + terse: bool, + materialized: bool, + ) -> Result { + let show_options = self.parse_show_stmt_options()?; + Ok(Statement::ShowViews { + materialized, + terse, + show_options, }) } @@ -9704,7 +12111,12 @@ impl<'a> Parser<'a> { } else if self.parse_keyword(Keyword::WHERE) { Ok(Some(ShowStatementFilter::Where(self.parse_expr()?))) } else { - Ok(None) + self.maybe_parse(|parser| -> Result { + parser.parse_literal_string() + })? 
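// Illustrative sketch (not part of this change) for the extended SHOW support above
// (SHOW DATABASES / SCHEMAS / [MATERIALIZED] VIEWS with the TERSE and HISTORY
// modifiers). Assumption: these forms follow Snowflake's SHOW grammar, hence
// SnowflakeDialect; the LIKE pattern is a placeholder.
use sqlparser::dialect::SnowflakeDialect;
use sqlparser::parser::Parser;

fn main() {
    for sql in ["SHOW DATABASES", "SHOW TERSE VIEWS LIKE 'v%'"] {
        match Parser::parse_sql(&SnowflakeDialect {}, sql) {
            Ok(statements) => println!("{sql} -> {statements:?}"),
            Err(e) => println!("{sql} -> rejected: {e}"),
        }
    }
}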
+ .map_or(Ok(None), |filter| { + Ok(Some(ShowStatementFilter::NoKeyword(filter))) + }) } } @@ -9719,28 +12131,56 @@ impl<'a> Parser<'a> { } else if dialect_of!(self is DatabricksDialect) { self.parse_one_of_keywords(&[Keyword::CATALOG, Keyword::DATABASE, Keyword::SCHEMA]) } else if dialect_of!(self is SnowflakeDialect) { - self.parse_one_of_keywords(&[Keyword::DATABASE, Keyword::SCHEMA, Keyword::WAREHOUSE]) + self.parse_one_of_keywords(&[ + Keyword::DATABASE, + Keyword::SCHEMA, + Keyword::WAREHOUSE, + Keyword::ROLE, + Keyword::SECONDARY, + ]) } else { None // No specific keywords for other dialects, including GenericDialect }; - let obj_name = self.parse_object_name(false)?; - let result = match parsed_keyword { - Some(Keyword::CATALOG) => Use::Catalog(obj_name), - Some(Keyword::DATABASE) => Use::Database(obj_name), - Some(Keyword::SCHEMA) => Use::Schema(obj_name), - Some(Keyword::WAREHOUSE) => Use::Warehouse(obj_name), - _ => Use::Object(obj_name), + let result = if matches!(parsed_keyword, Some(Keyword::SECONDARY)) { + self.parse_secondary_roles()? + } else { + let obj_name = self.parse_object_name(false)?; + match parsed_keyword { + Some(Keyword::CATALOG) => Use::Catalog(obj_name), + Some(Keyword::DATABASE) => Use::Database(obj_name), + Some(Keyword::SCHEMA) => Use::Schema(obj_name), + Some(Keyword::WAREHOUSE) => Use::Warehouse(obj_name), + Some(Keyword::ROLE) => Use::Role(obj_name), + _ => Use::Object(obj_name), + } }; Ok(Statement::Use(result)) } + fn parse_secondary_roles(&mut self) -> Result { + self.expect_one_of_keywords(&[Keyword::ROLES, Keyword::ROLE])?; + if self.parse_keyword(Keyword::NONE) { + Ok(Use::SecondaryRoles(SecondaryRoles::None)) + } else if self.parse_keyword(Keyword::ALL) { + Ok(Use::SecondaryRoles(SecondaryRoles::All)) + } else { + let roles = self.parse_comma_separated(|parser| parser.parse_identifier())?; + Ok(Use::SecondaryRoles(SecondaryRoles::List(roles))) + } + } + pub fn parse_table_and_joins(&mut self) -> Result { let relation = self.parse_table_factor()?; // Note that for keywords to be properly handled here, they need to be // added to `RESERVED_FOR_TABLE_ALIAS`, otherwise they may be parsed as // a table alias. + let joins = self.parse_joins()?; + Ok(TableWithJoins { relation, joins }) + } + + fn parse_joins(&mut self) -> Result, ParserError> { let mut joins = vec![]; loop { let global = self.parse_keyword(Keyword::GLOBAL); @@ -9760,16 +12200,16 @@ impl<'a> Parser<'a> { } } else if self.parse_keyword(Keyword::OUTER) { // MSSQL extension, similar to LEFT JOIN LATERAL .. 
ON 1=1 - self.expect_keyword(Keyword::APPLY)?; + self.expect_keyword_is(Keyword::APPLY)?; Join { relation: self.parse_table_factor()?, global, join_operator: JoinOperator::OuterApply, } } else if self.parse_keyword(Keyword::ASOF) { - self.expect_keyword(Keyword::JOIN)?; + self.expect_keyword_is(Keyword::JOIN)?; let relation = self.parse_table_factor()?; - self.expect_keyword(Keyword::MATCH_CONDITION)?; + self.expect_keyword_is(Keyword::MATCH_CONDITION)?; let match_condition = self.parse_parenthesized(Self::parse_expr)?; Join { relation, @@ -9789,9 +12229,13 @@ impl<'a> Parser<'a> { let join_operator_type = match peek_keyword { Keyword::INNER | Keyword::JOIN => { - let _ = self.parse_keyword(Keyword::INNER); // [ INNER ] - self.expect_keyword(Keyword::JOIN)?; - JoinOperator::Inner + let inner = self.parse_keyword(Keyword::INNER); // [ INNER ] + self.expect_keyword_is(Keyword::JOIN)?; + if inner { + JoinOperator::Inner + } else { + JoinOperator::Join + } } kw @ Keyword::LEFT | kw @ Keyword::RIGHT => { let _ = self.next_token(); // consume LEFT/RIGHT @@ -9804,7 +12248,7 @@ impl<'a> Parser<'a> { ]); match join_type { Some(Keyword::OUTER) => { - self.expect_keyword(Keyword::JOIN)?; + self.expect_keyword_is(Keyword::JOIN)?; if is_left { JoinOperator::LeftOuter } else { @@ -9812,7 +12256,7 @@ impl<'a> Parser<'a> { } } Some(Keyword::SEMI) => { - self.expect_keyword(Keyword::JOIN)?; + self.expect_keyword_is(Keyword::JOIN)?; if is_left { JoinOperator::LeftSemi } else { @@ -9820,7 +12264,7 @@ impl<'a> Parser<'a> { } } Some(Keyword::ANTI) => { - self.expect_keyword(Keyword::JOIN)?; + self.expect_keyword_is(Keyword::JOIN)?; if is_left { JoinOperator::LeftAnti } else { @@ -9829,9 +12273,9 @@ impl<'a> Parser<'a> { } Some(Keyword::JOIN) => { if is_left { - JoinOperator::LeftOuter + JoinOperator::Left } else { - JoinOperator::RightOuter + JoinOperator::Right } } _ => { @@ -9841,21 +12285,44 @@ impl<'a> Parser<'a> { } } } + Keyword::ANTI => { + let _ = self.next_token(); // consume ANTI + self.expect_keyword_is(Keyword::JOIN)?; + JoinOperator::Anti + } + Keyword::SEMI => { + let _ = self.next_token(); // consume SEMI + self.expect_keyword_is(Keyword::JOIN)?; + JoinOperator::Semi + } Keyword::FULL => { let _ = self.next_token(); // consume FULL let _ = self.parse_keyword(Keyword::OUTER); // [ OUTER ] - self.expect_keyword(Keyword::JOIN)?; + self.expect_keyword_is(Keyword::JOIN)?; JoinOperator::FullOuter } Keyword::OUTER => { return self.expected("LEFT, RIGHT, or FULL", self.peek_token()); } + Keyword::STRAIGHT_JOIN => { + let _ = self.next_token(); // consume STRAIGHT_JOIN + JoinOperator::StraightJoin + } _ if natural => { return self.expected("a join type after NATURAL", self.peek_token()); } _ => break, }; - let relation = self.parse_table_factor()?; + let mut relation = self.parse_table_factor()?; + + if self.peek_parens_less_nested_join() { + let joins = self.parse_joins()?; + relation = TableFactor::NestedJoin { + table_with_joins: Box::new(TableWithJoins { relation, joins }), + alias: None, + }; + } + let join_constraint = self.parse_join_constraint(natural)?; Join { relation, @@ -9865,7 +12332,21 @@ impl<'a> Parser<'a> { }; joins.push(join); } - Ok(TableWithJoins { relation, joins }) + Ok(joins) + } + + fn peek_parens_less_nested_join(&self) -> bool { + matches!( + self.peek_token_ref().token, + Token::Word(Word { + keyword: Keyword::JOIN + | Keyword::INNER + | Keyword::LEFT + | Keyword::RIGHT + | Keyword::FULL, + .. 
+ }) + ) } /// A table name or a parenthesized subquery, followed by optional `[AS] alias` @@ -9878,7 +12359,7 @@ impl<'a> Parser<'a> { let name = self.parse_object_name(false)?; self.expect_token(&Token::LParen)?; let args = self.parse_optional_args()?; - let alias = self.parse_optional_table_alias(keywords::RESERVED_FOR_TABLE_ALIAS)?; + let alias = self.maybe_parse_table_alias()?; Ok(TableFactor::Function { lateral: true, name, @@ -9891,7 +12372,7 @@ impl<'a> Parser<'a> { self.expect_token(&Token::LParen)?; let expr = self.parse_expr()?; self.expect_token(&Token::RParen)?; - let alias = self.parse_optional_table_alias(keywords::RESERVED_FOR_TABLE_ALIAS)?; + let alias = self.maybe_parse_table_alias()?; Ok(TableFactor::TableFunction { expr, alias }) } else if self.consume_token(&Token::LParen) { // A left paren introduces either a derived table (i.e., a subquery) @@ -9940,7 +12421,7 @@ impl<'a> Parser<'a> { #[allow(clippy::if_same_then_else)] if !table_and_joins.joins.is_empty() { self.expect_token(&Token::RParen)?; - let alias = self.parse_optional_table_alias(keywords::RESERVED_FOR_TABLE_ALIAS)?; + let alias = self.maybe_parse_table_alias()?; Ok(TableFactor::NestedJoin { table_with_joins: Box::new(table_and_joins), alias, @@ -9953,7 +12434,7 @@ impl<'a> Parser<'a> { // (B): `table_and_joins` (what we found inside the parentheses) // is a nested join `(foo JOIN bar)`, not followed by other joins. self.expect_token(&Token::RParen)?; - let alias = self.parse_optional_table_alias(keywords::RESERVED_FOR_TABLE_ALIAS)?; + let alias = self.maybe_parse_table_alias()?; Ok(TableFactor::NestedJoin { table_with_joins: Box::new(table_and_joins), alias, @@ -9967,9 +12448,7 @@ impl<'a> Parser<'a> { // [AS alias])`) as well. self.expect_token(&Token::RParen)?; - if let Some(outer_alias) = - self.parse_optional_table_alias(keywords::RESERVED_FOR_TABLE_ALIAS)? - { + if let Some(outer_alias) = self.maybe_parse_table_alias()? { // Snowflake also allows specifying an alias *after* parens // e.g. `FROM (mytable) AS alias` match &mut table_and_joins.relation { @@ -9978,6 +12457,8 @@ impl<'a> Parser<'a> { | TableFactor::Function { alias, .. } | TableFactor::UNNEST { alias, .. } | TableFactor::JsonTable { alias, .. } + | TableFactor::XmlTable { alias, .. } + | TableFactor::OpenJsonTable { alias, .. } | TableFactor::TableFunction { alias, .. } | TableFactor::Pivot { alias, .. } | TableFactor::Unpivot { alias, .. } @@ -10015,27 +12496,26 @@ impl<'a> Parser<'a> { ] ) { - self.expect_keyword(Keyword::VALUES)?; + self.expect_keyword_is(Keyword::VALUES)?; // Snowflake and Databricks allow syntax like below: // SELECT * FROM VALUES (1, 'a'), (2, 'b') AS t (col1, col2) // where there are no parentheses around the VALUES clause. 
let values = SetExpr::Values(self.parse_values(false)?); - let alias = self.parse_optional_table_alias(keywords::RESERVED_FOR_TABLE_ALIAS)?; + let alias = self.maybe_parse_table_alias()?; Ok(TableFactor::Derived { lateral: false, subquery: Box::new(Query { with: None, body: Box::new(values), order_by: None, - limit: None, - limit_by: vec![], - offset: None, + limit_clause: None, fetch: None, locks: vec![], for_clause: None, settings: None, format_clause: None, + pipe_operators: vec![], }), alias, }) @@ -10047,7 +12527,7 @@ impl<'a> Parser<'a> { self.expect_token(&Token::RParen)?; let with_ordinality = self.parse_keywords(&[Keyword::WITH, Keyword::ORDINALITY]); - let alias = match self.parse_optional_table_alias(keywords::RESERVED_FOR_TABLE_ALIAS) { + let alias = match self.maybe_parse_table_alias() { Ok(Some(alias)) => Some(alias), Ok(None) => None, Err(e) => return Err(e), @@ -10078,22 +12558,33 @@ impl<'a> Parser<'a> { } else if self.parse_keyword_with_tokens(Keyword::JSON_TABLE, &[Token::LParen]) { let json_expr = self.parse_expr()?; self.expect_token(&Token::Comma)?; - let json_path = self.parse_value()?; - self.expect_keyword(Keyword::COLUMNS)?; + let json_path = self.parse_value()?.value; + self.expect_keyword_is(Keyword::COLUMNS)?; self.expect_token(&Token::LParen)?; let columns = self.parse_comma_separated(Parser::parse_json_table_column_def)?; self.expect_token(&Token::RParen)?; self.expect_token(&Token::RParen)?; - let alias = self.parse_optional_table_alias(keywords::RESERVED_FOR_TABLE_ALIAS)?; + let alias = self.maybe_parse_table_alias()?; Ok(TableFactor::JsonTable { json_expr, json_path, columns, alias, }) + } else if self.parse_keyword_with_tokens(Keyword::OPENJSON, &[Token::LParen]) { + self.prev_token(); + self.parse_open_json_table_factor() + } else if self.parse_keyword_with_tokens(Keyword::XMLTABLE, &[Token::LParen]) { + self.prev_token(); + self.parse_xml_table_factor() } else { let name = self.parse_object_name(true)?; + let json_path = match self.peek_token().token { + Token::LBracket if self.dialect.supports_partiql() => Some(self.parse_json_path()?), + _ => None, + }; + let partitions: Vec = if dialect_of!(self is MySqlDialect | GenericDialect) && self.parse_keyword(Keyword::PARTITION) { @@ -10103,7 +12594,7 @@ impl<'a> Parser<'a> { }; // Parse potential version qualifier - let version = self.parse_table_version()?; + let version = self.maybe_parse_table_version()?; // Postgres, MSSQL, ClickHouse: table-valued functions: let args = if self.consume_token(&Token::LParen) { @@ -10114,7 +12605,22 @@ impl<'a> Parser<'a> { let with_ordinality = self.parse_keywords(&[Keyword::WITH, Keyword::ORDINALITY]); - let alias = self.parse_optional_table_alias(keywords::RESERVED_FOR_TABLE_ALIAS)?; + let mut sample = None; + if self.dialect.supports_table_sample_before_alias() { + if let Some(parsed_sample) = self.maybe_parse_table_sample()? { + sample = Some(TableSampleKind::BeforeTableAlias(parsed_sample)); + } + } + + let alias = self.maybe_parse_table_alias()?; + + // MYSQL-specific table hints: + let index_hints = if self.dialect.supports_table_hints() { + self.maybe_parse(|p| p.parse_table_index_hints())? 
+ .unwrap_or(vec![]) + } else { + vec![] + }; // MSSQL-specific table hints: let mut with_hints = vec![]; @@ -10128,32 +12634,272 @@ impl<'a> Parser<'a> { } }; - let mut table = TableFactor::Table { - name, - alias, - args, - with_hints, - version, - partitions, - with_ordinality, - }; + if !self.dialect.supports_table_sample_before_alias() { + if let Some(parsed_sample) = self.maybe_parse_table_sample()? { + sample = Some(TableSampleKind::AfterTableAlias(parsed_sample)); + } + } + + let mut table = TableFactor::Table { + name, + alias, + args, + with_hints, + version, + partitions, + with_ordinality, + json_path, + sample, + index_hints, + }; + + while let Some(kw) = self.parse_one_of_keywords(&[Keyword::PIVOT, Keyword::UNPIVOT]) { + table = match kw { + Keyword::PIVOT => self.parse_pivot_table_factor(table)?, + Keyword::UNPIVOT => self.parse_unpivot_table_factor(table)?, + _ => unreachable!(), + } + } + + if self.dialect.supports_match_recognize() + && self.parse_keyword(Keyword::MATCH_RECOGNIZE) + { + table = self.parse_match_recognize(table)?; + } + + Ok(table) + } + } + + fn maybe_parse_table_sample(&mut self) -> Result>, ParserError> { + let modifier = if self.parse_keyword(Keyword::TABLESAMPLE) { + TableSampleModifier::TableSample + } else if self.parse_keyword(Keyword::SAMPLE) { + TableSampleModifier::Sample + } else { + return Ok(None); + }; + + let name = match self.parse_one_of_keywords(&[ + Keyword::BERNOULLI, + Keyword::ROW, + Keyword::SYSTEM, + Keyword::BLOCK, + ]) { + Some(Keyword::BERNOULLI) => Some(TableSampleMethod::Bernoulli), + Some(Keyword::ROW) => Some(TableSampleMethod::Row), + Some(Keyword::SYSTEM) => Some(TableSampleMethod::System), + Some(Keyword::BLOCK) => Some(TableSampleMethod::Block), + _ => None, + }; + + let parenthesized = self.consume_token(&Token::LParen); + + let (quantity, bucket) = if parenthesized && self.parse_keyword(Keyword::BUCKET) { + let selected_bucket = self.parse_number_value()?.value; + self.expect_keywords(&[Keyword::OUT, Keyword::OF])?; + let total = self.parse_number_value()?.value; + let on = if self.parse_keyword(Keyword::ON) { + Some(self.parse_expr()?) + } else { + None + }; + ( + None, + Some(TableSampleBucket { + bucket: selected_bucket, + total, + on, + }), + ) + } else { + let value = match self.maybe_parse(|p| p.parse_expr())? { + Some(num) => num, + None => { + let next_token = self.next_token(); + if let Token::Word(w) = next_token.token { + Expr::Value(Value::Placeholder(w.value).with_span(next_token.span)) + } else { + return parser_err!( + "Expecting number or byte length e.g. 100M", + self.peek_token().span.start + ); + } + } + }; + let unit = if self.parse_keyword(Keyword::ROWS) { + Some(TableSampleUnit::Rows) + } else if self.parse_keyword(Keyword::PERCENT) { + Some(TableSampleUnit::Percent) + } else { + None + }; + ( + Some(TableSampleQuantity { + parenthesized, + value, + unit, + }), + None, + ) + }; + if parenthesized { + self.expect_token(&Token::RParen)?; + } + + let seed = if self.parse_keyword(Keyword::REPEATABLE) { + Some(self.parse_table_sample_seed(TableSampleSeedModifier::Repeatable)?) + } else if self.parse_keyword(Keyword::SEED) { + Some(self.parse_table_sample_seed(TableSampleSeedModifier::Seed)?) + } else { + None + }; + + let offset = if self.parse_keyword(Keyword::OFFSET) { + Some(self.parse_expr()?) 
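// Illustrative sketch (not part of this change) for `maybe_parse_table_sample` above.
// Assumption: the dialect accepts the ANSI/Snowflake-style form shown here; Hive's
// `TABLESAMPLE (BUCKET x OUT OF y ...)` variant goes through the same code path. The
// table name and sampling parameters are placeholders.
use sqlparser::dialect::GenericDialect;
use sqlparser::parser::Parser;

fn main() {
    let sql = "SELECT * FROM lineitem TABLESAMPLE BERNOULLI (10) REPEATABLE (42)";
    match Parser::parse_sql(&GenericDialect {}, sql) {
        Ok(statements) => println!("{statements:?}"),
        Err(e) => println!("rejected: {e}"),
    }
}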
+ } else { + None + }; + + Ok(Some(Box::new(TableSample { + modifier, + name, + quantity, + seed, + bucket, + offset, + }))) + } + + fn parse_table_sample_seed( + &mut self, + modifier: TableSampleSeedModifier, + ) -> Result { + self.expect_token(&Token::LParen)?; + let value = self.parse_number_value()?.value; + self.expect_token(&Token::RParen)?; + Ok(TableSampleSeed { modifier, value }) + } + + /// Parses `OPENJSON( jsonExpression [ , path ] ) [ ]` clause, + /// assuming the `OPENJSON` keyword was already consumed. + fn parse_open_json_table_factor(&mut self) -> Result { + self.expect_token(&Token::LParen)?; + let json_expr = self.parse_expr()?; + let json_path = if self.consume_token(&Token::Comma) { + Some(self.parse_value()?.value) + } else { + None + }; + self.expect_token(&Token::RParen)?; + let columns = if self.parse_keyword(Keyword::WITH) { + self.expect_token(&Token::LParen)?; + let columns = self.parse_comma_separated(Parser::parse_openjson_table_column_def)?; + self.expect_token(&Token::RParen)?; + columns + } else { + Vec::new() + }; + let alias = self.maybe_parse_table_alias()?; + Ok(TableFactor::OpenJsonTable { + json_expr, + json_path, + columns, + alias, + }) + } + + fn parse_xml_table_factor(&mut self) -> Result { + self.expect_token(&Token::LParen)?; + let namespaces = if self.parse_keyword(Keyword::XMLNAMESPACES) { + self.expect_token(&Token::LParen)?; + let namespaces = self.parse_comma_separated(Parser::parse_xml_namespace_definition)?; + self.expect_token(&Token::RParen)?; + self.expect_token(&Token::Comma)?; + namespaces + } else { + vec![] + }; + let row_expression = self.parse_expr()?; + let passing = self.parse_xml_passing_clause()?; + self.expect_keyword_is(Keyword::COLUMNS)?; + let columns = self.parse_comma_separated(Parser::parse_xml_table_column)?; + self.expect_token(&Token::RParen)?; + let alias = self.maybe_parse_table_alias()?; + Ok(TableFactor::XmlTable { + namespaces, + row_expression, + passing, + columns, + alias, + }) + } + + fn parse_xml_namespace_definition(&mut self) -> Result { + let uri = self.parse_expr()?; + self.expect_keyword_is(Keyword::AS)?; + let name = self.parse_identifier()?; + Ok(XmlNamespaceDefinition { uri, name }) + } + + fn parse_xml_table_column(&mut self) -> Result { + let name = self.parse_identifier()?; + + let option = if self.parse_keyword(Keyword::FOR) { + self.expect_keyword(Keyword::ORDINALITY)?; + XmlTableColumnOption::ForOrdinality + } else { + let r#type = self.parse_data_type()?; + let mut path = None; + let mut default = None; + + if self.parse_keyword(Keyword::PATH) { + path = Some(self.parse_expr()?); + } - while let Some(kw) = self.parse_one_of_keywords(&[Keyword::PIVOT, Keyword::UNPIVOT]) { - table = match kw { - Keyword::PIVOT => self.parse_pivot_table_factor(table)?, - Keyword::UNPIVOT => self.parse_unpivot_table_factor(table)?, - _ => unreachable!(), - } + if self.parse_keyword(Keyword::DEFAULT) { + default = Some(self.parse_expr()?); } - if self.dialect.supports_match_recognize() - && self.parse_keyword(Keyword::MATCH_RECOGNIZE) - { - table = self.parse_match_recognize(table)?; + let not_null = self.parse_keywords(&[Keyword::NOT, Keyword::NULL]); + if !not_null { + // NULL is the default but can be specified explicitly + let _ = self.parse_keyword(Keyword::NULL); } - Ok(table) + XmlTableColumnOption::NamedInfo { + r#type, + path, + default, + nullable: !not_null, + } + }; + Ok(XmlTableColumn { name, option }) + } + + fn parse_xml_passing_clause(&mut self) -> Result { + let mut arguments = vec![]; + if 
self.parse_keyword(Keyword::PASSING) { + loop { + let by_value = + self.parse_keyword(Keyword::BY) && self.expect_keyword(Keyword::VALUE).is_ok(); + let expr = self.parse_expr()?; + let alias = if self.parse_keyword(Keyword::AS) { + Some(self.parse_identifier()?) + } else { + None + }; + arguments.push(XmlPassingArgument { + expr, + alias, + by_value, + }); + if !self.consume_token(&Token::Comma) { + break; + } + } } + Ok(XmlPassingClause { arguments }) } fn parse_match_recognize(&mut self, table: TableFactor) -> Result { @@ -10175,7 +12921,7 @@ impl<'a> Parser<'a> { self.parse_comma_separated(|p| { let expr = p.parse_expr()?; let _ = p.parse_keyword(Keyword::AS); - let alias = p.parse_identifier(false)?; + let alias = p.parse_identifier()?; Ok(Measure { expr, alias }) })? } else { @@ -10221,9 +12967,9 @@ impl<'a> Parser<'a> { } else if self.parse_keywords(&[Keyword::TO, Keyword::NEXT, Keyword::ROW]) { Some(AfterMatchSkip::ToNextRow) } else if self.parse_keywords(&[Keyword::TO, Keyword::FIRST]) { - Some(AfterMatchSkip::ToFirst(self.parse_identifier(false)?)) + Some(AfterMatchSkip::ToFirst(self.parse_identifier()?)) } else if self.parse_keywords(&[Keyword::TO, Keyword::LAST]) { - Some(AfterMatchSkip::ToLast(self.parse_identifier(false)?)) + Some(AfterMatchSkip::ToLast(self.parse_identifier()?)) } else { let found = self.next_token(); return self.expected("after match skip option", found); @@ -10232,21 +12978,21 @@ impl<'a> Parser<'a> { None }; - self.expect_keyword(Keyword::PATTERN)?; + self.expect_keyword_is(Keyword::PATTERN)?; let pattern = self.parse_parenthesized(Self::parse_pattern)?; - self.expect_keyword(Keyword::DEFINE)?; + self.expect_keyword_is(Keyword::DEFINE)?; let symbols = self.parse_comma_separated(|p| { - let symbol = p.parse_identifier(false)?; - p.expect_keyword(Keyword::AS)?; + let symbol = p.parse_identifier()?; + p.expect_keyword_is(Keyword::AS)?; let definition = p.parse_expr()?; Ok(SymbolDefinition { symbol, definition }) })?; self.expect_token(&Token::RParen)?; - let alias = self.parse_optional_table_alias(keywords::RESERVED_FOR_TABLE_ALIAS)?; + let alias = self.maybe_parse_table_alias()?; Ok(TableFactor::MatchRecognize { table: Box::new(table), @@ -10269,9 +13015,7 @@ impl<'a> Parser<'a> { } Token::LBrace => { self.expect_token(&Token::Minus)?; - let symbol = self - .parse_identifier(false) - .map(MatchRecognizeSymbol::Named)?; + let symbol = self.parse_identifier().map(MatchRecognizeSymbol::Named)?; self.expect_token(&Token::Minus)?; self.expect_token(&Token::RBrace)?; Ok(MatchRecognizePattern::Exclude(symbol)) @@ -10283,7 +13027,7 @@ impl<'a> Parser<'a> { }) if value == "PERMUTE" => { self.expect_token(&Token::LParen)?; let symbols = self.parse_comma_separated(|p| { - p.parse_identifier(false).map(MatchRecognizeSymbol::Named) + p.parse_identifier().map(MatchRecognizeSymbol::Named) })?; self.expect_token(&Token::RParen)?; Ok(MatchRecognizePattern::Permute(symbols)) @@ -10295,7 +13039,7 @@ impl<'a> Parser<'a> { } _ => { self.prev_token(); - self.parse_identifier(false) + self.parse_identifier() .map(MatchRecognizeSymbol::Named) .map(MatchRecognizePattern::Symbol) } @@ -10320,7 +13064,7 @@ impl<'a> Parser<'a> { return self.expected("literal number", next_token); }; self.expect_token(&Token::RBrace)?; - RepetitionQuantifier::AtMost(Self::parse(n, token.location)?) + RepetitionQuantifier::AtMost(Self::parse(n, token.span.start)?) 
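// Illustrative sketch (not part of this change) for `parse_xml_table_factor` and the
// PASSING / COLUMNS helpers above. Assumption: PostgreSQL-style XMLTABLE syntax; the
// XPath expressions, passed column and output column names are placeholders.
use sqlparser::dialect::PostgreSqlDialect;
use sqlparser::parser::Parser;

fn main() {
    let sql = "SELECT xt.* FROM XMLTABLE('/rows/row' PASSING xmldata COLUMNS id INT PATH '@id', name TEXT PATH 'name') AS xt";
    match Parser::parse_sql(&PostgreSqlDialect {}, sql) {
        Ok(statements) => println!("{statements:?}"),
        Err(e) => println!("rejected: {e}"),
    }
}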
} Token::Number(n, _) if self.consume_token(&Token::Comma) => { let next_token = self.next_token(); @@ -10328,12 +13072,12 @@ impl<'a> Parser<'a> { Token::Number(m, _) => { self.expect_token(&Token::RBrace)?; RepetitionQuantifier::Range( - Self::parse(n, token.location)?, - Self::parse(m, token.location)?, + Self::parse(n, token.span.start)?, + Self::parse(m, token.span.start)?, ) } Token::RBrace => { - RepetitionQuantifier::AtLeast(Self::parse(n, token.location)?) + RepetitionQuantifier::AtLeast(Self::parse(n, token.span.start)?) } _ => { return self.expected("} or upper bound", next_token); @@ -10342,7 +13086,7 @@ impl<'a> Parser<'a> { } Token::Number(n, _) => { self.expect_token(&Token::RBrace)?; - RepetitionQuantifier::Exactly(Self::parse(n, token.location)?) + RepetitionQuantifier::Exactly(Self::parse(n, token.span.start)?) } _ => return self.expected("quantifier range", token), } @@ -10384,45 +13128,91 @@ impl<'a> Parser<'a> { } } - /// Parse a given table version specifier. - /// - /// For now it only supports timestamp versioning for BigQuery and MSSQL dialects. - pub fn parse_table_version(&mut self) -> Result, ParserError> { - if dialect_of!(self is BigQueryDialect | MsSqlDialect) - && self.parse_keywords(&[Keyword::FOR, Keyword::SYSTEM_TIME, Keyword::AS, Keyword::OF]) - { - let expr = self.parse_expr()?; - Ok(Some(TableVersion::ForSystemTimeAsOf(expr))) - } else { - Ok(None) + /// Parses a the timestamp version specifier (i.e. query historical data) + pub fn maybe_parse_table_version(&mut self) -> Result, ParserError> { + if self.dialect.supports_timestamp_versioning() { + if self.parse_keywords(&[Keyword::FOR, Keyword::SYSTEM_TIME, Keyword::AS, Keyword::OF]) + { + let expr = self.parse_expr()?; + return Ok(Some(TableVersion::ForSystemTimeAsOf(expr))); + } else if self.peek_keyword(Keyword::AT) || self.peek_keyword(Keyword::BEFORE) { + let func_name = self.parse_object_name(true)?; + let func = self.parse_function(func_name)?; + return Ok(Some(TableVersion::Function(func))); + } } + Ok(None) } /// Parses MySQL's JSON_TABLE column definition. /// For example: `id INT EXISTS PATH '$' DEFAULT '0' ON EMPTY ERROR ON ERROR` pub fn parse_json_table_column_def(&mut self) -> Result { - let name = self.parse_identifier(false)?; + if self.parse_keyword(Keyword::NESTED) { + let _has_path_keyword = self.parse_keyword(Keyword::PATH); + let path = self.parse_value()?.value; + self.expect_keyword_is(Keyword::COLUMNS)?; + let columns = self.parse_parenthesized(|p| { + p.parse_comma_separated(Self::parse_json_table_column_def) + })?; + return Ok(JsonTableColumn::Nested(JsonTableNestedColumn { + path, + columns, + })); + } + let name = self.parse_identifier()?; + if self.parse_keyword(Keyword::FOR) { + self.expect_keyword_is(Keyword::ORDINALITY)?; + return Ok(JsonTableColumn::ForOrdinality(name)); + } let r#type = self.parse_data_type()?; let exists = self.parse_keyword(Keyword::EXISTS); - self.expect_keyword(Keyword::PATH)?; - let path = self.parse_value()?; + self.expect_keyword_is(Keyword::PATH)?; + let path = self.parse_value()?.value; let mut on_empty = None; let mut on_error = None; while let Some(error_handling) = self.parse_json_table_column_error_handling()? 
{ if self.parse_keyword(Keyword::EMPTY) { on_empty = Some(error_handling); } else { - self.expect_keyword(Keyword::ERROR)?; + self.expect_keyword_is(Keyword::ERROR)?; on_error = Some(error_handling); } } - Ok(JsonTableColumn { + Ok(JsonTableColumn::Named(JsonTableNamedColumn { name, r#type, path, exists, on_empty, on_error, + })) + } + + /// Parses MSSQL's `OPENJSON WITH` column definition. + /// + /// ```sql + /// colName type [ column_path ] [ AS JSON ] + /// ``` + /// + /// Reference: + pub fn parse_openjson_table_column_def(&mut self) -> Result { + let name = self.parse_identifier()?; + let r#type = self.parse_data_type()?; + let path = if let Token::SingleQuotedString(path) = self.peek_token().token { + self.next_token(); + Some(path) + } else { + None + }; + let as_json = self.parse_keyword(Keyword::AS); + if as_json { + self.expect_keyword_is(Keyword::JSON)?; + } + Ok(OpenJsonTableColumn { + name, + r#type, + path, + as_json, }) } @@ -10434,11 +13224,11 @@ impl<'a> Parser<'a> { } else if self.parse_keyword(Keyword::ERROR) { JsonTableColumnErrorHandling::Error } else if self.parse_keyword(Keyword::DEFAULT) { - JsonTableColumnErrorHandling::Default(self.parse_value()?) + JsonTableColumnErrorHandling::Default(self.parse_value()?.value) } else { return Ok(None); }; - self.expect_keyword(Keyword::ON)?; + self.expect_keyword_is(Keyword::ON)?; Ok(Some(res)) } @@ -10448,7 +13238,7 @@ impl<'a> Parser<'a> { ) -> Result { let subquery = self.parse_query()?; self.expect_token(&Token::RParen)?; - let alias = self.parse_optional_table_alias(keywords::RESERVED_FOR_TABLE_ALIAS)?; + let alias = self.maybe_parse_table_alias()?; Ok(TableFactor::Derived { lateral: match lateral { Lateral => true, @@ -10464,9 +13254,9 @@ impl<'a> Parser<'a> { Token::Word(w) => Ok(w.value), _ => self.expected("a function identifier", self.peek_token()), }?; - let expr = self.parse_function(ObjectName(vec![Ident::new(function_name)]))?; + let expr = self.parse_function(ObjectName::from(vec![Ident::new(function_name)]))?; let alias = if self.parse_keyword(Keyword::AS) { - Some(self.parse_identifier(false)?) + Some(self.parse_identifier()?) } else { None }; @@ -10474,13 +13264,12 @@ impl<'a> Parser<'a> { Ok(ExprWithAlias { expr, alias }) } /// Parses an expression with an optional alias - + /// /// Examples: - + /// /// ```sql /// SUM(price) AS total_price /// ``` - /// ```sql /// SUM(price) /// ``` @@ -10496,11 +13285,10 @@ impl<'a> Parser<'a> { /// assert_eq!(Some("b".to_string()), expr_with_alias.alias.map(|x|x.value)); /// # Ok(()) /// # } - pub fn parse_expr_with_alias(&mut self) -> Result { let expr = self.parse_expr()?; let alias = if self.parse_keyword(Keyword::AS) { - Some(self.parse_identifier(false)?) + Some(self.parse_identifier()?) 
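To illustrate what `parse_openjson_table_column_def` accepts, here is a sketch that parses an MSSQL `OPENJSON ... WITH` table factor, assuming the OPENJSON support in this changeset is wired into table-factor parsing. The `@json` variable and column names are invented.

```rust
use sqlparser::dialect::MsSqlDialect;
use sqlparser::parser::Parser;

fn main() {
    // Each WITH column is: name type [ 'json path' ] [ AS JSON ]
    let sql = "SELECT * FROM OPENJSON(@json) WITH (id INT '$.Id', name NVARCHAR(50) '$.Name', tags NVARCHAR(MAX) '$.Tags' AS JSON)";
    let statements = Parser::parse_sql(&MsSqlDialect {}, sql).expect("OPENJSON WITH should parse");
    println!("{}", statements[0]);
}
```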
} else { None }; @@ -10514,9 +13302,9 @@ impl<'a> Parser<'a> { ) -> Result { self.expect_token(&Token::LParen)?; let aggregate_functions = self.parse_comma_separated(Self::parse_aliased_function_call)?; - self.expect_keyword(Keyword::FOR)?; - let value_column = self.parse_object_name(false)?.0; - self.expect_keyword(Keyword::IN)?; + self.expect_keyword_is(Keyword::FOR)?; + let value_column = self.parse_period_separated(|p| p.parse_identifier())?; + self.expect_keyword_is(Keyword::IN)?; self.expect_token(&Token::LParen)?; let value_source = if self.parse_keyword(Keyword::ANY) { @@ -10544,7 +13332,7 @@ impl<'a> Parser<'a> { }; self.expect_token(&Token::RParen)?; - let alias = self.parse_optional_table_alias(keywords::RESERVED_FOR_TABLE_ALIAS)?; + let alias = self.maybe_parse_table_alias()?; Ok(TableFactor::Pivot { table: Box::new(table), aggregate_functions, @@ -10560,13 +13348,13 @@ impl<'a> Parser<'a> { table: TableFactor, ) -> Result { self.expect_token(&Token::LParen)?; - let value = self.parse_identifier(false)?; - self.expect_keyword(Keyword::FOR)?; - let name = self.parse_identifier(false)?; - self.expect_keyword(Keyword::IN)?; + let value = self.parse_identifier()?; + self.expect_keyword_is(Keyword::FOR)?; + let name = self.parse_identifier()?; + self.expect_keyword_is(Keyword::IN)?; let columns = self.parse_parenthesized_column_list(Mandatory, false)?; self.expect_token(&Token::RParen)?; - let alias = self.parse_optional_table_alias(keywords::RESERVED_FOR_TABLE_ALIAS)?; + let alias = self.maybe_parse_table_alias()?; Ok(TableFactor::Unpivot { table: Box::new(table), value, @@ -10583,7 +13371,7 @@ impl<'a> Parser<'a> { let constraint = self.parse_expr()?; Ok(JoinConstraint::On(constraint)) } else if self.parse_keyword(Keyword::USING) { - let columns = self.parse_parenthesized_column_list(Mandatory, false)?; + let columns = self.parse_parenthesized_qualified_column_list(Mandatory, false)?; Ok(JoinConstraint::Using(columns)) } else { Ok(JoinConstraint::None) @@ -10595,15 +13383,15 @@ impl<'a> Parser<'a> { pub fn parse_grant(&mut self) -> Result { let (privileges, objects) = self.parse_grant_revoke_privileges_objects()?; - self.expect_keyword(Keyword::TO)?; - let grantees = self.parse_comma_separated(|p| p.parse_identifier(false))?; + self.expect_keyword_is(Keyword::TO)?; + let grantees = self.parse_grantees()?; let with_grant_option = self.parse_keywords(&[Keyword::WITH, Keyword::GRANT, Keyword::OPTION]); let granted_by = self .parse_keywords(&[Keyword::GRANTED, Keyword::BY]) - .then(|| self.parse_identifier(false).unwrap()); + .then(|| self.parse_identifier().unwrap()); Ok(Statement::Grant { privileges, @@ -10614,131 +13402,419 @@ impl<'a> Parser<'a> { }) } + fn parse_grantees(&mut self) -> Result, ParserError> { + let mut values = vec![]; + let mut grantee_type = GranteesType::None; + loop { + grantee_type = if self.parse_keyword(Keyword::ROLE) { + GranteesType::Role + } else if self.parse_keyword(Keyword::USER) { + GranteesType::User + } else if self.parse_keyword(Keyword::SHARE) { + GranteesType::Share + } else if self.parse_keyword(Keyword::GROUP) { + GranteesType::Group + } else if self.parse_keyword(Keyword::PUBLIC) { + GranteesType::Public + } else if self.parse_keywords(&[Keyword::DATABASE, Keyword::ROLE]) { + GranteesType::DatabaseRole + } else if self.parse_keywords(&[Keyword::APPLICATION, Keyword::ROLE]) { + GranteesType::ApplicationRole + } else if self.parse_keyword(Keyword::APPLICATION) { + GranteesType::Application + } else { + grantee_type // keep from previous 
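The new `parse_grantees` loop above means a single GRANT can carry a mix of typed grantees. A small sketch using the generic dialect; the object and grantee names are placeholders.

```rust
use sqlparser::ast::Statement;
use sqlparser::dialect::GenericDialect;
use sqlparser::parser::Parser;

fn main() {
    let sql = "GRANT SELECT, USAGE ON my_table TO ROLE analyst, USER alice";
    let statements = Parser::parse_sql(&GenericDialect {}, sql).expect("GRANT should parse");
    // The grantee list keeps the ROLE/USER qualifiers on each entry.
    assert!(matches!(statements[0], Statement::Grant { .. }));
}
```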
iteraton, if not specified + }; + + let grantee = if grantee_type == GranteesType::Public { + Grantee { + grantee_type: grantee_type.clone(), + name: None, + } + } else { + let mut name = self.parse_grantee_name()?; + if self.consume_token(&Token::Colon) { + // Redshift supports namespace prefix for external users and groups: + // : or : + // https://docs.aws.amazon.com/redshift/latest/mgmt/redshift-iam-access-control-native-idp.html + let ident = self.parse_identifier()?; + if let GranteeName::ObjectName(namespace) = name { + name = GranteeName::ObjectName(ObjectName::from(vec![Ident::new( + format!("{}:{}", namespace, ident), + )])); + }; + } + Grantee { + grantee_type: grantee_type.clone(), + name: Some(name), + } + }; + + values.push(grantee); + + if !self.consume_token(&Token::Comma) { + break; + } + } + + Ok(values) + } + pub fn parse_grant_revoke_privileges_objects( &mut self, - ) -> Result<(Privileges, GrantObjects), ParserError> { + ) -> Result<(Privileges, Option), ParserError> { let privileges = if self.parse_keyword(Keyword::ALL) { Privileges::All { with_privileges_keyword: self.parse_keyword(Keyword::PRIVILEGES), } } else { - let (actions, err): (Vec<_>, Vec<_>) = self - .parse_actions_list()? - .into_iter() - .map(|(kw, columns)| match kw { - Keyword::DELETE => Ok(Action::Delete), - Keyword::INSERT => Ok(Action::Insert { columns }), - Keyword::REFERENCES => Ok(Action::References { columns }), - Keyword::SELECT => Ok(Action::Select { columns }), - Keyword::TRIGGER => Ok(Action::Trigger), - Keyword::TRUNCATE => Ok(Action::Truncate), - Keyword::UPDATE => Ok(Action::Update { columns }), - Keyword::USAGE => Ok(Action::Usage), - Keyword::CONNECT => Ok(Action::Connect), - Keyword::CREATE => Ok(Action::Create), - Keyword::EXECUTE => Ok(Action::Execute), - Keyword::TEMPORARY => Ok(Action::Temporary), - // This will cover all future added keywords to - // parse_grant_permission and unhandled in this - // match - _ => Err(kw), - }) - .partition(Result::is_ok); - - if !err.is_empty() { - let errors: Vec = err.into_iter().filter_map(|x| x.err()).collect(); - return Err(ParserError::ParserError(format!( - "INTERNAL ERROR: GRANT/REVOKE unexpected keyword(s) - {errors:?}" - ))); - } - let act = actions.into_iter().filter_map(|x| x.ok()).collect(); - Privileges::Actions(act) + let actions = self.parse_actions_list()?; + Privileges::Actions(actions) }; - self.expect_keyword(Keyword::ON)?; - - let objects = if self.parse_keywords(&[ - Keyword::ALL, - Keyword::TABLES, - Keyword::IN, - Keyword::SCHEMA, - ]) { - GrantObjects::AllTablesInSchema { - schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?, - } - } else if self.parse_keywords(&[ - Keyword::ALL, - Keyword::SEQUENCES, - Keyword::IN, - Keyword::SCHEMA, - ]) { - GrantObjects::AllSequencesInSchema { - schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?, + let objects = if self.parse_keyword(Keyword::ON) { + if self.parse_keywords(&[Keyword::ALL, Keyword::TABLES, Keyword::IN, Keyword::SCHEMA]) { + Some(GrantObjects::AllTablesInSchema { + schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?, + }) + } else if self.parse_keywords(&[ + Keyword::ALL, + Keyword::SEQUENCES, + Keyword::IN, + Keyword::SCHEMA, + ]) { + Some(GrantObjects::AllSequencesInSchema { + schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?, + }) + } else if self.parse_keywords(&[Keyword::RESOURCE, Keyword::MONITOR]) { + Some(GrantObjects::ResourceMonitors(self.parse_comma_separated( + |p| 
p.parse_object_name_with_wildcards(false, true), + )?)) + } else if self.parse_keywords(&[Keyword::COMPUTE, Keyword::POOL]) { + Some(GrantObjects::ComputePools(self.parse_comma_separated( + |p| p.parse_object_name_with_wildcards(false, true), + )?)) + } else if self.parse_keywords(&[Keyword::FAILOVER, Keyword::GROUP]) { + Some(GrantObjects::FailoverGroup(self.parse_comma_separated( + |p| p.parse_object_name_with_wildcards(false, true), + )?)) + } else if self.parse_keywords(&[Keyword::REPLICATION, Keyword::GROUP]) { + Some(GrantObjects::ReplicationGroup(self.parse_comma_separated( + |p| p.parse_object_name_with_wildcards(false, true), + )?)) + } else if self.parse_keywords(&[Keyword::EXTERNAL, Keyword::VOLUME]) { + Some(GrantObjects::ExternalVolumes(self.parse_comma_separated( + |p| p.parse_object_name_with_wildcards(false, true), + )?)) + } else { + let object_type = self.parse_one_of_keywords(&[ + Keyword::SEQUENCE, + Keyword::DATABASE, + Keyword::DATABASE, + Keyword::SCHEMA, + Keyword::TABLE, + Keyword::VIEW, + Keyword::WAREHOUSE, + Keyword::INTEGRATION, + Keyword::VIEW, + Keyword::WAREHOUSE, + Keyword::INTEGRATION, + Keyword::USER, + Keyword::CONNECTION, + ]); + let objects = + self.parse_comma_separated(|p| p.parse_object_name_with_wildcards(false, true)); + match object_type { + Some(Keyword::DATABASE) => Some(GrantObjects::Databases(objects?)), + Some(Keyword::SCHEMA) => Some(GrantObjects::Schemas(objects?)), + Some(Keyword::SEQUENCE) => Some(GrantObjects::Sequences(objects?)), + Some(Keyword::WAREHOUSE) => Some(GrantObjects::Warehouses(objects?)), + Some(Keyword::INTEGRATION) => Some(GrantObjects::Integrations(objects?)), + Some(Keyword::VIEW) => Some(GrantObjects::Views(objects?)), + Some(Keyword::USER) => Some(GrantObjects::Users(objects?)), + Some(Keyword::CONNECTION) => Some(GrantObjects::Connections(objects?)), + Some(Keyword::TABLE) | None => Some(GrantObjects::Tables(objects?)), + _ => unreachable!(), + } } } else { - let object_type = - self.parse_one_of_keywords(&[Keyword::SEQUENCE, Keyword::SCHEMA, Keyword::TABLE]); - let objects = self.parse_comma_separated(|p| p.parse_object_name(false)); - match object_type { - Some(Keyword::SCHEMA) => GrantObjects::Schemas(objects?), - Some(Keyword::SEQUENCE) => GrantObjects::Sequences(objects?), - Some(Keyword::TABLE) | None => GrantObjects::Tables(objects?), - _ => unreachable!(), - } + None }; Ok((privileges, objects)) } - pub fn parse_grant_permission(&mut self) -> Result { - if let Some(kw) = self.parse_one_of_keywords(&[ - Keyword::CONNECT, - Keyword::CREATE, - Keyword::DELETE, - Keyword::EXECUTE, - Keyword::INSERT, - Keyword::REFERENCES, - Keyword::SELECT, - Keyword::TEMPORARY, - Keyword::TRIGGER, - Keyword::TRUNCATE, - Keyword::UPDATE, - Keyword::USAGE, + pub fn parse_grant_permission(&mut self) -> Result { + fn parse_columns(parser: &mut Parser) -> Result>, ParserError> { + let columns = parser.parse_parenthesized_column_list(Optional, false)?; + if columns.is_empty() { + Ok(None) + } else { + Ok(Some(columns)) + } + } + + // Multi-word privileges + if self.parse_keywords(&[Keyword::IMPORTED, Keyword::PRIVILEGES]) { + Ok(Action::ImportedPrivileges) + } else if self.parse_keywords(&[Keyword::ADD, Keyword::SEARCH, Keyword::OPTIMIZATION]) { + Ok(Action::AddSearchOptimization) + } else if self.parse_keywords(&[Keyword::ATTACH, Keyword::LISTING]) { + Ok(Action::AttachListing) + } else if self.parse_keywords(&[Keyword::ATTACH, Keyword::POLICY]) { + Ok(Action::AttachPolicy) + } else if self.parse_keywords(&[Keyword::BIND, 
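The added `GrantObjects` branches above cover Snowflake account-level objects such as warehouses and compute pools. A hedged sketch, assuming `SnowflakeDialect` reaches this shared GRANT code path; the object and role names are invented.

```rust
use sqlparser::dialect::SnowflakeDialect;
use sqlparser::parser::Parser;

fn main() {
    for sql in [
        "GRANT USAGE ON WAREHOUSE reporting_wh TO ROLE analyst",
        "GRANT OPERATE ON COMPUTE POOL tiny_pool TO ROLE admin",
    ] {
        let statements = Parser::parse_sql(&SnowflakeDialect {}, sql).expect("GRANT should parse");
        println!("{}", statements[0]);
    }
}
```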
Keyword::SERVICE, Keyword::ENDPOINT]) { + Ok(Action::BindServiceEndpoint) + } else if self.parse_keywords(&[Keyword::DATABASE, Keyword::ROLE]) { + let role = self.parse_object_name(false)?; + Ok(Action::DatabaseRole { role }) + } else if self.parse_keywords(&[Keyword::EVOLVE, Keyword::SCHEMA]) { + Ok(Action::EvolveSchema) + } else if self.parse_keywords(&[Keyword::IMPORT, Keyword::SHARE]) { + Ok(Action::ImportShare) + } else if self.parse_keywords(&[Keyword::MANAGE, Keyword::VERSIONS]) { + Ok(Action::ManageVersions) + } else if self.parse_keywords(&[Keyword::MANAGE, Keyword::RELEASES]) { + Ok(Action::ManageReleases) + } else if self.parse_keywords(&[Keyword::OVERRIDE, Keyword::SHARE, Keyword::RESTRICTIONS]) { + Ok(Action::OverrideShareRestrictions) + } else if self.parse_keywords(&[ + Keyword::PURCHASE, + Keyword::DATA, + Keyword::EXCHANGE, + Keyword::LISTING, ]) { - let columns = match kw { - Keyword::INSERT | Keyword::REFERENCES | Keyword::SELECT | Keyword::UPDATE => { - let columns = self.parse_parenthesized_column_list(Optional, false)?; - if columns.is_empty() { - None - } else { - Some(columns) - } - } - _ => None, - }; - Ok((kw, columns)) + Ok(Action::PurchaseDataExchangeListing) + } else if self.parse_keywords(&[Keyword::RESOLVE, Keyword::ALL]) { + Ok(Action::ResolveAll) + } else if self.parse_keywords(&[Keyword::READ, Keyword::SESSION]) { + Ok(Action::ReadSession) + + // Single-word privileges + } else if self.parse_keyword(Keyword::APPLY) { + let apply_type = self.parse_action_apply_type()?; + Ok(Action::Apply { apply_type }) + } else if self.parse_keyword(Keyword::APPLYBUDGET) { + Ok(Action::ApplyBudget) + } else if self.parse_keyword(Keyword::AUDIT) { + Ok(Action::Audit) + } else if self.parse_keyword(Keyword::CONNECT) { + Ok(Action::Connect) + } else if self.parse_keyword(Keyword::CREATE) { + let obj_type = self.maybe_parse_action_create_object_type(); + Ok(Action::Create { obj_type }) + } else if self.parse_keyword(Keyword::DELETE) { + Ok(Action::Delete) + } else if self.parse_keyword(Keyword::EXECUTE) { + let obj_type = self.maybe_parse_action_execute_obj_type(); + Ok(Action::Execute { obj_type }) + } else if self.parse_keyword(Keyword::FAILOVER) { + Ok(Action::Failover) + } else if self.parse_keyword(Keyword::INSERT) { + Ok(Action::Insert { + columns: parse_columns(self)?, + }) + } else if self.parse_keyword(Keyword::MANAGE) { + let manage_type = self.parse_action_manage_type()?; + Ok(Action::Manage { manage_type }) + } else if self.parse_keyword(Keyword::MODIFY) { + let modify_type = self.parse_action_modify_type(); + Ok(Action::Modify { modify_type }) + } else if self.parse_keyword(Keyword::MONITOR) { + let monitor_type = self.parse_action_monitor_type(); + Ok(Action::Monitor { monitor_type }) + } else if self.parse_keyword(Keyword::OPERATE) { + Ok(Action::Operate) + } else if self.parse_keyword(Keyword::REFERENCES) { + Ok(Action::References { + columns: parse_columns(self)?, + }) + } else if self.parse_keyword(Keyword::READ) { + Ok(Action::Read) + } else if self.parse_keyword(Keyword::REPLICATE) { + Ok(Action::Replicate) + } else if self.parse_keyword(Keyword::ROLE) { + let role = self.parse_identifier()?; + Ok(Action::Role { role }) + } else if self.parse_keyword(Keyword::SELECT) { + Ok(Action::Select { + columns: parse_columns(self)?, + }) + } else if self.parse_keyword(Keyword::TEMPORARY) { + Ok(Action::Temporary) + } else if self.parse_keyword(Keyword::TRIGGER) { + Ok(Action::Trigger) + } else if self.parse_keyword(Keyword::TRUNCATE) { + Ok(Action::Truncate) + } else 
if self.parse_keyword(Keyword::UPDATE) { + Ok(Action::Update { + columns: parse_columns(self)?, + }) + } else if self.parse_keyword(Keyword::USAGE) { + Ok(Action::Usage) + } else if self.parse_keyword(Keyword::OWNERSHIP) { + Ok(Action::Ownership) } else { self.expected("a privilege keyword", self.peek_token())? } } + fn maybe_parse_action_create_object_type(&mut self) -> Option { + // Multi-word object types + if self.parse_keywords(&[Keyword::APPLICATION, Keyword::PACKAGE]) { + Some(ActionCreateObjectType::ApplicationPackage) + } else if self.parse_keywords(&[Keyword::COMPUTE, Keyword::POOL]) { + Some(ActionCreateObjectType::ComputePool) + } else if self.parse_keywords(&[Keyword::DATA, Keyword::EXCHANGE, Keyword::LISTING]) { + Some(ActionCreateObjectType::DataExchangeListing) + } else if self.parse_keywords(&[Keyword::EXTERNAL, Keyword::VOLUME]) { + Some(ActionCreateObjectType::ExternalVolume) + } else if self.parse_keywords(&[Keyword::FAILOVER, Keyword::GROUP]) { + Some(ActionCreateObjectType::FailoverGroup) + } else if self.parse_keywords(&[Keyword::NETWORK, Keyword::POLICY]) { + Some(ActionCreateObjectType::NetworkPolicy) + } else if self.parse_keywords(&[Keyword::ORGANIZATION, Keyword::LISTING]) { + Some(ActionCreateObjectType::OrganiationListing) + } else if self.parse_keywords(&[Keyword::REPLICATION, Keyword::GROUP]) { + Some(ActionCreateObjectType::ReplicationGroup) + } + // Single-word object types + else if self.parse_keyword(Keyword::ACCOUNT) { + Some(ActionCreateObjectType::Account) + } else if self.parse_keyword(Keyword::APPLICATION) { + Some(ActionCreateObjectType::Application) + } else if self.parse_keyword(Keyword::DATABASE) { + Some(ActionCreateObjectType::Database) + } else if self.parse_keyword(Keyword::INTEGRATION) { + Some(ActionCreateObjectType::Integration) + } else if self.parse_keyword(Keyword::ROLE) { + Some(ActionCreateObjectType::Role) + } else if self.parse_keyword(Keyword::SHARE) { + Some(ActionCreateObjectType::Share) + } else if self.parse_keyword(Keyword::USER) { + Some(ActionCreateObjectType::User) + } else if self.parse_keyword(Keyword::WAREHOUSE) { + Some(ActionCreateObjectType::Warehouse) + } else { + None + } + } + + fn parse_action_apply_type(&mut self) -> Result { + if self.parse_keywords(&[Keyword::AGGREGATION, Keyword::POLICY]) { + Ok(ActionApplyType::AggregationPolicy) + } else if self.parse_keywords(&[Keyword::AUTHENTICATION, Keyword::POLICY]) { + Ok(ActionApplyType::AuthenticationPolicy) + } else if self.parse_keywords(&[Keyword::JOIN, Keyword::POLICY]) { + Ok(ActionApplyType::JoinPolicy) + } else if self.parse_keywords(&[Keyword::MASKING, Keyword::POLICY]) { + Ok(ActionApplyType::MaskingPolicy) + } else if self.parse_keywords(&[Keyword::PACKAGES, Keyword::POLICY]) { + Ok(ActionApplyType::PackagesPolicy) + } else if self.parse_keywords(&[Keyword::PASSWORD, Keyword::POLICY]) { + Ok(ActionApplyType::PasswordPolicy) + } else if self.parse_keywords(&[Keyword::PROJECTION, Keyword::POLICY]) { + Ok(ActionApplyType::ProjectionPolicy) + } else if self.parse_keywords(&[Keyword::ROW, Keyword::ACCESS, Keyword::POLICY]) { + Ok(ActionApplyType::RowAccessPolicy) + } else if self.parse_keywords(&[Keyword::SESSION, Keyword::POLICY]) { + Ok(ActionApplyType::SessionPolicy) + } else if self.parse_keyword(Keyword::TAG) { + Ok(ActionApplyType::Tag) + } else { + self.expected("GRANT APPLY type", self.peek_token()) + } + } + + fn maybe_parse_action_execute_obj_type(&mut self) -> Option { + if self.parse_keywords(&[Keyword::DATA, Keyword::METRIC, Keyword::FUNCTION]) { + 
Some(ActionExecuteObjectType::DataMetricFunction) + } else if self.parse_keywords(&[Keyword::MANAGED, Keyword::ALERT]) { + Some(ActionExecuteObjectType::ManagedAlert) + } else if self.parse_keywords(&[Keyword::MANAGED, Keyword::TASK]) { + Some(ActionExecuteObjectType::ManagedTask) + } else if self.parse_keyword(Keyword::ALERT) { + Some(ActionExecuteObjectType::Alert) + } else if self.parse_keyword(Keyword::TASK) { + Some(ActionExecuteObjectType::Task) + } else { + None + } + } + + fn parse_action_manage_type(&mut self) -> Result { + if self.parse_keywords(&[Keyword::ACCOUNT, Keyword::SUPPORT, Keyword::CASES]) { + Ok(ActionManageType::AccountSupportCases) + } else if self.parse_keywords(&[Keyword::EVENT, Keyword::SHARING]) { + Ok(ActionManageType::EventSharing) + } else if self.parse_keywords(&[Keyword::LISTING, Keyword::AUTO, Keyword::FULFILLMENT]) { + Ok(ActionManageType::ListingAutoFulfillment) + } else if self.parse_keywords(&[Keyword::ORGANIZATION, Keyword::SUPPORT, Keyword::CASES]) { + Ok(ActionManageType::OrganizationSupportCases) + } else if self.parse_keywords(&[Keyword::USER, Keyword::SUPPORT, Keyword::CASES]) { + Ok(ActionManageType::UserSupportCases) + } else if self.parse_keyword(Keyword::GRANTS) { + Ok(ActionManageType::Grants) + } else if self.parse_keyword(Keyword::WAREHOUSES) { + Ok(ActionManageType::Warehouses) + } else { + self.expected("GRANT MANAGE type", self.peek_token()) + } + } + + fn parse_action_modify_type(&mut self) -> Option { + if self.parse_keywords(&[Keyword::LOG, Keyword::LEVEL]) { + Some(ActionModifyType::LogLevel) + } else if self.parse_keywords(&[Keyword::TRACE, Keyword::LEVEL]) { + Some(ActionModifyType::TraceLevel) + } else if self.parse_keywords(&[Keyword::SESSION, Keyword::LOG, Keyword::LEVEL]) { + Some(ActionModifyType::SessionLogLevel) + } else if self.parse_keywords(&[Keyword::SESSION, Keyword::TRACE, Keyword::LEVEL]) { + Some(ActionModifyType::SessionTraceLevel) + } else { + None + } + } + + fn parse_action_monitor_type(&mut self) -> Option { + if self.parse_keyword(Keyword::EXECUTION) { + Some(ActionMonitorType::Execution) + } else if self.parse_keyword(Keyword::SECURITY) { + Some(ActionMonitorType::Security) + } else if self.parse_keyword(Keyword::USAGE) { + Some(ActionMonitorType::Usage) + } else { + None + } + } + + pub fn parse_grantee_name(&mut self) -> Result { + let mut name = self.parse_object_name(false)?; + if self.dialect.supports_user_host_grantee() + && name.0.len() == 1 + && name.0[0].as_ident().is_some() + && self.consume_token(&Token::AtSign) + { + let user = name.0.pop().unwrap().as_ident().unwrap().clone(); + let host = self.parse_identifier()?; + Ok(GranteeName::UserHost { user, host }) + } else { + Ok(GranteeName::ObjectName(name)) + } + } + /// Parse a REVOKE statement pub fn parse_revoke(&mut self) -> Result { let (privileges, objects) = self.parse_grant_revoke_privileges_objects()?; - self.expect_keyword(Keyword::FROM)?; - let grantees = self.parse_comma_separated(|p| p.parse_identifier(false))?; + self.expect_keyword_is(Keyword::FROM)?; + let grantees = self.parse_grantees()?; let granted_by = self .parse_keywords(&[Keyword::GRANTED, Keyword::BY]) - .then(|| self.parse_identifier(false).unwrap()); + .then(|| self.parse_identifier().unwrap()); - let loc = self.peek_token().location; - let cascade = self.parse_keyword(Keyword::CASCADE); - let restrict = self.parse_keyword(Keyword::RESTRICT); - if cascade && restrict { - return parser_err!("Cannot specify both CASCADE and RESTRICT in REVOKE", loc); - } + let cascade = 
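`parse_revoke` now shares `parse_grantees` and the optional cascade parsing with GRANT, as the hunk above shows. A minimal sketch with the generic dialect and placeholder names.

```rust
use sqlparser::ast::Statement;
use sqlparser::dialect::GenericDialect;
use sqlparser::parser::Parser;

fn main() {
    let sql = "REVOKE SELECT ON my_table FROM ROLE analyst CASCADE";
    let statements = Parser::parse_sql(&GenericDialect {}, sql).expect("REVOKE should parse");
    assert!(matches!(statements[0], Statement::Revoke { .. }));
}
```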
self.parse_cascade_option(); Ok(Statement::Revoke { privileges, @@ -10752,7 +13828,10 @@ impl<'a> Parser<'a> { /// Parse an REPLACE statement pub fn parse_replace(&mut self) -> Result { if !dialect_of!(self is MySqlDialect | GenericDialect) { - return parser_err!("Unsupported statement REPLACE", self.peek_token().location); + return parser_err!( + "Unsupported statement REPLACE", + self.peek_token().span.start + ); } let mut insert = self.parse_insert()?; @@ -10772,24 +13851,7 @@ impl<'a> Parser<'a> { /// Parse an INSERT statement pub fn parse_insert(&mut self) -> Result { - let or = if !dialect_of!(self is SQLiteDialect) { - None - } else if self.parse_keywords(&[Keyword::OR, Keyword::REPLACE]) { - Some(SqliteOnConflict::Replace) - } else if self.parse_keywords(&[Keyword::OR, Keyword::ROLLBACK]) { - Some(SqliteOnConflict::Rollback) - } else if self.parse_keywords(&[Keyword::OR, Keyword::ABORT]) { - Some(SqliteOnConflict::Abort) - } else if self.parse_keywords(&[Keyword::OR, Keyword::FAIL]) { - Some(SqliteOnConflict::Fail) - } else if self.parse_keywords(&[Keyword::OR, Keyword::IGNORE]) { - Some(SqliteOnConflict::Ignore) - } else if self.parse_keyword(Keyword::REPLACE) { - Some(SqliteOnConflict::Replace) - } else { - None - }; - + let or = self.parse_conflict_clause(); let priority = if !dialect_of!(self is MySqlDialect | GenericDialect) { None } else if self.parse_keyword(Keyword::LOW_PRIORITY) { @@ -10807,9 +13869,8 @@ impl<'a> Parser<'a> { let replace_into = false; - let action = self.parse_one_of_keywords(&[Keyword::INTO, Keyword::OVERWRITE]); - let into = action == Some(Keyword::INTO); - let overwrite = action == Some(Keyword::OVERWRITE); + let overwrite = self.parse_keyword(Keyword::OVERWRITE); + let into = self.parse_keyword(Keyword::INTO); let local = self.parse_keyword(Keyword::LOCAL); @@ -10831,21 +13892,23 @@ impl<'a> Parser<'a> { } else { // Hive lets you put table here regardless let table = self.parse_keyword(Keyword::TABLE); - let table_name = self.parse_object_name(false)?; + let table_object = self.parse_table_object()?; let table_alias = if dialect_of!(self is PostgreSqlDialect) && self.parse_keyword(Keyword::AS) { - Some(self.parse_identifier(false)?) + Some(self.parse_identifier()?) } else { None }; let is_mysql = dialect_of!(self is MySqlDialect); - let (columns, partitioned, after_columns, source) = - if self.parse_keywords(&[Keyword::DEFAULT, Keyword::VALUES]) { - (vec![], None, vec![], None) - } else { + let (columns, partitioned, after_columns, source, assignments) = if self + .parse_keywords(&[Keyword::DEFAULT, Keyword::VALUES]) + { + (vec![], None, vec![], None, vec![]) + } else { + let (columns, partitioned, after_columns) = if !self.peek_subquery_start() { let columns = self.parse_parenthesized_column_list(Optional, is_mysql)?; let partitioned = self.parse_insert_partition()?; @@ -10855,12 +13918,40 @@ impl<'a> Parser<'a> { } else { vec![] }; + (columns, partitioned, after_columns) + } else { + Default::default() + }; + + let (source, assignments) = if self.peek_keyword(Keyword::FORMAT) + || self.peek_keyword(Keyword::SETTINGS) + { + (None, vec![]) + } else if self.dialect.supports_insert_set() && self.parse_keyword(Keyword::SET) { + (None, self.parse_comma_separated(Parser::parse_assignment)?) 
+ } else { + (Some(self.parse_query()?), vec![]) + }; + + (columns, partitioned, after_columns, source, assignments) + }; - let source = Some(self.parse_query()?); + let (format_clause, settings) = if self.dialect.supports_insert_format() { + // Settings always comes before `FORMAT` for ClickHouse: + // + let settings = self.parse_settings()?; - (columns, partitioned, after_columns, source) + let format = if self.parse_keyword(Keyword::FORMAT) { + Some(self.parse_input_format_clause()?) + } else { + None }; + (format, settings) + } else { + Default::default() + }; + let insert_alias = if dialect_of!(self is MySqlDialect | GenericDialect) && self.parse_keyword(Keyword::AS) { @@ -10887,12 +13978,12 @@ impl<'a> Parser<'a> { None }; - self.expect_keyword(Keyword::DO)?; + self.expect_keyword_is(Keyword::DO)?; let action = if self.parse_keyword(Keyword::NOTHING) { OnConflictAction::DoNothing } else { - self.expect_keyword(Keyword::UPDATE)?; - self.expect_keyword(Keyword::SET)?; + self.expect_keyword_is(Keyword::UPDATE)?; + self.expect_keyword_is(Keyword::SET)?; let assignments = self.parse_comma_separated(Parser::parse_assignment)?; let selection = if self.parse_keyword(Keyword::WHERE) { Some(self.parse_expr()?) @@ -10910,9 +14001,9 @@ impl<'a> Parser<'a> { action, })) } else { - self.expect_keyword(Keyword::DUPLICATE)?; - self.expect_keyword(Keyword::KEY)?; - self.expect_keyword(Keyword::UPDATE)?; + self.expect_keyword_is(Keyword::DUPLICATE)?; + self.expect_keyword_is(Keyword::KEY)?; + self.expect_keyword_is(Keyword::UPDATE)?; let l = self.parse_comma_separated(Parser::parse_assignment)?; Some(OnInsert::DuplicateKeyUpdate(l)) @@ -10929,7 +14020,7 @@ impl<'a> Parser<'a> { Ok(Statement::Insert(Insert { or, - table_name, + table: table_object, table_alias, ignore, into, @@ -10938,16 +14029,57 @@ impl<'a> Parser<'a> { columns, after_columns, source, - table, + assignments, + has_table_keyword: table, on, returning, replace_into, priority, insert_alias, + settings, + format_clause, })) } } + // Parses input format clause used for [ClickHouse]. + // + // + pub fn parse_input_format_clause(&mut self) -> Result { + let ident = self.parse_identifier()?; + let values = self + .maybe_parse(|p| p.parse_comma_separated(|p| p.parse_expr()))? + .unwrap_or_default(); + + Ok(InputFormatClause { ident, values }) + } + + /// Returns true if the immediate tokens look like the + /// beginning of a subquery. 
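For the ClickHouse-style branch above (`supports_insert_format`), SETTINGS are parsed before FORMAT and the query source is omitted entirely. A sketch; the table, setting, and format names are illustrative only.

```rust
use sqlparser::dialect::ClickHouseDialect;
use sqlparser::parser::Parser;

fn main() {
    // No VALUES/SELECT source: the rows follow out-of-band in the named format.
    let sql = "INSERT INTO events SETTINGS async_insert = 1 FORMAT JSONEachRow";
    let statements = Parser::parse_sql(&ClickHouseDialect {}, sql).expect("ClickHouse INSERT should parse");
    println!("{}", statements[0]);
}
```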
`(SELECT ...` + fn peek_subquery_start(&mut self) -> bool { + let [maybe_lparen, maybe_select] = self.peek_tokens(); + Token::LParen == maybe_lparen + && matches!(maybe_select, Token::Word(w) if w.keyword == Keyword::SELECT) + } + + fn parse_conflict_clause(&mut self) -> Option { + if self.parse_keywords(&[Keyword::OR, Keyword::REPLACE]) { + Some(SqliteOnConflict::Replace) + } else if self.parse_keywords(&[Keyword::OR, Keyword::ROLLBACK]) { + Some(SqliteOnConflict::Rollback) + } else if self.parse_keywords(&[Keyword::OR, Keyword::ABORT]) { + Some(SqliteOnConflict::Abort) + } else if self.parse_keywords(&[Keyword::OR, Keyword::FAIL]) { + Some(SqliteOnConflict::Fail) + } else if self.parse_keywords(&[Keyword::OR, Keyword::IGNORE]) { + Some(SqliteOnConflict::Ignore) + } else if self.parse_keyword(Keyword::REPLACE) { + Some(SqliteOnConflict::Replace) + } else { + None + } + } + pub fn parse_insert_partition(&mut self) -> Result>, ParserError> { if self.parse_keyword(Keyword::PARTITION) { self.expect_token(&Token::LParen)?; @@ -10959,6 +14091,22 @@ impl<'a> Parser<'a> { } } + pub fn parse_load_data_table_format( + &mut self, + ) -> Result, ParserError> { + if self.parse_keyword(Keyword::INPUTFORMAT) { + let input_format = self.parse_expr()?; + self.expect_keyword_is(Keyword::SERDE)?; + let serde = self.parse_expr()?; + Ok(Some(HiveLoadDataFormat { + input_format, + serde, + })) + } else { + Ok(None) + } + } + /// Parse an UPDATE statement, returning a `Box`ed SetExpr /// /// This is used to reduce the size of the stack frames in debug builds @@ -10967,15 +14115,23 @@ impl<'a> Parser<'a> { } pub fn parse_update(&mut self) -> Result { + let or = self.parse_conflict_clause(); let table = self.parse_table_and_joins()?; + let from_before_set = if self.parse_keyword(Keyword::FROM) { + Some(UpdateTableFromKind::BeforeSet( + self.parse_table_with_joins()?, + )) + } else { + None + }; self.expect_keyword(Keyword::SET)?; let assignments = self.parse_comma_separated(Parser::parse_assignment)?; - let from = if self.parse_keyword(Keyword::FROM) - && dialect_of!(self is GenericDialect | PostgreSqlDialect | DuckDbDialect | BigQueryDialect | SnowflakeDialect | RedshiftSqlDialect | MsSqlDialect | SQLiteDialect ) - { - Some(self.parse_table_and_joins()?) + let from = if from_before_set.is_none() && self.parse_keyword(Keyword::FROM) { + Some(UpdateTableFromKind::AfterSet( + self.parse_table_with_joins()?, + )) } else { - None + from_before_set }; let selection = if self.parse_keyword(Keyword::WHERE) { Some(self.parse_expr()?) 
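`parse_load` above now distinguishes extension loading from Hive-style bulk loads. A sketch of the `LOAD DATA` path, assuming `HiveDialect` enables `supports_load_data`; the file path and table name are placeholders.

```rust
use sqlparser::dialect::HiveDialect;
use sqlparser::parser::Parser;

fn main() {
    let sql = "LOAD DATA LOCAL INPATH '/tmp/users.csv' OVERWRITE INTO TABLE users";
    let statements = Parser::parse_sql(&HiveDialect {}, sql).expect("LOAD DATA should parse");
    println!("{}", statements[0]);
}
```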
@@ -10993,6 +14149,7 @@ impl<'a> Parser<'a> { from, selection, returning, + or, }) } @@ -11017,45 +14174,61 @@ impl<'a> Parser<'a> { } pub fn parse_function_args(&mut self) -> Result { - if self.peek_nth_token(1) == Token::RArrow { - let name = self.parse_identifier(false)?; - - self.expect_token(&Token::RArrow)?; - let arg = self.parse_wildcard_expr()?.into(); - - Ok(FunctionArg::Named { - name, - arg, - operator: FunctionArgOperator::RightArrow, - }) - } else if self.dialect.supports_named_fn_args_with_eq_operator() - && self.peek_nth_token(1) == Token::Eq - { - let name = self.parse_identifier(false)?; - - self.expect_token(&Token::Eq)?; - let arg = self.parse_wildcard_expr()?.into(); - - Ok(FunctionArg::Named { - name, - arg, - operator: FunctionArgOperator::Equals, - }) - } else if dialect_of!(self is DuckDbDialect | GenericDialect) - && self.peek_nth_token(1) == Token::Assignment - { - let name = self.parse_identifier(false)?; - - self.expect_token(&Token::Assignment)?; - let arg = self.parse_expr()?.into(); - - Ok(FunctionArg::Named { - name, - arg, - operator: FunctionArgOperator::Assignment, - }) + let arg = if self.dialect.supports_named_fn_args_with_expr_name() { + self.maybe_parse(|p| { + let name = p.parse_expr()?; + let operator = p.parse_function_named_arg_operator()?; + let arg = p.parse_wildcard_expr()?.into(); + Ok(FunctionArg::ExprNamed { + name, + arg, + operator, + }) + })? } else { - Ok(FunctionArg::Unnamed(self.parse_wildcard_expr()?.into())) + self.maybe_parse(|p| { + let name = p.parse_identifier()?; + let operator = p.parse_function_named_arg_operator()?; + let arg = p.parse_wildcard_expr()?.into(); + Ok(FunctionArg::Named { + name, + arg, + operator, + }) + })? + }; + if let Some(arg) = arg { + return Ok(arg); + } + Ok(FunctionArg::Unnamed(self.parse_wildcard_expr()?.into())) + } + + fn parse_function_named_arg_operator(&mut self) -> Result { + if self.parse_keyword(Keyword::VALUE) { + return Ok(FunctionArgOperator::Value); + } + let tok = self.next_token(); + match tok.token { + Token::RArrow if self.dialect.supports_named_fn_args_with_rarrow_operator() => { + Ok(FunctionArgOperator::RightArrow) + } + Token::Eq if self.dialect.supports_named_fn_args_with_eq_operator() => { + Ok(FunctionArgOperator::Equals) + } + Token::Assignment + if self + .dialect + .supports_named_fn_args_with_assignment_operator() => + { + Ok(FunctionArgOperator::Assignment) + } + Token::Colon if self.dialect.supports_named_fn_args_with_colon_operator() => { + Ok(FunctionArgOperator::Colon) + } + _ => { + self.prev_token(); + self.expected("argument operator", tok) + } } } @@ -11099,19 +14272,24 @@ impl<'a> Parser<'a> { /// FIRST_VALUE(x IGNORE NULL); /// ``` fn parse_function_argument_list(&mut self) -> Result { + let mut clauses = vec![]; + + // For MSSQL empty argument list with json-null-clause case, e.g. `JSON_ARRAY(NULL ON NULL)` + if let Some(null_clause) = self.parse_json_null_clause() { + clauses.push(FunctionArgumentClause::JsonNullClause(null_clause)); + } + if self.consume_token(&Token::RParen) { return Ok(FunctionArgumentList { duplicate_treatment: None, args: vec![], - clauses: vec![], + clauses, }); } let duplicate_treatment = self.parse_duplicate_treatment()?; let args = self.parse_comma_separated(Parser::parse_function_args)?; - let mut clauses = vec![]; - if self.dialect.supports_window_function_null_treatment_arg() { if let Some(null_treatment) = self.parse_null_treatment()? 
{ clauses.push(FunctionArgumentClause::IgnoreOrRespectNulls(null_treatment)); @@ -11145,13 +14323,17 @@ impl<'a> Parser<'a> { if dialect_of!(self is GenericDialect | MySqlDialect) && self.parse_keyword(Keyword::SEPARATOR) { - clauses.push(FunctionArgumentClause::Separator(self.parse_value()?)); + clauses.push(FunctionArgumentClause::Separator(self.parse_value()?.value)); } if let Some(on_overflow) = self.parse_listagg_on_overflow()? { clauses.push(FunctionArgumentClause::OnOverflow(on_overflow)); } + if let Some(null_clause) = self.parse_json_null_clause() { + clauses.push(FunctionArgumentClause::JsonNullClause(null_clause)); + } + self.expect_token(&Token::RParen)?; Ok(FunctionArgumentList { duplicate_treatment, @@ -11160,8 +14342,19 @@ impl<'a> Parser<'a> { }) } + /// Parses MSSQL's json-null-clause + fn parse_json_null_clause(&mut self) -> Option { + if self.parse_keywords(&[Keyword::ABSENT, Keyword::ON, Keyword::NULL]) { + Some(JsonNullClause::AbsentOnNull) + } else if self.parse_keywords(&[Keyword::NULL, Keyword::ON, Keyword::NULL]) { + Some(JsonNullClause::NullOnNull) + } else { + None + } + } + fn parse_duplicate_treatment(&mut self) -> Result, ParserError> { - let loc = self.peek_token().location; + let loc = self.peek_token().span.start; match ( self.parse_keyword(Keyword::ALL), self.parse_keyword(Keyword::DISTINCT), @@ -11175,31 +14368,38 @@ impl<'a> Parser<'a> { /// Parse a comma-delimited list of projections after SELECT pub fn parse_select_item(&mut self) -> Result { + let prefix = self + .parse_one_of_keywords( + self.dialect + .get_reserved_keywords_for_select_item_operator(), + ) + .map(|keyword| Ident::new(format!("{:?}", keyword))); + match self.parse_wildcard_expr()? { - Expr::QualifiedWildcard(prefix) => Ok(SelectItem::QualifiedWildcard( - prefix, - self.parse_wildcard_additional_options()?, + Expr::QualifiedWildcard(prefix, token) => Ok(SelectItem::QualifiedWildcard( + SelectItemQualifiedWildcardKind::ObjectName(prefix), + self.parse_wildcard_additional_options(token.0)?, )), - Expr::Wildcard => Ok(SelectItem::Wildcard( - self.parse_wildcard_additional_options()?, + Expr::Wildcard(token) => Ok(SelectItem::Wildcard( + self.parse_wildcard_additional_options(token.0)?, )), Expr::Identifier(v) if v.value.to_lowercase() == "from" && v.quote_style.is_none() => { parser_err!( format!("Expected an expression, found: {}", v), - self.peek_token().location + self.peek_token().span.start ) } Expr::BinaryOp { left, op: BinaryOperator::Eq, right, - } if self.dialect.supports_eq_alias_assigment() + } if self.dialect.supports_eq_alias_assignment() && matches!(left.as_ref(), Expr::Identifier(_)) => { let Expr::Identifier(alias) = *left else { return parser_err!( "BUG: expected identifier expression as alias", - self.peek_token().location + self.peek_token().span.start ); }; Ok(SelectItem::ExprWithAlias { @@ -11207,11 +14407,23 @@ impl<'a> Parser<'a> { alias, }) } + expr if self.dialect.supports_select_expr_star() + && self.consume_tokens(&[Token::Period, Token::Mul]) => + { + let wildcard_token = self.get_previous_token().clone(); + Ok(SelectItem::QualifiedWildcard( + SelectItemQualifiedWildcardKind::Expr(expr), + self.parse_wildcard_additional_options(wildcard_token)?, + )) + } expr => self - .parse_optional_alias(keywords::RESERVED_FOR_COLUMN_ALIAS) + .maybe_parse_select_item_alias() .map(|alias| match alias { - Some(alias) => SelectItem::ExprWithAlias { expr, alias }, - None => SelectItem::UnnamedExpr(expr), + Some(alias) => SelectItem::ExprWithAlias { + expr: 
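The MSSQL json-null-clause can appear either alone in an empty argument list or after the regular arguments, as handled above. A sketch, assuming `MsSqlDialect` routes `JSON_ARRAY`/`JSON_OBJECT` through the generic function-call parser.

```rust
use sqlparser::dialect::MsSqlDialect;
use sqlparser::parser::Parser;

fn main() {
    for sql in [
        "SELECT JSON_ARRAY(1, 2, NULL ABSENT ON NULL)",
        "SELECT JSON_OBJECT(NULL ON NULL)",
    ] {
        let statements = Parser::parse_sql(&MsSqlDialect {}, sql).expect("json-null-clause should parse");
        println!("{}", statements[0]);
    }
}
```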
maybe_prefixed_expr(expr, prefix), + alias, + }, + None => SelectItem::UnnamedExpr(maybe_prefixed_expr(expr, prefix)), }), } } @@ -11221,6 +14433,7 @@ impl<'a> Parser<'a> { /// If it is not possible to parse it, will return an option. pub fn parse_wildcard_additional_options( &mut self, + wildcard_token: TokenWithSpan, ) -> Result { let opt_ilike = if dialect_of!(self is GenericDialect | SnowflakeDialect) { self.parse_optional_select_item_ilike()? @@ -11252,6 +14465,7 @@ impl<'a> Parser<'a> { }; Ok(WildcardAdditionalOptions { + wildcard_token: wildcard_token.into(), opt_ilike, opt_exclude, opt_except, @@ -11287,12 +14501,11 @@ impl<'a> Parser<'a> { ) -> Result, ParserError> { let opt_exclude = if self.parse_keyword(Keyword::EXCLUDE) { if self.consume_token(&Token::LParen) { - let columns = - self.parse_comma_separated(|parser| parser.parse_identifier(false))?; + let columns = self.parse_comma_separated(|parser| parser.parse_identifier())?; self.expect_token(&Token::RParen)?; Some(ExcludeSelectItem::Multiple(columns)) } else { - let column = self.parse_identifier(false)?; + let column = self.parse_identifier()?; Some(ExcludeSelectItem::Single(column)) } } else { @@ -11325,7 +14538,7 @@ impl<'a> Parser<'a> { } } else { // Clickhouse allows EXCEPT column_name - let ident = self.parse_identifier(false)?; + let ident = self.parse_identifier()?; Some(ExceptSelectItem { first_element: ident, additional_elements: vec![], @@ -11383,7 +14596,7 @@ impl<'a> Parser<'a> { pub fn parse_replace_elements(&mut self) -> Result { let expr = self.parse_expr()?; let as_keyword = self.parse_keyword(Keyword::AS); - let ident = self.parse_identifier(false)?; + let ident = self.parse_identifier()?; Ok(ReplaceSelectElement { expr, column_name: ident, @@ -11403,20 +14616,44 @@ impl<'a> Parser<'a> { } } - /// Parse an expression, optionally followed by ASC or DESC (used in ORDER BY) + /// Parse an [OrderByExpr] expression. pub fn parse_order_by_expr(&mut self) -> Result { - let expr = self.parse_expr()?; + self.parse_order_by_expr_inner(false) + .map(|(order_by, _)| order_by) + } - let asc = self.parse_asc_desc(); + /// Parse an [IndexColumn]. + pub fn parse_create_index_expr(&mut self) -> Result { + self.parse_order_by_expr_inner(true) + .map(|(column, operator_class)| IndexColumn { + column, + operator_class, + }) + } - let nulls_first = if self.parse_keywords(&[Keyword::NULLS, Keyword::FIRST]) { - Some(true) - } else if self.parse_keywords(&[Keyword::NULLS, Keyword::LAST]) { - Some(false) + fn parse_order_by_expr_inner( + &mut self, + with_operator_class: bool, + ) -> Result<(OrderByExpr, Option), ParserError> { + let expr = self.parse_expr()?; + + let operator_class: Option = if with_operator_class { + // We check that if non of the following keywords are present, then we parse an + // identifier as operator class. + if self + .peek_one_of_keywords(&[Keyword::ASC, Keyword::DESC, Keyword::NULLS, Keyword::WITH]) + .is_some() + { + None + } else { + self.maybe_parse(|parser| parser.parse_identifier())? 
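`parse_create_index_expr` above only treats a trailing identifier as an operator class when it is not one of the ordering keywords. A sketch, assuming CREATE INDEX column parsing is wired to this helper; the index and operator-class names follow the PostgreSQL documentation but are otherwise arbitrary.

```rust
use sqlparser::dialect::PostgreSqlDialect;
use sqlparser::parser::Parser;

fn main() {
    // text_pattern_ops is parsed as the operator class, DESC NULLS LAST as ordering options.
    let sql = "CREATE INDEX title_idx ON films (title text_pattern_ops DESC NULLS LAST)";
    let statements = Parser::parse_sql(&PostgreSqlDialect {}, sql).expect("CREATE INDEX should parse");
    println!("{}", statements[0]);
}
```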
+ } } else { None }; + let options = self.parse_order_by_options()?; + let with_fill = if dialect_of!(self is ClickHouseDialect | GenericDialect) && self.parse_keywords(&[Keyword::WITH, Keyword::FILL]) { @@ -11425,12 +14662,28 @@ impl<'a> Parser<'a> { None }; - Ok(OrderByExpr { - expr, - asc, - nulls_first, - with_fill, - }) + Ok(( + OrderByExpr { + expr, + options, + with_fill, + }, + operator_class, + )) + } + + fn parse_order_by_options(&mut self) -> Result { + let asc = self.parse_asc_desc(); + + let nulls_first = if self.parse_keywords(&[Keyword::NULLS, Keyword::FIRST]) { + Some(true) + } else if self.parse_keywords(&[Keyword::NULLS, Keyword::LAST]) { + Some(false) + } else { + None + }; + + Ok(OrderByOptions { asc, nulls_first }) } // Parse a WITH FILL clause (ClickHouse dialect) @@ -11457,7 +14710,7 @@ impl<'a> Parser<'a> { Ok(WithFill { from, to, step }) } - // Parse a set of comma seperated INTERPOLATE expressions (ClickHouse dialect) + // Parse a set of comma separated INTERPOLATE expressions (ClickHouse dialect) // that follow the INTERPOLATE keyword in an ORDER BY clause with the WITH FILL modifier pub fn parse_interpolations(&mut self) -> Result, ParserError> { if !self.parse_keyword(Keyword::INTERPOLATE) { @@ -11480,7 +14733,7 @@ impl<'a> Parser<'a> { // Parse a INTERPOLATE expression (ClickHouse dialect) pub fn parse_interpolation(&mut self) -> Result { - let column = self.parse_identifier(false)?; + let column = self.parse_identifier()?; let expr = if self.parse_keyword(Keyword::AS) { Some(self.parse_expr()?) } else { @@ -11499,7 +14752,7 @@ impl<'a> Parser<'a> { } else { let next_token = self.next_token(); let quantity = match next_token.token { - Token::Number(s, _) => Self::parse::(s, next_token.location)?, + Token::Number(s, _) => Self::parse::(s, next_token.span.start)?, _ => self.expected("literal int", next_token)?, }; Some(TopQuantity::Constant(quantity)) @@ -11614,11 +14867,15 @@ impl<'a> Parser<'a> { } pub fn parse_start_transaction(&mut self) -> Result { - self.expect_keyword(Keyword::TRANSACTION)?; + self.expect_keyword_is(Keyword::TRANSACTION)?; Ok(Statement::StartTransaction { modes: self.parse_transaction_modes()?, begin: false, + transaction: Some(BeginTransactionKind::Transaction), modifier: None, + statements: vec![], + exception_statements: None, + has_end_keyword: false, }) } @@ -11631,20 +14888,43 @@ impl<'a> Parser<'a> { Some(TransactionModifier::Immediate) } else if self.parse_keyword(Keyword::EXCLUSIVE) { Some(TransactionModifier::Exclusive) + } else if self.parse_keyword(Keyword::TRY) { + Some(TransactionModifier::Try) + } else if self.parse_keyword(Keyword::CATCH) { + Some(TransactionModifier::Catch) } else { None }; - let _ = self.parse_one_of_keywords(&[Keyword::TRANSACTION, Keyword::WORK]); + let transaction = match self.parse_one_of_keywords(&[Keyword::TRANSACTION, Keyword::WORK]) { + Some(Keyword::TRANSACTION) => Some(BeginTransactionKind::Transaction), + Some(Keyword::WORK) => Some(BeginTransactionKind::Work), + _ => None, + }; Ok(Statement::StartTransaction { modes: self.parse_transaction_modes()?, begin: true, + transaction, modifier, + statements: vec![], + exception_statements: None, + has_end_keyword: false, }) } pub fn parse_end(&mut self) -> Result { + let modifier = if !self.dialect.supports_end_transaction_modifier() { + None + } else if self.parse_keyword(Keyword::TRY) { + Some(TransactionModifier::Try) + } else if self.parse_keyword(Keyword::CATCH) { + Some(TransactionModifier::Catch) + } else { + None + }; Ok(Statement::Commit { 
chain: self.parse_commit_rollback_chain()?, + end: true, + modifier, }) } @@ -11661,6 +14941,8 @@ impl<'a> Parser<'a> { TransactionIsolationLevel::RepeatableRead } else if self.parse_keyword(Keyword::SERIALIZABLE) { TransactionIsolationLevel::Serializable + } else if self.parse_keyword(Keyword::SNAPSHOT) { + TransactionIsolationLevel::Snapshot } else { self.expected("isolation level", self.peek_token())? }; @@ -11687,6 +14969,8 @@ impl<'a> Parser<'a> { pub fn parse_commit(&mut self) -> Result { Ok(Statement::Commit { chain: self.parse_commit_rollback_chain()?, + end: false, + modifier: None, }) } @@ -11701,7 +14985,7 @@ impl<'a> Parser<'a> { let _ = self.parse_one_of_keywords(&[Keyword::TRANSACTION, Keyword::WORK]); if self.parse_keyword(Keyword::AND) { let chain = !self.parse_keyword(Keyword::NO); - self.expect_keyword(Keyword::CHAIN)?; + self.expect_keyword_is(Keyword::CHAIN)?; Ok(chain) } else { Ok(false) @@ -11711,7 +14995,7 @@ impl<'a> Parser<'a> { pub fn parse_rollback_savepoint(&mut self) -> Result, ParserError> { if self.parse_keyword(Keyword::TO) { let _ = self.parse_keyword(Keyword::SAVEPOINT); - let savepoint = self.parse_identifier(false)?; + let savepoint = self.parse_identifier()?; Ok(Some(savepoint)) } else { @@ -11719,39 +15003,101 @@ impl<'a> Parser<'a> { } } + /// Parse a 'RAISERROR' statement + pub fn parse_raiserror(&mut self) -> Result { + self.expect_token(&Token::LParen)?; + let message = Box::new(self.parse_expr()?); + self.expect_token(&Token::Comma)?; + let severity = Box::new(self.parse_expr()?); + self.expect_token(&Token::Comma)?; + let state = Box::new(self.parse_expr()?); + let arguments = if self.consume_token(&Token::Comma) { + self.parse_comma_separated(Parser::parse_expr)? + } else { + vec![] + }; + self.expect_token(&Token::RParen)?; + let options = if self.parse_keyword(Keyword::WITH) { + self.parse_comma_separated(Parser::parse_raiserror_option)? + } else { + vec![] + }; + Ok(Statement::RaisError { + message, + severity, + state, + arguments, + options, + }) + } + + pub fn parse_raiserror_option(&mut self) -> Result { + match self.expect_one_of_keywords(&[Keyword::LOG, Keyword::NOWAIT, Keyword::SETERROR])? 
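A sketch of the RAISERROR grammar implemented above: message, severity, and state are required, optional substitution arguments follow, and WITH takes a comma-separated option list. It assumes the statement keyword is routed to `parse_raiserror` as this changeset implies; the message text is invented.

```rust
use sqlparser::dialect::MsSqlDialect;
use sqlparser::parser::Parser;

fn main() {
    let sql = "RAISERROR('Update failed in step %s', 16, 1, 'load') WITH NOWAIT, LOG";
    let statements = Parser::parse_sql(&MsSqlDialect {}, sql).expect("RAISERROR should parse");
    println!("{}", statements[0]);
}
```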
{ + Keyword::LOG => Ok(RaisErrorOption::Log), + Keyword::NOWAIT => Ok(RaisErrorOption::NoWait), + Keyword::SETERROR => Ok(RaisErrorOption::SetError), + _ => self.expected( + "LOG, NOWAIT OR SETERROR raiserror option", + self.peek_token(), + ), + } + } + pub fn parse_deallocate(&mut self) -> Result { let prepare = self.parse_keyword(Keyword::PREPARE); - let name = self.parse_identifier(false)?; + let name = self.parse_identifier()?; Ok(Statement::Deallocate { name, prepare }) } pub fn parse_execute(&mut self) -> Result { - let name = self.parse_identifier(false)?; + let name = if self.dialect.supports_execute_immediate() + && self.parse_keyword(Keyword::IMMEDIATE) + { + None + } else { + let name = self.parse_object_name(false)?; + Some(name) + }; - let mut parameters = vec![]; - if self.consume_token(&Token::LParen) { - parameters = self.parse_comma_separated(Parser::parse_expr)?; + let has_parentheses = self.consume_token(&Token::LParen); + + let end_token = match (has_parentheses, self.peek_token().token) { + (true, _) => Token::RParen, + (false, Token::EOF) => Token::EOF, + (false, Token::Word(w)) if w.keyword == Keyword::USING => Token::Word(w), + (false, _) => Token::SemiColon, + }; + + let parameters = self.parse_comma_separated0(Parser::parse_expr, end_token)?; + + if has_parentheses { self.expect_token(&Token::RParen)?; } - let mut using = vec![]; - if self.parse_keyword(Keyword::USING) { - using.push(self.parse_expr()?); + let into = if self.parse_keyword(Keyword::INTO) { + self.parse_comma_separated(Self::parse_identifier)? + } else { + vec![] + }; - while self.consume_token(&Token::Comma) { - using.push(self.parse_expr()?); - } + let using = if self.parse_keyword(Keyword::USING) { + self.parse_comma_separated(Self::parse_expr_with_alias)? 
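`parse_execute` above now handles both classic prepared-statement execution and `EXECUTE IMMEDIATE` for dialects that opt in. A sketch, assuming `BigQueryDialect` enables `supports_execute_immediate`; the embedded query string is a placeholder.

```rust
use sqlparser::dialect::BigQueryDialect;
use sqlparser::parser::Parser;

fn main() {
    let sql = "EXECUTE IMMEDIATE 'SELECT 1'";
    let statements = Parser::parse_sql(&BigQueryDialect {}, sql).expect("EXECUTE IMMEDIATE should parse");
    println!("{}", statements[0]);
}
```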
+ } else { + vec![] }; Ok(Statement::Execute { + immediate: name.is_none(), name, parameters, + has_parentheses, + into, using, }) } pub fn parse_prepare(&mut self) -> Result { - let name = self.parse_identifier(false)?; + let name = self.parse_identifier()?; let mut data_types = vec![]; if self.consume_token(&Token::LParen) { @@ -11759,7 +15105,7 @@ impl<'a> Parser<'a> { self.expect_token(&Token::RParen)?; } - self.expect_keyword(Keyword::AS)?; + self.expect_keyword_is(Keyword::AS)?; let statement = Box::new(self.parse_statement()?); Ok(Statement::Prepare { name, @@ -11773,8 +15119,8 @@ impl<'a> Parser<'a> { let query = self.parse_query()?; self.expect_token(&Token::RParen)?; - self.expect_keyword(Keyword::TO)?; - let to = self.parse_identifier(false)?; + self.expect_keyword_is(Keyword::TO)?; + let to = self.parse_identifier()?; let with_options = self.parse_options(Keyword::WITH)?; @@ -11788,16 +15134,15 @@ impl<'a> Parser<'a> { pub fn parse_merge_clauses(&mut self) -> Result, ParserError> { let mut clauses = vec![]; loop { - if self.peek_token() == Token::EOF || self.peek_token() == Token::SemiColon { + if !(self.parse_keyword(Keyword::WHEN)) { break; } - self.expect_keyword(Keyword::WHEN)?; let mut clause_kind = MergeClauseKind::Matched; if self.parse_keyword(Keyword::NOT) { clause_kind = MergeClauseKind::NotMatched; } - self.expect_keyword(Keyword::MATCHED)?; + self.expect_keyword_is(Keyword::MATCHED)?; if matches!(clause_kind, MergeClauseKind::NotMatched) && self.parse_keywords(&[Keyword::BY, Keyword::SOURCE]) @@ -11815,7 +15160,7 @@ impl<'a> Parser<'a> { None }; - self.expect_keyword(Keyword::THEN)?; + self.expect_keyword_is(Keyword::THEN)?; let merge_clause = match self.parse_one_of_keywords(&[ Keyword::UPDATE, @@ -11831,7 +15176,7 @@ impl<'a> Parser<'a> { "UPDATE is not allowed in a {clause_kind} merge clause" ))); } - self.expect_keyword(Keyword::SET)?; + self.expect_keyword_is(Keyword::SET)?; MergeAction::Update { assignments: self.parse_comma_separated(Parser::parse_assignment)?, } @@ -11864,7 +15209,7 @@ impl<'a> Parser<'a> { { MergeInsertKind::Row } else { - self.expect_keyword(Keyword::VALUES)?; + self.expect_keyword_is(Keyword::VALUES)?; let values = self.parse_values(is_mysql)?; MergeInsertKind::Values(values) }; @@ -11885,16 +15230,49 @@ impl<'a> Parser<'a> { Ok(clauses) } + fn parse_output(&mut self) -> Result { + self.expect_keyword_is(Keyword::OUTPUT)?; + let select_items = self.parse_projection()?; + self.expect_keyword_is(Keyword::INTO)?; + let into_table = self.parse_select_into()?; + + Ok(OutputClause { + select_items, + into_table, + }) + } + + fn parse_select_into(&mut self) -> Result { + let temporary = self + .parse_one_of_keywords(&[Keyword::TEMP, Keyword::TEMPORARY]) + .is_some(); + let unlogged = self.parse_keyword(Keyword::UNLOGGED); + let table = self.parse_keyword(Keyword::TABLE); + let name = self.parse_object_name(false)?; + + Ok(SelectInto { + temporary, + unlogged, + table, + name, + }) + } + pub fn parse_merge(&mut self) -> Result { let into = self.parse_keyword(Keyword::INTO); let table = self.parse_table_factor()?; - self.expect_keyword(Keyword::USING)?; + self.expect_keyword_is(Keyword::USING)?; let source = self.parse_table_factor()?; - self.expect_keyword(Keyword::ON)?; + self.expect_keyword_is(Keyword::ON)?; let on = self.parse_expr()?; let clauses = self.parse_merge_clauses()?; + let output = if self.peek_keyword(Keyword::OUTPUT) { + Some(self.parse_output()?) 
+ } else { + None + }; Ok(Statement::Merge { into, @@ -11902,11 +15280,12 @@ impl<'a> Parser<'a> { source, on: Box::new(on), clauses, + output, }) } fn parse_pragma_value(&mut self) -> Result { - match self.parse_value()? { + match self.parse_value()?.value { v @ Value::SingleQuotedString(_) => Ok(v), v @ Value::DoubleQuotedString(_) => Ok(v), v @ Value::Number(_, _) => Ok(v), @@ -11946,15 +15325,40 @@ impl<'a> Parser<'a> { /// `INSTALL [extension_name]` pub fn parse_install(&mut self) -> Result { - let extension_name = self.parse_identifier(false)?; + let extension_name = self.parse_identifier()?; Ok(Statement::Install { extension_name }) } - /// `LOAD [extension_name]` + /// Parse a SQL LOAD statement pub fn parse_load(&mut self) -> Result { - let extension_name = self.parse_identifier(false)?; - Ok(Statement::Load { extension_name }) + if self.dialect.supports_load_extension() { + let extension_name = self.parse_identifier()?; + Ok(Statement::Load { extension_name }) + } else if self.parse_keyword(Keyword::DATA) && self.dialect.supports_load_data() { + let local = self.parse_one_of_keywords(&[Keyword::LOCAL]).is_some(); + self.expect_keyword_is(Keyword::INPATH)?; + let inpath = self.parse_literal_string()?; + let overwrite = self.parse_one_of_keywords(&[Keyword::OVERWRITE]).is_some(); + self.expect_keyword_is(Keyword::INTO)?; + self.expect_keyword_is(Keyword::TABLE)?; + let table_name = self.parse_object_name(false)?; + let partitioned = self.parse_insert_partition()?; + let table_format = self.parse_load_data_table_format()?; + Ok(Statement::LoadData { + local, + inpath, + overwrite, + table_name, + partitioned, + table_format, + }) + } else { + self.expected( + "`DATA` or an extension name after `LOAD`", + self.peek_token(), + ) + } } /// ```sql @@ -11962,13 +15366,13 @@ impl<'a> Parser<'a> { /// ``` /// [ClickHouse](https://clickhouse.com/docs/en/sql-reference/statements/optimize) pub fn parse_optimize_table(&mut self) -> Result { - self.expect_keyword(Keyword::TABLE)?; + self.expect_keyword_is(Keyword::TABLE)?; let name = self.parse_object_name(false)?; let on_cluster = self.parse_optional_on_cluster()?; let partition = if self.parse_keyword(Keyword::PARTITION) { if self.parse_keyword(Keyword::ID) { - Some(Partition::Identifier(self.parse_identifier(false)?)) + Some(Partition::Identifier(self.parse_identifier()?)) } else { Some(Partition::Expr(self.parse_expr()?)) } @@ -12015,7 +15419,7 @@ impl<'a> Parser<'a> { // [ OWNED BY { table_name.column_name | NONE } ] let owned_by = if self.parse_keywords(&[Keyword::OWNED, Keyword::BY]) { if self.parse_keywords(&[Keyword::NONE]) { - Some(ObjectName(vec![Ident::new("NONE")])) + Some(ObjectName::from(vec![Ident::new("NONE")])) } else { Some(self.parse_object_name(false)?) } @@ -12083,13 +15487,13 @@ impl<'a> Parser<'a> { } pub fn parse_named_window(&mut self) -> Result { - let ident = self.parse_identifier(false)?; - self.expect_keyword(Keyword::AS)?; + let ident = self.parse_identifier()?; + self.expect_keyword_is(Keyword::AS)?; let window_expr = if self.consume_token(&Token::LParen) { NamedWindowExpr::WindowSpec(self.parse_window_spec()?) } else if self.dialect.supports_window_clause_named_window_reference() { - NamedWindowExpr::NamedWindow(self.parse_identifier(false)?) + NamedWindowExpr::NamedWindow(self.parse_identifier()?) 
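`parse_output` above requires the projection to be followed by `INTO` and a target, mirroring the MSSQL form this changeset targets. A sketch with placeholder table names.

```rust
use sqlparser::dialect::MsSqlDialect;
use sqlparser::parser::Parser;

fn main() {
    let sql = "MERGE INTO target AS t USING source AS s ON t.id = s.id \
               WHEN MATCHED THEN UPDATE SET t.v = s.v \
               OUTPUT inserted.id INTO change_log";
    let statements = Parser::parse_sql(&MsSqlDialect {}, sql).expect("MERGE ... OUTPUT should parse");
    println!("{}", statements[0]);
}
```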
} else { return self.expected("(", self.peek_token()); }; @@ -12100,15 +15504,15 @@ impl<'a> Parser<'a> { pub fn parse_create_procedure(&mut self, or_alter: bool) -> Result { let name = self.parse_object_name(false)?; let params = self.parse_optional_procedure_parameters()?; - self.expect_keyword(Keyword::AS)?; - self.expect_keyword(Keyword::BEGIN)?; - let statements = self.parse_statements()?; - self.expect_keyword(Keyword::END)?; + self.expect_keyword_is(Keyword::AS)?; + + let body = self.parse_conditional_statements(&[Keyword::END])?; + Ok(Statement::CreateProcedure { name, or_alter, params, - body: statements, + body, }) } @@ -12148,7 +15552,11 @@ impl<'a> Parser<'a> { pub fn parse_create_type(&mut self) -> Result { let name = self.parse_object_name(false)?; - self.expect_keyword(Keyword::AS)?; + self.expect_keyword_is(Keyword::AS)?; + + if self.parse_keyword(Keyword::ENUM) { + return self.parse_create_type_enum(name); + } let mut attributes = vec![]; if !self.consume_token(&Token::LParen) || self.consume_token(&Token::RParen) { @@ -12159,7 +15567,7 @@ impl<'a> Parser<'a> { } loop { - let attr_name = self.parse_identifier(false)?; + let attr_name = self.parse_identifier()?; let attr_data_type = self.parse_data_type()?; let attr_collation = if self.parse_keyword(Keyword::COLLATE) { Some(self.parse_object_name(false)?) @@ -12186,9 +15594,23 @@ impl<'a> Parser<'a> { }) } + /// Parse remainder of `CREATE TYPE AS ENUM` statement (see [Statement::CreateType] and [Self::parse_create_type]) + /// + /// See [PostgreSQL](https://www.postgresql.org/docs/current/sql-createtype.html) + pub fn parse_create_type_enum(&mut self, name: ObjectName) -> Result { + self.expect_token(&Token::LParen)?; + let labels = self.parse_comma_separated0(|p| p.parse_identifier(), Token::RParen)?; + self.expect_token(&Token::RParen)?; + + Ok(Statement::CreateType { + name, + representation: UserDefinedTypeRepresentation::Enum { labels }, + }) + } + fn parse_parenthesized_identifiers(&mut self) -> Result, ParserError> { self.expect_token(&Token::LParen)?; - let partitions = self.parse_comma_separated(|p| p.parse_identifier(false))?; + let partitions = self.parse_comma_separated(|p| p.parse_identifier())?; self.expect_token(&Token::RParen)?; Ok(partitions) } @@ -12198,7 +15620,7 @@ impl<'a> Parser<'a> { if self.parse_keyword(Keyword::FIRST) { Ok(Some(MySQLColumnPosition::First)) } else if self.parse_keyword(Keyword::AFTER) { - let ident = self.parse_identifier(false)?; + let ident = self.parse_identifier()?; Ok(Some(MySQLColumnPosition::After(ident))) } else { Ok(None) @@ -12208,8 +15630,25 @@ impl<'a> Parser<'a> { } } + /// Parse [Statement::Print] + fn parse_print(&mut self) -> Result { + Ok(Statement::Print(PrintStatement { + message: Box::new(self.parse_expr()?), + })) + } + + /// Parse [Statement::Return] + fn parse_return(&mut self) -> Result { + match self.maybe_parse(|p| p.parse_expr())? { + Some(expr) => Ok(Statement::Return(ReturnStatement { + value: Some(ReturnStatementValue::Expr(expr)), + })), + None => Ok(Statement::Return(ReturnStatement { value: None })), + } + } + /// Consume the parser and return its underlying token buffer - pub fn into_tokens(self) -> Vec { + pub fn into_tokens(self) -> Vec { self.tokens } @@ -12224,13 +15663,155 @@ impl<'a> Parser<'a> { } false } + + pub(crate) fn parse_show_stmt_options(&mut self) -> Result { + let show_in; + let mut filter_position = None; + if self.dialect.supports_show_like_before_in() { + if let Some(filter) = self.parse_show_statement_filter()? 
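`parse_create_type_enum` above accepts the PostgreSQL form directly, reading the enum labels as quoted identifiers. A minimal sketch; the type and label names come from the PostgreSQL docs and are otherwise arbitrary.

```rust
use sqlparser::ast::Statement;
use sqlparser::dialect::PostgreSqlDialect;
use sqlparser::parser::Parser;

fn main() {
    let sql = "CREATE TYPE mood AS ENUM ('sad', 'ok', 'happy')";
    let statements = Parser::parse_sql(&PostgreSqlDialect {}, sql).expect("CREATE TYPE ... AS ENUM should parse");
    assert!(matches!(statements[0], Statement::CreateType { .. }));
}
```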
{ + filter_position = Some(ShowStatementFilterPosition::Infix(filter)); + } + show_in = self.maybe_parse_show_stmt_in()?; + } else { + show_in = self.maybe_parse_show_stmt_in()?; + if let Some(filter) = self.parse_show_statement_filter()? { + filter_position = Some(ShowStatementFilterPosition::Suffix(filter)); + } + } + let starts_with = self.maybe_parse_show_stmt_starts_with()?; + let limit = self.maybe_parse_show_stmt_limit()?; + let from = self.maybe_parse_show_stmt_from()?; + Ok(ShowStatementOptions { + filter_position, + show_in, + starts_with, + limit, + limit_from: from, + }) + } + + fn maybe_parse_show_stmt_in(&mut self) -> Result, ParserError> { + let clause = match self.parse_one_of_keywords(&[Keyword::FROM, Keyword::IN]) { + Some(Keyword::FROM) => ShowStatementInClause::FROM, + Some(Keyword::IN) => ShowStatementInClause::IN, + None => return Ok(None), + _ => return self.expected("FROM or IN", self.peek_token()), + }; + + let (parent_type, parent_name) = match self.parse_one_of_keywords(&[ + Keyword::ACCOUNT, + Keyword::DATABASE, + Keyword::SCHEMA, + Keyword::TABLE, + Keyword::VIEW, + ]) { + // If we see these next keywords it means we don't have a parent name + Some(Keyword::DATABASE) + if self.peek_keywords(&[Keyword::STARTS, Keyword::WITH]) + | self.peek_keyword(Keyword::LIMIT) => + { + (Some(ShowStatementInParentType::Database), None) + } + Some(Keyword::SCHEMA) + if self.peek_keywords(&[Keyword::STARTS, Keyword::WITH]) + | self.peek_keyword(Keyword::LIMIT) => + { + (Some(ShowStatementInParentType::Schema), None) + } + Some(parent_kw) => { + // The parent name here is still optional, for example: + // SHOW TABLES IN ACCOUNT, so parsing the object name + // may fail because the statement ends. + let parent_name = self.maybe_parse(|p| p.parse_object_name(false))?; + match parent_kw { + Keyword::ACCOUNT => (Some(ShowStatementInParentType::Account), parent_name), + Keyword::DATABASE => (Some(ShowStatementInParentType::Database), parent_name), + Keyword::SCHEMA => (Some(ShowStatementInParentType::Schema), parent_name), + Keyword::TABLE => (Some(ShowStatementInParentType::Table), parent_name), + Keyword::VIEW => (Some(ShowStatementInParentType::View), parent_name), + _ => { + return self.expected( + "one of ACCOUNT, DATABASE, SCHEMA, TABLE or VIEW", + self.peek_token(), + ) + } + } + } + None => { + // Parsing MySQL style FROM tbl_name FROM db_name + // which is equivalent to FROM tbl_name.db_name + let mut parent_name = self.parse_object_name(false)?; + if self + .parse_one_of_keywords(&[Keyword::FROM, Keyword::IN]) + .is_some() + { + parent_name + .0 + .insert(0, ObjectNamePart::Identifier(self.parse_identifier()?)); + } + (None, Some(parent_name)) + } + }; + + Ok(Some(ShowStatementIn { + clause, + parent_type, + parent_name, + })) + } + + fn maybe_parse_show_stmt_starts_with(&mut self) -> Result, ParserError> { + if self.parse_keywords(&[Keyword::STARTS, Keyword::WITH]) { + Ok(Some(self.parse_value()?.value)) + } else { + Ok(None) + } + } + + fn maybe_parse_show_stmt_limit(&mut self) -> Result, ParserError> { + if self.parse_keyword(Keyword::LIMIT) { + Ok(self.parse_limit()?) 
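For context, `parse_show_stmt_options` and `maybe_parse_show_stmt_in` above collect the `IN`/`FROM`, `STARTS WITH`, and `LIMIT` clauses into a `ShowStatementOptions`. A rough usage sketch, assuming the Snowflake dialect routes `SHOW TABLES` through these helpers and that the resulting AST variant carries a `show_options` field:

```rust
use sqlparser::ast::Statement;
use sqlparser::dialect::SnowflakeDialect;
use sqlparser::parser::Parser;

fn main() {
    let sql = "SHOW TABLES IN SCHEMA db1.s1 STARTS WITH 'ord' LIMIT 10";
    let stmt = Parser::parse_sql(&SnowflakeDialect {}, sql).unwrap().remove(0);
    // The ShowTables variant and its show_options field are assumptions here.
    if let Statement::ShowTables { show_options, .. } = stmt {
        println!("show_in:     {:?}", show_options.show_in);
        println!("starts_with: {:?}", show_options.starts_with);
        println!("limit:       {:?}", show_options.limit);
    }
}
```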
+ } else { + Ok(None) + } + } + + fn maybe_parse_show_stmt_from(&mut self) -> Result, ParserError> { + if self.parse_keyword(Keyword::FROM) { + Ok(Some(self.parse_value()?.value)) + } else { + Ok(None) + } + } +} + +fn maybe_prefixed_expr(expr: Expr, prefix: Option) -> Expr { + if let Some(prefix) = prefix { + Expr::Prefixed { + prefix, + value: Box::new(expr), + } + } else { + expr + } } impl Word { - pub fn to_ident(&self) -> Ident { + #[deprecated(since = "0.54.0", note = "please use `into_ident` instead")] + pub fn to_ident(&self, span: Span) -> Ident { Ident { value: self.value.clone(), quote_style: self.quote_style, + span, + } + } + + /// Convert this word into an [`Ident`] identifier + pub fn into_ident(self, span: Span) -> Ident { + Ident { + value: self.value, + quote_style: self.quote_style, + span, } } } @@ -12493,14 +16074,14 @@ mod tests { test_parse_data_type!( dialect, "GEOMETRY", - DataType::Custom(ObjectName(vec!["GEOMETRY".into()]), vec![]) + DataType::Custom(ObjectName::from(vec!["GEOMETRY".into()]), vec![]) ); test_parse_data_type!( dialect, "GEOMETRY(POINT)", DataType::Custom( - ObjectName(vec!["GEOMETRY".into()]), + ObjectName::from(vec!["GEOMETRY".into()]), vec!["POINT".to_string()] ) ); @@ -12509,7 +16090,7 @@ mod tests { dialect, "GEOMETRY(POINT, 4326)", DataType::Custom( - ObjectName(vec!["GEOMETRY".into()]), + ObjectName::from(vec!["GEOMETRY".into()]), vec!["POINT".to_string(), "4326".to_string()] ) ); @@ -12645,7 +16226,7 @@ mod tests { }}; } - let dummy_name = ObjectName(vec![Ident::new("dummy_name")]); + let dummy_name = ObjectName::from(vec![Ident::new("dummy_name")]); let dummy_authorization = Ident::new("dummy_authorization"); test_parse_schema_name!( @@ -12777,7 +16358,7 @@ mod tests { assert_eq!( ast, Err(ParserError::ParserError( - "Expected: [NOT] NULL or TRUE|FALSE or [NOT] DISTINCT FROM after IS, found: a at Line: 1, Column: 16" + "Expected: [NOT] NULL | TRUE | FALSE | DISTINCT | [form] NORMALIZED FROM after IS, found: a at Line: 1, Column: 16" .to_string() )) ); @@ -12804,14 +16385,17 @@ mod tests { Ident { value: "CATALOG".to_string(), quote_style: None, + span: Span::empty(), }, Ident { value: "F(o)o. \"bar".to_string(), quote_style: Some('"'), + span: Span::empty(), }, Ident { value: "table".to_string(), quote_style: None, + span: Span::empty(), }, ]; dialect.run_parser_method(r#"CATALOG."F(o)o. ""bar".table"#, |parser| { @@ -12824,10 +16408,12 @@ mod tests { Ident { value: "CATALOG".to_string(), quote_style: None, + span: Span::empty(), }, Ident { value: "table".to_string(), quote_style: None, + span: Span::empty(), }, ]; dialect.run_parser_method("CATALOG . table", |parser| { @@ -12905,16 +16491,6 @@ mod tests { assert!(Parser::parse_sql(&GenericDialect {}, sql).is_err()); } - #[test] - fn test_replace_into_set() { - // NOTE: This is actually valid MySQL syntax, REPLACE and INSERT, - // but the parser does not yet support it. 
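The `Word::to_ident` deprecation above, in favor of the consuming `into_ident`, can be illustrated with a short sketch; both now take the token's `Span` so the resulting `Ident` carries source-location information:

```rust
use sqlparser::tokenizer::{Span, Token};

fn main() {
    if let Token::Word(word) = Token::make_word("customer", None) {
        // into_ident consumes the Word and attaches the given span to the Ident.
        let ident = word.into_ident(Span::empty());
        assert_eq!(ident.value, "customer");
        assert_eq!(ident.quote_style, None);
    }
}
```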
- // https://dev.mysql.com/doc/refman/8.3/en/insert.html - let sql = "REPLACE INTO t SET a='1'"; - - assert!(Parser::parse_sql(&MySqlDialect {}, sql).is_err()); - } - #[test] fn test_replace_into_set_placeholder() { let sql = "REPLACE INTO t SET ?"; diff --git a/src/test_utils.rs b/src/test_utils.rs index b35fc45c2..3c22fa911 100644 --- a/src/test_utils.rs +++ b/src/test_utils.rs @@ -33,7 +33,7 @@ use core::fmt::Debug; use crate::dialect::*; use crate::parser::{Parser, ParserError}; -use crate::tokenizer::Tokenizer; +use crate::tokenizer::{Token, Tokenizer}; use crate::{ast::*, parser::ParserOptions}; #[cfg(test)] @@ -151,10 +151,11 @@ impl TestedDialects { /// /// 2. re-serializing the result of parsing `sql` produces the same /// `canonical` sql string + /// + /// For multiple statements, use [`statements_parse_to`]. pub fn one_statement_parses_to(&self, sql: &str, canonical: &str) -> Statement { let mut statements = self.parse_sql_statements(sql).expect(sql); assert_eq!(statements.len(), 1); - if !canonical.is_empty() && sql != canonical { assert_eq!(self.parse_sql_statements(canonical).unwrap(), statements); } @@ -167,6 +168,24 @@ impl TestedDialects { only_statement } + /// The same as [`one_statement_parses_to`] but it works for a multiple statements + pub fn statements_parse_to(&self, sql: &str, canonical: &str) -> Vec { + let statements = self.parse_sql_statements(sql).expect(sql); + if !canonical.is_empty() && sql != canonical { + assert_eq!(self.parse_sql_statements(canonical).unwrap(), statements); + } else { + assert_eq!( + sql, + statements + .iter() + .map(|s| s.to_string()) + .collect::>() + .join("; ") + ); + } + statements + } + /// Ensures that `sql` parses as an [`Expr`], and that /// re-serializing the parse result produces canonical pub fn expr_parses_to(&self, sql: &str, canonical: &str) -> Expr { @@ -238,6 +257,22 @@ impl TestedDialects { pub fn verified_expr(&self, sql: &str) -> Expr { self.expr_parses_to(sql, sql) } + + /// Check that the tokenizer returns the expected tokens for the given SQL. + pub fn tokenizes_to(&self, sql: &str, expected: Vec) { + if self.dialects.is_empty() { + panic!("No dialects to test"); + } + + self.dialects.iter().for_each(|dialect| { + let mut tokenizer = Tokenizer::new(&**dialect, sql); + if let Some(options) = &self.options { + tokenizer = tokenizer.with_unescape(options.unescape); + } + let tokens = tokenizer.tokenize().unwrap(); + assert_eq!(expected, tokens, "Tokenized differently for {:?}", dialect); + }); + } } /// Returns all available dialects. 
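The new `statements_parse_to` helper mirrors `one_statement_parses_to` for multi-statement input. A small sketch of how it could be called from a test, assuming the crate's `test_utils` module is importable there:

```rust
use sqlparser::test_utils::all_dialects;

#[test]
fn parses_two_statements() {
    // An empty canonical string means "the input should round-trip unchanged";
    // the statements are re-serialized and joined with "; " for the comparison.
    let statements = all_dialects().statements_parse_to("SELECT 1; SELECT 2", "");
    assert_eq!(statements.len(), 2);
}
```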
@@ -338,19 +373,37 @@ pub fn table_alias(name: impl Into) -> Option { pub fn table(name: impl Into) -> TableFactor { TableFactor::Table { - name: ObjectName(vec![Ident::new(name.into())]), + name: ObjectName::from(vec![Ident::new(name.into())]), + alias: None, + args: None, + with_hints: vec![], + version: None, + partitions: vec![], + with_ordinality: false, + json_path: None, + sample: None, + index_hints: vec![], + } +} + +pub fn table_from_name(name: ObjectName) -> TableFactor { + TableFactor::Table { + name, alias: None, args: None, with_hints: vec![], version: None, partitions: vec![], with_ordinality: false, + json_path: None, + sample: None, + index_hints: vec![], } } pub fn table_with_alias(name: impl Into, alias: impl Into) -> TableFactor { TableFactor::Table { - name: ObjectName(vec![Ident::new(name)]), + name: ObjectName::from(vec![Ident::new(name)]), alias: Some(TableAlias { name: Ident::new(alias), columns: vec![], @@ -360,6 +413,9 @@ pub fn table_with_alias(name: impl Into, alias: impl Into) -> Ta version: None, partitions: vec![], with_ordinality: false, + json_path: None, + sample: None, + index_hints: vec![], } } @@ -367,13 +423,14 @@ pub fn join(relation: TableFactor) -> Join { Join { relation, global: false, - join_operator: JoinOperator::Inner(JoinConstraint::Natural), + join_operator: JoinOperator::Join(JoinConstraint::Natural), } } pub fn call(function: &str, args: impl IntoIterator) -> Expr { Expr::Function(Function { - name: ObjectName(vec![Ident::new(function)]), + name: ObjectName::from(vec![Ident::new(function)]), + uses_odbc_syntax: false, parameters: FunctionArguments::None, args: FunctionArguments::List(FunctionArgumentList { duplicate_treatment: None, diff --git a/src/tokenizer.rs b/src/tokenizer.rs index 4186ec824..afe1e35c7 100644 --- a/src/tokenizer.rs +++ b/src/tokenizer.rs @@ -29,10 +29,10 @@ use alloc::{ vec, vec::Vec, }; -use core::fmt; use core::iter::Peekable; use core::num::NonZeroU8; use core::str::Chars; +use core::{cmp, fmt}; #[cfg(feature = "serde")] use serde::{Deserialize, Serialize}; @@ -40,13 +40,13 @@ use serde::{Deserialize, Serialize}; #[cfg(feature = "visitor")] use sqlparser_derive::{Visit, VisitMut}; -use crate::ast::DollarQuotedString; use crate::dialect::Dialect; use crate::dialect::{ BigQueryDialect, DuckDbDialect, GenericDialect, MySqlDialect, PostgreSqlDialect, SnowflakeDialect, }; use crate::keywords::{Keyword, ALL_KEYWORDS, ALL_KEYWORDS_INDEX}; +use crate::{ast::DollarQuotedString, dialect::HiveDialect}; /// SQL Token enumeration #[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)] @@ -170,8 +170,10 @@ pub enum Token { RBrace, /// Right Arrow `=>` RArrow, - /// Sharp `#` used for PostgreSQL Bitwise XOR operator + /// Sharp `#` used for PostgreSQL Bitwise XOR operator, also PostgreSQL/Redshift geometrical unary/binary operator (Number of points in path or polygon/Intersection) Sharp, + /// `##` PostgreSQL/Redshift geometrical binary operator (Point of closest proximity) + DoubleSharp, /// Tilde `~` used for PostgreSQL Bitwise NOT operator or case sensitive match regular expression operator Tilde, /// `~*` , a case insensitive match regular expression operator in PostgreSQL @@ -198,7 +200,7 @@ pub enum Token { ExclamationMark, /// Double Exclamation Mark `!!` used for PostgreSQL prefix factorial operator DoubleExclamationMark, - /// AtSign `@` used for PostgreSQL abs operator + /// AtSign `@` used for PostgreSQL abs operator, also PostgreSQL/Redshift geometrical unary/binary operator (Center, Contained or on) AtSign, /// 
`^@`, a "starts with" string operator in PostgreSQL CaretAt, @@ -214,6 +216,38 @@ pub enum Token { LongArrow, /// `#>`, extracts JSON sub-object at the specified path HashArrow, + /// `@-@` PostgreSQL/Redshift geometrical unary operator (Length or circumference) + AtDashAt, + /// `?-` PostgreSQL/Redshift geometrical unary/binary operator (Is horizontal?/Are horizontally aligned?) + QuestionMarkDash, + /// `&<` PostgreSQL/Redshift geometrical binary operator (Overlaps to left?) + AmpersandLeftAngleBracket, + /// `&>` PostgreSQL/Redshift geometrical binary operator (Overlaps to right?)` + AmpersandRightAngleBracket, + /// `&<|` PostgreSQL/Redshift geometrical binary operator (Does not extend above?)` + AmpersandLeftAngleBracketVerticalBar, + /// `|&>` PostgreSQL/Redshift geometrical binary operator (Does not extend below?)` + VerticalBarAmpersandRightAngleBracket, + /// `<->` PostgreSQL/Redshift geometrical binary operator (Distance between) + TwoWayArrow, + /// `<^` PostgreSQL/Redshift geometrical binary operator (Is below?) + LeftAngleBracketCaret, + /// `>^` PostgreSQL/Redshift geometrical binary operator (Is above?) + RightAngleBracketCaret, + /// `?#` PostgreSQL/Redshift geometrical binary operator (Intersects or overlaps) + QuestionMarkSharp, + /// `?-|` PostgreSQL/Redshift geometrical binary operator (Is perpendicular?) + QuestionMarkDashVerticalBar, + /// `?||` PostgreSQL/Redshift geometrical binary operator (Are parallel?) + QuestionMarkDoubleVerticalBar, + /// `~=` PostgreSQL/Redshift geometrical binary operator (Same as) + TildeEqual, + /// `<<| PostgreSQL/Redshift geometrical binary operator (Is strictly below?) + ShiftLeftVerticalBar, + /// `|>> PostgreSQL/Redshift geometrical binary operator (Is strictly above?) + VerticalBarShiftRight, + /// `|> BigQuery pipe operator + VerticalBarRightAngleBracket, /// `#>>`, extracts JSON sub-object at the specified path as text HashLongArrow, /// jsonb @> jsonb -> boolean: Test whether left json contains the right json @@ -303,6 +337,7 @@ impl fmt::Display for Token { Token::RBrace => f.write_str("}"), Token::RArrow => f.write_str("=>"), Token::Sharp => f.write_str("#"), + Token::DoubleSharp => f.write_str("##"), Token::ExclamationMark => f.write_str("!"), Token::DoubleExclamationMark => f.write_str("!!"), Token::Tilde => f.write_str("~"), @@ -320,6 +355,22 @@ impl fmt::Display for Token { Token::Overlap => f.write_str("&&"), Token::PGSquareRoot => f.write_str("|/"), Token::PGCubeRoot => f.write_str("||/"), + Token::AtDashAt => f.write_str("@-@"), + Token::QuestionMarkDash => f.write_str("?-"), + Token::AmpersandLeftAngleBracket => f.write_str("&<"), + Token::AmpersandRightAngleBracket => f.write_str("&>"), + Token::AmpersandLeftAngleBracketVerticalBar => f.write_str("&<|"), + Token::VerticalBarAmpersandRightAngleBracket => f.write_str("|&>"), + Token::VerticalBarRightAngleBracket => f.write_str("|>"), + Token::TwoWayArrow => f.write_str("<->"), + Token::LeftAngleBracketCaret => f.write_str("<^"), + Token::RightAngleBracketCaret => f.write_str(">^"), + Token::QuestionMarkSharp => f.write_str("?#"), + Token::QuestionMarkDashVerticalBar => f.write_str("?-|"), + Token::QuestionMarkDoubleVerticalBar => f.write_str("?||"), + Token::TildeEqual => f.write_str("~="), + Token::ShiftLeftVerticalBar => f.write_str("<<|"), + Token::VerticalBarShiftRight => f.write_str("|>>"), Token::Placeholder(ref s) => write!(f, "{s}"), Token::Arrow => write!(f, "->"), Token::LongArrow => write!(f, "->>"), @@ -422,61 +473,253 @@ impl fmt::Display for Whitespace { } 
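The geometric operator tokens listed above only come out of the tokenizer when a dialect reports `supports_geometric_types()`. A brief sketch, assuming the PostgreSQL dialect enables that flag:

```rust
use sqlparser::dialect::PostgreSqlDialect;
use sqlparser::tokenizer::{Token, Tokenizer};

fn main() {
    // `<->` (distance between) and `@-@` (length) tokenize as dedicated operators.
    let tokens = Tokenizer::new(&PostgreSqlDialect {}, "SELECT a <-> b, @-@ c")
        .tokenize()
        .unwrap();
    assert!(tokens.contains(&Token::TwoWayArrow));
    assert!(tokens.contains(&Token::AtDashAt));
}
```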
/// Location in input string -#[derive(Debug, Eq, PartialEq, Clone, Copy)] +/// +/// # Create an "empty" (unknown) `Location` +/// ``` +/// # use sqlparser::tokenizer::Location; +/// let location = Location::empty(); +/// ``` +/// +/// # Create a `Location` from a line and column +/// ``` +/// # use sqlparser::tokenizer::Location; +/// let location = Location::new(1, 1); +/// ``` +/// +/// # Create a `Location` from a pair +/// ``` +/// # use sqlparser::tokenizer::Location; +/// let location = Location::from((1, 1)); +/// ``` +#[derive(Eq, PartialEq, Hash, Clone, Copy, Ord, PartialOrd)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] pub struct Location { - /// Line number, starting from 1 + /// Line number, starting from 1. + /// + /// Note: Line 0 is used for empty spans pub line: u64, - /// Line column, starting from 1 + /// Line column, starting from 1. + /// + /// Note: Column 0 is used for empty spans pub column: u64, } impl fmt::Display for Location { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { if self.line == 0 { return Ok(()); } - write!( - f, - // TODO: use standard compiler location syntax (::) - " at Line: {}, Column: {}", - self.line, self.column, - ) + write!(f, " at Line: {}, Column: {}", self.line, self.column) } } -/// A [Token] with [Location] attached to it -#[derive(Debug, Eq, PartialEq, Clone)] -pub struct TokenWithLocation { - pub token: Token, - pub location: Location, +impl fmt::Debug for Location { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "Location({},{})", self.line, self.column) + } } -impl TokenWithLocation { - pub fn new(token: Token, line: u64, column: u64) -> TokenWithLocation { - TokenWithLocation { - token, - location: Location { line, column }, +impl Location { + /// Return an "empty" / unknown location + pub fn empty() -> Self { + Self { line: 0, column: 0 } + } + + /// Create a new `Location` for a given line and column + pub fn new(line: u64, column: u64) -> Self { + Self { line, column } + } + + /// Create a new location for a given line and column + /// + /// Alias for [`Self::new`] + // TODO: remove / deprecate in favor of` `new` for consistency? + pub fn of(line: u64, column: u64) -> Self { + Self::new(line, column) + } + + /// Combine self and `end` into a new `Span` + pub fn span_to(self, end: Self) -> Span { + Span { start: self, end } + } +} + +impl From<(u64, u64)> for Location { + fn from((line, column): (u64, u64)) -> Self { + Self { line, column } + } +} + +/// A span represents a linear portion of the input string (start, end) +/// +/// See [Spanned](crate::ast::Spanned) for more information. 
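A compact example of the `Location` API documented above, combining the constructors, the tuple conversion, and `span_to`:

```rust
use sqlparser::tokenizer::{Location, Span};

fn main() {
    let start = Location::new(1, 1);
    let end = Location::from((1, 7)); // via the From<(u64, u64)> impl
    let span = start.span_to(end);
    assert_eq!(span, Span::new(start, end));
    // Line/column 0 denote an "empty" (unknown) location.
    assert_eq!(Location::empty(), Location::new(0, 0));
}
```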
+#[derive(Eq, PartialEq, Hash, Clone, PartialOrd, Ord, Copy)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub struct Span { + pub start: Location, + pub end: Location, +} + +impl fmt::Debug for Span { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "Span({:?}..{:?})", self.start, self.end) + } +} + +impl Span { + // An empty span (0, 0) -> (0, 0) + // We need a const instance for pattern matching + const EMPTY: Span = Self::empty(); + + /// Create a new span from a start and end [`Location`] + pub fn new(start: Location, end: Location) -> Span { + Span { start, end } + } + + /// Returns an empty span `(0, 0) -> (0, 0)` + /// + /// Empty spans represent no knowledge of source location + /// See [Spanned](crate::ast::Spanned) for more information. + pub const fn empty() -> Span { + Span { + start: Location { line: 0, column: 0 }, + end: Location { line: 0, column: 0 }, + } + } + + /// Returns the smallest Span that contains both `self` and `other` + /// If either span is [Span::empty], the other span is returned + /// + /// # Examples + /// ``` + /// # use sqlparser::tokenizer::{Span, Location}; + /// // line 1, column1 -> line 2, column 5 + /// let span1 = Span::new(Location::new(1, 1), Location::new(2, 5)); + /// // line 2, column 3 -> line 3, column 7 + /// let span2 = Span::new(Location::new(2, 3), Location::new(3, 7)); + /// // Union of the two is the min/max of the two spans + /// // line 1, column 1 -> line 3, column 7 + /// let union = span1.union(&span2); + /// assert_eq!(union, Span::new(Location::new(1, 1), Location::new(3, 7))); + /// ``` + pub fn union(&self, other: &Span) -> Span { + // If either span is empty, return the other + // this prevents propagating (0, 0) through the tree + match (self, other) { + (&Span::EMPTY, _) => *other, + (_, &Span::EMPTY) => *self, + _ => Span { + start: cmp::min(self.start, other.start), + end: cmp::max(self.end, other.end), + }, } } - pub fn wrap(token: Token) -> TokenWithLocation { - TokenWithLocation::new(token, 0, 0) + /// Same as [Span::union] for `Option` + /// + /// If `other` is `None`, `self` is returned + pub fn union_opt(&self, other: &Option) -> Span { + match other { + Some(other) => self.union(other), + None => *self, + } + } + + /// Return the [Span::union] of all spans in the iterator + /// + /// If the iterator is empty, an empty span is returned + /// + /// # Example + /// ``` + /// # use sqlparser::tokenizer::{Span, Location}; + /// let spans = vec![ + /// Span::new(Location::new(1, 1), Location::new(2, 5)), + /// Span::new(Location::new(2, 3), Location::new(3, 7)), + /// Span::new(Location::new(3, 1), Location::new(4, 2)), + /// ]; + /// // line 1, column 1 -> line 4, column 2 + /// assert_eq!( + /// Span::union_iter(spans), + /// Span::new(Location::new(1, 1), Location::new(4, 2)) + /// ); + pub fn union_iter>(iter: I) -> Span { + iter.into_iter() + .reduce(|acc, item| acc.union(&item)) + .unwrap_or(Span::empty()) } } -impl PartialEq for TokenWithLocation { +/// Backwards compatibility struct for [`TokenWithSpan`] +#[deprecated(since = "0.53.0", note = "please use `TokenWithSpan` instead")] +pub type TokenWithLocation = TokenWithSpan; + +/// A [Token] with [Span] attached to it +/// +/// This is used to track the location of a token in the input string +/// +/// # Examples +/// ``` +/// # use sqlparser::tokenizer::{Location, Span, Token, TokenWithSpan}; +/// // commas @ line 1, column 10 +/// let tok1 = TokenWithSpan::new( 
+/// Token::Comma, +/// Span::new(Location::new(1, 10), Location::new(1, 11)), +/// ); +/// assert_eq!(tok1, Token::Comma); // can compare the token +/// +/// // commas @ line 2, column 20 +/// let tok2 = TokenWithSpan::new( +/// Token::Comma, +/// Span::new(Location::new(2, 20), Location::new(2, 21)), +/// ); +/// // same token but different locations are not equal +/// assert_ne!(tok1, tok2); +/// ``` +#[derive(Debug, Clone, Hash, Ord, PartialOrd, Eq, PartialEq)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))] +pub struct TokenWithSpan { + pub token: Token, + pub span: Span, +} + +impl TokenWithSpan { + /// Create a new [`TokenWithSpan`] from a [`Token`] and a [`Span`] + pub fn new(token: Token, span: Span) -> Self { + Self { token, span } + } + + /// Wrap a token with an empty span + pub fn wrap(token: Token) -> Self { + Self::new(token, Span::empty()) + } + + /// Wrap a token with a location from `start` to `end` + pub fn at(token: Token, start: Location, end: Location) -> Self { + Self::new(token, Span::new(start, end)) + } + + /// Return an EOF token with no location + pub fn new_eof() -> Self { + Self::wrap(Token::EOF) + } +} + +impl PartialEq for TokenWithSpan { fn eq(&self, other: &Token) -> bool { &self.token == other } } -impl PartialEq for Token { - fn eq(&self, other: &TokenWithLocation) -> bool { +impl PartialEq for Token { + fn eq(&self, other: &TokenWithSpan) -> bool { self == &other.token } } -impl fmt::Display for TokenWithLocation { +impl fmt::Display for TokenWithSpan { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { self.token.fmt(f) } @@ -504,7 +747,7 @@ struct State<'a> { pub col: u64, } -impl<'a> State<'a> { +impl State<'_> { /// return the next character and advance the stream pub fn next(&mut self) -> Option { match self.peekable.next() { @@ -636,8 +879,8 @@ impl<'a> Tokenizer<'a> { } /// Tokenize the statement and produce a vector of tokens with location information - pub fn tokenize_with_location(&mut self) -> Result, TokenizerError> { - let mut tokens: Vec = vec![]; + pub fn tokenize_with_location(&mut self) -> Result, TokenizerError> { + let mut tokens: Vec = vec![]; self.tokenize_with_location_into_buf(&mut tokens) .map(|_| tokens) } @@ -646,7 +889,7 @@ impl<'a> Tokenizer<'a> { /// If an error is thrown, the buffer will contain all tokens that were successfully parsed before the error. pub fn tokenize_with_location_into_buf( &mut self, - buf: &mut Vec, + buf: &mut Vec, ) -> Result<(), TokenizerError> { let mut state = State { peekable: self.query.chars().peekable(), @@ -655,8 +898,10 @@ impl<'a> Tokenizer<'a> { }; let mut location = state.location(); - while let Some(token) = self.next_token(&mut state)? { - buf.push(TokenWithLocation { token, location }); + while let Some(token) = self.next_token(&mut state, buf.last().map(|t| &t.token))? 
{ + let span = location.span_to(state.location()); + + buf.push(TokenWithSpan { token, span }); location = state.location(); } @@ -690,7 +935,11 @@ impl<'a> Tokenizer<'a> { } /// Get the next token or return None - fn next_token(&self, chars: &mut State) -> Result, TokenizerError> { + fn next_token( + &self, + chars: &mut State, + prev_token: Option<&Token>, + ) -> Result, TokenizerError> { match chars.peek() { Some(&ch) => match ch { ' ' => self.consume_and_return(chars, Token::Whitespace(Whitespace::Space)), @@ -704,8 +953,9 @@ impl<'a> Tokenizer<'a> { } Ok(Some(Token::Whitespace(Whitespace::Newline))) } - // BigQuery uses b or B for byte string literal - b @ 'B' | b @ 'b' if dialect_of!(self is BigQueryDialect | GenericDialect) => { + // BigQuery and MySQL use b or B for byte string literal, Postgres for bit strings + b @ 'B' | b @ 'b' if dialect_of!(self is BigQueryDialect | PostgreSqlDialect | MySqlDialect | GenericDialect) => + { chars.next(); // consume match chars.peek() { Some('\'') => { @@ -776,7 +1026,10 @@ impl<'a> Tokenizer<'a> { match chars.peek() { Some('\'') => { // N'...' - a - let s = self.tokenize_single_quoted_string(chars, '\'', true)?; + let backslash_escape = + self.dialect.supports_string_literal_backslash_escape(); + let s = + self.tokenize_single_quoted_string(chars, '\'', backslash_escape)?; Ok(Some(Token::NationalStringLiteral(s))) } _ => { @@ -787,7 +1040,7 @@ impl<'a> Tokenizer<'a> { } } // PostgreSQL accepts "escape" string constants, which are an extension to the SQL standard. - x @ 'e' | x @ 'E' => { + x @ 'e' | x @ 'E' if self.dialect.supports_string_escape_constant() => { let starting_loc = chars.location(); chars.next(); // consume, to check the next char match chars.peek() { @@ -880,34 +1133,98 @@ impl<'a> Tokenizer<'a> { Ok(Some(Token::DoubleQuotedString(s))) } // delimited (quoted) identifier + quote_start if self.dialect.is_delimited_identifier_start(ch) => { + let word = self.tokenize_quoted_identifier(quote_start, chars)?; + Ok(Some(Token::make_word(&word, Some(quote_start)))) + } + // Potentially nested delimited (quoted) identifier quote_start - if self.dialect.is_delimited_identifier_start(ch) + if self + .dialect + .is_nested_delimited_identifier_start(quote_start) && self .dialect - .is_proper_identifier_inside_quotes(chars.peekable.clone()) => + .peek_nested_delimited_identifier_quotes(chars.peekable.clone()) + .is_some() => { - let error_loc = chars.location(); - chars.next(); // consume the opening quote + let Some((quote_start, nested_quote_start)) = self + .dialect + .peek_nested_delimited_identifier_quotes(chars.peekable.clone()) + else { + return self.tokenizer_error( + chars.location(), + format!("Expected nested delimiter '{quote_start}' before EOF."), + ); + }; + + let Some(nested_quote_start) = nested_quote_start else { + let word = self.tokenize_quoted_identifier(quote_start, chars)?; + return Ok(Some(Token::make_word(&word, Some(quote_start)))); + }; + + let mut word = vec![]; let quote_end = Word::matching_end_quote(quote_start); - let (s, last_char) = self.parse_quoted_ident(chars, quote_end); + let nested_quote_end = Word::matching_end_quote(nested_quote_start); + let error_loc = chars.location(); - if last_char == Some(quote_end) { - Ok(Some(Token::make_word(&s, Some(quote_start)))) - } else { - self.tokenizer_error( + chars.next(); // skip the first delimiter + peeking_take_while(chars, |ch| ch.is_whitespace()); + if chars.peek() != Some(&nested_quote_start) { + return self.tokenizer_error( + error_loc, + format!("Expected 
nested delimiter '{nested_quote_start}' before EOF."), + ); + } + word.push(nested_quote_start.into()); + word.push(self.tokenize_quoted_identifier(nested_quote_end, chars)?); + word.push(nested_quote_end.into()); + peeking_take_while(chars, |ch| ch.is_whitespace()); + if chars.peek() != Some("e_end) { + return self.tokenizer_error( error_loc, format!("Expected close delimiter '{quote_end}' before EOF."), - ) + ); } + chars.next(); // skip close delimiter + + Ok(Some(Token::make_word(&word.concat(), Some(quote_start)))) } // numbers and period '0'..='9' | '.' => { - let mut s = peeking_take_while(chars, |ch| ch.is_ascii_digit()); + // special case where if ._ is encountered after a word then that word + // is a table and the _ is the start of the col name. + // if the prev token is not a word, then this is not a valid sql + // word or number. + if ch == '.' && chars.peekable.clone().nth(1) == Some('_') { + if let Some(Token::Word(_)) = prev_token { + chars.next(); + return Ok(Some(Token::Period)); + } + + return self.tokenizer_error( + chars.location(), + "Unexpected character '_'".to_string(), + ); + } + + // Some dialects support underscore as number separator + // There can only be one at a time and it must be followed by another digit + let is_number_separator = |ch: char, next_char: Option| { + self.dialect.supports_numeric_literal_underscores() + && ch == '_' + && next_char.is_some_and(|next_ch| next_ch.is_ascii_hexdigit()) + }; + + let mut s = peeking_next_take_while(chars, |ch, next_ch| { + ch.is_ascii_digit() || is_number_separator(ch, next_ch) + }); // match binary literal that starts with 0x if s == "0" && chars.peek() == Some(&'x') { chars.next(); - let s2 = peeking_take_while(chars, |ch| ch.is_ascii_hexdigit()); + let s2 = peeking_next_take_while(chars, |ch, next_ch| { + ch.is_ascii_hexdigit() || is_number_separator(ch, next_ch) + }); return Ok(Some(Token::HexStringLiteral(s2))); } @@ -916,15 +1233,30 @@ impl<'a> Tokenizer<'a> { s.push('.'); chars.next(); } - s += &peeking_take_while(chars, |ch| ch.is_ascii_digit()); - // No number -> Token::Period + // If the dialect supports identifiers that start with a numeric prefix + // and we have now consumed a dot, check if the previous token was a Word. + // If so, what follows is definitely not part of a decimal number and + // we should yield the dot as a dedicated token so compound identifiers + // starting with digits can be parsed correctly. + if s == "." && self.dialect.supports_numeric_prefix() { + if let Some(Token::Word(_)) = prev_token { + return Ok(Some(Token::Period)); + } + } + + // Consume fractional digits. + s += &peeking_next_take_while(chars, |ch, next_ch| { + ch.is_ascii_digit() || is_number_separator(ch, next_ch) + }); + + // No fraction -> Token::Period if s == "." { return Ok(Some(Token::Period)); } - let mut exponent_part = String::new(); // Parse exponent as number + let mut exponent_part = String::new(); if chars.peek() == Some(&'e') || chars.peek() == Some(&'E') { let mut char_clone = chars.peekable.clone(); exponent_part.push(char_clone.next().unwrap()); @@ -953,14 +1285,23 @@ impl<'a> Tokenizer<'a> { } } - // mysql dialect supports identifiers that start with a numeric prefix, - // as long as they aren't an exponent number. 
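The `._` special case above means a dot that directly follows a word is emitted as `Token::Period`, so column names beginning with an underscore still form compound identifiers. A small illustration, assuming the generic dialect:

```rust
use sqlparser::dialect::GenericDialect;
use sqlparser::parser::Parser;

fn main() {
    // `t._hidden_col` tokenizes as Word, Period, Word and parses as a
    // compound identifier rather than being treated as a malformed number.
    let sql = "SELECT t._hidden_col FROM t";
    let statements = Parser::parse_sql(&GenericDialect {}, sql).unwrap();
    assert_eq!(statements[0].to_string(), sql);
}
```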
- if self.dialect.supports_numeric_prefix() && exponent_part.is_empty() { - let word = - peeking_take_while(chars, |ch| self.dialect.is_identifier_part(ch)); - - if !word.is_empty() { - s += word.as_str(); + // If the dialect supports identifiers that start with a numeric prefix, + // we need to check if the value is in fact an identifier and must thus + // be tokenized as a word. + if self.dialect.supports_numeric_prefix() { + if exponent_part.is_empty() { + // If it is not a number with an exponent, it may be + // an identifier starting with digits. + let word = + peeking_take_while(chars, |ch| self.dialect.is_identifier_part(ch)); + + if !word.is_empty() { + s += word.as_str(); + return Ok(Some(Token::make_word(s.as_str(), None))); + } + } else if prev_token == Some(&Token::Period) { + // If the previous token was a period, thus not belonging to a number, + // the value we have is part of an identifier. return Ok(Some(Token::make_word(s.as_str(), None))); } } @@ -980,14 +1321,26 @@ impl<'a> Tokenizer<'a> { // operators '-' => { chars.next(); // consume the '-' + match chars.peek() { Some('-') => { - chars.next(); // consume the second '-', starting a single-line comment - let comment = self.tokenize_single_line_comment(chars); - Ok(Some(Token::Whitespace(Whitespace::SingleLineComment { - prefix: "--".to_owned(), - comment, - }))) + let mut is_comment = true; + if self.dialect.requires_single_line_comment_whitespace() { + is_comment = Some(' ') == chars.peekable.clone().nth(1); + } + + if is_comment { + chars.next(); // consume second '-' + let comment = self.tokenize_single_line_comment(chars); + return Ok(Some(Token::Whitespace( + Whitespace::SingleLineComment { + prefix: "--".to_owned(), + comment, + }, + ))); + } + + self.start_binop(chars, "-", Token::Minus) } Some('>') => { chars.next(); @@ -1047,6 +1400,31 @@ impl<'a> Tokenizer<'a> { _ => self.start_binop(chars, "||", Token::StringConcat), } } + Some('&') if self.dialect.supports_geometric_types() => { + chars.next(); // consume + match chars.peek() { + Some('>') => self.consume_for_binop( + chars, + "|&>", + Token::VerticalBarAmpersandRightAngleBracket, + ), + _ => self.start_binop_opt(chars, "|&", None), + } + } + Some('>') if self.dialect.supports_geometric_types() => { + chars.next(); // consume + match chars.peek() { + Some('>') => self.consume_for_binop( + chars, + "|>>", + Token::VerticalBarShiftRight, + ), + _ => self.start_binop_opt(chars, "|>", None), + } + } + Some('>') if self.dialect.supports_pipe_operator() => { + self.consume_for_binop(chars, "|>", Token::VerticalBarRightAngleBracket) + } // Bitshift '|' operator _ => self.start_binop(chars, "|", Token::Pipe), } @@ -1095,8 +1473,34 @@ impl<'a> Tokenizer<'a> { _ => self.start_binop(chars, "<=", Token::LtEq), } } + Some('|') if self.dialect.supports_geometric_types() => { + self.consume_for_binop(chars, "<<|", Token::ShiftLeftVerticalBar) + } Some('>') => self.consume_for_binop(chars, "<>", Token::Neq), + Some('<') if self.dialect.supports_geometric_types() => { + chars.next(); // consume + match chars.peek() { + Some('|') => self.consume_for_binop( + chars, + "<<|", + Token::ShiftLeftVerticalBar, + ), + _ => self.start_binop(chars, "<<", Token::ShiftLeft), + } + } Some('<') => self.consume_for_binop(chars, "<<", Token::ShiftLeft), + Some('-') if self.dialect.supports_geometric_types() => { + chars.next(); // consume + match chars.peek() { + Some('>') => { + self.consume_for_binop(chars, "<->", Token::TwoWayArrow) + } + _ => self.start_binop_opt(chars, "<-", None), + 
} + } + Some('^') if self.dialect.supports_geometric_types() => { + self.consume_for_binop(chars, "<^", Token::LeftAngleBracketCaret) + } Some('@') => self.consume_for_binop(chars, "<@", Token::ArrowAt), _ => self.start_binop(chars, "<", Token::Lt), } @@ -1106,6 +1510,9 @@ impl<'a> Tokenizer<'a> { match chars.peek() { Some('=') => self.consume_for_binop(chars, ">=", Token::GtEq), Some('>') => self.consume_for_binop(chars, ">>", Token::ShiftRight), + Some('^') if self.dialect.supports_geometric_types() => { + self.consume_for_binop(chars, ">^", Token::RightAngleBracketCaret) + } _ => self.start_binop(chars, ">", Token::Gt), } } @@ -1124,6 +1531,22 @@ impl<'a> Tokenizer<'a> { '&' => { chars.next(); // consume the '&' match chars.peek() { + Some('>') if self.dialect.supports_geometric_types() => { + chars.next(); + self.consume_and_return(chars, Token::AmpersandRightAngleBracket) + } + Some('<') if self.dialect.supports_geometric_types() => { + chars.next(); // consume + match chars.peek() { + Some('|') => self.consume_and_return( + chars, + Token::AmpersandLeftAngleBracketVerticalBar, + ), + _ => { + self.start_binop(chars, "&<", Token::AmpersandLeftAngleBracket) + } + } + } Some('&') => { chars.next(); // consume the second '&' self.start_binop(chars, "&&", Token::Overlap) @@ -1141,7 +1564,8 @@ impl<'a> Tokenizer<'a> { } '{' => self.consume_and_return(chars, Token::LBrace), '}' => self.consume_and_return(chars, Token::RBrace), - '#' if dialect_of!(self is SnowflakeDialect | BigQueryDialect | MySqlDialect) => { + '#' if dialect_of!(self is SnowflakeDialect | BigQueryDialect | MySqlDialect | HiveDialect) => + { chars.next(); // consume the '#', starting a snowflake single-line comment let comment = self.tokenize_single_line_comment(chars); Ok(Some(Token::Whitespace(Whitespace::SingleLineComment { @@ -1153,6 +1577,9 @@ impl<'a> Tokenizer<'a> { chars.next(); // consume match chars.peek() { Some('*') => self.consume_for_binop(chars, "~*", Token::TildeAsterisk), + Some('=') if self.dialect.supports_geometric_types() => { + self.consume_for_binop(chars, "~=", Token::TildeEqual) + } Some('~') => { chars.next(); match chars.peek() { @@ -1179,6 +1606,9 @@ impl<'a> Tokenizer<'a> { } } Some(' ') => Ok(Some(Token::Sharp)), + Some('#') if self.dialect.supports_geometric_types() => { + self.consume_for_binop(chars, "##", Token::DoubleSharp) + } Some(sch) if self.dialect.is_identifier_start('#') => { self.tokenize_identifier_or_keyword([ch, *sch], chars) } @@ -1188,6 +1618,16 @@ impl<'a> Tokenizer<'a> { '@' => { chars.next(); match chars.peek() { + Some('@') if self.dialect.supports_geometric_types() => { + self.consume_and_return(chars, Token::AtAt) + } + Some('-') if self.dialect.supports_geometric_types() => { + chars.next(); + match chars.peek() { + Some('@') => self.consume_and_return(chars, Token::AtDashAt), + _ => self.start_binop_opt(chars, "@-", None), + } + } Some('>') => self.consume_and_return(chars, Token::AtArrow), Some('?') => self.consume_and_return(chars, Token::AtQuestion), Some('@') => { @@ -1201,6 +1641,18 @@ impl<'a> Tokenizer<'a> { } } Some(' ') => Ok(Some(Token::AtSign)), + // We break on quotes here, because no dialect allows identifiers starting + // with @ and containing quotation marks (e.g. `@'foo'`) unless they are + // quoted, which is tokenized as a quoted string, not here (e.g. + // `"@'foo'"`). Further, at least two dialects parse `@` followed by a + // quoted string as two separate tokens, which this allows. 
For example, + // Postgres parses `@'1'` as the absolute value of '1' which is implicitly + // cast to a numeric type. And when parsing MySQL-style grantees (e.g. + // `GRANT ALL ON *.* to 'root'@'localhost'`), we also want separate tokens + // for the user, the `@`, and the host. + Some('\'') => Ok(Some(Token::AtSign)), + Some('\"') => Ok(Some(Token::AtSign)), + Some('`') => Ok(Some(Token::AtSign)), Some(sch) if self.dialect.is_identifier_start('@') => { self.tokenize_identifier_or_keyword([ch, *sch], chars) } @@ -1208,11 +1660,30 @@ impl<'a> Tokenizer<'a> { } } // Postgres uses ? for jsonb operators, not prepared statements - '?' if dialect_of!(self is PostgreSqlDialect) => { - chars.next(); + '?' if self.dialect.supports_geometric_types() => { + chars.next(); // consume match chars.peek() { - Some('|') => self.consume_and_return(chars, Token::QuestionPipe), + Some('|') => { + chars.next(); + match chars.peek() { + Some('|') => self.consume_and_return( + chars, + Token::QuestionMarkDoubleVerticalBar, + ), + _ => Ok(Some(Token::QuestionPipe)), + } + } + Some('&') => self.consume_and_return(chars, Token::QuestionAnd), + Some('-') => { + chars.next(); // consume + match chars.peek() { + Some('|') => self + .consume_and_return(chars, Token::QuestionMarkDashVerticalBar), + _ => Ok(Some(Token::QuestionMarkDash)), + } + } + Some('#') => self.consume_and_return(chars, Token::QuestionMarkSharp), _ => self.consume_and_return(chars, Token::Question), } } @@ -1228,7 +1699,7 @@ impl<'a> Tokenizer<'a> { } '$' => Ok(Some(self.tokenize_dollar_preceded_value(chars)?)), - //whitespace check (including unicode chars) should be last as it covers some of the chars above + // whitespace check (including unicode chars) should be last as it covers some of the chars above ch if ch.is_whitespace() => { self.consume_and_return(chars, Token::Whitespace(Whitespace::Space)) } @@ -1246,7 +1717,7 @@ impl<'a> Tokenizer<'a> { default: Token, ) -> Result, TokenizerError> { chars.next(); // consume the first char - self.start_binop(chars, prefix, default) + self.start_binop_opt(chars, prefix, Some(default)) } /// parse a custom binary operator @@ -1255,6 +1726,16 @@ impl<'a> Tokenizer<'a> { chars: &mut State, prefix: &str, default: Token, + ) -> Result, TokenizerError> { + self.start_binop_opt(chars, prefix, Some(default)) + } + + /// parse a custom binary operator + fn start_binop_opt( + &self, + chars: &mut State, + prefix: &str, + default: Option, ) -> Result, TokenizerError> { let mut custom = None; while let Some(&ch) = chars.peek() { @@ -1265,10 +1746,14 @@ impl<'a> Tokenizer<'a> { custom.get_or_insert_with(|| prefix.to_string()).push(ch); chars.next(); } - - Ok(Some( - custom.map(Token::CustomBinaryOperator).unwrap_or(default), - )) + match (custom, default) { + (Some(custom), _) => Ok(Token::CustomBinaryOperator(custom).into()), + (None, Some(tok)) => Ok(Some(tok)), + (None, None) => self.tokenizer_error( + chars.location(), + format!("Expected a valid binary operator after '{}'", prefix), + ), + } } /// Tokenize dollar preceded value (i.e: a string/placeholder) @@ -1278,7 +1763,8 @@ impl<'a> Tokenizer<'a> { chars.next(); - if let Some('$') = chars.peek() { + // If the dialect does not support dollar-quoted strings, then `$$` is rather a placeholder. 
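As the comment above notes, when a dialect supports dollar placeholders rather than dollar-quoted strings, `$$` and `$name$` are emitted as placeholders. A sketch mirroring the SQLite behavior exercised by the tests further down:

```rust
use sqlparser::dialect::SQLiteDialect;
use sqlparser::tokenizer::{Token, Tokenizer};

fn main() {
    let tokens = Tokenizer::new(&SQLiteDialect {}, "SELECT $$, $ABC$")
        .tokenize()
        .unwrap();
    assert!(tokens.contains(&Token::Placeholder("$$".into())));
    assert!(tokens.contains(&Token::Placeholder("$ABC$".into())));
}
```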
+ if matches!(chars.peek(), Some('$')) && !self.dialect.supports_dollar_placeholder() { chars.next(); let mut is_terminated = false; @@ -1312,52 +1798,43 @@ impl<'a> Tokenizer<'a> { }; } else { value.push_str(&peeking_take_while(chars, |ch| { - ch.is_alphanumeric() || ch == '_' + ch.is_alphanumeric() + || ch == '_' + // Allow $ as a placeholder character if the dialect supports it + || matches!(ch, '$' if self.dialect.supports_dollar_placeholder()) })); - if let Some('$') = chars.peek() { + // If the dialect does not support dollar-quoted strings, don't look for the end delimiter. + if matches!(chars.peek(), Some('$')) && !self.dialect.supports_dollar_placeholder() { chars.next(); - 'searching_for_end: loop { - s.push_str(&peeking_take_while(chars, |ch| ch != '$')); - match chars.peek() { - Some('$') => { - chars.next(); - let mut maybe_s = String::from("$"); - for c in value.chars() { - if let Some(next_char) = chars.next() { - maybe_s.push(next_char); - if next_char != c { - // This doesn't match the dollar quote delimiter so this - // is not the end of the string. - s.push_str(&maybe_s); - continue 'searching_for_end; - } - } else { - return self.tokenizer_error( - chars.location(), - "Unterminated dollar-quoted, expected $", - ); + let mut temp = String::new(); + let end_delimiter = format!("${}$", value); + + loop { + match chars.next() { + Some(ch) => { + temp.push(ch); + + if temp.ends_with(&end_delimiter) { + if let Some(temp) = temp.strip_suffix(&end_delimiter) { + s.push_str(temp); } - } - if chars.peek() == Some(&'$') { - chars.next(); - maybe_s.push('$'); - // maybe_s matches the end delimiter - break 'searching_for_end; - } else { - // This also doesn't match the dollar quote delimiter as there are - // more characters before the second dollar so this is not the end - // of the string. - s.push_str(&maybe_s); - continue 'searching_for_end; + break; } } - _ => { + None => { + if temp.ends_with(&end_delimiter) { + if let Some(temp) = temp.strip_suffix(&end_delimiter) { + s.push_str(temp); + } + break; + } + return self.tokenizer_error( chars.location(), "Unterminated dollar-quoted, expected $", - ) + ); } } } @@ -1385,11 +1862,17 @@ impl<'a> Tokenizer<'a> { // Consume characters until newline fn tokenize_single_line_comment(&self, chars: &mut State) -> String { - let mut comment = peeking_take_while(chars, |ch| ch != '\n'); + let mut comment = peeking_take_while(chars, |ch| match ch { + '\n' => false, // Always stop at \n + '\r' if dialect_of!(self is PostgreSqlDialect) => false, // Stop at \r for Postgres + _ => true, // Keep consuming for other characters + }); + if let Some(ch) = chars.next() { - assert_eq!(ch, '\n'); + assert!(ch == '\n' || ch == '\r'); comment.push(ch); } + comment } @@ -1402,6 +1885,27 @@ impl<'a> Tokenizer<'a> { s } + /// Read a quoted identifier + fn tokenize_quoted_identifier( + &self, + quote_start: char, + chars: &mut State, + ) -> Result { + let error_loc = chars.location(); + chars.next(); // consume the opening quote + let quote_end = Word::matching_end_quote(quote_start); + let (s, last_char) = self.parse_quoted_ident(chars, quote_end); + + if last_char == Some(quote_end) { + Ok(s) + } else { + self.tokenizer_error( + error_loc, + format!("Expected close delimiter '{quote_end}' before EOF."), + ) + } + } + /// Read a single quoted string, starting with the opening quote. 
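The rewritten end-delimiter scan above searches for the literal `$tag$` terminator, so dollar signs and other tags inside the body no longer end the string early. A short check against the generic dialect, in line with the nested-tag test cases added below:

```rust
use sqlparser::ast::DollarQuotedString;
use sqlparser::dialect::GenericDialect;
use sqlparser::tokenizer::{Token, Tokenizer};

fn main() {
    let tokens = Tokenizer::new(&GenericDialect {}, "SELECT $tag$dollar $nested$ string$tag$")
        .tokenize()
        .unwrap();
    assert!(tokens.contains(&Token::DollarQuotedString(DollarQuotedString {
        value: "dollar $nested$ string".to_string(),
        tag: Some("tag".to_string()),
    })));
}
```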
fn tokenize_escaped_single_quoted_string( &self, @@ -1554,8 +2058,13 @@ impl<'a> Tokenizer<'a> { num_consecutive_quotes = 0; if let Some(next) = chars.peek() { - if !self.unescape { - // In no-escape mode, the given query has to be saved completely including backslashes. + if !self.unescape + || (self.dialect.ignores_wildcard_escapes() + && (*next == '%' || *next == '_')) + { + // In no-escape mode, the given query has to be saved completely + // including backslashes. Similarly, with ignore_like_wildcard_escapes, + // the backslash is not stripped. s.push(ch); s.push(*next); chars.next(); // consume next @@ -1598,28 +2107,33 @@ impl<'a> Tokenizer<'a> { ) -> Result, TokenizerError> { let mut s = String::new(); let mut nested = 1; - let mut last_ch = ' '; + let supports_nested_comments = self.dialect.supports_nested_comments(); loop { match chars.next() { - Some(ch) => { - if last_ch == '/' && ch == '*' { - nested += 1; - } else if last_ch == '*' && ch == '/' { - nested -= 1; - if nested == 0 { - s.pop(); - break Ok(Some(Token::Whitespace(Whitespace::MultiLineComment(s)))); - } + Some('/') if matches!(chars.peek(), Some('*')) && supports_nested_comments => { + chars.next(); // consume the '*' + s.push('/'); + s.push('*'); + nested += 1; + } + Some('*') if matches!(chars.peek(), Some('/')) => { + chars.next(); // consume the '/' + nested -= 1; + if nested == 0 { + break Ok(Some(Token::Whitespace(Whitespace::MultiLineComment(s)))); } + s.push('*'); + s.push('/'); + } + Some(ch) => { s.push(ch); - last_ch = ch; } None => { break self.tokenizer_error( chars.location(), "Unexpected EOF while in a multi-line comment", - ) + ); } } } @@ -1675,6 +2189,24 @@ fn peeking_take_while(chars: &mut State, mut predicate: impl FnMut(char) -> bool s } +/// Same as peeking_take_while, but also passes the next character to the predicate. 
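The multi-line comment loop above now honors `supports_nested_comments()`, so an inner `/* ... */` no longer terminates the outer comment. A sketch matching the nested-comment test below, assuming the generic dialect keeps that capability enabled:

```rust
use sqlparser::dialect::GenericDialect;
use sqlparser::tokenizer::{Token, Tokenizer, Whitespace};

fn main() {
    let tokens = Tokenizer::new(&GenericDialect {}, "SELECT 1/* a /* b */ c */0")
        .tokenize()
        .unwrap();
    assert!(tokens.contains(&Token::Whitespace(Whitespace::MultiLineComment(
        " a /* b */ c ".to_string()
    ))));
}
```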
+fn peeking_next_take_while( + chars: &mut State, + mut predicate: impl FnMut(char, Option) -> bool, +) -> String { + let mut s = String::new(); + while let Some(&ch) = chars.peek() { + let next_char = chars.peekable.clone().nth(1); + if predicate(ch, next_char) { + chars.next(); // consume + s.push(ch); + } else { + break; + } + } + s +} + fn unescape_single_quoted_string(chars: &mut State<'_>) -> Option { Unescape::new(chars).unescape() } @@ -1885,8 +2417,9 @@ fn take_char_from_hex_digits( mod tests { use super::*; use crate::dialect::{ - BigQueryDialect, ClickHouseDialect, HiveDialect, MsSqlDialect, MySqlDialect, + BigQueryDialect, ClickHouseDialect, HiveDialect, MsSqlDialect, MySqlDialect, SQLiteDialect, }; + use crate::test_utils::all_dialects_where; use core::fmt::Debug; #[test] @@ -1955,6 +2488,41 @@ mod tests { compare(expected, tokens); } + #[test] + fn tokenize_numeric_literal_underscore() { + let dialect = GenericDialect {}; + let sql = String::from("SELECT 10_000"); + let mut tokenizer = Tokenizer::new(&dialect, &sql); + let tokens = tokenizer.tokenize().unwrap(); + let expected = vec![ + Token::make_keyword("SELECT"), + Token::Whitespace(Whitespace::Space), + Token::Number("10".to_string(), false), + Token::make_word("_000", None), + ]; + compare(expected, tokens); + + all_dialects_where(|dialect| dialect.supports_numeric_literal_underscores()).tokenizes_to( + "SELECT 10_000, _10_000, 10_00_, 10___0", + vec![ + Token::make_keyword("SELECT"), + Token::Whitespace(Whitespace::Space), + Token::Number("10_000".to_string(), false), + Token::Comma, + Token::Whitespace(Whitespace::Space), + Token::make_word("_10_000", None), // leading underscore tokenizes as a word (parsed as column identifier) + Token::Comma, + Token::Whitespace(Whitespace::Space), + Token::Number("10_00".to_string(), false), + Token::make_word("_", None), // trailing underscores tokenizes as a word (syntax error in some dialects) + Token::Comma, + Token::Whitespace(Whitespace::Space), + Token::Number("10".to_string(), false), + Token::make_word("___0", None), // multiple underscores tokenizes as a word (syntax error in some dialects) + ], + ); + } + #[test] fn tokenize_select_exponent() { let sql = String::from("SELECT 1e10, 1e-10, 1e+10, 1ea, 1e-10a, 1e-10-10"); @@ -2289,20 +2857,67 @@ mod tests { #[test] fn tokenize_dollar_quoted_string_tagged() { - let sql = String::from( - "SELECT $tag$dollar '$' quoted strings have $tags like this$ or like this $$$tag$", - ); - let dialect = GenericDialect {}; - let tokens = Tokenizer::new(&dialect, &sql).tokenize().unwrap(); - let expected = vec![ - Token::make_keyword("SELECT"), - Token::Whitespace(Whitespace::Space), - Token::DollarQuotedString(DollarQuotedString { - value: "dollar '$' quoted strings have $tags like this$ or like this $$".into(), - tag: Some("tag".into()), - }), + let test_cases = vec![ + ( + String::from("SELECT $tag$dollar '$' quoted strings have $tags like this$ or like this $$$tag$"), + vec![ + Token::make_keyword("SELECT"), + Token::Whitespace(Whitespace::Space), + Token::DollarQuotedString(DollarQuotedString { + value: "dollar '$' quoted strings have $tags like this$ or like this $$".into(), + tag: Some("tag".into()), + }) + ] + ), + ( + String::from("SELECT $abc$x$ab$abc$"), + vec![ + Token::make_keyword("SELECT"), + Token::Whitespace(Whitespace::Space), + Token::DollarQuotedString(DollarQuotedString { + value: "x$ab".into(), + tag: Some("abc".into()), + }) + ] + ), + ( + String::from("SELECT $abc$$abc$"), + vec![ + Token::make_keyword("SELECT"), + 
Token::Whitespace(Whitespace::Space), + Token::DollarQuotedString(DollarQuotedString { + value: "".into(), + tag: Some("abc".into()), + }) + ] + ), + ( + String::from("0$abc$$abc$1"), + vec![ + Token::Number("0".into(), false), + Token::DollarQuotedString(DollarQuotedString { + value: "".into(), + tag: Some("abc".into()), + }), + Token::Number("1".into(), false), + ] + ), + ( + String::from("$function$abc$q$data$q$$function$"), + vec![ + Token::DollarQuotedString(DollarQuotedString { + value: "abc$q$data$q$".into(), + tag: Some("function".into()), + }), + ] + ), ]; - compare(expected, tokens); + + let dialect = GenericDialect {}; + for (sql, expected) in test_cases { + let tokens = Tokenizer::new(&dialect, &sql).tokenize().unwrap(); + compare(expected, tokens); + } } #[test] @@ -2321,6 +2936,78 @@ mod tests { ); } + #[test] + fn tokenize_dollar_quoted_string_tagged_unterminated_mirror() { + let sql = String::from("SELECT $abc$abc$"); + let dialect = GenericDialect {}; + assert_eq!( + Tokenizer::new(&dialect, &sql).tokenize(), + Err(TokenizerError { + message: "Unterminated dollar-quoted, expected $".into(), + location: Location { + line: 1, + column: 17 + } + }) + ); + } + + #[test] + fn tokenize_dollar_placeholder() { + let sql = String::from("SELECT $$, $$ABC$$, $ABC$, $ABC"); + let dialect = SQLiteDialect {}; + let tokens = Tokenizer::new(&dialect, &sql).tokenize().unwrap(); + assert_eq!( + tokens, + vec![ + Token::make_keyword("SELECT"), + Token::Whitespace(Whitespace::Space), + Token::Placeholder("$$".into()), + Token::Comma, + Token::Whitespace(Whitespace::Space), + Token::Placeholder("$$ABC$$".into()), + Token::Comma, + Token::Whitespace(Whitespace::Space), + Token::Placeholder("$ABC$".into()), + Token::Comma, + Token::Whitespace(Whitespace::Space), + Token::Placeholder("$ABC".into()), + ] + ); + } + + #[test] + fn tokenize_nested_dollar_quoted_strings() { + let sql = String::from("SELECT $tag$dollar $nested$ string$tag$"); + let dialect = GenericDialect {}; + let tokens = Tokenizer::new(&dialect, &sql).tokenize().unwrap(); + let expected = vec![ + Token::make_keyword("SELECT"), + Token::Whitespace(Whitespace::Space), + Token::DollarQuotedString(DollarQuotedString { + value: "dollar $nested$ string".into(), + tag: Some("tag".into()), + }), + ]; + compare(expected, tokens); + } + + #[test] + fn tokenize_dollar_quoted_string_untagged_empty() { + let sql = String::from("SELECT $$$$"); + let dialect = GenericDialect {}; + let tokens = Tokenizer::new(&dialect, &sql).tokenize().unwrap(); + let expected = vec![ + Token::make_keyword("SELECT"), + Token::Whitespace(Whitespace::Space), + Token::DollarQuotedString(DollarQuotedString { + value: "".into(), + tag: None, + }), + ]; + compare(expected, tokens); + } + #[test] fn tokenize_dollar_quoted_string_untagged() { let sql = @@ -2391,17 +3078,62 @@ mod tests { #[test] fn tokenize_comment() { - let sql = String::from("0--this is a comment\n1"); + let test_cases = vec![ + ( + String::from("0--this is a comment\n1"), + vec![ + Token::Number("0".to_string(), false), + Token::Whitespace(Whitespace::SingleLineComment { + prefix: "--".to_string(), + comment: "this is a comment\n".to_string(), + }), + Token::Number("1".to_string(), false), + ], + ), + ( + String::from("0--this is a comment\r1"), + vec![ + Token::Number("0".to_string(), false), + Token::Whitespace(Whitespace::SingleLineComment { + prefix: "--".to_string(), + comment: "this is a comment\r1".to_string(), + }), + ], + ), + ( + String::from("0--this is a comment\r\n1"), + vec![ + 
Token::Number("0".to_string(), false), + Token::Whitespace(Whitespace::SingleLineComment { + prefix: "--".to_string(), + comment: "this is a comment\r\n".to_string(), + }), + Token::Number("1".to_string(), false), + ], + ), + ]; let dialect = GenericDialect {}; + + for (sql, expected) in test_cases { + let tokens = Tokenizer::new(&dialect, &sql).tokenize().unwrap(); + compare(expected, tokens); + } + } + + #[test] + fn tokenize_comment_postgres() { + let sql = String::from("1--\r0"); + + let dialect = PostgreSqlDialect {}; let tokens = Tokenizer::new(&dialect, &sql).tokenize().unwrap(); let expected = vec![ - Token::Number("0".to_string(), false), + Token::Number("1".to_string(), false), Token::Whitespace(Whitespace::SingleLineComment { prefix: "--".to_string(), - comment: "this is a comment\n".to_string(), + comment: "\r".to_string(), }), - Token::Number("1".to_string(), false), + Token::Number("0".to_string(), false), ]; compare(expected, tokens); } @@ -2437,18 +3169,90 @@ mod tests { #[test] fn tokenize_nested_multiline_comment() { - let sql = String::from("0/*multi-line\n* \n/* comment \n /*comment*/*/ */ /comment*/1"); + let dialect = GenericDialect {}; + let test_cases = vec![ + ( + "0/*multi-line\n* \n/* comment \n /*comment*/*/ */ /comment*/1", + vec![ + Token::Number("0".to_string(), false), + Token::Whitespace(Whitespace::MultiLineComment( + "multi-line\n* \n/* comment \n /*comment*/*/ ".into(), + )), + Token::Whitespace(Whitespace::Space), + Token::Div, + Token::Word(Word { + value: "comment".to_string(), + quote_style: None, + keyword: Keyword::COMMENT, + }), + Token::Mul, + Token::Div, + Token::Number("1".to_string(), false), + ], + ), + ( + "0/*multi-line\n* \n/* comment \n /*comment/**/ */ /comment*/*/1", + vec![ + Token::Number("0".to_string(), false), + Token::Whitespace(Whitespace::MultiLineComment( + "multi-line\n* \n/* comment \n /*comment/**/ */ /comment*/".into(), + )), + Token::Number("1".to_string(), false), + ], + ), + ( + "SELECT 1/* a /* b */ c */0", + vec![ + Token::make_keyword("SELECT"), + Token::Whitespace(Whitespace::Space), + Token::Number("1".to_string(), false), + Token::Whitespace(Whitespace::MultiLineComment(" a /* b */ c ".to_string())), + Token::Number("0".to_string(), false), + ], + ), + ]; + + for (sql, expected) in test_cases { + let tokens = Tokenizer::new(&dialect, sql).tokenize().unwrap(); + compare(expected, tokens); + } + } + + #[test] + fn tokenize_nested_multiline_comment_empty() { + let sql = "select 1/*/**/*/0"; let dialect = GenericDialect {}; - let tokens = Tokenizer::new(&dialect, &sql).tokenize().unwrap(); + let tokens = Tokenizer::new(&dialect, sql).tokenize().unwrap(); let expected = vec![ + Token::make_keyword("select"), + Token::Whitespace(Whitespace::Space), + Token::Number("1".to_string(), false), + Token::Whitespace(Whitespace::MultiLineComment("/**/".to_string())), Token::Number("0".to_string(), false), + ]; + + compare(expected, tokens); + } + + #[test] + fn tokenize_nested_comments_if_not_supported() { + let dialect = SQLiteDialect {}; + let sql = "SELECT 1/*/* nested comment */*/0"; + let tokens = Tokenizer::new(&dialect, sql).tokenize(); + let expected = vec![ + Token::make_keyword("SELECT"), + Token::Whitespace(Whitespace::Space), + Token::Number("1".to_string(), false), Token::Whitespace(Whitespace::MultiLineComment( - "multi-line\n* \n/* comment \n /*comment*/*/ */ /comment".to_string(), + "/* nested comment ".to_string(), )), - Token::Number("1".to_string(), false), + Token::Mul, + Token::Div, + 
Token::Number("0".to_string(), false), ]; - compare(expected, tokens); + + compare(expected, tokens.unwrap()); } #[test] @@ -2668,18 +3472,30 @@ mod tests { .tokenize_with_location() .unwrap(); let expected = vec![ - TokenWithLocation::new(Token::make_keyword("SELECT"), 1, 1), - TokenWithLocation::new(Token::Whitespace(Whitespace::Space), 1, 7), - TokenWithLocation::new(Token::make_word("a", None), 1, 8), - TokenWithLocation::new(Token::Comma, 1, 9), - TokenWithLocation::new(Token::Whitespace(Whitespace::Newline), 1, 10), - TokenWithLocation::new(Token::Whitespace(Whitespace::Space), 2, 1), - TokenWithLocation::new(Token::make_word("b", None), 2, 2), + TokenWithSpan::at(Token::make_keyword("SELECT"), (1, 1).into(), (1, 7).into()), + TokenWithSpan::at( + Token::Whitespace(Whitespace::Space), + (1, 7).into(), + (1, 8).into(), + ), + TokenWithSpan::at(Token::make_word("a", None), (1, 8).into(), (1, 9).into()), + TokenWithSpan::at(Token::Comma, (1, 9).into(), (1, 10).into()), + TokenWithSpan::at( + Token::Whitespace(Whitespace::Newline), + (1, 10).into(), + (2, 1).into(), + ), + TokenWithSpan::at( + Token::Whitespace(Whitespace::Space), + (2, 1).into(), + (2, 2).into(), + ), + TokenWithSpan::at(Token::make_word("b", None), (2, 2).into(), (2, 3).into()), ]; compare(expected, tokens); } - fn compare(expected: Vec, actual: Vec) { + fn compare(expected: Vec, actual: Vec) { //println!("------------------------------"); //println!("tokens = {:?}", actual); //println!("expected = {:?}", expected); @@ -2821,6 +3637,9 @@ mod tests { (r#"'\\a\\b\'c'"#, r#"\\a\\b\'c"#, r#"\a\b'c"#), (r#"'\'abcd'"#, r#"\'abcd"#, r#"'abcd"#), (r#"'''a''b'"#, r#"''a''b"#, r#"'a'b"#), + (r#"'\q'"#, r#"\q"#, r#"q"#), + (r#"'\%\_'"#, r#"\%\_"#, r#"%_"#), + (r#"'\\%\\_'"#, r#"\\%\\_"#, r#"\%\_"#), ] { let tokens = Tokenizer::new(&dialect, sql) .with_unescape(false) @@ -2854,6 +3673,16 @@ mod tests { compare(expected, tokens); } + + // MySQL special case for LIKE escapes + for (sql, expected) in [(r#"'\%'"#, r#"\%"#), (r#"'\_'"#, r#"\_"#)] { + let dialect = MySqlDialect {}; + let tokens = Tokenizer::new(&dialect, sql).tokenize().unwrap(); + + let expected = vec![Token::SingleQuotedString(expected.to_string())]; + + compare(expected, tokens); + } } #[test] @@ -2975,4 +3804,270 @@ mod tests { let expected = vec![Token::SingleQuotedString("''".to_string())]; compare(expected, tokens); } + + #[test] + fn test_mysql_users_grantees() { + let dialect = MySqlDialect {}; + + let sql = "CREATE USER `root`@`%`"; + let tokens = Tokenizer::new(&dialect, sql).tokenize().unwrap(); + let expected = vec![ + Token::make_keyword("CREATE"), + Token::Whitespace(Whitespace::Space), + Token::make_keyword("USER"), + Token::Whitespace(Whitespace::Space), + Token::make_word("root", Some('`')), + Token::AtSign, + Token::make_word("%", Some('`')), + ]; + compare(expected, tokens); + } + + #[test] + fn test_postgres_abs_without_space_and_string_literal() { + let dialect = MySqlDialect {}; + + let sql = "SELECT @'1'"; + let tokens = Tokenizer::new(&dialect, sql).tokenize().unwrap(); + let expected = vec![ + Token::make_keyword("SELECT"), + Token::Whitespace(Whitespace::Space), + Token::AtSign, + Token::SingleQuotedString("1".to_string()), + ]; + compare(expected, tokens); + } + + #[test] + fn test_postgres_abs_without_space_and_quoted_column() { + let dialect = MySqlDialect {}; + + let sql = r#"SELECT @"bar" FROM foo"#; + let tokens = Tokenizer::new(&dialect, sql).tokenize().unwrap(); + let expected = vec![ + Token::make_keyword("SELECT"), + 
Token::Whitespace(Whitespace::Space), + Token::AtSign, + Token::DoubleQuotedString("bar".to_string()), + Token::Whitespace(Whitespace::Space), + Token::make_keyword("FROM"), + Token::Whitespace(Whitespace::Space), + Token::make_word("foo", None), + ]; + compare(expected, tokens); + } + + #[test] + fn test_national_strings_backslash_escape_not_supported() { + all_dialects_where(|dialect| !dialect.supports_string_literal_backslash_escape()) + .tokenizes_to( + "select n'''''\\'", + vec![ + Token::make_keyword("select"), + Token::Whitespace(Whitespace::Space), + Token::NationalStringLiteral("''\\".to_string()), + ], + ); + } + + #[test] + fn test_national_strings_backslash_escape_supported() { + all_dialects_where(|dialect| dialect.supports_string_literal_backslash_escape()) + .tokenizes_to( + "select n'''''\\''", + vec![ + Token::make_keyword("select"), + Token::Whitespace(Whitespace::Space), + Token::NationalStringLiteral("'''".to_string()), + ], + ); + } + + #[test] + fn test_string_escape_constant_not_supported() { + all_dialects_where(|dialect| !dialect.supports_string_escape_constant()).tokenizes_to( + "select e'...'", + vec![ + Token::make_keyword("select"), + Token::Whitespace(Whitespace::Space), + Token::make_word("e", None), + Token::SingleQuotedString("...".to_string()), + ], + ); + + all_dialects_where(|dialect| !dialect.supports_string_escape_constant()).tokenizes_to( + "select E'...'", + vec![ + Token::make_keyword("select"), + Token::Whitespace(Whitespace::Space), + Token::make_word("E", None), + Token::SingleQuotedString("...".to_string()), + ], + ); + } + + #[test] + fn test_string_escape_constant_supported() { + all_dialects_where(|dialect| dialect.supports_string_escape_constant()).tokenizes_to( + "select e'\\''", + vec![ + Token::make_keyword("select"), + Token::Whitespace(Whitespace::Space), + Token::EscapedStringLiteral("'".to_string()), + ], + ); + + all_dialects_where(|dialect| dialect.supports_string_escape_constant()).tokenizes_to( + "select E'\\''", + vec![ + Token::make_keyword("select"), + Token::Whitespace(Whitespace::Space), + Token::EscapedStringLiteral("'".to_string()), + ], + ); + } + + #[test] + fn test_whitespace_required_after_single_line_comment() { + all_dialects_where(|dialect| dialect.requires_single_line_comment_whitespace()) + .tokenizes_to( + "SELECT --'abc'", + vec![ + Token::make_keyword("SELECT"), + Token::Whitespace(Whitespace::Space), + Token::Minus, + Token::Minus, + Token::SingleQuotedString("abc".to_string()), + ], + ); + + all_dialects_where(|dialect| dialect.requires_single_line_comment_whitespace()) + .tokenizes_to( + "SELECT -- 'abc'", + vec![ + Token::make_keyword("SELECT"), + Token::Whitespace(Whitespace::Space), + Token::Whitespace(Whitespace::SingleLineComment { + prefix: "--".to_string(), + comment: " 'abc'".to_string(), + }), + ], + ); + + all_dialects_where(|dialect| dialect.requires_single_line_comment_whitespace()) + .tokenizes_to( + "SELECT --", + vec![ + Token::make_keyword("SELECT"), + Token::Whitespace(Whitespace::Space), + Token::Minus, + Token::Minus, + ], + ); + } + + #[test] + fn test_whitespace_not_required_after_single_line_comment() { + all_dialects_where(|dialect| !dialect.requires_single_line_comment_whitespace()) + .tokenizes_to( + "SELECT --'abc'", + vec![ + Token::make_keyword("SELECT"), + Token::Whitespace(Whitespace::Space), + Token::Whitespace(Whitespace::SingleLineComment { + prefix: "--".to_string(), + comment: "'abc'".to_string(), + }), + ], + ); + + all_dialects_where(|dialect| 
!dialect.requires_single_line_comment_whitespace()) + .tokenizes_to( + "SELECT -- 'abc'", + vec![ + Token::make_keyword("SELECT"), + Token::Whitespace(Whitespace::Space), + Token::Whitespace(Whitespace::SingleLineComment { + prefix: "--".to_string(), + comment: " 'abc'".to_string(), + }), + ], + ); + + all_dialects_where(|dialect| !dialect.requires_single_line_comment_whitespace()) + .tokenizes_to( + "SELECT --", + vec![ + Token::make_keyword("SELECT"), + Token::Whitespace(Whitespace::Space), + Token::Whitespace(Whitespace::SingleLineComment { + prefix: "--".to_string(), + comment: "".to_string(), + }), + ], + ); + } + + #[test] + fn test_tokenize_identifiers_numeric_prefix() { + all_dialects_where(|dialect| dialect.supports_numeric_prefix()) + .tokenizes_to("123abc", vec![Token::make_word("123abc", None)]); + + all_dialects_where(|dialect| dialect.supports_numeric_prefix()) + .tokenizes_to("12e34", vec![Token::Number("12e34".to_string(), false)]); + + all_dialects_where(|dialect| dialect.supports_numeric_prefix()).tokenizes_to( + "t.12e34", + vec![ + Token::make_word("t", None), + Token::Period, + Token::make_word("12e34", None), + ], + ); + + all_dialects_where(|dialect| dialect.supports_numeric_prefix()).tokenizes_to( + "t.1two3", + vec![ + Token::make_word("t", None), + Token::Period, + Token::make_word("1two3", None), + ], + ); + } + + #[test] + fn tokenize_period_underscore() { + let sql = String::from("SELECT table._col"); + // a dialect that supports underscores in numeric literals + let dialect = PostgreSqlDialect {}; + let tokens = Tokenizer::new(&dialect, &sql).tokenize().unwrap(); + + let expected = vec![ + Token::make_keyword("SELECT"), + Token::Whitespace(Whitespace::Space), + Token::Word(Word { + value: "table".to_string(), + quote_style: None, + keyword: Keyword::TABLE, + }), + Token::Period, + Token::Word(Word { + value: "_col".to_string(), + quote_style: None, + keyword: Keyword::NoKeyword, + }), + ]; + + compare(expected, tokens); + + let sql = String::from("SELECT ._123"); + if let Ok(tokens) = Tokenizer::new(&dialect, &sql).tokenize() { + panic!("Tokenizer should have failed on {sql}, but it succeeded with {tokens:?}"); + } + + let sql = String::from("SELECT ._abc"); + if let Ok(tokens) = Tokenizer::new(&dialect, &sql).tokenize() { + panic!("Tokenizer should have failed on {sql}, but it succeeded with {tokens:?}"); + } + } } diff --git a/tests/queries/tpch/1.sql b/tests/queries/tpch/1.sql index ae44c94d2..89232181e 100644 --- a/tests/queries/tpch/1.sql +++ b/tests/queries/tpch/1.sql @@ -1,3 +1,21 @@ +-- Licensed to the Apache Software Foundation (ASF) under one +-- or more contributor license agreements. See the NOTICE file +-- distributed with this work for additional information +-- regarding copyright ownership. The ASF licenses this file +-- to you under the Apache License, Version 2.0 (the +-- "License"); you may not use this file except in compliance +-- with the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, +-- software distributed under the License is distributed on an +-- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +-- KIND, either express or implied. See the License for the +-- specific language governing permissions and limitations +-- under the License. 
+ + select l_returnflag, l_linestatus, diff --git a/tests/queries/tpch/10.sql b/tests/queries/tpch/10.sql index a8de12995..06f6256b4 100644 --- a/tests/queries/tpch/10.sql +++ b/tests/queries/tpch/10.sql @@ -1,3 +1,20 @@ +-- Licensed to the Apache Software Foundation (ASF) under one +-- or more contributor license agreements. See the NOTICE file +-- distributed with this work for additional information +-- regarding copyright ownership. The ASF licenses this file +-- to you under the Apache License, Version 2.0 (the +-- "License"); you may not use this file except in compliance +-- with the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, +-- software distributed under the License is distributed on an +-- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +-- KIND, either express or implied. See the License for the +-- specific language governing permissions and limitations +-- under the License. + -- using default substitutions diff --git a/tests/queries/tpch/11.sql b/tests/queries/tpch/11.sql index f9cf254b5..6115eb59d 100644 --- a/tests/queries/tpch/11.sql +++ b/tests/queries/tpch/11.sql @@ -1,3 +1,20 @@ +-- Licensed to the Apache Software Foundation (ASF) under one +-- or more contributor license agreements. See the NOTICE file +-- distributed with this work for additional information +-- regarding copyright ownership. The ASF licenses this file +-- to you under the Apache License, Version 2.0 (the +-- "License"); you may not use this file except in compliance +-- with the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, +-- software distributed under the License is distributed on an +-- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +-- KIND, either express or implied. See the License for the +-- specific language governing permissions and limitations +-- under the License. + -- using default substitutions diff --git a/tests/queries/tpch/12.sql b/tests/queries/tpch/12.sql index ca9c494e7..0ffdb6668 100644 --- a/tests/queries/tpch/12.sql +++ b/tests/queries/tpch/12.sql @@ -1,3 +1,20 @@ +-- Licensed to the Apache Software Foundation (ASF) under one +-- or more contributor license agreements. See the NOTICE file +-- distributed with this work for additional information +-- regarding copyright ownership. The ASF licenses this file +-- to you under the Apache License, Version 2.0 (the +-- "License"); you may not use this file except in compliance +-- with the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, +-- software distributed under the License is distributed on an +-- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +-- KIND, either express or implied. See the License for the +-- specific language governing permissions and limitations +-- under the License. + -- using default substitutions diff --git a/tests/queries/tpch/13.sql b/tests/queries/tpch/13.sql index 32b0ebeb9..64572aba8 100644 --- a/tests/queries/tpch/13.sql +++ b/tests/queries/tpch/13.sql @@ -1,3 +1,20 @@ +-- Licensed to the Apache Software Foundation (ASF) under one +-- or more contributor license agreements. See the NOTICE file +-- distributed with this work for additional information +-- regarding copyright ownership. 
The ASF licenses this file +-- to you under the Apache License, Version 2.0 (the +-- "License"); you may not use this file except in compliance +-- with the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, +-- software distributed under the License is distributed on an +-- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +-- KIND, either express or implied. See the License for the +-- specific language governing permissions and limitations +-- under the License. + -- using default substitutions diff --git a/tests/queries/tpch/14.sql b/tests/queries/tpch/14.sql index 74f9643ef..abe7f2ea5 100644 --- a/tests/queries/tpch/14.sql +++ b/tests/queries/tpch/14.sql @@ -1,3 +1,20 @@ +-- Licensed to the Apache Software Foundation (ASF) under one +-- or more contributor license agreements. See the NOTICE file +-- distributed with this work for additional information +-- regarding copyright ownership. The ASF licenses this file +-- to you under the Apache License, Version 2.0 (the +-- "License"); you may not use this file except in compliance +-- with the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, +-- software distributed under the License is distributed on an +-- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +-- KIND, either express or implied. See the License for the +-- specific language governing permissions and limitations +-- under the License. + -- using default substitutions diff --git a/tests/queries/tpch/15.sql b/tests/queries/tpch/15.sql index 8b3b8c1ef..16646fb60 100644 --- a/tests/queries/tpch/15.sql +++ b/tests/queries/tpch/15.sql @@ -1,3 +1,20 @@ +-- Licensed to the Apache Software Foundation (ASF) under one +-- or more contributor license agreements. See the NOTICE file +-- distributed with this work for additional information +-- regarding copyright ownership. The ASF licenses this file +-- to you under the Apache License, Version 2.0 (the +-- "License"); you may not use this file except in compliance +-- with the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, +-- software distributed under the License is distributed on an +-- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +-- KIND, either express or implied. See the License for the +-- specific language governing permissions and limitations +-- under the License. + -- using default substitutions create view revenue0 (supplier_no, total_revenue) as diff --git a/tests/queries/tpch/16.sql b/tests/queries/tpch/16.sql index a0412fcbb..e7e489796 100644 --- a/tests/queries/tpch/16.sql +++ b/tests/queries/tpch/16.sql @@ -1,3 +1,20 @@ +-- Licensed to the Apache Software Foundation (ASF) under one +-- or more contributor license agreements. See the NOTICE file +-- distributed with this work for additional information +-- regarding copyright ownership. The ASF licenses this file +-- to you under the Apache License, Version 2.0 (the +-- "License"); you may not use this file except in compliance +-- with the License. 
You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, +-- software distributed under the License is distributed on an +-- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +-- KIND, either express or implied. See the License for the +-- specific language governing permissions and limitations +-- under the License. + -- using default substitutions diff --git a/tests/queries/tpch/17.sql b/tests/queries/tpch/17.sql index d59bc18ab..d895fb27f 100644 --- a/tests/queries/tpch/17.sql +++ b/tests/queries/tpch/17.sql @@ -1,3 +1,20 @@ +-- Licensed to the Apache Software Foundation (ASF) under one +-- or more contributor license agreements. See the NOTICE file +-- distributed with this work for additional information +-- regarding copyright ownership. The ASF licenses this file +-- to you under the Apache License, Version 2.0 (the +-- "License"); you may not use this file except in compliance +-- with the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, +-- software distributed under the License is distributed on an +-- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +-- KIND, either express or implied. See the License for the +-- specific language governing permissions and limitations +-- under the License. + -- using default substitutions diff --git a/tests/queries/tpch/18.sql b/tests/queries/tpch/18.sql index e07956fed..54b8b85d3 100644 --- a/tests/queries/tpch/18.sql +++ b/tests/queries/tpch/18.sql @@ -1,3 +1,20 @@ +-- Licensed to the Apache Software Foundation (ASF) under one +-- or more contributor license agreements. See the NOTICE file +-- distributed with this work for additional information +-- regarding copyright ownership. The ASF licenses this file +-- to you under the Apache License, Version 2.0 (the +-- "License"); you may not use this file except in compliance +-- with the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, +-- software distributed under the License is distributed on an +-- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +-- KIND, either express or implied. See the License for the +-- specific language governing permissions and limitations +-- under the License. + -- using default substitutions diff --git a/tests/queries/tpch/19.sql b/tests/queries/tpch/19.sql index 908e08297..1aca74329 100644 --- a/tests/queries/tpch/19.sql +++ b/tests/queries/tpch/19.sql @@ -1,3 +1,20 @@ +-- Licensed to the Apache Software Foundation (ASF) under one +-- or more contributor license agreements. See the NOTICE file +-- distributed with this work for additional information +-- regarding copyright ownership. The ASF licenses this file +-- to you under the Apache License, Version 2.0 (the +-- "License"); you may not use this file except in compliance +-- with the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, +-- software distributed under the License is distributed on an +-- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +-- KIND, either express or implied. See the License for the +-- specific language governing permissions and limitations +-- under the License. 
+ -- using default substitutions diff --git a/tests/queries/tpch/2.sql b/tests/queries/tpch/2.sql index f04c1d497..9426b91d6 100644 --- a/tests/queries/tpch/2.sql +++ b/tests/queries/tpch/2.sql @@ -1,5 +1,21 @@ --- using default substitutions +-- Licensed to the Apache Software Foundation (ASF) under one +-- or more contributor license agreements. See the NOTICE file +-- distributed with this work for additional information +-- regarding copyright ownership. The ASF licenses this file +-- to you under the Apache License, Version 2.0 (the +-- "License"); you may not use this file except in compliance +-- with the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, +-- software distributed under the License is distributed on an +-- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +-- KIND, either express or implied. See the License for the +-- specific language governing permissions and limitations +-- under the License. +-- using default substitutions select s_acctbal, diff --git a/tests/queries/tpch/20.sql b/tests/queries/tpch/20.sql index 7aaabc2d5..6f06dcb7a 100644 --- a/tests/queries/tpch/20.sql +++ b/tests/queries/tpch/20.sql @@ -1,3 +1,20 @@ +-- Licensed to the Apache Software Foundation (ASF) under one +-- or more contributor license agreements. See the NOTICE file +-- distributed with this work for additional information +-- regarding copyright ownership. The ASF licenses this file +-- to you under the Apache License, Version 2.0 (the +-- "License"); you may not use this file except in compliance +-- with the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, +-- software distributed under the License is distributed on an +-- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +-- KIND, either express or implied. See the License for the +-- specific language governing permissions and limitations +-- under the License. + -- using default substitutions diff --git a/tests/queries/tpch/21.sql b/tests/queries/tpch/21.sql index 5a287f9a3..58879495f 100644 --- a/tests/queries/tpch/21.sql +++ b/tests/queries/tpch/21.sql @@ -1,3 +1,20 @@ +-- Licensed to the Apache Software Foundation (ASF) under one +-- or more contributor license agreements. See the NOTICE file +-- distributed with this work for additional information +-- regarding copyright ownership. The ASF licenses this file +-- to you under the Apache License, Version 2.0 (the +-- "License"); you may not use this file except in compliance +-- with the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, +-- software distributed under the License is distributed on an +-- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +-- KIND, either express or implied. See the License for the +-- specific language governing permissions and limitations +-- under the License. + -- using default substitutions diff --git a/tests/queries/tpch/22.sql b/tests/queries/tpch/22.sql index 1fc8523ad..2a7f45e95 100644 --- a/tests/queries/tpch/22.sql +++ b/tests/queries/tpch/22.sql @@ -1,3 +1,20 @@ +-- Licensed to the Apache Software Foundation (ASF) under one +-- or more contributor license agreements. 
See the NOTICE file +-- distributed with this work for additional information +-- regarding copyright ownership. The ASF licenses this file +-- to you under the Apache License, Version 2.0 (the +-- "License"); you may not use this file except in compliance +-- with the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, +-- software distributed under the License is distributed on an +-- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +-- KIND, either express or implied. See the License for the +-- specific language governing permissions and limitations +-- under the License. + -- using default substitutions diff --git a/tests/queries/tpch/3.sql b/tests/queries/tpch/3.sql index 710aac520..df50cf760 100644 --- a/tests/queries/tpch/3.sql +++ b/tests/queries/tpch/3.sql @@ -1,3 +1,20 @@ +-- Licensed to the Apache Software Foundation (ASF) under one +-- or more contributor license agreements. See the NOTICE file +-- distributed with this work for additional information +-- regarding copyright ownership. The ASF licenses this file +-- to you under the Apache License, Version 2.0 (the +-- "License"); you may not use this file except in compliance +-- with the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, +-- software distributed under the License is distributed on an +-- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +-- KIND, either express or implied. See the License for the +-- specific language governing permissions and limitations +-- under the License. + -- using default substitutions diff --git a/tests/queries/tpch/4.sql b/tests/queries/tpch/4.sql index e5adb9349..e4a84f79c 100644 --- a/tests/queries/tpch/4.sql +++ b/tests/queries/tpch/4.sql @@ -1,3 +1,20 @@ +-- Licensed to the Apache Software Foundation (ASF) under one +-- or more contributor license agreements. See the NOTICE file +-- distributed with this work for additional information +-- regarding copyright ownership. The ASF licenses this file +-- to you under the Apache License, Version 2.0 (the +-- "License"); you may not use this file except in compliance +-- with the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, +-- software distributed under the License is distributed on an +-- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +-- KIND, either express or implied. See the License for the +-- specific language governing permissions and limitations +-- under the License. + -- using default substitutions diff --git a/tests/queries/tpch/5.sql b/tests/queries/tpch/5.sql index ea376576f..9b92b30ed 100644 --- a/tests/queries/tpch/5.sql +++ b/tests/queries/tpch/5.sql @@ -1,3 +1,20 @@ +-- Licensed to the Apache Software Foundation (ASF) under one +-- or more contributor license agreements. See the NOTICE file +-- distributed with this work for additional information +-- regarding copyright ownership. The ASF licenses this file +-- to you under the Apache License, Version 2.0 (the +-- "License"); you may not use this file except in compliance +-- with the License. 
You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, +-- software distributed under the License is distributed on an +-- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +-- KIND, either express or implied. See the License for the +-- specific language governing permissions and limitations +-- under the License. + -- using default substitutions diff --git a/tests/queries/tpch/6.sql b/tests/queries/tpch/6.sql index 949b7b16a..709fd201f 100644 --- a/tests/queries/tpch/6.sql +++ b/tests/queries/tpch/6.sql @@ -1,5 +1,22 @@ --- using default substitutions +-- Licensed to the Apache Software Foundation (ASF) under one +-- or more contributor license agreements. See the NOTICE file +-- distributed with this work for additional information +-- regarding copyright ownership. The ASF licenses this file +-- to you under the Apache License, Version 2.0 (the +-- "License"); you may not use this file except in compliance +-- with the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, +-- software distributed under the License is distributed on an +-- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +-- KIND, either express or implied. See the License for the +-- specific language governing permissions and limitations +-- under the License. + +-- using default substitutions select sum(l_extendedprice * l_discount) as revenue diff --git a/tests/queries/tpch/7.sql b/tests/queries/tpch/7.sql index 85dd8f9c8..1379dbb47 100644 --- a/tests/queries/tpch/7.sql +++ b/tests/queries/tpch/7.sql @@ -1,3 +1,20 @@ +-- Licensed to the Apache Software Foundation (ASF) under one +-- or more contributor license agreements. See the NOTICE file +-- distributed with this work for additional information +-- regarding copyright ownership. The ASF licenses this file +-- to you under the Apache License, Version 2.0 (the +-- "License"); you may not use this file except in compliance +-- with the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, +-- software distributed under the License is distributed on an +-- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +-- KIND, either express or implied. See the License for the +-- specific language governing permissions and limitations +-- under the License. + -- using default substitutions diff --git a/tests/queries/tpch/8.sql b/tests/queries/tpch/8.sql index e6e4d30b8..22ae8b906 100644 --- a/tests/queries/tpch/8.sql +++ b/tests/queries/tpch/8.sql @@ -1,3 +1,20 @@ +-- Licensed to the Apache Software Foundation (ASF) under one +-- or more contributor license agreements. See the NOTICE file +-- distributed with this work for additional information +-- regarding copyright ownership. The ASF licenses this file +-- to you under the Apache License, Version 2.0 (the +-- "License"); you may not use this file except in compliance +-- with the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, +-- software distributed under the License is distributed on an +-- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +-- KIND, either express or implied. 
See the License for the +-- specific language governing permissions and limitations +-- under the License. + -- using default substitutions diff --git a/tests/queries/tpch/9.sql b/tests/queries/tpch/9.sql index f9eaf65ee..fa957b052 100644 --- a/tests/queries/tpch/9.sql +++ b/tests/queries/tpch/9.sql @@ -1,3 +1,20 @@ +-- Licensed to the Apache Software Foundation (ASF) under one +-- or more contributor license agreements. See the NOTICE file +-- distributed with this work for additional information +-- regarding copyright ownership. The ASF licenses this file +-- to you under the Apache License, Version 2.0 (the +-- "License"); you may not use this file except in compliance +-- with the License. You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, +-- software distributed under the License is distributed on an +-- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +-- KIND, either express or implied. See the License for the +-- specific language governing permissions and limitations +-- under the License. + -- using default substitutions diff --git a/tests/sqlparser_bigquery.rs b/tests/sqlparser_bigquery.rs index 2bf470f71..8f54f3c97 100644 --- a/tests/sqlparser_bigquery.rs +++ b/tests/sqlparser_bigquery.rs @@ -23,124 +23,151 @@ use std::ops::Deref; use sqlparser::ast::*; use sqlparser::dialect::{BigQueryDialect, GenericDialect}; use sqlparser::parser::{ParserError, ParserOptions}; +use sqlparser::tokenizer::{Location, Span}; use test_utils::*; #[test] fn parse_literal_string() { let sql = concat!( - "SELECT ", - "'single', ", - r#""double", "#, - "'''triple-single''', ", - r#""""triple-double""", "#, - r#"'single\'escaped', "#, - r#"'''triple-single\'escaped''', "#, - r#"'''triple-single'unescaped''', "#, - r#""double\"escaped", "#, - r#""""triple-double\"escaped""", "#, - r#""""triple-double"unescaped""""#, + "SELECT ", // line 1, column 1 + "'single', ", // line 1, column 7 + r#""double", "#, // line 1, column 14 + "'''triple-single''', ", // line 1, column 22 + r#""""triple-double""", "#, // line 1, column 33 + r#"'single\'escaped', "#, // line 1, column 43 + r#"'''triple-single\'escaped''', "#, // line 1, column 55 + r#"'''triple-single'unescaped''', "#, // line 1, column 68 + r#""double\"escaped", "#, // line 1, column 83 + r#""""triple-double\"escaped""", "#, // line 1, column 92 + r#""""triple-double"unescaped""", "#, // line 1, column 105 + r#""""triple-double'unescaped""", "#, // line 1, column 118 + r#"'''triple-single"unescaped'''"#, // line 1, column 131 ); let dialect = TestedDialects::new_with_options( vec![Box::new(BigQueryDialect {})], ParserOptions::new().with_unescape(false), ); let select = dialect.verified_only_select(sql); - assert_eq!(10, select.projection.len()); + assert_eq!(12, select.projection.len()); assert_eq!( - &Expr::Value(Value::SingleQuotedString("single".to_string())), + &Expr::Value(Value::SingleQuotedString("single".into()).with_empty_span()), expr_from_projection(&select.projection[0]) ); assert_eq!( - &Expr::Value(Value::DoubleQuotedString("double".to_string())), + &Expr::Value(Value::DoubleQuotedString("double".into()).with_empty_span()), expr_from_projection(&select.projection[1]) ); assert_eq!( - &Expr::Value(Value::TripleSingleQuotedString("triple-single".to_string())), + &Expr::Value(Value::TripleSingleQuotedString("triple-single".into()).with_empty_span()), expr_from_projection(&select.projection[2]) ); assert_eq!( - 
&Expr::Value(Value::TripleDoubleQuotedString("triple-double".to_string())), + &Expr::Value(Value::TripleDoubleQuotedString("triple-double".into()).with_empty_span()), expr_from_projection(&select.projection[3]) ); assert_eq!( - &Expr::Value(Value::SingleQuotedString(r#"single\'escaped"#.to_string())), + &Expr::Value(Value::SingleQuotedString(r#"single\'escaped"#.into()).with_empty_span()), expr_from_projection(&select.projection[4]) ); assert_eq!( - &Expr::Value(Value::TripleSingleQuotedString( - r#"triple-single\'escaped"#.to_string() - )), + &Expr::Value( + Value::TripleSingleQuotedString(r#"triple-single\'escaped"#.into()).with_empty_span() + ), expr_from_projection(&select.projection[5]) ); assert_eq!( - &Expr::Value(Value::TripleSingleQuotedString( - r#"triple-single'unescaped"#.to_string() - )), + &Expr::Value( + Value::TripleSingleQuotedString(r#"triple-single'unescaped"#.into()).with_empty_span() + ), expr_from_projection(&select.projection[6]) ); assert_eq!( - &Expr::Value(Value::DoubleQuotedString(r#"double\"escaped"#.to_string())), + &Expr::Value(Value::DoubleQuotedString(r#"double\"escaped"#.to_string()).with_empty_span()), expr_from_projection(&select.projection[7]) ); assert_eq!( - &Expr::Value(Value::TripleDoubleQuotedString( - r#"triple-double\"escaped"#.to_string() - )), + &Expr::Value( + Value::TripleDoubleQuotedString(r#"triple-double\"escaped"#.to_string()) + .with_empty_span() + ), expr_from_projection(&select.projection[8]) ); assert_eq!( - &Expr::Value(Value::TripleDoubleQuotedString( - r#"triple-double"unescaped"#.to_string() - )), + &Expr::Value( + Value::TripleDoubleQuotedString(r#"triple-double"unescaped"#.to_string()) + .with_empty_span() + ), expr_from_projection(&select.projection[9]) ); + assert_eq!( + &Expr::Value( + Value::TripleDoubleQuotedString(r#"triple-double'unescaped"#.to_string()) + .with_empty_span() + ), + expr_from_projection(&select.projection[10]) + ); + assert_eq!( + &Expr::Value( + Value::TripleSingleQuotedString(r#"triple-single"unescaped"#.to_string()) + .with_empty_span() + ), + expr_from_projection(&select.projection[11]) + ); } #[test] fn parse_byte_literal() { let sql = concat!( - "SELECT ", - "B'abc', ", - r#"B"abc", "#, - r#"B'f\(abc,(.*),def\)', "#, - r#"B"f\(abc,(.*),def\)", "#, - r#"B'''abc''', "#, - r#"B"""abc""""#, + "SELECT ", // line 1, column 1 + "B'abc', ", // line 1, column 8 + r#"B"abc", "#, // line 1, column 15 + r#"B'f\(abc,(.*),def\)', "#, // line 1, column 22 + r#"B"f\(abc,(.*),def\)", "#, // line 1, column 42 + r#"B'''abc''', "#, // line 1, column 62 + r#"B"""abc""""#, // line 1, column 74 ); let stmt = bigquery().verified_stmt(sql); if let Statement::Query(query) = stmt { if let SetExpr::Select(select) = *query.body { assert_eq!(6, select.projection.len()); assert_eq!( - &Expr::Value(Value::SingleQuotedByteStringLiteral("abc".to_string())), + &Expr::Value( + Value::SingleQuotedByteStringLiteral("abc".to_string()).with_empty_span() + ), expr_from_projection(&select.projection[0]) ); assert_eq!( - &Expr::Value(Value::DoubleQuotedByteStringLiteral("abc".to_string())), + &Expr::Value( + Value::DoubleQuotedByteStringLiteral("abc".to_string()).with_empty_span() + ), expr_from_projection(&select.projection[1]) ); assert_eq!( - &Expr::Value(Value::SingleQuotedByteStringLiteral( - r"f\(abc,(.*),def\)".to_string() - )), + &Expr::Value( + Value::SingleQuotedByteStringLiteral(r"f\(abc,(.*),def\)".to_string()) + .with_empty_span() + ), expr_from_projection(&select.projection[2]) ); assert_eq!( - 
&Expr::Value(Value::DoubleQuotedByteStringLiteral( - r"f\(abc,(.*),def\)".to_string() - )), + &Expr::Value( + Value::DoubleQuotedByteStringLiteral(r"f\(abc,(.*),def\)".to_string()) + .with_empty_span() + ), expr_from_projection(&select.projection[3]) ); assert_eq!( - &Expr::Value(Value::TripleSingleQuotedByteStringLiteral( - r"abc".to_string() - )), + &Expr::Value( + Value::TripleSingleQuotedByteStringLiteral(r"abc".to_string()) + .with_empty_span() + ), expr_from_projection(&select.projection[4]) ); assert_eq!( - &Expr::Value(Value::TripleDoubleQuotedByteStringLiteral( - r"abc".to_string() - )), + &Expr::Value( + Value::TripleDoubleQuotedByteStringLiteral(r"abc".to_string()) + .with_empty_span() + ), expr_from_projection(&select.projection[5]) ); } @@ -157,48 +184,54 @@ fn parse_byte_literal() { #[test] fn parse_raw_literal() { let sql = concat!( - "SELECT ", - "R'abc', ", - r#"R"abc", "#, - r#"R'f\(abc,(.*),def\)', "#, - r#"R"f\(abc,(.*),def\)", "#, - r#"R'''abc''', "#, - r#"R"""abc""""#, + "SELECT ", // line 1, column 1 + "R'abc', ", // line 1, column 8 + r#"R"abc", "#, // line 1, column 15 + r#"R'f\(abc,(.*),def\)', "#, // line 1, column 22 + r#"R"f\(abc,(.*),def\)", "#, // line 1, column 42 + r#"R'''abc''', "#, // line 1, column 62 + r#"R"""abc""""#, // line 1, column 74 ); let stmt = bigquery().verified_stmt(sql); if let Statement::Query(query) = stmt { if let SetExpr::Select(select) = *query.body { assert_eq!(6, select.projection.len()); assert_eq!( - &Expr::Value(Value::SingleQuotedRawStringLiteral("abc".to_string())), + &Expr::Value( + Value::SingleQuotedRawStringLiteral("abc".to_string()).with_empty_span() + ), expr_from_projection(&select.projection[0]) ); assert_eq!( - &Expr::Value(Value::DoubleQuotedRawStringLiteral("abc".to_string())), + &Expr::Value( + Value::DoubleQuotedRawStringLiteral("abc".to_string()).with_empty_span() + ), expr_from_projection(&select.projection[1]) ); assert_eq!( - &Expr::Value(Value::SingleQuotedRawStringLiteral( - r"f\(abc,(.*),def\)".to_string() - )), + &Expr::Value( + Value::SingleQuotedRawStringLiteral(r"f\(abc,(.*),def\)".to_string()) + .with_empty_span() + ), expr_from_projection(&select.projection[2]) ); assert_eq!( - &Expr::Value(Value::DoubleQuotedRawStringLiteral( - r"f\(abc,(.*),def\)".to_string() - )), + &Expr::Value( + Value::DoubleQuotedRawStringLiteral(r"f\(abc,(.*),def\)".to_string()) + .with_empty_span() + ), expr_from_projection(&select.projection[3]) ); assert_eq!( - &Expr::Value(Value::TripleSingleQuotedRawStringLiteral( - r"abc".to_string() - )), + &Expr::Value( + Value::TripleSingleQuotedRawStringLiteral(r"abc".to_string()).with_empty_span() + ), expr_from_projection(&select.projection[4]) ); assert_eq!( - &Expr::Value(Value::TripleDoubleQuotedRawStringLiteral( - r"abc".to_string() - )), + &Expr::Value( + Value::TripleDoubleQuotedRawStringLiteral(r"abc".to_string()).with_empty_span() + ), expr_from_projection(&select.projection[5]) ); } @@ -212,6 +245,61 @@ fn parse_raw_literal() { ); } +#[test] +fn parse_big_query_non_reserved_column_alias() { + let sql = r#"SELECT OFFSET, EXPLAIN, ANALYZE, SORT, TOP, VIEW FROM T"#; + bigquery().verified_stmt(sql); + + let sql = r#"SELECT 1 AS OFFSET, 2 AS EXPLAIN, 3 AS ANALYZE FROM T"#; + bigquery().verified_stmt(sql); +} + +#[test] +fn parse_at_at_identifier() { + bigquery().verified_stmt("SELECT @@error.stack_trace, @@error.message"); +} + +#[test] +fn parse_begin() { + let sql = r#"BEGIN SELECT 1; EXCEPTION WHEN ERROR THEN SELECT 2; END"#; + let Statement::StartTransaction { + statements, + 
exception_statements, + has_end_keyword, + .. + } = bigquery().verified_stmt(sql) + else { + unreachable!(); + }; + assert_eq!(1, statements.len()); + assert_eq!(1, exception_statements.unwrap().len()); + assert!(has_end_keyword); + + bigquery().verified_stmt( + "BEGIN SELECT 1; SELECT 2; EXCEPTION WHEN ERROR THEN SELECT 2; SELECT 4; END", + ); + bigquery() + .verified_stmt("BEGIN SELECT 1; EXCEPTION WHEN ERROR THEN SELECT @@error.stack_trace; END"); + bigquery().verified_stmt("BEGIN EXCEPTION WHEN ERROR THEN SELECT 2; END"); + bigquery().verified_stmt("BEGIN SELECT 1; SELECT 2; EXCEPTION WHEN ERROR THEN END"); + bigquery().verified_stmt("BEGIN EXCEPTION WHEN ERROR THEN END"); + bigquery().verified_stmt("BEGIN SELECT 1; SELECT 2; END"); + bigquery().verified_stmt("BEGIN END"); + + assert_eq!( + bigquery() + .parse_sql_statements("BEGIN SELECT 1; SELECT 2 END") + .unwrap_err(), + ParserError::ParserError("Expected: ;, found: END".to_string()) + ); + assert_eq!( + bigquery() + .parse_sql_statements("BEGIN SELECT 1; EXCEPTION WHEN ERROR THEN SELECT 2 END") + .unwrap_err(), + ParserError::ParserError("Expected: ;, found: END".to_string()) + ); +} + #[test] fn parse_delete_statement() { let sql = "DELETE \"table\" WHERE 1"; @@ -221,15 +309,7 @@ fn parse_delete_statement() { .. }) => { assert_eq!( - TableFactor::Table { - name: ObjectName(vec![Ident::with_quote('"', "table")]), - alias: None, - args: None, - with_hints: vec![], - version: None, - partitions: vec![], - with_ordinality: false, - }, + table_from_name(ObjectName::from(vec![Ident::with_quote('"', "table")])), from[0].relation ); } @@ -256,7 +336,7 @@ fn parse_create_view_with_options() { } => { assert_eq!( name, - ObjectName(vec![ + ObjectName::from(vec![ "myproject".into(), "mydataset".into(), "newview".into() @@ -274,7 +354,11 @@ fn parse_create_view_with_options() { data_type: None, options: Some(vec![ColumnOption::Options(vec![SqlOption::KeyValue { key: Ident::new("description"), - value: Expr::Value(Value::DoubleQuotedString("field age".to_string())), + value: Expr::Value( + Value::DoubleQuotedString("field age".to_string()).with_span( + Span::new(Location::new(1, 42), Location::new(1, 52)) + ) + ), }])]), }, ], @@ -294,9 +378,10 @@ fn parse_create_view_with_options() { assert_eq!( &SqlOption::KeyValue { key: Ident::new("description"), - value: Expr::Value(Value::DoubleQuotedString( - "a view that expires in 2 days".to_string() - )), + value: Expr::Value( + Value::DoubleQuotedString("a view that expires in 2 days".to_string()) + .with_empty_span() + ), }, &options[2], ); @@ -304,6 +389,7 @@ fn parse_create_view_with_options() { _ => unreachable!(), } } + #[test] fn parse_create_view_if_not_exists() { let sql = "CREATE VIEW IF NOT EXISTS mydataset.newview AS SELECT foo FROM bar"; @@ -363,7 +449,7 @@ fn parse_create_table_with_unquoted_hyphen() { Statement::CreateTable(CreateTable { name, columns, .. }) => { assert_eq!( name, - ObjectName(vec![ + ObjectName::from(vec![ "my-pro-ject".into(), "mydataset".into(), "mytable".into() @@ -373,7 +459,6 @@ fn parse_create_table_with_unquoted_hyphen() { vec![ColumnDef { name: Ident::new("x"), data_type: DataType::Int64, - collation: None, options: vec![] },], columns @@ -399,19 +484,18 @@ fn parse_create_table_with_options() { columns, partition_by, cluster_by, - options, + table_options, .. 
}) => { assert_eq!( name, - ObjectName(vec!["mydataset".into(), "newtable".into()]) + ObjectName::from(vec!["mydataset".into(), "newtable".into()]) ); assert_eq!( vec![ ColumnDef { name: Ident::new("x"), data_type: DataType::Int64, - collation: None, options: vec![ ColumnOptionDef { name: None, @@ -421,9 +505,11 @@ fn parse_create_table_with_options() { name: None, option: ColumnOption::Options(vec![SqlOption::KeyValue { key: Ident::new("description"), - value: Expr::Value(Value::DoubleQuotedString( - "field x".to_string() - )), + value: Expr::Value( + Value::DoubleQuotedString("field x".to_string()).with_span( + Span::new(Location::new(1, 42), Location::new(1, 52)) + ) + ), },]) }, ] @@ -431,14 +517,15 @@ fn parse_create_table_with_options() { ColumnDef { name: Ident::new("y"), data_type: DataType::Bool, - collation: None, options: vec![ColumnOptionDef { name: None, option: ColumnOption::Options(vec![SqlOption::KeyValue { key: Ident::new("description"), - value: Expr::Value(Value::DoubleQuotedString( - "field y".to_string() - )), + value: Expr::Value( + Value::DoubleQuotedString("field y".to_string()).with_span( + Span::new(Location::new(1, 42), Location::new(1, 52)) + ) + ), },]) }] }, @@ -452,20 +539,29 @@ fn parse_create_table_with_options() { Ident::new("userid"), Ident::new("age"), ])), - Some(vec![ + CreateTableOptions::Options(vec![ SqlOption::KeyValue { key: Ident::new("partition_expiration_days"), - value: Expr::Value(number("1")), + value: Expr::Value( + number("1").with_span(Span::new( + Location::new(1, 42), + Location::new(1, 43) + )) + ), }, SqlOption::KeyValue { key: Ident::new("description"), - value: Expr::Value(Value::DoubleQuotedString( - "table option description".to_string() - )), + value: Expr::Value( + Value::DoubleQuotedString("table option description".to_string()) + .with_span(Span::new( + Location::new(1, 42), + Location::new(1, 52) + )) + ), }, ]) ), - (partition_by, cluster_by, options) + (partition_by, cluster_by, table_options) ) } _ => unreachable!(), @@ -480,6 +576,12 @@ fn parse_create_table_with_options() { r#"description = "table option description")"# ); bigquery().verified_stmt(sql); + + let sql = "CREATE TABLE foo (x INT64) OPTIONS()"; + bigquery().verified_stmt(sql); + + let sql = "CREATE TABLE db.schema.test (x INT64 OPTIONS(description = 'An optional INTEGER field')) OPTIONS()"; + bigquery().verified_stmt(sql); } #[test] @@ -487,7 +589,7 @@ fn parse_nested_data_types() { let sql = "CREATE TABLE table (x STRUCT, b BYTES(42)>, y ARRAY>)"; match bigquery_and_generic().one_statement_parses_to(sql, sql) { Statement::CreateTable(CreateTable { name, columns, .. }) => { - assert_eq!(name, ObjectName(vec!["table".into()])); + assert_eq!(name, ObjectName::from(vec!["table".into()])); assert_eq!( columns, vec![ @@ -508,7 +610,6 @@ fn parse_nested_data_types() { ], StructBracketKind::AngleBrackets ), - collation: None, options: vec![], }, ColumnDef { @@ -522,7 +623,6 @@ fn parse_nested_data_types() { StructBracketKind::AngleBrackets ), ))), - collation: None, options: vec![], }, ] @@ -565,23 +665,23 @@ fn parse_invalid_brackets() { fn parse_tuple_struct_literal() { // tuple syntax: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#tuple_syntax // syntax: (expr1, expr2 [, ... 
]) - let sql = "SELECT (1, 2, 3), (1, 1.0, '123', true)"; + let sql = "SELECT (1, 2, 3), (1, 1.0, '123', true)"; // line 1, column 1 let select = bigquery().verified_only_select(sql); assert_eq!(2, select.projection.len()); assert_eq!( &Expr::Tuple(vec![ - Expr::Value(number("1")), - Expr::Value(number("2")), - Expr::Value(number("3")), + Expr::value(number("1")), + Expr::value(number("2")), + Expr::value(number("3")), ]), expr_from_projection(&select.projection[0]) ); assert_eq!( &Expr::Tuple(vec![ - Expr::Value(number("1")), - Expr::Value(number("1.0")), - Expr::Value(Value::SingleQuotedString("123".to_string())), - Expr::Value(Value::Boolean(true)) + Expr::value(number("1")), + Expr::value(number("1.0")), + Expr::Value(Value::SingleQuotedString("123".into()).with_empty_span()), + Expr::Value(Value::Boolean(true).with_empty_span()) ]), expr_from_projection(&select.projection[1]) ); @@ -597,9 +697,9 @@ fn parse_typeless_struct_syntax() { assert_eq!( &Expr::Struct { values: vec![ - Expr::Value(number("1")), - Expr::Value(number("2")), - Expr::Value(number("3")), + Expr::value(number("1")), + Expr::value(number("2")), + Expr::value(number("3")), ], fields: Default::default() }, @@ -608,30 +708,35 @@ fn parse_typeless_struct_syntax() { assert_eq!( &Expr::Struct { - values: vec![Expr::Value(Value::SingleQuotedString("abc".to_string())),], + values: vec![Expr::Value( + Value::SingleQuotedString("abc".into()).with_empty_span() + )], fields: Default::default() }, expr_from_projection(&select.projection[1]) ); + assert_eq!( &Expr::Struct { values: vec![ - Expr::Value(number("1")), + Expr::value(number("1")), Expr::CompoundIdentifier(vec![Ident::from("t"), Ident::from("str_col")]), ], fields: Default::default() }, expr_from_projection(&select.projection[2]) ); + assert_eq!( &Expr::Struct { values: vec![ Expr::Named { - expr: Expr::Value(number("1")).into(), + expr: Expr::value(number("1")).into(), name: Ident::from("a") }, Expr::Named { - expr: Expr::Value(Value::SingleQuotedString("abc".to_string())).into(), + expr: Expr::Value(Value::SingleQuotedString("abc".into()).with_empty_span()) + .into(), name: Ident::from("b") }, ], @@ -639,6 +744,7 @@ fn parse_typeless_struct_syntax() { }, expr_from_projection(&select.projection[3]) ); + assert_eq!( &Expr::Struct { values: vec![Expr::Named { @@ -661,7 +767,7 @@ fn parse_typed_struct_syntax_bigquery() { assert_eq!(3, select.projection.len()); assert_eq!( &Expr::Struct { - values: vec![Expr::Value(number("5")),], + values: vec![Expr::value(number("5"))], fields: vec![StructField { field_name: None, field_type: DataType::Int64, @@ -672,15 +778,17 @@ fn parse_typed_struct_syntax_bigquery() { assert_eq!( &Expr::Struct { values: vec![ - Expr::Value(number("1")), + Expr::value(number("1")), Expr::CompoundIdentifier(vec![ Ident { value: "t".into(), quote_style: None, + span: Span::empty(), }, Ident { value: "str_col".into(), quote_style: None, + span: Span::empty(), }, ]), ], @@ -689,6 +797,7 @@ fn parse_typed_struct_syntax_bigquery() { field_name: Some(Ident { value: "x".into(), quote_style: None, + span: Span::empty(), }), field_type: DataType::Int64 }, @@ -696,6 +805,7 @@ fn parse_typed_struct_syntax_bigquery() { field_name: Some(Ident { value: "y".into(), quote_style: None, + span: Span::empty(), }), field_type: DataType::String(None) }, @@ -708,7 +818,8 @@ fn parse_typed_struct_syntax_bigquery() { values: vec![Expr::Identifier(Ident { value: "nested_col".into(), quote_style: None, - }),], + span: Span::empty(), + })], fields: vec![ StructField { 
field_name: Some("arr".into()), @@ -739,7 +850,8 @@ fn parse_typed_struct_syntax_bigquery() { values: vec![Expr::Identifier(Ident { value: "nested_col".into(), quote_style: None, - }),], + span: Span::empty(), + })], fields: vec![ StructField { field_name: Some("x".into()), @@ -764,7 +876,7 @@ fn parse_typed_struct_syntax_bigquery() { assert_eq!(2, select.projection.len()); assert_eq!( &Expr::Struct { - values: vec![Expr::Value(Value::Boolean(true)),], + values: vec![Expr::Value(Value::Boolean(true).with_empty_span())], fields: vec![StructField { field_name: None, field_type: DataType::Bool @@ -774,9 +886,9 @@ fn parse_typed_struct_syntax_bigquery() { ); assert_eq!( &Expr::Struct { - values: vec![Expr::Value(Value::SingleQuotedByteStringLiteral( - "abc".into() - )),], + values: vec![Expr::Value( + Value::SingleQuotedByteStringLiteral("abc".into()).with_empty_span() + )], fields: vec![StructField { field_name: None, field_type: DataType::Bytes(Some(42)) @@ -790,9 +902,9 @@ fn parse_typed_struct_syntax_bigquery() { assert_eq!(4, select.projection.len()); assert_eq!( &Expr::Struct { - values: vec![Expr::Value(Value::DoubleQuotedString( - "2011-05-05".to_string() - )),], + values: vec![Expr::Value( + Value::DoubleQuotedString("2011-05-05".into()).with_empty_span() + )], fields: vec![StructField { field_name: None, field_type: DataType::Date @@ -804,8 +916,8 @@ fn parse_typed_struct_syntax_bigquery() { &Expr::Struct { values: vec![Expr::TypedString { data_type: DataType::Datetime(None), - value: "1999-01-01 01:23:34.45".to_string() - },], + value: Value::SingleQuotedString("1999-01-01 01:23:34.45".into()) + }], fields: vec![StructField { field_name: None, field_type: DataType::Datetime(None) @@ -815,7 +927,7 @@ fn parse_typed_struct_syntax_bigquery() { ); assert_eq!( &Expr::Struct { - values: vec![Expr::Value(number("5.0")),], + values: vec![Expr::value(number("5.0"))], fields: vec![StructField { field_name: None, field_type: DataType::Float64 @@ -825,7 +937,7 @@ fn parse_typed_struct_syntax_bigquery() { ); assert_eq!( &Expr::Struct { - values: vec![Expr::Value(number("1")),], + values: vec![Expr::value(number("1"))], fields: vec![StructField { field_name: None, field_type: DataType::Int64 @@ -840,12 +952,14 @@ fn parse_typed_struct_syntax_bigquery() { assert_eq!( &Expr::Struct { values: vec![Expr::Interval(Interval { - value: Box::new(Expr::Value(Value::SingleQuotedString("2".to_string()))), + value: Box::new(Expr::Value( + Value::SingleQuotedString("2".into()).with_empty_span() + )), leading_field: Some(DateTimeField::Hour), leading_precision: None, last_field: None, fractional_seconds_precision: None - }),], + })], fields: vec![StructField { field_name: None, field_type: DataType::Interval @@ -857,8 +971,10 @@ fn parse_typed_struct_syntax_bigquery() { &Expr::Struct { values: vec![Expr::TypedString { data_type: DataType::JSON, - value: r#"{"class" : {"students" : [{"name" : "Jane"}]}}"#.to_string() - },], + value: Value::SingleQuotedString( + r#"{"class" : {"students" : [{"name" : "Jane"}]}}"#.into() + ) + }], fields: vec![StructField { field_name: None, field_type: DataType::JSON @@ -872,7 +988,9 @@ fn parse_typed_struct_syntax_bigquery() { assert_eq!(3, select.projection.len()); assert_eq!( &Expr::Struct { - values: vec![Expr::Value(Value::DoubleQuotedString("foo".to_string())),], + values: vec![Expr::Value( + Value::DoubleQuotedString("foo".into()).with_empty_span() + )], fields: vec![StructField { field_name: None, field_type: DataType::String(Some(42)) @@ -884,8 +1002,8 @@ fn 
parse_typed_struct_syntax_bigquery() { &Expr::Struct { values: vec![Expr::TypedString { data_type: DataType::Timestamp(None, TimezoneInfo::None), - value: "2008-12-25 15:30:00 America/Los_Angeles".to_string() - },], + value: Value::SingleQuotedString("2008-12-25 15:30:00 America/Los_Angeles".into()) + }], fields: vec![StructField { field_name: None, field_type: DataType::Timestamp(None, TimezoneInfo::None) @@ -898,8 +1016,8 @@ fn parse_typed_struct_syntax_bigquery() { &Expr::Struct { values: vec![Expr::TypedString { data_type: DataType::Time(None, TimezoneInfo::None), - value: "15:30:00".to_string() - },], + value: Value::SingleQuotedString("15:30:00".into()) + }], fields: vec![StructField { field_name: None, field_type: DataType::Time(None, TimezoneInfo::None) @@ -915,8 +1033,8 @@ fn parse_typed_struct_syntax_bigquery() { &Expr::Struct { values: vec![Expr::TypedString { data_type: DataType::Numeric(ExactNumberInfo::None), - value: "1".to_string() - },], + value: Value::SingleQuotedString("1".into()) + }], fields: vec![StructField { field_name: None, field_type: DataType::Numeric(ExactNumberInfo::None) @@ -928,8 +1046,8 @@ fn parse_typed_struct_syntax_bigquery() { &Expr::Struct { values: vec![Expr::TypedString { data_type: DataType::BigNumeric(ExactNumberInfo::None), - value: "1".to_string() - },], + value: Value::SingleQuotedString("1".into()) + }], fields: vec![StructField { field_name: None, field_type: DataType::BigNumeric(ExactNumberInfo::None) @@ -944,7 +1062,7 @@ fn parse_typed_struct_syntax_bigquery() { assert_eq!(1, select.projection.len()); assert_eq!( &Expr::Struct { - values: vec![Expr::Value(number("1")), Expr::Value(number("2")),], + values: vec![Expr::value(number("1")), Expr::value(number("2")),], fields: vec![ StructField { field_name: Some("key".into()), @@ -970,7 +1088,7 @@ fn parse_typed_struct_syntax_bigquery_and_generic() { assert_eq!(3, select.projection.len()); assert_eq!( &Expr::Struct { - values: vec![Expr::Value(number("5")),], + values: vec![Expr::value(number("5"))], fields: vec![StructField { field_name: None, field_type: DataType::Int64, @@ -981,15 +1099,17 @@ fn parse_typed_struct_syntax_bigquery_and_generic() { assert_eq!( &Expr::Struct { values: vec![ - Expr::Value(number("1")), + Expr::value(number("1")), Expr::CompoundIdentifier(vec![ Ident { value: "t".into(), quote_style: None, + span: Span::empty(), }, Ident { value: "str_col".into(), quote_style: None, + span: Span::empty(), }, ]), ], @@ -998,6 +1118,7 @@ fn parse_typed_struct_syntax_bigquery_and_generic() { field_name: Some(Ident { value: "x".into(), quote_style: None, + span: Span::empty(), }), field_type: DataType::Int64 }, @@ -1005,6 +1126,7 @@ fn parse_typed_struct_syntax_bigquery_and_generic() { field_name: Some(Ident { value: "y".into(), quote_style: None, + span: Span::empty(), }), field_type: DataType::String(None) }, @@ -1012,33 +1134,6 @@ fn parse_typed_struct_syntax_bigquery_and_generic() { }, expr_from_projection(&select.projection[1]) ); - assert_eq!( - &Expr::Struct { - values: vec![Expr::Identifier(Ident { - value: "nested_col".into(), - quote_style: None, - }),], - fields: vec![ - StructField { - field_name: Some("arr".into()), - field_type: DataType::Array(ArrayElemTypeDef::AngleBracket(Box::new( - DataType::Float64 - ))) - }, - StructField { - field_name: Some("str".into()), - field_type: DataType::Struct( - vec![StructField { - field_name: None, - field_type: DataType::Bool - }], - StructBracketKind::AngleBrackets - ) - }, - ] - }, - expr_from_projection(&select.projection[2]) - 
); let sql = r#"SELECT STRUCT>(nested_col)"#; let select = bigquery_and_generic().verified_only_select(sql); @@ -1048,7 +1143,8 @@ fn parse_typed_struct_syntax_bigquery_and_generic() { values: vec![Expr::Identifier(Ident { value: "nested_col".into(), quote_style: None, - }),], + span: Span::empty(), + })], fields: vec![ StructField { field_name: Some("x".into()), @@ -1073,7 +1169,7 @@ fn parse_typed_struct_syntax_bigquery_and_generic() { assert_eq!(2, select.projection.len()); assert_eq!( &Expr::Struct { - values: vec![Expr::Value(Value::Boolean(true)),], + values: vec![Expr::Value(Value::Boolean(true).with_empty_span())], fields: vec![StructField { field_name: None, field_type: DataType::Bool @@ -1083,9 +1179,9 @@ fn parse_typed_struct_syntax_bigquery_and_generic() { ); assert_eq!( &Expr::Struct { - values: vec![Expr::Value(Value::SingleQuotedByteStringLiteral( - "abc".into() - )),], + values: vec![Expr::Value( + Value::SingleQuotedByteStringLiteral("abc".into()).with_empty_span() + )], fields: vec![StructField { field_name: None, field_type: DataType::Bytes(Some(42)) @@ -1099,9 +1195,9 @@ fn parse_typed_struct_syntax_bigquery_and_generic() { assert_eq!(4, select.projection.len()); assert_eq!( &Expr::Struct { - values: vec![Expr::Value(Value::SingleQuotedString( - "2011-05-05".to_string() - )),], + values: vec![Expr::Value( + Value::SingleQuotedString("2011-05-05".into()).with_empty_span() + )], fields: vec![StructField { field_name: None, field_type: DataType::Date @@ -1113,8 +1209,8 @@ fn parse_typed_struct_syntax_bigquery_and_generic() { &Expr::Struct { values: vec![Expr::TypedString { data_type: DataType::Datetime(None), - value: "1999-01-01 01:23:34.45".to_string() - },], + value: Value::SingleQuotedString("1999-01-01 01:23:34.45".into()) + }], fields: vec![StructField { field_name: None, field_type: DataType::Datetime(None) @@ -1124,7 +1220,7 @@ fn parse_typed_struct_syntax_bigquery_and_generic() { ); assert_eq!( &Expr::Struct { - values: vec![Expr::Value(number("5.0")),], + values: vec![Expr::value(number("5.0"))], fields: vec![StructField { field_name: None, field_type: DataType::Float64 @@ -1134,7 +1230,7 @@ fn parse_typed_struct_syntax_bigquery_and_generic() { ); assert_eq!( &Expr::Struct { - values: vec![Expr::Value(number("1")),], + values: vec![Expr::value(number("1"))], fields: vec![StructField { field_name: None, field_type: DataType::Int64 @@ -1149,12 +1245,14 @@ fn parse_typed_struct_syntax_bigquery_and_generic() { assert_eq!( &Expr::Struct { values: vec![Expr::Interval(Interval { - value: Box::new(Expr::Value(Value::SingleQuotedString("1".to_string()))), + value: Box::new(Expr::Value( + Value::SingleQuotedString("1".into()).with_empty_span() + )), leading_field: Some(DateTimeField::Month), leading_precision: None, last_field: None, fractional_seconds_precision: None - }),], + })], fields: vec![StructField { field_name: None, field_type: DataType::Interval @@ -1166,8 +1264,10 @@ fn parse_typed_struct_syntax_bigquery_and_generic() { &Expr::Struct { values: vec![Expr::TypedString { data_type: DataType::JSON, - value: r#"{"class" : {"students" : [{"name" : "Jane"}]}}"#.to_string() - },], + value: Value::SingleQuotedString( + r#"{"class" : {"students" : [{"name" : "Jane"}]}}"#.into() + ) + }], fields: vec![StructField { field_name: None, field_type: DataType::JSON @@ -1181,7 +1281,9 @@ fn parse_typed_struct_syntax_bigquery_and_generic() { assert_eq!(3, select.projection.len()); assert_eq!( &Expr::Struct { - values: 
vec![Expr::Value(Value::SingleQuotedString("foo".to_string())),], + values: vec![Expr::Value( + Value::SingleQuotedString("foo".into()).with_empty_span() + )], fields: vec![StructField { field_name: None, field_type: DataType::String(Some(42)) @@ -1193,8 +1295,8 @@ fn parse_typed_struct_syntax_bigquery_and_generic() { &Expr::Struct { values: vec![Expr::TypedString { data_type: DataType::Timestamp(None, TimezoneInfo::None), - value: "2008-12-25 15:30:00 America/Los_Angeles".to_string() - },], + value: Value::SingleQuotedString("2008-12-25 15:30:00 America/Los_Angeles".into()) + }], fields: vec![StructField { field_name: None, field_type: DataType::Timestamp(None, TimezoneInfo::None) @@ -1207,8 +1309,8 @@ fn parse_typed_struct_syntax_bigquery_and_generic() { &Expr::Struct { values: vec![Expr::TypedString { data_type: DataType::Time(None, TimezoneInfo::None), - value: "15:30:00".to_string() - },], + value: Value::SingleQuotedString("15:30:00".into()) + }], fields: vec![StructField { field_name: None, field_type: DataType::Time(None, TimezoneInfo::None) @@ -1224,8 +1326,8 @@ fn parse_typed_struct_syntax_bigquery_and_generic() { &Expr::Struct { values: vec![Expr::TypedString { data_type: DataType::Numeric(ExactNumberInfo::None), - value: "1".to_string() - },], + value: Value::SingleQuotedString("1".into()) + }], fields: vec![StructField { field_name: None, field_type: DataType::Numeric(ExactNumberInfo::None) @@ -1237,8 +1339,8 @@ fn parse_typed_struct_syntax_bigquery_and_generic() { &Expr::Struct { values: vec![Expr::TypedString { data_type: DataType::BigNumeric(ExactNumberInfo::None), - value: "1".to_string() - },], + value: Value::SingleQuotedString("1".into()) + }], fields: vec![StructField { field_name: None, field_type: DataType::BigNumeric(ExactNumberInfo::None) @@ -1250,12 +1352,12 @@ fn parse_typed_struct_syntax_bigquery_and_generic() { #[test] fn parse_typed_struct_with_field_name_bigquery() { - let sql = r#"SELECT STRUCT(5), STRUCT("foo")"#; + let sql = r#"SELECT STRUCT(5), STRUCT("foo")"#; // line 1, column 1 let select = bigquery().verified_only_select(sql); assert_eq!(2, select.projection.len()); assert_eq!( &Expr::Struct { - values: vec![Expr::Value(number("5")),], + values: vec![Expr::value(number("5"))], fields: vec![StructField { field_name: Some(Ident::from("x")), field_type: DataType::Int64 @@ -1265,7 +1367,9 @@ fn parse_typed_struct_with_field_name_bigquery() { ); assert_eq!( &Expr::Struct { - values: vec![Expr::Value(Value::DoubleQuotedString("foo".to_string())),], + values: vec![Expr::Value( + Value::DoubleQuotedString("foo".into()).with_empty_span() + )], fields: vec![StructField { field_name: Some(Ident::from("y")), field_type: DataType::String(None) @@ -1274,12 +1378,12 @@ fn parse_typed_struct_with_field_name_bigquery() { expr_from_projection(&select.projection[1]) ); - let sql = r#"SELECT STRUCT(5, 5)"#; + let sql = r#"SELECT STRUCT(5, 5)"#; // line 1, column 1 let select = bigquery().verified_only_select(sql); assert_eq!(1, select.projection.len()); assert_eq!( &Expr::Struct { - values: vec![Expr::Value(number("5")), Expr::Value(number("5")),], + values: vec![Expr::value(number("5")), Expr::value(number("5")),], fields: vec![ StructField { field_name: Some(Ident::from("x")), @@ -1297,12 +1401,12 @@ fn parse_typed_struct_with_field_name_bigquery() { #[test] fn parse_typed_struct_with_field_name_bigquery_and_generic() { - let sql = r#"SELECT STRUCT(5), STRUCT('foo')"#; + let sql = r#"SELECT STRUCT(5), STRUCT('foo')"#; // line 1, column 1 let select = 
bigquery().verified_only_select(sql); assert_eq!(2, select.projection.len()); assert_eq!( &Expr::Struct { - values: vec![Expr::Value(number("5")),], + values: vec![Expr::value(number("5"))], fields: vec![StructField { field_name: Some(Ident::from("x")), field_type: DataType::Int64 @@ -1312,7 +1416,9 @@ fn parse_typed_struct_with_field_name_bigquery_and_generic() { ); assert_eq!( &Expr::Struct { - values: vec![Expr::Value(Value::SingleQuotedString("foo".to_string())),], + values: vec![Expr::Value( + Value::SingleQuotedString("foo".into()).with_empty_span() + )], fields: vec![StructField { field_name: Some(Ident::from("y")), field_type: DataType::String(None) @@ -1321,12 +1427,12 @@ fn parse_typed_struct_with_field_name_bigquery_and_generic() { expr_from_projection(&select.projection[1]) ); - let sql = r#"SELECT STRUCT(5, 5)"#; + let sql = r#"SELECT STRUCT(5, 5)"#; // line 1, column 1 let select = bigquery_and_generic().verified_only_select(sql); assert_eq!(1, select.projection.len()); assert_eq!( &Expr::Struct { - values: vec![Expr::Value(number("5")), Expr::Value(number("5")),], + values: vec![Expr::value(number("5")), Expr::value(number("5")),], fields: vec![ StructField { field_name: Some(Ident::from("x")), @@ -1365,15 +1471,7 @@ fn parse_table_identifiers() { assert_eq!( select.from, vec![TableWithJoins { - relation: TableFactor::Table { - name: ObjectName(expected), - alias: None, - args: None, - with_hints: vec![], - version: None, - partitions: vec![], - with_ordinality: false, - }, + relation: table_from_name(ObjectName::from(expected)), joins: vec![] },] ); @@ -1507,6 +1605,20 @@ fn parse_hyphenated_table_identifiers() { "SELECT * FROM foo-bar AS f JOIN baz-qux AS b ON f.id = b.id", ); + assert_eq!( + bigquery() + .verified_only_select_with_canonical( + "select * from foo-123.bar", + "SELECT * FROM foo-123.bar" + ) + .from[0] + .relation, + table_from_name(ObjectName::from(vec![ + Ident::new("foo-123"), + Ident::new("bar") + ])), + ); + assert_eq!( bigquery() .verified_only_select_with_canonical( @@ -1523,29 +1635,29 @@ fn parse_hyphenated_table_identifiers() { ])) }) ); - - let error_sql = "select foo-bar.* from foo-bar"; - assert!(bigquery().parse_sql_statements(error_sql).is_err()); } #[test] fn parse_table_time_travel() { let version = "2023-08-18 23:08:18".to_string(); - let sql = format!("SELECT 1 FROM t1 FOR SYSTEM_TIME AS OF '{version}'"); + let sql = format!("SELECT 1 FROM t1 FOR SYSTEM_TIME AS OF '{version}'"); // line 1, column 1 let select = bigquery().verified_only_select(&sql); assert_eq!( select.from, vec![TableWithJoins { relation: TableFactor::Table { - name: ObjectName(vec![Ident::new("t1")]), + name: ObjectName::from(vec![Ident::new("t1")]), alias: None, args: None, with_hints: vec![], version: Some(TableVersion::ForSystemTimeAsOf(Expr::Value( - Value::SingleQuotedString(version) + Value::SingleQuotedString(version).with_empty_span() ))), partitions: vec![], with_ordinality: false, + json_path: None, + sample: None, + index_hints: vec![], }, joins: vec![] },] @@ -1576,7 +1688,7 @@ fn parse_join_constraint_unnest_alias() { with_ordinality: false, }, global: false, - join_operator: JoinOperator::Inner(JoinConstraint::On(Expr::BinaryOp { + join_operator: JoinOperator::Join(JoinConstraint::On(Expr::BinaryOp { left: Box::new(Expr::Identifier("c1".into())), op: BinaryOperator::Eq, right: Box::new(Expr::Identifier("c2".into())), @@ -1608,21 +1720,22 @@ fn parse_merge() { columns: vec![Ident::new("product"), Ident::new("quantity")], kind: MergeInsertKind::Values(Values { 
explicit_row: false, - rows: vec![vec![Expr::Value(number("1")), Expr::Value(number("2"))]], + rows: vec![vec![Expr::value(number("1")), Expr::value(number("2"))]], }), }); let update_action = MergeAction::Update { assignments: vec![ Assignment { - target: AssignmentTarget::ColumnName(ObjectName(vec![Ident::new("a")])), - value: Expr::Value(number("1")), + target: AssignmentTarget::ColumnName(ObjectName::from(vec![Ident::new("a")])), + value: Expr::value(number("1")), }, Assignment { - target: AssignmentTarget::ColumnName(ObjectName(vec![Ident::new("b")])), - value: Expr::Value(number("2")), + target: AssignmentTarget::ColumnName(ObjectName::from(vec![Ident::new("b")])), + value: Expr::value(number("2")), }, ], }; + match bigquery_and_generic().verified_stmt(sql) { Statement::Merge { into, @@ -1630,11 +1743,12 @@ fn parse_merge() { source, on, clauses, + .. } => { assert!(!into); assert_eq!( TableFactor::Table { - name: ObjectName(vec![Ident::new("inventory")]), + name: ObjectName::from(vec![Ident::new("inventory")]), alias: Some(TableAlias { name: Ident::new("T"), columns: vec![], @@ -1644,12 +1758,15 @@ fn parse_merge() { version: Default::default(), partitions: Default::default(), with_ordinality: false, + json_path: None, + sample: None, + index_hints: vec![], }, table ); assert_eq!( TableFactor::Table { - name: ObjectName(vec![Ident::new("newArrivals")]), + name: ObjectName::from(vec![Ident::new("newArrivals")]), alias: Some(TableAlias { name: Ident::new("S"), columns: vec![], @@ -1659,20 +1776,23 @@ fn parse_merge() { version: Default::default(), partitions: Default::default(), with_ordinality: false, + json_path: None, + sample: None, + index_hints: vec![], }, source ); - assert_eq!(Expr::Value(Value::Boolean(false)), *on); + assert_eq!(Expr::Value(Value::Boolean(false).with_empty_span()), *on); assert_eq!( vec![ MergeClause { clause_kind: MergeClauseKind::NotMatched, - predicate: Some(Expr::Value(number("1"))), + predicate: Some(Expr::value(number("1"))), action: insert_action.clone(), }, MergeClause { clause_kind: MergeClauseKind::NotMatchedByTarget, - predicate: Some(Expr::Value(number("1"))), + predicate: Some(Expr::value(number("1"))), action: insert_action.clone(), }, MergeClause { @@ -1682,7 +1802,7 @@ fn parse_merge() { }, MergeClause { clause_kind: MergeClauseKind::NotMatchedBySource, - predicate: Some(Expr::Value(number("2"))), + predicate: Some(Expr::value(number("2"))), action: MergeAction::Delete }, MergeClause { @@ -1692,12 +1812,12 @@ fn parse_merge() { }, MergeClause { clause_kind: MergeClauseKind::NotMatchedBySource, - predicate: Some(Expr::Value(number("1"))), + predicate: Some(Expr::value(number("1"))), action: update_action.clone(), }, MergeClause { clause_kind: MergeClauseKind::NotMatched, - predicate: Some(Expr::Value(number("1"))), + predicate: Some(Expr::value(number("1"))), action: MergeAction::Insert(MergeInsertExpr { columns: vec![Ident::new("product"), Ident::new("quantity"),], kind: MergeInsertKind::Row, @@ -1713,7 +1833,7 @@ fn parse_merge() { }, MergeClause { clause_kind: MergeClauseKind::NotMatched, - predicate: Some(Expr::Value(number("1"))), + predicate: Some(Expr::value(number("1"))), action: MergeAction::Insert(MergeInsertExpr { columns: vec![], kind: MergeInsertKind::Row @@ -1729,7 +1849,7 @@ fn parse_merge() { }, MergeClause { clause_kind: MergeClauseKind::Matched, - predicate: Some(Expr::Value(number("1"))), + predicate: Some(Expr::value(number("1"))), action: MergeAction::Delete, }, MergeClause { @@ -1745,7 +1865,7 @@ fn parse_merge() { kind: 
MergeInsertKind::Values(Values { explicit_row: false, rows: vec![vec![ - Expr::Value(number("1")), + Expr::value(number("1")), Expr::Identifier(Ident::new("DEFAULT")), ]] }) @@ -1759,7 +1879,7 @@ fn parse_merge() { kind: MergeInsertKind::Values(Values { explicit_row: false, rows: vec![vec![ - Expr::Value(number("1")), + Expr::value(number("1")), Expr::Identifier(Ident::new("DEFAULT")), ]] }) @@ -1875,7 +1995,7 @@ fn parse_array_agg_func() { fn parse_big_query_declare() { for (sql, expected_names, expected_data_type, expected_assigned_expr) in [ ( - "DECLARE x INT64", + "DECLARE x INT64", // line 1, column 1 vec![Ident::new("x")], Some(DataType::Int64), None, @@ -1884,25 +2004,25 @@ fn parse_big_query_declare() { "DECLARE x INT64 DEFAULT 42", vec![Ident::new("x")], Some(DataType::Int64), - Some(DeclareAssignment::Default(Box::new(Expr::Value(number( - "42", - ))))), + Some(DeclareAssignment::Default(Box::new(Expr::Value( + number("42").with_empty_span(), + )))), ), ( "DECLARE x, y, z INT64 DEFAULT 42", vec![Ident::new("x"), Ident::new("y"), Ident::new("z")], Some(DataType::Int64), - Some(DeclareAssignment::Default(Box::new(Expr::Value(number( - "42", - ))))), + Some(DeclareAssignment::Default(Box::new(Expr::Value( + number("42").with_empty_span(), + )))), ), ( "DECLARE x DEFAULT 42", vec![Ident::new("x")], None, - Some(DeclareAssignment::Default(Box::new(Expr::Value(number( - "42", - ))))), + Some(DeclareAssignment::Default(Box::new(Expr::Value( + number("42").with_empty_span(), + )))), ), ] { match bigquery().verified_stmt(sql) { @@ -1951,27 +2071,47 @@ fn parse_map_access_expr() { let sql = "users[-1][safe_offset(2)].a.b"; let expr = bigquery().verified_expr(sql); - fn map_access_key(key: Expr, syntax: MapAccessSyntax) -> MapAccessKey { - MapAccessKey { key, syntax } - } - let expected = Expr::MapAccess { - column: Expr::Identifier(Ident::new("users")).into(), - keys: vec![ - map_access_key( - Expr::UnaryOp { + let expected = Expr::CompoundFieldAccess { + root: Box::new(Expr::Identifier(Ident::with_span( + Span::new(Location::of(1, 1), Location::of(1, 6)), + "users", + ))), + access_chain: vec![ + AccessExpr::Subscript(Subscript::Index { + index: Expr::UnaryOp { op: UnaryOperator::Minus, - expr: Expr::Value(number("1")).into(), + expr: Expr::value(number("1")).into(), }, - MapAccessSyntax::Bracket, - ), - map_access_key( - call("safe_offset", [Expr::Value(number("2"))]), - MapAccessSyntax::Bracket, - ), - map_access_key( - Expr::CompoundIdentifier(vec![Ident::new("a"), Ident::new("b")]), - MapAccessSyntax::Period, - ), + }), + AccessExpr::Subscript(Subscript::Index { + index: Expr::Function(Function { + name: ObjectName::from(vec![Ident::with_span( + Span::new(Location::of(1, 11), Location::of(1, 22)), + "safe_offset", + )]), + parameters: FunctionArguments::None, + args: FunctionArguments::List(FunctionArgumentList { + duplicate_treatment: None, + args: vec![FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::Value( + number("2").with_empty_span(), + )))], + clauses: vec![], + }), + filter: None, + null_treatment: None, + over: None, + within_group: vec![], + uses_odbc_syntax: false, + }), + }), + AccessExpr::Dot(Expr::Identifier(Ident::with_span( + Span::new(Location::of(1, 24), Location::of(1, 25)), + "a", + ))), + AccessExpr::Dot(Expr::Identifier(Ident::with_span( + Span::new(Location::of(1, 26), Location::of(1, 27)), + "b", + ))), ], }; assert_eq!(expr, expected); @@ -1993,23 +2133,24 @@ fn test_bigquery_create_function() { let stmt = bigquery().verified_stmt(sql); assert_eq!( stmt, - 
Statement::CreateFunction { + Statement::CreateFunction(CreateFunction { + or_alter: false, or_replace: true, temporary: true, if_not_exists: false, - name: ObjectName(vec![ + name: ObjectName::from(vec![ Ident::new("project1"), Ident::new("mydataset"), Ident::new("myfunction"), ]), args: Some(vec![OperateFunctionArg::with_name("x", DataType::Float64),]), return_type: Some(DataType::Float64), - function_body: Some(CreateFunctionBody::AsAfterOptions(Expr::Value(number( - "42" - )))), + function_body: Some(CreateFunctionBody::AsAfterOptions(Expr::Value( + number("42").with_empty_span() + ))), options: Some(vec![SqlOption::KeyValue { key: Ident::new("x"), - value: Expr::Value(Value::SingleQuotedString("y".into())), + value: Expr::Value(Value::SingleQuotedString("y".into()).with_empty_span()), }]), behavior: None, using: None, @@ -2018,7 +2159,7 @@ fn test_bigquery_create_function() { remote_connection: None, called_on_null: None, parallel: None, - } + }) ); let sqls = [ @@ -2128,10 +2269,14 @@ fn test_bigquery_trim() { let select = bigquery().verified_only_select(sql_only_select); assert_eq!( &Expr::Trim { - expr: Box::new(Expr::Value(Value::SingleQuotedString("xyz".to_owned()))), + expr: Box::new(Expr::Value( + Value::SingleQuotedString("xyz".to_owned()).with_empty_span() + )), trim_where: None, trim_what: None, - trim_characters: Some(vec![Expr::Value(Value::SingleQuotedString("a".to_owned()))]), + trim_characters: Some(vec![Expr::Value( + Value::SingleQuotedString("a".to_owned()).with_empty_span() + )]), }, expr_from_projection(only(&select.projection)) ); @@ -2158,6 +2303,14 @@ fn parse_extract_weekday() { ); } +#[test] +fn bigquery_select_expr_star() { + bigquery() + .verified_only_select("SELECT STRUCT((SELECT foo FROM T WHERE true)).* FROM T"); + bigquery().verified_only_select("SELECT [STRUCT('foo')][0].* EXCEPT (foo) FROM T"); + bigquery().verified_only_select("SELECT myfunc()[0].* FROM T"); +} + #[test] fn test_select_as_struct() { bigquery().verified_only_select("SELECT * FROM (SELECT AS VALUE STRUCT(123 AS a, false AS b))"); @@ -2174,6 +2327,20 @@ fn test_select_as_value() { assert_eq!(Some(ValueTableMode::AsValue), select.value_table_mode); } +#[test] +fn test_triple_quote_typed_strings() { + bigquery().verified_expr(r#"JSON '''{"foo":"bar's"}'''"#); + + let expr = bigquery().verified_expr(r#"JSON """{"foo":"bar's"}""""#); + assert_eq!( + Expr::TypedString { + data_type: DataType::JSON, + value: Value::TripleDoubleQuotedString(r#"{"foo":"bar's"}"#.into()) + }, + expr + ); +} + #[test] fn test_array_agg() { bigquery_and_generic().verified_expr("ARRAY_AGG(state)"); @@ -2194,3 +2361,19 @@ fn test_any_value() { bigquery_and_generic().verified_expr("ANY_VALUE(fruit HAVING MAX sold)"); bigquery_and_generic().verified_expr("ANY_VALUE(fruit HAVING MIN sold)"); } + +#[test] +fn test_any_type() { + bigquery().verified_stmt(concat!( + "CREATE OR REPLACE TEMPORARY FUNCTION ", + "my_function(param1 ANY TYPE) ", + "AS (", + "(SELECT 1)", + ")", + )); +} + +#[test] +fn test_any_type_dont_break_custom_type() { + bigquery_and_generic().verified_stmt("CREATE TABLE foo (x ANY)"); +} diff --git a/tests/sqlparser_clickhouse.rs b/tests/sqlparser_clickhouse.rs index f8c349a37..d0218b6c3 100644 --- a/tests/sqlparser_clickhouse.rs +++ b/tests/sqlparser_clickhouse.rs @@ -21,9 +21,11 @@ #[macro_use] mod test_utils; +use helpers::attached_token::AttachedToken; +use sqlparser::tokenizer::Span; use test_utils::*; -use sqlparser::ast::Expr::{BinaryOp, Identifier, MapAccess}; +use 
sqlparser::ast::Expr::{BinaryOp, Identifier}; use sqlparser::ast::SelectItem::UnnamedExpr; use sqlparser::ast::TableFactor::Table; use sqlparser::ast::Value::Number; @@ -39,34 +41,28 @@ fn parse_map_access_expr() { assert_eq!( Select { distinct: None, + select_token: AttachedToken::empty(), top: None, - projection: vec![UnnamedExpr(MapAccess { - column: Box::new(Identifier(Ident { + top_before_distinct: false, + projection: vec![UnnamedExpr(Expr::CompoundFieldAccess { + root: Box::new(Identifier(Ident { value: "string_values".to_string(), quote_style: None, + span: Span::empty(), })), - keys: vec![MapAccessKey { - key: call( + access_chain: vec![AccessExpr::Subscript(Subscript::Index { + index: call( "indexOf", [ Expr::Identifier(Ident::new("string_names")), - Expr::Value(Value::SingleQuotedString("endpoint".to_string())) + Expr::value(Value::SingleQuotedString("endpoint".to_string())) ] ), - syntax: MapAccessSyntax::Bracket - }], + })], })], into: None, from: vec![TableWithJoins { - relation: Table { - name: ObjectName(vec![Ident::new("foos")]), - alias: None, - args: None, - with_hints: vec![], - version: None, - partitions: vec![], - with_ordinality: false, - }, + relation: table_from_name(ObjectName::from(vec![Ident::new("foos")])), joins: vec![], }], lateral_views: vec![], @@ -75,25 +71,24 @@ fn parse_map_access_expr() { left: Box::new(BinaryOp { left: Box::new(Identifier(Ident::new("id"))), op: BinaryOperator::Eq, - right: Box::new(Expr::Value(Value::SingleQuotedString("test".to_string()))), + right: Box::new(Expr::value(Value::SingleQuotedString("test".to_string()))), }), op: BinaryOperator::And, right: Box::new(BinaryOp { - left: Box::new(MapAccess { - column: Box::new(Identifier(Ident::new("string_value"))), - keys: vec![MapAccessKey { - key: call( + left: Box::new(Expr::CompoundFieldAccess { + root: Box::new(Identifier(Ident::new("string_value"))), + access_chain: vec![AccessExpr::Subscript(Subscript::Index { + index: call( "indexOf", [ Expr::Identifier(Ident::new("string_name")), - Expr::Value(Value::SingleQuotedString("app".to_string())) + Expr::value(Value::SingleQuotedString("app".to_string())) ] ), - syntax: MapAccessSyntax::Bracket - }], + })], }), op: BinaryOperator::NotEq, - right: Box::new(Expr::Value(Value::SingleQuotedString("foo".to_string()))), + right: Box::new(Expr::value(Value::SingleQuotedString("foo".to_string()))), }), }), group_by: GroupByExpr::Expressions(vec![], vec![]), @@ -106,6 +101,7 @@ fn parse_map_access_expr() { qualify: None, value_table_mode: None, connect_by: None, + flavor: SelectFlavor::Standard, }, select ); @@ -118,8 +114,8 @@ fn parse_array_expr() { assert_eq!( &Expr::Array(Array { elem: vec![ - Expr::Value(Value::SingleQuotedString("1".to_string())), - Expr::Value(Value::SingleQuotedString("2".to_string())), + Expr::value(Value::SingleQuotedString("1".to_string())), + Expr::value(Value::SingleQuotedString("2".to_string())), ], named: false, }), @@ -169,10 +165,12 @@ fn parse_delimited_identifiers() { args, with_hints, version, - with_ordinality: _, - partitions: _, + .. 
} => { - assert_eq!(vec![Ident::with_quote('"', "a table")], name.0); + assert_eq!( + ObjectName::from(vec![Ident::with_quote('"', "a table")]), + name + ); assert_eq!(Ident::with_quote('"', "alias"), alias.unwrap().name); assert!(args.is_none()); assert!(with_hints.is_empty()); @@ -191,7 +189,8 @@ fn parse_delimited_identifiers() { ); assert_eq!( &Expr::Function(Function { - name: ObjectName(vec![Ident::with_quote('"', "myfun")]), + name: ObjectName::from(vec![Ident::with_quote('"', "myfun")]), + uses_odbc_syntax: false, parameters: FunctionArguments::None, args: FunctionArguments::List(FunctionArgumentList { duplicate_treatment: None, @@ -220,13 +219,19 @@ fn parse_delimited_identifiers() { #[test] fn parse_create_table() { - clickhouse().verified_stmt(r#"CREATE TABLE "x" ("a" "int") ENGINE=MergeTree ORDER BY ("x")"#); - clickhouse().verified_stmt(r#"CREATE TABLE "x" ("a" "int") ENGINE=MergeTree ORDER BY "x""#); + clickhouse().verified_stmt(r#"CREATE TABLE "x" ("a" "int") ENGINE = MergeTree ORDER BY ("x")"#); + clickhouse().verified_stmt(r#"CREATE TABLE "x" ("a" "int") ENGINE = MergeTree ORDER BY "x""#); clickhouse().verified_stmt( - r#"CREATE TABLE "x" ("a" "int") ENGINE=MergeTree ORDER BY "x" AS SELECT * FROM "t" WHERE true"#, + r#"CREATE TABLE "x" ("a" "int") ENGINE = MergeTree ORDER BY "x" AS SELECT * FROM "t" WHERE true"#, ); } +#[test] +fn parse_insert_into_function() { + clickhouse().verified_stmt(r#"INSERT INTO TABLE FUNCTION remote('localhost', default.simple_table) VALUES (100, 'inserted via remote()')"#); + clickhouse().verified_stmt(r#"INSERT INTO FUNCTION remote('localhost', default.simple_table) VALUES (100, 'inserted via remote()')"#); +} + #[test] fn parse_alter_table_attach_and_detach_partition() { for operation in &["ATTACH", "DETACH"] { @@ -301,7 +306,7 @@ fn parse_alter_table_add_projection() { Statement::AlterTable { name, operations, .. } => { - assert_eq!(name, ObjectName(vec!["t0".into()])); + assert_eq!(name, ObjectName::from(vec!["t0".into()])); assert_eq!(1, operations.len()); assert_eq!( operations[0], @@ -318,12 +323,14 @@ fn parse_alter_table_add_projection() { vec![] )), order_by: Some(OrderBy { - exprs: vec![OrderByExpr { + kind: OrderByKind::Expressions(vec![OrderByExpr { expr: Identifier(Ident::new("b")), - asc: None, - nulls_first: None, + options: OrderByOptions { + asc: None, + nulls_first: None, + }, with_fill: None, - }], + }]), interpolate: None, }), } @@ -371,7 +378,7 @@ fn parse_alter_table_drop_projection() { Statement::AlterTable { name, operations, .. } => { - assert_eq!(name, ObjectName(vec!["t0".into()])); + assert_eq!(name, ObjectName::from(vec!["t0".into()])); assert_eq!(1, operations.len()); assert_eq!( operations[0], @@ -404,7 +411,7 @@ fn parse_alter_table_clear_and_materialize_projection() { Statement::AlterTable { name, operations, .. } => { - assert_eq!(name, ObjectName(vec!["t0".into()])); + assert_eq!(name, ObjectName::from(vec!["t0".into()])); assert_eq!(1, operations.len()); assert_eq!( operations[0], @@ -523,7 +530,6 @@ fn column_def(name: Ident, data_type: DataType) -> ColumnDef { ColumnDef { name, data_type, - collation: None, options: vec![], } } @@ -548,7 +554,7 @@ fn parse_clickhouse_data_types() { match clickhouse_and_generic().one_statement_parses_to(sql, &canonical_sql) { Statement::CreateTable(CreateTable { name, columns, .. 
}) => { - assert_eq!(name, ObjectName(vec!["table".into()])); + assert_eq!(name, ObjectName::from(vec!["table".into()])); assert_eq!( columns, vec![ @@ -583,13 +589,13 @@ fn parse_clickhouse_data_types() { #[test] fn parse_create_table_with_nullable() { - let sql = r#"CREATE TABLE table (k UInt8, `a` Nullable(String), `b` Nullable(DateTime64(9, 'UTC')), c Nullable(DateTime64(9)), d Date32 NULL) ENGINE=MergeTree ORDER BY (`k`)"#; + let sql = r#"CREATE TABLE table (k UInt8, `a` Nullable(String), `b` Nullable(DateTime64(9, 'UTC')), c Nullable(DateTime64(9)), d Date32 NULL) ENGINE = MergeTree ORDER BY (`k`)"#; // ClickHouse has a case-sensitive definition of data type, but canonical representation is not let canonical_sql = sql.replace("String", "STRING"); match clickhouse_and_generic().one_statement_parses_to(sql, &canonical_sql) { Statement::CreateTable(CreateTable { name, columns, .. }) => { - assert_eq!(name, ObjectName(vec!["table".into()])); + assert_eq!(name, ObjectName::from(vec!["table".into()])); assert_eq!( columns, vec![ @@ -612,7 +618,6 @@ fn parse_create_table_with_nullable() { ColumnDef { name: "d".into(), data_type: DataType::Date32, - collation: None, options: vec![ColumnOptionDef { name: None, option: ColumnOption::Null @@ -638,7 +643,7 @@ fn parse_create_table_with_nested_data_types() { match clickhouse().one_statement_parses_to(sql, "") { Statement::CreateTable(CreateTable { name, columns, .. }) => { - assert_eq!(name, ObjectName(vec!["table".into()])); + assert_eq!(name, ObjectName::from(vec!["table".into()])); assert_eq!( columns, vec![ @@ -656,7 +661,6 @@ fn parse_create_table_with_nested_data_types() { DataType::LowCardinality(Box::new(DataType::String(None))) ) ]), - collation: None, options: vec![], }, ColumnDef { @@ -673,7 +677,6 @@ fn parse_create_table_with_nested_data_types() { } ]) ))), - collation: None, options: vec![], }, ColumnDef { @@ -690,7 +693,6 @@ fn parse_create_table_with_nested_data_types() { )) }, ]), - collation: None, options: vec![], }, ColumnDef { @@ -699,7 +701,6 @@ fn parse_create_table_with_nested_data_types() { Box::new(DataType::String(None)), Box::new(DataType::UInt16) ), - collation: None, options: vec![], }, ] @@ -713,14 +714,14 @@ fn parse_create_table_with_nested_data_types() { fn parse_create_table_with_primary_key() { match clickhouse_and_generic().verified_stmt(concat!( r#"CREATE TABLE db.table (`i` INT, `k` INT)"#, - " ENGINE=SharedMergeTree('/clickhouse/tables/{uuid}/{shard}', '{replica}')", + " ENGINE = SharedMergeTree('/clickhouse/tables/{uuid}/{shard}', '{replica}')", " PRIMARY KEY tuple(i)", " ORDER BY tuple(i)", )) { Statement::CreateTable(CreateTable { name, columns, - engine, + table_options, primary_key, order_by, .. 
@@ -731,30 +732,35 @@ fn parse_create_table_with_primary_key() { ColumnDef { name: Ident::with_quote('`', "i"), data_type: DataType::Int(None), - collation: None, options: vec![], }, ColumnDef { name: Ident::with_quote('`', "k"), data_type: DataType::Int(None), - collation: None, options: vec![], }, ], columns ); - assert_eq!( - engine, - Some(TableEngine { - name: "SharedMergeTree".to_string(), - parameters: Some(vec![ + + let plain_options = match table_options { + CreateTableOptions::Plain(options) => options, + _ => unreachable!(), + }; + + assert!(plain_options.contains(&SqlOption::NamedParenthesizedList( + NamedParenthesizedList { + key: Ident::new("ENGINE"), + name: Some(Ident::new("SharedMergeTree")), + values: vec![ Ident::with_quote('\'', "/clickhouse/tables/{uuid}/{shard}"), Ident::with_quote('\'', "{replica}"), - ]), - }) - ); + ] + } + ))); + fn assert_function(actual: &Function, name: &str, arg: &str) -> bool { - assert_eq!(actual.name, ObjectName(vec![Ident::new(name)])); + assert_eq!(actual.name, ObjectName::from(vec![Ident::new(name)])); assert_eq!( actual.args, FunctionArguments::List(FunctionArgumentList { @@ -799,7 +805,7 @@ fn parse_create_table_with_variant_default_expressions() { " b DATETIME EPHEMERAL now(),", " c DATETIME EPHEMERAL,", " d STRING ALIAS toString(c)", - ") ENGINE=MergeTree" + ") ENGINE = MergeTree" ); match clickhouse_and_generic().verified_stmt(sql) { Statement::CreateTable(CreateTable { columns, .. }) => { @@ -809,11 +815,11 @@ fn parse_create_table_with_variant_default_expressions() { ColumnDef { name: Ident::new("a"), data_type: DataType::Datetime(None), - collation: None, options: vec![ColumnOptionDef { name: None, option: ColumnOption::Materialized(Expr::Function(Function { - name: ObjectName(vec![Ident::new("now")]), + name: ObjectName::from(vec![Ident::new("now")]), + uses_odbc_syntax: false, args: FunctionArguments::List(FunctionArgumentList { args: vec![], duplicate_treatment: None, @@ -830,11 +836,11 @@ fn parse_create_table_with_variant_default_expressions() { ColumnDef { name: Ident::new("b"), data_type: DataType::Datetime(None), - collation: None, options: vec![ColumnOptionDef { name: None, option: ColumnOption::Ephemeral(Some(Expr::Function(Function { - name: ObjectName(vec![Ident::new("now")]), + name: ObjectName::from(vec![Ident::new("now")]), + uses_odbc_syntax: false, args: FunctionArguments::List(FunctionArgumentList { args: vec![], duplicate_treatment: None, @@ -851,7 +857,6 @@ fn parse_create_table_with_variant_default_expressions() { ColumnDef { name: Ident::new("c"), data_type: DataType::Datetime(None), - collation: None, options: vec![ColumnOptionDef { name: None, option: ColumnOption::Ephemeral(None) @@ -860,11 +865,11 @@ fn parse_create_table_with_variant_default_expressions() { ColumnDef { name: Ident::new("d"), data_type: DataType::String(None), - collation: None, options: vec![ColumnOptionDef { name: None, option: ColumnOption::Alias(Expr::Function(Function { - name: ObjectName(vec![Ident::new("toString")]), + name: ObjectName::from(vec![Ident::new("toString")]), + uses_odbc_syntax: false, args: FunctionArguments::List(FunctionArgumentList { args: vec![FunctionArg::Unnamed(FunctionArgExpr::Expr( Identifier(Ident::new("c")) @@ -891,16 +896,17 @@ fn parse_create_table_with_variant_default_expressions() { fn parse_create_view_with_fields_data_types() { match clickhouse().verified_stmt(r#"CREATE VIEW v (i "int", f "String") AS SELECT * FROM t"#) { Statement::CreateView { name, columns, .. 
} => { - assert_eq!(name, ObjectName(vec!["v".into()])); + assert_eq!(name, ObjectName::from(vec!["v".into()])); assert_eq!( columns, vec![ ViewColumnDef { name: "i".into(), data_type: Some(DataType::Custom( - ObjectName(vec![Ident { + ObjectName::from(vec![Ident { value: "int".into(), - quote_style: Some('"') + quote_style: Some('"'), + span: Span::empty(), }]), vec![] )), @@ -909,9 +915,10 @@ fn parse_create_view_with_fields_data_types() { ViewColumnDef { name: "f".into(), data_type: Some(DataType::Custom( - ObjectName(vec![Ident { + ObjectName::from(vec![Ident { value: "String".into(), - quote_style: Some('"') + quote_style: Some('"'), + span: Span::empty(), }]), vec![] )), @@ -944,6 +951,12 @@ fn parse_limit_by() { clickhouse_and_generic().verified_stmt( r#"SELECT * FROM default.last_asset_runs_mv ORDER BY created_at DESC LIMIT 1 BY asset, toStartOfDay(created_at)"#, ); + clickhouse_and_generic().parse_sql_statements( + r#"SELECT * FROM default.last_asset_runs_mv ORDER BY created_at DESC BY asset, toStartOfDay(created_at)"#, + ).expect_err("BY without LIMIT"); + clickhouse_and_generic() + .parse_sql_statements("SELECT * FROM T OFFSET 5 BY foo") + .expect_err("BY with OFFSET but without LIMIT"); } #[test] @@ -1016,17 +1029,15 @@ fn parse_select_parametric_function() { assert_eq!(parameters.args.len(), 2); assert_eq!( parameters.args[0], - FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::Value(Value::Number( - "0.5".parse().unwrap(), - false - )))) + FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::Value( + (Value::Number("0.5".parse().unwrap(), false)).with_empty_span() + ))) ); assert_eq!( parameters.args[1], - FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::Value(Value::Number( - "0.6".parse().unwrap(), - false - )))) + FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::Value( + (Value::Number("0.6".parse().unwrap(), false)).with_empty_span() + ))) ); } _ => unreachable!(), @@ -1059,61 +1070,6 @@ fn parse_create_materialized_view() { clickhouse_and_generic().verified_stmt(sql); } -#[test] -fn parse_group_by_with_modifier() { - let clauses = ["x", "a, b", "ALL"]; - let modifiers = [ - "WITH ROLLUP", - "WITH CUBE", - "WITH TOTALS", - "WITH ROLLUP WITH CUBE", - ]; - let expected_modifiers = [ - vec![GroupByWithModifier::Rollup], - vec![GroupByWithModifier::Cube], - vec![GroupByWithModifier::Totals], - vec![GroupByWithModifier::Rollup, GroupByWithModifier::Cube], - ]; - for clause in &clauses { - for (modifier, expected_modifier) in modifiers.iter().zip(expected_modifiers.iter()) { - let sql = format!("SELECT * FROM t GROUP BY {clause} {modifier}"); - match clickhouse_and_generic().verified_stmt(&sql) { - Statement::Query(query) => { - let group_by = &query.body.as_select().unwrap().group_by; - if clause == &"ALL" { - assert_eq!(group_by, &GroupByExpr::All(expected_modifier.to_vec())); - } else { - assert_eq!( - group_by, - &GroupByExpr::Expressions( - clause - .split(", ") - .map(|c| Identifier(Ident::new(c))) - .collect(), - expected_modifier.to_vec() - ) - ); - } - } - _ => unreachable!(), - } - } - } - - // invalid cases - let invalid_cases = [ - "SELECT * FROM t GROUP BY x WITH", - "SELECT * FROM t GROUP BY x WITH ROLLUP CUBE", - "SELECT * FROM t GROUP BY x WITH WITH ROLLUP", - "SELECT * FROM t GROUP BY WITH ROLLUP", - ]; - for sql in invalid_cases { - clickhouse_and_generic() - .parse_sql_statements(sql) - .expect_err("Expected: one of ROLLUP or CUBE or TOTALS, found: WITH"); - } -} - #[test] fn parse_select_order_by_with_fill_interpolate() { let sql = "SELECT id, fname, lname FROM 
customer WHERE id < 5 \ @@ -1125,42 +1081,53 @@ fn parse_select_order_by_with_fill_interpolate() { let select = clickhouse().verified_query(sql); assert_eq!( OrderBy { - exprs: vec![ + kind: OrderByKind::Expressions(vec![ OrderByExpr { expr: Expr::Identifier(Ident::new("fname")), - asc: Some(true), - nulls_first: Some(true), + options: OrderByOptions { + asc: Some(true), + nulls_first: Some(true), + }, with_fill: Some(WithFill { - from: Some(Expr::Value(number("10"))), - to: Some(Expr::Value(number("20"))), - step: Some(Expr::Value(number("2"))), + from: Some(Expr::value(number("10"))), + to: Some(Expr::value(number("20"))), + step: Some(Expr::value(number("2"))), }), }, OrderByExpr { expr: Expr::Identifier(Ident::new("lname")), - asc: Some(false), - nulls_first: Some(false), + options: OrderByOptions { + asc: Some(false), + nulls_first: Some(false), + }, with_fill: Some(WithFill { - from: Some(Expr::Value(number("30"))), - to: Some(Expr::Value(number("40"))), - step: Some(Expr::Value(number("3"))), + from: Some(Expr::value(number("30"))), + to: Some(Expr::value(number("40"))), + step: Some(Expr::value(number("3"))), }), }, - ], + ]), interpolate: Some(Interpolate { exprs: Some(vec![InterpolateExpr { column: Ident::new("col1"), expr: Some(Expr::BinaryOp { left: Box::new(Expr::Identifier(Ident::new("col1"))), op: BinaryOperator::Plus, - right: Box::new(Expr::Value(number("1"))), + right: Box::new(Expr::value(number("1"))), }), }]) }) }, select.order_by.expect("ORDER BY expected") ); - assert_eq!(Some(Expr::Value(number("2"))), select.limit); + assert_eq!( + select.limit_clause, + Some(LimitClause::LimitOffset { + limit: Some(Expr::value(number("2"))), + offset: None, + limit_by: vec![] + }) + ); } #[test] @@ -1201,11 +1168,15 @@ fn parse_with_fill() { let select = clickhouse().verified_query(sql); assert_eq!( Some(WithFill { - from: Some(Expr::Value(number("10"))), - to: Some(Expr::Value(number("20"))), - step: Some(Expr::Value(number("2"))), - }), - select.order_by.expect("ORDER BY expected").exprs[0].with_fill + from: Some(Expr::value(number("10"))), + to: Some(Expr::value(number("20"))), + step: Some(Expr::value(number("2"))), + }) + .as_ref(), + match select.order_by.expect("ORDER BY expected").kind { + OrderByKind::Expressions(ref exprs) => exprs[0].with_fill.as_ref(), + _ => None, + } ); } @@ -1240,7 +1211,7 @@ fn parse_interpolate_body_with_columns() { expr: Some(Expr::BinaryOp { left: Box::new(Expr::Identifier(Ident::new("col1"))), op: BinaryOperator::Plus, - right: Box::new(Expr::Value(number("1"))), + right: Box::new(Expr::value(number("1"))), }), }, InterpolateExpr { @@ -1252,12 +1223,17 @@ fn parse_interpolate_body_with_columns() { expr: Some(Expr::BinaryOp { left: Box::new(Expr::Identifier(Ident::new("col4"))), op: BinaryOperator::Plus, - right: Box::new(Expr::Value(number("4"))), + right: Box::new(Expr::value(number("4"))), }), }, ]) - }), - select.order_by.expect("ORDER BY expected").interpolate + }) + .as_ref(), + select + .order_by + .expect("ORDER BY expected") + .interpolate + .as_ref() ); } @@ -1266,8 +1242,12 @@ fn parse_interpolate_without_body() { let sql = "SELECT fname FROM customer ORDER BY fname WITH FILL INTERPOLATE"; let select = clickhouse().verified_query(sql); assert_eq!( - Some(Interpolate { exprs: None }), - select.order_by.expect("ORDER BY expected").interpolate + Some(Interpolate { exprs: None }).as_ref(), + select + .order_by + .expect("ORDER BY expected") + .interpolate + .as_ref() ); } @@ -1278,8 +1258,13 @@ fn parse_interpolate_with_empty_body() { 
assert_eq!( Some(Interpolate { exprs: Some(vec![]) - }), - select.order_by.expect("ORDER BY expected").interpolate + }) + .as_ref(), + select + .order_by + .expect("ORDER BY expected") + .interpolate + .as_ref() ); } @@ -1293,7 +1278,9 @@ fn test_prewhere() { Some(&BinaryOp { left: Box::new(Identifier(Ident::new("x"))), op: BinaryOperator::Eq, - right: Box::new(Expr::Value(Value::Number("1".parse().unwrap(), false))), + right: Box::new(Expr::Value( + (Value::Number("1".parse().unwrap(), false)).with_empty_span() + )), }) ); let selection = query.as_ref().body.as_select().unwrap().selection.as_ref(); @@ -1302,7 +1289,9 @@ fn test_prewhere() { Some(&BinaryOp { left: Box::new(Identifier(Ident::new("y"))), op: BinaryOperator::Eq, - right: Box::new(Expr::Value(Value::Number("2".parse().unwrap(), false))), + right: Box::new(Expr::Value( + (Value::Number("2".parse().unwrap(), false)).with_empty_span() + )), }) ); } @@ -1318,13 +1307,17 @@ fn test_prewhere() { left: Box::new(BinaryOp { left: Box::new(Identifier(Ident::new("x"))), op: BinaryOperator::Eq, - right: Box::new(Expr::Value(Value::Number("1".parse().unwrap(), false))), + right: Box::new(Expr::Value( + (Value::Number("1".parse().unwrap(), false)).with_empty_span() + )), }), op: BinaryOperator::And, right: Box::new(BinaryOp { left: Box::new(Identifier(Ident::new("y"))), op: BinaryOperator::Eq, - right: Box::new(Expr::Value(Value::Number("2".parse().unwrap(), false))), + right: Box::new(Expr::Value( + (Value::Number("2".parse().unwrap(), false)).with_empty_span() + )), }), }) ); @@ -1349,7 +1342,7 @@ fn parse_use() { // Test single identifier without quotes assert_eq!( clickhouse().verified_stmt(&format!("USE {}", object_name)), - Statement::Use(Use::Object(ObjectName(vec![Ident::new( + Statement::Use(Use::Object(ObjectName::from(vec![Ident::new( object_name.to_string() )]))) ); @@ -1357,7 +1350,7 @@ fn parse_use() { // Test single identifier with different type of quotes assert_eq!( clickhouse().verified_stmt(&format!("USE {0}{1}{0}", quote, object_name)), - Statement::Use(Use::Object(ObjectName(vec![Ident::with_quote( + Statement::Use(Use::Object(ObjectName::from(vec![Ident::with_quote( quote, object_name.to_string(), )]))) @@ -1398,6 +1391,26 @@ fn test_query_with_format_clause() { } } +#[test] +fn test_insert_query_with_format_clause() { + let cases = [ + r#"INSERT INTO tbl FORMAT JSONEachRow {"id": 1, "value": "foo"}, {"id": 2, "value": "bar"}"#, + r#"INSERT INTO tbl FORMAT JSONEachRow ["first", "second", "third"]"#, + r#"INSERT INTO tbl FORMAT JSONEachRow [{"first": 1}]"#, + r#"INSERT INTO tbl (foo) FORMAT JSONAsObject {"foo": {"bar": {"x": "y"}, "baz": 1}}"#, + r#"INSERT INTO tbl (foo, bar) FORMAT JSON {"foo": 1, "bar": 2}"#, + r#"INSERT INTO tbl FORMAT CSV col1, col2, col3"#, + r#"INSERT INTO tbl FORMAT LineAsString "I love apple", "I love banana", "I love orange""#, + r#"INSERT INTO tbl (foo) SETTINGS input_format_json_read_bools_as_numbers = true FORMAT JSONEachRow {"id": 1, "value": "foo"}"#, + r#"INSERT INTO tbl SETTINGS format_template_resultset = '/some/path/resultset.format', format_template_row = '/some/path/row.format' FORMAT Template"#, + r#"INSERT INTO tbl SETTINGS input_format_json_read_bools_as_numbers = true FORMAT JSONEachRow {"id": 1, "value": "foo"}"#, + ]; + + for sql in &cases { + clickhouse().verified_stmt(sql); + } +} + #[test] fn parse_create_table_on_commit_and_as_query() { let sql = r#"CREATE LOCAL TEMPORARY TABLE test ON COMMIT PRESERVE ROWS AS SELECT 1"#; @@ -1412,10 +1425,9 @@ fn 
parse_create_table_on_commit_and_as_query() { assert_eq!(on_commit, Some(OnCommit::PreserveRows)); assert_eq!( query.unwrap().body.as_select().unwrap().projection, - vec![UnnamedExpr(Expr::Value(Value::Number( - "1".parse().unwrap(), - false - )))] + vec![UnnamedExpr(Expr::Value( + (Value::Number("1".parse().unwrap(), false)).with_empty_span() + ))] ); } _ => unreachable!(), @@ -1428,9 +1440,9 @@ fn parse_freeze_and_unfreeze_partition() { for operation_name in &["FREEZE", "UNFREEZE"] { let sql = format!("ALTER TABLE t {operation_name} PARTITION '2024-08-14'"); - let expected_partition = Partition::Expr(Expr::Value(Value::SingleQuotedString( - "2024-08-14".to_string(), - ))); + let expected_partition = Partition::Expr(Expr::Value( + Value::SingleQuotedString("2024-08-14".to_string()).with_empty_span(), + )); match clickhouse_and_generic().verified_stmt(&sql) { Statement::AlterTable { operations, .. } => { assert_eq!(operations.len(), 1); @@ -1458,9 +1470,9 @@ fn parse_freeze_and_unfreeze_partition() { match clickhouse_and_generic().verified_stmt(&sql) { Statement::AlterTable { operations, .. } => { assert_eq!(operations.len(), 1); - let expected_partition = Partition::Expr(Expr::Value(Value::SingleQuotedString( - "2024-08-14".to_string(), - ))); + let expected_partition = Partition::Expr(Expr::Value( + Value::SingleQuotedString("2024-08-14".to_string()).with_empty_span(), + )); let expected_operation = if operation_name == &"FREEZE" { AlterTableOperation::FreezePartition { partition: expected_partition, @@ -1612,6 +1624,14 @@ fn parse_explain_table() { } } +#[test] +fn parse_table_sample() { + clickhouse().verified_stmt("SELECT * FROM tbl SAMPLE 0.1"); + clickhouse().verified_stmt("SELECT * FROM tbl SAMPLE 1000"); + clickhouse().verified_stmt("SELECT * FROM tbl SAMPLE 1 / 10"); + clickhouse().verified_stmt("SELECT * FROM tbl SAMPLE 1 / 10 OFFSET 1 / 2"); +} + fn clickhouse() -> TestedDialects { TestedDialects::new(vec![Box::new(ClickHouseDialect {})]) } diff --git a/tests/sqlparser_common.rs b/tests/sqlparser_common.rs index a2eb5070d..7a8b8bdaa 100644 --- a/tests/sqlparser_common.rs +++ b/tests/sqlparser_common.rs @@ -25,6 +25,7 @@ extern crate core; +use helpers::attached_token::AttachedToken; use matches::assert_matches; use sqlparser::ast::SelectItem::UnnamedExpr; use sqlparser::ast::TableFactor::{Pivot, Unpivot}; @@ -34,12 +35,13 @@ use sqlparser::dialect::{ GenericDialect, HiveDialect, MsSqlDialect, MySqlDialect, PostgreSqlDialect, RedshiftSqlDialect, SQLiteDialect, SnowflakeDialect, }; -use sqlparser::keywords::ALL_KEYWORDS; +use sqlparser::keywords::{Keyword, ALL_KEYWORDS}; use sqlparser::parser::{Parser, ParserError, ParserOptions}; use sqlparser::tokenizer::Tokenizer; +use sqlparser::tokenizer::{Location, Span}; use test_utils::{ all_dialects, all_dialects_where, alter_table_op, assert_eq_vec, call, expr_from_projection, - join, number, only, table, table_alias, TestedDialects, + join, number, only, table, table_alias, table_from_name, TestedDialects, }; #[macro_use] @@ -48,15 +50,54 @@ mod test_utils; #[cfg(test)] use pretty_assertions::assert_eq; use sqlparser::ast::ColumnOption::Comment; +use sqlparser::ast::DateTimeField::Seconds; use sqlparser::ast::Expr::{Identifier, UnaryOp}; +use sqlparser::ast::Value::Number; use sqlparser::test_utils::all_dialects_except; +#[test] +fn parse_numeric_literal_underscore() { + let dialects = all_dialects_where(|d| d.supports_numeric_literal_underscores()); + + let canonical = if cfg!(feature = "bigdecimal") { + "SELECT 10000" + } else { + 
"SELECT 10_000" + }; + + let select = dialects.verified_only_select_with_canonical("SELECT 10_000", canonical); + + assert_eq!( + select.projection, + vec![UnnamedExpr(Expr::Value( + (number("10_000")).with_empty_span() + ))] + ); +} + +#[test] +fn parse_function_object_name() { + let select = verified_only_select("SELECT a.b.c.d(1, 2, 3) FROM T"); + let Expr::Function(func) = expr_from_projection(&select.projection[0]) else { + unreachable!() + }; + assert_eq!( + ObjectName::from( + ["a", "b", "c", "d"] + .into_iter() + .map(Ident::new) + .collect::>() + ), + func.name, + ); +} + #[test] fn parse_insert_values() { let row = vec![ - Expr::Value(number("1")), - Expr::Value(number("2")), - Expr::Value(number("3")), + Expr::value(number("1")), + Expr::value(number("2")), + Expr::value(number("3")), ]; let rows1 = vec![row.clone()]; let rows2 = vec![row.clone(), row]; @@ -92,7 +133,7 @@ fn parse_insert_values() { ) { match verified_stmt(sql) { Statement::Insert(Insert { - table_name, + table: table_name, columns, source: Some(source), .. @@ -116,6 +157,12 @@ fn parse_insert_values() { verified_stmt("INSERT INTO customer WITH foo AS (SELECT 1) SELECT * FROM foo UNION VALUES (1)"); } +#[test] +fn parse_insert_set() { + let dialects = all_dialects_where(|d| d.supports_insert_set()); + dialects.verified_stmt("INSERT INTO tbl1 SET col1 = 1, col2 = 'abc', col3 = current_date()"); +} + #[test] fn parse_replace_into() { let dialect = PostgreSqlDialect {}; @@ -139,7 +186,7 @@ fn parse_insert_default_values() { partitioned, returning, source, - table_name, + table: table_name, .. }) => { assert_eq!(columns, vec![]); @@ -148,7 +195,10 @@ fn parse_insert_default_values() { assert_eq!(partitioned, None); assert_eq!(returning, None); assert_eq!(source, None); - assert_eq!(table_name, ObjectName(vec!["test_table".into()])); + assert_eq!( + table_name, + TableObject::TableName(ObjectName::from(vec!["test_table".into()])) + ); } _ => unreachable!(), } @@ -164,7 +214,7 @@ fn parse_insert_default_values() { partitioned, returning, source, - table_name, + table: table_name, .. }) => { assert_eq!(after_columns, vec![]); @@ -173,7 +223,10 @@ fn parse_insert_default_values() { assert_eq!(partitioned, None); assert!(returning.is_some()); assert_eq!(source, None); - assert_eq!(table_name, ObjectName(vec!["test_table".into()])); + assert_eq!( + table_name, + TableObject::TableName(ObjectName::from(vec!["test_table".into()])) + ); } _ => unreachable!(), } @@ -189,7 +242,7 @@ fn parse_insert_default_values() { partitioned, returning, source, - table_name, + table: table_name, .. }) => { assert_eq!(after_columns, vec![]); @@ -198,7 +251,10 @@ fn parse_insert_default_values() { assert_eq!(partitioned, None); assert_eq!(returning, None); assert_eq!(source, None); - assert_eq!(table_name, ObjectName(vec!["test_table".into()])); + assert_eq!( + table_name, + TableObject::TableName(ObjectName::from(vec!["test_table".into()])) + ); } _ => unreachable!(), } @@ -234,8 +290,13 @@ fn parse_insert_default_values() { #[test] fn parse_insert_select_returning() { - verified_stmt("INSERT INTO t SELECT 1 RETURNING 2"); - let stmt = verified_stmt("INSERT INTO t SELECT x RETURNING x AS y"); + // Dialects that support `RETURNING` as a column identifier do + // not support this syntax. 
+ let dialects = + all_dialects_where(|d| !d.is_column_alias(&Keyword::RETURNING, &mut Parser::new(d))); + + dialects.verified_stmt("INSERT INTO t SELECT 1 RETURNING 2"); + let stmt = dialects.verified_stmt("INSERT INTO t SELECT x RETURNING x AS y"); match stmt { Statement::Insert(Insert { returning: Some(ret), @@ -246,6 +307,27 @@ fn parse_insert_select_returning() { } } +#[test] +fn parse_insert_select_from_returning() { + let sql = "INSERT INTO table1 SELECT * FROM table2 RETURNING id"; + match verified_stmt(sql) { + Statement::Insert(Insert { + table: TableObject::TableName(table_name), + source: Some(source), + returning: Some(returning), + .. + }) => { + assert_eq!("table1", table_name.to_string()); + assert!(matches!(*source.body, SetExpr::Select(_))); + assert_eq!( + returning, + vec![SelectItem::UnnamedExpr(Expr::Identifier(Ident::new("id"))),] + ); + } + bad_stmt => unreachable!("Expected valid insert, got {:?}", bad_stmt), + } +} + #[test] fn parse_returning_as_column_alias() { verified_stmt("SELECT 1 AS RETURNING"); @@ -303,16 +385,16 @@ fn parse_update() { assignments, vec![ Assignment { - target: AssignmentTarget::ColumnName(ObjectName(vec!["a".into()])), - value: Expr::Value(number("1")), + target: AssignmentTarget::ColumnName(ObjectName::from(vec!["a".into()])), + value: Expr::value(number("1")), }, Assignment { - target: AssignmentTarget::ColumnName(ObjectName(vec!["b".into()])), - value: Expr::Value(number("2")), + target: AssignmentTarget::ColumnName(ObjectName::from(vec!["b".into()])), + value: Expr::value(number("2")), }, Assignment { - target: AssignmentTarget::ColumnName(ObjectName(vec!["c".into()])), - value: Expr::Value(number("3")), + target: AssignmentTarget::ColumnName(ObjectName::from(vec!["c".into()])), + value: Expr::value(number("3")), }, ] ); @@ -356,44 +438,30 @@ fn parse_update_set_from() { stmt, Statement::Update { table: TableWithJoins { - relation: TableFactor::Table { - name: ObjectName(vec![Ident::new("t1")]), - alias: None, - args: None, - with_hints: vec![], - version: None, - partitions: vec![], - with_ordinality: false, - }, + relation: table_from_name(ObjectName::from(vec![Ident::new("t1")])), joins: vec![], }, assignments: vec![Assignment { - target: AssignmentTarget::ColumnName(ObjectName(vec![Ident::new("name")])), + target: AssignmentTarget::ColumnName(ObjectName::from(vec![Ident::new("name")])), value: Expr::CompoundIdentifier(vec![Ident::new("t2"), Ident::new("name")]) }], - from: Some(TableWithJoins { + from: Some(UpdateTableFromKind::AfterSet(vec![TableWithJoins { relation: TableFactor::Derived { lateral: false, subquery: Box::new(Query { with: None, body: Box::new(SetExpr::Select(Box::new(Select { + select_token: AttachedToken::empty(), distinct: None, top: None, + top_before_distinct: false, projection: vec![ SelectItem::UnnamedExpr(Expr::Identifier(Ident::new("name"))), SelectItem::UnnamedExpr(Expr::Identifier(Ident::new("id"))), ], into: None, from: vec![TableWithJoins { - relation: TableFactor::Table { - name: ObjectName(vec![Ident::new("t1")]), - alias: None, - args: None, - with_hints: vec![], - version: None, - partitions: vec![], - with_ordinality: false, - }, + relation: table_from_name(ObjectName::from(vec![Ident::new("t1")])), joins: vec![], }], lateral_views: vec![], @@ -412,24 +480,24 @@ fn parse_update_set_from() { window_before_qualify: false, value_table_mode: None, connect_by: None, + flavor: SelectFlavor::Standard, }))), order_by: None, - limit: None, - limit_by: vec![], - offset: None, + limit_clause: None, fetch: 
None, locks: vec![], for_clause: None, settings: None, format_clause: None, + pipe_operators: vec![], }), alias: Some(TableAlias { name: Ident::new("t2"), columns: vec![], }) }, - joins: vec![], - }), + joins: vec![] + }])), selection: Some(Expr::BinaryOp { left: Box::new(Expr::CompoundIdentifier(vec![ Ident::new("t1"), @@ -442,8 +510,12 @@ fn parse_update_set_from() { ])), }), returning: None, + or: None, } ); + + let sql = "UPDATE T SET a = b FROM U, (SELECT foo FROM V) AS W WHERE 1 = 1"; + dialects.verified_stmt(sql); } #[test] @@ -456,11 +528,12 @@ fn parse_update_with_table_alias() { from: _from, selection, returning, + or: None, } => { assert_eq!( TableWithJoins { relation: TableFactor::Table { - name: ObjectName(vec![Ident::new("users")]), + name: ObjectName::from(vec![Ident::new("users")]), alias: Some(TableAlias { name: Ident::new("u"), columns: vec![], @@ -470,6 +543,9 @@ fn parse_update_with_table_alias() { version: None, partitions: vec![], with_ordinality: false, + json_path: None, + sample: None, + index_hints: vec![], }, joins: vec![], }, @@ -477,11 +553,13 @@ fn parse_update_with_table_alias() { ); assert_eq!( vec![Assignment { - target: AssignmentTarget::ColumnName(ObjectName(vec![ + target: AssignmentTarget::ColumnName(ObjectName::from(vec![ Ident::new("u"), Ident::new("username") ])), - value: Expr::Value(Value::SingleQuotedString("new_user".to_string())), + value: Expr::Value( + (Value::SingleQuotedString("new_user".to_string())).with_empty_span() + ), }], assignments ); @@ -492,9 +570,9 @@ fn parse_update_with_table_alias() { Ident::new("username"), ])), op: BinaryOperator::Eq, - right: Box::new(Expr::Value(Value::SingleQuotedString( - "old_user".to_string() - ))), + right: Box::new(Expr::Value( + (Value::SingleQuotedString("old_user".to_string())).with_empty_span() + )), }), selection ); @@ -504,6 +582,25 @@ fn parse_update_with_table_alias() { } } +#[test] +fn parse_update_or() { + let expect_or_clause = |sql: &str, expected_action: SqliteOnConflict| match verified_stmt(sql) { + Statement::Update { or, .. 
} => assert_eq!(or, Some(expected_action)), + other => unreachable!("Expected update with or, got {:?}", other), + }; + expect_or_clause( + "UPDATE OR REPLACE t SET n = n + 1", + SqliteOnConflict::Replace, + ); + expect_or_clause( + "UPDATE OR ROLLBACK t SET n = n + 1", + SqliteOnConflict::Rollback, + ); + expect_or_clause("UPDATE OR ABORT t SET n = n + 1", SqliteOnConflict::Abort); + expect_or_clause("UPDATE OR FAIL t SET n = n + 1", SqliteOnConflict::Fail); + expect_or_clause("UPDATE OR IGNORE t SET n = n + 1", SqliteOnConflict::Ignore); +} + #[test] fn parse_select_with_table_alias_as() { // AS is optional @@ -528,22 +625,35 @@ fn parse_select_with_table_alias() { select.from, vec![TableWithJoins { relation: TableFactor::Table { - name: ObjectName(vec![Ident::new("lineitem")]), + name: ObjectName::from(vec![Ident::new("lineitem")]), alias: Some(TableAlias { name: Ident::new("l"), - columns: vec![Ident::new("A"), Ident::new("B"), Ident::new("C"),], + columns: vec![ + TableAliasColumnDef::from_name("A"), + TableAliasColumnDef::from_name("B"), + TableAliasColumnDef::from_name("C"), + ], }), args: None, with_hints: vec![], version: None, partitions: vec![], with_ordinality: false, + json_path: None, + sample: None, + index_hints: vec![], }, joins: vec![], }] ); } +#[test] +fn parse_analyze() { + verified_stmt("ANALYZE TABLE test_table"); + verified_stmt("ANALYZE test_table"); +} + #[test] fn parse_invalid_table_name() { let ast = all_dialects().run_parser_method("db.public..customer", |parser| { @@ -567,15 +677,7 @@ fn parse_delete_statement() { .. }) => { assert_eq!( - TableFactor::Table { - name: ObjectName(vec![Ident::with_quote('"', "table")]), - alias: None, - args: None, - with_hints: vec![], - version: None, - partitions: vec![], - with_ordinality: false, - }, + table_from_name(ObjectName::from(vec![Ident::with_quote('"', "table")])), from[0].relation ); } @@ -606,35 +708,25 @@ fn parse_delete_statement_for_multi_tables() { .. }) => { assert_eq!( - ObjectName(vec![Ident::new("schema1"), Ident::new("table1")]), + ObjectName::from(vec![Ident::new("schema1"), Ident::new("table1")]), tables[0] ); assert_eq!( - ObjectName(vec![Ident::new("schema2"), Ident::new("table2")]), + ObjectName::from(vec![Ident::new("schema2"), Ident::new("table2")]), tables[1] ); assert_eq!( - TableFactor::Table { - name: ObjectName(vec![Ident::new("schema1"), Ident::new("table1")]), - alias: None, - args: None, - with_hints: vec![], - version: None, - partitions: vec![], - with_ordinality: false, - }, + table_from_name(ObjectName::from(vec![ + Ident::new("schema1"), + Ident::new("table1") + ])), from[0].relation ); assert_eq!( - TableFactor::Table { - name: ObjectName(vec![Ident::new("schema2"), Ident::new("table2")]), - alias: None, - args: None, - with_hints: vec![], - version: None, - partitions: vec![], - with_ordinality: false, - }, + table_from_name(ObjectName::from(vec![ + Ident::new("schema2"), + Ident::new("table2") + ])), from[0].joins[0].relation ); } @@ -652,51 +744,31 @@ fn parse_delete_statement_for_multi_tables_with_using() { .. 
}) => { assert_eq!( - TableFactor::Table { - name: ObjectName(vec![Ident::new("schema1"), Ident::new("table1")]), - alias: None, - args: None, - with_hints: vec![], - version: None, - partitions: vec![], - with_ordinality: false, - }, + table_from_name(ObjectName::from(vec![ + Ident::new("schema1"), + Ident::new("table1") + ])), from[0].relation ); assert_eq!( - TableFactor::Table { - name: ObjectName(vec![Ident::new("schema2"), Ident::new("table2")]), - alias: None, - args: None, - with_hints: vec![], - version: None, - partitions: vec![], - with_ordinality: false, - }, + table_from_name(ObjectName::from(vec![ + Ident::new("schema2"), + Ident::new("table2") + ])), from[1].relation ); assert_eq!( - TableFactor::Table { - name: ObjectName(vec![Ident::new("schema1"), Ident::new("table1")]), - alias: None, - args: None, - with_hints: vec![], - version: None, - partitions: vec![], - with_ordinality: false, - }, + table_from_name(ObjectName::from(vec![ + Ident::new("schema1"), + Ident::new("table1") + ])), using[0].relation ); assert_eq!( - TableFactor::Table { - name: ObjectName(vec![Ident::new("schema2"), Ident::new("table2")]), - alias: None, - args: None, - with_hints: vec![], - version: None, - partitions: vec![], - with_ordinality: false, - }, + table_from_name(ObjectName::from(vec![ + Ident::new("schema2"), + Ident::new("table2") + ])), using[0].joins[0].relation ); } @@ -719,15 +791,7 @@ fn parse_where_delete_statement() { .. }) => { assert_eq!( - TableFactor::Table { - name: ObjectName(vec![Ident::new("foo")]), - alias: None, - args: None, - with_hints: vec![], - version: None, - partitions: vec![], - with_ordinality: false, - }, + table_from_name(ObjectName::from(vec![Ident::new("foo")])), from[0].relation, ); @@ -736,7 +800,7 @@ fn parse_where_delete_statement() { Expr::BinaryOp { left: Box::new(Expr::Identifier(Ident::new("name"))), op: Eq, - right: Box::new(Expr::Value(number("5"))), + right: Box::new(Expr::value(number("5"))), }, selection.unwrap(), ); @@ -762,7 +826,7 @@ fn parse_where_delete_with_alias_statement() { }) => { assert_eq!( TableFactor::Table { - name: ObjectName(vec![Ident::new("basket")]), + name: ObjectName::from(vec![Ident::new("basket")]), alias: Some(TableAlias { name: Ident::new("a"), columns: vec![], @@ -772,13 +836,16 @@ fn parse_where_delete_with_alias_statement() { version: None, partitions: vec![], with_ordinality: false, + json_path: None, + sample: None, + index_hints: vec![], }, from[0].relation, ); assert_eq!( Some(vec![TableWithJoins { relation: TableFactor::Table { - name: ObjectName(vec![Ident::new("basket")]), + name: ObjectName::from(vec![Ident::new("basket")]), alias: Some(TableAlias { name: Ident::new("b"), columns: vec![], @@ -788,6 +855,9 @@ fn parse_where_delete_with_alias_statement() { version: None, partitions: vec![], with_ordinality: false, + json_path: None, + sample: None, + index_hints: vec![], }, joins: vec![], }]), @@ -819,7 +889,7 @@ fn parse_top_level() { verified_stmt("(SELECT 1)"); verified_stmt("((SELECT 1))"); verified_stmt("VALUES (1)"); - verified_stmt("VALUES ROW(1, true, 'a'), ROW(2, false, 'b')"); + verified_stmt("VALUES ROW(1, NULL, 'a'), ROW(2, NULL, 'b')"); } #[test] @@ -829,7 +899,12 @@ fn parse_simple_select() { assert!(select.distinct.is_none()); assert_eq!(3, select.projection.len()); let select = verified_query(sql); - assert_eq!(Some(Expr::Value(number("5"))), select.limit); + let expected_limit_clause = LimitClause::LimitOffset { + limit: Some(Expr::value(number("5"))), + offset: None, + limit_by: vec![], + }; + 
assert_eq!(Some(expected_limit_clause), select.limit_clause); } #[test] @@ -837,14 +912,31 @@ fn parse_limit() { verified_stmt("SELECT * FROM user LIMIT 1"); } +#[test] +fn parse_invalid_limit_by() { + all_dialects() + .parse_sql_statements("SELECT * FROM user BY name") + .expect_err("BY without LIMIT"); +} + #[test] fn parse_limit_is_not_an_alias() { // In dialects supporting LIMIT it shouldn't be parsed as a table alias let ast = verified_query("SELECT id FROM customer LIMIT 1"); - assert_eq!(Some(Expr::Value(number("1"))), ast.limit); + let expected_limit_clause = LimitClause::LimitOffset { + limit: Some(Expr::value(number("1"))), + offset: None, + limit_by: vec![], + }; + assert_eq!(Some(expected_limit_clause), ast.limit_clause); let ast = verified_query("SELECT 1 LIMIT 5"); - assert_eq!(Some(Expr::Value(number("5"))), ast.limit); + let expected_limit_clause = LimitClause::LimitOffset { + limit: Some(Expr::value(number("5"))), + offset: None, + limit_by: vec![], + }; + assert_eq!(Some(expected_limit_clause), ast.limit_clause); } #[test] @@ -886,6 +978,44 @@ fn parse_select_distinct_tuple() { ); } +#[test] +fn parse_outer_join_operator() { + let dialects = all_dialects_where(|d| d.supports_outer_join_operator()); + + let select = dialects.verified_only_select("SELECT 1 FROM T WHERE a = b (+)"); + assert_eq!( + select.selection, + Some(Expr::BinaryOp { + left: Box::new(Expr::Identifier(Ident::new("a"))), + op: BinaryOperator::Eq, + right: Box::new(Expr::OuterJoin(Box::new(Expr::Identifier(Ident::new("b"))))) + }) + ); + + let select = dialects.verified_only_select("SELECT 1 FROM T WHERE t1.c1 = t2.c2.d3 (+)"); + assert_eq!( + select.selection, + Some(Expr::BinaryOp { + left: Box::new(Expr::CompoundIdentifier(vec![ + Ident::new("t1"), + Ident::new("c1") + ])), + op: BinaryOperator::Eq, + right: Box::new(Expr::OuterJoin(Box::new(Expr::CompoundIdentifier(vec![ + Ident::new("t2"), + Ident::new("c2"), + Ident::new("d3"), + ])))) + }) + ); + + let res = dialects.parse_sql_statements("SELECT 1 FROM T WHERE 1 = 2 (+)"); + assert_eq!( + ParserError::ParserError("Expected: column identifier before (+), found: 2".to_string()), + res.unwrap_err() + ); +} + #[test] fn parse_select_distinct_on() { let sql = "SELECT DISTINCT ON (album_id) name FROM track ORDER BY album_id, milliseconds"; @@ -943,7 +1073,7 @@ fn parse_select_into() { temporary: false, unlogged: false, table: false, - name: ObjectName(vec![Ident::new("table0")]), + name: ObjectName::from(vec![Ident::new("table0")]), }, only(&select.into) ); @@ -976,7 +1106,7 @@ fn parse_select_wildcard() { let select = verified_only_select(sql); assert_eq!( &SelectItem::QualifiedWildcard( - ObjectName(vec![Ident::new("foo")]), + SelectItemQualifiedWildcardKind::ObjectName(ObjectName::from(vec![Ident::new("foo")])), WildcardAdditionalOptions::default() ), only(&select.projection) @@ -986,7 +1116,10 @@ fn parse_select_wildcard() { let select = verified_only_select(sql); assert_eq!( &SelectItem::QualifiedWildcard( - ObjectName(vec![Ident::new("myschema"), Ident::new("mytable"),]), + SelectItemQualifiedWildcardKind::ObjectName(ObjectName::from(vec![ + Ident::new("myschema"), + Ident::new("mytable"), + ])), WildcardAdditionalOptions::default(), ), only(&select.projection) @@ -1021,7 +1154,7 @@ fn parse_column_aliases() { } = only(&select.projection) { assert_eq!(&BinaryOperator::Plus, op); - assert_eq!(&Expr::Value(number("1")), right.as_ref()); + assert_eq!(&Expr::value(number("1")), right.as_ref()); assert_eq!(&Ident::new("newname"), alias); } else { 
panic!("Expected: ExprWithAlias") @@ -1031,6 +1164,84 @@ fn parse_column_aliases() { one_statement_parses_to("SELECT a.col + 1 newname FROM foo AS a", sql); } +#[test] +fn parse_select_expr_star() { + let dialects = all_dialects_where(|d| d.supports_select_expr_star()); + + // Identifier wildcard expansion. + let select = dialects.verified_only_select("SELECT foo.bar.* FROM T"); + let SelectItem::QualifiedWildcard(SelectItemQualifiedWildcardKind::ObjectName(object_name), _) = + only(&select.projection) + else { + unreachable!( + "expected wildcard select item: got {:?}", + &select.projection[0] + ) + }; + assert_eq!( + &ObjectName::from( + ["foo", "bar"] + .into_iter() + .map(Ident::new) + .collect::>() + ), + object_name + ); + + // Arbitrary compound expression with wildcard expansion. + let select = dialects.verified_only_select("SELECT foo - bar.* FROM T"); + let SelectItem::QualifiedWildcard( + SelectItemQualifiedWildcardKind::Expr(Expr::BinaryOp { left, op, right }), + _, + ) = only(&select.projection) + else { + unreachable!( + "expected wildcard select item: got {:?}", + &select.projection[0] + ) + }; + let (Expr::Identifier(left), BinaryOperator::Minus, Expr::Identifier(right)) = + (left.as_ref(), op, right.as_ref()) + else { + unreachable!("expected binary op expr: got {:?}", &select.projection[0]) + }; + assert_eq!(&Ident::new("foo"), left); + assert_eq!(&Ident::new("bar"), right); + + // Arbitrary expression wildcard expansion. + let select = dialects.verified_only_select("SELECT myfunc().foo.* FROM T"); + let SelectItem::QualifiedWildcard( + SelectItemQualifiedWildcardKind::Expr(Expr::CompoundFieldAccess { root, access_chain }), + _, + ) = only(&select.projection) + else { + unreachable!("expected wildcard expr: got {:?}", &select.projection[0]) + }; + assert!(matches!(root.as_ref(), Expr::Function(_))); + assert_eq!(1, access_chain.len()); + assert!(matches!( + &access_chain[0], + AccessExpr::Dot(Expr::Identifier(_)) + )); + + dialects.one_statement_parses_to( + "SELECT 2. * 3 FROM T", + #[cfg(feature = "bigdecimal")] + "SELECT 2 * 3 FROM T", + #[cfg(not(feature = "bigdecimal"))] + "SELECT 2. 
* 3 FROM T", + ); + dialects.verified_only_select("SELECT myfunc().* FROM T"); + dialects.verified_only_select("SELECT myfunc().* EXCEPT (foo) FROM T"); + + // Invalid + let res = dialects.parse_sql_statements("SELECT foo.*.* FROM T"); + assert_eq!( + ParserError::ParserError("Expected: end of statement, found: .".to_string()), + res.unwrap_err() + ); +} + #[test] fn test_eof_after_as() { let res = parse_sql_statements("SELECT foo AS"); @@ -1063,7 +1274,8 @@ fn parse_select_count_wildcard() { let select = verified_only_select(sql); assert_eq!( &Expr::Function(Function { - name: ObjectName(vec![Ident::new("COUNT")]), + name: ObjectName::from(vec![Ident::new("COUNT")]), + uses_odbc_syntax: false, parameters: FunctionArguments::None, args: FunctionArguments::List(FunctionArgumentList { duplicate_treatment: None, @@ -1085,7 +1297,8 @@ fn parse_select_count_distinct() { let select = verified_only_select(sql); assert_eq!( &Expr::Function(Function { - name: ObjectName(vec![Ident::new("COUNT")]), + name: ObjectName::from(vec![Ident::new("COUNT")]), + uses_odbc_syntax: false, parameters: FunctionArguments::None, args: FunctionArguments::List(FunctionArgumentList { duplicate_treatment: Some(DuplicateTreatment::Distinct), @@ -1168,7 +1381,7 @@ fn parse_null_in_select() { let sql = "SELECT NULL"; let select = verified_only_select(sql); assert_eq!( - &Expr::Value(Value::Null), + &Expr::Value((Value::Null).with_empty_span()), expr_from_projection(only(&select.projection)), ); } @@ -1204,18 +1417,18 @@ fn parse_exponent_in_select() -> Result<(), ParserError> { assert_eq!( &vec![ - SelectItem::UnnamedExpr(Expr::Value(number("10e-20"))), - SelectItem::UnnamedExpr(Expr::Value(number("1e3"))), - SelectItem::UnnamedExpr(Expr::Value(number("1e+3"))), + SelectItem::UnnamedExpr(Expr::Value((number("10e-20")).with_empty_span())), + SelectItem::UnnamedExpr(Expr::value(number("1e3"))), + SelectItem::UnnamedExpr(Expr::Value((number("1e+3")).with_empty_span())), SelectItem::ExprWithAlias { - expr: Expr::Value(number("1e3")), + expr: Expr::value(number("1e3")), alias: Ident::new("a") }, SelectItem::ExprWithAlias { - expr: Expr::Value(number("1")), + expr: Expr::value(number("1")), alias: Ident::new("e") }, - SelectItem::UnnamedExpr(Expr::Value(number("0.5e2"))), + SelectItem::UnnamedExpr(Expr::value(number("0.5e2"))), ], &select.projection ); @@ -1231,6 +1444,7 @@ fn parse_select_with_date_column_name() { &Expr::Identifier(Ident { value: "date".into(), quote_style: None, + span: Span::empty(), }), expr_from_projection(only(&select.projection)), ); @@ -1248,9 +1462,9 @@ fn parse_escaped_single_quote_string_predicate_with_escape() { Some(Expr::BinaryOp { left: Box::new(Expr::Identifier(Ident::new("salary"))), op: NotEq, - right: Box::new(Expr::Value(Value::SingleQuotedString( - "Jim's salary".to_string() - ))), + right: Box::new(Expr::Value( + (Value::SingleQuotedString("Jim's salary".to_string())).with_empty_span() + )), }), ast.selection, ); @@ -1274,9 +1488,9 @@ fn parse_escaped_single_quote_string_predicate_with_no_escape() { Some(Expr::BinaryOp { left: Box::new(Expr::Identifier(Ident::new("salary"))), op: NotEq, - right: Box::new(Expr::Value(Value::SingleQuotedString( - "Jim''s salary".to_string() - ))), + right: Box::new(Expr::Value( + (Value::SingleQuotedString("Jim''s salary".to_string())).with_empty_span() + )), }), ast.selection, ); @@ -1289,11 +1503,14 @@ fn parse_number() { #[cfg(feature = "bigdecimal")] assert_eq!( expr, - Expr::Value(Value::Number(bigdecimal::BigDecimal::from(1), false)) + 
Expr::Value((Value::Number(bigdecimal::BigDecimal::from(1), false)).with_empty_span()) ); #[cfg(not(feature = "bigdecimal"))] - assert_eq!(expr, Expr::Value(Value::Number("1.0".into(), false))); + assert_eq!( + expr, + Expr::Value((Value::Number("1.0".into(), false)).with_empty_span()) + ); } #[test] @@ -1395,6 +1612,10 @@ fn pg_and_generic() -> TestedDialects { ]) } +fn ms_and_generic() -> TestedDialects { + TestedDialects::new(vec![Box::new(MsSqlDialect {}), Box::new(GenericDialect {})]) +} + #[test] fn parse_json_ops_without_colon() { use self::BinaryOperator::*; @@ -1428,43 +1649,220 @@ fn parse_json_ops_without_colon() { } #[test] -fn parse_mod_no_spaces() { - use self::Expr::*; - let canonical = "a1 % b1"; - let sqls = ["a1 % b1", "a1% b1", "a1 %b1", "a1%b1"]; - for sql in sqls { - println!("Parsing {sql}"); - assert_eq!( - BinaryOp { - left: Box::new(Identifier(Ident::new("a1"))), - op: BinaryOperator::Modulo, - right: Box::new(Identifier(Ident::new("b1"))), - }, - pg_and_generic().expr_parses_to(sql, canonical) - ); +fn parse_json_object() { + let dialects = TestedDialects::new(vec![ + Box::new(MsSqlDialect {}), + Box::new(PostgreSqlDialect {}), + ]); + let select = dialects.verified_only_select("SELECT JSON_OBJECT('name' : 'value', 'type' : 1)"); + match expr_from_projection(&select.projection[0]) { + Expr::Function(Function { + args: FunctionArguments::List(FunctionArgumentList { args, .. }), + .. + }) => assert_eq!( + &[ + FunctionArg::ExprNamed { + name: Expr::Value((Value::SingleQuotedString("name".into())).with_empty_span()), + arg: FunctionArgExpr::Expr(Expr::Value( + (Value::SingleQuotedString("value".into())).with_empty_span() + )), + operator: FunctionArgOperator::Colon + }, + FunctionArg::ExprNamed { + name: Expr::Value((Value::SingleQuotedString("type".into())).with_empty_span()), + arg: FunctionArgExpr::Expr(Expr::value(number("1"))), + operator: FunctionArgOperator::Colon + } + ], + &args[..] + ), + _ => unreachable!(), } -} - -#[test] -fn parse_is_null() { - use self::Expr::*; - let sql = "a IS NULL"; - assert_eq!( - IsNull(Box::new(Identifier(Ident::new("a")))), - verified_expr(sql) - ); -} - -#[test] -fn parse_is_not_null() { - use self::Expr::*; - let sql = "a IS NOT NULL"; - assert_eq!( - IsNotNull(Box::new(Identifier(Ident::new("a")))), - verified_expr(sql) - ); -} - + let select = dialects + .verified_only_select("SELECT JSON_OBJECT('name' : 'value', 'type' : NULL ABSENT ON NULL)"); + match expr_from_projection(&select.projection[0]) { + Expr::Function(Function { + args: FunctionArguments::List(FunctionArgumentList { args, clauses, .. }), + .. + }) => { + assert_eq!( + &[ + FunctionArg::ExprNamed { + name: Expr::Value( + (Value::SingleQuotedString("name".into())).with_empty_span() + ), + arg: FunctionArgExpr::Expr(Expr::Value( + (Value::SingleQuotedString("value".into())).with_empty_span() + )), + operator: FunctionArgOperator::Colon + }, + FunctionArg::ExprNamed { + name: Expr::Value( + (Value::SingleQuotedString("type".into())).with_empty_span() + ), + arg: FunctionArgExpr::Expr(Expr::Value((Value::Null).with_empty_span())), + operator: FunctionArgOperator::Colon + } + ], + &args[..] + ); + assert_eq!( + &[FunctionArgumentClause::JsonNullClause( + JsonNullClause::AbsentOnNull + )], + &clauses[..] 
+ ); + } + _ => unreachable!(), + } + let select = dialects.verified_only_select("SELECT JSON_OBJECT(NULL ON NULL)"); + match expr_from_projection(&select.projection[0]) { + Expr::Function(Function { + args: FunctionArguments::List(FunctionArgumentList { args, clauses, .. }), + .. + }) => { + assert!(args.is_empty()); + assert_eq!( + &[FunctionArgumentClause::JsonNullClause( + JsonNullClause::NullOnNull + )], + &clauses[..] + ); + } + _ => unreachable!(), + } + let select = dialects.verified_only_select("SELECT JSON_OBJECT(ABSENT ON NULL)"); + match expr_from_projection(&select.projection[0]) { + Expr::Function(Function { + args: FunctionArguments::List(FunctionArgumentList { args, clauses, .. }), + .. + }) => { + assert!(args.is_empty()); + assert_eq!( + &[FunctionArgumentClause::JsonNullClause( + JsonNullClause::AbsentOnNull + )], + &clauses[..] + ); + } + _ => unreachable!(), + } + let select = dialects.verified_only_select( + "SELECT JSON_OBJECT('name' : 'value', 'type' : JSON_ARRAY(1, 2) ABSENT ON NULL)", + ); + match expr_from_projection(&select.projection[0]) { + Expr::Function(Function { + args: FunctionArguments::List(FunctionArgumentList { args, clauses, .. }), + .. + }) => { + assert_eq!( + &FunctionArg::ExprNamed { + name: Expr::Value((Value::SingleQuotedString("name".into())).with_empty_span()), + arg: FunctionArgExpr::Expr(Expr::Value( + (Value::SingleQuotedString("value".into())).with_empty_span() + )), + operator: FunctionArgOperator::Colon + }, + &args[0] + ); + assert!(matches!( + args[1], + FunctionArg::ExprNamed { + name: Expr::Value(ValueWithSpan { + value: Value::SingleQuotedString(_), + span: _ + }), + arg: FunctionArgExpr::Expr(Expr::Function(_)), + operator: FunctionArgOperator::Colon + } + )); + assert_eq!( + &[FunctionArgumentClause::JsonNullClause( + JsonNullClause::AbsentOnNull + )], + &clauses[..] + ); + } + _ => unreachable!(), + } + let select = dialects.verified_only_select( + "SELECT JSON_OBJECT('name' : 'value', 'type' : JSON_OBJECT('type_id' : 1, 'name' : 'a') NULL ON NULL)", + ); + match expr_from_projection(&select.projection[0]) { + Expr::Function(Function { + args: FunctionArguments::List(FunctionArgumentList { args, clauses, .. }), + .. + }) => { + assert_eq!( + &FunctionArg::ExprNamed { + name: Expr::Value((Value::SingleQuotedString("name".into())).with_empty_span()), + arg: FunctionArgExpr::Expr(Expr::Value( + (Value::SingleQuotedString("value".into())).with_empty_span() + )), + operator: FunctionArgOperator::Colon + }, + &args[0] + ); + assert!(matches!( + args[1], + FunctionArg::ExprNamed { + name: Expr::Value(ValueWithSpan { + value: Value::SingleQuotedString(_), + span: _ + }), + arg: FunctionArgExpr::Expr(Expr::Function(_)), + operator: FunctionArgOperator::Colon + } + )); + assert_eq!( + &[FunctionArgumentClause::JsonNullClause( + JsonNullClause::NullOnNull + )], + &clauses[..] 
+ ); + } + _ => unreachable!(), + } +} + +#[test] +fn parse_mod_no_spaces() { + use self::Expr::*; + let canonical = "a1 % b1"; + let sqls = ["a1 % b1", "a1% b1", "a1 %b1", "a1%b1"]; + for sql in sqls { + println!("Parsing {sql}"); + assert_eq!( + BinaryOp { + left: Box::new(Identifier(Ident::new("a1"))), + op: BinaryOperator::Modulo, + right: Box::new(Identifier(Ident::new("b1"))), + }, + pg_and_generic().expr_parses_to(sql, canonical) + ); + } +} + +#[test] +fn parse_is_null() { + use self::Expr::*; + let sql = "a IS NULL"; + assert_eq!( + IsNull(Box::new(Identifier(Ident::new("a")))), + verified_expr(sql) + ); +} + +#[test] +fn parse_is_not_null() { + use self::Expr::*; + let sql = "a IS NOT NULL"; + assert_eq!( + IsNotNull(Box::new(Identifier(Ident::new("a")))), + verified_expr(sql) + ); +} + #[test] fn parse_is_distinct_from() { use self::Expr::*; @@ -1494,7 +1892,7 @@ fn parse_is_not_distinct_from() { #[test] fn parse_not_precedence() { // NOT has higher precedence than OR/AND, so the following must parse as (NOT true) OR true - let sql = "NOT true OR true"; + let sql = "NOT 1 OR 1"; assert_matches!( verified_expr(sql), Expr::BinaryOp { @@ -1520,9 +1918,9 @@ fn parse_not_precedence() { Expr::UnaryOp { op: UnaryOperator::Not, expr: Box::new(Expr::Between { - expr: Box::new(Expr::Value(number("1"))), - low: Box::new(Expr::Value(number("1"))), - high: Box::new(Expr::Value(number("2"))), + expr: Box::new(Expr::value(number("1"))), + low: Box::new(Expr::value(number("1"))), + high: Box::new(Expr::value(number("2"))), negated: true, }), }, @@ -1535,9 +1933,13 @@ fn parse_not_precedence() { Expr::UnaryOp { op: UnaryOperator::Not, expr: Box::new(Expr::Like { - expr: Box::new(Expr::Value(Value::SingleQuotedString("a".into()))), + expr: Box::new(Expr::Value( + (Value::SingleQuotedString("a".into())).with_empty_span() + )), negated: true, - pattern: Box::new(Expr::Value(Value::SingleQuotedString("b".into()))), + pattern: Box::new(Expr::Value( + (Value::SingleQuotedString("b".into())).with_empty_span() + )), escape_char: None, any: false, }), @@ -1552,7 +1954,9 @@ fn parse_not_precedence() { op: UnaryOperator::Not, expr: Box::new(Expr::InList { expr: Box::new(Expr::Identifier("a".into())), - list: vec![Expr::Value(Value::SingleQuotedString("a".into()))], + list: vec![Expr::Value( + (Value::SingleQuotedString("a".into())).with_empty_span() + )], negated: true, }), }, @@ -1572,12 +1976,13 @@ fn parse_null_like() { expr: Box::new(Expr::Identifier(Ident::new("column1"))), any: false, negated: false, - pattern: Box::new(Expr::Value(Value::Null)), + pattern: Box::new(Expr::Value((Value::Null).with_empty_span())), escape_char: None, }, alias: Ident { value: "col_null".to_owned(), quote_style: None, + span: Span::empty(), }, }, select.projection[0] @@ -1585,7 +1990,7 @@ fn parse_null_like() { assert_eq!( SelectItem::ExprWithAlias { expr: Expr::Like { - expr: Box::new(Expr::Value(Value::Null)), + expr: Box::new(Expr::Value((Value::Null).with_empty_span())), any: false, negated: false, pattern: Box::new(Expr::Identifier(Ident::new("column1"))), @@ -1594,6 +1999,7 @@ fn parse_null_like() { alias: Ident { value: "null_col".to_owned(), quote_style: None, + span: Span::empty(), }, }, select.projection[1] @@ -1612,7 +2018,9 @@ fn parse_ilike() { Expr::ILike { expr: Box::new(Expr::Identifier(Ident::new("name"))), negated, - pattern: Box::new(Expr::Value(Value::SingleQuotedString("%a".to_string()))), + pattern: Box::new(Expr::Value( + (Value::SingleQuotedString("%a".to_string())).with_empty_span() + )), 
escape_char: None, any: false, }, @@ -1629,7 +2037,9 @@ fn parse_ilike() { Expr::ILike { expr: Box::new(Expr::Identifier(Ident::new("name"))), negated, - pattern: Box::new(Expr::Value(Value::SingleQuotedString("%a".to_string()))), + pattern: Box::new(Expr::Value( + (Value::SingleQuotedString("%a".to_string())).with_empty_span() + )), escape_char: Some('^'.to_string()), any: false, }, @@ -1647,7 +2057,9 @@ fn parse_ilike() { Expr::IsNull(Box::new(Expr::ILike { expr: Box::new(Expr::Identifier(Ident::new("name"))), negated, - pattern: Box::new(Expr::Value(Value::SingleQuotedString("%a".to_string()))), + pattern: Box::new(Expr::Value( + (Value::SingleQuotedString("%a".to_string())).with_empty_span() + )), escape_char: None, any: false, })), @@ -1670,7 +2082,9 @@ fn parse_like() { Expr::Like { expr: Box::new(Expr::Identifier(Ident::new("name"))), negated, - pattern: Box::new(Expr::Value(Value::SingleQuotedString("%a".to_string()))), + pattern: Box::new(Expr::Value( + (Value::SingleQuotedString("%a".to_string())).with_empty_span() + )), escape_char: None, any: false, }, @@ -1687,7 +2101,9 @@ fn parse_like() { Expr::Like { expr: Box::new(Expr::Identifier(Ident::new("name"))), negated, - pattern: Box::new(Expr::Value(Value::SingleQuotedString("%a".to_string()))), + pattern: Box::new(Expr::Value( + (Value::SingleQuotedString("%a".to_string())).with_empty_span() + )), escape_char: Some('^'.to_string()), any: false, }, @@ -1705,7 +2121,9 @@ fn parse_like() { Expr::IsNull(Box::new(Expr::Like { expr: Box::new(Expr::Identifier(Ident::new("name"))), negated, - pattern: Box::new(Expr::Value(Value::SingleQuotedString("%a".to_string()))), + pattern: Box::new(Expr::Value( + (Value::SingleQuotedString("%a".to_string())).with_empty_span() + )), escape_char: None, any: false, })), @@ -1728,7 +2146,9 @@ fn parse_similar_to() { Expr::SimilarTo { expr: Box::new(Expr::Identifier(Ident::new("name"))), negated, - pattern: Box::new(Expr::Value(Value::SingleQuotedString("%a".to_string()))), + pattern: Box::new(Expr::Value( + (Value::SingleQuotedString("%a".to_string())).with_empty_span() + )), escape_char: None, }, select.selection.unwrap() @@ -1744,7 +2164,9 @@ fn parse_similar_to() { Expr::SimilarTo { expr: Box::new(Expr::Identifier(Ident::new("name"))), negated, - pattern: Box::new(Expr::Value(Value::SingleQuotedString("%a".to_string()))), + pattern: Box::new(Expr::Value( + (Value::SingleQuotedString("%a".to_string())).with_empty_span() + )), escape_char: Some('^'.to_string()), }, select.selection.unwrap() @@ -1760,7 +2182,9 @@ fn parse_similar_to() { Expr::IsNull(Box::new(Expr::SimilarTo { expr: Box::new(Expr::Identifier(Ident::new("name"))), negated, - pattern: Box::new(Expr::Value(Value::SingleQuotedString("%a".to_string()))), + pattern: Box::new(Expr::Value( + (Value::SingleQuotedString("%a".to_string())).with_empty_span() + )), escape_char: Some('^'.to_string()), })), select.selection.unwrap() @@ -1782,8 +2206,8 @@ fn parse_in_list() { Expr::InList { expr: Box::new(Expr::Identifier(Ident::new("segment"))), list: vec![ - Expr::Value(Value::SingleQuotedString("HIGH".to_string())), - Expr::Value(Value::SingleQuotedString("MED".to_string())), + Expr::Value((Value::SingleQuotedString("HIGH".to_string())).with_empty_span()), + Expr::Value((Value::SingleQuotedString("MED".to_string())).with_empty_span()), ], negated, }, @@ -1801,7 +2225,21 @@ fn parse_in_subquery() { assert_eq!( Expr::InSubquery { expr: Box::new(Expr::Identifier(Ident::new("segment"))), - subquery: Box::new(verified_query("SELECT segm FROM bar")), + 
subquery: verified_query("SELECT segm FROM bar").body, + negated: false, + }, + select.selection.unwrap() + ); +} + +#[test] +fn parse_in_union() { + let sql = "SELECT * FROM customers WHERE segment IN ((SELECT segm FROM bar) UNION (SELECT segm FROM bar2))"; + let select = verified_only_select(sql); + assert_eq!( + Expr::InSubquery { + expr: Box::new(Expr::Identifier(Ident::new("segment"))), + subquery: verified_query("(SELECT segm FROM bar) UNION (SELECT segm FROM bar2)").body, negated: false, }, select.selection.unwrap() @@ -1914,44 +2352,6 @@ fn parse_binary_all() { ); } -#[test] -fn parse_logical_xor() { - let sql = "SELECT true XOR true, false XOR false, true XOR false, false XOR true"; - let select = verified_only_select(sql); - assert_eq!( - SelectItem::UnnamedExpr(Expr::BinaryOp { - left: Box::new(Expr::Value(Value::Boolean(true))), - op: BinaryOperator::Xor, - right: Box::new(Expr::Value(Value::Boolean(true))), - }), - select.projection[0] - ); - assert_eq!( - SelectItem::UnnamedExpr(Expr::BinaryOp { - left: Box::new(Expr::Value(Value::Boolean(false))), - op: BinaryOperator::Xor, - right: Box::new(Expr::Value(Value::Boolean(false))), - }), - select.projection[1] - ); - assert_eq!( - SelectItem::UnnamedExpr(Expr::BinaryOp { - left: Box::new(Expr::Value(Value::Boolean(true))), - op: BinaryOperator::Xor, - right: Box::new(Expr::Value(Value::Boolean(false))), - }), - select.projection[2] - ); - assert_eq!( - SelectItem::UnnamedExpr(Expr::BinaryOp { - left: Box::new(Expr::Value(Value::Boolean(false))), - op: BinaryOperator::Xor, - right: Box::new(Expr::Value(Value::Boolean(true))), - }), - select.projection[3] - ); -} - #[test] fn parse_between() { fn chk(negated: bool) { @@ -1963,8 +2363,8 @@ fn parse_between() { assert_eq!( Expr::Between { expr: Box::new(Expr::Identifier(Ident::new("age"))), - low: Box::new(Expr::Value(number("25"))), - high: Box::new(Expr::Value(number("32"))), + low: Box::new(Expr::value(number("25"))), + high: Box::new(Expr::value(number("32"))), negated, }, select.selection.unwrap() @@ -1981,16 +2381,16 @@ fn parse_between_with_expr() { let select = verified_only_select(sql); assert_eq!( Expr::IsNull(Box::new(Expr::Between { - expr: Box::new(Expr::Value(number("1"))), + expr: Box::new(Expr::value(number("1"))), low: Box::new(Expr::BinaryOp { - left: Box::new(Expr::Value(number("1"))), + left: Box::new(Expr::value(number("1"))), op: Plus, - right: Box::new(Expr::Value(number("2"))), + right: Box::new(Expr::value(number("2"))), }), high: Box::new(Expr::BinaryOp { - left: Box::new(Expr::Value(number("3"))), + left: Box::new(Expr::value(number("3"))), op: Plus, - right: Box::new(Expr::Value(number("4"))), + right: Box::new(Expr::value(number("4"))), }), negated: false, })), @@ -2002,19 +2402,19 @@ fn parse_between_with_expr() { assert_eq!( Expr::BinaryOp { left: Box::new(Expr::BinaryOp { - left: Box::new(Expr::Value(number("1"))), + left: Box::new(Expr::value(number("1"))), op: BinaryOperator::Eq, - right: Box::new(Expr::Value(number("1"))), + right: Box::new(Expr::value(number("1"))), }), op: BinaryOperator::And, right: Box::new(Expr::Between { expr: Box::new(Expr::BinaryOp { - left: Box::new(Expr::Value(number("1"))), + left: Box::new(Expr::value(number("1"))), op: BinaryOperator::Plus, right: Box::new(Expr::Identifier(Ident::new("x"))), }), - low: Box::new(Expr::Value(number("1"))), - high: Box::new(Expr::Value(number("2"))), + low: Box::new(Expr::value(number("1"))), + high: Box::new(Expr::value(number("2"))), negated: false, }), }, @@ -2029,13 +2429,15 @@ fn 
parse_tuples() { assert_eq!( vec![ SelectItem::UnnamedExpr(Expr::Tuple(vec![ - Expr::Value(number("1")), - Expr::Value(number("2")), + Expr::value(number("1")), + Expr::value(number("2")), ])), - SelectItem::UnnamedExpr(Expr::Nested(Box::new(Expr::Value(number("1"))))), + SelectItem::UnnamedExpr(Expr::Nested(Box::new(Expr::Value( + (number("1")).with_empty_span() + )))), SelectItem::UnnamedExpr(Expr::Tuple(vec![ - Expr::Value(Value::SingleQuotedString("foo".into())), - Expr::Value(number("3")), + Expr::Value((Value::SingleQuotedString("foo".into())).with_empty_span()), + Expr::value(number("3")), Expr::Identifier(Ident::new("baz")), ])), ], @@ -2065,27 +2467,33 @@ fn parse_select_order_by() { fn chk(sql: &str) { let select = verified_query(sql); assert_eq!( - vec![ + OrderByKind::Expressions(vec![ OrderByExpr { expr: Expr::Identifier(Ident::new("lname")), - asc: Some(true), - nulls_first: None, + options: OrderByOptions { + asc: Some(true), + nulls_first: None, + }, with_fill: None, }, OrderByExpr { expr: Expr::Identifier(Ident::new("fname")), - asc: Some(false), - nulls_first: None, + options: OrderByOptions { + asc: Some(false), + nulls_first: None, + }, with_fill: None, }, OrderByExpr { expr: Expr::Identifier(Ident::new("id")), - asc: None, - nulls_first: None, + options: OrderByOptions { + asc: None, + nulls_first: None, + }, with_fill: None, }, - ], - select.order_by.expect("ORDER BY expected").exprs + ]), + select.order_by.expect("ORDER BY expected").kind ); } chk("SELECT id, fname, lname FROM customer WHERE id < 5 ORDER BY lname ASC, fname DESC, id"); @@ -2100,23 +2508,164 @@ fn parse_select_order_by_limit() { ORDER BY lname ASC, fname DESC LIMIT 2"; let select = verified_query(sql); assert_eq!( - vec![ + OrderByKind::Expressions(vec![ OrderByExpr { expr: Expr::Identifier(Ident::new("lname")), - asc: Some(true), - nulls_first: None, + options: OrderByOptions { + asc: Some(true), + nulls_first: None, + }, with_fill: None, }, OrderByExpr { expr: Expr::Identifier(Ident::new("fname")), - asc: Some(false), - nulls_first: None, + options: OrderByOptions { + asc: Some(false), + nulls_first: None, + }, with_fill: None, }, - ], - select.order_by.expect("ORDER BY expected").exprs + ]), + select.order_by.expect("ORDER BY expected").kind ); - assert_eq!(Some(Expr::Value(number("2"))), select.limit); + let expected_limit_clause = LimitClause::LimitOffset { + limit: Some(Expr::value(number("2"))), + offset: None, + limit_by: vec![], + }; + assert_eq!(Some(expected_limit_clause), select.limit_clause); +} + +#[test] +fn parse_select_order_by_all() { + fn chk(sql: &str, except_order_by: OrderByKind) { + let dialects = all_dialects_where(|d| d.supports_order_by_all()); + let select = dialects.verified_query(sql); + assert_eq!( + except_order_by, + select.order_by.expect("ORDER BY expected").kind + ); + } + let test_cases = [ + ( + "SELECT id, fname, lname FROM customer WHERE id < 5 ORDER BY ALL", + OrderByKind::All(OrderByOptions { + asc: None, + nulls_first: None, + }), + ), + ( + "SELECT id, fname, lname FROM customer WHERE id < 5 ORDER BY ALL NULLS FIRST", + OrderByKind::All(OrderByOptions { + asc: None, + nulls_first: Some(true), + }), + ), + ( + "SELECT id, fname, lname FROM customer WHERE id < 5 ORDER BY ALL NULLS LAST", + OrderByKind::All(OrderByOptions { + asc: None, + nulls_first: Some(false), + }), + ), + ( + "SELECT id, fname, lname FROM customer ORDER BY ALL ASC", + OrderByKind::All(OrderByOptions { + asc: Some(true), + nulls_first: None, + }), + ), + ( + "SELECT id, fname, lname FROM 
customer ORDER BY ALL ASC NULLS FIRST", + OrderByKind::All(OrderByOptions { + asc: Some(true), + nulls_first: Some(true), + }), + ), + ( + "SELECT id, fname, lname FROM customer ORDER BY ALL ASC NULLS LAST", + OrderByKind::All(OrderByOptions { + asc: Some(true), + nulls_first: Some(false), + }), + ), + ( + "SELECT id, fname, lname FROM customer WHERE id < 5 ORDER BY ALL DESC", + OrderByKind::All(OrderByOptions { + asc: Some(false), + nulls_first: None, + }), + ), + ( + "SELECT id, fname, lname FROM customer WHERE id < 5 ORDER BY ALL DESC NULLS FIRST", + OrderByKind::All(OrderByOptions { + asc: Some(false), + nulls_first: Some(true), + }), + ), + ( + "SELECT id, fname, lname FROM customer WHERE id < 5 ORDER BY ALL DESC NULLS LAST", + OrderByKind::All(OrderByOptions { + asc: Some(false), + nulls_first: Some(false), + }), + ), + ]; + + for (sql, expected_order_by) in test_cases { + chk(sql, expected_order_by); + } +} + +#[test] +fn parse_select_order_by_not_support_all() { + fn chk(sql: &str, except_order_by: OrderByKind) { + let dialects = all_dialects_where(|d| !d.supports_order_by_all()); + let select = dialects.verified_query(sql); + assert_eq!( + except_order_by, + select.order_by.expect("ORDER BY expected").kind + ); + } + let test_cases = [ + ( + "SELECT id, ALL FROM customer WHERE id < 5 ORDER BY ALL", + OrderByKind::Expressions(vec![OrderByExpr { + expr: Expr::Identifier(Ident::new("ALL")), + options: OrderByOptions { + asc: None, + nulls_first: None, + }, + with_fill: None, + }]), + ), + ( + "SELECT id, ALL FROM customer ORDER BY ALL ASC NULLS FIRST", + OrderByKind::Expressions(vec![OrderByExpr { + expr: Expr::Identifier(Ident::new("ALL")), + options: OrderByOptions { + asc: Some(true), + nulls_first: Some(true), + }, + with_fill: None, + }]), + ), + ( + "SELECT id, ALL FROM customer ORDER BY ALL DESC NULLS LAST", + OrderByKind::Expressions(vec![OrderByExpr { + expr: Expr::Identifier(Ident::new("ALL")), + options: OrderByOptions { + asc: Some(false), + nulls_first: Some(false), + }, + with_fill: None, + }]), + ), + ]; + + for (sql, expected_order_by) in test_cases { + chk(sql, expected_order_by); + } } #[test] @@ -2125,23 +2674,32 @@ fn parse_select_order_by_nulls_order() { ORDER BY lname ASC NULLS FIRST, fname DESC NULLS LAST LIMIT 2"; let select = verified_query(sql); assert_eq!( - vec![ + OrderByKind::Expressions(vec![ OrderByExpr { expr: Expr::Identifier(Ident::new("lname")), - asc: Some(true), - nulls_first: Some(true), + options: OrderByOptions { + asc: Some(true), + nulls_first: Some(true), + }, with_fill: None, }, OrderByExpr { expr: Expr::Identifier(Ident::new("fname")), - asc: Some(false), - nulls_first: Some(false), + options: OrderByOptions { + asc: Some(false), + nulls_first: Some(false), + }, with_fill: None, }, - ], - select.order_by.expect("ORDER BY expeccted").exprs + ]), + select.order_by.expect("ORDER BY expeccted").kind ); - assert_eq!(Some(Expr::Value(number("2"))), select.limit); + let expected_limit_clause = LimitClause::LimitOffset { + limit: Some(Expr::value(number("2"))), + offset: None, + limit_by: vec![], + }; + assert_eq!(Some(expected_limit_clause), select.limit_clause); } #[test] @@ -2178,6 +2736,92 @@ fn parse_select_group_by_all() { ); } +#[test] +fn parse_group_by_with_modifier() { + let clauses = ["x", "a, b", "ALL"]; + let modifiers = [ + "WITH ROLLUP", + "WITH CUBE", + "WITH TOTALS", + "WITH ROLLUP WITH CUBE", + ]; + let expected_modifiers = [ + vec![GroupByWithModifier::Rollup], + vec![GroupByWithModifier::Cube], + 
vec![GroupByWithModifier::Totals], + vec![GroupByWithModifier::Rollup, GroupByWithModifier::Cube], + ]; + let dialects = all_dialects_where(|d| d.supports_group_by_with_modifier()); + + for clause in &clauses { + for (modifier, expected_modifier) in modifiers.iter().zip(expected_modifiers.iter()) { + let sql = format!("SELECT * FROM t GROUP BY {clause} {modifier}"); + match dialects.verified_stmt(&sql) { + Statement::Query(query) => { + let group_by = &query.body.as_select().unwrap().group_by; + if clause == &"ALL" { + assert_eq!(group_by, &GroupByExpr::All(expected_modifier.to_vec())); + } else { + assert_eq!( + group_by, + &GroupByExpr::Expressions( + clause + .split(", ") + .map(|c| Identifier(Ident::new(c))) + .collect(), + expected_modifier.to_vec() + ) + ); + } + } + _ => unreachable!(), + } + } + } + + // invalid cases + let invalid_cases = [ + "SELECT * FROM t GROUP BY x WITH", + "SELECT * FROM t GROUP BY x WITH ROLLUP CUBE", + "SELECT * FROM t GROUP BY x WITH WITH ROLLUP", + "SELECT * FROM t GROUP BY WITH ROLLUP", + ]; + for sql in invalid_cases { + dialects + .parse_sql_statements(sql) + .expect_err("Expected: one of ROLLUP or CUBE or TOTALS, found: WITH"); + } +} + +#[test] +fn parse_group_by_special_grouping_sets() { + let sql = "SELECT a, b, SUM(c) FROM tab1 GROUP BY a, b GROUPING SETS ((a, b), (a), (b), ())"; + match all_dialects().verified_stmt(sql) { + Statement::Query(query) => { + let group_by = &query.body.as_select().unwrap().group_by; + assert_eq!( + group_by, + &GroupByExpr::Expressions( + vec![ + Expr::Identifier(Ident::new("a")), + Expr::Identifier(Ident::new("b")) + ], + vec![GroupByWithModifier::GroupingSets(Expr::GroupingSets(vec![ + vec![ + Expr::Identifier(Ident::new("a")), + Expr::Identifier(Ident::new("b")) + ], + vec![Expr::Identifier(Ident::new("a")),], + vec![Expr::Identifier(Ident::new("b"))], + vec![] + ]))] + ) + ); + } + _ => unreachable!(), + } +} + #[test] fn parse_select_having() { let sql = "SELECT foo FROM bar GROUP BY foo HAVING COUNT(*) > 1"; @@ -2185,7 +2829,8 @@ fn parse_select_having() { assert_eq!( Some(Expr::BinaryOp { left: Box::new(Expr::Function(Function { - name: ObjectName(vec![Ident::new("COUNT")]), + name: ObjectName::from(vec![Ident::new("COUNT")]), + uses_odbc_syntax: false, parameters: FunctionArguments::None, args: FunctionArguments::List(FunctionArgumentList { duplicate_treatment: None, @@ -2198,7 +2843,7 @@ fn parse_select_having() { within_group: vec![] })), op: BinaryOperator::Gt, - right: Box::new(Expr::Value(number("1"))), + right: Box::new(Expr::value(number("1"))), }), select.having ); @@ -2215,7 +2860,8 @@ fn parse_select_qualify() { assert_eq!( Some(Expr::BinaryOp { left: Box::new(Expr::Function(Function { - name: ObjectName(vec![Ident::new("ROW_NUMBER")]), + name: ObjectName::from(vec![Ident::new("ROW_NUMBER")]), + uses_odbc_syntax: false, parameters: FunctionArguments::None, args: FunctionArguments::List(FunctionArgumentList { duplicate_treatment: None, @@ -2229,8 +2875,10 @@ fn parse_select_qualify() { partition_by: vec![Expr::Identifier(Ident::new("p"))], order_by: vec![OrderByExpr { expr: Expr::Identifier(Ident::new("o")), - asc: None, - nulls_first: None, + options: OrderByOptions { + asc: None, + nulls_first: None, + }, with_fill: None, }], window_frame: None, @@ -2238,7 +2886,7 @@ fn parse_select_qualify() { within_group: vec![] })), op: BinaryOperator::Eq, - right: Box::new(Expr::Value(number("1"))), + right: Box::new(Expr::value(number("1"))), }), select.qualify ); @@ -2249,7 +2897,7 @@ fn 
parse_select_qualify() { Some(Expr::BinaryOp { left: Box::new(Expr::Identifier(Ident::new("row_num"))), op: BinaryOperator::Eq, - right: Box::new(Expr::Value(number("1"))), + right: Box::new(Expr::value(number("1"))), }), select.qualify ); @@ -2261,6 +2909,14 @@ fn parse_limit_accepts_all() { "SELECT id, fname, lname FROM customer WHERE id = 1 LIMIT ALL", "SELECT id, fname, lname FROM customer WHERE id = 1", ); + one_statement_parses_to( + "SELECT id, fname, lname FROM customer WHERE id = 1 LIMIT ALL OFFSET 1", + "SELECT id, fname, lname FROM customer WHERE id = 1 OFFSET 1", + ); + one_statement_parses_to( + "SELECT id, fname, lname FROM customer WHERE id = 1 OFFSET 1 LIMIT ALL", + "SELECT id, fname, lname FROM customer WHERE id = 1 OFFSET 1", + ); } #[test] @@ -2362,7 +3018,7 @@ fn parse_cast() { &Expr::Cast { kind: CastKind::Cast, expr: Box::new(Expr::Identifier(Ident::new("id"))), - data_type: DataType::Varbinary(Some(50)), + data_type: DataType::Varbinary(Some(BinaryLength::IntegerLength { length: 50 })), format: None, }, expr_from_projection(only(&select.projection)) @@ -2621,7 +3277,8 @@ fn parse_listagg() { assert_eq!( &Expr::Function(Function { - name: ObjectName(vec![Ident::new("LISTAGG")]), + name: ObjectName::from(vec![Ident::new("LISTAGG")]), + uses_odbc_syntax: false, parameters: FunctionArguments::None, args: FunctionArguments::List(FunctionArgumentList { duplicate_treatment: Some(DuplicateTreatment::Distinct), @@ -2630,14 +3287,14 @@ fn parse_listagg() { "dateid" )))), FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::Value( - Value::SingleQuotedString(", ".to_owned()) + (Value::SingleQuotedString(", ".to_owned())).with_empty_span() ))) ], clauses: vec![FunctionArgumentClause::OnOverflow( ListAggOnOverflow::Truncate { - filler: Some(Box::new(Expr::Value(Value::SingleQuotedString( - "%".to_string(), - )))), + filler: Some(Box::new(Expr::Value( + (Value::SingleQuotedString("%".to_string(),)).with_empty_span() + ))), with_count: false, } )], @@ -2650,18 +3307,24 @@ fn parse_listagg() { expr: Expr::Identifier(Ident { value: "id".to_string(), quote_style: None, + span: Span::empty(), }), - asc: None, - nulls_first: None, + options: OrderByOptions { + asc: None, + nulls_first: None, + }, with_fill: None, }, OrderByExpr { expr: Expr::Identifier(Ident { value: "username".to_string(), quote_style: None, + span: Span::empty(), }), - asc: None, - nulls_first: None, + options: OrderByOptions { + asc: None, + nulls_first: None, + }, with_fill: None, }, ] @@ -2773,7 +3436,10 @@ fn parse_window_function_null_treatment_arg() { let SelectItem::UnnamedExpr(Expr::Function(actual)) = &projection[i] else { unreachable!() }; - assert_eq!(ObjectName(vec![Ident::new("FIRST_VALUE")]), actual.name); + assert_eq!( + ObjectName::from(vec![Ident::new("FIRST_VALUE")]), + actual.name + ); let FunctionArguments::List(arg_list) = &actual.args else { panic!("expected argument list") }; @@ -2814,6 +3480,138 @@ fn parse_window_function_null_treatment_arg() { ); } +#[test] +fn test_compound_expr() { + let supported_dialects = TestedDialects::new(vec![ + Box::new(GenericDialect {}), + Box::new(DuckDbDialect {}), + Box::new(BigQueryDialect {}), + ]); + let sqls = [ + "SELECT abc[1].f1 FROM t", + "SELECT abc[1].f1.f2 FROM t", + "SELECT f1.abc[1] FROM t", + "SELECT f1.f2.abc[1] FROM t", + "SELECT f1.abc[1].f2 FROM t", + "SELECT named_struct('a', 1, 'b', 2).a", + "SELECT named_struct('a', 1, 'b', 2).a", + "SELECT make_array(1, 2, 3)[1]", + "SELECT make_array(named_struct('a', 1))[1].a", + "SELECT abc[1][-1].a.b FROM 
t", + "SELECT abc[1][-1].a.b[1] FROM t", + ]; + for sql in sqls { + supported_dialects.verified_stmt(sql); + } +} + +#[test] +fn test_double_value() { + let dialects = all_dialects(); + let test_cases = vec![ + gen_number_case_with_sign("0."), + gen_number_case_with_sign("0.0"), + gen_number_case_with_sign("0000."), + gen_number_case_with_sign("0000.00"), + gen_number_case_with_sign(".0"), + gen_number_case_with_sign(".00"), + gen_number_case_with_sign("0e0"), + gen_number_case_with_sign("0e+0"), + gen_number_case_with_sign("0e-0"), + gen_number_case_with_sign("0.e-0"), + gen_number_case_with_sign("0.e+0"), + gen_number_case_with_sign(".0e-0"), + gen_number_case_with_sign(".0e+0"), + gen_number_case_with_sign("00.0e+0"), + gen_number_case_with_sign("00.0e-0"), + ]; + + for (input, expected) in test_cases { + for (i, expr) in input.iter().enumerate() { + if let Statement::Query(query) = + dialects.one_statement_parses_to(&format!("SELECT {}", expr), "") + { + if let SetExpr::Select(select) = *query.body { + assert_eq!(expected[i], select.projection[0]); + } else { + panic!("Expected a SELECT statement"); + } + } else { + panic!("Expected a SELECT statement"); + } + } + } +} + +fn gen_number_case(value: &str) -> (Vec, Vec) { + let input = vec![ + value.to_string(), + format!("{} col_alias", value), + format!("{} AS col_alias", value), + ]; + let expected = vec![ + SelectItem::UnnamedExpr(Expr::value(number(value))), + SelectItem::ExprWithAlias { + expr: Expr::value(number(value)), + alias: Ident::new("col_alias"), + }, + SelectItem::ExprWithAlias { + expr: Expr::value(number(value)), + alias: Ident::new("col_alias"), + }, + ]; + (input, expected) +} + +fn gen_sign_number_case(value: &str, op: UnaryOperator) -> (Vec, Vec) { + match op { + UnaryOperator::Plus | UnaryOperator::Minus => {} + _ => panic!("Invalid sign"), + } + + let input = vec![ + format!("{}{}", op, value), + format!("{}{} col_alias", op, value), + format!("{}{} AS col_alias", op, value), + ]; + let expected = vec![ + SelectItem::UnnamedExpr(Expr::UnaryOp { + op, + expr: Box::new(Expr::value(number(value))), + }), + SelectItem::ExprWithAlias { + expr: Expr::UnaryOp { + op, + expr: Box::new(Expr::value(number(value))), + }, + alias: Ident::new("col_alias"), + }, + SelectItem::ExprWithAlias { + expr: Expr::UnaryOp { + op, + expr: Box::new(Expr::value(number(value))), + }, + alias: Ident::new("col_alias"), + }, + ]; + (input, expected) +} + +/// generate the test cases for signed and unsigned numbers +/// For example, given "0.0", the test cases will be: +/// - "0.0" +/// - "+0.0" +/// - "-0.0" +fn gen_number_case_with_sign(number: &str) -> (Vec, Vec) { + let (mut input, mut expected) = gen_number_case(number); + for op in [UnaryOperator::Plus, UnaryOperator::Minus] { + let (input_sign, expected_sign) = gen_sign_number_case(number, op); + input.extend(input_sign); + expected.extend(expected_sign); + } + (input, expected) +} + #[test] fn parse_negative_value() { let sql1 = "SELECT -1"; @@ -2859,7 +3657,7 @@ fn parse_create_table() { name, columns, constraints, - with_options, + table_options, if_not_exists: false, external: false, file_format: None, @@ -2876,7 +3674,6 @@ fn parse_create_table() { length: 100, unit: None, })), - collation: None, options: vec![ColumnOptionDef { name: None, option: ColumnOption::NotNull, @@ -2884,8 +3681,7 @@ fn parse_create_table() { }, ColumnDef { name: "lat".into(), - data_type: DataType::Double, - collation: None, + data_type: DataType::Double(ExactNumberInfo::None), options: vec![ColumnOptionDef 
{ name: None, option: ColumnOption::Null, @@ -2893,14 +3689,12 @@ fn parse_create_table() { }, ColumnDef { name: "lng".into(), - data_type: DataType::Double, - collation: None, + data_type: DataType::Double(ExactNumberInfo::None), options: vec![], }, ColumnDef { name: "constrained".into(), data_type: DataType::Int(None), - collation: None, options: vec![ ColumnOptionDef { name: None, @@ -2933,11 +3727,10 @@ fn parse_create_table() { ColumnDef { name: "ref".into(), data_type: DataType::Int(None), - collation: None, options: vec![ColumnOptionDef { name: None, option: ColumnOption::ForeignKey { - foreign_table: ObjectName(vec!["othertable".into()]), + foreign_table: ObjectName::from(vec!["othertable".into()]), referred_columns: vec!["a".into(), "b".into()], on_delete: None, on_update: None, @@ -2948,11 +3741,10 @@ fn parse_create_table() { ColumnDef { name: "ref2".into(), data_type: DataType::Int(None), - collation: None, options: vec![ColumnOptionDef { name: None, option: ColumnOption::ForeignKey { - foreign_table: ObjectName(vec!["othertable2".into()]), + foreign_table: ObjectName::from(vec!["othertable2".into()]), referred_columns: vec![], on_delete: Some(ReferentialAction::Cascade), on_update: Some(ReferentialAction::NoAction), @@ -2968,7 +3760,7 @@ fn parse_create_table() { TableConstraint::ForeignKey { name: Some("fkey".into()), columns: vec!["lat".into()], - foreign_table: ObjectName(vec!["othertable3".into()]), + foreign_table: ObjectName::from(vec!["othertable3".into()]), referred_columns: vec!["lat".into()], on_delete: Some(ReferentialAction::Restrict), on_update: None, @@ -2977,7 +3769,7 @@ fn parse_create_table() { TableConstraint::ForeignKey { name: Some("fkey2".into()), columns: vec!["lat".into()], - foreign_table: ObjectName(vec!["othertable4".into()]), + foreign_table: ObjectName::from(vec!["othertable4".into()]), referred_columns: vec!["lat".into()], on_delete: Some(ReferentialAction::NoAction), on_update: Some(ReferentialAction::Restrict), @@ -2986,7 +3778,7 @@ fn parse_create_table() { TableConstraint::ForeignKey { name: None, columns: vec!["lat".into()], - foreign_table: ObjectName(vec!["othertable4".into()]), + foreign_table: ObjectName::from(vec!["othertable4".into()]), referred_columns: vec!["lat".into()], on_delete: Some(ReferentialAction::Cascade), on_update: Some(ReferentialAction::SetDefault), @@ -2995,7 +3787,7 @@ fn parse_create_table() { TableConstraint::ForeignKey { name: None, columns: vec!["lng".into()], - foreign_table: ObjectName(vec!["othertable4".into()]), + foreign_table: ObjectName::from(vec!["othertable4".into()]), referred_columns: vec!["longitude".into()], on_delete: None, on_update: Some(ReferentialAction::SetNull), @@ -3003,7 +3795,7 @@ fn parse_create_table() { }, ] ); - assert_eq!(with_options, vec![]); + assert_eq!(table_options, CreateTableOptions::None); } _ => unreachable!(), } @@ -3048,7 +3840,7 @@ fn parse_create_table_with_constraint_characteristics() { name, columns, constraints, - with_options, + table_options, if_not_exists: false, external: false, file_format: None, @@ -3065,7 +3857,6 @@ fn parse_create_table_with_constraint_characteristics() { length: 100, unit: None, })), - collation: None, options: vec![ColumnOptionDef { name: None, option: ColumnOption::NotNull, @@ -3073,8 +3864,7 @@ fn parse_create_table_with_constraint_characteristics() { }, ColumnDef { name: "lat".into(), - data_type: DataType::Double, - collation: None, + data_type: DataType::Double(ExactNumberInfo::None), options: vec![ColumnOptionDef { name: None, option: 
ColumnOption::Null, @@ -3082,8 +3872,7 @@ fn parse_create_table_with_constraint_characteristics() { }, ColumnDef { name: "lng".into(), - data_type: DataType::Double, - collation: None, + data_type: DataType::Double(ExactNumberInfo::None), options: vec![], }, ] @@ -3094,7 +3883,7 @@ fn parse_create_table_with_constraint_characteristics() { TableConstraint::ForeignKey { name: Some("fkey".into()), columns: vec!["lat".into()], - foreign_table: ObjectName(vec!["othertable3".into()]), + foreign_table: ObjectName::from(vec!["othertable3".into()]), referred_columns: vec!["lat".into()], on_delete: Some(ReferentialAction::Restrict), on_update: None, @@ -3107,7 +3896,7 @@ fn parse_create_table_with_constraint_characteristics() { TableConstraint::ForeignKey { name: Some("fkey2".into()), columns: vec!["lat".into()], - foreign_table: ObjectName(vec!["othertable4".into()]), + foreign_table: ObjectName::from(vec!["othertable4".into()]), referred_columns: vec!["lat".into()], on_delete: Some(ReferentialAction::NoAction), on_update: Some(ReferentialAction::Restrict), @@ -3120,7 +3909,7 @@ fn parse_create_table_with_constraint_characteristics() { TableConstraint::ForeignKey { name: None, columns: vec!["lat".into()], - foreign_table: ObjectName(vec!["othertable4".into()]), + foreign_table: ObjectName::from(vec!["othertable4".into()]), referred_columns: vec!["lat".into()], on_delete: Some(ReferentialAction::Cascade), on_update: Some(ReferentialAction::SetDefault), @@ -3133,7 +3922,7 @@ fn parse_create_table_with_constraint_characteristics() { TableConstraint::ForeignKey { name: None, columns: vec!["lng".into()], - foreign_table: ObjectName(vec!["othertable4".into()]), + foreign_table: ObjectName::from(vec!["othertable4".into()]), referred_columns: vec!["longitude".into()], on_delete: None, on_update: Some(ReferentialAction::SetNull), @@ -3145,7 +3934,7 @@ fn parse_create_table_with_constraint_characteristics() { }, ] ); - assert_eq!(with_options, vec![]); + assert_eq!(table_options, CreateTableOptions::None); } _ => unreachable!(), } @@ -3218,7 +4007,6 @@ fn parse_create_table_column_constraint_characteristics() { vec![ColumnDef { name: "a".into(), data_type: DataType::Int(None), - collation: None, options: vec![ColumnOptionDef { name: None, option: ColumnOption::Unique { @@ -3326,20 +4114,18 @@ fn parse_create_table_hive_array() { .. 
}) => { assert!(if_not_exists); - assert_eq!(name, ObjectName(vec!["something".into()])); + assert_eq!(name, ObjectName::from(vec!["something".into()])); assert_eq!( columns, vec![ ColumnDef { name: Ident::new("name"), data_type: DataType::Int(None), - collation: None, options: vec![], }, ColumnDef { name: Ident::new("val"), data_type: DataType::Array(expected), - collation: None, options: vec![], }, ], @@ -3416,7 +4202,10 @@ fn parse_assert_message() { message: Some(message), } => { match message { - Expr::Value(Value::SingleQuotedString(s)) => assert_eq!(s, "No rows in my_table"), + Expr::Value(ValueWithSpan { + value: Value::SingleQuotedString(s), + span: _, + }) => assert_eq!(s, "No rows in my_table"), _ => unreachable!(), }; } @@ -3434,6 +4223,11 @@ fn parse_create_schema() { } _ => unreachable!(), } + + verified_stmt(r#"CREATE SCHEMA a.b.c OPTIONS(key1 = 'value1', key2 = 'value2')"#); + verified_stmt(r#"CREATE SCHEMA IF NOT EXISTS a OPTIONS(key1 = 'value1')"#); + verified_stmt(r#"CREATE SCHEMA IF NOT EXISTS a OPTIONS()"#); + verified_stmt(r#"CREATE SCHEMA IF NOT EXISTS a DEFAULT COLLATE 'und:ci' OPTIONS()"#); } #[test] @@ -3511,19 +4305,18 @@ fn parse_create_table_as_table() { schema_name: None, }))), order_by: None, - limit: None, - limit_by: vec![], - offset: None, + limit_clause: None, fetch: None, locks: vec![], for_clause: None, settings: None, format_clause: None, + pipe_operators: vec![], }); match verified_stmt(sql1) { Statement::CreateTable(CreateTable { query, name, .. }) => { - assert_eq!(name, ObjectName(vec![Ident::new("new_table")])); + assert_eq!(name, ObjectName::from(vec![Ident::new("new_table")])); assert_eq!(query.unwrap(), expected_query1); } _ => unreachable!(), @@ -3538,19 +4331,18 @@ fn parse_create_table_as_table() { schema_name: Some("schema_name".to_string()), }))), order_by: None, - limit: None, - limit_by: vec![], - offset: None, + limit_clause: None, fetch: None, locks: vec![], for_clause: None, settings: None, format_clause: None, + pipe_operators: vec![], }); match verified_stmt(sql2) { Statement::CreateTable(CreateTable { query, name, .. }) => { - assert_eq!(name, ObjectName(vec![Ident::new("new_table")])); + assert_eq!(name, ObjectName::from(vec![Ident::new("new_table")])); assert_eq!(query.unwrap(), expected_query2); } _ => unreachable!(), @@ -3629,16 +4421,22 @@ fn parse_create_table_with_options() { let sql = "CREATE TABLE t (c INT) WITH (foo = 'bar', a = 123)"; match generic.verified_stmt(sql) { - Statement::CreateTable(CreateTable { with_options, .. }) => { + Statement::CreateTable(CreateTable { table_options, .. }) => { + let with_options = match table_options { + CreateTableOptions::With(options) => options, + _ => unreachable!(), + }; assert_eq!( vec![ SqlOption::KeyValue { key: "foo".into(), - value: Expr::Value(Value::SingleQuotedString("bar".into())), + value: Expr::Value( + (Value::SingleQuotedString("bar".into())).with_empty_span() + ), }, SqlOption::KeyValue { key: "a".into(), - value: Expr::Value(number("123")), + value: Expr::value(number("123")), }, ], with_options @@ -3653,8 +4451,8 @@ fn parse_create_table_clone() { let sql = "CREATE OR REPLACE TABLE a CLONE a_tmp"; match verified_stmt(sql) { Statement::CreateTable(CreateTable { name, clone, .. 
}) => { - assert_eq!(ObjectName(vec![Ident::new("a")]), name); - assert_eq!(Some(ObjectName(vec![(Ident::new("a_tmp"))])), clone) + assert_eq!(ObjectName::from(vec![Ident::new("a")]), name); + assert_eq!(Some(ObjectName::from(vec![(Ident::new("a_tmp"))])), clone) } _ => unreachable!(), } @@ -3688,7 +4486,7 @@ fn parse_create_external_table() { name, columns, constraints, - with_options, + table_options, if_not_exists, external, file_format, @@ -3705,7 +4503,6 @@ fn parse_create_external_table() { length: 100, unit: None, })), - collation: None, options: vec![ColumnOptionDef { name: None, option: ColumnOption::NotNull, @@ -3713,8 +4510,7 @@ fn parse_create_external_table() { }, ColumnDef { name: "lat".into(), - data_type: DataType::Double, - collation: None, + data_type: DataType::Double(ExactNumberInfo::None), options: vec![ColumnOptionDef { name: None, option: ColumnOption::Null, @@ -3722,8 +4518,7 @@ fn parse_create_external_table() { }, ColumnDef { name: "lng".into(), - data_type: DataType::Double, - collation: None, + data_type: DataType::Double(ExactNumberInfo::None), options: vec![], }, ] @@ -3734,7 +4529,7 @@ fn parse_create_external_table() { assert_eq!(FileFormat::TEXTFILE, file_format.unwrap()); assert_eq!("/tmp/example.csv", location.unwrap()); - assert_eq!(with_options, vec![]); + assert_eq!(table_options, CreateTableOptions::None); assert!(!if_not_exists); } _ => unreachable!(), @@ -3759,7 +4554,7 @@ fn parse_create_or_replace_external_table() { name, columns, constraints, - with_options, + table_options, if_not_exists, external, file_format, @@ -3776,7 +4571,6 @@ fn parse_create_or_replace_external_table() { length: 100, unit: None, })), - collation: None, options: vec![ColumnOptionDef { name: None, option: ColumnOption::NotNull, @@ -3789,7 +4583,7 @@ fn parse_create_or_replace_external_table() { assert_eq!(FileFormat::TEXTFILE, file_format.unwrap()); assert_eq!("/tmp/example.csv", location.unwrap()); - assert_eq!(with_options, vec![]); + assert_eq!(table_options, CreateTableOptions::None); assert!(!if_not_exists); assert!(or_replace); } @@ -3865,9 +4659,12 @@ fn parse_alter_table() { [SqlOption::KeyValue { key: Ident { value: "classification".to_string(), - quote_style: Some('\'') + quote_style: Some('\''), + span: Span::empty(), }, - value: Expr::Value(Value::SingleQuotedString("parquet".to_string())), + value: Expr::Value( + (Value::SingleQuotedString("parquet".to_string())).with_empty_span() + ), }], ); } @@ -3875,6 +4672,65 @@ fn parse_alter_table() { } } +#[test] +fn parse_rename_table() { + match verified_stmt("RENAME TABLE test.test1 TO test_db.test2") { + Statement::RenameTable(rename_tables) => { + assert_eq!( + vec![RenameTable { + old_name: ObjectName::from(vec![ + Ident::new("test".to_string()), + Ident::new("test1".to_string()), + ]), + new_name: ObjectName::from(vec![ + Ident::new("test_db".to_string()), + Ident::new("test2".to_string()), + ]), + }], + rename_tables + ); + } + _ => unreachable!(), + }; + + match verified_stmt( + "RENAME TABLE old_table1 TO new_table1, old_table2 TO new_table2, old_table3 TO new_table3", + ) { + Statement::RenameTable(rename_tables) => { + assert_eq!( + vec![ + RenameTable { + old_name: ObjectName::from(vec![Ident::new("old_table1".to_string())]), + new_name: ObjectName::from(vec![Ident::new("new_table1".to_string())]), + }, + RenameTable { + old_name: ObjectName::from(vec![Ident::new("old_table2".to_string())]), + new_name: ObjectName::from(vec![Ident::new("new_table2".to_string())]), + }, + RenameTable { + old_name: 
ObjectName::from(vec![Ident::new("old_table3".to_string())]), + new_name: ObjectName::from(vec![Ident::new("new_table3".to_string())]), + } + ], + rename_tables + ); + } + _ => unreachable!(), + }; + + assert_eq!( + parse_sql_statements("RENAME TABLE old_table TO new_table a").unwrap_err(), + ParserError::ParserError("Expected: end of statement, found: a".to_string()) + ); + + assert_eq!( + parse_sql_statements("RENAME TABLE1 old_table TO new_table a").unwrap_err(), + ParserError::ParserError( + "Expected: KEYWORD `TABLE` after RENAME, found: TABLE1".to_string() + ) + ); +} + #[test] fn test_alter_table_with_on_cluster() { match all_dialects() @@ -3883,8 +4739,8 @@ fn test_alter_table_with_on_cluster() { Statement::AlterTable { name, on_cluster, .. } => { - std::assert_eq!(name.to_string(), "t"); - std::assert_eq!(on_cluster, Some(Ident::with_quote('\'', "cluster"))); + assert_eq!(name.to_string(), "t"); + assert_eq!(on_cluster, Some(Ident::with_quote('\'', "cluster"))); } _ => unreachable!(), } @@ -3895,15 +4751,15 @@ fn test_alter_table_with_on_cluster() { Statement::AlterTable { name, on_cluster, .. } => { - std::assert_eq!(name.to_string(), "t"); - std::assert_eq!(on_cluster, Some(Ident::new("cluster_name"))); + assert_eq!(name.to_string(), "t"); + assert_eq!(on_cluster, Some(Ident::new("cluster_name"))); } _ => unreachable!(), } let res = all_dialects() .parse_sql_statements("ALTER TABLE t ON CLUSTER 123 ADD CONSTRAINT bar PRIMARY KEY (baz)"); - std::assert_eq!( + assert_eq!( res.unwrap_err(), ParserError::ParserError("Expected: identifier, found: 123".to_string()) ) @@ -3952,11 +4808,13 @@ fn parse_alter_view_with_options() { vec![ SqlOption::KeyValue { key: "foo".into(), - value: Expr::Value(Value::SingleQuotedString("bar".into())), + value: Expr::Value( + (Value::SingleQuotedString("bar".into())).with_empty_span() + ), }, SqlOption::KeyValue { key: "a".into(), - value: Expr::Value(number("123")), + value: Expr::value(number("123")), }, ], with_options @@ -4047,6 +4905,7 @@ fn parse_alter_table_constraints() { check_one("UNIQUE (id)"); check_one("FOREIGN KEY (foo, bar) REFERENCES AnotherTable(foo, bar)"); check_one("CHECK (end_date > start_date OR end_date IS NULL)"); + check_one("CONSTRAINT fk FOREIGN KEY (lng) REFERENCES othertable4"); fn check_one(constraint_text: &str) { match alter_table_op(verified_stmt(&format!( @@ -4063,7 +4922,9 @@ fn parse_alter_table_constraints() { #[test] fn parse_alter_table_drop_column() { + check_one("DROP COLUMN IF EXISTS is_active"); check_one("DROP COLUMN IF EXISTS is_active CASCADE"); + check_one("DROP COLUMN IF EXISTS is_active RESTRICT"); one_statement_parses_to( "ALTER TABLE tab DROP IF EXISTS is_active CASCADE", "ALTER TABLE tab DROP COLUMN IF EXISTS is_active CASCADE", @@ -4078,11 +4939,15 @@ fn parse_alter_table_drop_column() { AlterTableOperation::DropColumn { column_name, if_exists, - cascade, + drop_behavior, } => { assert_eq!("is_active", column_name.to_string()); assert!(if_exists); - assert!(cascade); + match drop_behavior { + None => assert!(constraint_text.ends_with(" is_active")), + Some(DropBehavior::Restrict) => assert!(constraint_text.ends_with(" RESTRICT")), + Some(DropBehavior::Cascade) => assert!(constraint_text.ends_with(" CASCADE")), + } } _ => unreachable!(), } @@ -4108,14 +4973,14 @@ fn parse_alter_table_alter_column() { ); match alter_table_op(verified_stmt(&format!( - "{alter_stmt} ALTER COLUMN is_active SET DEFAULT false" + "{alter_stmt} ALTER COLUMN is_active SET DEFAULT 0" ))) { AlterTableOperation::AlterColumn { 
column_name, op } => { assert_eq!("is_active", column_name.to_string()); assert_eq!( op, AlterColumnOperation::SetDefault { - value: Expr::Value(Value::Boolean(false)) + value: Expr::Value((test_utils::number("0")).with_empty_span()) } ); } @@ -4172,37 +5037,29 @@ fn parse_alter_table_alter_column_type() { #[test] fn parse_alter_table_drop_constraint() { - let alter_stmt = "ALTER TABLE tab"; - match alter_table_op(verified_stmt( - "ALTER TABLE tab DROP CONSTRAINT constraint_name CASCADE", - )) { - AlterTableOperation::DropConstraint { - name: constr_name, - if_exists, - cascade, - } => { - assert_eq!("constraint_name", constr_name.to_string()); - assert!(!if_exists); - assert!(cascade); - } - _ => unreachable!(), - } - match alter_table_op(verified_stmt( - "ALTER TABLE tab DROP CONSTRAINT IF EXISTS constraint_name", - )) { - AlterTableOperation::DropConstraint { - name: constr_name, - if_exists, - cascade, - } => { - assert_eq!("constraint_name", constr_name.to_string()); - assert!(if_exists); - assert!(!cascade); + check_one("DROP CONSTRAINT IF EXISTS constraint_name"); + check_one("DROP CONSTRAINT IF EXISTS constraint_name RESTRICT"); + check_one("DROP CONSTRAINT IF EXISTS constraint_name CASCADE"); + fn check_one(constraint_text: &str) { + match alter_table_op(verified_stmt(&format!("ALTER TABLE tab {constraint_text}"))) { + AlterTableOperation::DropConstraint { + name: constr_name, + if_exists, + drop_behavior, + } => { + assert_eq!("constraint_name", constr_name.to_string()); + assert!(if_exists); + match drop_behavior { + None => assert!(constraint_text.ends_with(" constraint_name")), + Some(DropBehavior::Restrict) => assert!(constraint_text.ends_with(" RESTRICT")), + Some(DropBehavior::Cascade) => assert!(constraint_text.ends_with(" CASCADE")), + } + } + _ => unreachable!(), } - _ => unreachable!(), } - let res = parse_sql_statements(&format!("{alter_stmt} DROP CONSTRAINT is_active TEXT")); + let res = parse_sql_statements("ALTER TABLE tab DROP CONSTRAINT is_active TEXT"); assert_eq!( ParserError::ParserError("Expected: end of statement, found: TEXT".to_string()), res.unwrap_err() @@ -4247,7 +5104,7 @@ fn run_explain_analyze( expected_verbose: bool, expected_analyze: bool, expected_format: Option, - exepcted_options: Option>, + expected_options: Option>, ) { match dialect.verified_stmt(query) { Statement::Explain { @@ -4255,6 +5112,7 @@ fn run_explain_analyze( analyze, verbose, query_plan, + estimate, statement, format, options, @@ -4262,8 +5120,9 @@ fn run_explain_analyze( assert_eq!(verbose, expected_verbose); assert_eq!(analyze, expected_analyze); assert_eq!(format, expected_format); - assert_eq!(options, exepcted_options); + assert_eq!(options, expected_options); assert!(!query_plan); + assert!(!estimate); assert_eq!("SELECT sqrt(id) FROM foo", statement.to_string()); } _ => panic!("Unexpected Statement, must be Explain"), @@ -4408,30 +5267,63 @@ fn parse_explain_query_plan() { ); } +#[test] +fn parse_explain_estimate() { + let statement = all_dialects().verified_stmt("EXPLAIN ESTIMATE SELECT sqrt(id) FROM foo"); + + match &statement { + Statement::Explain { + query_plan, + estimate, + analyze, + verbose, + statement, + .. 
+ } => { + assert!(estimate); + assert!(!query_plan); + assert!(!analyze); + assert!(!verbose); + assert_eq!("SELECT sqrt(id) FROM foo", statement.to_string()); + } + _ => unreachable!(), + } + + assert_eq!( + "EXPLAIN ESTIMATE SELECT sqrt(id) FROM foo", + statement.to_string() + ); +} + #[test] fn parse_named_argument_function() { + let dialects = all_dialects_where(|d| { + d.supports_named_fn_args_with_rarrow_operator() + && !d.supports_named_fn_args_with_expr_name() + }); let sql = "SELECT FUN(a => '1', b => '2') FROM foo"; - let select = verified_only_select(sql); + let select = dialects.verified_only_select(sql); assert_eq!( &Expr::Function(Function { - name: ObjectName(vec![Ident::new("FUN")]), + name: ObjectName::from(vec![Ident::new("FUN")]), + uses_odbc_syntax: false, parameters: FunctionArguments::None, args: FunctionArguments::List(FunctionArgumentList { duplicate_treatment: None, args: vec![ FunctionArg::Named { name: Ident::new("a"), - arg: FunctionArgExpr::Expr(Expr::Value(Value::SingleQuotedString( - "1".to_owned() - ))), + arg: FunctionArgExpr::Expr(Expr::Value( + (Value::SingleQuotedString("1".to_owned())).with_empty_span() + )), operator: FunctionArgOperator::RightArrow }, FunctionArg::Named { name: Ident::new("b"), - arg: FunctionArgExpr::Expr(Expr::Value(Value::SingleQuotedString( - "2".to_owned() - ))), + arg: FunctionArgExpr::Expr(Expr::Value( + (Value::SingleQuotedString("2".to_owned())).with_empty_span() + )), operator: FunctionArgOperator::RightArrow }, ], @@ -4454,23 +5346,24 @@ fn parse_named_argument_function_with_eq_operator() { .verified_only_select(sql); assert_eq!( &Expr::Function(Function { - name: ObjectName(vec![Ident::new("FUN")]), + name: ObjectName::from(vec![Ident::new("FUN")]), + uses_odbc_syntax: false, parameters: FunctionArguments::None, args: FunctionArguments::List(FunctionArgumentList { duplicate_treatment: None, args: vec![ FunctionArg::Named { name: Ident::new("a"), - arg: FunctionArgExpr::Expr(Expr::Value(Value::SingleQuotedString( - "1".to_owned() - ))), + arg: FunctionArgExpr::Expr(Expr::Value( + (Value::SingleQuotedString("1".to_owned())).with_empty_span() + )), operator: FunctionArgOperator::Equals }, FunctionArg::Named { name: Ident::new("b"), - arg: FunctionArgExpr::Expr(Expr::Value(Value::SingleQuotedString( - "2".to_owned() - ))), + arg: FunctionArgExpr::Expr(Expr::Value( + (Value::SingleQuotedString("2".to_owned())).with_empty_span() + )), operator: FunctionArgOperator::Equals }, ], @@ -4494,7 +5387,7 @@ fn parse_named_argument_function_with_eq_operator() { [Expr::BinaryOp { left: Box::new(Expr::Identifier(Ident::new("bar"))), op: BinaryOperator::Eq, - right: Box::new(Expr::Value(number("42"))), + right: Box::new(Expr::value(number("42"))), }] ), ); @@ -4528,7 +5421,8 @@ fn parse_window_functions() { assert_eq!( &Expr::Function(Function { - name: ObjectName(vec![Ident::new("row_number")]), + name: ObjectName::from(vec![Ident::new("row_number")]), + uses_odbc_syntax: false, parameters: FunctionArguments::None, args: FunctionArguments::List(FunctionArgumentList { duplicate_treatment: None, @@ -4542,8 +5436,10 @@ fn parse_window_functions() { partition_by: vec![], order_by: vec![OrderByExpr { expr: Expr::Identifier(Ident::new("dt")), - asc: Some(false), - nulls_first: None, + options: OrderByOptions { + asc: Some(false), + nulls_first: None, + }, with_fill: None, }], window_frame: None, @@ -4647,15 +5543,19 @@ fn test_parse_named_window() { ORDER BY C3"; let actual_select_only = verified_only_select(sql); let expected = Select { + 
select_token: AttachedToken::empty(), distinct: None, top: None, + top_before_distinct: false, projection: vec![ SelectItem::ExprWithAlias { expr: Expr::Function(Function { - name: ObjectName(vec![Ident { + name: ObjectName::from(vec![Ident { value: "MIN".to_string(), quote_style: None, + span: Span::empty(), }]), + uses_odbc_syntax: false, parameters: FunctionArguments::None, args: FunctionArguments::List(FunctionArgumentList { duplicate_treatment: None, @@ -4663,6 +5563,7 @@ fn test_parse_named_window() { Expr::Identifier(Ident { value: "c12".to_string(), quote_style: None, + span: Span::empty(), }), ))], clauses: vec![], @@ -4672,20 +5573,24 @@ fn test_parse_named_window() { over: Some(WindowType::NamedWindow(Ident { value: "window1".to_string(), quote_style: None, + span: Span::empty(), })), within_group: vec![], }), alias: Ident { value: "min1".to_string(), quote_style: None, + span: Span::empty(), }, }, SelectItem::ExprWithAlias { expr: Expr::Function(Function { - name: ObjectName(vec![Ident { + name: ObjectName::from(vec![Ident { value: "MAX".to_string(), quote_style: None, + span: Span::empty(), }]), + uses_odbc_syntax: false, parameters: FunctionArguments::None, args: FunctionArguments::List(FunctionArgumentList { duplicate_treatment: None, @@ -4693,6 +5598,7 @@ fn test_parse_named_window() { Expr::Identifier(Ident { value: "c12".to_string(), quote_style: None, + span: Span::empty(), }), ))], clauses: vec![], @@ -4702,29 +5608,24 @@ fn test_parse_named_window() { over: Some(WindowType::NamedWindow(Ident { value: "window2".to_string(), quote_style: None, + span: Span::empty(), })), within_group: vec![], }), alias: Ident { value: "max1".to_string(), quote_style: None, + span: Span::empty(), }, }, ], into: None, from: vec![TableWithJoins { - relation: TableFactor::Table { - name: ObjectName(vec![Ident { - value: "aggregate_test_100".to_string(), - quote_style: None, - }]), - alias: None, - args: None, - with_hints: vec![], - version: None, - partitions: vec![], - with_ordinality: false, - }, + relation: table_from_name(ObjectName::from(vec![Ident { + value: "aggregate_test_100".to_string(), + quote_style: None, + span: Span::empty(), + }])), joins: vec![], }], lateral_views: vec![], @@ -4740,6 +5641,7 @@ fn test_parse_named_window() { Ident { value: "window1".to_string(), quote_style: None, + span: Span::empty(), }, NamedWindowExpr::WindowSpec(WindowSpec { window_name: None, @@ -4748,9 +5650,12 @@ fn test_parse_named_window() { expr: Expr::Identifier(Ident { value: "C12".to_string(), quote_style: None, + span: Span::empty(), }), - asc: None, - nulls_first: None, + options: OrderByOptions { + asc: None, + nulls_first: None, + }, with_fill: None, }], window_frame: None, @@ -4760,12 +5665,14 @@ fn test_parse_named_window() { Ident { value: "window2".to_string(), quote_style: None, + span: Span::empty(), }, NamedWindowExpr::WindowSpec(WindowSpec { window_name: None, partition_by: vec![Expr::Identifier(Ident { value: "C11".to_string(), quote_style: None, + span: Span::empty(), })], order_by: vec![], window_frame: None, @@ -4776,6 +5683,7 @@ fn test_parse_named_window() { window_before_qualify: true, value_table_mode: None, connect_by: None, + flavor: SelectFlavor::Standard, }; assert_eq!(actual_select_only, expected); } @@ -4829,14 +5737,14 @@ fn parse_literal_integer() { let select = verified_only_select(sql); assert_eq!(3, select.projection.len()); assert_eq!( - &Expr::Value(number("1")), + &Expr::value(number("1")), expr_from_projection(&select.projection[0]), ); // negative literal is 
parsed as a - and expr assert_eq!( &UnaryOp { op: UnaryOperator::Minus, - expr: Box::new(Expr::Value(number("10"))) + expr: Box::new(Expr::value(number("10"))) }, expr_from_projection(&select.projection[1]), ); @@ -4844,7 +5752,7 @@ fn parse_literal_integer() { assert_eq!( &UnaryOp { op: UnaryOperator::Plus, - expr: Box::new(Expr::Value(number("20"))) + expr: Box::new(Expr::value(number("20"))) }, expr_from_projection(&select.projection[2]), ) @@ -4858,11 +5766,11 @@ fn parse_literal_decimal() { let select = verified_only_select(sql); assert_eq!(2, select.projection.len()); assert_eq!( - &Expr::Value(number("0.300000000000000004")), + &Expr::value(number("0.300000000000000004")), expr_from_projection(&select.projection[0]), ); assert_eq!( - &Expr::Value(number("9007199254740993.0")), + &Expr::value(number("9007199254740993.0")), expr_from_projection(&select.projection[1]), ) } @@ -4873,15 +5781,17 @@ fn parse_literal_string() { let select = verified_only_select(sql); assert_eq!(3, select.projection.len()); assert_eq!( - &Expr::Value(Value::SingleQuotedString("one".to_string())), + &Expr::Value((Value::SingleQuotedString("one".to_string())).with_empty_span()), expr_from_projection(&select.projection[0]) ); assert_eq!( - &Expr::Value(Value::NationalStringLiteral("national string".to_string())), + &Expr::Value( + (Value::NationalStringLiteral("national string".to_string())).with_empty_span() + ), expr_from_projection(&select.projection[1]) ); assert_eq!( - &Expr::Value(Value::HexStringLiteral("deadBEEF".to_string())), + &Expr::Value((Value::HexStringLiteral("deadBEEF".to_string())).with_empty_span()), expr_from_projection(&select.projection[2]) ); @@ -4896,7 +5806,7 @@ fn parse_literal_date() { assert_eq!( &Expr::TypedString { data_type: DataType::Date, - value: "1999-01-01".into(), + value: Value::SingleQuotedString("1999-01-01".into()), }, expr_from_projection(only(&select.projection)), ); @@ -4909,7 +5819,7 @@ fn parse_literal_time() { assert_eq!( &Expr::TypedString { data_type: DataType::Time(None, TimezoneInfo::None), - value: "01:23:34".into(), + value: Value::SingleQuotedString("01:23:34".into()), }, expr_from_projection(only(&select.projection)), ); @@ -4922,7 +5832,7 @@ fn parse_literal_datetime() { assert_eq!( &Expr::TypedString { data_type: DataType::Datetime(None), - value: "1999-01-01 01:23:34.45".into(), + value: Value::SingleQuotedString("1999-01-01 01:23:34.45".into()), }, expr_from_projection(only(&select.projection)), ); @@ -4935,7 +5845,7 @@ fn parse_literal_timestamp_without_time_zone() { assert_eq!( &Expr::TypedString { data_type: DataType::Timestamp(None, TimezoneInfo::None), - value: "1999-01-01 01:23:34".into(), + value: Value::SingleQuotedString("1999-01-01 01:23:34".into()), }, expr_from_projection(only(&select.projection)), ); @@ -4950,7 +5860,7 @@ fn parse_literal_timestamp_with_time_zone() { assert_eq!( &Expr::TypedString { data_type: DataType::Timestamp(None, TimezoneInfo::Tz), - value: "1999-01-01 01:23:34Z".into(), + value: Value::SingleQuotedString("1999-01-01 01:23:34Z".into()), }, expr_from_projection(only(&select.projection)), ); @@ -4966,7 +5876,9 @@ fn parse_interval_all() { let select = verified_only_select(sql); assert_eq!( &Expr::Interval(Interval { - value: Box::new(Expr::Value(Value::SingleQuotedString(String::from("1-1")))), + value: Box::new(Expr::Value( + (Value::SingleQuotedString(String::from("1-1"))).with_empty_span() + )), leading_field: Some(DateTimeField::Year), leading_precision: None, last_field: Some(DateTimeField::Month), @@ -4979,9 
+5891,9 @@ fn parse_interval_all() { let select = verified_only_select(sql); assert_eq!( &Expr::Interval(Interval { - value: Box::new(Expr::Value(Value::SingleQuotedString(String::from( - "01:01.01" - )))), + value: Box::new(Expr::Value( + (Value::SingleQuotedString(String::from("01:01.01"))).with_empty_span() + )), leading_field: Some(DateTimeField::Minute), leading_precision: Some(5), last_field: Some(DateTimeField::Second), @@ -4994,7 +5906,9 @@ fn parse_interval_all() { let select = verified_only_select(sql); assert_eq!( &Expr::Interval(Interval { - value: Box::new(Expr::Value(Value::SingleQuotedString(String::from("1")))), + value: Box::new(Expr::Value( + (Value::SingleQuotedString(String::from("1"))).with_empty_span() + )), leading_field: Some(DateTimeField::Second), leading_precision: Some(5), last_field: None, @@ -5007,7 +5921,9 @@ fn parse_interval_all() { let select = verified_only_select(sql); assert_eq!( &Expr::Interval(Interval { - value: Box::new(Expr::Value(Value::SingleQuotedString(String::from("10")))), + value: Box::new(Expr::Value( + (Value::SingleQuotedString(String::from("10"))).with_empty_span() + )), leading_field: Some(DateTimeField::Hour), leading_precision: None, last_field: None, @@ -5020,7 +5936,7 @@ fn parse_interval_all() { let select = verified_only_select(sql); assert_eq!( &Expr::Interval(Interval { - value: Box::new(Expr::Value(number("5"))), + value: Box::new(Expr::value(number("5"))), leading_field: Some(DateTimeField::Day), leading_precision: None, last_field: None, @@ -5029,21 +5945,36 @@ fn parse_interval_all() { expr_from_projection(only(&select.projection)), ); - let sql = "SELECT INTERVAL '10' HOUR (1)"; + let sql = "SELECT INTERVAL 5 DAYS"; let select = verified_only_select(sql); assert_eq!( &Expr::Interval(Interval { - value: Box::new(Expr::Value(Value::SingleQuotedString(String::from("10")))), - leading_field: Some(DateTimeField::Hour), - leading_precision: Some(1), + value: Box::new(Expr::value(number("5"))), + leading_field: Some(DateTimeField::Days), + leading_precision: None, last_field: None, fractional_seconds_precision: None, }), expr_from_projection(only(&select.projection)), ); - let result = parse_sql_statements("SELECT INTERVAL '1' SECOND TO SECOND"); - assert_eq!( + let sql = "SELECT INTERVAL '10' HOUR (1)"; + let select = verified_only_select(sql); + assert_eq!( + &Expr::Interval(Interval { + value: Box::new(Expr::Value( + (Value::SingleQuotedString(String::from("10"))).with_empty_span() + )), + leading_field: Some(DateTimeField::Hour), + leading_precision: Some(1), + last_field: None, + fractional_seconds_precision: None, + }), + expr_from_projection(only(&select.projection)), + ); + + let result = parse_sql_statements("SELECT INTERVAL '1' SECOND TO SECOND"); + assert_eq!( ParserError::ParserError("Expected: end of statement, found: SECOND".to_string()), result.unwrap_err(), ); @@ -5056,10 +5987,18 @@ fn parse_interval_all() { verified_only_select("SELECT INTERVAL '1' YEAR"); verified_only_select("SELECT INTERVAL '1' MONTH"); + verified_only_select("SELECT INTERVAL '1' WEEK"); verified_only_select("SELECT INTERVAL '1' DAY"); verified_only_select("SELECT INTERVAL '1' HOUR"); verified_only_select("SELECT INTERVAL '1' MINUTE"); verified_only_select("SELECT INTERVAL '1' SECOND"); + verified_only_select("SELECT INTERVAL '1' YEARS"); + verified_only_select("SELECT INTERVAL '1' MONTHS"); + verified_only_select("SELECT INTERVAL '1' WEEKS"); + verified_only_select("SELECT INTERVAL '1' DAYS"); + verified_only_select("SELECT INTERVAL '1' 
HOURS"); + verified_only_select("SELECT INTERVAL '1' MINUTES"); + verified_only_select("SELECT INTERVAL '1' SECONDS"); verified_only_select("SELECT INTERVAL '1' YEAR TO MONTH"); verified_only_select("SELECT INTERVAL '1' DAY TO HOUR"); verified_only_select("SELECT INTERVAL '1' DAY TO MINUTE"); @@ -5069,10 +6008,21 @@ fn parse_interval_all() { verified_only_select("SELECT INTERVAL '1' MINUTE TO SECOND"); verified_only_select("SELECT INTERVAL 1 YEAR"); verified_only_select("SELECT INTERVAL 1 MONTH"); + verified_only_select("SELECT INTERVAL 1 WEEK"); verified_only_select("SELECT INTERVAL 1 DAY"); verified_only_select("SELECT INTERVAL 1 HOUR"); verified_only_select("SELECT INTERVAL 1 MINUTE"); verified_only_select("SELECT INTERVAL 1 SECOND"); + verified_only_select("SELECT INTERVAL 1 YEARS"); + verified_only_select("SELECT INTERVAL 1 MONTHS"); + verified_only_select("SELECT INTERVAL 1 WEEKS"); + verified_only_select("SELECT INTERVAL 1 DAYS"); + verified_only_select("SELECT INTERVAL 1 HOURS"); + verified_only_select("SELECT INTERVAL 1 MINUTES"); + verified_only_select("SELECT INTERVAL 1 SECONDS"); + verified_only_select( + "SELECT '2 years 15 months 100 weeks 99 hours 123456789 milliseconds'::INTERVAL", + ); } #[test] @@ -5083,9 +6033,9 @@ fn parse_interval_dont_require_unit() { let select = dialects.verified_only_select(sql); assert_eq!( &Expr::Interval(Interval { - value: Box::new(Expr::Value(Value::SingleQuotedString(String::from( - "1 DAY" - )))), + value: Box::new(Expr::Value( + (Value::SingleQuotedString(String::from("1 DAY"))).with_empty_span() + )), leading_field: None, leading_precision: None, last_field: None, @@ -5104,7 +6054,6 @@ fn parse_interval_dont_require_unit() { #[test] fn parse_interval_require_unit() { let dialects = all_dialects_where(|d| d.require_interval_qualifier()); - let sql = "SELECT INTERVAL '1 DAY'"; let err = dialects.parse_sql_statements(sql).unwrap_err(); assert_eq!( @@ -5123,9 +6072,9 @@ fn parse_interval_require_qualifier() { expr_from_projection(only(&select.projection)), &Expr::Interval(Interval { value: Box::new(Expr::BinaryOp { - left: Box::new(Expr::Value(number("1"))), + left: Box::new(Expr::value(number("1"))), op: BinaryOperator::Plus, - right: Box::new(Expr::Value(number("1"))), + right: Box::new(Expr::value(number("1"))), }), leading_field: Some(DateTimeField::Day), leading_precision: None, @@ -5140,9 +6089,13 @@ fn parse_interval_require_qualifier() { expr_from_projection(only(&select.projection)), &Expr::Interval(Interval { value: Box::new(Expr::BinaryOp { - left: Box::new(Expr::Value(Value::SingleQuotedString("1".to_string()))), + left: Box::new(Expr::Value( + (Value::SingleQuotedString("1".to_string())).with_empty_span() + )), op: BinaryOperator::Plus, - right: Box::new(Expr::Value(Value::SingleQuotedString("1".to_string()))), + right: Box::new(Expr::Value( + (Value::SingleQuotedString("1".to_string())).with_empty_span() + )), }), leading_field: Some(DateTimeField::Day), leading_precision: None, @@ -5158,12 +6111,18 @@ fn parse_interval_require_qualifier() { &Expr::Interval(Interval { value: Box::new(Expr::BinaryOp { left: Box::new(Expr::BinaryOp { - left: Box::new(Expr::Value(Value::SingleQuotedString("1".to_string()))), + left: Box::new(Expr::Value( + (Value::SingleQuotedString("1".to_string())).with_empty_span() + )), op: BinaryOperator::Plus, - right: Box::new(Expr::Value(Value::SingleQuotedString("2".to_string()))), + right: Box::new(Expr::Value( + (Value::SingleQuotedString("2".to_string())).with_empty_span() + )), }), op: 
BinaryOperator::Minus, - right: Box::new(Expr::Value(Value::SingleQuotedString("3".to_string()))), + right: Box::new(Expr::Value( + (Value::SingleQuotedString("3".to_string())).with_empty_span() + )), }), leading_field: Some(DateTimeField::Day), leading_precision: None, @@ -5182,9 +6141,9 @@ fn parse_interval_disallow_interval_expr() { assert_eq!( expr_from_projection(only(&select.projection)), &Expr::Interval(Interval { - value: Box::new(Expr::Value(Value::SingleQuotedString(String::from( - "1 DAY" - )))), + value: Box::new(Expr::Value( + (Value::SingleQuotedString(String::from("1 DAY"))).with_empty_span() + )), leading_field: None, leading_precision: None, last_field: None, @@ -5205,9 +6164,9 @@ fn parse_interval_disallow_interval_expr() { expr_from_projection(only(&select.projection)), &Expr::BinaryOp { left: Box::new(Expr::Interval(Interval { - value: Box::new(Expr::Value(Value::SingleQuotedString(String::from( - "1 DAY" - )))), + value: Box::new(Expr::Value( + (Value::SingleQuotedString(String::from("1 DAY"))).with_empty_span() + )), leading_field: None, leading_precision: None, last_field: None, @@ -5215,9 +6174,9 @@ fn parse_interval_disallow_interval_expr() { })), op: BinaryOperator::Gt, right: Box::new(Expr::Interval(Interval { - value: Box::new(Expr::Value(Value::SingleQuotedString(String::from( - "1 SECOND" - )))), + value: Box::new(Expr::Value( + (Value::SingleQuotedString(String::from("1 SECOND"))).with_empty_span() + )), leading_field: None, leading_precision: None, last_field: None, @@ -5235,9 +6194,9 @@ fn interval_disallow_interval_expr_gt() { expr, Expr::BinaryOp { left: Box::new(Expr::Interval(Interval { - value: Box::new(Expr::Value(Value::SingleQuotedString( - "1 second".to_string() - ))), + value: Box::new(Expr::Value( + (Value::SingleQuotedString("1 second".to_string())).with_empty_span() + )), leading_field: None, leading_precision: None, last_field: None, @@ -5247,6 +6206,7 @@ fn interval_disallow_interval_expr_gt() { right: Box::new(Expr::Identifier(Ident { value: "x".to_string(), quote_style: None, + span: Span::empty(), })), } ) @@ -5261,9 +6221,9 @@ fn interval_disallow_interval_expr_double_colon() { Expr::Cast { kind: CastKind::DoubleColon, expr: Box::new(Expr::Interval(Interval { - value: Box::new(Expr::Value(Value::SingleQuotedString( - "1 second".to_string() - ))), + value: Box::new(Expr::Value( + (Value::SingleQuotedString("1 second".to_string())).with_empty_span() + )), leading_field: None, leading_precision: None, last_field: None, @@ -5287,26 +6247,22 @@ fn parse_interval_and_or_xor() { let expected_ast = vec![Statement::Query(Box::new(Query { with: None, body: Box::new(SetExpr::Select(Box::new(Select { + select_token: AttachedToken::empty(), distinct: None, top: None, + top_before_distinct: false, projection: vec![UnnamedExpr(Expr::Identifier(Ident { value: "col".to_string(), quote_style: None, + span: Span::empty(), }))], into: None, from: vec![TableWithJoins { - relation: TableFactor::Table { - name: ObjectName(vec![Ident { - value: "test".to_string(), - quote_style: None, - }]), - alias: None, - args: None, - with_hints: vec![], - version: None, - partitions: vec![], - with_ordinality: false, - }, + relation: table_from_name(ObjectName::from(vec![Ident { + value: "test".to_string(), + quote_style: None, + span: Span::empty(), + }])), joins: vec![], }], lateral_views: vec![], @@ -5316,18 +6272,20 @@ fn parse_interval_and_or_xor() { left: Box::new(Expr::Identifier(Ident { value: "d3_date".to_string(), quote_style: None, + span: Span::empty(), })), op: 
BinaryOperator::Gt, right: Box::new(Expr::BinaryOp { left: Box::new(Expr::Identifier(Ident { value: "d1_date".to_string(), quote_style: None, + span: Span::empty(), })), op: BinaryOperator::Plus, right: Box::new(Expr::Interval(Interval { - value: Box::new(Expr::Value(Value::SingleQuotedString( - "5 days".to_string(), - ))), + value: Box::new(Expr::Value( + (Value::SingleQuotedString("5 days".to_string())).with_empty_span(), + )), leading_field: None, leading_precision: None, last_field: None, @@ -5340,18 +6298,20 @@ fn parse_interval_and_or_xor() { left: Box::new(Expr::Identifier(Ident { value: "d2_date".to_string(), quote_style: None, + span: Span::empty(), })), op: BinaryOperator::Gt, right: Box::new(Expr::BinaryOp { left: Box::new(Expr::Identifier(Ident { value: "d1_date".to_string(), quote_style: None, + span: Span::empty(), })), op: BinaryOperator::Plus, right: Box::new(Expr::Interval(Interval { - value: Box::new(Expr::Value(Value::SingleQuotedString( - "3 days".to_string(), - ))), + value: Box::new(Expr::Value( + (Value::SingleQuotedString("3 days".to_string())).with_empty_span(), + )), leading_field: None, leading_precision: None, last_field: None, @@ -5370,16 +6330,16 @@ fn parse_interval_and_or_xor() { window_before_qualify: false, value_table_mode: None, connect_by: None, + flavor: SelectFlavor::Standard, }))), order_by: None, - limit: None, - limit_by: vec![], - offset: None, + limit_clause: None, fetch: None, locks: vec![], for_clause: None, settings: None, format_clause: None, + pipe_operators: vec![], }))]; assert_eq!(actual_ast, expected_ast); @@ -5405,15 +6365,15 @@ fn parse_interval_and_or_xor() { #[test] fn parse_at_timezone() { - let zero = Expr::Value(number("0")); + let zero = Expr::value(number("0")); let sql = "SELECT FROM_UNIXTIME(0) AT TIME ZONE 'UTC-06:00' FROM t"; let select = verified_only_select(sql); assert_eq!( &Expr::AtTimeZone { timestamp: Box::new(call("FROM_UNIXTIME", [zero.clone()])), - time_zone: Box::new(Expr::Value(Value::SingleQuotedString( - "UTC-06:00".to_string() - ))), + time_zone: Box::new(Expr::Value( + (Value::SingleQuotedString("UTC-06:00".to_string())).with_empty_span() + )), }, expr_from_projection(only(&select.projection)), ); @@ -5427,16 +6387,19 @@ fn parse_at_timezone() { [ Expr::AtTimeZone { timestamp: Box::new(call("FROM_UNIXTIME", [zero])), - time_zone: Box::new(Expr::Value(Value::SingleQuotedString( - "UTC-06:00".to_string() - ))), + time_zone: Box::new(Expr::Value( + (Value::SingleQuotedString("UTC-06:00".to_string())).with_empty_span() + )), }, - Expr::Value(Value::SingleQuotedString("%Y-%m-%dT%H".to_string()),) + Expr::Value( + (Value::SingleQuotedString("%Y-%m-%dT%H".to_string())).with_empty_span() + ) ] ), alias: Ident { value: "hour".to_string(), quote_style: Some('"'), + span: Span::empty(), }, }, only(&select.projection), @@ -5469,7 +6432,8 @@ fn parse_json_keyword() { assert_eq!( &Expr::TypedString { data_type: DataType::JSON, - value: r#"{ + value: Value::SingleQuotedString( + r#"{ "id": 10, "type": "fruit", "name": "apple", @@ -5489,12 +6453,30 @@ fn parse_json_keyword() { ] } }"# - .into() + .to_string() + ) }, expr_from_projection(only(&select.projection)), ); } +#[test] +fn parse_typed_strings() { + let expr = verified_expr(r#"JSON '{"foo":"bar"}'"#); + assert_eq!( + Expr::TypedString { + data_type: DataType::JSON, + value: Value::SingleQuotedString(r#"{"foo":"bar"}"#.into()) + }, + expr + ); + + if let Expr::TypedString { data_type, value } = expr { + assert_eq!(DataType::JSON, data_type); + 
assert_eq!(r#"{"foo":"bar"}"#, value.into_string().unwrap()); + } +} + #[test] fn parse_bignumeric_keyword() { let sql = r#"SELECT BIGNUMERIC '0'"#; @@ -5502,7 +6484,7 @@ fn parse_bignumeric_keyword() { assert_eq!( &Expr::TypedString { data_type: DataType::BigNumeric(ExactNumberInfo::None), - value: r#"0"#.into() + value: Value::SingleQuotedString(r#"0"#.into()) }, expr_from_projection(only(&select.projection)), ); @@ -5513,7 +6495,7 @@ fn parse_bignumeric_keyword() { assert_eq!( &Expr::TypedString { data_type: DataType::BigNumeric(ExactNumberInfo::None), - value: r#"123456"#.into() + value: Value::SingleQuotedString(r#"123456"#.into()) }, expr_from_projection(only(&select.projection)), ); @@ -5524,7 +6506,7 @@ fn parse_bignumeric_keyword() { assert_eq!( &Expr::TypedString { data_type: DataType::BigNumeric(ExactNumberInfo::None), - value: r#"-3.14"#.into() + value: Value::SingleQuotedString(r#"-3.14"#.into()) }, expr_from_projection(only(&select.projection)), ); @@ -5535,7 +6517,7 @@ fn parse_bignumeric_keyword() { assert_eq!( &Expr::TypedString { data_type: DataType::BigNumeric(ExactNumberInfo::None), - value: r#"-0.54321"#.into() + value: Value::SingleQuotedString(r#"-0.54321"#.into()) }, expr_from_projection(only(&select.projection)), ); @@ -5546,7 +6528,7 @@ fn parse_bignumeric_keyword() { assert_eq!( &Expr::TypedString { data_type: DataType::BigNumeric(ExactNumberInfo::None), - value: r#"1.23456e05"#.into() + value: Value::SingleQuotedString(r#"1.23456e05"#.into()) }, expr_from_projection(only(&select.projection)), ); @@ -5557,7 +6539,7 @@ fn parse_bignumeric_keyword() { assert_eq!( &Expr::TypedString { data_type: DataType::BigNumeric(ExactNumberInfo::None), - value: r#"-9.876e-3"#.into() + value: Value::SingleQuotedString(r#"-9.876e-3"#.into()) }, expr_from_projection(only(&select.projection)), ); @@ -5585,7 +6567,9 @@ fn parse_table_function() { assert_eq!( call( "FUN", - [Expr::Value(Value::SingleQuotedString("1".to_owned()))], + [Expr::Value( + (Value::SingleQuotedString("1".to_owned())).with_empty_span() + )], ), expr ); @@ -5607,6 +6591,40 @@ fn parse_table_function() { ); } +#[test] +fn parse_select_with_alias_and_column_defs() { + let sql = r#"SELECT * FROM jsonb_to_record('{"a": "x", "b": 2}'::JSONB) AS x (a TEXT, b INT)"#; + let select = verified_only_select(sql); + + match only(&select.from) { + TableWithJoins { + relation: TableFactor::Table { + alias: Some(alias), .. + }, + .. 
+ } => { + assert_eq!(alias.name.value, "x"); + assert_eq!( + alias.columns, + vec![ + TableAliasColumnDef { + name: Ident::new("a"), + data_type: Some(DataType::Text), + }, + TableAliasColumnDef { + name: Ident::new("b"), + data_type: Some(DataType::Int(None)), + }, + ] + ); + } + _ => unreachable!( + "Expecting only TableWithJoins with TableFactor::Table, got {:#?}", + select.from + ), + } +} + #[test] fn parse_unnest() { let sql = "SELECT UNNEST(make_array(1, 2, 3))"; @@ -5734,9 +6752,9 @@ fn parse_unnest_in_from_clause() { array_exprs: vec![call( "make_array", [ - Expr::Value(number("1")), - Expr::Value(number("2")), - Expr::Value(number("3")), + Expr::value(number("1")), + Expr::value(number("2")), + Expr::value(number("3")), ], )], with_offset: false, @@ -5760,14 +6778,14 @@ fn parse_unnest_in_from_clause() { call( "make_array", [ - Expr::Value(number("1")), - Expr::Value(number("2")), - Expr::Value(number("3")), + Expr::value(number("1")), + Expr::value(number("2")), + Expr::value(number("3")), ], ), call( "make_array", - [Expr::Value(number("5")), Expr::Value(number("6"))], + [Expr::value(number("5")), Expr::value(number("6"))], ), ], with_offset: false, @@ -5812,24 +6830,28 @@ fn parse_searched_case_expr() { &Case { operand: None, conditions: vec![ - IsNull(Box::new(Identifier(Ident::new("bar")))), - BinaryOp { - left: Box::new(Identifier(Ident::new("bar"))), - op: Eq, - right: Box::new(Expr::Value(number("0"))), + CaseWhen { + condition: IsNull(Box::new(Identifier(Ident::new("bar")))), + result: Expr::value(Value::SingleQuotedString("null".to_string())), }, - BinaryOp { - left: Box::new(Identifier(Ident::new("bar"))), - op: GtEq, - right: Box::new(Expr::Value(number("0"))), + CaseWhen { + condition: BinaryOp { + left: Box::new(Identifier(Ident::new("bar"))), + op: Eq, + right: Box::new(Expr::value(number("0"))), + }, + result: Expr::value(Value::SingleQuotedString("=0".to_string())), + }, + CaseWhen { + condition: BinaryOp { + left: Box::new(Identifier(Ident::new("bar"))), + op: GtEq, + right: Box::new(Expr::value(number("0"))), + }, + result: Expr::value(Value::SingleQuotedString(">=0".to_string())), }, ], - results: vec![ - Expr::Value(Value::SingleQuotedString("null".to_string())), - Expr::Value(Value::SingleQuotedString("=0".to_string())), - Expr::Value(Value::SingleQuotedString(">=0".to_string())), - ], - else_result: Some(Box::new(Expr::Value(Value::SingleQuotedString( + else_result: Some(Box::new(Expr::value(Value::SingleQuotedString( "<0".to_string() )))), }, @@ -5846,9 +6868,11 @@ fn parse_simple_case_expr() { assert_eq!( &Case { operand: Some(Box::new(Identifier(Ident::new("foo")))), - conditions: vec![Expr::Value(number("1"))], - results: vec![Expr::Value(Value::SingleQuotedString("Y".to_string()))], - else_result: Some(Box::new(Expr::Value(Value::SingleQuotedString( + conditions: vec![CaseWhen { + condition: Expr::value(number("1")), + result: Expr::value(Value::SingleQuotedString("Y".to_string())), + }], + else_result: Some(Box::new(Expr::value(Value::SingleQuotedString( "N".to_string() )))), }, @@ -5875,27 +6899,11 @@ fn parse_implicit_join() { assert_eq!( vec![ TableWithJoins { - relation: TableFactor::Table { - name: ObjectName(vec!["t1".into()]), - alias: None, - args: None, - with_hints: vec![], - version: None, - partitions: vec![], - with_ordinality: false, - }, + relation: table_from_name(ObjectName::from(vec!["t1".into()])), joins: vec![], }, TableWithJoins { - relation: TableFactor::Table { - name: ObjectName(vec!["t2".into()]), - alias: None, - args: None, 
- with_hints: vec![], - version: None, - partitions: vec![], - with_ordinality: false, - }, + relation: table_from_name(ObjectName::from(vec!["t2".into()])), joins: vec![], }, ], @@ -5907,51 +6915,19 @@ fn parse_implicit_join() { assert_eq!( vec![ TableWithJoins { - relation: TableFactor::Table { - name: ObjectName(vec!["t1a".into()]), - alias: None, - args: None, - with_hints: vec![], - version: None, - partitions: vec![], - with_ordinality: false, - }, + relation: table_from_name(ObjectName::from(vec!["t1a".into()])), joins: vec![Join { - relation: TableFactor::Table { - name: ObjectName(vec!["t1b".into()]), - alias: None, - args: None, - with_hints: vec![], - version: None, - partitions: vec![], - with_ordinality: false, - }, + relation: table_from_name(ObjectName::from(vec!["t1b".into()])), global: false, - join_operator: JoinOperator::Inner(JoinConstraint::Natural), + join_operator: JoinOperator::Join(JoinConstraint::Natural), }], }, TableWithJoins { - relation: TableFactor::Table { - name: ObjectName(vec!["t2a".into()]), - alias: None, - args: None, - with_hints: vec![], - version: None, - partitions: vec![], - with_ordinality: false, - }, + relation: table_from_name(ObjectName::from(vec!["t2a".into()])), joins: vec![Join { - relation: TableFactor::Table { - name: ObjectName(vec!["t2b".into()]), - alias: None, - args: None, - with_hints: vec![], - version: None, - partitions: vec![], - with_ordinality: false, - }, + relation: table_from_name(ObjectName::from(vec!["t2b".into()])), global: false, - join_operator: JoinOperator::Inner(JoinConstraint::Natural), + join_operator: JoinOperator::Join(JoinConstraint::Natural), }], }, ], @@ -5965,15 +6941,7 @@ fn parse_cross_join() { let select = verified_only_select(sql); assert_eq!( Join { - relation: TableFactor::Table { - name: ObjectName(vec![Ident::new("t2")]), - alias: None, - args: None, - with_hints: vec![], - version: None, - partitions: vec![], - with_ordinality: false, - }, + relation: table_from_name(ObjectName::from(vec![Ident::new("t2")])), global: false, join_operator: JoinOperator::CrossJoin, }, @@ -5991,13 +6959,16 @@ fn parse_joins_on() { ) -> Join { Join { relation: TableFactor::Table { - name: ObjectName(vec![Ident::new(relation.into())]), + name: ObjectName::from(vec![Ident::new(relation.into())]), alias, args: None, with_hints: vec![], version: None, partitions: vec![], with_ordinality: false, + json_path: None, + sample: None, + index_hints: vec![], }, global, join_operator: f(JoinConstraint::On(Expr::BinaryOp { @@ -6014,7 +6985,7 @@ fn parse_joins_on() { "t2", table_alias("foo"), false, - JoinOperator::Inner, + JoinOperator::Join, )] ); one_statement_parses_to( @@ -6024,10 +6995,18 @@ fn parse_joins_on() { // Test parsing of different join operators assert_eq!( only(&verified_only_select("SELECT * FROM t1 JOIN t2 ON c1 = c2").from).joins, + vec![join_with_constraint("t2", None, false, JoinOperator::Join)] + ); + assert_eq!( + only(&verified_only_select("SELECT * FROM t1 INNER JOIN t2 ON c1 = c2").from).joins, vec![join_with_constraint("t2", None, false, JoinOperator::Inner)] ); assert_eq!( only(&verified_only_select("SELECT * FROM t1 LEFT JOIN t2 ON c1 = c2").from).joins, + vec![join_with_constraint("t2", None, false, JoinOperator::Left)] + ); + assert_eq!( + only(&verified_only_select("SELECT * FROM t1 LEFT OUTER JOIN t2 ON c1 = c2").from).joins, vec![join_with_constraint( "t2", None, @@ -6037,6 +7016,10 @@ fn parse_joins_on() { ); assert_eq!( only(&verified_only_select("SELECT * FROM t1 RIGHT JOIN t2 ON c1 = 
c2").from).joins, + vec![join_with_constraint("t2", None, false, JoinOperator::Right)] + ); + assert_eq!( + only(&verified_only_select("SELECT * FROM t1 RIGHT OUTER JOIN t2 ON c1 = c2").from).joins, vec![join_with_constraint( "t2", None, @@ -6044,6 +7027,10 @@ fn parse_joins_on() { JoinOperator::RightOuter )] ); + assert_eq!( + only(&verified_only_select("SELECT * FROM t1 SEMI JOIN t2 ON c1 = c2").from).joins, + vec![join_with_constraint("t2", None, false, JoinOperator::Semi)] + ); assert_eq!( only(&verified_only_select("SELECT * FROM t1 LEFT SEMI JOIN t2 ON c1 = c2").from).joins, vec![join_with_constraint( @@ -6062,6 +7049,10 @@ fn parse_joins_on() { JoinOperator::RightSemi )] ); + assert_eq!( + only(&verified_only_select("SELECT * FROM t1 ANTI JOIN t2 ON c1 = c2").from).joins, + vec![join_with_constraint("t2", None, false, JoinOperator::Anti)] + ); assert_eq!( only(&verified_only_select("SELECT * FROM t1 LEFT ANTI JOIN t2 ON c1 = c2").from).joins, vec![join_with_constraint( @@ -6110,16 +7101,21 @@ fn parse_joins_using() { ) -> Join { Join { relation: TableFactor::Table { - name: ObjectName(vec![Ident::new(relation.into())]), + name: ObjectName::from(vec![Ident::new(relation.into())]), alias, args: None, with_hints: vec![], version: None, partitions: vec![], with_ordinality: false, + json_path: None, + sample: None, + index_hints: vec![], }, global: false, - join_operator: f(JoinConstraint::Using(vec!["c1".into()])), + join_operator: f(JoinConstraint::Using(vec![ObjectName::from(vec![ + "c1".into() + ])])), } } // Test parsing of aliases @@ -6128,7 +7124,7 @@ fn parse_joins_using() { vec![join_with_constraint( "t2", table_alias("foo"), - JoinOperator::Inner, + JoinOperator::Join, )] ); one_statement_parses_to( @@ -6138,16 +7134,32 @@ fn parse_joins_using() { // Test parsing of different join operators assert_eq!( only(&verified_only_select("SELECT * FROM t1 JOIN t2 USING(c1)").from).joins, + vec![join_with_constraint("t2", None, JoinOperator::Join)] + ); + assert_eq!( + only(&verified_only_select("SELECT * FROM t1 INNER JOIN t2 USING(c1)").from).joins, vec![join_with_constraint("t2", None, JoinOperator::Inner)] ); assert_eq!( only(&verified_only_select("SELECT * FROM t1 LEFT JOIN t2 USING(c1)").from).joins, + vec![join_with_constraint("t2", None, JoinOperator::Left)] + ); + assert_eq!( + only(&verified_only_select("SELECT * FROM t1 LEFT OUTER JOIN t2 USING(c1)").from).joins, vec![join_with_constraint("t2", None, JoinOperator::LeftOuter)] ); assert_eq!( only(&verified_only_select("SELECT * FROM t1 RIGHT JOIN t2 USING(c1)").from).joins, + vec![join_with_constraint("t2", None, JoinOperator::Right)] + ); + assert_eq!( + only(&verified_only_select("SELECT * FROM t1 RIGHT OUTER JOIN t2 USING(c1)").from).joins, vec![join_with_constraint("t2", None, JoinOperator::RightOuter)] ); + assert_eq!( + only(&verified_only_select("SELECT * FROM t1 SEMI JOIN t2 USING(c1)").from).joins, + vec![join_with_constraint("t2", None, JoinOperator::Semi)] + ); assert_eq!( only(&verified_only_select("SELECT * FROM t1 LEFT SEMI JOIN t2 USING(c1)").from).joins, vec![join_with_constraint("t2", None, JoinOperator::LeftSemi)] @@ -6156,6 +7168,10 @@ fn parse_joins_using() { only(&verified_only_select("SELECT * FROM t1 RIGHT SEMI JOIN t2 USING(c1)").from).joins, vec![join_with_constraint("t2", None, JoinOperator::RightSemi)] ); + assert_eq!( + only(&verified_only_select("SELECT * FROM t1 ANTI JOIN t2 USING(c1)").from).joins, + vec![join_with_constraint("t2", None, JoinOperator::Anti)] + ); assert_eq!( 
only(&verified_only_select("SELECT * FROM t1 LEFT ANTI JOIN t2 USING(c1)").from).joins, vec![join_with_constraint("t2", None, JoinOperator::LeftAnti)] @@ -6168,6 +7184,7 @@ fn parse_joins_using() { only(&verified_only_select("SELECT * FROM t1 FULL JOIN t2 USING(c1)").from).joins, vec![join_with_constraint("t2", None, JoinOperator::FullOuter)] ); + verified_stmt("SELECT * FROM tbl1 AS t1 JOIN tbl2 AS t2 USING(t2.col1)"); } #[test] @@ -6175,33 +7192,55 @@ fn parse_natural_join() { fn natural_join(f: impl Fn(JoinConstraint) -> JoinOperator, alias: Option) -> Join { Join { relation: TableFactor::Table { - name: ObjectName(vec![Ident::new("t2")]), + name: ObjectName::from(vec![Ident::new("t2")]), alias, args: None, with_hints: vec![], version: None, partitions: vec![], with_ordinality: false, + json_path: None, + sample: None, + index_hints: vec![], }, global: false, join_operator: f(JoinConstraint::Natural), } } - // if not specified, inner join as default + // unspecified join assert_eq!( only(&verified_only_select("SELECT * FROM t1 NATURAL JOIN t2").from).joins, + vec![natural_join(JoinOperator::Join, None)] + ); + + // inner join explicitly + assert_eq!( + only(&verified_only_select("SELECT * FROM t1 NATURAL INNER JOIN t2").from).joins, vec![natural_join(JoinOperator::Inner, None)] ); + // left join explicitly assert_eq!( only(&verified_only_select("SELECT * FROM t1 NATURAL LEFT JOIN t2").from).joins, + vec![natural_join(JoinOperator::Left, None)] + ); + + // left outer join explicitly + assert_eq!( + only(&verified_only_select("SELECT * FROM t1 NATURAL LEFT OUTER JOIN t2").from).joins, vec![natural_join(JoinOperator::LeftOuter, None)] ); // right join explicitly assert_eq!( only(&verified_only_select("SELECT * FROM t1 NATURAL RIGHT JOIN t2").from).joins, + vec![natural_join(JoinOperator::Right, None)] + ); + + // right outer join explicitly + assert_eq!( + only(&verified_only_select("SELECT * FROM t1 NATURAL RIGHT OUTER JOIN t2").from).joins, vec![natural_join(JoinOperator::RightOuter, None)] ); @@ -6214,7 +7253,7 @@ fn parse_natural_join() { // natural join another table with alias assert_eq!( only(&verified_only_select("SELECT * FROM t1 NATURAL JOIN t2 AS t3").from).joins, - vec![natural_join(JoinOperator::Inner, table_alias("t3"))] + vec![natural_join(JoinOperator::Join, table_alias("t3"))] ); let sql = "SELECT * FROM t1 natural"; @@ -6281,18 +7320,12 @@ fn parse_join_nesting() { #[test] fn parse_join_syntax_variants() { - one_statement_parses_to( - "SELECT c1 FROM t1 INNER JOIN t2 USING(c1)", - "SELECT c1 FROM t1 JOIN t2 USING(c1)", - ); - one_statement_parses_to( - "SELECT c1 FROM t1 LEFT OUTER JOIN t2 USING(c1)", - "SELECT c1 FROM t1 LEFT JOIN t2 USING(c1)", - ); - one_statement_parses_to( - "SELECT c1 FROM t1 RIGHT OUTER JOIN t2 USING(c1)", - "SELECT c1 FROM t1 RIGHT JOIN t2 USING(c1)", - ); + verified_stmt("SELECT c1 FROM t1 JOIN t2 USING(c1)"); + verified_stmt("SELECT c1 FROM t1 INNER JOIN t2 USING(c1)"); + verified_stmt("SELECT c1 FROM t1 LEFT JOIN t2 USING(c1)"); + verified_stmt("SELECT c1 FROM t1 LEFT OUTER JOIN t2 USING(c1)"); + verified_stmt("SELECT c1 FROM t1 RIGHT JOIN t2 USING(c1)"); + verified_stmt("SELECT c1 FROM t1 RIGHT OUTER JOIN t2 USING(c1)"); one_statement_parses_to( "SELECT c1 FROM t1 FULL OUTER JOIN t2 USING(c1)", "SELECT c1 FROM t1 FULL JOIN t2 USING(c1)", @@ -6366,7 +7399,10 @@ fn parse_cte_renamed_columns() { let sql = "WITH cte (col1, col2) AS (SELECT foo, bar FROM baz) SELECT * FROM cte"; let query = all_dialects().verified_query(sql); assert_eq!( - 
vec![Ident::new("col1"), Ident::new("col2")], + vec![ + TableAliasColumnDef::from_name("col1"), + TableAliasColumnDef::from_name("col2") + ], query .with .unwrap() @@ -6394,19 +7430,45 @@ fn parse_recursive_cte() { name: Ident { value: "nums".to_string(), quote_style: None, + span: Span::empty(), }, - columns: vec![Ident { - value: "val".to_string(), - quote_style: None, - }], + columns: vec![TableAliasColumnDef::from_name("val")], }, query: Box::new(cte_query), from: None, materialized: None, + closing_paren_token: AttachedToken::empty(), }; assert_eq!(with.cte_tables.first().unwrap(), &expected); } +#[test] +fn parse_cte_in_data_modification_statements() { + match verified_stmt("WITH x AS (SELECT 1) UPDATE t SET bar = (SELECT * FROM x)") { + Statement::Query(query) => { + assert_eq!(query.with.unwrap().to_string(), "WITH x AS (SELECT 1)"); + assert!(matches!(*query.body, SetExpr::Update(_))); + } + other => panic!("Expected: UPDATE, got: {:?}", other), + } + + match verified_stmt("WITH t (x) AS (SELECT 9) DELETE FROM q WHERE id IN (SELECT x FROM t)") { + Statement::Query(query) => { + assert_eq!(query.with.unwrap().to_string(), "WITH t (x) AS (SELECT 9)"); + assert!(matches!(*query.body, SetExpr::Delete(_))); + } + other => panic!("Expected: DELETE, got: {:?}", other), + } + + match verified_stmt("WITH x AS (SELECT 42) INSERT INTO t SELECT foo FROM x") { + Statement::Query(query) => { + assert_eq!(query.with.unwrap().to_string(), "WITH x AS (SELECT 42)"); + assert!(matches!(*query.body, SetExpr::Insert(_))); + } + other => panic!("Expected: INSERT, got: {:?}", other), + } +} + #[test] fn parse_derived_tables() { let sql = "SELECT a.x, b.y FROM (SELECT x FROM foo) AS a CROSS JOIN (SELECT y FROM bar) AS b"; @@ -6443,17 +7505,9 @@ fn parse_derived_tables() { }), }, joins: vec![Join { - relation: TableFactor::Table { - name: ObjectName(vec!["t2".into()]), - alias: None, - args: None, - with_hints: vec![], - version: None, - partitions: vec![], - with_ordinality: false, - }, + relation: table_from_name(ObjectName::from(vec!["t2".into()])), global: false, - join_operator: JoinOperator::Inner(JoinConstraint::Natural), + join_operator: JoinOperator::Join(JoinConstraint::Natural), }], }), alias: None, @@ -6462,7 +7516,7 @@ fn parse_derived_tables() { } #[test] -fn parse_union_except_intersect() { +fn parse_union_except_intersect_minus() { // TODO: add assertions verified_stmt("SELECT 1 UNION SELECT 2"); verified_stmt("SELECT 1 UNION ALL SELECT 2"); @@ -6488,6 +7542,13 @@ fn parse_union_except_intersect() { verified_stmt("SELECT 1 AS x, 2 AS y INTERSECT BY NAME SELECT 9 AS y, 8 AS x"); verified_stmt("SELECT 1 AS x, 2 AS y INTERSECT ALL BY NAME SELECT 9 AS y, 8 AS x"); verified_stmt("SELECT 1 AS x, 2 AS y INTERSECT DISTINCT BY NAME SELECT 9 AS y, 8 AS x"); + + // Dialects that support `MINUS` as column identifier + // do not support `MINUS` as a set operator. 
+ let dialects = all_dialects_where(|d| !d.is_column_alias(&Keyword::MINUS, &mut Parser::new(d))); + dialects.verified_stmt("SELECT 1 MINUS SELECT 2"); + dialects.verified_stmt("SELECT 1 MINUS ALL SELECT 2"); + dialects.verified_stmt("SELECT 1 MINUS DISTINCT SELECT 1"); } #[test] @@ -6495,7 +7556,7 @@ fn parse_values() { verified_stmt("SELECT * FROM (VALUES (1), (2), (3))"); verified_stmt("SELECT * FROM (VALUES (1), (2), (3)), (VALUES (1, 2, 3))"); verified_stmt("SELECT * FROM (VALUES (1)) UNION VALUES (1)"); - verified_stmt("SELECT * FROM (VALUES ROW(1, true, 'a'), ROW(2, false, 'b')) AS t (a, b, c)"); + verified_stmt("SELECT * FROM (VALUES ROW(1, NULL, 'a'), ROW(2, NULL, 'b')) AS t (a, b, c)"); } #[test] @@ -6532,7 +7593,17 @@ fn parse_multiple_statements() { ); test_with("DELETE FROM foo", "SELECT", " bar"); test_with("INSERT INTO foo VALUES (1)", "SELECT", " bar"); - test_with("CREATE TABLE foo (baz INT)", "SELECT", " bar"); + // Since MySQL supports the `CREATE TABLE SELECT` syntax, this needs to be handled separately + let res = parse_sql_statements("CREATE TABLE foo (baz INT); SELECT bar"); + assert_eq!( + vec![ + one_statement_parses_to("CREATE TABLE foo (baz INT)", ""), + one_statement_parses_to("SELECT bar", ""), + ], + res.unwrap() + ); + // Check that extra semicolon at the end is stripped by normalization: + one_statement_parses_to("CREATE TABLE foo (baz INT);", "CREATE TABLE foo (baz INT)"); // Make sure that empty statements do not cause an error: let res = parse_sql_statements(";;"); assert_eq!(0, res.unwrap().len()); @@ -6558,6 +7629,9 @@ fn parse_substring() { verified_stmt("SELECT SUBSTRING('1', 1, 3)"); verified_stmt("SELECT SUBSTRING('1', 1)"); verified_stmt("SELECT SUBSTRING('1' FOR 3)"); + verified_stmt("SELECT SUBSTRING('foo' FROM 1 FOR 2) FROM t"); + verified_stmt("SELECT SUBSTR('foo' FROM 1 FOR 2) FROM t"); + verified_stmt("SELECT SUBSTR('foo', 1, 2) FROM t"); } #[test] @@ -6579,13 +7653,15 @@ fn parse_overlay() { let select = verified_only_select(sql); assert_eq!( &Expr::Overlay { - expr: Box::new(Expr::Value(Value::SingleQuotedString("abcdef".to_string()))), + expr: Box::new(Expr::Value( + (Value::SingleQuotedString("abcdef".to_string())).with_empty_span() + )), overlay_what: Box::new(Expr::Identifier(Ident::new("name"))), - overlay_from: Box::new(Expr::Value(number("3"))), + overlay_from: Box::new(Expr::value(number("3"))), overlay_for: Some(Box::new(Expr::BinaryOp { left: Box::new(Expr::Identifier(Ident::new("id"))), op: BinaryOperator::Plus, - right: Box::new(Expr::Value(number("1"))), + right: Box::new(Expr::value(number("1"))), })), }, expr_from_projection(only(&select.projection)) @@ -6772,6 +7848,7 @@ fn parse_create_view() { let sql = "CREATE VIEW myschema.myview AS SELECT foo FROM bar"; match verified_stmt(sql) { Statement::CreateView { + or_alter, name, columns, query, @@ -6784,7 +7861,9 @@ fn parse_create_view() { if_not_exists, temporary, to, + params, } => { + assert_eq!(or_alter, false); assert_eq!("myschema.myview", name.to_string()); assert_eq!(Vec::::new(), columns); assert_eq!("SELECT foo FROM bar", query.to_string()); @@ -6796,10 +7875,13 @@ fn parse_create_view() { assert!(!late_binding); assert!(!if_not_exists); assert!(!temporary); - assert!(to.is_none()) + assert!(to.is_none()); + assert!(params.is_none()); } _ => unreachable!(), } + + let _ = verified_stmt("CREATE OR ALTER VIEW v AS SELECT 1"); } #[test] @@ -6811,11 +7893,13 @@ fn parse_create_view_with_options() { CreateTableOptions::With(vec![ SqlOption::KeyValue { key: "foo".into(), - 
value: Expr::Value(Value::SingleQuotedString("bar".into())), + value: Expr::Value( + (Value::SingleQuotedString("bar".into())).with_empty_span() + ), }, SqlOption::KeyValue { key: "a".into(), - value: Expr::Value(number("123")), + value: Expr::value(number("123")), }, ]), options @@ -6832,6 +7916,7 @@ fn parse_create_view_with_columns() { // match all_dialects().verified_stmt(sql) { match all_dialects_except(|d| d.is::()).verified_stmt(sql) { Statement::CreateView { + or_alter, name, columns, or_replace, @@ -6844,7 +7929,9 @@ fn parse_create_view_with_columns() { if_not_exists, temporary, to, + params, } => { + assert_eq!(or_alter, false); assert_eq!("v", name.to_string()); assert_eq!( columns, @@ -6866,7 +7953,8 @@ fn parse_create_view_with_columns() { assert!(!late_binding); assert!(!if_not_exists); assert!(!temporary); - assert!(to.is_none()) + assert!(to.is_none()); + assert!(params.is_none()); } _ => unreachable!(), } @@ -6877,6 +7965,7 @@ fn parse_create_view_temporary() { let sql = "CREATE TEMPORARY VIEW myschema.myview AS SELECT foo FROM bar"; match verified_stmt(sql) { Statement::CreateView { + or_alter, name, columns, query, @@ -6889,7 +7978,9 @@ fn parse_create_view_temporary() { if_not_exists, temporary, to, + params, } => { + assert_eq!(or_alter, false); assert_eq!("myschema.myview", name.to_string()); assert_eq!(Vec::::new(), columns); assert_eq!("SELECT foo FROM bar", query.to_string()); @@ -6901,7 +7992,8 @@ fn parse_create_view_temporary() { assert!(!late_binding); assert!(!if_not_exists); assert!(temporary); - assert!(to.is_none()) + assert!(to.is_none()); + assert!(params.is_none()); } _ => unreachable!(), } @@ -6912,6 +8004,7 @@ fn parse_create_or_replace_view() { let sql = "CREATE OR REPLACE VIEW v AS SELECT 1"; match verified_stmt(sql) { Statement::CreateView { + or_alter, name, columns, or_replace, @@ -6924,7 +8017,9 @@ fn parse_create_or_replace_view() { if_not_exists, temporary, to, + params, } => { + assert_eq!(or_alter, false); assert_eq!("v", name.to_string()); assert_eq!(columns, vec![]); assert_eq!(options, CreateTableOptions::None); @@ -6936,7 +8031,8 @@ fn parse_create_or_replace_view() { assert!(!late_binding); assert!(!if_not_exists); assert!(!temporary); - assert!(to.is_none()) + assert!(to.is_none()); + assert!(params.is_none()); } _ => unreachable!(), } @@ -6951,6 +8047,7 @@ fn parse_create_or_replace_materialized_view() { let sql = "CREATE OR REPLACE MATERIALIZED VIEW v AS SELECT 1"; match verified_stmt(sql) { Statement::CreateView { + or_alter, name, columns, or_replace, @@ -6963,7 +8060,9 @@ fn parse_create_or_replace_materialized_view() { if_not_exists, temporary, to, + params, } => { + assert_eq!(or_alter, false); assert_eq!("v", name.to_string()); assert_eq!(columns, vec![]); assert_eq!(options, CreateTableOptions::None); @@ -6975,7 +8074,8 @@ fn parse_create_or_replace_materialized_view() { assert!(!late_binding); assert!(!if_not_exists); assert!(!temporary); - assert!(to.is_none()) + assert!(to.is_none()); + assert!(params.is_none()); } _ => unreachable!(), } @@ -6986,6 +8086,7 @@ fn parse_create_materialized_view() { let sql = "CREATE MATERIALIZED VIEW myschema.myview AS SELECT foo FROM bar"; match verified_stmt(sql) { Statement::CreateView { + or_alter, name, or_replace, columns, @@ -6998,7 +8099,9 @@ fn parse_create_materialized_view() { if_not_exists, temporary, to, + params, } => { + assert_eq!(or_alter, false); assert_eq!("myschema.myview", name.to_string()); assert_eq!(Vec::::new(), columns); assert_eq!("SELECT foo FROM bar", 
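
// A minimal sketch (not part of this patch) of the new `or_alter` flag that the
// CREATE VIEW tests above destructure; the field name comes from this patch,
// the parsing entry point is the regular public API.
#[test]
fn sketch_create_or_alter_view_flag() {
    use sqlparser::ast::Statement;
    use sqlparser::dialect::GenericDialect;
    use sqlparser::parser::Parser;

    let stmt = Parser::parse_sql(&GenericDialect {}, "CREATE OR ALTER VIEW v AS SELECT 1")
        .unwrap()
        .remove(0);
    match stmt {
        // `or_alter` is only set when the `OR ALTER` keywords were present.
        Statement::CreateView { or_alter, .. } => assert!(or_alter),
        _ => unreachable!(),
    }
}
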
query.to_string()); @@ -7010,7 +8113,8 @@ fn parse_create_materialized_view() { assert!(!late_binding); assert!(!if_not_exists); assert!(!temporary); - assert!(to.is_none()) + assert!(to.is_none()); + assert!(params.is_none()); } _ => unreachable!(), } @@ -7021,6 +8125,7 @@ fn parse_create_materialized_view_with_cluster_by() { let sql = "CREATE MATERIALIZED VIEW myschema.myview CLUSTER BY (foo) AS SELECT foo FROM bar"; match verified_stmt(sql) { Statement::CreateView { + or_alter, name, or_replace, columns, @@ -7033,7 +8138,9 @@ fn parse_create_materialized_view_with_cluster_by() { if_not_exists, temporary, to, + params, } => { + assert_eq!(or_alter, false); assert_eq!("myschema.myview", name.to_string()); assert_eq!(Vec::::new(), columns); assert_eq!("SELECT foo FROM bar", query.to_string()); @@ -7045,7 +8152,8 @@ fn parse_create_materialized_view_with_cluster_by() { assert!(!late_binding); assert!(!if_not_exists); assert!(!temporary); - assert!(to.is_none()) + assert!(to.is_none()); + assert!(params.is_none()); } _ => unreachable!(), } @@ -7127,6 +8235,9 @@ fn parse_drop_view() { } _ => unreachable!(), } + + verified_stmt("DROP MATERIALIZED VIEW a.b.c"); + verified_stmt("DROP MATERIALIZED VIEW IF EXISTS a.b.c"); } #[test] @@ -7140,53 +8251,70 @@ fn parse_invalid_subquery_without_parens() { #[test] fn parse_offset() { - let expect = Some(Offset { - value: Expr::Value(number("2")), - rows: OffsetRows::Rows, + // Dialects that support `OFFSET` as column identifiers + // don't support this syntax. + let dialects = + all_dialects_where(|d| !d.is_column_alias(&Keyword::OFFSET, &mut Parser::new(d))); + + let expected_limit_clause = &Some(LimitClause::LimitOffset { + limit: None, + offset: Some(Offset { + value: Expr::value(number("2")), + rows: OffsetRows::Rows, + }), + limit_by: vec![], }); - let ast = verified_query("SELECT foo FROM bar OFFSET 2 ROWS"); - assert_eq!(ast.offset, expect); - let ast = verified_query("SELECT foo FROM bar WHERE foo = 4 OFFSET 2 ROWS"); - assert_eq!(ast.offset, expect); - let ast = verified_query("SELECT foo FROM bar ORDER BY baz OFFSET 2 ROWS"); - assert_eq!(ast.offset, expect); - let ast = verified_query("SELECT foo FROM bar WHERE foo = 4 ORDER BY baz OFFSET 2 ROWS"); - assert_eq!(ast.offset, expect); - let ast = verified_query("SELECT foo FROM (SELECT * FROM bar OFFSET 2 ROWS) OFFSET 2 ROWS"); - assert_eq!(ast.offset, expect); + let ast = dialects.verified_query("SELECT foo FROM bar OFFSET 2 ROWS"); + assert_eq!(&ast.limit_clause, expected_limit_clause); + let ast = dialects.verified_query("SELECT foo FROM bar WHERE foo = 4 OFFSET 2 ROWS"); + assert_eq!(&ast.limit_clause, expected_limit_clause); + let ast = dialects.verified_query("SELECT foo FROM bar ORDER BY baz OFFSET 2 ROWS"); + assert_eq!(&ast.limit_clause, expected_limit_clause); + let ast = + dialects.verified_query("SELECT foo FROM bar WHERE foo = 4 ORDER BY baz OFFSET 2 ROWS"); + assert_eq!(&ast.limit_clause, expected_limit_clause); + let ast = + dialects.verified_query("SELECT foo FROM (SELECT * FROM bar OFFSET 2 ROWS) OFFSET 2 ROWS"); + assert_eq!(&ast.limit_clause, expected_limit_clause); match *ast.body { SetExpr::Select(s) => match only(s.from).relation { TableFactor::Derived { subquery, .. 
} => { - assert_eq!(subquery.offset, expect); + assert_eq!(&subquery.limit_clause, expected_limit_clause); } _ => panic!("Test broke"), }, _ => panic!("Test broke"), } - let ast = verified_query("SELECT 'foo' OFFSET 0 ROWS"); - assert_eq!( - ast.offset, - Some(Offset { - value: Expr::Value(number("0")), + let expected_limit_clause = LimitClause::LimitOffset { + limit: None, + offset: Some(Offset { + value: Expr::value(number("0")), rows: OffsetRows::Rows, - }) - ); - let ast = verified_query("SELECT 'foo' OFFSET 1 ROW"); - assert_eq!( - ast.offset, - Some(Offset { - value: Expr::Value(number("1")), + }), + limit_by: vec![], + }; + let ast = dialects.verified_query("SELECT 'foo' OFFSET 0 ROWS"); + assert_eq!(ast.limit_clause, Some(expected_limit_clause)); + let expected_limit_clause = LimitClause::LimitOffset { + limit: None, + offset: Some(Offset { + value: Expr::value(number("1")), rows: OffsetRows::Row, - }) - ); - let ast = verified_query("SELECT 'foo' OFFSET 1"); - assert_eq!( - ast.offset, - Some(Offset { - value: Expr::Value(number("1")), + }), + limit_by: vec![], + }; + let ast = dialects.verified_query("SELECT 'foo' OFFSET 1 ROW"); + assert_eq!(ast.limit_clause, Some(expected_limit_clause)); + let expected_limit_clause = LimitClause::LimitOffset { + limit: None, + offset: Some(Offset { + value: Expr::value(number("2")), rows: OffsetRows::None, - }) - ); + }), + limit_by: vec![], + }; + let ast = dialects.verified_query("SELECT 'foo' OFFSET 2"); + assert_eq!(ast.limit_clause, Some(expected_limit_clause)); } #[test] @@ -7194,7 +8322,7 @@ fn parse_fetch() { let fetch_first_two_rows_only = Some(Fetch { with_ties: false, percent: false, - quantity: Some(Expr::Value(number("2"))), + quantity: Some(Expr::value(number("2"))), }); let ast = verified_query("SELECT foo FROM bar FETCH FIRST 2 ROWS ONLY"); assert_eq!(ast.fetch, fetch_first_two_rows_only); @@ -7221,7 +8349,7 @@ fn parse_fetch() { Some(Fetch { with_ties: true, percent: false, - quantity: Some(Expr::Value(number("2"))), + quantity: Some(Expr::value(number("2"))), }) ); let ast = verified_query("SELECT foo FROM bar FETCH FIRST 50 PERCENT ROWS ONLY"); @@ -7230,19 +8358,21 @@ fn parse_fetch() { Some(Fetch { with_ties: false, percent: true, - quantity: Some(Expr::Value(number("50"))), + quantity: Some(Expr::value(number("50"))), }) ); let ast = verified_query( "SELECT foo FROM bar WHERE foo = 4 ORDER BY baz OFFSET 2 ROWS FETCH FIRST 2 ROWS ONLY", ); - assert_eq!( - ast.offset, - Some(Offset { - value: Expr::Value(number("2")), + let expected_limit_clause = Some(LimitClause::LimitOffset { + limit: None, + offset: Some(Offset { + value: Expr::value(number("2")), rows: OffsetRows::Rows, - }) - ); + }), + limit_by: vec![], + }); + assert_eq!(ast.limit_clause, expected_limit_clause); assert_eq!(ast.fetch, fetch_first_two_rows_only); let ast = verified_query( "SELECT foo FROM (SELECT * FROM bar FETCH FIRST 2 ROWS ONLY) FETCH FIRST 2 ROWS ONLY", @@ -7258,24 +8388,20 @@ fn parse_fetch() { _ => panic!("Test broke"), } let ast = verified_query("SELECT foo FROM (SELECT * FROM bar OFFSET 2 ROWS FETCH FIRST 2 ROWS ONLY) OFFSET 2 ROWS FETCH FIRST 2 ROWS ONLY"); - assert_eq!( - ast.offset, - Some(Offset { - value: Expr::Value(number("2")), + let expected_limit_clause = &Some(LimitClause::LimitOffset { + limit: None, + offset: Some(Offset { + value: Expr::value(number("2")), rows: OffsetRows::Rows, - }) - ); + }), + limit_by: vec![], + }); + assert_eq!(&ast.limit_clause, expected_limit_clause); assert_eq!(ast.fetch, fetch_first_two_rows_only); match 
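
// A minimal sketch (not part of this patch) of the reworked limit handling that
// the OFFSET/FETCH tests above assert: the former `limit`, `offset` and
// `limit_by` fields of `Query` are folded into a single `limit_clause`
// (names as introduced by this patch).
#[test]
fn sketch_limit_clause_shape() {
    use sqlparser::ast::{LimitClause, Statement};
    use sqlparser::dialect::GenericDialect;
    use sqlparser::parser::Parser;

    let stmt = Parser::parse_sql(&GenericDialect {}, "SELECT foo FROM bar LIMIT 1 OFFSET 2")
        .unwrap()
        .remove(0);
    let query = match stmt {
        Statement::Query(query) => query,
        _ => unreachable!(),
    };
    match query.limit_clause {
        // LIMIT and OFFSET land in the same variant; `limit_by` stays empty here.
        Some(LimitClause::LimitOffset { limit, offset, limit_by }) => {
            assert!(limit.is_some());
            assert!(offset.is_some());
            assert!(limit_by.is_empty());
        }
        other => panic!("unexpected limit clause: {other:?}"),
    }
}
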
*ast.body { SetExpr::Select(s) => match only(s.from).relation { TableFactor::Derived { subquery, .. } => { - assert_eq!( - subquery.offset, - Some(Offset { - value: Expr::Value(number("2")), - rows: OffsetRows::Rows, - }) - ); + assert_eq!(&subquery.limit_clause, expected_limit_clause); assert_eq!(subquery.fetch, fetch_first_two_rows_only); } _ => panic!("Test broke"), @@ -7314,7 +8440,7 @@ fn lateral_derived() { let lateral_str = if lateral_in { "LATERAL " } else { "" }; let sql = format!( "SELECT * FROM customer LEFT JOIN {lateral_str}\ - (SELECT * FROM order WHERE order.customer = customer.id LIMIT 3) AS order ON true" + (SELECT * FROM orders WHERE orders.customer = customer.id LIMIT 3) AS orders ON 1" ); let select = verified_only_select(&sql); let from = only(select.from); @@ -7322,7 +8448,9 @@ fn lateral_derived() { let join = &from.joins[0]; assert_eq!( join.join_operator, - JoinOperator::LeftOuter(JoinConstraint::On(Expr::Value(Value::Boolean(true)))) + JoinOperator::Left(JoinConstraint::On(Expr::Value( + (test_utils::number("1")).with_empty_span() + ))) ); if let TableFactor::Derived { lateral, @@ -7331,10 +8459,10 @@ fn lateral_derived() { } = join.relation { assert_eq!(lateral_in, lateral); - assert_eq!(Ident::new("order"), alias.name); + assert_eq!(Ident::new("orders"), alias.name); assert_eq!( subquery.to_string(), - "SELECT * FROM order WHERE order.customer = customer.id LIMIT 3" + "SELECT * FROM orders WHERE orders.customer = customer.id LIMIT 3" ); } else { unreachable!() @@ -7365,35 +8493,26 @@ fn lateral_function() { let sql = "SELECT * FROM customer LEFT JOIN LATERAL generate_series(1, customer.id)"; let actual_select_only = verified_only_select(sql); let expected = Select { + select_token: AttachedToken::empty(), distinct: None, top: None, - projection: vec![SelectItem::Wildcard(WildcardAdditionalOptions { - opt_ilike: None, - opt_exclude: None, - opt_except: None, - opt_rename: None, - opt_replace: None, - })], + projection: vec![SelectItem::Wildcard(WildcardAdditionalOptions::default())], + top_before_distinct: false, into: None, from: vec![TableWithJoins { - relation: TableFactor::Table { - name: ObjectName(vec![Ident { - value: "customer".to_string(), - quote_style: None, - }]), - alias: None, - args: None, - with_hints: vec![], - version: None, - partitions: vec![], - with_ordinality: false, - }, + relation: table_from_name(ObjectName::from(vec![Ident { + value: "customer".to_string(), + quote_style: None, + span: Span::empty(), + }])), joins: vec![Join { relation: TableFactor::Function { lateral: true, - name: ObjectName(vec!["generate_series".into()]), + name: ObjectName::from(vec!["generate_series".into()]), args: vec![ - FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::Value(number("1")))), + FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::Value( + (number("1")).with_empty_span(), + ))), FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::CompoundIdentifier( vec![Ident::new("customer"), Ident::new("id")], ))), @@ -7401,7 +8520,7 @@ fn lateral_function() { alias: None, }, global: false, - join_operator: JoinOperator::LeftOuter(JoinConstraint::None), + join_operator: JoinOperator::Left(JoinConstraint::None), }], }], lateral_views: vec![], @@ -7417,13 +8536,19 @@ fn lateral_function() { window_before_qualify: false, value_table_mode: None, connect_by: None, + flavor: SelectFlavor::Standard, }; assert_eq!(actual_select_only, expected); } #[test] fn parse_start_transaction() { - match verified_stmt("START TRANSACTION READ ONLY, READ WRITE, ISOLATION LEVEL 
SERIALIZABLE") { + let dialects = all_dialects_except(|d| + // BigQuery does not support this syntax + d.is::()); + match dialects + .verified_stmt("START TRANSACTION READ ONLY, READ WRITE, ISOLATION LEVEL SERIALIZABLE") + { Statement::StartTransaction { modes, .. } => assert_eq!( modes, vec![ @@ -7437,7 +8562,7 @@ fn parse_start_transaction() { // For historical reasons, PostgreSQL allows the commas between the modes to // be omitted. - match one_statement_parses_to( + match dialects.one_statement_parses_to( "START TRANSACTION READ ONLY READ WRITE ISOLATION LEVEL SERIALIZABLE", "START TRANSACTION READ ONLY, READ WRITE, ISOLATION LEVEL SERIALIZABLE", ) { @@ -7452,44 +8577,65 @@ fn parse_start_transaction() { _ => unreachable!(), } - verified_stmt("START TRANSACTION"); - one_statement_parses_to("BEGIN", "BEGIN TRANSACTION"); - one_statement_parses_to("BEGIN WORK", "BEGIN TRANSACTION"); - one_statement_parses_to("BEGIN TRANSACTION", "BEGIN TRANSACTION"); + dialects.verified_stmt("START TRANSACTION"); + dialects.verified_stmt("BEGIN"); + dialects.verified_stmt("BEGIN WORK"); + dialects.verified_stmt("BEGIN TRANSACTION"); - verified_stmt("START TRANSACTION ISOLATION LEVEL READ UNCOMMITTED"); - verified_stmt("START TRANSACTION ISOLATION LEVEL READ COMMITTED"); - verified_stmt("START TRANSACTION ISOLATION LEVEL REPEATABLE READ"); - verified_stmt("START TRANSACTION ISOLATION LEVEL SERIALIZABLE"); + dialects.verified_stmt("START TRANSACTION ISOLATION LEVEL READ UNCOMMITTED"); + dialects.verified_stmt("START TRANSACTION ISOLATION LEVEL READ COMMITTED"); + dialects.verified_stmt("START TRANSACTION ISOLATION LEVEL REPEATABLE READ"); + dialects.verified_stmt("START TRANSACTION ISOLATION LEVEL SERIALIZABLE"); // Regression test for https://github.com/sqlparser-rs/sqlparser-rs/pull/139, // in which START TRANSACTION would fail to parse if followed by a statement // terminator. assert_eq!( - parse_sql_statements("START TRANSACTION; SELECT 1"), + dialects.parse_sql_statements("START TRANSACTION; SELECT 1"), Ok(vec![ verified_stmt("START TRANSACTION"), verified_stmt("SELECT 1"), ]) ); - let res = parse_sql_statements("START TRANSACTION ISOLATION LEVEL BAD"); + let res = dialects.parse_sql_statements("START TRANSACTION ISOLATION LEVEL BAD"); assert_eq!( ParserError::ParserError("Expected: isolation level, found: BAD".to_string()), res.unwrap_err() ); - let res = parse_sql_statements("START TRANSACTION BAD"); + let res = dialects.parse_sql_statements("START TRANSACTION BAD"); assert_eq!( ParserError::ParserError("Expected: end of statement, found: BAD".to_string()), res.unwrap_err() ); - let res = parse_sql_statements("START TRANSACTION READ ONLY,"); + let res = dialects.parse_sql_statements("START TRANSACTION READ ONLY,"); assert_eq!( ParserError::ParserError("Expected: transaction mode, found: EOF".to_string()), res.unwrap_err() ); + + // MS-SQL syntax + let dialects = all_dialects_where(|d| d.supports_start_transaction_modifier()); + dialects.verified_stmt("BEGIN TRY"); + dialects.verified_stmt("BEGIN CATCH"); + + let dialects = all_dialects_where(|d| { + d.supports_start_transaction_modifier() && d.supports_end_transaction_modifier() + }); + dialects + .parse_sql_statements( + r#" + BEGIN TRY; + SELECT 1/0; + END TRY; + BEGIN CATCH; + EXECUTE foo; + END CATCH; + "#, + ) + .unwrap(); } #[test] @@ -7498,11 +8644,11 @@ fn parse_set_transaction() { // TRANSACTION, so no need to duplicate the tests here. We just do a quick // sanity check. 
match verified_stmt("SET TRANSACTION READ ONLY, READ WRITE, ISOLATION LEVEL SERIALIZABLE") { - Statement::SetTransaction { + Statement::Set(Set::SetTransaction { modes, session, snapshot, - } => { + }) => { assert_eq!( modes, vec![ @@ -7521,21 +8667,40 @@ fn parse_set_transaction() { #[test] fn parse_set_variable() { match verified_stmt("SET SOMETHING = '1'") { - Statement::SetVariable { - local, + Statement::Set(Set::SingleAssignment { + scope, hivevar, - variables, - value, - } => { - assert!(!local); + variable, + values, + }) => { + assert_eq!(scope, None); assert!(!hivevar); + assert_eq!(variable, ObjectName::from(vec!["SOMETHING".into()])); assert_eq!( - variables, - OneOrManyWithParens::One(ObjectName(vec!["SOMETHING".into()])) + values, + vec![Expr::Value( + (Value::SingleQuotedString("1".into())).with_empty_span() + )] ); + } + _ => unreachable!(), + } + + match verified_stmt("SET GLOBAL VARIABLE = 'Value'") { + Statement::Set(Set::SingleAssignment { + scope, + hivevar, + variable, + values, + }) => { + assert_eq!(scope, Some(ContextModifier::Global)); + assert!(!hivevar); + assert_eq!(variable, ObjectName::from(vec!["VARIABLE".into()])); assert_eq!( - value, - vec![Expr::Value(Value::SingleQuotedString("1".into()))] + values, + vec![Expr::Value( + (Value::SingleQuotedString("Value".into())).with_empty_span() + )] ); } _ => unreachable!(), @@ -7544,28 +8709,21 @@ fn parse_set_variable() { let multi_variable_dialects = all_dialects_where(|d| d.supports_parenthesized_set_variables()); let sql = r#"SET (a, b, c) = (1, 2, 3)"#; match multi_variable_dialects.verified_stmt(sql) { - Statement::SetVariable { - local, - hivevar, - variables, - value, - } => { - assert!(!local); - assert!(!hivevar); + Statement::Set(Set::ParenthesizedAssignments { variables, values }) => { assert_eq!( variables, - OneOrManyWithParens::Many(vec![ - ObjectName(vec!["a".into()]), - ObjectName(vec!["b".into()]), - ObjectName(vec!["c".into()]), - ]) + vec![ + ObjectName::from(vec!["a".into()]), + ObjectName::from(vec!["b".into()]), + ObjectName::from(vec!["c".into()]), + ] ); assert_eq!( - value, + values, vec![ - Expr::Value(number("1")), - Expr::Value(number("2")), - Expr::Value(number("3")), + Expr::value(number("1")), + Expr::value(number("2")), + Expr::value(number("3")), ] ); } @@ -7621,21 +8779,20 @@ fn parse_set_variable() { #[test] fn parse_set_role_as_variable() { match verified_stmt("SET role = 'foobar'") { - Statement::SetVariable { - local, + Statement::Set(Set::SingleAssignment { + scope, hivevar, - variables, - value, - } => { - assert!(!local); + variable, + values, + }) => { + assert_eq!(scope, None); assert!(!hivevar); + assert_eq!(variable, ObjectName::from(vec!["role".into()])); assert_eq!( - variables, - OneOrManyWithParens::One(ObjectName(vec!["role".into()])) - ); - assert_eq!( - value, - vec![Expr::Value(Value::SingleQuotedString("foobar".into()))] + values, + vec![Expr::Value( + (Value::SingleQuotedString("foobar".into())).with_empty_span() + )] ); } _ => unreachable!(), @@ -7651,15 +8808,16 @@ fn parse_double_colon_cast_at_timezone() { &Expr::AtTimeZone { timestamp: Box::new(Expr::Cast { kind: CastKind::DoubleColon, - expr: Box::new(Expr::Value(Value::SingleQuotedString( - "2001-01-01T00:00:00.000Z".to_string() - ),)), + expr: Box::new(Expr::Value( + (Value::SingleQuotedString("2001-01-01T00:00:00.000Z".to_string())) + .with_empty_span() + )), data_type: DataType::Timestamp(None, TimezoneInfo::None), format: None }), - time_zone: Box::new(Expr::Value(Value::SingleQuotedString( - 
"Europe/Brussels".to_string() - ))), + time_zone: Box::new(Expr::Value( + (Value::SingleQuotedString("Europe/Brussels".to_string())).with_empty_span() + )), }, expr_from_projection(only(&select.projection)), ); @@ -7668,21 +8826,20 @@ fn parse_double_colon_cast_at_timezone() { #[test] fn parse_set_time_zone() { match verified_stmt("SET TIMEZONE = 'UTC'") { - Statement::SetVariable { - local, + Statement::Set(Set::SingleAssignment { + scope, hivevar, - variables: variable, - value, - } => { - assert!(!local); + variable, + values, + }) => { + assert_eq!(scope, None); assert!(!hivevar); + assert_eq!(variable, ObjectName::from(vec!["TIMEZONE".into()])); assert_eq!( - variable, - OneOrManyWithParens::One(ObjectName(vec!["TIMEZONE".into()])) - ); - assert_eq!( - value, - vec![Expr::Value(Value::SingleQuotedString("UTC".into()))] + values, + vec![Expr::Value( + (Value::SingleQuotedString("UTC".into())).with_empty_span() + )] ); } _ => unreachable!(), @@ -7691,26 +8848,15 @@ fn parse_set_time_zone() { one_statement_parses_to("SET TIME ZONE TO 'UTC'", "SET TIMEZONE = 'UTC'"); } -#[test] -fn parse_set_time_zone_alias() { - match verified_stmt("SET TIME ZONE 'UTC'") { - Statement::SetTimeZone { local, value } => { - assert!(!local); - assert_eq!(value, Expr::Value(Value::SingleQuotedString("UTC".into()))); - } - _ => unreachable!(), - } -} - #[test] fn parse_commit() { match verified_stmt("COMMIT") { - Statement::Commit { chain: false } => (), + Statement::Commit { chain: false, .. } => (), _ => unreachable!(), } match verified_stmt("COMMIT AND CHAIN") { - Statement::Commit { chain: true } => (), + Statement::Commit { chain: true, .. } => (), _ => unreachable!(), } @@ -7725,13 +8871,17 @@ fn parse_commit() { #[test] fn parse_end() { - one_statement_parses_to("END AND NO CHAIN", "COMMIT"); - one_statement_parses_to("END WORK AND NO CHAIN", "COMMIT"); - one_statement_parses_to("END TRANSACTION AND NO CHAIN", "COMMIT"); - one_statement_parses_to("END WORK AND CHAIN", "COMMIT AND CHAIN"); - one_statement_parses_to("END TRANSACTION AND CHAIN", "COMMIT AND CHAIN"); - one_statement_parses_to("END WORK", "COMMIT"); - one_statement_parses_to("END TRANSACTION", "COMMIT"); + one_statement_parses_to("END AND NO CHAIN", "END"); + one_statement_parses_to("END WORK AND NO CHAIN", "END"); + one_statement_parses_to("END TRANSACTION AND NO CHAIN", "END"); + one_statement_parses_to("END WORK AND CHAIN", "END AND CHAIN"); + one_statement_parses_to("END TRANSACTION AND CHAIN", "END AND CHAIN"); + one_statement_parses_to("END WORK", "END"); + one_statement_parses_to("END TRANSACTION", "END"); + // MS-SQL syntax + let dialects = all_dialects_where(|d| d.supports_end_transaction_modifier()); + dialects.verified_stmt("END TRY"); + dialects.verified_stmt("END CATCH"); } #[test] @@ -7798,18 +8948,28 @@ fn ensure_multiple_dialects_are_tested() { #[test] fn parse_create_index() { let sql = "CREATE UNIQUE INDEX IF NOT EXISTS idx_name ON test(name,age DESC)"; - let indexed_columns = vec![ - OrderByExpr { - expr: Expr::Identifier(Ident::new("name")), - asc: None, - nulls_first: None, - with_fill: None, + let indexed_columns: Vec = vec![ + IndexColumn { + operator_class: None, + column: OrderByExpr { + expr: Expr::Identifier(Ident::new("name")), + with_fill: None, + options: OrderByOptions { + asc: None, + nulls_first: None, + }, + }, }, - OrderByExpr { - expr: Expr::Identifier(Ident::new("age")), - asc: Some(false), - nulls_first: None, - with_fill: None, + IndexColumn { + operator_class: None, + column: OrderByExpr { + expr: 
Expr::Identifier(Ident::new("age")), + with_fill: None, + options: OrderByOptions { + asc: Some(false), + nulls_first: None, + }, + }, }, ]; match verified_stmt(sql) { @@ -7833,19 +8993,29 @@ fn parse_create_index() { #[test] fn test_create_index_with_using_function() { - let sql = "CREATE UNIQUE INDEX IF NOT EXISTS idx_name ON test USING btree (name,age DESC)"; - let indexed_columns = vec![ - OrderByExpr { - expr: Expr::Identifier(Ident::new("name")), - asc: None, - nulls_first: None, - with_fill: None, + let sql = "CREATE UNIQUE INDEX IF NOT EXISTS idx_name ON test USING BTREE (name,age DESC)"; + let indexed_columns: Vec = vec![ + IndexColumn { + operator_class: None, + column: OrderByExpr { + expr: Expr::Identifier(Ident::new("name")), + with_fill: None, + options: OrderByOptions { + asc: None, + nulls_first: None, + }, + }, }, - OrderByExpr { - expr: Expr::Identifier(Ident::new("age")), - asc: Some(false), - nulls_first: None, - with_fill: None, + IndexColumn { + operator_class: None, + column: OrderByExpr { + expr: Expr::Identifier(Ident::new("age")), + with_fill: None, + options: OrderByOptions { + asc: Some(false), + nulls_first: None, + }, + }, }, ]; match verified_stmt(sql) { @@ -7864,7 +9034,7 @@ fn test_create_index_with_using_function() { }) => { assert_eq!("idx_name", name.to_string()); assert_eq!("test", table_name.to_string()); - assert_eq!("btree", using.unwrap().to_string()); + assert_eq!("BTREE", using.unwrap().to_string()); assert_eq!(indexed_columns, columns); assert!(unique); assert!(!concurrently); @@ -7879,17 +9049,22 @@ fn test_create_index_with_using_function() { #[test] fn test_create_index_with_with_clause() { let sql = "CREATE UNIQUE INDEX title_idx ON films(title) WITH (fillfactor = 70, single_param)"; - let indexed_columns = vec![OrderByExpr { - expr: Expr::Identifier(Ident::new("title")), - asc: None, - nulls_first: None, - with_fill: None, + let indexed_columns: Vec = vec![IndexColumn { + column: OrderByExpr { + expr: Expr::Identifier(Ident::new("title")), + options: OrderByOptions { + asc: None, + nulls_first: None, + }, + with_fill: None, + }, + operator_class: None, }]; let with_parameters = vec![ Expr::BinaryOp { left: Box::new(Expr::Identifier(Ident::new("fillfactor"))), op: BinaryOperator::Eq, - right: Box::new(Expr::Value(number("70"))), + right: Box::new(Expr::value(number("70"))), }, Expr::Identifier(Ident::new("single_param")), ]; @@ -8007,7 +9182,7 @@ fn parse_grant() { granted_by, .. } => match (privileges, objects) { - (Privileges::Actions(actions), GrantObjects::Tables(objects)) => { + (Privileges::Actions(actions), Some(GrantObjects::Tables(objects))) => { assert_eq!( vec![ Action::Select { columns: None }, @@ -8017,10 +9192,12 @@ fn parse_grant() { Ident { value: "shape".into(), quote_style: None, + span: Span::empty(), }, Ident { value: "size".into(), quote_style: None, + span: Span::empty(), }, ]) }, @@ -8030,8 +9207,8 @@ fn parse_grant() { Action::References { columns: None }, Action::Trigger, Action::Connect, - Action::Create, - Action::Execute, + Action::Create { obj_type: None }, + Action::Execute { obj_type: None }, Action::Temporary, ], actions @@ -8055,7 +9232,7 @@ fn parse_grant() { with_grant_option, .. 
} => match (privileges, objects) { - (Privileges::Actions(actions), GrantObjects::AllTablesInSchema { schemas }) => { + (Privileges::Actions(actions), Some(GrantObjects::AllTablesInSchema { schemas })) => { assert_eq!(vec![Action::Insert { columns: None }], actions); assert_eq_vec(&["public"], &schemas); assert_eq_vec(&["browser"], &grantees); @@ -8075,7 +9252,7 @@ fn parse_grant() { granted_by, .. } => match (privileges, objects, granted_by) { - (Privileges::Actions(actions), GrantObjects::Sequences(objects), None) => { + (Privileges::Actions(actions), Some(GrantObjects::Sequences(objects)), None) => { assert_eq!( vec![Action::Usage, Action::Select { columns: None }], actions @@ -8112,7 +9289,7 @@ fn parse_grant() { Privileges::All { with_privileges_keyword, }, - GrantObjects::Schemas(schemas), + Some(GrantObjects::Schemas(schemas)), ) => { assert!(!with_privileges_keyword); assert_eq_vec(&["aa", "b"], &schemas); @@ -8129,7 +9306,10 @@ fn parse_grant() { objects, .. } => match (privileges, objects) { - (Privileges::Actions(actions), GrantObjects::AllSequencesInSchema { schemas }) => { + ( + Privileges::Actions(actions), + Some(GrantObjects::AllSequencesInSchema { schemas }), + ) => { assert_eq!(vec![Action::Usage], actions); assert_eq_vec(&["bus"], &schemas); } @@ -8137,18 +9317,58 @@ fn parse_grant() { }, _ => unreachable!(), } + + verified_stmt("GRANT SELECT ON ALL TABLES IN SCHEMA db1.sc1 TO ROLE role1"); + verified_stmt("GRANT SELECT ON ALL TABLES IN SCHEMA db1.sc1 TO ROLE role1 WITH GRANT OPTION"); + verified_stmt("GRANT SELECT ON ALL TABLES IN SCHEMA db1.sc1 TO DATABASE ROLE role1"); + verified_stmt("GRANT SELECT ON ALL TABLES IN SCHEMA db1.sc1 TO APPLICATION role1"); + verified_stmt("GRANT SELECT ON ALL TABLES IN SCHEMA db1.sc1 TO APPLICATION ROLE role1"); + verified_stmt("GRANT SELECT ON ALL TABLES IN SCHEMA db1.sc1 TO SHARE share1"); + verified_stmt("GRANT USAGE ON SCHEMA sc1 TO a:b"); + verified_stmt("GRANT USAGE ON SCHEMA sc1 TO GROUP group1"); + verified_stmt("GRANT OWNERSHIP ON ALL TABLES IN SCHEMA DEV_STAS_ROGOZHIN TO ROLE ANALYST"); + verified_stmt("GRANT USAGE ON DATABASE db1 TO ROLE role1"); + verified_stmt("GRANT USAGE ON WAREHOUSE wh1 TO ROLE role1"); + verified_stmt("GRANT OWNERSHIP ON INTEGRATION int1 TO ROLE role1"); + verified_stmt("GRANT SELECT ON VIEW view1 TO ROLE role1"); } #[test] fn test_revoke() { - let sql = "REVOKE ALL PRIVILEGES ON users, auth FROM analyst CASCADE"; + let sql = "REVOKE ALL PRIVILEGES ON users, auth FROM analyst"; match verified_stmt(sql) { Statement::Revoke { privileges, - objects: GrantObjects::Tables(tables), + objects: Some(GrantObjects::Tables(tables)), grantees, + granted_by, cascade, + } => { + assert_eq!( + Privileges::All { + with_privileges_keyword: true + }, + privileges + ); + assert_eq_vec(&["users", "auth"], &tables); + assert_eq_vec(&["analyst"], &grantees); + assert_eq!(cascade, None); + assert_eq!(None, granted_by); + } + _ => unreachable!(), + } +} + +#[test] +fn test_revoke_with_cascade() { + let sql = "REVOKE ALL PRIVILEGES ON users, auth FROM analyst CASCADE"; + match all_dialects_except(|d| d.is::()).verified_stmt(sql) { + Statement::Revoke { + privileges, + objects: Some(GrantObjects::Tables(tables)), + grantees, granted_by, + cascade, } => { assert_eq!( Privileges::All { @@ -8158,7 +9378,7 @@ fn test_revoke() { ); assert_eq_vec(&["users", "auth"], &tables); assert_eq_vec(&["analyst"], &grantees); - assert!(cascade); + assert_eq!(cascade, Some(CascadeOption::Cascade)); assert_eq!(None, granted_by); } _ => 
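
// A minimal sketch (not part of this patch) of the GRANT change asserted above:
// the statement's grant target is now `Option<GrantObjects>` (shape as
// introduced by this patch); the dialect choice here is ours.
#[test]
fn sketch_grant_objects_are_optional() {
    use sqlparser::ast::{GrantObjects, Statement};
    use sqlparser::dialect::GenericDialect;
    use sqlparser::parser::Parser;

    let sql = "GRANT SELECT ON ALL TABLES IN SCHEMA db1.sc1 TO ROLE role1";
    let stmt = Parser::parse_sql(&GenericDialect {}, sql).unwrap().remove(0);
    match stmt {
        Statement::Grant { objects, .. } => {
            assert!(matches!(objects, Some(GrantObjects::AllTablesInSchema { .. })));
        }
        _ => unreachable!(),
    }
}
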
unreachable!(), @@ -8177,6 +9397,7 @@ fn parse_merge() { source, on, clauses, + .. }, Statement::Merge { into: no_into, @@ -8184,6 +9405,7 @@ fn parse_merge() { source: source_no_into, on: on_no_into, clauses: clauses_no_into, + .. }, ) => { assert!(into); @@ -8192,7 +9414,7 @@ fn parse_merge() { assert_eq!( table, TableFactor::Table { - name: ObjectName(vec![Ident::new("s"), Ident::new("bar")]), + name: ObjectName::from(vec![Ident::new("s"), Ident::new("bar")]), alias: Some(TableAlias { name: Ident::new("dest"), columns: vec![], @@ -8202,6 +9424,9 @@ fn parse_merge() { version: None, partitions: vec![], with_ordinality: false, + json_path: None, + sample: None, + index_hints: vec![], } ); assert_eq!(table, table_no_into); @@ -8213,22 +9438,19 @@ fn parse_merge() { subquery: Box::new(Query { with: None, body: Box::new(SetExpr::Select(Box::new(Select { + select_token: AttachedToken::empty(), distinct: None, top: None, + top_before_distinct: false, projection: vec![SelectItem::Wildcard( WildcardAdditionalOptions::default() )], into: None, from: vec![TableWithJoins { - relation: TableFactor::Table { - name: ObjectName(vec![Ident::new("s"), Ident::new("foo")]), - alias: None, - args: None, - with_hints: vec![], - version: None, - partitions: vec![], - with_ordinality: false, - }, + relation: table_from_name(ObjectName::from(vec![ + Ident::new("s"), + Ident::new("foo") + ])), joins: vec![], }], lateral_views: vec![], @@ -8244,21 +9466,22 @@ fn parse_merge() { qualify: None, value_table_mode: None, connect_by: None, + flavor: SelectFlavor::Standard, }))), order_by: None, - limit: None, - limit_by: vec![], - offset: None, + limit_clause: None, fetch: None, locks: vec![], for_clause: None, settings: None, format_clause: None, + pipe_operators: vec![], }), alias: Some(TableAlias { name: Ident { value: "stg".to_string(), quote_style: None, + span: Span::empty(), }, columns: vec![], }), @@ -8331,14 +9554,14 @@ fn parse_merge() { Ident::new("A"), ])), op: BinaryOperator::Eq, - right: Box::new(Expr::Value(Value::SingleQuotedString( - "a".to_string() - ))), + right: Box::new(Expr::Value( + (Value::SingleQuotedString("a".to_string())).with_empty_span() + )), }), action: MergeAction::Update { assignments: vec![ Assignment { - target: AssignmentTarget::ColumnName(ObjectName(vec![ + target: AssignmentTarget::ColumnName(ObjectName::from(vec![ Ident::new("dest"), Ident::new("F") ])), @@ -8348,7 +9571,7 @@ fn parse_merge() { ]), }, Assignment { - target: AssignmentTarget::ColumnName(ObjectName(vec![ + target: AssignmentTarget::ColumnName(ObjectName::from(vec![ Ident::new("dest"), Ident::new("G") ])), @@ -8372,7 +9595,20 @@ fn parse_merge() { _ => unreachable!(), }; - let sql = "MERGE INTO s.bar AS dest USING newArrivals AS S ON false WHEN NOT MATCHED THEN INSERT VALUES (stg.A, stg.B, stg.C)"; + let sql = "MERGE INTO s.bar AS dest USING newArrivals AS S ON (1 > 1) WHEN NOT MATCHED THEN INSERT VALUES (stg.A, stg.B, stg.C)"; + verified_stmt(sql); +} + +#[test] +fn test_merge_with_output() { + let sql = "MERGE INTO target_table USING source_table \ + ON target_table.id = source_table.oooid \ + WHEN MATCHED THEN \ + UPDATE SET target_table.description = source_table.description \ + WHEN NOT MATCHED THEN \ + INSERT (ID, description) VALUES (source_table.id, source_table.description) \ + OUTPUT inserted.* INTO log_target"; + verified_stmt(sql); } @@ -8455,11 +9691,12 @@ fn test_lock_table() { let lock = ast.locks.pop().unwrap(); assert_eq!(lock.lock_type, LockType::Update); assert_eq!( - lock.of.unwrap().0, - 
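
// A minimal sketch (not part of this patch): the MERGE ... OUTPUT ... INTO
// clause exercised by `test_merge_with_output` round-trips through the parser
// like any other verified statement.
#[test]
fn sketch_merge_output_roundtrip() {
    use sqlparser::dialect::GenericDialect;
    use sqlparser::parser::Parser;

    let sql = concat!(
        "MERGE INTO target_table USING source_table ",
        "ON target_table.id = source_table.oooid ",
        "WHEN MATCHED THEN UPDATE SET target_table.description = source_table.description ",
        "WHEN NOT MATCHED THEN INSERT (ID, description) VALUES (source_table.id, source_table.description) ",
        "OUTPUT inserted.* INTO log_target"
    );
    let stmt = Parser::parse_sql(&GenericDialect {}, sql).unwrap().remove(0);
    assert_eq!(stmt.to_string(), sql);
}
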
vec![Ident { + lock.of.unwrap(), + ObjectName::from(vec![Ident { value: "school".to_string(), - quote_style: None - }] + quote_style: None, + span: Span::empty(), + }]) ); assert!(lock.nonblock.is_none()); @@ -8469,11 +9706,12 @@ fn test_lock_table() { let lock = ast.locks.pop().unwrap(); assert_eq!(lock.lock_type, LockType::Share); assert_eq!( - lock.of.unwrap().0, - vec![Ident { + lock.of.unwrap(), + ObjectName::from(vec![Ident { value: "school".to_string(), - quote_style: None - }] + quote_style: None, + span: Span::empty(), + }]) ); assert!(lock.nonblock.is_none()); @@ -8483,21 +9721,23 @@ fn test_lock_table() { let lock = ast.locks.remove(0); assert_eq!(lock.lock_type, LockType::Share); assert_eq!( - lock.of.unwrap().0, - vec![Ident { + lock.of.unwrap(), + ObjectName::from(vec![Ident { value: "school".to_string(), - quote_style: None - }] + quote_style: None, + span: Span::empty(), + }]) ); assert!(lock.nonblock.is_none()); let lock = ast.locks.remove(0); assert_eq!(lock.lock_type, LockType::Update); assert_eq!( - lock.of.unwrap().0, - vec![Ident { + lock.of.unwrap(), + ObjectName::from(vec![Ident { value: "student".to_string(), - quote_style: None - }] + quote_style: None, + span: Span::empty(), + }]) ); assert!(lock.nonblock.is_none()); } @@ -8510,11 +9750,12 @@ fn test_lock_nonblock() { let lock = ast.locks.pop().unwrap(); assert_eq!(lock.lock_type, LockType::Update); assert_eq!( - lock.of.unwrap().0, - vec![Ident { + lock.of.unwrap(), + ObjectName::from(vec![Ident { value: "school".to_string(), - quote_style: None - }] + quote_style: None, + span: Span::empty(), + }]) ); assert_eq!(lock.nonblock.unwrap(), NonBlock::SkipLocked); @@ -8524,11 +9765,12 @@ fn test_lock_nonblock() { let lock = ast.locks.pop().unwrap(); assert_eq!(lock.lock_type, LockType::Share); assert_eq!( - lock.of.unwrap().0, - vec![Ident { + lock.of.unwrap(), + ObjectName::from(vec![Ident { value: "school".to_string(), - quote_style: None - }] + quote_style: None, + span: Span::empty(), + }]) ); assert_eq!(lock.nonblock.unwrap(), NonBlock::Nowait); } @@ -8553,23 +9795,24 @@ fn test_placeholder() { Some(Expr::BinaryOp { left: Box::new(Expr::Identifier(Ident::new("id"))), op: BinaryOperator::Eq, - right: Box::new(Expr::Value(Value::Placeholder("$Id1".into()))), + right: Box::new(Expr::Value( + (Value::Placeholder("$Id1".into())).with_empty_span() + )), }) ); - let sql = "SELECT * FROM student LIMIT $1 OFFSET $2"; - let ast = dialects.verified_query(sql); - assert_eq!( - ast.limit, - Some(Expr::Value(Value::Placeholder("$1".into()))) - ); - assert_eq!( - ast.offset, - Some(Offset { - value: Expr::Value(Value::Placeholder("$2".into())), + let ast = dialects.verified_query("SELECT * FROM student LIMIT $1 OFFSET $2"); + let expected_limit_clause = LimitClause::LimitOffset { + limit: Some(Expr::Value( + (Value::Placeholder("$1".into())).with_empty_span(), + )), + offset: Some(Offset { + value: Expr::Value((Value::Placeholder("$2".into())).with_empty_span()), rows: OffsetRows::None, }), - ); + limit_by: vec![], + }; + assert_eq!(ast.limit_clause, Some(expected_limit_clause)); let dialects = TestedDialects::new(vec![ Box::new(GenericDialect {}), @@ -8590,7 +9833,9 @@ fn test_placeholder() { Some(Expr::BinaryOp { left: Box::new(Expr::Identifier(Ident::new("id"))), op: BinaryOperator::Eq, - right: Box::new(Expr::Value(Value::Placeholder("?".into()))), + right: Box::new(Expr::Value( + (Value::Placeholder("?".into())).with_empty_span() + )), }) ); @@ -8599,9 +9844,15 @@ fn test_placeholder() { assert_eq!( ast.projection, vec![ 
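
// A minimal sketch (not part of this patch): positional placeholders such as
// `$1`/`$2` parse as `Value::Placeholder` and round-trip unchanged; PostgreSQL
// is picked here as one dialect that accepts them.
#[test]
fn sketch_placeholder_roundtrip() {
    use sqlparser::dialect::PostgreSqlDialect;
    use sqlparser::parser::Parser;

    let sql = "SELECT * FROM student LIMIT $1 OFFSET $2";
    let stmt = Parser::parse_sql(&PostgreSqlDialect {}, sql).unwrap().remove(0);
    assert_eq!(stmt.to_string(), sql);
}
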
- UnnamedExpr(Expr::Value(Value::Placeholder("$fromage_français".into()))), - UnnamedExpr(Expr::Value(Value::Placeholder(":x".into()))), - UnnamedExpr(Expr::Value(Value::Placeholder("?123".into()))), + UnnamedExpr(Expr::Value( + (Value::Placeholder("$fromage_français".into())).with_empty_span() + )), + UnnamedExpr(Expr::Value( + (Value::Placeholder(":x".into())).with_empty_span() + )), + UnnamedExpr(Expr::Value( + (Value::Placeholder("?123".into())).with_empty_span() + )), ] ); } @@ -8641,48 +9892,47 @@ fn verified_expr(query: &str) -> Expr { #[test] fn parse_offset_and_limit() { let sql = "SELECT foo FROM bar LIMIT 1 OFFSET 2"; - let expect = Some(Offset { - value: Expr::Value(number("2")), - rows: OffsetRows::None, + let expected_limit_clause = Some(LimitClause::LimitOffset { + limit: Some(Expr::value(number("1"))), + offset: Some(Offset { + value: Expr::value(number("2")), + rows: OffsetRows::None, + }), + limit_by: vec![], }); let ast = verified_query(sql); - assert_eq!(ast.offset, expect); - assert_eq!(ast.limit, Some(Expr::Value(number("1")))); + assert_eq!(ast.limit_clause, expected_limit_clause); // different order is OK one_statement_parses_to("SELECT foo FROM bar OFFSET 2 LIMIT 1", sql); // mysql syntax is ok for some dialects - TestedDialects::new(vec![ - Box::new(GenericDialect {}), - Box::new(MySqlDialect {}), - Box::new(SQLiteDialect {}), - Box::new(ClickHouseDialect {}), - ]) - .one_statement_parses_to("SELECT foo FROM bar LIMIT 2, 1", sql); + all_dialects_where(|d| d.supports_limit_comma()) + .verified_query("SELECT foo FROM bar LIMIT 2, 1"); // expressions are allowed let sql = "SELECT foo FROM bar LIMIT 1 + 2 OFFSET 3 * 4"; let ast = verified_query(sql); - assert_eq!( - ast.limit, - Some(Expr::BinaryOp { - left: Box::new(Expr::Value(number("1"))), + let expected_limit_clause = LimitClause::LimitOffset { + limit: Some(Expr::BinaryOp { + left: Box::new(Expr::value(number("1"))), op: BinaryOperator::Plus, - right: Box::new(Expr::Value(number("2"))), + right: Box::new(Expr::value(number("2"))), }), - ); - assert_eq!( - ast.offset, - Some(Offset { + offset: Some(Offset { value: Expr::BinaryOp { - left: Box::new(Expr::Value(number("3"))), + left: Box::new(Expr::value(number("3"))), op: BinaryOperator::Multiply, - right: Box::new(Expr::Value(number("4"))), + right: Box::new(Expr::value(number("4"))), }, rows: OffsetRows::None, }), - ); + limit_by: vec![], + }; + assert_eq!(ast.limit_clause, Some(expected_limit_clause),); + + // OFFSET without LIMIT + verified_stmt("SELECT foo FROM bar OFFSET 2"); // Can't repeat OFFSET / LIMIT let res = parse_sql_statements("SELECT foo FROM bar OFFSET 2 OFFSET 2"); @@ -8710,7 +9960,8 @@ fn parse_time_functions() { let sql = format!("SELECT {}()", func_name); let select = verified_only_select(&sql); let select_localtime_func_call_ast = Function { - name: ObjectName(vec![Ident::new(func_name)]), + name: ObjectName::from(vec![Ident::new(func_name)]), + uses_odbc_syntax: false, parameters: FunctionArguments::None, args: FunctionArguments::List(FunctionArgumentList { duplicate_treatment: None, @@ -8748,7 +9999,9 @@ fn parse_time_functions() { fn parse_position() { assert_eq!( Expr::Position { - expr: Box::new(Expr::Value(Value::SingleQuotedString("@".to_string()))), + expr: Box::new(Expr::Value( + (Value::SingleQuotedString("@".to_string())).with_empty_span() + )), r#in: Box::new(Expr::Identifier(Ident::new("field"))), }, verified_expr("POSITION('@' IN field)"), @@ -8759,9 +10012,9 @@ fn parse_position() { call( "position", [ - 
Expr::Value(Value::SingleQuotedString("an".to_owned())), - Expr::Value(Value::SingleQuotedString("banana".to_owned())), - Expr::Value(number("1")), + Expr::Value((Value::SingleQuotedString("an".to_owned())).with_empty_span()), + Expr::Value((Value::SingleQuotedString("banana".to_owned())).with_empty_span()), + Expr::value(number("1")), ] ), verified_expr("position('an', 'banana', 1)") @@ -8806,6 +10059,46 @@ fn parse_is_boolean() { verified_expr(sql) ); + let sql = "a IS NORMALIZED"; + assert_eq!( + IsNormalized { + expr: Box::new(Identifier(Ident::new("a"))), + form: None, + negated: false, + }, + verified_expr(sql) + ); + + let sql = "a IS NOT NORMALIZED"; + assert_eq!( + IsNormalized { + expr: Box::new(Identifier(Ident::new("a"))), + form: None, + negated: true, + }, + verified_expr(sql) + ); + + let sql = "a IS NFKC NORMALIZED"; + assert_eq!( + IsNormalized { + expr: Box::new(Identifier(Ident::new("a"))), + form: Some(NormalizationForm::NFKC), + negated: false, + }, + verified_expr(sql) + ); + + let sql = "a IS NOT NFKD NORMALIZED"; + assert_eq!( + IsNormalized { + expr: Box::new(Identifier(Ident::new("a"))), + form: Some(NormalizationForm::NFKD), + negated: true, + }, + verified_expr(sql) + ); + let sql = "a IS UNKNOWN"; assert_eq!( IsUnknown(Box::new(Identifier(Ident::new("a")))), @@ -8824,6 +10117,12 @@ fn parse_is_boolean() { verified_stmt("SELECT f FROM foo WHERE field IS FALSE"); verified_stmt("SELECT f FROM foo WHERE field IS NOT FALSE"); + verified_stmt("SELECT f FROM foo WHERE field IS NORMALIZED"); + verified_stmt("SELECT f FROM foo WHERE field IS NFC NORMALIZED"); + verified_stmt("SELECT f FROM foo WHERE field IS NFD NORMALIZED"); + verified_stmt("SELECT f FROM foo WHERE field IS NOT NORMALIZED"); + verified_stmt("SELECT f FROM foo WHERE field IS NOT NFKC NORMALIZED"); + verified_stmt("SELECT f FROM foo WHERE field IS UNKNOWN"); verified_stmt("SELECT f FROM foo WHERE field IS NOT UNKNOWN"); @@ -8831,7 +10130,37 @@ fn parse_is_boolean() { let res = parse_sql_statements(sql); assert_eq!( ParserError::ParserError( - "Expected: [NOT] NULL or TRUE|FALSE or [NOT] DISTINCT FROM after IS, found: 0" + "Expected: [NOT] NULL | TRUE | FALSE | DISTINCT | [form] NORMALIZED FROM after IS, found: 0" + .to_string() + ), + res.unwrap_err() + ); + + let sql = "SELECT s, s IS XYZ NORMALIZED FROM foo"; + let res = parse_sql_statements(sql); + assert_eq!( + ParserError::ParserError( + "Expected: [NOT] NULL | TRUE | FALSE | DISTINCT | [form] NORMALIZED FROM after IS, found: XYZ" + .to_string() + ), + res.unwrap_err() + ); + + let sql = "SELECT s, s IS NFKC FROM foo"; + let res = parse_sql_statements(sql); + assert_eq!( + ParserError::ParserError( + "Expected: [NOT] NULL | TRUE | FALSE | DISTINCT | [form] NORMALIZED FROM after IS, found: FROM" + .to_string() + ), + res.unwrap_err() + ); + + let sql = "SELECT s, s IS TRIM(' NFKC ') FROM foo"; + let res = parse_sql_statements(sql); + assert_eq!( + ParserError::ParserError( + "Expected: [NOT] NULL | TRUE | FALSE | DISTINCT | [form] NORMALIZED FROM after IS, found: TRIM" .to_string() ), res.unwrap_err() @@ -8906,7 +10235,7 @@ fn parse_cache_table() { verified_stmt(format!("CACHE TABLE '{cache_table_name}'").as_str()), Statement::Cache { table_flag: None, - table_name: ObjectName(vec![Ident::with_quote('\'', cache_table_name)]), + table_name: ObjectName::from(vec![Ident::with_quote('\'', cache_table_name)]), has_as: false, options: vec![], query: None, @@ -8916,8 +10245,8 @@ fn parse_cache_table() { assert_eq!( verified_stmt(format!("CACHE {table_flag} 
TABLE '{cache_table_name}'").as_str()), Statement::Cache { - table_flag: Some(ObjectName(vec![Ident::new(table_flag)])), - table_name: ObjectName(vec![Ident::with_quote('\'', cache_table_name)]), + table_flag: Some(ObjectName::from(vec![Ident::new(table_flag)])), + table_name: ObjectName::from(vec![Ident::with_quote('\'', cache_table_name)]), has_as: false, options: vec![], query: None, @@ -8932,17 +10261,17 @@ fn parse_cache_table() { .as_str() ), Statement::Cache { - table_flag: Some(ObjectName(vec![Ident::new(table_flag)])), - table_name: ObjectName(vec![Ident::with_quote('\'', cache_table_name)]), + table_flag: Some(ObjectName::from(vec![Ident::new(table_flag)])), + table_name: ObjectName::from(vec![Ident::with_quote('\'', cache_table_name)]), has_as: false, options: vec![ SqlOption::KeyValue { key: Ident::with_quote('\'', "K1"), - value: Expr::Value(Value::SingleQuotedString("V1".into())), + value: Expr::Value((Value::SingleQuotedString("V1".into())).with_empty_span()), }, SqlOption::KeyValue { key: Ident::with_quote('\'', "K2"), - value: Expr::Value(number("0.88")), + value: Expr::value(number("0.88")), }, ], query: None, @@ -8954,20 +10283,20 @@ fn parse_cache_table() { format!( "CACHE {table_flag} TABLE '{cache_table_name}' OPTIONS('K1' = 'V1', 'K2' = 0.88) {sql}", ) - .as_str() + .as_str() ), Statement::Cache { - table_flag: Some(ObjectName(vec![Ident::new(table_flag)])), - table_name: ObjectName(vec![Ident::with_quote('\'', cache_table_name)]), + table_flag: Some(ObjectName::from(vec![Ident::new(table_flag)])), + table_name: ObjectName::from(vec![Ident::with_quote('\'', cache_table_name)]), has_as: false, options: vec![ SqlOption::KeyValue { key: Ident::with_quote('\'', "K1"), - value: Expr::Value(Value::SingleQuotedString("V1".into())), + value: Expr::Value((Value::SingleQuotedString("V1".into())).with_empty_span()), }, SqlOption::KeyValue { key: Ident::with_quote('\'', "K2"), - value: Expr::Value(number("0.88")), + value: Expr::value(number("0.88")), }, ], query: Some(query.clone().into()), @@ -8979,20 +10308,20 @@ fn parse_cache_table() { format!( "CACHE {table_flag} TABLE '{cache_table_name}' OPTIONS('K1' = 'V1', 'K2' = 0.88) AS {sql}", ) - .as_str() + .as_str() ), Statement::Cache { - table_flag: Some(ObjectName(vec![Ident::new(table_flag)])), - table_name: ObjectName(vec![Ident::with_quote('\'', cache_table_name)]), + table_flag: Some(ObjectName::from(vec![Ident::new(table_flag)])), + table_name: ObjectName::from(vec![Ident::with_quote('\'', cache_table_name)]), has_as: true, options: vec![ SqlOption::KeyValue { key: Ident::with_quote('\'', "K1"), - value: Expr::Value(Value::SingleQuotedString("V1".into())), + value: Expr::Value((Value::SingleQuotedString("V1".into())).with_empty_span()), }, SqlOption::KeyValue { key: Ident::with_quote('\'', "K2"), - value: Expr::Value(number("0.88")), + value: Expr::value(number("0.88")), }, ], query: Some(query.clone().into()), @@ -9002,8 +10331,8 @@ fn parse_cache_table() { assert_eq!( verified_stmt(format!("CACHE {table_flag} TABLE '{cache_table_name}' {sql}").as_str()), Statement::Cache { - table_flag: Some(ObjectName(vec![Ident::new(table_flag)])), - table_name: ObjectName(vec![Ident::with_quote('\'', cache_table_name)]), + table_flag: Some(ObjectName::from(vec![Ident::new(table_flag)])), + table_name: ObjectName::from(vec![Ident::with_quote('\'', cache_table_name)]), has_as: false, options: vec![], query: Some(query.clone().into()), @@ -9013,8 +10342,8 @@ fn parse_cache_table() { assert_eq!( verified_stmt(format!("CACHE {table_flag} 
TABLE '{cache_table_name}' AS {sql}").as_str()), Statement::Cache { - table_flag: Some(ObjectName(vec![Ident::new(table_flag)])), - table_name: ObjectName(vec![Ident::with_quote('\'', cache_table_name)]), + table_flag: Some(ObjectName::from(vec![Ident::new(table_flag)])), + table_name: ObjectName::from(vec![Ident::with_quote('\'', cache_table_name)]), has_as: true, options: vec![], query: Some(query.into()), @@ -9077,7 +10406,7 @@ fn parse_uncache_table() { assert_eq!( verified_stmt("UNCACHE TABLE 'table_name'"), Statement::UNCache { - table_name: ObjectName(vec![Ident::with_quote('\'', "table_name")]), + table_name: ObjectName::from(vec![Ident::with_quote('\'', "table_name")]), if_exists: false, } ); @@ -9085,7 +10414,7 @@ fn parse_uncache_table() { assert_eq!( verified_stmt("UNCACHE TABLE IF EXISTS 'table_name'"), Statement::UNCache { - table_name: ObjectName(vec![Ident::with_quote('\'', "table_name")]), + table_name: ObjectName::from(vec![Ident::with_quote('\'', "table_name")]), if_exists: true, } ); @@ -9189,21 +10518,16 @@ fn parse_with_recursion_limit() { #[test] fn parse_escaped_string_with_unescape() { - fn assert_mysql_query_value(sql: &str, quoted: &str) { - let stmt = TestedDialects::new(vec![ - Box::new(MySqlDialect {}), - Box::new(BigQueryDialect {}), - Box::new(SnowflakeDialect {}), - ]) - .one_statement_parses_to(sql, ""); - - match stmt { + fn assert_mysql_query_value(dialects: &TestedDialects, sql: &str, quoted: &str) { + match dialects.one_statement_parses_to(sql, "") { Statement::Query(query) => match *query.body { SetExpr::Select(value) => { let expr = expr_from_projection(only(&value.projection)); assert_eq!( *expr, - Expr::Value(Value::SingleQuotedString(quoted.to_string())) + Expr::Value( + (Value::SingleQuotedString(quoted.to_string())).with_empty_span() + ) ); } _ => unreachable!(), @@ -9211,17 +10535,38 @@ fn parse_escaped_string_with_unescape() { _ => unreachable!(), }; } + + let escaping_dialects = + &all_dialects_where(|dialect| dialect.supports_string_literal_backslash_escape()); + let no_wildcard_exception = &all_dialects_where(|dialect| { + dialect.supports_string_literal_backslash_escape() && !dialect.ignores_wildcard_escapes() + }); + let with_wildcard_exception = &all_dialects_where(|dialect| { + dialect.supports_string_literal_backslash_escape() && dialect.ignores_wildcard_escapes() + }); + let sql = r"SELECT 'I\'m fine'"; - assert_mysql_query_value(sql, "I'm fine"); + assert_mysql_query_value(escaping_dialects, sql, "I'm fine"); let sql = r#"SELECT 'I''m fine'"#; - assert_mysql_query_value(sql, "I'm fine"); + assert_mysql_query_value(escaping_dialects, sql, "I'm fine"); let sql = r#"SELECT 'I\"m fine'"#; - assert_mysql_query_value(sql, "I\"m fine"); + assert_mysql_query_value(escaping_dialects, sql, "I\"m fine"); let sql = r"SELECT 'Testing: \0 \\ \% \_ \b \n \r \t \Z \a \h \ '"; - assert_mysql_query_value(sql, "Testing: \0 \\ % _ \u{8} \n \r \t \u{1a} \u{7} h "); + assert_mysql_query_value( + no_wildcard_exception, + sql, + "Testing: \0 \\ % _ \u{8} \n \r \t \u{1a} \u{7} h ", + ); + + // check MySQL doesn't remove backslash from escaped LIKE wildcards + assert_mysql_query_value( + with_wildcard_exception, + sql, + "Testing: \0 \\ \\% \\_ \u{8} \n \r \t \u{1a} \u{7} h ", + ); } #[test] @@ -9243,7 +10588,9 @@ fn parse_escaped_string_without_unescape() { let expr = expr_from_projection(only(&value.projection)); assert_eq!( *expr, - Expr::Value(Value::SingleQuotedString(quoted.to_string())) + Expr::Value( + 
(Value::SingleQuotedString(quoted.to_string())).with_empty_span() + ) ); } _ => unreachable!(), @@ -9292,7 +10639,7 @@ fn parse_pivot_table() { verified_only_select(sql).from[0].relation, Pivot { table: Box::new(TableFactor::Table { - name: ObjectName(vec![Ident::new("monthly_sales")]), + name: ObjectName::from(vec![Ident::new("monthly_sales")]), alias: Some(TableAlias { name: Ident::new("a"), columns: vec![] @@ -9302,6 +10649,9 @@ fn parse_pivot_table() { version: None, partitions: vec![], with_ordinality: false, + json_path: None, + sample: None, + index_hints: vec![], }), aggregate_functions: vec![ expected_function("a", None), @@ -9311,11 +10661,13 @@ fn parse_pivot_table() { value_column: vec![Ident::new("a"), Ident::new("MONTH")], value_source: PivotValueSource::List(vec![ ExprWithAlias { - expr: Expr::Value(number("1")), + expr: Expr::value(number("1")), alias: Some(Ident::new("x")) }, ExprWithAlias { - expr: Expr::Value(Value::SingleQuotedString("two".to_string())), + expr: Expr::Value( + (Value::SingleQuotedString("two".to_string())).with_empty_span() + ), alias: None }, ExprWithAlias { @@ -9327,9 +10679,13 @@ fn parse_pivot_table() { alias: Some(TableAlias { name: Ident { value: "p".to_string(), - quote_style: None + quote_style: None, + span: Span::empty(), }, - columns: vec![Ident::new("c"), Ident::new("d")], + columns: vec![ + TableAliasColumnDef::from_name("c"), + TableAliasColumnDef::from_name("d"), + ], }), } ); @@ -9362,7 +10718,7 @@ fn parse_unpivot_table() { verified_only_select(sql).from[0].relation, Unpivot { table: Box::new(TableFactor::Table { - name: ObjectName(vec![Ident::new("sales")]), + name: ObjectName::from(vec![Ident::new("sales")]), alias: Some(TableAlias { name: Ident::new("s"), columns: vec![] @@ -9372,15 +10728,20 @@ fn parse_unpivot_table() { version: None, partitions: vec![], with_ordinality: false, + json_path: None, + sample: None, + index_hints: vec![], }), value: Ident { value: "quantity".to_string(), - quote_style: None + quote_style: None, + span: Span::empty() }, name: Ident { value: "quarter".to_string(), - quote_style: None + quote_style: None, + span: Span::empty() }, columns: ["Q1", "Q2", "Q3", "Q4"] .into_iter() @@ -9390,8 +10751,8 @@ fn parse_unpivot_table() { name: Ident::new("u"), columns: ["product", "quarter", "quantity"] .into_iter() - .map(Ident::new) - .collect() + .map(TableAliasColumnDef::from_name) + .collect(), }), } ); @@ -9417,37 +10778,109 @@ fn parse_unpivot_table() { } #[test] -fn parse_pivot_unpivot_table() { - let sql = concat!( - "SELECT * FROM census AS c ", - "UNPIVOT(population FOR year IN (population_2000, population_2010)) AS u ", - "PIVOT(sum(population) FOR year IN ('population_2000', 'population_2010')) AS p" +fn parse_select_table_with_index_hints() { + let supported_dialects = all_dialects_where(|d| d.supports_table_hints()); + let s = supported_dialects.verified_only_select( + "SELECT * FROM t1 USE INDEX (i1) IGNORE INDEX FOR ORDER BY (i2) ORDER BY a", ); - - pretty_assertions::assert_eq!( - verified_only_select(sql).from[0].relation, - Pivot { - table: Box::new(Unpivot { - table: Box::new(TableFactor::Table { - name: ObjectName(vec![Ident::new("census")]), - alias: Some(TableAlias { - name: Ident::new("c"), - columns: vec![] - }), - args: None, - with_hints: vec![], - version: None, - partitions: vec![], - with_ordinality: false, - }), - value: Ident { - value: "population".to_string(), - quote_style: None + if let TableFactor::Table { index_hints, .. 
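
// A minimal sketch (not part of this patch): a PIVOT over a table surfaces as
// the `TableFactor::Pivot` variant matched in the tests above; the simplified
// SQL and the dialect choice are ours.
#[test]
fn sketch_pivot_table_factor() {
    use sqlparser::ast::{SetExpr, Statement, TableFactor};
    use sqlparser::dialect::GenericDialect;
    use sqlparser::parser::Parser;

    let sql = "SELECT * FROM monthly_sales AS a PIVOT(SUM(a.amount) FOR a.MONTH IN ('JAN', 'FEB')) AS p";
    let stmt = Parser::parse_sql(&GenericDialect {}, sql).unwrap().remove(0);
    if let Statement::Query(query) = stmt {
        if let SetExpr::Select(select) = *query.body {
            assert!(matches!(&select.from[0].relation, TableFactor::Pivot { .. }));
        }
    }
}
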
} = &s.from[0].relation { + assert_eq!( + vec![ + TableIndexHints { + hint_type: TableIndexHintType::Use, + index_names: vec!["i1".into()], + index_type: TableIndexType::Index, + for_clause: None, }, - + TableIndexHints { + hint_type: TableIndexHintType::Ignore, + index_names: vec!["i2".into()], + index_type: TableIndexType::Index, + for_clause: Some(TableIndexHintForClause::OrderBy), + }, + ], + *index_hints + ); + } else { + panic!("Expected TableFactor::Table"); + } + supported_dialects.verified_stmt("SELECT * FROM t1 USE INDEX (i1) USE INDEX (i1, i1)"); + supported_dialects.verified_stmt( + "SELECT * FROM t1 USE INDEX () IGNORE INDEX (i2) USE INDEX (i1) USE INDEX (i2)", + ); + supported_dialects.verified_stmt("SELECT * FROM t1 FORCE INDEX FOR JOIN (i2)"); + supported_dialects.verified_stmt("SELECT * FROM t1 IGNORE INDEX FOR JOIN (i2)"); + supported_dialects.verified_stmt( + "SELECT * FROM t USE INDEX (index1) IGNORE INDEX FOR ORDER BY (index1) IGNORE INDEX FOR GROUP BY (index1) WHERE A = B", + ); + + // Test that dialects that don't support table hints will keep parsing the USE as table alias + let sql = "SELECT * FROM T USE LIMIT 1"; + let unsupported_dialects = all_dialects_where(|d| !d.supports_table_hints()); + let select = unsupported_dialects + .verified_only_select_with_canonical(sql, "SELECT * FROM T AS USE LIMIT 1"); + assert_eq!( + select.from, + vec![TableWithJoins { + relation: TableFactor::Table { + name: ObjectName(vec![sqlparser::ast::ObjectNamePart::Identifier( + Ident::new("T") + )]), + alias: Some(TableAlias { + name: Ident::new("USE"), + columns: vec![], + }), + args: None, + with_hints: vec![], + version: None, + partitions: vec![], + with_ordinality: false, + json_path: None, + sample: None, + index_hints: vec![], + }, + joins: vec![], + }] + ); +} + +#[test] +fn parse_pivot_unpivot_table() { + let sql = concat!( + "SELECT * FROM census AS c ", + "UNPIVOT(population FOR year IN (population_2000, population_2010)) AS u ", + "PIVOT(sum(population) FOR year IN ('population_2000', 'population_2010')) AS p" + ); + + pretty_assertions::assert_eq!( + verified_only_select(sql).from[0].relation, + Pivot { + table: Box::new(Unpivot { + table: Box::new(TableFactor::Table { + name: ObjectName::from(vec![Ident::new("census")]), + alias: Some(TableAlias { + name: Ident::new("c"), + columns: vec![] + }), + args: None, + with_hints: vec![], + version: None, + partitions: vec![], + with_ordinality: false, + json_path: None, + sample: None, + index_hints: vec![], + }), + value: Ident { + value: "population".to_string(), + quote_style: None, + span: Span::empty() + }, + name: Ident { value: "year".to_string(), - quote_style: None + quote_style: None, + span: Span::empty() }, columns: ["population_2000", "population_2010"] .into_iter() @@ -9465,11 +10898,17 @@ fn parse_pivot_unpivot_table() { value_column: vec![Ident::new("year")], value_source: PivotValueSource::List(vec![ ExprWithAlias { - expr: Expr::Value(Value::SingleQuotedString("population_2000".to_string())), + expr: Expr::Value( + (Value::SingleQuotedString("population_2000".to_string())) + .with_empty_span() + ), alias: None }, ExprWithAlias { - expr: Expr::Value(Value::SingleQuotedString("population_2010".to_string())), + expr: Expr::Value( + (Value::SingleQuotedString("population_2010".to_string())) + .with_empty_span() + ), alias: None }, ]), @@ -9586,15 +11025,19 @@ fn parse_trailing_comma() { "Expected: column name or constraint definition, found: )".to_string() ) ); + + let unsupported_dialects = all_dialects_where(|d| 
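
// A minimal sketch (not part of this patch) of the index-hint support tested
// above: on a dialect with table hints (MySQL is assumed here to be one),
// `USE INDEX (...)` is retained in the new `index_hints` field of
// `TableFactor::Table`.
#[test]
fn sketch_use_index_hint() {
    use sqlparser::ast::{SetExpr, Statement, TableFactor};
    use sqlparser::dialect::MySqlDialect;
    use sqlparser::parser::Parser;

    let stmt = Parser::parse_sql(&MySqlDialect {}, "SELECT * FROM t1 USE INDEX (i1)")
        .unwrap()
        .remove(0);
    if let Statement::Query(query) = stmt {
        if let SetExpr::Select(select) = *query.body {
            match &select.from[0].relation {
                TableFactor::Table { index_hints, .. } => assert_eq!(index_hints.len(), 1),
                other => panic!("unexpected relation: {other:?}"),
            }
        }
    }
}
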
!d.supports_trailing_commas()); + assert_eq!( + unsupported_dialects + .parse_sql_statements("SELECT * FROM track ORDER BY milliseconds,") + .unwrap_err(), + ParserError::ParserError("Expected: an expression, found: EOF".to_string()) + ); } #[test] fn parse_projection_trailing_comma() { - // Some dialects allow trailing commas only in the projection - let trailing_commas = TestedDialects::new(vec![ - Box::new(SnowflakeDialect {}), - Box::new(BigQueryDialect {}), - ]); + let trailing_commas = all_dialects_where(|d| d.supports_projection_trailing_commas()); trailing_commas.one_statement_parses_to( "SELECT album_id, name, FROM track", @@ -9607,20 +11050,14 @@ fn parse_projection_trailing_comma() { trailing_commas.verified_stmt("SELECT DISTINCT ON (album_id) name FROM track"); + let unsupported_dialects = all_dialects_where(|d| { + !d.supports_projection_trailing_commas() && !d.supports_trailing_commas() + }); assert_eq!( - trailing_commas - .parse_sql_statements("SELECT * FROM track ORDER BY milliseconds,") - .unwrap_err(), - ParserError::ParserError("Expected: an expression, found: EOF".to_string()) - ); - - assert_eq!( - trailing_commas - .parse_sql_statements("CREATE TABLE employees (name text, age int,)") + unsupported_dialects + .parse_sql_statements("SELECT album_id, name, FROM track") .unwrap_err(), - ParserError::ParserError( - "Expected: column name or constraint definition, found: )".to_string() - ), + ParserError::ParserError("Expected an expression, found: FROM".to_string()) ); } @@ -9630,7 +11067,7 @@ fn parse_create_type() { verified_stmt("CREATE TYPE db.type_name AS (foo INT, bar TEXT COLLATE \"de_DE\")"); assert_eq!( Statement::CreateType { - name: ObjectName(vec![Ident::new("db"), Ident::new("type_name")]), + name: ObjectName::from(vec![Ident::new("db"), Ident::new("type_name")]), representation: UserDefinedTypeRepresentation::Composite { attributes: vec![ UserDefinedTypeCompositeAttributeDef { @@ -9641,7 +11078,7 @@ fn parse_create_type() { UserDefinedTypeCompositeAttributeDef { name: Ident::new("bar"), data_type: DataType::Text, - collation: Some(ObjectName(vec![Ident::with_quote('\"', "de_DE")])), + collation: Some(ObjectName::from(vec![Ident::with_quote('\"', "de_DE")])), } ] } @@ -9713,15 +11150,16 @@ fn parse_call() { assert_eq!( verified_stmt("CALL my_procedure('a')"), Statement::Call(Function { + uses_odbc_syntax: false, parameters: FunctionArguments::None, args: FunctionArguments::List(FunctionArgumentList { duplicate_treatment: None, args: vec![FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::Value( - Value::SingleQuotedString("a".to_string()) + (Value::SingleQuotedString("a".to_string())).with_empty_span() )))], clauses: vec![], }), - name: ObjectName(vec![Ident::new("my_procedure")]), + name: ObjectName::from(vec![Ident::new("my_procedure")]), filter: None, null_treatment: None, over: None, @@ -9730,9 +11168,90 @@ fn parse_call() { ); } +#[test] +fn parse_execute_stored_procedure() { + let expected = Statement::Execute { + name: Some(ObjectName::from(vec![ + Ident { + value: "my_schema".to_string(), + quote_style: None, + span: Span::empty(), + }, + Ident { + value: "my_stored_procedure".to_string(), + quote_style: None, + span: Span::empty(), + }, + ])), + parameters: vec![ + Expr::Value((Value::NationalStringLiteral("param1".to_string())).with_empty_span()), + Expr::Value((Value::NationalStringLiteral("param2".to_string())).with_empty_span()), + ], + has_parentheses: false, + immediate: false, + using: vec![], + into: vec![], + }; + assert_eq!( + // Microsoft 
SQL Server does not use parentheses around arguments for EXECUTE + ms_and_generic() + .verified_stmt("EXECUTE my_schema.my_stored_procedure N'param1', N'param2'"), + expected + ); + assert_eq!( + ms_and_generic().one_statement_parses_to( + "EXEC my_schema.my_stored_procedure N'param1', N'param2';", + "EXECUTE my_schema.my_stored_procedure N'param1', N'param2'", + ), + expected + ); +} + +#[test] +fn parse_execute_immediate() { + let dialects = all_dialects_where(|d| d.supports_execute_immediate()); + + let expected = Statement::Execute { + parameters: vec![Expr::Value( + (Value::SingleQuotedString("SELECT 1".to_string())).with_empty_span(), + )], + immediate: true, + using: vec![ExprWithAlias { + expr: Expr::value(number("1")), + alias: Some(Ident::new("b")), + }], + into: vec![Ident::new("a")], + name: None, + has_parentheses: false, + }; + + let stmt = dialects.verified_stmt("EXECUTE IMMEDIATE 'SELECT 1' INTO a USING 1 AS b"); + assert_eq!(expected, stmt); + + dialects.verified_stmt("EXECUTE IMMEDIATE 'SELECT 1' INTO a, b USING 1 AS x, y"); + dialects.verified_stmt("EXECUTE IMMEDIATE 'SELECT 1' USING 1 AS x, y"); + dialects.verified_stmt("EXECUTE IMMEDIATE 'SELECT 1' INTO a, b"); + dialects.verified_stmt("EXECUTE IMMEDIATE 'SELECT 1'"); + dialects.verified_stmt("EXECUTE 'SELECT 1'"); + + assert_eq!( + ParserError::ParserError("Expected: identifier, found: ,".to_string()), + dialects + .parse_sql_statements("EXECUTE IMMEDIATE 'SELECT 1' USING 1 AS, y") + .unwrap_err() + ); +} + #[test] fn parse_create_table_collate() { - pg_and_generic().verified_stmt("CREATE TABLE tbl (foo INT, bar TEXT COLLATE \"de_DE\")"); + all_dialects().verified_stmt("CREATE TABLE tbl (foo INT, bar TEXT COLLATE \"de_DE\")"); + // check ordering is preserved + all_dialects().verified_stmt( + "CREATE TABLE tbl (foo INT, bar TEXT CHARACTER SET utf8mb4 COLLATE \"de_DE\")", + ); + all_dialects().verified_stmt( + "CREATE TABLE tbl (foo INT, bar TEXT COLLATE \"de_DE\" CHARACTER SET utf8mb4)", + ); } #[test] @@ -9801,20 +11320,14 @@ fn parse_unload() { Statement::Unload { query: Box::new(Query { body: Box::new(SetExpr::Select(Box::new(Select { + select_token: AttachedToken::empty(), distinct: None, top: None, + top_before_distinct: false, projection: vec![UnnamedExpr(Expr::Identifier(Ident::new("cola"))),], into: None, from: vec![TableWithJoins { - relation: TableFactor::Table { - name: ObjectName(vec![Ident::new("tab")]), - alias: None, - args: None, - with_hints: vec![], - version: None, - partitions: vec![], - with_ordinality: false, - }, + relation: table_from_name(ObjectName::from(vec![Ident::new("tab")])), joins: vec![], }], lateral_views: vec![], @@ -9830,28 +11343,32 @@ fn parse_unload() { qualify: None, value_table_mode: None, connect_by: None, + flavor: SelectFlavor::Standard, }))), with: None, - limit: None, - limit_by: vec![], - offset: None, + limit_clause: None, fetch: None, locks: vec![], for_clause: None, order_by: None, settings: None, format_clause: None, + pipe_operators: vec![], }), to: Ident { value: "s3://...".to_string(), - quote_style: Some('\'') + quote_style: Some('\''), + span: Span::empty(), }, with: vec![SqlOption::KeyValue { key: Ident { value: "format".to_string(), - quote_style: None + quote_style: None, + span: Span::empty(), }, - value: Expr::Value(Value::SingleQuotedString("AVRO".to_string())) + value: Expr::Value( + (Value::SingleQuotedString("AVRO".to_string())).with_empty_span() + ) }] } ); @@ -9885,6 +11402,7 @@ fn test_comment_hash_syntax() { Box::new(BigQueryDialect {}), 
Box::new(SnowflakeDialect {}), Box::new(MySqlDialect {}), + Box::new(HiveDialect {}), ]); let sql = r#" # comment @@ -9906,14 +11424,15 @@ fn test_parse_inline_comment() { // [Hive](https://cwiki.apache.org/confluence/display/Hive/LanguageManual+DDL#LanguageManualDDL-CreateTable) match all_dialects_except(|d| d.is::()).verified_stmt(sql) { Statement::CreateTable(CreateTable { - columns, comment, .. + columns, + table_options, + .. }) => { assert_eq!( columns, vec![ColumnDef { name: Ident::new("id".to_string()), data_type: DataType::Int(None), - collation: None, options: vec![ColumnOptionDef { name: None, option: Comment("comment without equal".to_string()), @@ -9921,8 +11440,10 @@ fn test_parse_inline_comment() { }] ); assert_eq!( - comment.unwrap(), - CommentDef::WithEq("comment with equal".to_string()) + table_options, + CreateTableOptions::Plain(vec![SqlOption::Comment(CommentDef::WithEq( + "comment with equal".to_string() + ))]) ); } _ => unreachable!(), @@ -9950,20 +11471,39 @@ fn parse_map_access_expr() { Box::new(ClickHouseDialect {}), ]); let expr = dialects.verified_expr(sql); - let expected = Expr::MapAccess { - column: Expr::Identifier(Ident::new("users")).into(), - keys: vec![ - MapAccessKey { - key: Expr::UnaryOp { + let expected = Expr::CompoundFieldAccess { + root: Box::new(Expr::Identifier(Ident::with_span( + Span::new(Location::of(1, 1), Location::of(1, 6)), + "users", + ))), + access_chain: vec![ + AccessExpr::Subscript(Subscript::Index { + index: Expr::UnaryOp { op: UnaryOperator::Minus, - expr: Expr::Value(number("1")).into(), + expr: Expr::value(number("1")).into(), }, - syntax: MapAccessSyntax::Bracket, - }, - MapAccessKey { - key: call("safe_offset", [Expr::Value(number("2"))]), - syntax: MapAccessSyntax::Bracket, - }, + }), + AccessExpr::Subscript(Subscript::Index { + index: Expr::Function(Function { + name: ObjectName::from(vec![Ident::with_span( + Span::new(Location::of(1, 11), Location::of(1, 22)), + "safe_offset", + )]), + parameters: FunctionArguments::None, + args: FunctionArguments::List(FunctionArgumentList { + duplicate_treatment: None, + args: vec![FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::Value( + (number("2")).with_empty_span(), + )))], + clauses: vec![], + }), + filter: None, + null_treatment: None, + over: None, + within_group: vec![], + uses_odbc_syntax: false, + }), + }), ], }; assert_eq!(expr, expected); @@ -9976,23 +11516,17 @@ fn parse_map_access_expr() { #[test] fn parse_connect_by() { let expect_query = Select { + select_token: AttachedToken::empty(), distinct: None, top: None, + top_before_distinct: false, projection: vec![ SelectItem::UnnamedExpr(Expr::Identifier(Ident::new("employee_id"))), SelectItem::UnnamedExpr(Expr::Identifier(Ident::new("manager_id"))), SelectItem::UnnamedExpr(Expr::Identifier(Ident::new("title"))), ], from: vec![TableWithJoins { - relation: TableFactor::Table { - name: ObjectName(vec![Ident::new("employees")]), - alias: None, - args: None, - with_hints: vec![], - version: None, - partitions: vec![], - with_ordinality: false, - }, + relation: table_from_name(ObjectName::from(vec![Ident::new("employees")])), joins: vec![], }], into: None, @@ -10012,9 +11546,9 @@ fn parse_connect_by() { condition: Expr::BinaryOp { left: Box::new(Expr::Identifier(Ident::new("title"))), op: BinaryOperator::Eq, - right: Box::new(Expr::Value(Value::SingleQuotedString( - "president".to_owned(), - ))), + right: Box::new(Expr::Value( + Value::SingleQuotedString("president".to_owned()).with_empty_span(), + )), }, relationships: 
vec![Expr::BinaryOp { left: Box::new(Expr::Identifier(Ident::new("manager_id"))), @@ -10024,6 +11558,7 @@ fn parse_connect_by() { ))))), }], }), + flavor: SelectFlavor::Standard, }; let connect_by_1 = concat!( @@ -10062,23 +11597,17 @@ fn parse_connect_by() { assert_eq!( all_dialects_where(|d| d.supports_connect_by()).verified_only_select(connect_by_3), Select { + select_token: AttachedToken::empty(), distinct: None, top: None, + top_before_distinct: false, projection: vec![ SelectItem::UnnamedExpr(Expr::Identifier(Ident::new("employee_id"))), SelectItem::UnnamedExpr(Expr::Identifier(Ident::new("manager_id"))), SelectItem::UnnamedExpr(Expr::Identifier(Ident::new("title"))), ], from: vec![TableWithJoins { - relation: TableFactor::Table { - name: ObjectName(vec![Ident::new("employees")]), - alias: None, - args: None, - with_hints: vec![], - version: None, - partitions: vec![], - with_ordinality: false, - }, + relation: table_from_name(ObjectName::from(vec![Ident::new("employees")])), joins: vec![], }], into: None, @@ -10087,7 +11616,7 @@ fn parse_connect_by() { selection: Some(Expr::BinaryOp { left: Box::new(Expr::Identifier(Ident::new("employee_id"))), op: BinaryOperator::NotEq, - right: Box::new(Expr::Value(number("42"))), + right: Box::new(Expr::value(number("42"))), }), group_by: GroupByExpr::Expressions(vec![], vec![]), cluster_by: vec![], @@ -10102,9 +11631,9 @@ fn parse_connect_by() { condition: Expr::BinaryOp { left: Box::new(Expr::Identifier(Ident::new("title"))), op: BinaryOperator::Eq, - right: Box::new(Expr::Value(Value::SingleQuotedString( - "president".to_owned(), - ))), + right: Box::new(Expr::Value( + (Value::SingleQuotedString("president".to_owned(),)).with_empty_span() + )), }, relationships: vec![Expr::BinaryOp { left: Box::new(Expr::Identifier(Ident::new("manager_id"))), @@ -10114,6 +11643,7 @@ fn parse_connect_by() { ))))), }], }), + flavor: SelectFlavor::Standard, } ); @@ -10142,6 +11672,20 @@ fn parse_connect_by() { #[test] fn test_selective_aggregation() { + let testing_dialects = all_dialects_where(|d| d.supports_filter_during_aggregation()); + let expected_dialects: Vec> = vec![ + Box::new(PostgreSqlDialect {}), + Box::new(DatabricksDialect {}), + Box::new(HiveDialect {}), + Box::new(SQLiteDialect {}), + Box::new(DuckDbDialect {}), + Box::new(GenericDialect {}), + ]; + assert_eq!(testing_dialects.dialects.len(), expected_dialects.len()); + expected_dialects + .into_iter() + .for_each(|d| assert!(d.supports_filter_during_aggregation())); + let sql = concat!( "SELECT ", "ARRAY_AGG(name) FILTER (WHERE name IS NOT NULL), ", @@ -10149,12 +11693,11 @@ fn test_selective_aggregation() { "FROM region" ); assert_eq!( - all_dialects_where(|d| d.supports_filter_during_aggregation()) - .verified_only_select(sql) - .projection, + testing_dialects.verified_only_select(sql).projection, vec![ SelectItem::UnnamedExpr(Expr::Function(Function { - name: ObjectName(vec![Ident::new("ARRAY_AGG")]), + name: ObjectName::from(vec![Ident::new("ARRAY_AGG")]), + uses_odbc_syntax: false, parameters: FunctionArguments::None, args: FunctionArguments::List(FunctionArgumentList { duplicate_treatment: None, @@ -10172,7 +11715,8 @@ fn test_selective_aggregation() { })), SelectItem::ExprWithAlias { expr: Expr::Function(Function { - name: ObjectName(vec![Ident::new("ARRAY_AGG")]), + name: ObjectName::from(vec![Ident::new("ARRAY_AGG")]), + uses_odbc_syntax: false, parameters: FunctionArguments::None, args: FunctionArguments::List(FunctionArgumentList { duplicate_treatment: None, @@ -10184,7 +11728,9 @@ fn 
test_selective_aggregation() { filter: Some(Box::new(Expr::Like { negated: false, expr: Box::new(Expr::Identifier(Ident::new("name"))), - pattern: Box::new(Expr::Value(Value::SingleQuotedString("a%".to_owned()))), + pattern: Box::new(Expr::Value( + (Value::SingleQuotedString("a%".to_owned())).with_empty_span() + )), escape_char: None, any: false, })), @@ -10225,21 +11771,51 @@ fn test_group_by_grouping_sets() { ); } +#[test] +fn test_xmltable() { + all_dialects() + .verified_only_select("SELECT * FROM XMLTABLE('/root' PASSING data COLUMNS element TEXT)"); + + // Minimal meaningful working example: returns a single row with a single column named y containing the value z + all_dialects().verified_only_select( + "SELECT y FROM XMLTABLE('/X' PASSING 'z' COLUMNS y TEXT)", + ); + + // Test using subqueries + all_dialects().verified_only_select("SELECT y FROM XMLTABLE((SELECT '/X') PASSING (SELECT CAST('z' AS xml)) COLUMNS y TEXT PATH (SELECT 'y'))"); + + // NOT NULL + all_dialects().verified_only_select( + "SELECT y FROM XMLTABLE('/X' PASSING '' COLUMNS y TEXT NOT NULL)", + ); + + all_dialects().verified_only_select("SELECT * FROM XMLTABLE('/root/row' PASSING xmldata COLUMNS id INT PATH '@id', name TEXT PATH 'name/text()', value FLOAT PATH 'value')"); + + all_dialects().verified_only_select("SELECT * FROM XMLTABLE('//ROWS/ROW' PASSING data COLUMNS row_num FOR ORDINALITY, id INT PATH '@id', name TEXT PATH 'NAME' DEFAULT 'unnamed')"); + + // Example from https://www.postgresql.org/docs/15/functions-xml.html#FUNCTIONS-XML-PROCESSING + all_dialects().verified_only_select( + "SELECT xmltable.* FROM xmldata, XMLTABLE('//ROWS/ROW' PASSING data COLUMNS id INT PATH '@id', ordinality FOR ORDINALITY, \"COUNTRY_NAME\" TEXT, country_id TEXT PATH 'COUNTRY_ID', size_sq_km FLOAT PATH 'SIZE[@unit = \"sq_km\"]', size_other TEXT PATH 'concat(SIZE[@unit!=\"sq_km\"], \" \", SIZE[@unit!=\"sq_km\"]/@unit)', premier_name TEXT PATH 'PREMIER_NAME' DEFAULT 'not specified')" + ); + + // Example from DB2 docs without explicit PASSING clause: https://www.ibm.com/docs/en/db2/12.1.0?topic=xquery-simple-column-name-passing-xmlexists-xmlquery-xmltable + all_dialects().verified_only_select( + "SELECT X.* FROM T1, XMLTABLE('$CUSTLIST/customers/customerinfo' COLUMNS \"Cid\" BIGINT PATH '@Cid', \"Info\" XML PATH 'document{.}', \"History\" XML PATH 'NULL') AS X" + ); + + // Example from PostgreSQL with XMLNAMESPACES + all_dialects().verified_only_select( + "SELECT xmltable.* FROM XMLTABLE(XMLNAMESPACES('http://example.com/myns' AS x, 'http://example.com/b' AS \"B\"), '/x:example/x:item' PASSING (SELECT data FROM xmldata) COLUMNS foo INT PATH '@foo', bar INT PATH '@B:bar')" + ); +} + #[test] fn test_match_recognize() { use MatchRecognizePattern::*; use MatchRecognizeSymbol::*; use RepetitionQuantifier::*; - let table = TableFactor::Table { - name: ObjectName(vec![Ident::new("my_table")]), - alias: None, - args: None, - with_hints: vec![], - version: None, - partitions: vec![], - with_ordinality: false, - }; + let table = table_from_name(ObjectName::from(vec![Ident::new("my_table")])); fn check(options: &str, expect: TableFactor) { let select = all_dialects_where(|d| d.supports_match_recognize()).verified_only_select( @@ -10268,8 +11844,10 @@ fn test_match_recognize() { partition_by: vec![Expr::Identifier(Ident::new("company"))], order_by: vec![OrderByExpr { expr: Expr::Identifier(Ident::new("price_date")), - asc: None, - nulls_first: None, + options: OrderByOptions { + asc: None, + nulls_first: None, + }, with_fill: None, }], 
measures: vec![ @@ -10548,7 +12126,9 @@ fn test_select_wildcard_with_replace() { let expected = SelectItem::Wildcard(WildcardAdditionalOptions { opt_replace: Some(ReplaceSelectItem { items: vec![Box::new(ReplaceSelectElement { - expr: Expr::Value(Value::SingleQuotedString("widget".to_owned())), + expr: Expr::Value( + (Value::SingleQuotedString("widget".to_owned())).with_empty_span(), + ), column_name: Ident::new("item_name"), as_keyword: true, })], @@ -10567,13 +12147,13 @@ fn test_select_wildcard_with_replace() { expr: Expr::BinaryOp { left: Box::new(Expr::Identifier(Ident::new("quantity"))), op: BinaryOperator::Divide, - right: Box::new(Expr::Value(number("2"))), + right: Box::new(Expr::value(number("2"))), }, column_name: Ident::new("quantity"), as_keyword: true, }), Box::new(ReplaceSelectElement { - expr: Expr::Value(number("3")), + expr: Expr::value(number("3")), column_name: Ident::new("order_id"), as_keyword: true, }), @@ -10607,6 +12187,37 @@ fn insert_into_with_parentheses() { Box::new(GenericDialect {}), ]); dialects.verified_stmt("INSERT INTO t1 (id, name) (SELECT t2.id, t2.name FROM t2)"); + dialects.verified_stmt("INSERT INTO t1 (SELECT t2.id, t2.name FROM t2)"); + dialects.verified_stmt(r#"INSERT INTO t1 ("select", name) (SELECT t2.name FROM t2)"#); +} + +#[test] +fn parse_odbc_scalar_function() { + let select = verified_only_select("SELECT {fn my_func(1, 2)}"); + let Expr::Function(Function { + name, + uses_odbc_syntax, + args, + .. + }) = expr_from_projection(only(&select.projection)) + else { + unreachable!("expected function") + }; + assert_eq!(name, &ObjectName::from(vec![Ident::new("my_func")])); + assert!(uses_odbc_syntax); + matches!(args, FunctionArguments::List(l) if l.args.len() == 2); + + verified_stmt("SELECT {fn fna()} AS foo, fnb(1)"); + + // Testing invalid SQL with any-one dialect is intentional. + // Depending on dialect flags the error message may be different. 
+ let pg = TestedDialects::new(vec![Box::new(PostgreSqlDialect {})]); + assert_eq!( + pg.parse_sql_statements("SELECT {fn2 my_func()}") + .unwrap_err() + .to_string(), + "sql parser error: Expected: an expression, found: {" + ); } #[test] @@ -10618,20 +12229,22 @@ fn test_dictionary_syntax() { ); } + check("{}", Expr::Dictionary(vec![])); + check( "{'Alberta': 'Edmonton', 'Manitoba': 'Winnipeg'}", Expr::Dictionary(vec![ DictionaryField { key: Ident::with_quote('\'', "Alberta"), - value: Box::new(Expr::Value(Value::SingleQuotedString( - "Edmonton".to_owned(), - ))), + value: Box::new(Expr::Value( + (Value::SingleQuotedString("Edmonton".to_owned())).with_empty_span(), + )), }, DictionaryField { key: Ident::with_quote('\'', "Manitoba"), - value: Box::new(Expr::Value(Value::SingleQuotedString( - "Winnipeg".to_owned(), - ))), + value: Box::new(Expr::Value( + (Value::SingleQuotedString("Winnipeg".to_owned())).with_empty_span(), + )), }, ]), ); @@ -10643,9 +12256,9 @@ fn test_dictionary_syntax() { key: Ident::with_quote('\'', "start"), value: Box::new(Expr::Cast { kind: CastKind::Cast, - expr: Box::new(Expr::Value(Value::SingleQuotedString( - "2023-04-01".to_owned(), - ))), + expr: Box::new(Expr::Value( + (Value::SingleQuotedString("2023-04-01".to_owned())).with_empty_span(), + )), data_type: DataType::Timestamp(None, TimezoneInfo::None), format: None, }), @@ -10654,9 +12267,9 @@ fn test_dictionary_syntax() { key: Ident::with_quote('\'', "end"), value: Box::new(Expr::Cast { kind: CastKind::Cast, - expr: Box::new(Expr::Value(Value::SingleQuotedString( - "2023-04-05".to_owned(), - ))), + expr: Box::new(Expr::Value( + (Value::SingleQuotedString("2023-04-05".to_owned())).with_empty_span(), + )), data_type: DataType::Timestamp(None, TimezoneInfo::None), format: None, }), @@ -10679,25 +12292,27 @@ fn test_map_syntax() { Expr::Map(Map { entries: vec![ MapEntry { - key: Box::new(Expr::Value(Value::SingleQuotedString("Alberta".to_owned()))), - value: Box::new(Expr::Value(Value::SingleQuotedString( - "Edmonton".to_owned(), - ))), + key: Box::new(Expr::Value( + (Value::SingleQuotedString("Alberta".to_owned())).with_empty_span(), + )), + value: Box::new(Expr::Value( + (Value::SingleQuotedString("Edmonton".to_owned())).with_empty_span(), + )), }, MapEntry { - key: Box::new(Expr::Value(Value::SingleQuotedString( - "Manitoba".to_owned(), - ))), - value: Box::new(Expr::Value(Value::SingleQuotedString( - "Winnipeg".to_owned(), - ))), + key: Box::new(Expr::Value( + (Value::SingleQuotedString("Manitoba".to_owned())).with_empty_span(), + )), + value: Box::new(Expr::Value( + (Value::SingleQuotedString("Winnipeg".to_owned())).with_empty_span(), + )), }, ], }), ); fn number_expr(s: &str) -> Expr { - Expr::Value(number(s)) + Expr::value(number(s)) } check( @@ -10725,14 +12340,14 @@ fn test_map_syntax() { elem: vec![number_expr("1"), number_expr("2"), number_expr("3")], named: false, })), - value: Box::new(Expr::Value(number("10.0"))), + value: Box::new(Expr::value(number("10.0"))), }, MapEntry { key: Box::new(Expr::Array(Array { elem: vec![number_expr("4"), number_expr("5"), number_expr("6")], named: false, })), - value: Box::new(Expr::Value(number("20.0"))), + value: Box::new(Expr::value(number("20.0"))), }, ], }), @@ -10740,22 +12355,26 @@ fn test_map_syntax() { check( "MAP {'a': 10, 'b': 20}['a']", - Expr::Subscript { - expr: Box::new(Expr::Map(Map { + Expr::CompoundFieldAccess { + root: Box::new(Expr::Map(Map { entries: vec![ MapEntry { - key: Box::new(Expr::Value(Value::SingleQuotedString("a".to_owned()))), + key: 
Box::new(Expr::Value( + (Value::SingleQuotedString("a".to_owned())).with_empty_span(), + )), value: Box::new(number_expr("10")), }, MapEntry { - key: Box::new(Expr::Value(Value::SingleQuotedString("b".to_owned()))), + key: Box::new(Expr::Value( + (Value::SingleQuotedString("b".to_owned())).with_empty_span(), + )), value: Box::new(number_expr("20")), }, ], })), - subscript: Box::new(Subscript::Index { - index: Expr::Value(Value::SingleQuotedString("a".to_owned())), - }), + access_chain: vec![AccessExpr::Subscript(Subscript::Index { + index: Expr::Value((Value::SingleQuotedString("a".to_owned())).with_empty_span()), + })], }, ); @@ -10849,27 +12468,12 @@ fn parse_select_wildcard_with_except() { ); } -#[test] -fn parse_auto_increment_too_large() { - let dialect = GenericDialect {}; - let u64_max = u64::MAX; - let sql = - format!("CREATE TABLE foo (bar INT NOT NULL AUTO_INCREMENT) AUTO_INCREMENT=1{u64_max}"); - - let res = Parser::new(&dialect) - .try_with_sql(&sql) - .expect("tokenize to work") - .parse_statements(); - - assert!(res.is_err(), "{res:?}"); -} - #[test] fn test_group_by_nothing() { let Select { group_by, .. } = all_dialects_where(|d| d.supports_group_by_expr()) .verified_only_select("SELECT count(1) FROM t GROUP BY ()"); { - std::assert_eq!( + assert_eq!( GroupByExpr::Expressions(vec![Expr::Tuple(vec![])], vec![]), group_by ); @@ -10878,7 +12482,7 @@ fn test_group_by_nothing() { let Select { group_by, .. } = all_dialects_where(|d| d.supports_group_by_expr()) .verified_only_select("SELECT name, count(1) FROM t GROUP BY name, ()"); { - std::assert_eq!( + assert_eq!( GroupByExpr::Expressions( vec![ Identifier(Ident::new("name".to_string())), @@ -10894,26 +12498,75 @@ fn test_group_by_nothing() { #[test] fn test_extract_seconds_ok() { let dialects = all_dialects_where(|d| d.allow_extract_custom()); - let stmt = dialects.verified_expr("EXTRACT(seconds FROM '2 seconds'::INTERVAL)"); + let stmt = dialects.verified_expr("EXTRACT(SECONDS FROM '2 seconds'::INTERVAL)"); assert_eq!( stmt, Expr::Extract { - field: DateTimeField::Custom(Ident { - value: "seconds".to_string(), - quote_style: None, - }), + field: Seconds, syntax: ExtractSyntax::From, expr: Box::new(Expr::Cast { kind: CastKind::DoubleColon, - expr: Box::new(Expr::Value(Value::SingleQuotedString( - "2 seconds".to_string() - ))), + expr: Box::new(Expr::Value( + (Value::SingleQuotedString("2 seconds".to_string())).with_empty_span() + )), data_type: DataType::Interval, format: None, }), } - ) + ); + + let actual_ast = dialects + .parse_sql_statements("SELECT EXTRACT(seconds FROM '2 seconds'::INTERVAL)") + .unwrap(); + + let expected_ast = vec![Statement::Query(Box::new(Query { + with: None, + body: Box::new(SetExpr::Select(Box::new(Select { + select_token: AttachedToken::empty(), + distinct: None, + top: None, + top_before_distinct: false, + projection: vec![UnnamedExpr(Expr::Extract { + field: Seconds, + syntax: ExtractSyntax::From, + expr: Box::new(Expr::Cast { + kind: CastKind::DoubleColon, + expr: Box::new(Expr::Value( + (Value::SingleQuotedString("2 seconds".to_string())).with_empty_span(), + )), + data_type: DataType::Interval, + format: None, + }), + })], + into: None, + from: vec![], + lateral_views: vec![], + prewhere: None, + selection: None, + group_by: GroupByExpr::Expressions(vec![], vec![]), + cluster_by: vec![], + distribute_by: vec![], + sort_by: vec![], + having: None, + named_window: vec![], + qualify: None, + window_before_qualify: false, + value_table_mode: None, + connect_by: None, + flavor: 
SelectFlavor::Standard, + }))), + order_by: None, + limit_clause: None, + fetch: None, + locks: vec![], + for_clause: None, + settings: None, + format_clause: None, + pipe_operators: vec![], + }))]; + + assert_eq!(actual_ast, expected_ast); } #[test] @@ -10927,13 +12580,14 @@ fn test_extract_seconds_single_quote_ok() { field: DateTimeField::Custom(Ident { value: "seconds".to_string(), quote_style: Some('\''), + span: Span::empty(), }), syntax: ExtractSyntax::From, expr: Box::new(Expr::Cast { kind: CastKind::DoubleColon, - expr: Box::new(Expr::Value(Value::SingleQuotedString( - "2 seconds".to_string() - ))), + expr: Box::new(Expr::Value( + (Value::SingleQuotedString("2 seconds".to_string())).with_empty_span() + )), data_type: DataType::Interval, format: None, }), @@ -10941,17 +12595,6 @@ fn test_extract_seconds_single_quote_ok() { ) } -#[test] -fn test_extract_seconds_err() { - let sql = "SELECT EXTRACT(seconds FROM '2 seconds'::INTERVAL)"; - let dialects = all_dialects_except(|d| d.allow_extract_custom()); - let err = dialects.parse_sql_statements(sql).unwrap_err(); - assert_eq!( - err.to_string(), - "sql parser error: Expected: date/time field, found: seconds" - ); -} - #[test] fn test_extract_seconds_single_quote_err() { let sql = r#"SELECT EXTRACT('seconds' FROM '2 seconds'::INTERVAL)"#; @@ -10995,11 +12638,11 @@ fn parse_explain_with_option_list() { Some(vec![ UtilityOption { name: Ident::new("ANALYZE"), - arg: Some(Expr::Value(Value::Boolean(false))), + arg: Some(Expr::Value((Value::Boolean(false)).with_empty_span())), }, UtilityOption { name: Ident::new("VERBOSE"), - arg: Some(Expr::Value(Value::Boolean(true))), + arg: Some(Expr::Value((Value::Boolean(true)).with_empty_span())), }, ]), ); @@ -11035,7 +12678,9 @@ fn parse_explain_with_option_list() { }, UtilityOption { name: Ident::new("FORMAT2"), - arg: Some(Expr::Value(Value::SingleQuotedString("JSON".to_string()))), + arg: Some(Expr::Value( + (Value::SingleQuotedString("JSON".to_string())).with_empty_span(), + )), }, UtilityOption { name: Ident::new("FORMAT3"), @@ -11057,20 +12702,26 @@ fn parse_explain_with_option_list() { Some(vec![ UtilityOption { name: Ident::new("NUM1"), - arg: Some(Expr::Value(Value::Number("10".parse().unwrap(), false))), + arg: Some(Expr::Value( + (Value::Number("10".parse().unwrap(), false)).with_empty_span(), + )), }, UtilityOption { name: Ident::new("NUM2"), arg: Some(Expr::UnaryOp { op: UnaryOperator::Plus, - expr: Box::new(Expr::Value(Value::Number("10.1".parse().unwrap(), false))), + expr: Box::new(Expr::Value( + (Value::Number("10.1".parse().unwrap(), false)).with_empty_span(), + )), }), }, UtilityOption { name: Ident::new("NUM3"), arg: Some(Expr::UnaryOp { op: UnaryOperator::Minus, - expr: Box::new(Expr::Value(Value::Number("10.2".parse().unwrap(), false))), + expr: Box::new(Expr::Value( + (Value::Number("10.2".parse().unwrap(), false)).with_empty_span(), + )), }), }, ]), @@ -11083,7 +12734,7 @@ fn parse_explain_with_option_list() { }, UtilityOption { name: Ident::new("VERBOSE"), - arg: Some(Expr::Value(Value::Boolean(true))), + arg: Some(Expr::Value((Value::Boolean(true)).with_empty_span())), }, UtilityOption { name: Ident::new("WAL"), @@ -11097,11 +12748,13 @@ fn parse_explain_with_option_list() { name: Ident::new("USER_DEF_NUM"), arg: Some(Expr::UnaryOp { op: UnaryOperator::Minus, - expr: Box::new(Expr::Value(Value::Number("100.1".parse().unwrap(), false))), + expr: Box::new(Expr::Value( + (Value::Number("100.1".parse().unwrap(), false)).with_empty_span(), + )), }), }, ]; - run_explain_analyze ( 
+ run_explain_analyze( all_dialects_where(|d| d.supports_explain_with_utility_options()), "EXPLAIN (ANALYZE, VERBOSE true, WAL OFF, FORMAT YAML, USER_DEF_NUM -100.1) SELECT sqrt(id) FROM foo", false, @@ -11113,13 +12766,11 @@ fn parse_explain_with_option_list() { #[test] fn test_create_policy() { - let sql = concat!( - "CREATE POLICY my_policy ON my_table ", - "AS PERMISSIVE FOR SELECT ", - "TO my_role, CURRENT_USER ", - "USING (c0 = 1) ", - "WITH CHECK (true)" - ); + let sql: &str = "CREATE POLICY my_policy ON my_table \ + AS PERMISSIVE FOR SELECT \ + TO my_role, CURRENT_USER \ + USING (c0 = 1) \ + WITH CHECK (1 = 1)"; match all_dialects().verified_stmt(sql) { Statement::CreatePolicy { @@ -11144,10 +12795,23 @@ fn test_create_policy() { Some(Expr::BinaryOp { left: Box::new(Expr::Identifier(Ident::new("c0"))), op: BinaryOperator::Eq, - right: Box::new(Expr::Value(Value::Number("1".parse().unwrap(), false))), + right: Box::new(Expr::Value( + (Value::Number("1".parse().unwrap(), false)).with_empty_span() + )), + }) + ); + assert_eq!( + with_check, + Some(Expr::BinaryOp { + left: Box::new(Expr::Value( + (Value::Number("1".parse().unwrap(), false)).with_empty_span() + )), + op: BinaryOperator::Eq, + right: Box::new(Expr::Value( + (Value::Number("1".parse().unwrap(), false)).with_empty_span() + )), }) ); - assert_eq!(with_check, Some(Expr::Value(Value::Boolean(true)))); } _ => unreachable!(), } @@ -11158,7 +12822,7 @@ fn test_create_policy() { "AS PERMISSIVE FOR SELECT ", "TO my_role, CURRENT_USER ", "USING (c0 IN (SELECT column FROM t0)) ", - "WITH CHECK (true)" + "WITH CHECK (1 = 1)" )); // omit AS / FOR / TO / USING / WITH CHECK clauses is allowed all_dialects().verified_stmt("CREATE POLICY my_policy ON my_table"); @@ -11221,12 +12885,12 @@ fn test_drop_policy() { if_exists, name, table_name, - option, + drop_behavior, } => { assert_eq!(if_exists, true); assert_eq!(name.to_string(), "my_policy"); assert_eq!(table_name.to_string(), "my_table"); - assert_eq!(option, Some(ReferentialAction::Restrict)); + assert_eq!(drop_behavior, Some(DropBehavior::Restrict)); } _ => unreachable!(), } @@ -11335,6 +12999,183 @@ fn test_alter_policy() { ); } +#[test] +fn test_create_connector() { + let sql = "CREATE CONNECTOR my_connector \ + TYPE 'jdbc' \ + URL 'jdbc:mysql://localhost:3306/mydb' \ + WITH DCPROPERTIES('user' = 'root', 'password' = 'password')"; + let dialects = all_dialects(); + match dialects.verified_stmt(sql) { + Statement::CreateConnector(CreateConnector { + name, + connector_type, + url, + with_dcproperties, + .. 
+ }) => { + assert_eq!(name.to_string(), "my_connector"); + assert_eq!(connector_type, Some("jdbc".to_string())); + assert_eq!(url, Some("jdbc:mysql://localhost:3306/mydb".to_string())); + assert_eq!( + with_dcproperties, + Some(vec![ + SqlOption::KeyValue { + key: Ident::with_quote('\'', "user"), + value: Expr::Value( + (Value::SingleQuotedString("root".to_string())).with_empty_span() + ) + }, + SqlOption::KeyValue { + key: Ident::with_quote('\'', "password"), + value: Expr::Value( + (Value::SingleQuotedString("password".to_string())).with_empty_span() + ) + } + ]) + ); + } + _ => unreachable!(), + } + + // omit IF NOT EXISTS/TYPE/URL/COMMENT/WITH DCPROPERTIES clauses is allowed + dialects.verified_stmt("CREATE CONNECTOR my_connector"); + + // missing connector name + assert_eq!( + dialects + .parse_sql_statements("CREATE CONNECTOR") + .unwrap_err() + .to_string(), + "sql parser error: Expected: identifier, found: EOF" + ); +} + +#[test] +fn test_drop_connector() { + let dialects = all_dialects(); + match dialects.verified_stmt("DROP CONNECTOR IF EXISTS my_connector") { + Statement::DropConnector { if_exists, name } => { + assert_eq!(if_exists, true); + assert_eq!(name.to_string(), "my_connector"); + } + _ => unreachable!(), + } + + // omit IF EXISTS is allowed + dialects.verified_stmt("DROP CONNECTOR my_connector"); + + // missing connector name + assert_eq!( + dialects + .parse_sql_statements("DROP CONNECTOR") + .unwrap_err() + .to_string(), + "sql parser error: Expected: identifier, found: EOF" + ); +} + +#[test] +fn test_alter_connector() { + let dialects = all_dialects(); + match dialects.verified_stmt( + "ALTER CONNECTOR my_connector SET DCPROPERTIES('user' = 'root', 'password' = 'password')", + ) { + Statement::AlterConnector { + name, + properties, + url, + owner, + } => { + assert_eq!(name.to_string(), "my_connector"); + assert_eq!( + properties, + Some(vec![ + SqlOption::KeyValue { + key: Ident::with_quote('\'', "user"), + value: Expr::Value( + (Value::SingleQuotedString("root".to_string())).with_empty_span() + ) + }, + SqlOption::KeyValue { + key: Ident::with_quote('\'', "password"), + value: Expr::Value( + (Value::SingleQuotedString("password".to_string())).with_empty_span() + ) + } + ]) + ); + assert_eq!(url, None); + assert_eq!(owner, None); + } + _ => unreachable!(), + } + + match dialects + .verified_stmt("ALTER CONNECTOR my_connector SET URL 'jdbc:mysql://localhost:3306/mydb'") + { + Statement::AlterConnector { + name, + properties, + url, + owner, + } => { + assert_eq!(name.to_string(), "my_connector"); + assert_eq!(properties, None); + assert_eq!(url, Some("jdbc:mysql://localhost:3306/mydb".to_string())); + assert_eq!(owner, None); + } + _ => unreachable!(), + } + + match dialects.verified_stmt("ALTER CONNECTOR my_connector SET OWNER USER 'root'") { + Statement::AlterConnector { + name, + properties, + url, + owner, + } => { + assert_eq!(name.to_string(), "my_connector"); + assert_eq!(properties, None); + assert_eq!(url, None); + assert_eq!( + owner, + Some(AlterConnectorOwner::User(Ident::with_quote('\'', "root"))) + ); + } + _ => unreachable!(), + } + + match dialects.verified_stmt("ALTER CONNECTOR my_connector SET OWNER ROLE 'admin'") { + Statement::AlterConnector { + name, + properties, + url, + owner, + } => { + assert_eq!(name.to_string(), "my_connector"); + assert_eq!(properties, None); + assert_eq!(url, None); + assert_eq!( + owner, + Some(AlterConnectorOwner::Role(Ident::with_quote('\'', "admin"))) + ); + } + _ => unreachable!(), + } + + // Wrong option name + 
assert_eq!( + dialects + .parse_sql_statements( + "ALTER CONNECTOR my_connector SET WRONG 'jdbc:mysql://localhost:3306/mydb'" + ) + .unwrap_err() + .to_string(), + "sql parser error: Expected: end of statement, found: WRONG" + ); +} + #[test] fn test_select_where_with_like_or_ilike_any() { verified_stmt(r#"SELECT * FROM x WHERE a ILIKE ANY '%abc%'"#); @@ -11353,7 +13194,7 @@ fn test_any_some_all_comparison() { #[test] fn test_alias_equal_expr() { - let dialects = all_dialects_where(|d| d.supports_eq_alias_assigment()); + let dialects = all_dialects_where(|d| d.supports_eq_alias_assignment()); let sql = r#"SELECT some_alias = some_column FROM some_table"#; let expected = r#"SELECT some_column AS some_alias FROM some_table"#; let _ = dialects.one_statement_parses_to(sql, expected); @@ -11362,7 +13203,7 @@ fn test_alias_equal_expr() { let expected = r#"SELECT (a * b) AS some_alias FROM some_table"#; let _ = dialects.one_statement_parses_to(sql, expected); - let dialects = all_dialects_where(|d| !d.supports_eq_alias_assigment()); + let dialects = all_dialects_where(|d| !d.supports_eq_alias_assignment()); let sql = r#"SELECT x = (a * b) FROM some_table"#; let expected = r#"SELECT x = (a * b) FROM some_table"#; let _ = dialects.one_statement_parses_to(sql, expected); @@ -11378,3 +13219,1976 @@ fn test_try_convert() { all_dialects_where(|d| d.supports_try_convert() && !d.convert_type_before_value()); dialects.verified_expr("TRY_CONVERT('foo', VARCHAR(MAX))"); } + +#[test] +fn parse_method_select() { + let _ = verified_only_select( + "SELECT LEFT('abc', 1).value('.', 'NVARCHAR(MAX)').value('.', 'NVARCHAR(MAX)') AS T", + ); + let _ = verified_only_select("SELECT STUFF((SELECT ',' + name FROM sys.objects FOR XML PATH(''), TYPE).value('.', 'NVARCHAR(MAX)'), 1, 1, '') AS T"); + let _ = verified_only_select("SELECT CAST(column AS XML).value('.', 'NVARCHAR(MAX)') AS T"); + + // `CONVERT` support + let dialects = + all_dialects_where(|d| d.supports_try_convert() && d.convert_type_before_value()); + let _ = dialects.verified_only_select("SELECT CONVERT(XML, 'abc').value('.', 'NVARCHAR(MAX)').value('.', 'NVARCHAR(MAX)') AS T"); +} + +#[test] +fn parse_method_expr() { + let expr = + verified_expr("LEFT('abc', 1).value('.', 'NVARCHAR(MAX)').value('.', 'NVARCHAR(MAX)')"); + match expr { + Expr::CompoundFieldAccess { root, access_chain } => { + assert!(matches!(*root, Expr::Function(_))); + assert!(matches!( + access_chain[..], + [ + AccessExpr::Dot(Expr::Function(_)), + AccessExpr::Dot(Expr::Function(_)) + ] + )); + } + _ => unreachable!(), + } + + let expr = verified_expr( + "(SELECT ',' + name FROM sys.objects FOR XML PATH(''), TYPE).value('.', 'NVARCHAR(MAX)')", + ); + match expr { + Expr::CompoundFieldAccess { root, access_chain } => { + assert!(matches!(*root, Expr::Subquery(_))); + assert!(matches!( + access_chain[..], + [AccessExpr::Dot(Expr::Function(_))] + )); + } + _ => unreachable!(), + } + let expr = verified_expr("CAST(column AS XML).value('.', 'NVARCHAR(MAX)')"); + match expr { + Expr::CompoundFieldAccess { root, access_chain } => { + assert!(matches!(*root, Expr::Cast { .. 
})); + assert!(matches!( + access_chain[..], + [AccessExpr::Dot(Expr::Function(_))] + )); + } + _ => unreachable!(), + } + + // `CONVERT` support + let dialects = + all_dialects_where(|d| d.supports_try_convert() && d.convert_type_before_value()); + let expr = dialects.verified_expr( + "CONVERT(XML, 'abc').value('.', 'NVARCHAR(MAX)').value('.', 'NVARCHAR(MAX)')", + ); + match expr { + Expr::CompoundFieldAccess { root, access_chain } => { + assert!(matches!(*root, Expr::Convert { .. })); + assert!(matches!( + access_chain[..], + [ + AccessExpr::Dot(Expr::Function(_)), + AccessExpr::Dot(Expr::Function(_)) + ] + )); + } + _ => unreachable!(), + } +} + +#[test] +fn test_show_dbs_schemas_tables_views() { + // These statements are parsed the same by all dialects + let stmts = vec![ + "SHOW DATABASES", + "SHOW SCHEMAS", + "SHOW TABLES", + "SHOW VIEWS", + "SHOW TABLES IN db1", + "SHOW VIEWS FROM db1", + "SHOW MATERIALIZED VIEWS", + "SHOW MATERIALIZED VIEWS IN db1", + "SHOW MATERIALIZED VIEWS FROM db1", + ]; + for stmt in stmts { + verified_stmt(stmt); + } + + // These statements are parsed the same by all dialects + // except for how the parser interprets the location of + // LIKE option (infix/suffix) + let stmts = vec!["SHOW DATABASES LIKE '%abc'", "SHOW SCHEMAS LIKE '%abc'"]; + for stmt in stmts { + all_dialects_where(|d| d.supports_show_like_before_in()).verified_stmt(stmt); + all_dialects_where(|d| !d.supports_show_like_before_in()).verified_stmt(stmt); + } + + // These statements are only parsed by dialects that + // support the LIKE option in the suffix + let stmts = vec![ + "SHOW TABLES IN db1 'abc'", + "SHOW VIEWS IN db1 'abc'", + "SHOW VIEWS FROM db1 'abc'", + "SHOW MATERIALIZED VIEWS IN db1 'abc'", + "SHOW MATERIALIZED VIEWS FROM db1 'abc'", + ]; + for stmt in stmts { + all_dialects_where(|d| !d.supports_show_like_before_in()).verified_stmt(stmt); + } +} + +#[test] +fn parse_listen_channel() { + let dialects = all_dialects_where(|d| d.supports_listen_notify()); + + match dialects.verified_stmt("LISTEN test1") { + Statement::LISTEN { channel } => { + assert_eq!(Ident::new("test1"), channel); + } + _ => unreachable!(), + }; + + assert_eq!( + dialects.parse_sql_statements("LISTEN *").unwrap_err(), + ParserError::ParserError("Expected: identifier, found: *".to_string()) + ); + + let dialects = all_dialects_where(|d| !d.supports_listen_notify()); + + assert_eq!( + dialects.parse_sql_statements("LISTEN test1").unwrap_err(), + ParserError::ParserError("Expected: an SQL statement, found: LISTEN".to_string()) + ); +} + +#[test] +fn parse_unlisten_channel() { + let dialects = all_dialects_where(|d| d.supports_listen_notify()); + + match dialects.verified_stmt("UNLISTEN test1") { + Statement::UNLISTEN { channel } => { + assert_eq!(Ident::new("test1"), channel); + } + _ => unreachable!(), + }; + + match dialects.verified_stmt("UNLISTEN *") { + Statement::UNLISTEN { channel } => { + assert_eq!(Ident::new("*"), channel); + } + _ => unreachable!(), + }; + + assert_eq!( + dialects.parse_sql_statements("UNLISTEN +").unwrap_err(), + ParserError::ParserError("Expected: wildcard or identifier, found: +".to_string()) + ); + + let dialects = all_dialects_where(|d| !d.supports_listen_notify()); + + assert_eq!( + dialects.parse_sql_statements("UNLISTEN test1").unwrap_err(), + ParserError::ParserError("Expected: an SQL statement, found: UNLISTEN".to_string()) + ); +} + +#[test] +fn parse_notify_channel() { + let dialects = all_dialects_where(|d| d.supports_listen_notify()); + + match 
dialects.verified_stmt("NOTIFY test1") { + Statement::NOTIFY { channel, payload } => { + assert_eq!(Ident::new("test1"), channel); + assert_eq!(payload, None); + } + _ => unreachable!(), + }; + + match dialects.verified_stmt("NOTIFY test1, 'this is a test notification'") { + Statement::NOTIFY { + channel, + payload: Some(payload), + } => { + assert_eq!(Ident::new("test1"), channel); + assert_eq!("this is a test notification", payload); + } + _ => unreachable!(), + }; + + assert_eq!( + dialects.parse_sql_statements("NOTIFY *").unwrap_err(), + ParserError::ParserError("Expected: identifier, found: *".to_string()) + ); + assert_eq!( + dialects + .parse_sql_statements("NOTIFY test1, *") + .unwrap_err(), + ParserError::ParserError("Expected: literal string, found: *".to_string()) + ); + + let sql_statements = [ + "NOTIFY test1", + "NOTIFY test1, 'this is a test notification'", + ]; + let dialects = all_dialects_where(|d| !d.supports_listen_notify()); + + for &sql in &sql_statements { + assert_eq!( + dialects.parse_sql_statements(sql).unwrap_err(), + ParserError::ParserError("Expected: an SQL statement, found: NOTIFY".to_string()) + ); + } +} + +#[test] +fn parse_load_data() { + let dialects = all_dialects_where(|d| d.supports_load_data()); + let only_supports_load_extension_dialects = + all_dialects_where(|d| !d.supports_load_data() && d.supports_load_extension()); + let not_supports_load_dialects = + all_dialects_where(|d| !d.supports_load_data() && !d.supports_load_extension()); + + let sql = "LOAD DATA INPATH '/local/path/to/data.txt' INTO TABLE test.my_table"; + match dialects.verified_stmt(sql) { + Statement::LoadData { + local, + inpath, + overwrite, + table_name, + partitioned, + table_format, + } => { + assert_eq!(false, local); + assert_eq!("/local/path/to/data.txt", inpath); + assert_eq!(false, overwrite); + assert_eq!( + ObjectName::from(vec![Ident::new("test"), Ident::new("my_table")]), + table_name + ); + assert_eq!(None, partitioned); + assert_eq!(None, table_format); + } + _ => unreachable!(), + }; + + // with OVERWRITE keyword + let sql = "LOAD DATA INPATH '/local/path/to/data.txt' OVERWRITE INTO TABLE my_table"; + match dialects.verified_stmt(sql) { + Statement::LoadData { + local, + inpath, + overwrite, + table_name, + partitioned, + table_format, + } => { + assert_eq!(false, local); + assert_eq!("/local/path/to/data.txt", inpath); + assert_eq!(true, overwrite); + assert_eq!(ObjectName::from(vec![Ident::new("my_table")]), table_name); + assert_eq!(None, partitioned); + assert_eq!(None, table_format); + } + _ => unreachable!(), + }; + + assert_eq!( + only_supports_load_extension_dialects + .parse_sql_statements(sql) + .unwrap_err(), + ParserError::ParserError("Expected: end of statement, found: INPATH".to_string()) + ); + assert_eq!( + not_supports_load_dialects + .parse_sql_statements(sql) + .unwrap_err(), + ParserError::ParserError( + "Expected: `DATA` or an extension name after `LOAD`, found: INPATH".to_string() + ) + ); + + // with LOCAL keyword + let sql = "LOAD DATA LOCAL INPATH '/local/path/to/data.txt' INTO TABLE test.my_table"; + match dialects.verified_stmt(sql) { + Statement::LoadData { + local, + inpath, + overwrite, + table_name, + partitioned, + table_format, + } => { + assert_eq!(true, local); + assert_eq!("/local/path/to/data.txt", inpath); + assert_eq!(false, overwrite); + assert_eq!( + ObjectName::from(vec![Ident::new("test"), Ident::new("my_table")]), + table_name + ); + assert_eq!(None, partitioned); + assert_eq!(None, table_format); + } + _ => 
unreachable!(), + }; + + assert_eq!( + only_supports_load_extension_dialects + .parse_sql_statements(sql) + .unwrap_err(), + ParserError::ParserError("Expected: end of statement, found: LOCAL".to_string()) + ); + assert_eq!( + not_supports_load_dialects + .parse_sql_statements(sql) + .unwrap_err(), + ParserError::ParserError( + "Expected: `DATA` or an extension name after `LOAD`, found: LOCAL".to_string() + ) + ); + + // with PARTITION clause + let sql = "LOAD DATA LOCAL INPATH '/local/path/to/data.txt' INTO TABLE my_table PARTITION (year = 2024, month = 11)"; + match dialects.verified_stmt(sql) { + Statement::LoadData { + local, + inpath, + overwrite, + table_name, + partitioned, + table_format, + } => { + assert_eq!(true, local); + assert_eq!("/local/path/to/data.txt", inpath); + assert_eq!(false, overwrite); + assert_eq!(ObjectName::from(vec![Ident::new("my_table")]), table_name); + assert_eq!( + Some(vec![ + Expr::BinaryOp { + left: Box::new(Expr::Identifier(Ident::new("year"))), + op: BinaryOperator::Eq, + right: Box::new(Expr::Value( + (Value::Number("2024".parse().unwrap(), false)).with_empty_span() + )), + }, + Expr::BinaryOp { + left: Box::new(Expr::Identifier(Ident::new("month"))), + op: BinaryOperator::Eq, + right: Box::new(Expr::Value( + (Value::Number("11".parse().unwrap(), false)).with_empty_span() + )), + } + ]), + partitioned + ); + assert_eq!(None, table_format); + } + _ => unreachable!(), + }; + + // with PARTITION clause + let sql = "LOAD DATA LOCAL INPATH '/local/path/to/data.txt' OVERWRITE INTO TABLE good.my_table PARTITION (year = 2024, month = 11) INPUTFORMAT 'org.apache.hadoop.mapred.TextInputFormat' SERDE 'org.apache.hadoop.hive.serde2.OpenCSVSerde'"; + match dialects.verified_stmt(sql) { + Statement::LoadData { + local, + inpath, + overwrite, + table_name, + partitioned, + table_format, + } => { + assert_eq!(true, local); + assert_eq!("/local/path/to/data.txt", inpath); + assert_eq!(true, overwrite); + assert_eq!( + ObjectName::from(vec![Ident::new("good"), Ident::new("my_table")]), + table_name + ); + assert_eq!( + Some(vec![ + Expr::BinaryOp { + left: Box::new(Expr::Identifier(Ident::new("year"))), + op: BinaryOperator::Eq, + right: Box::new(Expr::Value( + (Value::Number("2024".parse().unwrap(), false)).with_empty_span() + )), + }, + Expr::BinaryOp { + left: Box::new(Expr::Identifier(Ident::new("month"))), + op: BinaryOperator::Eq, + right: Box::new(Expr::Value( + (Value::Number("11".parse().unwrap(), false)).with_empty_span() + )), + } + ]), + partitioned + ); + assert_eq!( + Some(HiveLoadDataFormat { + serde: Expr::Value( + (Value::SingleQuotedString( + "org.apache.hadoop.hive.serde2.OpenCSVSerde".to_string() + )) + .with_empty_span() + ), + input_format: Expr::Value( + (Value::SingleQuotedString( + "org.apache.hadoop.mapred.TextInputFormat".to_string() + )) + .with_empty_span() + ) + }), + table_format + ); + } + _ => unreachable!(), + }; + + // negative test case + let sql = "LOAD DATA2 LOCAL INPATH '/local/path/to/data.txt' INTO TABLE test.my_table"; + assert_eq!( + dialects.parse_sql_statements(sql).unwrap_err(), + ParserError::ParserError( + "Expected: `DATA` or an extension name after `LOAD`, found: DATA2".to_string() + ) + ); +} + +#[test] +fn test_load_extension() { + let dialects = all_dialects_where(|d| d.supports_load_extension()); + let not_supports_load_extension_dialects = all_dialects_where(|d| !d.supports_load_extension()); + let sql = "LOAD my_extension"; + + match dialects.verified_stmt(sql) { + Statement::Load { extension_name } => { + 
assert_eq!(Ident::new("my_extension"), extension_name); + } + _ => unreachable!(), + }; + + assert_eq!( + not_supports_load_extension_dialects + .parse_sql_statements(sql) + .unwrap_err(), + ParserError::ParserError( + "Expected: `DATA` or an extension name after `LOAD`, found: my_extension".to_string() + ) + ); + + let sql = "LOAD 'filename'"; + + match dialects.verified_stmt(sql) { + Statement::Load { extension_name } => { + assert_eq!( + Ident { + value: "filename".to_string(), + quote_style: Some('\''), + span: Span::empty(), + }, + extension_name + ); + } + _ => unreachable!(), + }; +} + +#[test] +fn test_select_top() { + let dialects = all_dialects_where(|d| d.supports_top_before_distinct()); + dialects.one_statement_parses_to("SELECT ALL * FROM tbl", "SELECT * FROM tbl"); + dialects.verified_stmt("SELECT TOP 3 * FROM tbl"); + dialects.one_statement_parses_to("SELECT TOP 3 ALL * FROM tbl", "SELECT TOP 3 * FROM tbl"); + dialects.verified_stmt("SELECT TOP 3 DISTINCT * FROM tbl"); + dialects.verified_stmt("SELECT TOP 3 DISTINCT a, b, c FROM tbl"); +} + +#[test] +fn parse_bang_not() { + let dialects = all_dialects_where(|d| d.supports_bang_not_operator()); + let sql = "SELECT !a, !(b > 3)"; + let Select { projection, .. } = dialects.verified_only_select(sql); + + for (i, expr) in [ + Box::new(Expr::Identifier(Ident::new("a"))), + Box::new(Expr::Nested(Box::new(Expr::BinaryOp { + left: Box::new(Expr::Identifier(Ident::new("b"))), + op: BinaryOperator::Gt, + right: Box::new(Expr::Value( + Value::Number("3".parse().unwrap(), false).with_empty_span(), + )), + }))), + ] + .into_iter() + .enumerate() + { + assert_eq!( + SelectItem::UnnamedExpr(Expr::UnaryOp { + op: UnaryOperator::BangNot, + expr + }), + projection[i] + ) + } + + let sql_statements = ["SELECT a!", "SELECT a ! b", "SELECT a ! as b"]; + + for &sql in &sql_statements { + assert_eq!( + dialects.parse_sql_statements(sql).unwrap_err(), + ParserError::ParserError("No infix parser for token ExclamationMark".to_string()) + ); + } + + let sql_statements = ["SELECT !a", "SELECT !a b", "SELECT !a as b"]; + let dialects = all_dialects_where(|d| !d.supports_bang_not_operator()); + + for &sql in &sql_statements { + assert_eq!( + dialects.parse_sql_statements(sql).unwrap_err(), + ParserError::ParserError("Expected: an expression, found: !".to_string()) + ); + } +} + +#[test] +fn parse_factorial_operator() { + let dialects = all_dialects_where(|d| d.supports_factorial_operator()); + let sql = "SELECT a!, (b + c)!"; + let Select { projection, .. } = dialects.verified_only_select(sql); + + for (i, expr) in [ + Box::new(Expr::Identifier(Ident::new("a"))), + Box::new(Expr::Nested(Box::new(Expr::BinaryOp { + left: Box::new(Expr::Identifier(Ident::new("b"))), + op: BinaryOperator::Plus, + right: Box::new(Expr::Identifier(Ident::new("c"))), + }))), + ] + .into_iter() + .enumerate() + { + assert_eq!( + SelectItem::UnnamedExpr(Expr::UnaryOp { + op: UnaryOperator::PGPostfixFactorial, + expr + }), + projection[i] + ) + } + + let sql_statements = ["SELECT !a", "SELECT !a b", "SELECT !a as b"]; + + for &sql in &sql_statements { + assert_eq!( + dialects.parse_sql_statements(sql).unwrap_err(), + ParserError::ParserError("Expected: an expression, found: !".to_string()) + ); + } + + let sql_statements = ["SELECT a!", "SELECT a ! b", "SELECT a ! as b"]; + + // Due to the exclamation mark, which is both part of the `bang not` operator + // and the `factorial` operator, additional filtering not supports + // `bang not` operator is required here. 
+ let dialects = + all_dialects_where(|d| !d.supports_factorial_operator() && !d.supports_bang_not_operator()); + + for &sql in &sql_statements { + assert_eq!( + dialects.parse_sql_statements(sql).unwrap_err(), + ParserError::ParserError("No infix parser for token ExclamationMark".to_string()) + ); + } + + // Due to the exclamation mark, which is both part of the `bang not` operator + // and the `factorial` operator, additional filtering supports + // `bang not` operator is required here. + let dialects = + all_dialects_where(|d| !d.supports_factorial_operator() && d.supports_bang_not_operator()); + + for &sql in &sql_statements { + assert_eq!( + dialects.parse_sql_statements(sql).unwrap_err(), + ParserError::ParserError("No infix parser for token ExclamationMark".to_string()) + ); + } +} + +#[test] +fn parse_comments() { + match all_dialects_where(|d| d.supports_comment_on()) + .verified_stmt("COMMENT ON COLUMN tab.name IS 'comment'") + { + Statement::Comment { + object_type, + object_name, + comment: Some(comment), + if_exists, + } => { + assert_eq!("comment", comment); + assert_eq!("tab.name", object_name.to_string()); + assert_eq!(CommentObject::Column, object_type); + assert!(!if_exists); + } + _ => unreachable!(), + } + + let object_types = [ + ("COLUMN", CommentObject::Column), + ("EXTENSION", CommentObject::Extension), + ("TABLE", CommentObject::Table), + ("SCHEMA", CommentObject::Schema), + ("DATABASE", CommentObject::Database), + ("USER", CommentObject::User), + ("ROLE", CommentObject::Role), + ]; + for (keyword, expected_object_type) in object_types.iter() { + match all_dialects_where(|d| d.supports_comment_on()) + .verified_stmt(format!("COMMENT IF EXISTS ON {keyword} db.t0 IS 'comment'").as_str()) + { + Statement::Comment { + object_type, + object_name, + comment: Some(comment), + if_exists, + } => { + assert_eq!("comment", comment); + assert_eq!("db.t0", object_name.to_string()); + assert_eq!(*expected_object_type, object_type); + assert!(if_exists); + } + _ => unreachable!(), + } + } + + match all_dialects_where(|d| d.supports_comment_on()) + .verified_stmt("COMMENT IF EXISTS ON TABLE public.tab IS NULL") + { + Statement::Comment { + object_type, + object_name, + comment: None, + if_exists, + } => { + assert_eq!("public.tab", object_name.to_string()); + assert_eq!(CommentObject::Table, object_type); + assert!(if_exists); + } + _ => unreachable!(), + } + + // missing IS statement + assert_eq!( + all_dialects_where(|d| d.supports_comment_on()) + .parse_sql_statements("COMMENT ON TABLE t0") + .unwrap_err(), + ParserError::ParserError("Expected: IS, found: EOF".to_string()) + ); + + // missing comment literal + assert_eq!( + all_dialects_where(|d| d.supports_comment_on()) + .parse_sql_statements("COMMENT ON TABLE t0 IS") + .unwrap_err(), + ParserError::ParserError("Expected: literal string, found: EOF".to_string()) + ); + + // unknown object type + assert_eq!( + all_dialects_where(|d| d.supports_comment_on()) + .parse_sql_statements("COMMENT ON UNKNOWN t0 IS 'comment'") + .unwrap_err(), + ParserError::ParserError("Expected: comment object_type, found: UNKNOWN".to_string()) + ); +} + +#[test] +fn parse_create_table_select() { + let dialects = all_dialects_where(|d| d.supports_create_table_select()); + let sql_1 = r#"CREATE TABLE foo (baz INT) SELECT bar"#; + let expected = r#"CREATE TABLE foo (baz INT) AS SELECT bar"#; + let _ = dialects.one_statement_parses_to(sql_1, expected); + + let sql_2 = r#"CREATE TABLE foo (baz INT, name STRING) SELECT bar, oth_name FROM test.table_a"#; + 
let expected = + r#"CREATE TABLE foo (baz INT, name STRING) AS SELECT bar, oth_name FROM test.table_a"#; + let _ = dialects.one_statement_parses_to(sql_2, expected); + + let dialects = all_dialects_where(|d| !d.supports_create_table_select()); + for sql in [sql_1, sql_2] { + assert_eq!( + dialects.parse_sql_statements(sql).unwrap_err(), + ParserError::ParserError("Expected: end of statement, found: SELECT".to_string()) + ); + } +} + +#[test] +fn test_reserved_keywords_for_identifiers() { + let dialects = all_dialects_where(|d| d.is_reserved_for_identifier(Keyword::INTERVAL)); + // Dialects that reserve the word INTERVAL will not allow it as an unquoted identifier + let sql = "SELECT MAX(interval) FROM tbl"; + assert_eq!( + dialects.parse_sql_statements(sql), + Err(ParserError::ParserError( + "Expected: an expression, found: )".to_string() + )) + ); + + // Dialects that do not reserve the word INTERVAL will allow it + let dialects = all_dialects_where(|d| !d.is_reserved_for_identifier(Keyword::INTERVAL)); + let sql = "SELECT MAX(interval) FROM tbl"; + dialects.parse_sql_statements(sql).unwrap(); +} + +#[test] +fn parse_create_table_with_bit_types() { + let sql = "CREATE TABLE t (a BIT, b BIT VARYING, c BIT(42), d BIT VARYING(43))"; + match verified_stmt(sql) { + Statement::CreateTable(CreateTable { columns, .. }) => { + assert_eq!(columns.len(), 4); + assert_eq!(columns[0].data_type, DataType::Bit(None)); + assert_eq!(columns[0].to_string(), "a BIT"); + assert_eq!(columns[1].data_type, DataType::BitVarying(None)); + assert_eq!(columns[1].to_string(), "b BIT VARYING"); + assert_eq!(columns[2].data_type, DataType::Bit(Some(42))); + assert_eq!(columns[2].to_string(), "c BIT(42)"); + assert_eq!(columns[3].data_type, DataType::BitVarying(Some(43))); + assert_eq!(columns[3].to_string(), "d BIT VARYING(43)"); + } + _ => unreachable!(), + } +} + +#[test] +fn parse_composite_access_expr() { + assert_eq!( + verified_expr("f(a).b"), + Expr::CompoundFieldAccess { + root: Box::new(Expr::Function(Function { + name: ObjectName::from(vec![Ident::new("f")]), + uses_odbc_syntax: false, + parameters: FunctionArguments::None, + args: FunctionArguments::List(FunctionArgumentList { + duplicate_treatment: None, + args: vec![FunctionArg::Unnamed(FunctionArgExpr::Expr( + Expr::Identifier(Ident::new("a")) + ))], + clauses: vec![], + }), + null_treatment: None, + filter: None, + over: None, + within_group: vec![] + })), + access_chain: vec![AccessExpr::Dot(Expr::Identifier(Ident::new("b")))] + } + ); + + // Nested Composite Access + assert_eq!( + verified_expr("f(a).b.c"), + Expr::CompoundFieldAccess { + root: Box::new(Expr::Function(Function { + name: ObjectName::from(vec![Ident::new("f")]), + uses_odbc_syntax: false, + parameters: FunctionArguments::None, + args: FunctionArguments::List(FunctionArgumentList { + duplicate_treatment: None, + args: vec![FunctionArg::Unnamed(FunctionArgExpr::Expr( + Expr::Identifier(Ident::new("a")) + ))], + clauses: vec![], + }), + null_treatment: None, + filter: None, + over: None, + within_group: vec![] + })), + access_chain: vec![ + AccessExpr::Dot(Expr::Identifier(Ident::new("b"))), + AccessExpr::Dot(Expr::Identifier(Ident::new("c"))), + ] + } + ); + + // Composite Access in Select and Where Clauses + let stmt = verified_only_select("SELECT f(a).b FROM t WHERE f(a).b IS NOT NULL"); + let expr = Expr::CompoundFieldAccess { + root: Box::new(Expr::Function(Function { + name: ObjectName::from(vec![Ident::new("f")]), + uses_odbc_syntax: false, + parameters: FunctionArguments::None, + 
args: FunctionArguments::List(FunctionArgumentList { + duplicate_treatment: None, + args: vec![FunctionArg::Unnamed(FunctionArgExpr::Expr( + Expr::Identifier(Ident::new("a")), + ))], + clauses: vec![], + }), + null_treatment: None, + filter: None, + over: None, + within_group: vec![], + })), + access_chain: vec![AccessExpr::Dot(Expr::Identifier(Ident::new("b")))], + }; + + assert_eq!(stmt.projection[0], SelectItem::UnnamedExpr(expr.clone())); + assert_eq!(stmt.selection.unwrap(), Expr::IsNotNull(Box::new(expr))); + + // Compound access with quoted identifier. + all_dialects_where(|d| d.is_delimited_identifier_start('"')) + .verified_only_select("SELECT f(a).\"an id\""); + + // Composite Access in struct literal + all_dialects_where(|d| d.supports_struct_literal()).verified_stmt( + "SELECT * FROM t WHERE STRUCT(STRUCT(1 AS a, NULL AS b) AS c, NULL AS d).c.a IS NOT NULL", + ); + let support_struct = all_dialects_where(|d| d.supports_struct_literal()); + let stmt = support_struct + .verified_only_select("SELECT STRUCT(STRUCT(1 AS a, NULL AS b) AS c, NULL AS d).c.a"); + let expected = SelectItem::UnnamedExpr(Expr::CompoundFieldAccess { + root: Box::new(Expr::Struct { + values: vec![ + Expr::Named { + name: Ident::new("c"), + expr: Box::new(Expr::Struct { + values: vec![ + Expr::Named { + name: Ident::new("a"), + expr: Box::new(Expr::Value( + (Number("1".parse().unwrap(), false)).with_empty_span(), + )), + }, + Expr::Named { + name: Ident::new("b"), + expr: Box::new(Expr::Value((Value::Null).with_empty_span())), + }, + ], + fields: vec![], + }), + }, + Expr::Named { + name: Ident::new("d"), + expr: Box::new(Expr::Value((Value::Null).with_empty_span())), + }, + ], + fields: vec![], + }), + access_chain: vec![ + AccessExpr::Dot(Expr::Identifier(Ident::new("c"))), + AccessExpr::Dot(Expr::Identifier(Ident::new("a"))), + ], + }); + assert_eq!(stmt.projection[0], expected); +} + +#[test] +fn parse_create_table_with_enum_types() { + let sql = "CREATE TABLE t0 (foo ENUM8('a' = 1, 'b' = 2), bar ENUM16('a' = 1, 'b' = 2), baz ENUM('a', 'b'))"; + match all_dialects().verified_stmt(sql) { + Statement::CreateTable(CreateTable { name, columns, .. 
}) => {
+ assert_eq!(name.to_string(), "t0");
+ assert_eq!(
+ vec![
+ ColumnDef {
+ name: Ident::new("foo"),
+ data_type: DataType::Enum(
+ vec![
+ EnumMember::NamedValue(
+ "a".to_string(),
+ Expr::Value(
+ (Number("1".parse().unwrap(), false)).with_empty_span()
+ )
+ ),
+ EnumMember::NamedValue(
+ "b".to_string(),
+ Expr::Value(
+ (Number("2".parse().unwrap(), false)).with_empty_span()
+ )
+ )
+ ],
+ Some(8)
+ ),
+ options: vec![],
+ },
+ ColumnDef {
+ name: Ident::new("bar"),
+ data_type: DataType::Enum(
+ vec![
+ EnumMember::NamedValue(
+ "a".to_string(),
+ Expr::Value(
+ (Number("1".parse().unwrap(), false)).with_empty_span()
+ )
+ ),
+ EnumMember::NamedValue(
+ "b".to_string(),
+ Expr::Value(
+ (Number("2".parse().unwrap(), false)).with_empty_span()
+ )
+ )
+ ],
+ Some(16)
+ ),
+ options: vec![],
+ },
+ ColumnDef {
+ name: Ident::new("baz"),
+ data_type: DataType::Enum(
+ vec![
+ EnumMember::Name("a".to_string()),
+ EnumMember::Name("b".to_string())
+ ],
+ None
+ ),
+ options: vec![],
+ }
+ ],
+ columns
+ );
+ }
+ _ => unreachable!(),
+ }
+
+ // invalid case: missing value for an enum pair
+ assert_eq!(
+ all_dialects()
+ .parse_sql_statements("CREATE TABLE t0 (foo ENUM8('a' = 1, 'b' = ))")
+ .unwrap_err(),
+ ParserError::ParserError("Expected: a value, found: )".to_string())
+ );
+
+ // invalid case: the name is not a string literal
+ assert_eq!(
+ all_dialects()
+ .parse_sql_statements("CREATE TABLE t0 (foo ENUM8('a' = 1, 2))")
+ .unwrap_err(),
+ ParserError::ParserError("Expected: literal string, found: 2".to_string())
+ );
+}
+
+#[test]
+fn test_table_sample() {
+ let dialects = all_dialects_where(|d| d.supports_table_sample_before_alias());
+ dialects.verified_stmt("SELECT * FROM tbl TABLESAMPLE (50) AS t");
+ dialects.verified_stmt("SELECT * FROM tbl TABLESAMPLE (50 ROWS) AS t");
+ dialects.verified_stmt("SELECT * FROM tbl TABLESAMPLE (50 PERCENT) AS t");
+
+ let dialects = all_dialects_where(|d| !d.supports_table_sample_before_alias());
+ dialects.verified_stmt("SELECT * FROM tbl AS t TABLESAMPLE BERNOULLI (50)");
+ dialects.verified_stmt("SELECT * FROM tbl AS t TABLESAMPLE SYSTEM (50)");
+ dialects.verified_stmt("SELECT * FROM tbl AS t TABLESAMPLE SYSTEM (50) REPEATABLE (10)");
+}
+
+#[test]
+fn overflow() {
+ let expr = std::iter::repeat_n("1", 1000)
+ .collect::<Vec<_>>()
+ .join(" + ");
+ let sql = format!("SELECT {}", expr);
+
+ let mut statements = Parser::parse_sql(&GenericDialect {}, sql.as_str()).unwrap();
+ let statement = statements.pop().unwrap();
+ assert_eq!(statement.to_string(), sql);
+}
+#[test]
+fn parse_select_without_projection() {
+ let dialects = all_dialects_where(|d| d.supports_empty_projections());
+ dialects.verified_stmt("SELECT FROM users");
+}
+
+#[test]
+fn parse_update_from_before_select() {
+ verified_stmt("UPDATE t1 FROM (SELECT name, id FROM t1 GROUP BY id) AS t2 SET name = t2.name WHERE t1.id = t2.id");
+ verified_stmt("UPDATE t1 FROM U, (SELECT id FROM V) AS W SET a = b WHERE 1 = 1");
+
+ let query =
+ "UPDATE t1 FROM (SELECT name, id FROM t1 GROUP BY id) AS t2 SET name = t2.name FROM (SELECT name from t2) AS t2";
+ assert_eq!(
+ ParserError::ParserError("Expected: end of statement, found: FROM".to_string()),
+ parse_sql_statements(query).unwrap_err()
+ );
+}
+#[test]
+fn parse_overlaps() {
+ verified_stmt("SELECT (DATE '2016-01-10', DATE '2016-02-01') OVERLAPS (DATE '2016-01-20', DATE '2016-02-10')");
+}
+
+#[test]
+fn parse_column_definition_trailing_commas() {
+ let dialects = all_dialects_where(|d| d.supports_column_definition_trailing_commas());
+
+
dialects.one_statement_parses_to("CREATE TABLE T (x INT64,)", "CREATE TABLE T (x INT64)"); + dialects.one_statement_parses_to( + "CREATE TABLE T (x INT64, y INT64, )", + "CREATE TABLE T (x INT64, y INT64)", + ); + dialects.one_statement_parses_to( + "CREATE VIEW T (x, y, ) AS SELECT 1", + "CREATE VIEW T (x, y) AS SELECT 1", + ); + + let unsupported_dialects = all_dialects_where(|d| { + !d.supports_projection_trailing_commas() && !d.supports_trailing_commas() + }); + assert_eq!( + unsupported_dialects + .parse_sql_statements("CREATE TABLE employees (name text, age int,)") + .unwrap_err(), + ParserError::ParserError( + "Expected: column name or constraint definition, found: )".to_string() + ), + ); +} + +#[test] +fn test_trailing_commas_in_from() { + let dialects = all_dialects_where(|d| d.supports_from_trailing_commas()); + dialects.verified_only_select_with_canonical("SELECT 1, 2 FROM t,", "SELECT 1, 2 FROM t"); + + dialects + .verified_only_select_with_canonical("SELECT 1, 2 FROM t1, t2,", "SELECT 1, 2 FROM t1, t2"); + + let sql = "SELECT a, FROM b, LIMIT 1"; + let _ = dialects.parse_sql_statements(sql).unwrap(); + + let sql = "INSERT INTO a SELECT b FROM c,"; + let _ = dialects.parse_sql_statements(sql).unwrap(); + + let sql = "SELECT a FROM b, HAVING COUNT(*) > 1"; + let _ = dialects.parse_sql_statements(sql).unwrap(); + + let sql = "SELECT a FROM b, WHERE c = 1"; + let _ = dialects.parse_sql_statements(sql).unwrap(); + + // nested + let sql = "SELECT 1, 2 FROM (SELECT * FROM t,),"; + let _ = dialects.parse_sql_statements(sql).unwrap(); + + // multiple_subqueries + dialects.verified_only_select_with_canonical( + "SELECT 1, 2 FROM (SELECT * FROM t1), (SELECT * FROM t2),", + "SELECT 1, 2 FROM (SELECT * FROM t1), (SELECT * FROM t2)", + ); +} + +#[test] +#[cfg(feature = "visitor")] +fn test_visit_order() { + let sql = "SELECT CASE a WHEN 1 THEN 2 WHEN 3 THEN 4 ELSE 5 END"; + let stmt = verified_stmt(sql); + let mut visited = vec![]; + sqlparser::ast::visit_expressions(&stmt, |expr| { + visited.push(expr.to_string()); + core::ops::ControlFlow::<()>::Continue(()) + }); + + assert_eq!( + visited, + [ + "CASE a WHEN 1 THEN 2 WHEN 3 THEN 4 ELSE 5 END", + "a", + "1", + "2", + "3", + "4", + "5" + ] + ); +} + +#[test] +fn parse_case_statement() { + let sql = "CASE 1 WHEN 2 THEN SELECT 1; SELECT 2; ELSE SELECT 3; END CASE"; + let Statement::Case(stmt) = verified_stmt(sql) else { + unreachable!() + }; + + assert_eq!(Some(Expr::value(number("1"))), stmt.match_expr); + assert_eq!( + Some(Expr::value(number("2"))), + stmt.when_blocks[0].condition + ); + assert_eq!(2, stmt.when_blocks[0].statements().len()); + assert_eq!(1, stmt.else_block.unwrap().statements().len()); + + verified_stmt(concat!( + "CASE 1", + " WHEN a THEN", + " SELECT 1; SELECT 2; SELECT 3;", + " WHEN b THEN", + " SELECT 4; SELECT 5;", + " ELSE", + " SELECT 7; SELECT 8;", + " END CASE" + )); + verified_stmt(concat!( + "CASE 1", + " WHEN a THEN", + " SELECT 1; SELECT 2; SELECT 3;", + " WHEN b THEN", + " SELECT 4; SELECT 5;", + " END CASE" + )); + verified_stmt(concat!( + "CASE 1", + " WHEN a THEN", + " SELECT 1; SELECT 2; SELECT 3;", + " END CASE" + )); + verified_stmt(concat!( + "CASE 1", + " WHEN a THEN", + " SELECT 1; SELECT 2; SELECT 3;", + " END" + )); + + assert_eq!( + ParserError::ParserError("Expected: THEN, found: END".to_string()), + parse_sql_statements("CASE 1 WHEN a END").unwrap_err() + ); + assert_eq!( + ParserError::ParserError("Expected: WHEN, found: ELSE".to_string()), + parse_sql_statements("CASE 1 ELSE SELECT 1; 
END").unwrap_err() + ); +} + +#[test] +fn test_case_statement_span() { + let sql = "CASE 1 WHEN 2 THEN SELECT 1; SELECT 2; ELSE SELECT 3; END CASE"; + let mut parser = Parser::new(&GenericDialect {}).try_with_sql(sql).unwrap(); + assert_eq!( + parser.parse_statement().unwrap().span(), + Span::new(Location::new(1, 1), Location::new(1, sql.len() as u64 + 1)) + ); +} + +#[test] +fn parse_if_statement() { + let dialects = all_dialects_except(|d| d.is::()); + + let sql = "IF 1 THEN SELECT 1; ELSEIF 2 THEN SELECT 2; ELSE SELECT 3; END IF"; + let Statement::If(IfStatement { + if_block, + elseif_blocks, + else_block, + .. + }) = dialects.verified_stmt(sql) + else { + unreachable!() + }; + assert_eq!(Some(Expr::value(number("1"))), if_block.condition); + assert_eq!(Some(Expr::value(number("2"))), elseif_blocks[0].condition); + assert_eq!(1, else_block.unwrap().statements().len()); + + dialects.verified_stmt(concat!( + "IF 1 THEN", + " SELECT 1;", + " SELECT 2;", + " SELECT 3;", + " ELSEIF 2 THEN", + " SELECT 4;", + " SELECT 5;", + " ELSEIF 3 THEN", + " SELECT 6;", + " SELECT 7;", + " ELSE", + " SELECT 8;", + " SELECT 9;", + " END IF" + )); + dialects.verified_stmt(concat!( + "IF 1 THEN", + " SELECT 1;", + " SELECT 2;", + " ELSE", + " SELECT 3;", + " SELECT 4;", + " END IF" + )); + dialects.verified_stmt(concat!( + "IF 1 THEN", + " SELECT 1;", + " SELECT 2;", + " SELECT 3;", + " ELSEIF 2 THEN", + " SELECT 3;", + " SELECT 4;", + " END IF" + )); + dialects.verified_stmt(concat!("IF 1 THEN", " SELECT 1;", " SELECT 2;", " END IF")); + dialects.verified_stmt(concat!( + "IF (1) THEN", + " SELECT 1;", + " SELECT 2;", + " END IF" + )); + dialects.verified_stmt("IF 1 THEN END IF"); + dialects.verified_stmt("IF 1 THEN SELECT 1; ELSEIF 1 THEN END IF"); + + assert_eq!( + ParserError::ParserError("Expected: IF, found: EOF".to_string()), + dialects + .parse_sql_statements("IF 1 THEN SELECT 1; ELSEIF 1 THEN SELECT 2; END") + .unwrap_err() + ); +} + +#[test] +fn test_if_statement_span() { + let sql = "IF 1=1 THEN SELECT 1; ELSEIF 1=2 THEN SELECT 2; ELSE SELECT 3; END IF"; + let mut parser = Parser::new(&GenericDialect {}).try_with_sql(sql).unwrap(); + assert_eq!( + parser.parse_statement().unwrap().span(), + Span::new(Location::new(1, 1), Location::new(1, sql.len() as u64 + 1)) + ); +} + +#[test] +fn test_if_statement_multiline_span() { + let sql_line1 = "IF 1 = 1 THEN SELECT 1;"; + let sql_line2 = "ELSEIF 1 = 2 THEN SELECT 2;"; + let sql_line3 = "ELSE SELECT 3;"; + let sql_line4 = "END IF"; + let sql = [sql_line1, sql_line2, sql_line3, sql_line4].join("\n"); + let mut parser = Parser::new(&GenericDialect {}).try_with_sql(&sql).unwrap(); + assert_eq!( + parser.parse_statement().unwrap().span(), + Span::new( + Location::new(1, 1), + Location::new(4, sql_line4.len() as u64 + 1) + ) + ); +} + +#[test] +fn test_conditional_statement_span() { + let sql = "IF 1=1 THEN SELECT 1; ELSEIF 1=2 THEN SELECT 2; ELSE SELECT 3; END IF"; + let mut parser = Parser::new(&GenericDialect {}).try_with_sql(sql).unwrap(); + match parser.parse_statement().unwrap() { + Statement::If(IfStatement { + if_block, + elseif_blocks, + else_block, + .. 
+ }) => { + assert_eq!( + Span::new(Location::new(1, 1), Location::new(1, 21)), + if_block.span() + ); + assert_eq!( + Span::new(Location::new(1, 23), Location::new(1, 47)), + elseif_blocks[0].span() + ); + assert_eq!( + Span::new(Location::new(1, 49), Location::new(1, 62)), + else_block.unwrap().span() + ); + } + stmt => panic!("Unexpected statement: {:?}", stmt), + } +} + +#[test] +fn parse_raise_statement() { + let sql = "RAISE USING MESSAGE = 42"; + let Statement::Raise(stmt) = verified_stmt(sql) else { + unreachable!() + }; + assert_eq!( + Some(RaiseStatementValue::UsingMessage(Expr::value(number("42")))), + stmt.value + ); + + verified_stmt("RAISE USING MESSAGE = 'error'"); + verified_stmt("RAISE myerror"); + verified_stmt("RAISE 42"); + verified_stmt("RAISE using"); + verified_stmt("RAISE"); + + assert_eq!( + ParserError::ParserError("Expected: =, found: error".to_string()), + parse_sql_statements("RAISE USING MESSAGE error").unwrap_err() + ); +} + +#[test] +fn test_lambdas() { + let dialects = all_dialects_where(|d| d.supports_lambda_functions()); + + #[rustfmt::skip] + let sql = concat!( + "SELECT array_sort(array('Hello', 'World'), ", + "(p1, p2) -> CASE WHEN p1 = p2 THEN 0 ", + "WHEN reverse(p1) < reverse(p2) THEN -1 ", + "ELSE 1 END)", + ); + pretty_assertions::assert_eq!( + SelectItem::UnnamedExpr(call( + "array_sort", + [ + call( + "array", + [ + Expr::Value( + (Value::SingleQuotedString("Hello".to_owned())).with_empty_span() + ), + Expr::Value( + (Value::SingleQuotedString("World".to_owned())).with_empty_span() + ) + ] + ), + Expr::Lambda(LambdaFunction { + params: OneOrManyWithParens::Many(vec![Ident::new("p1"), Ident::new("p2")]), + body: Box::new(Expr::Case { + operand: None, + conditions: vec![ + CaseWhen { + condition: Expr::BinaryOp { + left: Box::new(Expr::Identifier(Ident::new("p1"))), + op: BinaryOperator::Eq, + right: Box::new(Expr::Identifier(Ident::new("p2"))) + }, + result: Expr::value(number("0")), + }, + CaseWhen { + condition: Expr::BinaryOp { + left: Box::new(call( + "reverse", + [Expr::Identifier(Ident::new("p1"))] + )), + op: BinaryOperator::Lt, + right: Box::new(call( + "reverse", + [Expr::Identifier(Ident::new("p2"))] + )), + }, + result: Expr::UnaryOp { + op: UnaryOperator::Minus, + expr: Box::new(Expr::value(number("1"))) + } + }, + ], + else_result: Some(Box::new(Expr::value(number("1")))), + }) + }) + ] + )), + dialects.verified_only_select(sql).projection[0] + ); + + dialects.verified_expr( + "map_zip_with(map(1, 'a', 2, 'b'), map(1, 'x', 2, 'y'), (k, v1, v2) -> concat(v1, v2))", + ); + dialects.verified_expr("transform(array(1, 2, 3), x -> x + 1)"); +} + +#[test] +fn test_select_from_first() { + let dialects = all_dialects_where(|d| d.supports_from_first_select()); + let q1 = "FROM capitals"; + let q2 = "FROM capitals SELECT *"; + + for (q, flavor, projection) in [ + (q1, SelectFlavor::FromFirstNoSelect, vec![]), + ( + q2, + SelectFlavor::FromFirst, + vec![SelectItem::Wildcard(WildcardAdditionalOptions::default())], + ), + ] { + let ast = dialects.verified_query(q); + let expected = Query { + with: None, + body: Box::new(SetExpr::Select(Box::new(Select { + select_token: AttachedToken::empty(), + distinct: None, + top: None, + projection, + top_before_distinct: false, + into: None, + from: vec![TableWithJoins { + relation: table_from_name(ObjectName::from(vec![Ident { + value: "capitals".to_string(), + quote_style: None, + span: Span::empty(), + }])), + joins: vec![], + }], + lateral_views: vec![], + prewhere: None, + selection: None, + group_by: 
GroupByExpr::Expressions(vec![], vec![]), + cluster_by: vec![], + distribute_by: vec![], + sort_by: vec![], + having: None, + named_window: vec![], + window_before_qualify: false, + qualify: None, + value_table_mode: None, + connect_by: None, + flavor, + }))), + order_by: None, + limit_clause: None, + fetch: None, + locks: vec![], + for_clause: None, + settings: None, + format_clause: None, + pipe_operators: vec![], + }; + assert_eq!(expected, ast); + assert_eq!(ast.to_string(), q); + } +} + +#[test] +fn test_geometric_unary_operators() { + // Number of points in path or polygon + let sql = "# path '((1,0),(0,1),(-1,0))'"; + assert!(matches!( + all_dialects_where(|d| d.supports_geometric_types()).verified_expr(sql), + Expr::UnaryOp { + op: UnaryOperator::Hash, + .. + } + )); + + // Length or circumference + let sql = "@-@ path '((0,0),(1,0))'"; + assert!(matches!( + all_dialects_where(|d| d.supports_geometric_types()).verified_expr(sql), + Expr::UnaryOp { + op: UnaryOperator::AtDashAt, + .. + } + )); + + // Center + let sql = "@@ circle '((0,0),10)'"; + assert!(matches!( + all_dialects_where(|d| d.supports_geometric_types()).verified_expr(sql), + Expr::UnaryOp { + op: UnaryOperator::DoubleAt, + .. + } + )); + // Is horizontal? + let sql = "?- lseg '((-1,0),(1,0))'"; + assert!(matches!( + all_dialects_where(|d| d.supports_geometric_types()).verified_expr(sql), + Expr::UnaryOp { + op: UnaryOperator::QuestionDash, + .. + } + )); + + // Is vertical? + let sql = "?| lseg '((-1,0),(1,0))'"; + assert!(matches!( + all_dialects_where(|d| d.supports_geometric_types()).verified_expr(sql), + Expr::UnaryOp { + op: UnaryOperator::QuestionPipe, + .. + } + )); +} + +#[test] +fn test_geometry_type() { + let sql = "point '1,2'"; + assert_eq!( + all_dialects_where(|d| d.supports_geometric_types()).verified_expr(sql), + Expr::TypedString { + data_type: DataType::GeometricType(GeometricTypeKind::Point), + value: Value::SingleQuotedString("1,2".to_string()), + } + ); + + let sql = "line '1,2,3,4'"; + assert_eq!( + all_dialects_where(|d| d.supports_geometric_types()).verified_expr(sql), + Expr::TypedString { + data_type: DataType::GeometricType(GeometricTypeKind::Line), + value: Value::SingleQuotedString("1,2,3,4".to_string()), + } + ); + + let sql = "path '1,2,3,4'"; + assert_eq!( + all_dialects_where(|d| d.supports_geometric_types()).verified_expr(sql), + Expr::TypedString { + data_type: DataType::GeometricType(GeometricTypeKind::GeometricPath), + value: Value::SingleQuotedString("1,2,3,4".to_string()), + } + ); + let sql = "box '1,2,3,4'"; + assert_eq!( + all_dialects_where(|d| d.supports_geometric_types()).verified_expr(sql), + Expr::TypedString { + data_type: DataType::GeometricType(GeometricTypeKind::GeometricBox), + value: Value::SingleQuotedString("1,2,3,4".to_string()), + } + ); + + let sql = "circle '1,2,3'"; + assert_eq!( + all_dialects_where(|d| d.supports_geometric_types()).verified_expr(sql), + Expr::TypedString { + data_type: DataType::GeometricType(GeometricTypeKind::Circle), + value: Value::SingleQuotedString("1,2,3".to_string()), + } + ); + + let sql = "polygon '1,2,3,4'"; + assert_eq!( + all_dialects_where(|d| d.supports_geometric_types()).verified_expr(sql), + Expr::TypedString { + data_type: DataType::GeometricType(GeometricTypeKind::Polygon), + value: Value::SingleQuotedString("1,2,3,4".to_string()), + } + ); + let sql = "lseg '1,2,3,4'"; + assert_eq!( + all_dialects_where(|d| d.supports_geometric_types()).verified_expr(sql), + Expr::TypedString { + data_type: 
DataType::GeometricType(GeometricTypeKind::LineSegment),
+ value: Value::SingleQuotedString("1,2,3,4".to_string()),
+ }
+ );
+}
+#[test]
+fn test_geometric_binary_operators() {
+ // Translation plus
+ let sql = "box '((0,0),(1,1))' + point '(2.0,0)'";
+ assert!(matches!(
+ all_dialects_where(|d| d.supports_geometric_types()).verified_expr(sql),
+ Expr::BinaryOp {
+ op: BinaryOperator::Plus,
+ ..
+ }
+ ));
+ // Translation minus
+ let sql = "box '((0,0),(1,1))' - point '(2.0,0)'";
+ assert!(matches!(
+ all_dialects_where(|d| d.supports_geometric_types()).verified_expr(sql),
+ Expr::BinaryOp {
+ op: BinaryOperator::Minus,
+ ..
+ }
+ ));
+
+ // Scaling multiply
+ let sql = "box '((0,0),(1,1))' * point '(2.0,0)'";
+ assert!(matches!(
+ all_dialects_where(|d| d.supports_geometric_types()).verified_expr(sql),
+ Expr::BinaryOp {
+ op: BinaryOperator::Multiply,
+ ..
+ }
+ ));
+
+ // Scaling divide
+ let sql = "box '((0,0),(1,1))' / point '(2.0,0)'";
+ assert!(matches!(
+ all_dialects_where(|d| d.supports_geometric_types()).verified_expr(sql),
+ Expr::BinaryOp {
+ op: BinaryOperator::Divide,
+ ..
+ }
+ ));
+
+ // Intersection
+ let sql = "'((1,-1),(-1,1))' # '((1,1),(-1,-1))'";
+ assert!(matches!(
+ all_dialects_where(|d| d.supports_geometric_types()).verified_expr(sql),
+ Expr::BinaryOp {
+ op: BinaryOperator::PGBitwiseXor,
+ ..
+ }
+ ));
+
+ // Point of closest proximity
+ let sql = "point '(0,0)' ## lseg '((2,0),(0,2))'";
+ assert!(matches!(
+ all_dialects_where(|d| d.supports_geometric_types()).verified_expr(sql),
+ Expr::BinaryOp {
+ op: BinaryOperator::DoubleHash,
+ ..
+ }
+ ));
+
+ // Overlaps?
+ let sql = "box '((0,0),(1,1))' && box '((0,0),(2,2))'";
+ assert!(matches!(
+ all_dialects_where(|d| d.supports_geometric_types()).verified_expr(sql),
+ Expr::BinaryOp {
+ op: BinaryOperator::PGOverlap,
+ ..
+ }
+ ));
+
+ // Overlaps to left?
+ let sql = "box '((0,0),(1,1))' &< box '((0,0),(2,2))'";
+ assert!(matches!(
+ all_dialects_where(|d| d.supports_geometric_types()).verified_expr(sql),
+ Expr::BinaryOp {
+ op: BinaryOperator::AndLt,
+ ..
+ }
+ ));
+
+ // Overlaps to right?
+ let sql = "box '((0,0),(3,3))' &> box '((0,0),(2,2))'";
+ assert!(matches!(
+ all_dialects_where(|d| d.supports_geometric_types()).verified_expr(sql),
+ Expr::BinaryOp {
+ op: BinaryOperator::AndGt,
+ ..
+ }
+ ));
+
+ // Distance between
+ let sql = "circle '((0,0),1)' <-> circle '((5,0),1)'";
+ assert!(matches!(
+ all_dialects_where(|d| d.supports_geometric_types()).verified_expr(sql),
+ Expr::BinaryOp {
+ op: BinaryOperator::LtDashGt,
+ ..
+ }
+ ));
+
+ // Is left of?
+ let sql = "circle '((0,0),1)' << circle '((5,0),1)'";
+ assert!(matches!(
+ all_dialects_where(|d| d.supports_geometric_types()).verified_expr(sql),
+ Expr::BinaryOp {
+ op: BinaryOperator::PGBitwiseShiftLeft,
+ ..
+ }
+ ));
+
+ // Is right of?
+ let sql = "circle '((5,0),1)' >> circle '((0,0),1)'";
+ assert!(matches!(
+ all_dialects_where(|d| d.supports_geometric_types()).verified_expr(sql),
+ Expr::BinaryOp {
+ op: BinaryOperator::PGBitwiseShiftRight,
+ ..
+ }
+ ));
+
+ // Is below?
+ let sql = "circle '((0,0),1)' <^ circle '((0,5),1)'";
+ assert!(matches!(
+ all_dialects_where(|d| d.supports_geometric_types()).verified_expr(sql),
+ Expr::BinaryOp {
+ op: BinaryOperator::LtCaret,
+ ..
+ }
+ ));
+
+ // Intersects or overlaps
+ let sql = "lseg '((-1,0),(1,0))' ?# box '((-2,-2),(2,2))'";
+ assert!(matches!(
+ all_dialects_where(|d| d.supports_geometric_types()).verified_expr(sql),
+ Expr::BinaryOp {
+ op: BinaryOperator::QuestionHash,
+ ..
+ } + )); + + // Is horizontal? + let sql = "point '(1,0)' ?- point '(0,0)'"; + assert!(matches!( + all_dialects_where(|d| d.supports_geometric_types()).verified_expr(sql), + Expr::BinaryOp { + op: BinaryOperator::QuestionDash, + .. + } + )); + + // Is perpendicular? + let sql = "lseg '((0,0),(0,1))' ?-| lseg '((0,0),(1,0))'"; + assert!(matches!( + all_dialects_where(|d| d.supports_geometric_types()).verified_expr(sql), + Expr::BinaryOp { + op: BinaryOperator::QuestionDashPipe, + .. + } + )); + + // Is vertical? + let sql = "point '(0,1)' ?| point '(0,0)'"; + assert!(matches!( + all_dialects_where(|d| d.supports_geometric_types()).verified_expr(sql), + Expr::BinaryOp { + op: BinaryOperator::QuestionPipe, + .. + } + )); + + // Are parallel? + let sql = "lseg '((-1,0),(1,0))' ?|| lseg '((-1,2),(1,2))'"; + assert!(matches!( + all_dialects_where(|d| d.supports_geometric_types()).verified_expr(sql), + Expr::BinaryOp { + op: BinaryOperator::QuestionDoublePipe, + .. + } + )); + + // Contained or on? + let sql = "point '(1,1)' @ circle '((0,0),2)'"; + assert!(matches!( + all_dialects_where(|d| d.supports_geometric_types()).verified_expr(sql), + Expr::BinaryOp { + op: BinaryOperator::At, + .. + } + )); + + // + // Same as? + let sql = "polygon '((0,0),(1,1))' ~= polygon '((1,1),(0,0))'"; + assert!(matches!( + all_dialects_where(|d| d.supports_geometric_types()).verified_expr(sql), + Expr::BinaryOp { + op: BinaryOperator::TildeEq, + .. + } + )); + + // Is strictly below? + let sql = "box '((0,0),(3,3))' <<| box '((3,4),(5,5))'"; + assert!(matches!( + all_dialects_where(|d| d.supports_geometric_types()).verified_expr(sql), + Expr::BinaryOp { + op: BinaryOperator::LtLtPipe, + .. + } + )); + + // Is strictly above? + let sql = "box '((3,4),(5,5))' |>> box '((0,0),(3,3))'"; + assert!(matches!( + all_dialects_where(|d| d.supports_geometric_types()).verified_expr(sql), + Expr::BinaryOp { + op: BinaryOperator::PipeGtGt, + .. + } + )); + + // Does not extend above? + let sql = "box '((0,0),(1,1))' &<| box '((0,0),(2,2))'"; + assert!(matches!( + all_dialects_where(|d| d.supports_geometric_types()).verified_expr(sql), + Expr::BinaryOp { + op: BinaryOperator::AndLtPipe, + .. + } + )); + + // Does not extend below? + let sql = "box '((0,0),(3,3))' |&> box '((0,0),(2,2))'"; + assert!(matches!( + all_dialects_where(|d| d.supports_geometric_types()).verified_expr(sql), + Expr::BinaryOp { + op: BinaryOperator::PipeAndGt, + .. 
+ }
+ ));
+}
+
+#[test]
+fn parse_array_type_def_with_brackets() {
+ let dialects = all_dialects_where(|d| d.supports_array_typedef_with_brackets());
+ dialects.verified_stmt("SELECT x::INT[]");
+ dialects.verified_stmt("SELECT STRING_TO_ARRAY('1,2,3', ',')::INT[3]");
+}
+
+#[test]
+fn parse_set_names() {
+ let dialects = all_dialects_where(|d| d.supports_set_names());
+ dialects.verified_stmt("SET NAMES 'UTF8'");
+ dialects.verified_stmt("SET NAMES 'utf8'");
+ dialects.verified_stmt("SET NAMES UTF8 COLLATE bogus");
+}
+
+#[test]
+fn parse_pipeline_operator() {
+ let dialects = all_dialects_where(|d| d.supports_pipe_operator());
+
+ // select pipe operator
+ dialects.verified_stmt("SELECT * FROM users |> SELECT id");
+ dialects.verified_stmt("SELECT * FROM users |> SELECT id, name");
+ dialects.verified_query_with_canonical(
+ "SELECT * FROM users |> SELECT id user_id",
+ "SELECT * FROM users |> SELECT id AS user_id",
+ );
+ dialects.verified_stmt("SELECT * FROM users |> SELECT id AS user_id");
+
+ // extend pipe operator
+ dialects.verified_stmt("SELECT * FROM users |> EXTEND id + 1 AS new_id");
+ dialects.verified_stmt("SELECT * FROM users |> EXTEND id AS new_id, name AS new_name");
+ dialects.verified_query_with_canonical(
+ "SELECT * FROM users |> EXTEND id user_id",
+ "SELECT * FROM users |> EXTEND id AS user_id",
+ );
+
+ // set pipe operator
+ dialects.verified_stmt("SELECT * FROM users |> SET id = id + 1");
+ dialects.verified_stmt("SELECT * FROM users |> SET id = id + 1, name = name + ' Doe'");
+
+ // drop pipe operator
+ dialects.verified_stmt("SELECT * FROM users |> DROP id");
+ dialects.verified_stmt("SELECT * FROM users |> DROP id, name");
+
+ // as pipe operator
+ dialects.verified_stmt("SELECT * FROM users |> AS new_users");
+
+ // limit pipe operator
+ dialects.verified_stmt("SELECT * FROM users |> LIMIT 10");
+ dialects.verified_stmt("SELECT * FROM users |> LIMIT 10 OFFSET 5");
+ dialects.verified_stmt("SELECT * FROM users |> LIMIT 10 |> LIMIT 5");
+ dialects.verified_stmt("SELECT * FROM users |> LIMIT 10 |> WHERE true");
+
+ // where pipe operator
+ dialects.verified_stmt("SELECT * FROM users |> WHERE id = 1");
+ dialects.verified_stmt("SELECT * FROM users |> WHERE id = 1 AND name = 'John'");
+ dialects.verified_stmt("SELECT * FROM users |> WHERE id = 1 OR name = 'John'");
+
+ // aggregate pipe operator over the full table
+ dialects.verified_stmt("SELECT * FROM users |> AGGREGATE COUNT(*)");
+ dialects.verified_query_with_canonical(
+ "SELECT * FROM users |> AGGREGATE COUNT(*) total_users",
+ "SELECT * FROM users |> AGGREGATE COUNT(*) AS total_users",
+ );
+ dialects.verified_stmt("SELECT * FROM users |> AGGREGATE COUNT(*) AS total_users");
+ dialects.verified_stmt("SELECT * FROM users |> AGGREGATE COUNT(*), MIN(id)");
+
+ // aggregate pipe operator with grouping
+ dialects.verified_stmt(
+ "SELECT * FROM users |> AGGREGATE SUM(o_totalprice) AS price, COUNT(*) AS cnt GROUP BY EXTRACT(YEAR FROM o_orderdate) AS year",
+ );
+ dialects.verified_stmt(
+ "SELECT * FROM users |> AGGREGATE GROUP BY EXTRACT(YEAR FROM o_orderdate) AS year",
+ );
+ dialects
+ .verified_stmt("SELECT * FROM users |> AGGREGATE GROUP BY EXTRACT(YEAR FROM o_orderdate)");
+ dialects.verified_stmt("SELECT * FROM users |> AGGREGATE GROUP BY a, b");
+ dialects.verified_stmt("SELECT * FROM users |> AGGREGATE SUM(c) GROUP BY a, b");
+ dialects.verified_stmt("SELECT * FROM users |> AGGREGATE SUM(c) ASC");
+
+ // order by pipe operator
+ dialects.verified_stmt("SELECT * FROM users |> ORDER BY id ASC");
+
dialects.verified_stmt("SELECT * FROM users |> ORDER BY id DESC"); + dialects.verified_stmt("SELECT * FROM users |> ORDER BY id DESC, name ASC"); + + // many pipes + dialects.verified_stmt( + "SELECT * FROM CustomerOrders |> AGGREGATE SUM(cost) AS total_cost GROUP BY customer_id, state, item_type |> EXTEND COUNT(*) OVER (PARTITION BY customer_id) AS num_orders |> WHERE num_orders > 1 |> AGGREGATE AVG(total_cost) AS average GROUP BY state DESC, item_type ASC", + ); +} + +#[test] +fn parse_multiple_set_statements() -> Result<(), ParserError> { + let dialects = all_dialects_where(|d| d.supports_comma_separated_set_assignments()); + let stmt = dialects.verified_stmt("SET @a = 1, b = 2"); + + match stmt { + Statement::Set(Set::MultipleAssignments { assignments }) => { + assert_eq!( + assignments, + vec![ + SetAssignment { + scope: None, + name: ObjectName::from(vec!["@a".into()]), + value: Expr::value(number("1")) + }, + SetAssignment { + scope: None, + name: ObjectName::from(vec!["b".into()]), + value: Expr::value(number("2")) + } + ] + ); + } + _ => panic!("Expected SetVariable with 2 variables and 2 values"), + }; + + let stmt = dialects.verified_stmt("SET GLOBAL @a = 1, SESSION b = 2, LOCAL c = 3, d = 4"); + + match stmt { + Statement::Set(Set::MultipleAssignments { assignments }) => { + assert_eq!( + assignments, + vec![ + SetAssignment { + scope: Some(ContextModifier::Global), + name: ObjectName::from(vec!["@a".into()]), + value: Expr::value(number("1")) + }, + SetAssignment { + scope: Some(ContextModifier::Session), + name: ObjectName::from(vec!["b".into()]), + value: Expr::value(number("2")) + }, + SetAssignment { + scope: Some(ContextModifier::Local), + name: ObjectName::from(vec!["c".into()]), + value: Expr::value(number("3")) + }, + SetAssignment { + scope: None, + name: ObjectName::from(vec!["d".into()]), + value: Expr::value(number("4")) + } + ] + ); + } + _ => panic!("Expected MultipleAssignments with 4 scoped variables and 4 values"), + }; + + Ok(()) +} + +#[test] +fn parse_set_time_zone_alias() { + match all_dialects().verified_stmt("SET TIME ZONE 'UTC'") { + Statement::Set(Set::SetTimeZone { local, value }) => { + assert!(!local); + assert_eq!( + value, + Expr::Value((Value::SingleQuotedString("UTC".into())).with_empty_span()) + ); + } + _ => unreachable!(), + } +} + +#[test] +fn parse_return() { + let stmt = all_dialects().verified_stmt("RETURN"); + assert_eq!(stmt, Statement::Return(ReturnStatement { value: None })); + + let _ = all_dialects().verified_stmt("RETURN 1"); +} + +#[test] +fn test_open() { + let open_cursor = "OPEN Employee_Cursor"; + let stmt = all_dialects().verified_stmt(open_cursor); + assert_eq!( + stmt, + Statement::Open(OpenStatement { + cursor_name: Ident::new("Employee_Cursor"), + }) + ); +} diff --git a/tests/sqlparser_custom_dialect.rs b/tests/sqlparser_custom_dialect.rs index e9ca82aba..cee604aca 100644 --- a/tests/sqlparser_custom_dialect.rs +++ b/tests/sqlparser_custom_dialect.rs @@ -41,7 +41,7 @@ fn custom_prefix_parser() -> Result<(), ParserError> { fn parse_prefix(&self, parser: &mut Parser) -> Option> { if parser.consume_token(&Token::Number("1".to_string(), false)) { - Some(Ok(Expr::Value(Value::Null))) + Some(Ok(Expr::Value(Value::Null.with_empty_span()))) } else { None } @@ -115,7 +115,11 @@ fn custom_statement_parser() -> Result<(), ParserError> { for _ in 0..3 { let _ = parser.next_token(); } - Some(Ok(Statement::Commit { chain: false })) + Some(Ok(Statement::Commit { + chain: false, + end: false, + modifier: None, + })) } else { None } diff 
--git a/tests/sqlparser_databricks.rs b/tests/sqlparser_databricks.rs index 7b917bd06..88aae499a 100644 --- a/tests/sqlparser_databricks.rs +++ b/tests/sqlparser_databricks.rs @@ -47,7 +47,9 @@ fn test_databricks_identifiers() { databricks() .verified_only_select(r#"SELECT "Ä""#) .projection[0], - SelectItem::UnnamedExpr(Expr::Value(Value::DoubleQuotedString("Ä".to_owned()))) + SelectItem::UnnamedExpr(Expr::Value( + (Value::DoubleQuotedString("Ä".to_owned())).with_empty_span() + )) ); } @@ -62,9 +64,9 @@ fn test_databricks_exists() { call( "array", [ - Expr::Value(number("1")), - Expr::Value(number("2")), - Expr::Value(number("3")) + Expr::value(number("1")), + Expr::value(number("2")), + Expr::value(number("3")) ] ), Expr::Lambda(LambdaFunction { @@ -99,8 +101,8 @@ fn test_databricks_lambdas() { call( "array", [ - Expr::Value(Value::SingleQuotedString("Hello".to_owned())), - Expr::Value(Value::SingleQuotedString("World".to_owned())) + Expr::value(Value::SingleQuotedString("Hello".to_owned())), + Expr::value(Value::SingleQuotedString("World".to_owned())) ] ), Expr::Lambda(LambdaFunction { @@ -108,31 +110,33 @@ fn test_databricks_lambdas() { body: Box::new(Expr::Case { operand: None, conditions: vec![ - Expr::BinaryOp { - left: Box::new(Expr::Identifier(Ident::new("p1"))), - op: BinaryOperator::Eq, - right: Box::new(Expr::Identifier(Ident::new("p2"))) + CaseWhen { + condition: Expr::BinaryOp { + left: Box::new(Expr::Identifier(Ident::new("p1"))), + op: BinaryOperator::Eq, + right: Box::new(Expr::Identifier(Ident::new("p2"))) + }, + result: Expr::value(number("0")) + }, + CaseWhen { + condition: Expr::BinaryOp { + left: Box::new(call( + "reverse", + [Expr::Identifier(Ident::new("p1"))] + )), + op: BinaryOperator::Lt, + right: Box::new(call( + "reverse", + [Expr::Identifier(Ident::new("p2"))] + )), + }, + result: Expr::UnaryOp { + op: UnaryOperator::Minus, + expr: Box::new(Expr::value(number("1"))) + } }, - Expr::BinaryOp { - left: Box::new(call( - "reverse", - [Expr::Identifier(Ident::new("p1"))] - )), - op: BinaryOperator::Lt, - right: Box::new(call( - "reverse", - [Expr::Identifier(Ident::new("p2"))] - )) - } - ], - results: vec![ - Expr::Value(number("0")), - Expr::UnaryOp { - op: UnaryOperator::Minus, - expr: Box::new(Expr::Value(number("1"))) - } ], - else_result: Some(Box::new(Expr::Value(number("1")))) + else_result: Some(Box::new(Expr::value(number("1")))) }) }) ] @@ -152,12 +156,12 @@ fn test_values_clause() { explicit_row: false, rows: vec![ vec![ - Expr::Value(Value::DoubleQuotedString("one".to_owned())), - Expr::Value(number("1")), + Expr::Value((Value::DoubleQuotedString("one".to_owned())).with_empty_span()), + Expr::value(number("1")), ], vec![ - Expr::Value(Value::SingleQuotedString("two".to_owned())), - Expr::Value(number("2")), + Expr::Value((Value::SingleQuotedString("two".to_owned())).with_empty_span()), + Expr::value(number("2")), ], ], }; @@ -185,15 +189,9 @@ fn test_values_clause() { "SELECT * FROM values", )); assert_eq!( - Some(&TableFactor::Table { - name: ObjectName(vec![Ident::new("values")]), - alias: None, - args: None, - with_hints: vec![], - version: None, - partitions: vec![], - with_ordinality: false, - }), + Some(&table_from_name(ObjectName::from(vec![Ident::new( + "values" + )]))), query .body .as_select() @@ -213,7 +211,7 @@ fn parse_use() { // Test single identifier without quotes assert_eq!( databricks().verified_stmt(&format!("USE {}", object_name)), - Statement::Use(Use::Object(ObjectName(vec![Ident::new( + 
Statement::Use(Use::Object(ObjectName::from(vec![Ident::new( object_name.to_string() )]))) ); @@ -221,7 +219,7 @@ fn parse_use() { // Test single identifier with different type of quotes assert_eq!( databricks().verified_stmt(&format!("USE {0}{1}{0}", quote, object_name)), - Statement::Use(Use::Object(ObjectName(vec![Ident::with_quote( + Statement::Use(Use::Object(ObjectName::from(vec![Ident::with_quote( quote, object_name.to_string(), )]))) @@ -233,21 +231,21 @@ fn parse_use() { // Test single identifier with keyword and different type of quotes assert_eq!( databricks().verified_stmt(&format!("USE CATALOG {0}my_catalog{0}", quote)), - Statement::Use(Use::Catalog(ObjectName(vec![Ident::with_quote( + Statement::Use(Use::Catalog(ObjectName::from(vec![Ident::with_quote( quote, "my_catalog".to_string(), )]))) ); assert_eq!( databricks().verified_stmt(&format!("USE DATABASE {0}my_database{0}", quote)), - Statement::Use(Use::Database(ObjectName(vec![Ident::with_quote( + Statement::Use(Use::Database(ObjectName::from(vec![Ident::with_quote( quote, "my_database".to_string(), )]))) ); assert_eq!( databricks().verified_stmt(&format!("USE SCHEMA {0}my_schema{0}", quote)), - Statement::Use(Use::Schema(ObjectName(vec![Ident::with_quote( + Statement::Use(Use::Schema(ObjectName::from(vec![Ident::with_quote( quote, "my_schema".to_string(), )]))) @@ -257,15 +255,19 @@ fn parse_use() { // Test single identifier with keyword and no quotes assert_eq!( databricks().verified_stmt("USE CATALOG my_catalog"), - Statement::Use(Use::Catalog(ObjectName(vec![Ident::new("my_catalog")]))) + Statement::Use(Use::Catalog(ObjectName::from(vec![Ident::new( + "my_catalog" + )]))) ); assert_eq!( databricks().verified_stmt("USE DATABASE my_schema"), - Statement::Use(Use::Database(ObjectName(vec![Ident::new("my_schema")]))) + Statement::Use(Use::Database(ObjectName::from(vec![Ident::new( + "my_schema" + )]))) ); assert_eq!( databricks().verified_stmt("USE SCHEMA my_schema"), - Statement::Use(Use::Schema(ObjectName(vec![Ident::new("my_schema")]))) + Statement::Use(Use::Schema(ObjectName::from(vec![Ident::new("my_schema")]))) ); // Test invalid syntax - missing identifier @@ -277,3 +279,81 @@ fn parse_use() { ); } } + +#[test] +fn parse_databricks_struct_function() { + assert_eq!( + databricks_and_generic() + .verified_only_select("SELECT STRUCT(1, 'foo')") + .projection[0], + SelectItem::UnnamedExpr(Expr::Struct { + values: vec![ + Expr::value(number("1")), + Expr::Value((Value::SingleQuotedString("foo".to_string())).with_empty_span()) + ], + fields: vec![] + }) + ); + assert_eq!( + databricks_and_generic() + .verified_only_select("SELECT STRUCT(1 AS one, 'foo' AS foo, false)") + .projection[0], + SelectItem::UnnamedExpr(Expr::Struct { + values: vec![ + Expr::Named { + expr: Expr::value(number("1")).into(), + name: Ident::new("one") + }, + Expr::Named { + expr: Expr::Value( + (Value::SingleQuotedString("foo".to_string())).with_empty_span() + ) + .into(), + name: Ident::new("foo") + }, + Expr::Value((Value::Boolean(false)).with_empty_span()) + ], + fields: vec![] + }) + ); +} + +#[test] +fn data_type_timestamp_ntz() { + // Literal + assert_eq!( + databricks().verified_expr("TIMESTAMP_NTZ '2025-03-29T18:52:00'"), + Expr::TypedString { + data_type: DataType::TimestampNtz, + value: Value::SingleQuotedString("2025-03-29T18:52:00".to_owned()) + } + ); + + // Cast + assert_eq!( + databricks().verified_expr("(created_at)::TIMESTAMP_NTZ"), + Expr::Cast { + kind: CastKind::DoubleColon, + expr: 
Box::new(Expr::Nested(Box::new(Expr::Identifier( + "created_at".into() + )))), + data_type: DataType::TimestampNtz, + format: None + } + ); + + // Column definition + match databricks().verified_stmt("CREATE TABLE foo (x TIMESTAMP_NTZ)") { + Statement::CreateTable(CreateTable { columns, .. }) => { + assert_eq!( + columns, + vec![ColumnDef { + name: "x".into(), + data_type: DataType::TimestampNtz, + options: vec![], + }] + ); + } + s => panic!("Unexpected statement: {:?}", s), + } +} diff --git a/tests/sqlparser_duckdb.rs b/tests/sqlparser_duckdb.rs index a4109b0a3..8e4983655 100644 --- a/tests/sqlparser_duckdb.rs +++ b/tests/sqlparser_duckdb.rs @@ -18,6 +18,8 @@ #[macro_use] mod test_utils; +use helpers::attached_token::AttachedToken; +use sqlparser::tokenizer::Span; use test_utils::*; use sqlparser::ast::*; @@ -58,7 +60,6 @@ fn test_struct() { vec![ColumnDef { name: "s".into(), data_type: struct_type1.clone(), - collation: None, options: vec![], }] ); @@ -73,7 +74,6 @@ fn test_struct() { Box::new(struct_type1), None )), - collation: None, options: vec![], }] ); @@ -118,7 +118,6 @@ fn test_struct() { Box::new(struct_type2), None )), - collation: None, options: vec![], }] ); @@ -158,7 +157,7 @@ fn test_select_wildcard_with_exclude() { let select = duckdb().verified_only_select("SELECT name.* EXCLUDE department_id FROM employee_table"); let expected = SelectItem::QualifiedWildcard( - ObjectName(vec![Ident::new("name")]), + SelectItemQualifiedWildcardKind::ObjectName(ObjectName::from(vec![Ident::new("name")])), WildcardAdditionalOptions { opt_exclude: Some(ExcludeSelectItem::Single(Ident::new("department_id"))), ..Default::default() @@ -189,7 +188,7 @@ fn test_create_macro() { let expected = Statement::CreateMacro { or_replace: false, temporary: false, - name: ObjectName(vec![Ident::new("schema"), Ident::new("add")]), + name: ObjectName::from(vec![Ident::new("schema"), Ident::new("add")]), args: Some(vec![MacroArg::new("a"), MacroArg::new("b")]), definition: MacroDefinition::Expr(Expr::BinaryOp { left: Box::new(Expr::Identifier(Ident::new("a"))), @@ -206,12 +205,12 @@ fn test_create_macro_default_args() { let expected = Statement::CreateMacro { or_replace: false, temporary: false, - name: ObjectName(vec![Ident::new("add_default")]), + name: ObjectName::from(vec![Ident::new("add_default")]), args: Some(vec![ MacroArg::new("a"), MacroArg { name: Ident::new("b"), - default_expr: Some(Expr::Value(number("5"))), + default_expr: Some(Expr::value(number("5"))), }, ]), definition: MacroDefinition::Expr(Expr::BinaryOp { @@ -234,7 +233,7 @@ fn test_create_table_macro() { let expected = Statement::CreateMacro { or_replace: true, temporary: true, - name: ObjectName(vec![Ident::new("dynamic_table")]), + name: ObjectName::from(vec![Ident::new("dynamic_table")]), args: Some(vec![ MacroArg::new("col1_value"), MacroArg::new("col2_value"), @@ -259,29 +258,18 @@ fn test_select_union_by_name() { op: SetOperator::Union, set_quantifier: *expected_quantifier, left: Box::::new(SetExpr::Select(Box::new(Select { + select_token: AttachedToken::empty(), distinct: None, top: None, - projection: vec![SelectItem::Wildcard(WildcardAdditionalOptions { - opt_ilike: None, - opt_exclude: None, - opt_except: None, - opt_rename: None, - opt_replace: None, - })], + projection: vec![SelectItem::Wildcard(WildcardAdditionalOptions::default())], + top_before_distinct: false, into: None, from: vec![TableWithJoins { - relation: TableFactor::Table { - name: ObjectName(vec![Ident { - value: "capitals".to_string(), - quote_style: None, - 
}]), - alias: None, - args: None, - with_hints: vec![], - version: None, - partitions: vec![], - with_ordinality: false, - }, + relation: table_from_name(ObjectName::from(vec![Ident { + value: "capitals".to_string(), + quote_style: None, + span: Span::empty(), + }])), joins: vec![], }], lateral_views: vec![], @@ -297,31 +285,21 @@ fn test_select_union_by_name() { qualify: None, value_table_mode: None, connect_by: None, + flavor: SelectFlavor::Standard, }))), right: Box::::new(SetExpr::Select(Box::new(Select { + select_token: AttachedToken::empty(), distinct: None, top: None, - projection: vec![SelectItem::Wildcard(WildcardAdditionalOptions { - opt_ilike: None, - opt_exclude: None, - opt_except: None, - opt_rename: None, - opt_replace: None, - })], + projection: vec![SelectItem::Wildcard(WildcardAdditionalOptions::default())], + top_before_distinct: false, into: None, from: vec![TableWithJoins { - relation: TableFactor::Table { - name: ObjectName(vec![Ident { - value: "weather".to_string(), - quote_style: None, - }]), - alias: None, - args: None, - with_hints: vec![], - version: None, - partitions: vec![], - with_ordinality: false, - }, + relation: table_from_name(ObjectName::from(vec![Ident { + value: "weather".to_string(), + quote_style: None, + span: Span::empty(), + }])), joins: vec![], }], lateral_views: vec![], @@ -337,6 +315,7 @@ fn test_select_union_by_name() { qualify: None, value_table_mode: None, connect_by: None, + flavor: SelectFlavor::Standard, }))), }); assert_eq!(ast.body, expected); @@ -351,7 +330,8 @@ fn test_duckdb_install() { Statement::Install { extension_name: Ident { value: "tpch".to_string(), - quote_style: None + quote_style: None, + span: Span::empty() } } ); @@ -364,13 +344,40 @@ fn test_duckdb_load_extension() { Statement::Load { extension_name: Ident { value: "my_extension".to_string(), - quote_style: None + quote_style: None, + span: Span::empty() } }, stmt ); } +#[test] +fn test_duckdb_specific_int_types() { + let duckdb_dtypes = vec![ + ("UTINYINT", DataType::UTinyInt), + ("USMALLINT", DataType::USmallInt), + ("UBIGINT", DataType::UBigInt), + ("UHUGEINT", DataType::UHugeInt), + ("HUGEINT", DataType::HugeInt), + ]; + for (dtype_string, data_type) in duckdb_dtypes { + let sql = format!("SELECT 123::{}", dtype_string); + let select = duckdb().verified_only_select(&sql); + assert_eq!( + &Expr::Cast { + kind: CastKind::DoubleColon, + expr: Box::new(Expr::Value( + Value::Number("123".parse().unwrap(), false).with_empty_span() + )), + data_type: data_type.clone(), + format: None, + }, + expr_from_projection(&select.projection[0]) + ); + } +} + #[test] fn test_duckdb_struct_literal() { //struct literal syntax https://duckdb.org/docs/sql/data_types/struct#creating-structs @@ -382,15 +389,15 @@ fn test_duckdb_struct_literal() { &Expr::Dictionary(vec![ DictionaryField { key: Ident::with_quote('\'', "a"), - value: Box::new(Expr::Value(number("1"))), + value: Box::new(Expr::value(number("1"))), }, DictionaryField { key: Ident::with_quote('\'', "b"), - value: Box::new(Expr::Value(number("2"))), + value: Box::new(Expr::value(number("2"))), }, DictionaryField { key: Ident::with_quote('\'', "c"), - value: Box::new(Expr::Value(number("3"))), + value: Box::new(Expr::value(number("3"))), }, ],), expr_from_projection(&select.projection[0]) @@ -400,7 +407,9 @@ fn test_duckdb_struct_literal() { &Expr::Array(Array { elem: vec![Expr::Dictionary(vec![DictionaryField { key: Ident::with_quote('\'', "a"), - value: Box::new(Expr::Value(Value::SingleQuotedString("abc".to_string()))), + 
value: Box::new(Expr::Value( + (Value::SingleQuotedString("abc".to_string())).with_empty_span() + )), },],)], named: false }), @@ -410,7 +419,7 @@ fn test_duckdb_struct_literal() { &Expr::Dictionary(vec![ DictionaryField { key: Ident::with_quote('\'', "a"), - value: Box::new(Expr::Value(number("1"))), + value: Box::new(Expr::value(number("1"))), }, DictionaryField { key: Ident::with_quote('\'', "b"), @@ -429,11 +438,14 @@ fn test_duckdb_struct_literal() { &Expr::Dictionary(vec![ DictionaryField { key: Ident::with_quote('\'', "a"), - value: Expr::Value(number("1")).into(), + value: Expr::value(number("1")).into(), }, DictionaryField { key: Ident::with_quote('\'', "b"), - value: Expr::Value(Value::SingleQuotedString("abc".to_string())).into(), + value: Expr::Value( + (Value::SingleQuotedString("abc".to_string())).with_empty_span() + ) + .into(), }, ],), expr_from_projection(&select.projection[3]) @@ -450,7 +462,7 @@ fn test_duckdb_struct_literal() { key: Ident::with_quote('\'', "a"), value: Expr::Dictionary(vec![DictionaryField { key: Ident::with_quote('\'', "aa"), - value: Expr::Value(number("1")).into(), + value: Expr::value(number("1")).into(), }],) .into(), }],), @@ -605,23 +617,24 @@ fn test_duckdb_named_argument_function_with_assignment_operator() { let select = duckdb_and_generic().verified_only_select(sql); assert_eq!( &Expr::Function(Function { - name: ObjectName(vec![Ident::new("FUN")]), + name: ObjectName::from(vec![Ident::new("FUN")]), + uses_odbc_syntax: false, parameters: FunctionArguments::None, args: FunctionArguments::List(FunctionArgumentList { duplicate_treatment: None, args: vec![ FunctionArg::Named { name: Ident::new("a"), - arg: FunctionArgExpr::Expr(Expr::Value(Value::SingleQuotedString( - "1".to_owned() - ))), + arg: FunctionArgExpr::Expr(Expr::Value( + (Value::SingleQuotedString("1".to_owned())).with_empty_span() + )), operator: FunctionArgOperator::Assignment }, FunctionArg::Named { name: Ident::new("b"), - arg: FunctionArgExpr::Expr(Expr::Value(Value::SingleQuotedString( - "2".to_owned() - ))), + arg: FunctionArgExpr::Expr(Expr::Value( + (Value::SingleQuotedString("2".to_owned())).with_empty_span() + )), operator: FunctionArgOperator::Assignment }, ], @@ -647,18 +660,18 @@ fn test_array_index() { _ => panic!("Expected an expression with alias"), }; assert_eq!( - &Expr::Subscript { - expr: Box::new(Expr::Array(Array { + &Expr::CompoundFieldAccess { + root: Box::new(Expr::Array(Array { elem: vec![ - Expr::Value(Value::SingleQuotedString("a".to_owned())), - Expr::Value(Value::SingleQuotedString("b".to_owned())), - Expr::Value(Value::SingleQuotedString("c".to_owned())) + Expr::Value((Value::SingleQuotedString("a".to_owned())).with_empty_span()), + Expr::Value((Value::SingleQuotedString("b".to_owned())).with_empty_span()), + Expr::Value((Value::SingleQuotedString("c".to_owned())).with_empty_span()) ], named: false })), - subscript: Box::new(Subscript::Index { - index: Expr::Value(number("3")) - }) + access_chain: vec![AccessExpr::Subscript(Subscript::Index { + index: Expr::value(number("3")) + })] }, expr ); @@ -677,7 +690,8 @@ fn test_duckdb_union_datatype() { if_not_exists: Default::default(), transient: Default::default(), volatile: Default::default(), - name: ObjectName(vec!["tbl1".into()]), + iceberg: Default::default(), + name: ObjectName::from(vec!["tbl1".into()]), columns: vec![ ColumnDef { name: "one".into(), @@ -685,7 +699,6 @@ fn test_duckdb_union_datatype() { field_name: "a".into(), field_type: DataType::Int(None) }]), - collation: Default::default(), 
options: Default::default() }, ColumnDef { @@ -700,7 +713,6 @@ fn test_duckdb_union_datatype() { field_type: DataType::Int(None) } ]), - collation: Default::default(), options: Default::default() }, ColumnDef { @@ -712,7 +724,6 @@ fn test_duckdb_union_datatype() { field_type: DataType::Int(None) }]) }]), - collation: Default::default(), options: Default::default() } ], @@ -724,19 +735,13 @@ fn test_duckdb_union_datatype() { storage: Default::default(), location: Default::default() }), - table_properties: Default::default(), - with_options: Default::default(), file_format: Default::default(), location: Default::default(), query: Default::default(), without_rowid: Default::default(), like: Default::default(), clone: Default::default(), - engine: Default::default(), comment: Default::default(), - auto_increment_offset: Default::default(), - default_charset: Default::default(), - collation: Default::default(), on_commit: Default::default(), on_cluster: Default::default(), primary_key: Default::default(), @@ -744,7 +749,7 @@ fn test_duckdb_union_datatype() { partition_by: Default::default(), cluster_by: Default::default(), clustered_by: Default::default(), - options: Default::default(), + inherits: Default::default(), strict: Default::default(), copy_grants: Default::default(), enable_schema_evolution: Default::default(), @@ -754,7 +759,13 @@ fn test_duckdb_union_datatype() { default_ddl_collation: Default::default(), with_aggregation_policy: Default::default(), with_row_access_policy: Default::default(), - with_tags: Default::default() + with_tags: Default::default(), + base_location: Default::default(), + external_volume: Default::default(), + catalog: Default::default(), + catalog_sync: Default::default(), + storage_serialization_policy: Default::default(), + table_options: CreateTableOptions::None }), stmt ); @@ -776,7 +787,7 @@ fn parse_use() { // Test single identifier without quotes assert_eq!( duckdb().verified_stmt(&format!("USE {}", object_name)), - Statement::Use(Use::Object(ObjectName(vec![Ident::new( + Statement::Use(Use::Object(ObjectName::from(vec![Ident::new( object_name.to_string() )]))) ); @@ -784,7 +795,7 @@ fn parse_use() { // Test single identifier with different type of quotes assert_eq!( duckdb().verified_stmt(&format!("USE {0}{1}{0}", quote, object_name)), - Statement::Use(Use::Object(ObjectName(vec![Ident::with_quote( + Statement::Use(Use::Object(ObjectName::from(vec![Ident::with_quote( quote, object_name.to_string(), )]))) @@ -796,7 +807,7 @@ fn parse_use() { // Test double identifier with different type of quotes assert_eq!( duckdb().verified_stmt(&format!("USE {0}CATALOG{0}.{0}my_schema{0}", quote)), - Statement::Use(Use::Object(ObjectName(vec![ + Statement::Use(Use::Object(ObjectName::from(vec![ Ident::with_quote(quote, "CATALOG"), Ident::with_quote(quote, "my_schema") ]))) @@ -805,7 +816,7 @@ fn parse_use() { // Test double identifier without quotes assert_eq!( duckdb().verified_stmt("USE mydb.my_schema"), - Statement::Use(Use::Object(ObjectName(vec![ + Statement::Use(Use::Object(ObjectName::from(vec![ Ident::new("mydb"), Ident::new("my_schema") ]))) diff --git a/tests/sqlparser_hive.rs b/tests/sqlparser_hive.rs index 10bd374c0..14dcbffd1 100644 --- a/tests/sqlparser_hive.rs +++ b/tests/sqlparser_hive.rs @@ -21,11 +21,11 @@ //! is also tested (on the inputs it can handle). 
use sqlparser::ast::{ - ClusteredBy, CommentDef, CreateFunctionBody, CreateFunctionUsing, CreateTable, Expr, Function, - FunctionArgumentList, FunctionArguments, Ident, ObjectName, OneOrManyWithParens, OrderByExpr, - SelectItem, Statement, TableFactor, UnaryOperator, Use, Value, + ClusteredBy, CommentDef, CreateFunction, CreateFunctionBody, CreateFunctionUsing, CreateTable, + Expr, Function, FunctionArgumentList, FunctionArguments, Ident, ObjectName, OrderByExpr, + OrderByOptions, SelectItem, Set, Statement, TableFactor, UnaryOperator, Use, Value, }; -use sqlparser::dialect::{GenericDialect, HiveDialect, MsSqlDialect}; +use sqlparser::dialect::{AnsiDialect, GenericDialect, HiveDialect}; use sqlparser::parser::ParserError; use sqlparser::test_utils::*; @@ -91,7 +91,7 @@ fn parse_msck() { } #[test] -fn parse_set() { +fn parse_set_hivevar() { let set = "SET HIVEVAR:name = a, b, c_d"; hive().verified_stmt(set); } @@ -133,9 +133,7 @@ fn create_table_with_comment() { Statement::CreateTable(CreateTable { comment, .. }) => { assert_eq!( comment, - Some(CommentDef::AfterColumnDefsWithoutEq( - "table comment".to_string() - )) + Some(CommentDef::WithoutEq("table comment".to_string())) ) } _ => unreachable!(), @@ -170,14 +168,18 @@ fn create_table_with_clustered_by() { sorted_by: Some(vec![ OrderByExpr { expr: Expr::Identifier(Ident::new("a")), - asc: Some(true), - nulls_first: None, + options: OrderByOptions { + asc: Some(true), + nulls_first: None, + }, with_fill: None, }, OrderByExpr { expr: Expr::Identifier(Ident::new("b")), - asc: Some(false), - nulls_first: None, + options: OrderByOptions { + asc: Some(false), + nulls_first: None, + }, with_fill: None, }, ]), @@ -364,20 +366,20 @@ fn from_cte() { fn set_statement_with_minus() { assert_eq!( hive().verified_stmt("SET hive.tez.java.opts = -Xmx4g"), - Statement::SetVariable { - local: false, + Statement::Set(Set::SingleAssignment { + scope: None, hivevar: false, - variables: OneOrManyWithParens::One(ObjectName(vec![ + variable: ObjectName::from(vec![ Ident::new("hive"), Ident::new("tez"), Ident::new("java"), Ident::new("opts") - ])), - value: vec![Expr::UnaryOp { + ]), + values: vec![Expr::UnaryOp { op: UnaryOperator::Minus, expr: Box::new(Expr::Identifier(Ident::new("Xmx4g"))) }], - } + }) ); assert_eq!( @@ -392,19 +394,20 @@ fn set_statement_with_minus() { fn parse_create_function() { let sql = "CREATE TEMPORARY FUNCTION mydb.myfunc AS 'org.random.class.Name' USING JAR 'hdfs://somewhere.com:8020/very/far'"; match hive().verified_stmt(sql) { - Statement::CreateFunction { + Statement::CreateFunction(CreateFunction { temporary, name, function_body, using, .. 
- } => { + }) => { assert!(temporary); assert_eq!(name.to_string(), "mydb.myfunc"); assert_eq!( function_body, Some(CreateFunctionBody::AsBeforeOptions(Expr::Value( - Value::SingleQuotedString("org.random.class.Name".to_string()) + (Value::SingleQuotedString("org.random.class.Name".to_string())) + .with_empty_span() ))) ); assert_eq!( @@ -418,7 +421,7 @@ fn parse_create_function() { } // Test error in dialect that doesn't support parsing CREATE FUNCTION - let unsupported_dialects = TestedDialects::new(vec![Box::new(MsSqlDialect {})]); + let unsupported_dialects = TestedDialects::new(vec![Box::new(AnsiDialect {})]); assert_eq!( unsupported_dialects.parse_sql_statements(sql).unwrap_err(), @@ -457,8 +460,14 @@ fn parse_delimited_identifiers() { version, with_ordinality: _, partitions: _, + json_path: _, + sample: _, + index_hints: _, } => { - assert_eq!(vec![Ident::with_quote('"', "a table")], name.0); + assert_eq!( + ObjectName::from(vec![Ident::with_quote('"', "a table")]), + name + ); assert_eq!(Ident::with_quote('"', "alias"), alias.unwrap().name); assert!(args.is_none()); assert!(with_hints.is_empty()); @@ -477,7 +486,8 @@ fn parse_delimited_identifiers() { ); assert_eq!( &Expr::Function(Function { - name: ObjectName(vec![Ident::with_quote('"', "myfun")]), + name: ObjectName::from(vec![Ident::with_quote('"', "myfun")]), + uses_odbc_syntax: false, parameters: FunctionArguments::None, args: FunctionArguments::List(FunctionArgumentList { duplicate_treatment: None, @@ -512,7 +522,7 @@ fn parse_use() { // Test single identifier without quotes assert_eq!( hive().verified_stmt(&format!("USE {}", object_name)), - Statement::Use(Use::Object(ObjectName(vec![Ident::new( + Statement::Use(Use::Object(ObjectName::from(vec![Ident::new( object_name.to_string() )]))) ); @@ -520,7 +530,7 @@ fn parse_use() { // Test single identifier with different type of quotes assert_eq!( hive().verified_stmt(&format!("USE {}{}{}", quote, object_name, quote)), - Statement::Use(Use::Object(ObjectName(vec![Ident::with_quote( + Statement::Use(Use::Object(ObjectName::from(vec![Ident::with_quote( quote, object_name.to_string(), )]))) @@ -534,6 +544,15 @@ fn parse_use() { ); } +#[test] +fn test_tample_sample() { + hive().verified_stmt("SELECT * FROM source TABLESAMPLE (BUCKET 3 OUT OF 32 ON rand()) AS s"); + hive().verified_stmt("SELECT * FROM source TABLESAMPLE (BUCKET 3 OUT OF 16 ON id)"); + hive().verified_stmt("SELECT * FROM source TABLESAMPLE (100M) AS s"); + hive().verified_stmt("SELECT * FROM source TABLESAMPLE (0.1 PERCENT) AS s"); + hive().verified_stmt("SELECT * FROM source TABLESAMPLE (10 ROWS)"); +} + fn hive() -> TestedDialects { TestedDialects::new(vec![Box::new(HiveDialect {})]) } diff --git a/tests/sqlparser_mssql.rs b/tests/sqlparser_mssql.rs index 0223e2915..1c0a00b16 100644 --- a/tests/sqlparser_mssql.rs +++ b/tests/sqlparser_mssql.rs @@ -22,9 +22,12 @@ #[macro_use] mod test_utils; +use helpers::attached_token::AttachedToken; +use sqlparser::keywords::Keyword; +use sqlparser::tokenizer::{Location, Span, Token, TokenWithSpan, Word}; use test_utils::*; -use sqlparser::ast::DataType::{Int, Text}; +use sqlparser::ast::DataType::{Int, Text, Varbinary}; use sqlparser::ast::DeclareAssignment::MsSqlAssignment; use sqlparser::ast::Value::SingleQuotedString; use sqlparser::ast::*; @@ -61,15 +64,18 @@ fn parse_table_time_travel() { select.from, vec![TableWithJoins { relation: TableFactor::Table { - name: ObjectName(vec![Ident::new("t1")]), + name: ObjectName::from(vec![Ident::new("t1")]), alias: None, args: None, 
with_hints: vec![], version: Some(TableVersion::ForSystemTimeAsOf(Expr::Value( - Value::SingleQuotedString(version) + (Value::SingleQuotedString(version)).with_empty_span() ))), partitions: vec![], with_ordinality: false, + json_path: None, + sample: None, + index_hints: vec![] }, joins: vec![] },] @@ -94,56 +100,66 @@ fn parse_mssql_delimited_identifiers() { #[test] fn parse_create_procedure() { - let sql = "CREATE OR ALTER PROCEDURE test (@foo INT, @bar VARCHAR(256)) AS BEGIN SELECT 1 END"; + let sql = "CREATE OR ALTER PROCEDURE test (@foo INT, @bar VARCHAR(256)) AS BEGIN SELECT 1; END"; assert_eq!( ms().verified_stmt(sql), Statement::CreateProcedure { or_alter: true, - body: vec![Statement::Query(Box::new(Query { - with: None, - limit: None, - limit_by: vec![], - offset: None, - fetch: None, - locks: vec![], - for_clause: None, - order_by: None, - settings: None, - format_clause: None, - body: Box::new(SetExpr::Select(Box::new(Select { - distinct: None, - top: None, - projection: vec![SelectItem::UnnamedExpr(Expr::Value(number("1")))], - into: None, - from: vec![], - lateral_views: vec![], - prewhere: None, - selection: None, - group_by: GroupByExpr::Expressions(vec![], vec![]), - cluster_by: vec![], - distribute_by: vec![], - sort_by: vec![], - having: None, - named_window: vec![], - window_before_qualify: false, - qualify: None, - value_table_mode: None, - connect_by: None, - }))) - }))], + body: ConditionalStatements::BeginEnd(BeginEndStatements { + begin_token: AttachedToken::empty(), + statements: vec![Statement::Query(Box::new(Query { + with: None, + limit_clause: None, + fetch: None, + locks: vec![], + for_clause: None, + order_by: None, + settings: None, + format_clause: None, + pipe_operators: vec![], + body: Box::new(SetExpr::Select(Box::new(Select { + select_token: AttachedToken::empty(), + distinct: None, + top: None, + top_before_distinct: false, + projection: vec![SelectItem::UnnamedExpr(Expr::Value( + (number("1")).with_empty_span() + ))], + into: None, + from: vec![], + lateral_views: vec![], + prewhere: None, + selection: None, + group_by: GroupByExpr::Expressions(vec![], vec![]), + cluster_by: vec![], + distribute_by: vec![], + sort_by: vec![], + having: None, + named_window: vec![], + window_before_qualify: false, + qualify: None, + value_table_mode: None, + connect_by: None, + flavor: SelectFlavor::Standard, + }))) + }))], + end_token: AttachedToken::empty(), + }), params: Some(vec![ ProcedureParam { name: Ident { value: "@foo".into(), - quote_style: None + quote_style: None, + span: Span::empty(), }, data_type: DataType::Int(None) }, ProcedureParam { name: Ident { value: "@bar".into(), - quote_style: None + quote_style: None, + span: Span::empty(), }, data_type: DataType::Varchar(Some(CharacterLength::IntegerLength { length: 256, @@ -151,9 +167,10 @@ fn parse_create_procedure() { })) } ]), - name: ObjectName(vec![Ident { + name: ObjectName::from(vec![Ident { value: "test".into(), - quote_style: None + quote_style: None, + span: Span::empty(), }]) } ) @@ -161,19 +178,116 @@ fn parse_create_procedure() { #[test] fn parse_mssql_create_procedure() { - let _ = ms_and_generic().verified_stmt("CREATE OR ALTER PROCEDURE foo AS BEGIN SELECT 1 END"); - let _ = ms_and_generic().verified_stmt("CREATE PROCEDURE foo AS BEGIN SELECT 1 END"); + let _ = ms_and_generic().verified_stmt("CREATE OR ALTER PROCEDURE foo AS SELECT 1;"); + let _ = ms_and_generic().verified_stmt("CREATE OR ALTER PROCEDURE foo AS BEGIN SELECT 1; END"); + let _ = ms_and_generic().verified_stmt("CREATE 
PROCEDURE foo AS BEGIN SELECT 1; END"); let _ = ms().verified_stmt( - "CREATE PROCEDURE foo AS BEGIN SELECT [myColumn] FROM [myschema].[mytable] END", + "CREATE PROCEDURE foo AS BEGIN SELECT [myColumn] FROM [myschema].[mytable]; END", ); let _ = ms_and_generic().verified_stmt( - "CREATE PROCEDURE foo (@CustomerName NVARCHAR(50)) AS BEGIN SELECT * FROM DEV END", + "CREATE PROCEDURE foo (@CustomerName NVARCHAR(50)) AS BEGIN SELECT * FROM DEV; END", ); - let _ = ms().verified_stmt("CREATE PROCEDURE [foo] AS BEGIN UPDATE bar SET col = 'test' END"); + let _ = ms().verified_stmt("CREATE PROCEDURE [foo] AS BEGIN UPDATE bar SET col = 'test'; END"); // Test a statement with END in it - let _ = ms().verified_stmt("CREATE PROCEDURE [foo] AS BEGIN SELECT [foo], CASE WHEN [foo] IS NULL THEN 'empty' ELSE 'notempty' END AS [foo] END"); + let _ = ms().verified_stmt("CREATE PROCEDURE [foo] AS BEGIN SELECT [foo], CASE WHEN [foo] IS NULL THEN 'empty' ELSE 'notempty' END AS [foo]; END"); // Multiple statements - let _ = ms().verified_stmt("CREATE PROCEDURE [foo] AS BEGIN UPDATE bar SET col = 'test'; SELECT [foo] FROM BAR WHERE [FOO] > 10 END"); + let _ = ms().verified_stmt("CREATE PROCEDURE [foo] AS BEGIN UPDATE bar SET col = 'test'; SELECT [foo] FROM BAR WHERE [FOO] > 10; END"); +} + +#[test] +fn parse_create_function() { + let return_expression_function = "CREATE FUNCTION some_scalar_udf(@foo INT, @bar VARCHAR(256)) RETURNS INT AS BEGIN RETURN 1; END"; + assert_eq!( + ms().verified_stmt(return_expression_function), + sqlparser::ast::Statement::CreateFunction(CreateFunction { + or_alter: false, + or_replace: false, + temporary: false, + if_not_exists: false, + name: ObjectName::from(vec![Ident::new("some_scalar_udf")]), + args: Some(vec![ + OperateFunctionArg { + mode: None, + name: Some(Ident::new("@foo")), + data_type: DataType::Int(None), + default_expr: None, + }, + OperateFunctionArg { + mode: None, + name: Some(Ident::new("@bar")), + data_type: DataType::Varchar(Some(CharacterLength::IntegerLength { + length: 256, + unit: None + })), + default_expr: None, + }, + ]), + return_type: Some(DataType::Int(None)), + function_body: Some(CreateFunctionBody::AsBeginEnd(BeginEndStatements { + begin_token: AttachedToken::empty(), + statements: vec![Statement::Return(ReturnStatement { + value: Some(ReturnStatementValue::Expr(Expr::Value( + (number("1")).with_empty_span() + ))), + })], + end_token: AttachedToken::empty(), + })), + behavior: None, + called_on_null: None, + parallel: None, + using: None, + language: None, + determinism_specifier: None, + options: None, + remote_connection: None, + }), + ); + + let multi_statement_function = "\ + CREATE FUNCTION some_scalar_udf(@foo INT, @bar VARCHAR(256)) \ + RETURNS INT \ + AS \ + BEGIN \ + SET @foo = @foo + 1; \ + RETURN @foo; \ + END\ + "; + let _ = ms().verified_stmt(multi_statement_function); + + let create_function_with_conditional = "\ + CREATE FUNCTION some_scalar_udf() \ + RETURNS INT \ + AS \ + BEGIN \ + IF 1 = 2 \ + BEGIN \ + RETURN 1; \ + END; \ + RETURN 0; \ + END\ + "; + let _ = ms().verified_stmt(create_function_with_conditional); + + let create_or_alter_function = "\ + CREATE OR ALTER FUNCTION some_scalar_udf(@foo INT, @bar VARCHAR(256)) \ + RETURNS INT \ + AS \ + BEGIN \ + SET @foo = @foo + 1; \ + RETURN @foo; \ + END\ + "; + let _ = ms().verified_stmt(create_or_alter_function); + + let create_function_with_return_expression = "\ + CREATE FUNCTION some_scalar_udf(@foo INT, @bar VARCHAR(256)) \ + RETURNS INT \ + AS \ + BEGIN \ + RETURN CONVERT(INT, 1) 
+ 2; \ + END\ + "; + let _ = ms().verified_stmt(create_function_with_return_expression); } #[test] @@ -192,13 +306,279 @@ fn parse_mssql_apply_join() { ); } +#[test] +fn parse_mssql_openjson() { + let select = ms().verified_only_select( + "SELECT B.kind, B.id_list \ + FROM t_test_table AS A \ + CROSS APPLY OPENJSON(A.param, '$.config') WITH (kind VARCHAR(20) '$.kind', [id_list] NVARCHAR(MAX) '$.id_list' AS JSON) AS B", + ); + assert_eq!( + vec![TableWithJoins { + relation: TableFactor::Table { + name: ObjectName::from(vec![Ident::new("t_test_table")]), + alias: Some(TableAlias { + name: Ident::new("A"), + columns: vec![] + }), + args: None, + with_hints: vec![], + version: None, + with_ordinality: false, + partitions: vec![], + json_path: None, + sample: None, + index_hints: vec![] + }, + joins: vec![Join { + relation: TableFactor::OpenJsonTable { + json_expr: Expr::CompoundIdentifier( + vec![Ident::new("A"), Ident::new("param"),] + ), + json_path: Some(Value::SingleQuotedString("$.config".into())), + columns: vec![ + OpenJsonTableColumn { + name: Ident::new("kind"), + r#type: DataType::Varchar(Some(CharacterLength::IntegerLength { + length: 20, + unit: None + })), + path: Some("$.kind".into()), + as_json: false + }, + OpenJsonTableColumn { + name: Ident { + value: "id_list".into(), + quote_style: Some('['), + span: Span::empty(), + }, + r#type: DataType::Nvarchar(Some(CharacterLength::Max)), + path: Some("$.id_list".into()), + as_json: true + } + ], + alias: Some(TableAlias { + name: Ident::new("B"), + columns: vec![] + }) + }, + global: false, + join_operator: JoinOperator::CrossApply + }] + }], + select.from + ); + let select = ms().verified_only_select( + "SELECT B.kind, B.id_list \ + FROM t_test_table AS A \ + CROSS APPLY OPENJSON(A.param) WITH (kind VARCHAR(20) '$.kind', [id_list] NVARCHAR(MAX) '$.id_list' AS JSON) AS B", + ); + assert_eq!( + vec![TableWithJoins { + relation: TableFactor::Table { + name: ObjectName::from(vec![Ident::new("t_test_table"),]), + alias: Some(TableAlias { + name: Ident::new("A"), + columns: vec![] + }), + args: None, + with_hints: vec![], + version: None, + with_ordinality: false, + partitions: vec![], + json_path: None, + sample: None, + index_hints: vec![] + }, + joins: vec![Join { + relation: TableFactor::OpenJsonTable { + json_expr: Expr::CompoundIdentifier( + vec![Ident::new("A"), Ident::new("param"),] + ), + json_path: None, + columns: vec![ + OpenJsonTableColumn { + name: Ident::new("kind"), + r#type: DataType::Varchar(Some(CharacterLength::IntegerLength { + length: 20, + unit: None + })), + path: Some("$.kind".into()), + as_json: false + }, + OpenJsonTableColumn { + name: Ident { + value: "id_list".into(), + quote_style: Some('['), + span: Span::empty(), + }, + r#type: DataType::Nvarchar(Some(CharacterLength::Max)), + path: Some("$.id_list".into()), + as_json: true + } + ], + alias: Some(TableAlias { + name: Ident::new("B"), + columns: vec![] + }) + }, + global: false, + join_operator: JoinOperator::CrossApply + }] + }], + select.from + ); + let select = ms().verified_only_select( + "SELECT B.kind, B.id_list \ + FROM t_test_table AS A \ + CROSS APPLY OPENJSON(A.param) WITH (kind VARCHAR(20), [id_list] NVARCHAR(MAX)) AS B", + ); + assert_eq!( + vec![TableWithJoins { + relation: TableFactor::Table { + name: ObjectName::from(vec![Ident::new("t_test_table")]), + alias: Some(TableAlias { + name: Ident::new("A"), + columns: vec![] + }), + args: None, + with_hints: vec![], + version: None, + with_ordinality: false, + partitions: vec![], + json_path: None, 
+ sample: None, + index_hints: vec![] + }, + joins: vec![Join { + relation: TableFactor::OpenJsonTable { + json_expr: Expr::CompoundIdentifier( + vec![Ident::new("A"), Ident::new("param"),] + ), + json_path: None, + columns: vec![ + OpenJsonTableColumn { + name: Ident::new("kind"), + r#type: DataType::Varchar(Some(CharacterLength::IntegerLength { + length: 20, + unit: None + })), + path: None, + as_json: false + }, + OpenJsonTableColumn { + name: Ident { + value: "id_list".into(), + quote_style: Some('['), + span: Span::empty(), + }, + r#type: DataType::Nvarchar(Some(CharacterLength::Max)), + path: None, + as_json: false + } + ], + alias: Some(TableAlias { + name: Ident::new("B"), + columns: vec![] + }) + }, + global: false, + join_operator: JoinOperator::CrossApply + }] + }], + select.from + ); + let select = ms_and_generic().verified_only_select( + "SELECT B.kind, B.id_list \ + FROM t_test_table AS A \ + CROSS APPLY OPENJSON(A.param, '$.config') AS B", + ); + assert_eq!( + vec![TableWithJoins { + relation: TableFactor::Table { + name: ObjectName::from(vec![Ident::new("t_test_table")]), + alias: Some(TableAlias { + name: Ident::new("A"), + columns: vec![] + }), + args: None, + with_hints: vec![], + version: None, + with_ordinality: false, + partitions: vec![], + json_path: None, + sample: None, + index_hints: vec![], + }, + joins: vec![Join { + relation: TableFactor::OpenJsonTable { + json_expr: Expr::CompoundIdentifier( + vec![Ident::new("A"), Ident::new("param"),] + ), + json_path: Some(Value::SingleQuotedString("$.config".into())), + columns: vec![], + alias: Some(TableAlias { + name: Ident::new("B"), + columns: vec![] + }) + }, + global: false, + join_operator: JoinOperator::CrossApply + }] + }], + select.from + ); + let select = ms_and_generic().verified_only_select( + "SELECT B.kind, B.id_list \ + FROM t_test_table AS A \ + CROSS APPLY OPENJSON(A.param) AS B", + ); + assert_eq!( + vec![TableWithJoins { + relation: TableFactor::Table { + name: ObjectName::from(vec![Ident::new("t_test_table")]), + alias: Some(TableAlias { + name: Ident::new("A"), + columns: vec![] + }), + args: None, + with_hints: vec![], + version: None, + with_ordinality: false, + partitions: vec![], + json_path: None, + sample: None, + index_hints: vec![], + }, + joins: vec![Join { + relation: TableFactor::OpenJsonTable { + json_expr: Expr::CompoundIdentifier( + vec![Ident::new("A"), Ident::new("param"),] + ), + json_path: None, + columns: vec![], + alias: Some(TableAlias { + name: Ident::new("B"), + columns: vec![] + }) + }, + global: false, + join_operator: JoinOperator::CrossApply + }] + }], + select.from + ); +} + #[test] fn parse_mssql_top_paren() { let sql = "SELECT TOP (5) * FROM foo"; let select = ms_and_generic().verified_only_select(sql); let top = select.top.unwrap(); assert_eq!( - Some(TopQuantity::Expr(Expr::Value(number("5")))), + Some(TopQuantity::Expr(Expr::Value( + (number("5")).with_empty_span() + ))), top.quantity ); assert!(!top.percent); @@ -210,7 +590,9 @@ fn parse_mssql_top_percent() { let select = ms_and_generic().verified_only_select(sql); let top = select.top.unwrap(); assert_eq!( - Some(TopQuantity::Expr(Expr::Value(number("5")))), + Some(TopQuantity::Expr(Expr::Value( + (number("5")).with_empty_span() + ))), top.quantity ); assert!(top.percent); @@ -222,7 +604,9 @@ fn parse_mssql_top_with_ties() { let select = ms_and_generic().verified_only_select(sql); let top = select.top.unwrap(); assert_eq!( - Some(TopQuantity::Expr(Expr::Value(number("5")))), + Some(TopQuantity::Expr(Expr::Value( + 
(number("5")).with_empty_span() + ))), top.quantity ); assert!(top.with_ties); @@ -234,7 +618,9 @@ fn parse_mssql_top_percent_with_ties() { let select = ms_and_generic().verified_only_select(sql); let top = select.top.unwrap(); assert_eq!( - Some(TopQuantity::Expr(Expr::Value(number("10")))), + Some(TopQuantity::Expr(Expr::Value( + (number("10")).with_empty_span() + ))), top.quantity ); assert!(top.percent); @@ -263,9 +649,10 @@ fn parse_mssql_create_role() { assert_eq_vec(&["mssql"], &names); assert_eq!( authorization_owner, - Some(ObjectName(vec![Ident { + Some(ObjectName::from(vec![Ident { value: "helena".into(), - quote_style: None + quote_style: None, + span: Span::empty(), }])) ); } @@ -281,12 +668,14 @@ fn parse_alter_role() { [Statement::AlterRole { name: Ident { value: "old_name".into(), - quote_style: None + quote_style: None, + span: Span::empty(), }, operation: AlterRoleOperation::RenameRole { role_name: Ident { value: "new_name".into(), - quote_style: None + quote_style: None, + span: Span::empty(), } }, }] @@ -298,12 +687,14 @@ fn parse_alter_role() { Statement::AlterRole { name: Ident { value: "role_name".into(), - quote_style: None + quote_style: None, + span: Span::empty(), }, operation: AlterRoleOperation::AddMember { member_name: Ident { value: "new_member".into(), - quote_style: None + quote_style: None, + span: Span::empty(), } }, } @@ -315,12 +706,14 @@ fn parse_alter_role() { Statement::AlterRole { name: Ident { value: "role_name".into(), - quote_style: None + quote_style: None, + span: Span::empty(), }, operation: AlterRoleOperation::DropMember { member_name: Ident { value: "old_member".into(), - quote_style: None + quote_style: None, + span: Span::empty(), } }, } @@ -341,10 +734,12 @@ fn parse_delimited_identifiers() { args, with_hints, version, - with_ordinality: _, - partitions: _, + .. } => { - assert_eq!(vec![Ident::with_quote('"', "a table")], name.0); + assert_eq!( + ObjectName::from(vec![Ident::with_quote('"', "a table")]), + name + ); assert_eq!(Ident::with_quote('"', "alias"), alias.unwrap().name); assert!(args.is_none()); assert!(with_hints.is_empty()); @@ -363,7 +758,8 @@ fn parse_delimited_identifiers() { ); assert_eq!( &Expr::Function(Function { - name: ObjectName(vec![Ident::with_quote('"', "myfun")]), + name: ObjectName::from(vec![Ident::with_quote('"', "myfun")]), + uses_odbc_syntax: false, parameters: FunctionArguments::None, args: FunctionArguments::List(FunctionArgumentList { duplicate_treatment: None, @@ -395,11 +791,11 @@ fn parse_table_name_in_square_brackets() { let select = ms().verified_only_select(r#"SELECT [a column] FROM [a schema].[a table]"#); if let TableFactor::Table { name, .. } = only(select.from).relation { assert_eq!( - vec![ + ObjectName::from(vec![ Ident::with_quote('[', "a schema"), Ident::with_quote('[', "a table") - ], - name.0 + ]), + name ); } else { panic!("Expecting TableFactor::Table"); @@ -448,6 +844,317 @@ fn parse_for_json_expect_ast() { ); } +#[test] +fn parse_mssql_json_object() { + let select = ms().verified_only_select( + "SELECT JSON_OBJECT('user_name' : USER_NAME(), LOWER(@id_key) : @id_value, 'sid' : (SELECT @@SPID) ABSENT ON NULL)", + ); + match expr_from_projection(&select.projection[0]) { + Expr::Function(Function { + args: FunctionArguments::List(FunctionArgumentList { args, clauses, .. }), + .. 
+ }) => { + assert!(matches!( + args[0], + FunctionArg::ExprNamed { + name: Expr::Value(ValueWithSpan { + value: Value::SingleQuotedString(_), + span: _ + }), + arg: FunctionArgExpr::Expr(Expr::Function(_)), + operator: FunctionArgOperator::Colon + } + )); + assert!(matches!( + args[1], + FunctionArg::ExprNamed { + name: Expr::Function(_), + arg: FunctionArgExpr::Expr(Expr::Identifier(_)), + operator: FunctionArgOperator::Colon + } + )); + assert!(matches!( + args[2], + FunctionArg::ExprNamed { + name: Expr::Value(ValueWithSpan { + value: Value::SingleQuotedString(_), + span: _ + }), + arg: FunctionArgExpr::Expr(Expr::Subquery(_)), + operator: FunctionArgOperator::Colon + } + )); + assert_eq!( + &[FunctionArgumentClause::JsonNullClause( + JsonNullClause::AbsentOnNull + )], + &clauses[..] + ); + } + _ => unreachable!(), + } + let select = ms().verified_only_select( + "SELECT s.session_id, JSON_OBJECT('security_id' : s.security_id, 'login' : s.login_name, 'status' : s.status) AS info \ + FROM sys.dm_exec_sessions AS s \ + WHERE s.is_user_process = 1", + ); + match &select.projection[1] { + SelectItem::ExprWithAlias { + expr: + Expr::Function(Function { + args: FunctionArguments::List(FunctionArgumentList { args, .. }), + .. + }), + .. + } => { + assert!(matches!( + args[0], + FunctionArg::ExprNamed { + name: Expr::Value(ValueWithSpan { + value: Value::SingleQuotedString(_), + span: _ + }), + arg: FunctionArgExpr::Expr(Expr::CompoundIdentifier(_)), + operator: FunctionArgOperator::Colon + } + )); + assert!(matches!( + args[1], + FunctionArg::ExprNamed { + name: Expr::Value(ValueWithSpan { + value: Value::SingleQuotedString(_), + span: _ + }), + arg: FunctionArgExpr::Expr(Expr::CompoundIdentifier(_)), + operator: FunctionArgOperator::Colon + } + )); + assert!(matches!( + args[2], + FunctionArg::ExprNamed { + name: Expr::Value(ValueWithSpan { + value: Value::SingleQuotedString(_), + span: _ + }), + arg: FunctionArgExpr::Expr(Expr::CompoundIdentifier(_)), + operator: FunctionArgOperator::Colon + } + )); + } + _ => unreachable!(), + } +} + +#[test] +fn parse_mssql_json_array() { + let select = ms().verified_only_select("SELECT JSON_ARRAY('a', 1, NULL, 2 NULL ON NULL)"); + match expr_from_projection(&select.projection[0]) { + Expr::Function(Function { + args: FunctionArguments::List(FunctionArgumentList { args, clauses, .. }), + .. + }) => { + assert_eq!( + &[ + FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::Value( + (Value::SingleQuotedString("a".into())).with_empty_span() + ))), + FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::Value( + (number("1")).with_empty_span() + ))), + FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::Value( + (Value::Null).with_empty_span() + ))), + FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::Value( + (number("2")).with_empty_span() + ))), + ], + &args[..] + ); + assert_eq!( + &[FunctionArgumentClause::JsonNullClause( + JsonNullClause::NullOnNull + )], + &clauses[..] + ); + } + _ => unreachable!(), + } + let select = ms().verified_only_select("SELECT JSON_ARRAY('a', 1, NULL, 2 ABSENT ON NULL)"); + match expr_from_projection(&select.projection[0]) { + Expr::Function(Function { + args: FunctionArguments::List(FunctionArgumentList { args, clauses, .. }), + .. 
+ }) => { + assert_eq!( + &[ + FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::Value( + (Value::SingleQuotedString("a".into())).with_empty_span() + ))), + FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::Value( + (number("1")).with_empty_span() + ))), + FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::Value( + (Value::Null).with_empty_span() + ))), + FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::Value( + (number("2")).with_empty_span() + ))), + ], + &args[..] + ); + assert_eq!( + &[FunctionArgumentClause::JsonNullClause( + JsonNullClause::AbsentOnNull + )], + &clauses[..] + ); + } + _ => unreachable!(), + } + let select = ms().verified_only_select("SELECT JSON_ARRAY(NULL ON NULL)"); + match expr_from_projection(&select.projection[0]) { + Expr::Function(Function { + args: FunctionArguments::List(FunctionArgumentList { args, clauses, .. }), + .. + }) => { + assert!(args.is_empty()); + assert_eq!( + &[FunctionArgumentClause::JsonNullClause( + JsonNullClause::NullOnNull + )], + &clauses[..] + ); + } + _ => unreachable!(), + } + let select = ms().verified_only_select("SELECT JSON_ARRAY(ABSENT ON NULL)"); + match expr_from_projection(&select.projection[0]) { + Expr::Function(Function { + args: FunctionArguments::List(FunctionArgumentList { args, clauses, .. }), + .. + }) => { + assert!(args.is_empty()); + assert_eq!( + &[FunctionArgumentClause::JsonNullClause( + JsonNullClause::AbsentOnNull + )], + &clauses[..] + ); + } + _ => unreachable!(), + } + let select = ms().verified_only_select( + "SELECT JSON_ARRAY('a', JSON_OBJECT('name' : 'value', 'type' : 1) NULL ON NULL)", + ); + match expr_from_projection(&select.projection[0]) { + Expr::Function(Function { + args: FunctionArguments::List(FunctionArgumentList { args, clauses, .. }), + .. + }) => { + assert_eq!( + &FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::Value( + (Value::SingleQuotedString("a".into())).with_empty_span() + ))), + &args[0] + ); + assert!(matches!( + args[1], + FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::Function(_))) + )); + assert_eq!( + &[FunctionArgumentClause::JsonNullClause( + JsonNullClause::NullOnNull + )], + &clauses[..] + ); + } + _ => unreachable!(), + } + let select = ms().verified_only_select( + "SELECT JSON_ARRAY('a', JSON_OBJECT('name' : 'value', 'type' : 1), JSON_ARRAY(1, NULL, 2 NULL ON NULL))", + ); + match expr_from_projection(&select.projection[0]) { + Expr::Function(Function { + args: FunctionArguments::List(FunctionArgumentList { args, .. }), + .. + }) => { + assert_eq!( + &FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::Value( + (Value::SingleQuotedString("a".into())).with_empty_span() + ))), + &args[0] + ); + assert!(matches!( + args[1], + FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::Function(_))) + )); + assert!(matches!( + args[2], + FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::Function(_))) + )); + } + _ => unreachable!(), + } + let select = ms().verified_only_select("SELECT JSON_ARRAY(1, @id_value, (SELECT @@SPID))"); + match expr_from_projection(&select.projection[0]) { + Expr::Function(Function { + args: FunctionArguments::List(FunctionArgumentList { args, .. }), + .. 
+ }) => { + assert_eq!( + &FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::Value( + (number("1")).with_empty_span() + ))), + &args[0] + ); + assert!(matches!( + args[1], + FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::Identifier(_))) + )); + assert!(matches!( + args[2], + FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::Subquery(_))) + )); + } + _ => unreachable!(), + } + let select = ms().verified_only_select( + "SELECT s.session_id, JSON_ARRAY(s.host_name, s.program_name, s.client_interface_name NULL ON NULL) AS info \ + FROM sys.dm_exec_sessions AS s \ + WHERE s.is_user_process = 1", + ); + match &select.projection[1] { + SelectItem::ExprWithAlias { + expr: + Expr::Function(Function { + args: FunctionArguments::List(FunctionArgumentList { args, clauses, .. }), + .. + }), + .. + } => { + assert!(matches!( + args[0], + FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::CompoundIdentifier(_))) + )); + assert!(matches!( + args[1], + FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::CompoundIdentifier(_))) + )); + assert!(matches!( + args[2], + FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::CompoundIdentifier(_))) + )); + assert_eq!( + &[FunctionArgumentClause::JsonNullClause( + JsonNullClause::NullOnNull + )], + &clauses[..] + ); + } + _ => unreachable!(), + } +} + #[test] fn parse_ampersand_arobase() { // In SQL Server, a&@b means (a) & (@b), in PostgreSQL it means (a) &@ (b) @@ -475,15 +1182,15 @@ fn parse_convert() { unreachable!() }; assert!(!is_try); - assert_eq!(Expr::Value(number("1")), *expr); + assert_eq!(Expr::value(number("1")), *expr); assert_eq!(Some(DataType::Int(None)), data_type); assert!(charset.is_none()); assert!(target_before_value); assert_eq!( vec![ - Expr::Value(number("2")), - Expr::Value(number("3")), - Expr::Value(Value::Null), + Expr::value(number("2")), + Expr::value(number("3")), + Expr::Value((Value::Null).with_empty_span()), ], styles ); @@ -512,31 +1219,32 @@ fn parse_substring_in_select() { with: None, body: Box::new(SetExpr::Select(Box::new(Select { + select_token: AttachedToken::empty(), distinct: Some(Distinct::Distinct), top: None, + top_before_distinct: false, projection: vec![SelectItem::UnnamedExpr(Expr::Substring { expr: Box::new(Expr::Identifier(Ident { value: "description".to_string(), - quote_style: None + quote_style: None, + span: Span::empty(), })), - substring_from: Some(Box::new(Expr::Value(number("0")))), - substring_for: Some(Box::new(Expr::Value(number("1")))), + substring_from: Some(Box::new(Expr::Value( + (number("0")).with_empty_span() + ))), + substring_for: Some(Box::new(Expr::Value( + (number("1")).with_empty_span() + ))), special: true, + shorthand: false, })], into: None, from: vec![TableWithJoins { - relation: TableFactor::Table { - name: ObjectName(vec![Ident { - value: "test".to_string(), - quote_style: None - }]), - alias: None, - args: None, - with_hints: vec![], - version: None, - partitions: vec![], - with_ordinality: false, - }, + relation: table_from_name(ObjectName::from(vec![Ident { + value: "test".to_string(), + quote_style: None, + span: Span::empty(), + }])), joins: vec![] }], lateral_views: vec![], @@ -552,16 +1260,16 @@ fn parse_substring_in_select() { window_before_qualify: false, value_table_mode: None, connect_by: None, + flavor: SelectFlavor::Standard, }))), order_by: None, - limit: None, - limit_by: vec![], - offset: None, + limit_clause: None, fetch: None, locks: vec![], for_clause: None, settings: None, format_clause: None, + pipe_operators: vec![], }), query ); @@ -573,7 +1281,7 @@ fn 
parse_substring_in_select() { #[test] fn parse_mssql_declare() { let sql = "DECLARE @foo CURSOR, @bar INT, @baz AS TEXT = 'foobar';"; - let ast = Parser::parse_sql(&MsSqlDialect {}, sql).unwrap(); + let ast = ms().parse_sql_statements(sql).unwrap(); assert_eq!( vec![Statement::Declare { @@ -581,7 +1289,8 @@ fn parse_mssql_declare() { Declare { names: vec![Ident { value: "@foo".to_string(), - quote_style: None + quote_style: None, + span: Span::empty(), }], data_type: None, assignment: None, @@ -595,7 +1304,8 @@ fn parse_mssql_declare() { Declare { names: vec![Ident { value: "@bar".to_string(), - quote_style: None + quote_style: None, + span: Span::empty(), }], data_type: Some(Int(None)), assignment: None, @@ -609,12 +1319,13 @@ fn parse_mssql_declare() { Declare { names: vec![Ident { value: "@baz".to_string(), - quote_style: None + quote_style: None, + span: Span::empty(), }], data_type: Some(Text), - assignment: Some(MsSqlAssignment(Box::new(Expr::Value(SingleQuotedString( - "foobar".to_string() - ))))), + assignment: Some(MsSqlAssignment(Box::new(Expr::Value( + (SingleQuotedString("foobar".to_string())).with_empty_span() + )))), declare_type: None, binary: None, sensitive: None, @@ -626,6 +1337,196 @@ fn parse_mssql_declare() { }], ast ); + + let sql = "DECLARE @bar INT;SET @bar = 2;SELECT @bar * 4"; + let ast = ms().parse_sql_statements(sql).unwrap(); + assert_eq!( + vec![ + Statement::Declare { + stmts: vec![Declare { + names: vec![Ident::new("@bar"),], + data_type: Some(Int(None)), + assignment: None, + declare_type: None, + binary: None, + sensitive: None, + scroll: None, + hold: None, + for_query: None + }] + }, + Statement::Set(Set::SingleAssignment { + scope: None, + hivevar: false, + variable: ObjectName::from(vec![Ident::new("@bar")]), + values: vec![Expr::Value( + (Value::Number("2".parse().unwrap(), false)).with_empty_span() + )], + }), + Statement::Query(Box::new(Query { + with: None, + limit_clause: None, + fetch: None, + locks: vec![], + for_clause: None, + order_by: None, + settings: None, + format_clause: None, + pipe_operators: vec![], + + body: Box::new(SetExpr::Select(Box::new(Select { + select_token: AttachedToken::empty(), + distinct: None, + top: None, + top_before_distinct: false, + projection: vec![SelectItem::UnnamedExpr(Expr::BinaryOp { + left: Box::new(Expr::Identifier(Ident::new("@bar"))), + op: BinaryOperator::Multiply, + right: Box::new(Expr::Value( + (Value::Number("4".parse().unwrap(), false)).with_empty_span() + )), + })], + into: None, + from: vec![], + lateral_views: vec![], + prewhere: None, + selection: None, + group_by: GroupByExpr::Expressions(vec![], vec![]), + cluster_by: vec![], + distribute_by: vec![], + sort_by: vec![], + having: None, + named_window: vec![], + window_before_qualify: false, + qualify: None, + value_table_mode: None, + connect_by: None, + flavor: SelectFlavor::Standard, + }))) + })) + ], + ast + ); + + let declare_cursor_for_select = + "DECLARE vend_cursor CURSOR FOR SELECT * FROM Purchasing.Vendor"; + let _ = ms().verified_stmt(declare_cursor_for_select); +} + +#[test] +fn test_mssql_cursor() { + let full_cursor_usage = "\ + DECLARE Employee_Cursor CURSOR FOR \ + SELECT LastName, FirstName \ + FROM AdventureWorks2022.HumanResources.vEmployee \ + WHERE LastName LIKE 'B%'; \ + \ + OPEN Employee_Cursor; \ + \ + FETCH NEXT FROM Employee_Cursor; \ + \ + WHILE @@FETCH_STATUS = 0 \ + BEGIN \ + FETCH NEXT FROM Employee_Cursor; \ + END; \ + \ + CLOSE Employee_Cursor; \ + DEALLOCATE Employee_Cursor\ + "; + let _ = 
ms().statements_parse_to(full_cursor_usage, ""); +} + +#[test] +fn test_mssql_while_statement() { + let while_single_statement = "WHILE 1 = 0 PRINT 'Hello World';"; + let stmt = ms().verified_stmt(while_single_statement); + assert_eq!( + stmt, + Statement::While(sqlparser::ast::WhileStatement { + while_block: ConditionalStatementBlock { + start_token: AttachedToken(TokenWithSpan { + token: Token::Word(Word { + value: "WHILE".to_string(), + quote_style: None, + keyword: Keyword::WHILE + }), + span: Span::empty() + }), + condition: Some(Expr::BinaryOp { + left: Box::new(Expr::Value( + (Value::Number("1".parse().unwrap(), false)).with_empty_span() + )), + op: BinaryOperator::Eq, + right: Box::new(Expr::Value( + (Value::Number("0".parse().unwrap(), false)).with_empty_span() + )), + }), + then_token: None, + conditional_statements: ConditionalStatements::Sequence { + statements: vec![Statement::Print(PrintStatement { + message: Box::new(Expr::Value( + (Value::SingleQuotedString("Hello World".to_string())) + .with_empty_span() + )), + })], + } + } + }) + ); + + let while_begin_end = "\ + WHILE @@FETCH_STATUS = 0 \ + BEGIN \ + FETCH NEXT FROM Employee_Cursor; \ + END\ + "; + let _ = ms().verified_stmt(while_begin_end); + + let while_begin_end_multiple_statements = "\ + WHILE @@FETCH_STATUS = 0 \ + BEGIN \ + FETCH NEXT FROM Employee_Cursor; \ + PRINT 'Hello World'; \ + END\ + "; + let _ = ms().verified_stmt(while_begin_end_multiple_statements); +} + +#[test] +fn test_parse_raiserror() { + let sql = r#"RAISERROR('This is a test', 16, 1)"#; + let s = ms().verified_stmt(sql); + assert_eq!( + s, + Statement::RaisError { + message: Box::new(Expr::Value( + (Value::SingleQuotedString("This is a test".to_string())).with_empty_span() + )), + severity: Box::new(Expr::Value( + (Value::Number("16".parse().unwrap(), false)).with_empty_span() + )), + state: Box::new(Expr::Value( + (Value::Number("1".parse().unwrap(), false)).with_empty_span() + )), + arguments: vec![], + options: vec![], + } + ); + + let sql = r#"RAISERROR('This is a test', 16, 1) WITH NOWAIT"#; + let _ = ms().verified_stmt(sql); + + let sql = r#"RAISERROR('This is a test', 16, 1, 'ARG') WITH SETERROR, LOG"#; + let _ = ms().verified_stmt(sql); + + let sql = r#"RAISERROR(N'This is message %s %d.', 10, 1, N'number', 5)"#; + let _ = ms().verified_stmt(sql); + + let sql = r#"RAISERROR(N'<<%*.*s>>', 10, 1, 7, 3, N'abcde')"#; + let _ = ms().verified_stmt(sql); + + let sql = r#"RAISERROR(@ErrorMessage, @ErrorSeverity, @ErrorState)"#; + let _ = ms().verified_stmt(sql); } #[test] @@ -643,7 +1544,7 @@ fn parse_use() { // Test single identifier without quotes assert_eq!( ms().verified_stmt(&format!("USE {}", object_name)), - Statement::Use(Use::Object(ObjectName(vec![Ident::new( + Statement::Use(Use::Object(ObjectName::from(vec![Ident::new( object_name.to_string() )]))) ); @@ -651,7 +1552,7 @@ fn parse_use() { // Test single identifier with different type of quotes assert_eq!( ms().verified_stmt(&format!("USE {}{}{}", quote, object_name, quote)), - Statement::Use(Use::Object(ObjectName(vec![Ident::with_quote( + Statement::Use(Use::Object(ObjectName::from(vec![Ident::with_quote( quote, object_name.to_string(), )]))) @@ -670,16 +1571,18 @@ fn parse_create_table_with_valid_options() { key: Ident { value: "DISTRIBUTION".to_string(), quote_style: None, + span: Span::empty(), }, value: Expr::Identifier(Ident { value: "ROUND_ROBIN".to_string(), quote_style: None, + span: Span::empty(), }) }, SqlOption::Partition { column_name: "column_a".into(), range_direction: 
None, - for_values: vec![Expr::Value(test_utils::number("10")), Expr::Value(test_utils::number("11"))] , + for_values: vec![Expr::Value((test_utils::number("10")).with_empty_span()), Expr::Value((test_utils::number("11")).with_empty_span())] , }, ], ), @@ -690,8 +1593,8 @@ fn parse_create_table_with_valid_options() { column_name: "column_a".into(), range_direction: Some(PartitionRangeDirection::Left), for_values: vec![ - Expr::Value(test_utils::number("10")), - Expr::Value(test_utils::number("11")), + Expr::Value((test_utils::number("10")).with_empty_span()), + Expr::Value((test_utils::number("11")).with_empty_span()), ], } ], @@ -717,6 +1620,7 @@ fn parse_create_table_with_valid_options() { name: Ident { value: "column_a".to_string(), quote_style: None, + span: Span::empty(), }, asc: Some(true), }, @@ -724,6 +1628,7 @@ fn parse_create_table_with_valid_options() { name: Ident { value: "column_b".to_string(), quote_style: None, + span: Span::empty(), }, asc: Some(false), }, @@ -731,6 +1636,7 @@ fn parse_create_table_with_valid_options() { name: Ident { value: "column_c".to_string(), quote_style: None, + span: Span::empty(), }, asc: None, }, @@ -744,17 +1650,20 @@ fn parse_create_table_with_valid_options() { key: Ident { value: "DISTRIBUTION".to_string(), quote_style: None, + span: Span::empty(), }, value: Expr::Function( Function { - name: ObjectName( + name: ObjectName::from( vec![ Ident { value: "HASH".to_string(), quote_style: None, + span: Span::empty(), }, ], ), + uses_odbc_syntax: false, parameters: FunctionArguments::None, args: FunctionArguments::List( FunctionArgumentList { @@ -766,6 +1675,7 @@ fn parse_create_table_with_valid_options() { Ident { value: "column_a".to_string(), quote_style: None, + span: Span::empty(), }, ), ), @@ -776,6 +1686,7 @@ fn parse_create_table_with_valid_options() { Ident { value: "column_b".to_string(), quote_style: None, + span: Span::empty(), }, ), ), @@ -807,36 +1718,37 @@ fn parse_create_table_with_valid_options() { if_not_exists: false, transient: false, volatile: false, - name: ObjectName(vec![Ident { + name: ObjectName::from(vec![Ident { value: "mytable".to_string(), quote_style: None, + span: Span::empty(), },],), columns: vec![ ColumnDef { name: Ident { value: "column_a".to_string(), quote_style: None, + span: Span::empty(), }, data_type: Int(None,), - collation: None, options: vec![], }, ColumnDef { name: Ident { value: "column_b".to_string(), quote_style: None, + span: Span::empty(), }, data_type: Int(None,), - collation: None, options: vec![], }, ColumnDef { name: Ident { value: "column_c".to_string(), quote_style: None, + span: Span::empty(), }, data_type: Int(None,), - collation: None, options: vec![], }, ], @@ -848,19 +1760,13 @@ fn parse_create_table_with_valid_options() { storage: None, location: None, },), - table_properties: vec![], - with_options, file_format: None, location: None, query: None, without_rowid: false, like: None, clone: None, - engine: None, comment: None, - auto_increment_offset: None, - default_charset: None, - collation: None, on_commit: None, on_cluster: None, primary_key: None, @@ -868,8 +1774,9 @@ fn parse_create_table_with_valid_options() { partition_by: None, cluster_by: None, clustered_by: None, - options: None, + inherits: None, strict: false, + iceberg: false, copy_grants: false, enable_schema_evolution: None, change_tracking: None, @@ -879,11 +1786,33 @@ fn parse_create_table_with_valid_options() { with_aggregation_policy: None, with_row_access_policy: None, with_tags: None, + base_location: None, + 
external_volume: None, + catalog: None, + catalog_sync: None, + storage_serialization_policy: None, + table_options: CreateTableOptions::With(with_options) }) ); } } +#[test] +fn parse_nested_slash_star_comment() { + let sql = r#" + select + /* + comment level 1 + /* + comment level 2 + */ + */ + 1; + "#; + let canonical = "SELECT 1"; + ms().one_statement_parses_to(sql, canonical); +} + #[test] fn parse_create_table_with_invalid_options() { let invalid_cases = vec![ @@ -945,8 +1874,8 @@ fn parse_create_table_with_identity_column() { IdentityProperty { parameters: Some(IdentityPropertyFormatKind::FunctionCall( IdentityParameters { - seed: Expr::Value(number("1")), - increment: Expr::Value(number("1")), + seed: Expr::value(number("1")), + increment: Expr::value(number("1")), }, )), order: None, @@ -972,17 +1901,20 @@ fn parse_create_table_with_identity_column() { if_not_exists: false, transient: false, volatile: false, - name: ObjectName(vec![Ident { + iceberg: false, + name: ObjectName::from(vec![Ident { value: "mytable".to_string(), quote_style: None, + span: Span::empty(), },],), columns: vec![ColumnDef { name: Ident { value: "columnA".to_string(), quote_style: None, + span: Span::empty(), }, data_type: Int(None,), - collation: None, + options: column_options, },], constraints: vec![], @@ -993,19 +1925,13 @@ fn parse_create_table_with_identity_column() { storage: None, location: None, },), - table_properties: vec![], - with_options: vec![], file_format: None, location: None, query: None, without_rowid: false, like: None, clone: None, - engine: None, comment: None, - auto_increment_offset: None, - default_charset: None, - collation: None, on_commit: None, on_cluster: None, primary_key: None, @@ -1013,7 +1939,7 @@ fn parse_create_table_with_identity_column() { partition_by: None, cluster_by: None, clustered_by: None, - options: None, + inherits: None, strict: false, copy_grants: false, enable_schema_evolution: None, @@ -1024,14 +1950,393 @@ fn parse_create_table_with_identity_column() { with_aggregation_policy: None, with_row_access_policy: None, with_tags: None, + base_location: None, + external_volume: None, + catalog: None, + catalog_sync: None, + storage_serialization_policy: None, + table_options: CreateTableOptions::None }), ); } } +#[test] +fn parse_true_false_as_identifiers() { + assert_eq!( + ms().verified_expr("true"), + Expr::Identifier(Ident::new("true")) + ); + assert_eq!( + ms().verified_expr("false"), + Expr::Identifier(Ident::new("false")) + ); +} + +#[test] +fn parse_mssql_set_session_value() { + ms().verified_stmt( + "SET OFFSETS SELECT, FROM, ORDER, TABLE, PROCEDURE, STATEMENT, PARAM, EXECUTE ON", + ); + ms().verified_stmt("SET IDENTITY_INSERT dbo.Tool ON"); + ms().verified_stmt("SET TRANSACTION ISOLATION LEVEL READ UNCOMMITTED"); + ms().verified_stmt("SET TRANSACTION ISOLATION LEVEL READ COMMITTED"); + ms().verified_stmt("SET TRANSACTION ISOLATION LEVEL REPEATABLE READ"); + ms().verified_stmt("SET TRANSACTION ISOLATION LEVEL SNAPSHOT"); + ms().verified_stmt("SET TRANSACTION ISOLATION LEVEL SERIALIZABLE"); + ms().verified_stmt("SET STATISTICS IO ON"); + ms().verified_stmt("SET STATISTICS XML ON"); + ms().verified_stmt("SET STATISTICS PROFILE ON"); + ms().verified_stmt("SET STATISTICS TIME ON"); + ms().verified_stmt("SET DATEFIRST 7"); + ms().verified_stmt("SET DATEFIRST @xxx"); + ms().verified_stmt("SET DATEFIRST @@xxx"); + ms().verified_stmt("SET DATEFORMAT dmy"); + ms().verified_stmt("SET DATEFORMAT @datevar"); + ms().verified_stmt("SET DATEFORMAT @@datevar"); + 
ms().verified_stmt("SET DEADLOCK_PRIORITY 'LOW'"); + ms().verified_stmt("SET DEADLOCK_PRIORITY LOW"); + ms().verified_stmt("SET DEADLOCK_PRIORITY 8"); + ms().verified_stmt("SET DEADLOCK_PRIORITY -8"); + ms().verified_stmt("SET DEADLOCK_PRIORITY @xxx"); + ms().verified_stmt("SET DEADLOCK_PRIORITY @@xxx"); + ms().verified_stmt("SET LOCK_TIMEOUT 1800"); + ms().verified_stmt("SET CONCAT_NULL_YIELDS_NULL ON"); + ms().verified_stmt("SET CURSOR_CLOSE_ON_COMMIT ON"); + ms().verified_stmt("SET FIPS_FLAGGER 'level'"); + ms().verified_stmt("SET FIPS_FLAGGER OFF"); + ms().verified_stmt("SET LANGUAGE Italian"); + ms().verified_stmt("SET QUOTED_IDENTIFIER ON"); + ms().verified_stmt("SET ARITHABORT ON"); + ms().verified_stmt("SET ARITHIGNORE OFF"); + ms().verified_stmt("SET FMTONLY ON"); + ms().verified_stmt("SET NOCOUNT OFF"); + ms().verified_stmt("SET NOEXEC ON"); + ms().verified_stmt("SET NUMERIC_ROUNDABORT ON"); + ms().verified_stmt("SET QUERY_GOVERNOR_COST_LIMIT 11"); + ms().verified_stmt("SET ROWCOUNT 4"); + ms().verified_stmt("SET ROWCOUNT @xxx"); + ms().verified_stmt("SET ROWCOUNT @@xxx"); + ms().verified_stmt("SET TEXTSIZE 11"); + ms().verified_stmt("SET ANSI_DEFAULTS ON"); + ms().verified_stmt("SET ANSI_NULL_DFLT_OFF ON"); + ms().verified_stmt("SET ANSI_NULL_DFLT_ON ON"); + ms().verified_stmt("SET ANSI_NULLS ON"); + ms().verified_stmt("SET ANSI_PADDING ON"); + ms().verified_stmt("SET ANSI_WARNINGS ON"); + ms().verified_stmt("SET FORCEPLAN ON"); + ms().verified_stmt("SET SHOWPLAN_ALL ON"); + ms().verified_stmt("SET SHOWPLAN_TEXT ON"); + ms().verified_stmt("SET SHOWPLAN_XML ON"); + ms().verified_stmt("SET IMPLICIT_TRANSACTIONS ON"); + ms().verified_stmt("SET REMOTE_PROC_TRANSACTIONS ON"); + ms().verified_stmt("SET XACT_ABORT ON"); + ms().verified_stmt("SET ANSI_NULLS, ANSI_PADDING ON"); +} + +#[test] +fn parse_mssql_if_else() { + // Simple statements and blocks + ms().verified_stmt("IF 1 = 1 SELECT '1'; ELSE SELECT '2';"); + ms().verified_stmt("IF 1 = 1 BEGIN SET @A = 1; END ELSE SET @A = 2;"); + ms().verified_stmt( + "IF DATENAME(weekday, GETDATE()) IN (N'Saturday', N'Sunday') SELECT 'Weekend'; ELSE SELECT 'Weekday';" + ); + ms().verified_stmt( + "IF (SELECT COUNT(*) FROM a.b WHERE c LIKE 'x%') > 1 SELECT 'yes'; ELSE SELECT 'No';", + ); + + // Multiple statements + let stmts = ms() + .parse_sql_statements("DECLARE @A INT; IF 1=1 BEGIN SET @A = 1 END ELSE SET @A = 2") + .unwrap(); + match &stmts[..] { + [Statement::Declare { .. 
}, Statement::If(stmt)] => { + assert_eq!( + stmt.to_string(), + "IF 1 = 1 BEGIN SET @A = 1; END ELSE SET @A = 2;" + ); + } + _ => panic!("Unexpected statements: {:?}", stmts), + } +} + +#[test] +fn test_mssql_if_else_span() { + let sql = "IF 1 = 1 SELECT '1' ELSE SELECT '2'"; + let mut parser = Parser::new(&MsSqlDialect {}).try_with_sql(sql).unwrap(); + assert_eq!( + parser.parse_statement().unwrap().span(), + Span::new(Location::new(1, 1), Location::new(1, sql.len() as u64 + 1)) + ); +} + +#[test] +fn test_mssql_if_else_multiline_span() { + let sql_line1 = "IF 1 = 1"; + let sql_line2 = "SELECT '1'"; + let sql_line3 = "ELSE SELECT '2'"; + let sql = [sql_line1, sql_line2, sql_line3].join("\n"); + let mut parser = Parser::new(&MsSqlDialect {}).try_with_sql(&sql).unwrap(); + assert_eq!( + parser.parse_statement().unwrap().span(), + Span::new( + Location::new(1, 1), + Location::new(3, sql_line3.len() as u64 + 1) + ) + ); +} + +#[test] +fn test_mssql_if_statements_span() { + // Simple statements + let mut sql = "IF 1 = 1 SELECT '1' ELSE SELECT '2'"; + let mut parser = Parser::new(&MsSqlDialect {}).try_with_sql(sql).unwrap(); + match parser.parse_statement().unwrap() { + Statement::If(IfStatement { + if_block, + else_block: Some(else_block), + .. + }) => { + assert_eq!( + if_block.span(), + Span::new(Location::new(1, 1), Location::new(1, 20)) + ); + assert_eq!( + else_block.span(), + Span::new(Location::new(1, 21), Location::new(1, 36)) + ); + } + stmt => panic!("Unexpected statement: {:?}", stmt), + } + + // Blocks + sql = "IF 1 = 1 BEGIN SET @A = 1; END ELSE BEGIN SET @A = 2 END"; + parser = Parser::new(&MsSqlDialect {}).try_with_sql(sql).unwrap(); + match parser.parse_statement().unwrap() { + Statement::If(IfStatement { + if_block, + else_block: Some(else_block), + .. + }) => { + assert_eq!( + if_block.span(), + Span::new(Location::new(1, 1), Location::new(1, 31)) + ); + assert_eq!( + else_block.span(), + Span::new(Location::new(1, 32), Location::new(1, 57)) + ); + } + stmt => panic!("Unexpected statement: {:?}", stmt), + } +} + +#[test] +fn parse_mssql_varbinary_max_length() { + let sql = "CREATE TABLE example (var_binary_col VARBINARY(MAX))"; + + match ms_and_generic().verified_stmt(sql) { + Statement::CreateTable(CreateTable { name, columns, .. }) => { + assert_eq!( + name, + ObjectName::from(vec![Ident { + value: "example".to_string(), + quote_style: None, + span: Span::empty(), + }]) + ); + assert_eq!( + columns, + vec![ColumnDef { + name: Ident::new("var_binary_col"), + data_type: Varbinary(Some(BinaryLength::Max)), + + options: vec![] + },], + ); + } + _ => unreachable!(), + } + + let sql = "CREATE TABLE example (var_binary_col VARBINARY(50))"; + + match ms_and_generic().verified_stmt(sql) { + Statement::CreateTable(CreateTable { name, columns, .. 
}) => { + assert_eq!( + name, + ObjectName::from(vec![Ident { + value: "example".to_string(), + quote_style: None, + span: Span::empty(), + }]) + ); + assert_eq!( + columns, + vec![ColumnDef { + name: Ident::new("var_binary_col"), + data_type: Varbinary(Some(BinaryLength::IntegerLength { length: 50 })), + + options: vec![] + },], + ); + } + _ => unreachable!(), + } +} + +#[test] +fn parse_mssql_table_identifier_with_default_schema() { + ms().verified_stmt("SELECT * FROM mydatabase..MyTable"); +} + fn ms() -> TestedDialects { TestedDialects::new(vec![Box::new(MsSqlDialect {})]) } + fn ms_and_generic() -> TestedDialects { TestedDialects::new(vec![Box::new(MsSqlDialect {}), Box::new(GenericDialect {})]) } + +#[test] +fn parse_mssql_merge_with_output() { + let stmt = "MERGE dso.products AS t \ + USING dsi.products AS \ + s ON s.ProductID = t.ProductID \ + WHEN MATCHED AND \ + NOT (t.ProductName = s.ProductName OR (ISNULL(t.ProductName, s.ProductName) IS NULL)) \ + THEN UPDATE SET t.ProductName = s.ProductName \ + WHEN NOT MATCHED BY TARGET \ + THEN INSERT (ProductID, ProductName) \ + VALUES (s.ProductID, s.ProductName) \ + WHEN NOT MATCHED BY SOURCE THEN DELETE \ + OUTPUT $action, deleted.ProductID INTO dsi.temp_products"; + ms_and_generic().verified_stmt(stmt); +} + +#[test] +fn parse_create_trigger() { + let create_trigger = "\ + CREATE OR ALTER TRIGGER reminder1 \ + ON Sales.Customer \ + AFTER INSERT, UPDATE \ + AS RAISERROR('Notify Customer Relations', 16, 10);\ + "; + let create_stmt = ms().verified_stmt(create_trigger); + assert_eq!( + create_stmt, + Statement::CreateTrigger { + or_alter: true, + or_replace: false, + is_constraint: false, + name: ObjectName::from(vec![Ident::new("reminder1")]), + period: TriggerPeriod::After, + events: vec![TriggerEvent::Insert, TriggerEvent::Update(vec![]),], + table_name: ObjectName::from(vec![Ident::new("Sales"), Ident::new("Customer")]), + referenced_table_name: None, + referencing: vec![], + trigger_object: TriggerObject::Statement, + include_each: false, + condition: None, + exec_body: None, + statements: Some(ConditionalStatements::Sequence { + statements: vec![Statement::RaisError { + message: Box::new(Expr::Value( + (Value::SingleQuotedString("Notify Customer Relations".to_string())) + .with_empty_span() + )), + severity: Box::new(Expr::Value( + (Value::Number("16".parse().unwrap(), false)).with_empty_span() + )), + state: Box::new(Expr::Value( + (Value::Number("10".parse().unwrap(), false)).with_empty_span() + )), + arguments: vec![], + options: vec![], + }], + }), + characteristics: None, + } + ); + + let multi_statement_as_trigger = "\ + CREATE TRIGGER some_trigger ON some_table FOR INSERT \ + AS \ + DECLARE @var INT; \ + RAISERROR('Trigger fired', 10, 1);\ + "; + let _ = ms().verified_stmt(multi_statement_as_trigger); + + let multi_statement_trigger = "\ + CREATE TRIGGER some_trigger ON some_table FOR INSERT \ + AS \ + BEGIN \ + DECLARE @var INT; \ + RAISERROR('Trigger fired', 10, 1); \ + END\ + "; + let _ = ms().verified_stmt(multi_statement_trigger); + + let create_trigger_with_return = "\ + CREATE TRIGGER some_trigger ON some_table FOR INSERT \ + AS \ + BEGIN \ + RETURN; \ + END\ + "; + let _ = ms().verified_stmt(create_trigger_with_return); + + let create_trigger_with_return = "\ + CREATE TRIGGER some_trigger ON some_table FOR INSERT \ + AS \ + BEGIN \ + RETURN; \ + END\ + "; + let _ = ms().verified_stmt(create_trigger_with_return); + + let create_trigger_with_conditional = "\ + CREATE TRIGGER some_trigger ON some_table FOR INSERT \ + AS 
\ + BEGIN \ + IF 1 = 2 \ + BEGIN \ + RAISERROR('Trigger fired', 10, 1); \ + END; \ + RETURN; \ + END\ + "; + let _ = ms().verified_stmt(create_trigger_with_conditional); +} + +#[test] +fn parse_drop_trigger() { + let sql_drop_trigger = "DROP TRIGGER emp_stamp;"; + let drop_stmt = ms().one_statement_parses_to(sql_drop_trigger, ""); + assert_eq!( + drop_stmt, + Statement::DropTrigger { + if_exists: false, + trigger_name: ObjectName::from(vec![Ident::new("emp_stamp")]), + table_name: None, + option: None, + } + ); +} + +#[test] +fn parse_print() { + let print_string_literal = "PRINT 'Hello, world!'"; + let print_stmt = ms().verified_stmt(print_string_literal); + assert_eq!( + print_stmt, + Statement::Print(PrintStatement { + message: Box::new(Expr::Value( + (Value::SingleQuotedString("Hello, world!".to_string())).with_empty_span() + )), + }) + ); + + let _ = ms().verified_stmt("PRINT N'Hello, ⛄️!'"); + let _ = ms().verified_stmt("PRINT @my_variable"); +} diff --git a/tests/sqlparser_mysql.rs b/tests/sqlparser_mysql.rs index db5b9ec8d..27c60b052 100644 --- a/tests/sqlparser_mysql.rs +++ b/tests/sqlparser_mysql.rs @@ -19,18 +19,28 @@ //! Test SQL syntax specific to MySQL. The parser based on the generic dialect //! is also tested (on the inputs it can handle). +use helpers::attached_token::AttachedToken; use matches::assert_matches; use sqlparser::ast::MysqlInsertPriority::{Delayed, HighPriority, LowPriority}; use sqlparser::ast::*; use sqlparser::dialect::{GenericDialect, MySqlDialect}; use sqlparser::parser::{ParserError, ParserOptions}; +use sqlparser::tokenizer::Span; use sqlparser::tokenizer::Token; use test_utils::*; #[macro_use] mod test_utils; +fn mysql() -> TestedDialects { + TestedDialects::new(vec![Box::new(MySqlDialect {})]) +} + +fn mysql_and_generic() -> TestedDialects { + TestedDialects::new(vec![Box::new(MySqlDialect {}), Box::new(GenericDialect {})]) +} + #[test] fn parse_identifiers() { mysql().verified_stmt("SELECT $a$, àà"); @@ -42,11 +52,11 @@ fn parse_literal_string() { let select = mysql().verified_only_select(sql); assert_eq!(2, select.projection.len()); assert_eq!( - &Expr::Value(Value::SingleQuotedString("single".to_string())), + &Expr::Value((Value::SingleQuotedString("single".to_string())).with_empty_span()), expr_from_projection(&select.projection[0]) ); assert_eq!( - &Expr::Value(Value::DoubleQuotedString("double".to_string())), + &Expr::Value((Value::DoubleQuotedString("double".to_string())).with_empty_span()), expr_from_projection(&select.projection[1]) ); } @@ -139,19 +149,22 @@ fn parse_flush() { read_lock: false, export: false, tables: vec![ - ObjectName(vec![ + ObjectName::from(vec![ Ident { value: "mek".to_string(), - quote_style: Some('`') + quote_style: Some('`'), + span: Span::empty(), }, Ident { value: "table1".to_string(), - quote_style: Some('`') + quote_style: Some('`'), + span: Span::empty(), } ]), - ObjectName(vec![Ident { + ObjectName::from(vec![Ident { value: "table2".to_string(), - quote_style: None + quote_style: None, + span: Span::empty(), }]) ] } @@ -176,19 +189,22 @@ fn parse_flush() { read_lock: true, export: false, tables: vec![ - ObjectName(vec![ + ObjectName::from(vec![ Ident { value: "mek".to_string(), - quote_style: Some('`') + quote_style: Some('`'), + span: Span::empty(), }, Ident { value: "table1".to_string(), - quote_style: Some('`') + quote_style: Some('`'), + span: Span::empty(), } ]), - ObjectName(vec![Ident { + ObjectName::from(vec![Ident { value: "table2".to_string(), - quote_style: None + quote_style: None, + span: 
Span::empty(), }]) ] } @@ -202,19 +218,22 @@ fn parse_flush() { read_lock: false, export: true, tables: vec![ - ObjectName(vec![ + ObjectName::from(vec![ Ident { value: "mek".to_string(), - quote_style: Some('`') + quote_style: Some('`'), + span: Span::empty(), }, Ident { value: "table1".to_string(), - quote_style: Some('`') + quote_style: Some('`'), + span: Span::empty(), } ]), - ObjectName(vec![Ident { + ObjectName::from(vec![Ident { value: "table2".to_string(), - quote_style: None + quote_style: None, + span: Span::empty(), }]) ] } @@ -223,14 +242,22 @@ fn parse_flush() { #[test] fn parse_show_columns() { - let table_name = ObjectName(vec![Ident::new("mytable")]); assert_eq!( mysql_and_generic().verified_stmt("SHOW COLUMNS FROM mytable"), Statement::ShowColumns { extended: false, full: false, - table_name: table_name.clone(), - filter: None, + show_options: ShowStatementOptions { + show_in: Some(ShowStatementIn { + clause: ShowStatementInClause::FROM, + parent_type: None, + parent_name: Some(ObjectName::from(vec![Ident::new("mytable")])), + }), + filter_position: None, + limit_from: None, + limit: None, + starts_with: None, + } } ); assert_eq!( @@ -238,8 +265,20 @@ fn parse_show_columns() { Statement::ShowColumns { extended: false, full: false, - table_name: ObjectName(vec![Ident::new("mydb"), Ident::new("mytable")]), - filter: None, + show_options: ShowStatementOptions { + show_in: Some(ShowStatementIn { + clause: ShowStatementInClause::FROM, + parent_type: None, + parent_name: Some(ObjectName::from(vec![ + Ident::new("mydb"), + Ident::new("mytable") + ])), + }), + filter_position: None, + limit_from: None, + limit: None, + starts_with: None, + } } ); assert_eq!( @@ -247,8 +286,17 @@ fn parse_show_columns() { Statement::ShowColumns { extended: true, full: false, - table_name: table_name.clone(), - filter: None, + show_options: ShowStatementOptions { + show_in: Some(ShowStatementIn { + clause: ShowStatementInClause::FROM, + parent_type: None, + parent_name: Some(ObjectName::from(vec![Ident::new("mytable")])), + }), + filter_position: None, + limit_from: None, + limit: None, + starts_with: None, + } } ); assert_eq!( @@ -256,8 +304,17 @@ fn parse_show_columns() { Statement::ShowColumns { extended: false, full: true, - table_name: table_name.clone(), - filter: None, + show_options: ShowStatementOptions { + show_in: Some(ShowStatementIn { + clause: ShowStatementInClause::FROM, + parent_type: None, + parent_name: Some(ObjectName::from(vec![Ident::new("mytable")])), + }), + filter_position: None, + limit_from: None, + limit: None, + starts_with: None, + } } ); assert_eq!( @@ -265,8 +322,19 @@ fn parse_show_columns() { Statement::ShowColumns { extended: false, full: false, - table_name: table_name.clone(), - filter: Some(ShowStatementFilter::Like("pattern".into())), + show_options: ShowStatementOptions { + show_in: Some(ShowStatementIn { + clause: ShowStatementInClause::FROM, + parent_type: None, + parent_name: Some(ObjectName::from(vec![Ident::new("mytable")])), + }), + filter_position: Some(ShowStatementFilterPosition::Suffix( + ShowStatementFilter::Like("pattern".into()) + )), + limit_from: None, + limit: None, + starts_with: None, + } } ); assert_eq!( @@ -274,18 +342,27 @@ fn parse_show_columns() { Statement::ShowColumns { extended: false, full: false, - table_name, - filter: Some(ShowStatementFilter::Where( - mysql_and_generic().verified_expr("1 = 2") - )), + show_options: ShowStatementOptions { + show_in: Some(ShowStatementIn { + clause: ShowStatementInClause::FROM, + parent_type: None, 
+ parent_name: Some(ObjectName::from(vec![Ident::new("mytable")])), + }), + filter_position: Some(ShowStatementFilterPosition::Suffix( + ShowStatementFilter::Where(mysql_and_generic().verified_expr("1 = 2")) + )), + limit_from: None, + limit: None, + starts_with: None, + } } ); mysql_and_generic() .one_statement_parses_to("SHOW FIELDS FROM mytable", "SHOW COLUMNS FROM mytable"); mysql_and_generic() - .one_statement_parses_to("SHOW COLUMNS IN mytable", "SHOW COLUMNS FROM mytable"); + .one_statement_parses_to("SHOW COLUMNS IN mytable", "SHOW COLUMNS IN mytable"); mysql_and_generic() - .one_statement_parses_to("SHOW FIELDS IN mytable", "SHOW COLUMNS FROM mytable"); + .one_statement_parses_to("SHOW FIELDS IN mytable", "SHOW COLUMNS IN mytable"); mysql_and_generic().one_statement_parses_to( "SHOW COLUMNS FROM mytable FROM mydb", "SHOW COLUMNS FROM mydb.mytable", @@ -327,60 +404,115 @@ fn parse_show_tables() { assert_eq!( mysql_and_generic().verified_stmt("SHOW TABLES"), Statement::ShowTables { + terse: false, + history: false, extended: false, full: false, - db_name: None, - filter: None, + external: false, + show_options: ShowStatementOptions { + starts_with: None, + limit: None, + limit_from: None, + show_in: None, + filter_position: None + } } ); assert_eq!( mysql_and_generic().verified_stmt("SHOW TABLES FROM mydb"), Statement::ShowTables { + terse: false, + history: false, extended: false, full: false, - db_name: Some(Ident::new("mydb")), - filter: None, + external: false, + show_options: ShowStatementOptions { + starts_with: None, + limit: None, + limit_from: None, + show_in: Some(ShowStatementIn { + clause: ShowStatementInClause::FROM, + parent_type: None, + parent_name: Some(ObjectName::from(vec![Ident::new("mydb")])), + }), + filter_position: None + } } ); assert_eq!( mysql_and_generic().verified_stmt("SHOW EXTENDED TABLES"), Statement::ShowTables { + terse: false, + history: false, extended: true, full: false, - db_name: None, - filter: None, + external: false, + show_options: ShowStatementOptions { + starts_with: None, + limit: None, + limit_from: None, + show_in: None, + filter_position: None + } } ); assert_eq!( mysql_and_generic().verified_stmt("SHOW FULL TABLES"), Statement::ShowTables { + terse: false, + history: false, extended: false, full: true, - db_name: None, - filter: None, + external: false, + show_options: ShowStatementOptions { + starts_with: None, + limit: None, + limit_from: None, + show_in: None, + filter_position: None + } } ); assert_eq!( mysql_and_generic().verified_stmt("SHOW TABLES LIKE 'pattern'"), Statement::ShowTables { + terse: false, + history: false, extended: false, full: false, - db_name: None, - filter: Some(ShowStatementFilter::Like("pattern".into())), + external: false, + show_options: ShowStatementOptions { + starts_with: None, + limit: None, + limit_from: None, + show_in: None, + filter_position: Some(ShowStatementFilterPosition::Suffix( + ShowStatementFilter::Like("pattern".into()) + )) + } } ); assert_eq!( mysql_and_generic().verified_stmt("SHOW TABLES WHERE 1 = 2"), Statement::ShowTables { + terse: false, + history: false, extended: false, full: false, - db_name: None, - filter: Some(ShowStatementFilter::Where( - mysql_and_generic().verified_expr("1 = 2") - )), + external: false, + show_options: ShowStatementOptions { + starts_with: None, + limit: None, + limit_from: None, + show_in: None, + filter_position: Some(ShowStatementFilterPosition::Suffix( + ShowStatementFilter::Where(mysql_and_generic().verified_expr("1 = 2")) + )) + } } ); - 
mysql_and_generic().one_statement_parses_to("SHOW TABLES IN mydb", "SHOW TABLES FROM mydb"); + mysql_and_generic().verified_stmt("SHOW TABLES IN mydb"); + mysql_and_generic().verified_stmt("SHOW TABLES FROM mydb"); } #[test] @@ -405,7 +537,7 @@ fn parse_show_extended_full() { #[test] fn parse_show_create() { - let obj_name = ObjectName(vec![Ident::new("myident")]); + let obj_name = ObjectName::from(vec![Ident::new("myident")]); for obj_type in &[ ShowCreateObject::Table, @@ -462,7 +594,7 @@ fn parse_use() { // Test single identifier without quotes assert_eq!( mysql_and_generic().verified_stmt(&format!("USE {}", object_name)), - Statement::Use(Use::Object(ObjectName(vec![Ident::new( + Statement::Use(Use::Object(ObjectName::from(vec![Ident::new( object_name.to_string() )]))) ); @@ -471,7 +603,7 @@ fn parse_use() { assert_eq!( mysql_and_generic() .verified_stmt(&format!("USE {}{}{}", quote, object_name, quote)), - Statement::Use(Use::Object(ObjectName(vec![Ident::with_quote( + Statement::Use(Use::Object(ObjectName::from(vec![Ident::with_quote( quote, object_name.to_string(), )]))) @@ -485,12 +617,12 @@ fn parse_set_variables() { mysql_and_generic().verified_stmt("SET sql_mode = CONCAT(@@sql_mode, ',STRICT_TRANS_TABLES')"); assert_eq!( mysql_and_generic().verified_stmt("SET LOCAL autocommit = 1"), - Statement::SetVariable { - local: true, + Statement::Set(Set::SingleAssignment { + scope: Some(ContextModifier::Local), hivevar: false, - variables: OneOrManyWithParens::One(ObjectName(vec!["autocommit".into()])), - value: vec![Expr::Value(number("1"))], - } + variable: ObjectName::from(vec!["autocommit".into()]), + values: vec![Expr::value(number("1"))], + }) ); } @@ -504,7 +636,6 @@ fn parse_create_table_auto_increment() { vec![ColumnDef { name: Ident::new("bar"), data_type: DataType::Int(None), - collation: None, options: vec![ ColumnOptionDef { name: None, @@ -548,6 +679,7 @@ fn table_constraint_unique_primary_ctor( columns, index_options, characteristics, + nulls_distinct: NullsDistinctOption::None, }, None => TableConstraint::PrimaryKey { name, @@ -563,7 +695,7 @@ fn table_constraint_unique_primary_ctor( #[test] fn parse_create_table_primary_and_unique_key() { let sqls = ["UNIQUE KEY", "PRIMARY KEY"] - .map(|key_ty|format!("CREATE TABLE foo (id INT PRIMARY KEY AUTO_INCREMENT, bar INT NOT NULL, CONSTRAINT bar_key {key_ty} (bar))")); + .map(|key_ty| format!("CREATE TABLE foo (id INT PRIMARY KEY AUTO_INCREMENT, bar INT NOT NULL, CONSTRAINT bar_key {key_ty} (bar))")); let index_type_display = [Some(KeyOrIndexDisplay::Key), None]; @@ -593,7 +725,6 @@ fn parse_create_table_primary_and_unique_key() { ColumnDef { name: Ident::new("id"), data_type: DataType::Int(None), - collation: None, options: vec![ ColumnOptionDef { name: None, @@ -613,7 +744,6 @@ fn parse_create_table_primary_and_unique_key() { ColumnDef { name: Ident::new("bar"), data_type: DataType::Int(None), - collation: None, options: vec![ColumnOptionDef { name: None, option: ColumnOption::NotNull, @@ -631,7 +761,7 @@ fn parse_create_table_primary_and_unique_key() { #[test] fn parse_create_table_primary_and_unique_key_with_index_options() { let sqls = ["UNIQUE INDEX", "PRIMARY KEY"] - .map(|key_ty|format!("CREATE TABLE foo (bar INT, var INT, CONSTRAINT constr {key_ty} index_name (bar, var) USING HASH COMMENT 'yes, ' USING BTREE COMMENT 'MySQL allows')")); + .map(|key_ty| format!("CREATE TABLE foo (bar INT, var INT, CONSTRAINT constr {key_ty} index_name (bar, var) USING HASH COMMENT 'yes, ' USING BTREE COMMENT 'MySQL allows')")); let 
index_type_display = [Some(KeyOrIndexDisplay::Index), None]; @@ -705,7 +835,7 @@ fn parse_create_table_primary_and_unique_key_with_index_type() { #[test] fn parse_create_table_primary_and_unique_key_characteristic_test() { let sqls = ["UNIQUE INDEX", "PRIMARY KEY"] - .map(|key_ty|format!("CREATE TABLE x (y INT, CONSTRAINT constr {key_ty} (y) NOT DEFERRABLE INITIALLY IMMEDIATE)")); + .map(|key_ty| format!("CREATE TABLE x (y INT, CONSTRAINT constr {key_ty} (y) NOT DEFERRABLE INITIALLY IMMEDIATE)")); for sql in &sqls { mysql_and_generic().verified_stmt(sql); } @@ -718,9 +848,23 @@ fn parse_create_table_comment() { for sql in [without_equal, with_equal] { match mysql().verified_stmt(sql) { - Statement::CreateTable(CreateTable { name, comment, .. }) => { + Statement::CreateTable(CreateTable { + name, + table_options, + .. + }) => { assert_eq!(name.to_string(), "foo"); - assert_eq!(comment.expect("Should exist").to_string(), "baz"); + + let plain_options = match table_options { + CreateTableOptions::Plain(options) => options, + _ => unreachable!(), + }; + let comment = match plain_options.first().unwrap() { + SqlOption::Comment(CommentDef::WithEq(c)) + | SqlOption::Comment(CommentDef::WithoutEq(c)) => c, + _ => unreachable!(), + }; + assert_eq!(comment, "baz"); } _ => unreachable!(), } @@ -729,29 +873,226 @@ fn parse_create_table_comment() { #[test] fn parse_create_table_auto_increment_offset() { - let canonical = - "CREATE TABLE foo (bar INT NOT NULL AUTO_INCREMENT) ENGINE=InnoDB AUTO_INCREMENT 123"; - let with_equal = - "CREATE TABLE foo (bar INT NOT NULL AUTO_INCREMENT) ENGINE=InnoDB AUTO_INCREMENT=123"; + let sql = + "CREATE TABLE foo (bar INT NOT NULL AUTO_INCREMENT) ENGINE = InnoDB AUTO_INCREMENT = 123"; + + match mysql().verified_stmt(sql) { + Statement::CreateTable(CreateTable { + name, + table_options, + .. + }) => { + assert_eq!(name.to_string(), "foo"); - for sql in [canonical, with_equal] { - match mysql().one_statement_parses_to(sql, canonical) { + let plain_options = match table_options { + CreateTableOptions::Plain(options) => options, + _ => unreachable!(), + }; + + assert!(plain_options.contains(&SqlOption::KeyValue { + key: Ident::new("AUTO_INCREMENT"), + value: Expr::Value(test_utils::number("123").with_empty_span()) + })); + } + _ => unreachable!(), + } +} + +#[test] +fn parse_create_table_multiple_options_order_independent() { + let sql1 = "CREATE TABLE mytable (id INT) ENGINE=InnoDB ROW_FORMAT=DYNAMIC KEY_BLOCK_SIZE=8 COMMENT='abc'"; + let sql2 = "CREATE TABLE mytable (id INT) KEY_BLOCK_SIZE=8 COMMENT='abc' ENGINE=InnoDB ROW_FORMAT=DYNAMIC"; + let sql3 = "CREATE TABLE mytable (id INT) ROW_FORMAT=DYNAMIC KEY_BLOCK_SIZE=8 COMMENT='abc' ENGINE=InnoDB"; + + for sql in [sql1, sql2, sql3] { + match mysql().parse_sql_statements(sql).unwrap().pop().unwrap() { Statement::CreateTable(CreateTable { name, - auto_increment_offset, + table_options, .. 
}) => { - assert_eq!(name.to_string(), "foo"); - assert_eq!( - auto_increment_offset.expect("Should exist").to_string(), - "123" - ); + assert_eq!(name.to_string(), "mytable"); + + let plain_options = match table_options { + CreateTableOptions::Plain(options) => options, + _ => unreachable!(), + }; + + assert!(plain_options.contains(&SqlOption::NamedParenthesizedList( + NamedParenthesizedList { + key: Ident::new("ENGINE"), + name: Some(Ident::new("InnoDB")), + values: vec![] + } + ))); + + assert!(plain_options.contains(&SqlOption::KeyValue { + key: Ident::new("KEY_BLOCK_SIZE"), + value: Expr::Value(test_utils::number("8").with_empty_span()) + })); + + assert!(plain_options + .contains(&SqlOption::Comment(CommentDef::WithEq("abc".to_owned())))); + + assert!(plain_options.contains(&SqlOption::KeyValue { + key: Ident::new("ROW_FORMAT"), + value: Expr::Identifier(Ident::new("DYNAMIC".to_owned())) + })); } _ => unreachable!(), } } } +#[test] +fn parse_create_table_with_all_table_options() { + let sql = + "CREATE TABLE foo (bar INT NOT NULL AUTO_INCREMENT) ENGINE = InnoDB AUTO_INCREMENT = 123 DEFAULT CHARSET = utf8mb4 COLLATE = utf8mb4_0900_ai_ci INSERT_METHOD = FIRST KEY_BLOCK_SIZE = 8 ROW_FORMAT = DYNAMIC DATA DIRECTORY = '/var/lib/mysql/data' INDEX DIRECTORY = '/var/lib/mysql/index' PACK_KEYS = 1 STATS_AUTO_RECALC = 1 STATS_PERSISTENT = 0 STATS_SAMPLE_PAGES = 128 DELAY_KEY_WRITE = 1 COMPRESSION = 'ZLIB' ENCRYPTION = 'Y' MAX_ROWS = 10000 MIN_ROWS = 10 AUTOEXTEND_SIZE = 64 AVG_ROW_LENGTH = 128 CHECKSUM = 1 CONNECTION = 'mysql://localhost' ENGINE_ATTRIBUTE = 'primary' PASSWORD = 'secure_password' SECONDARY_ENGINE_ATTRIBUTE = 'secondary_attr' START TRANSACTION TABLESPACE my_tablespace STORAGE DISK UNION = (table1, table2, table3)"; + + match mysql().verified_stmt(sql) { + Statement::CreateTable(CreateTable { + name, + table_options, + .. 
+ }) => { + assert_eq!(name, vec![Ident::new("foo".to_owned())].into()); + + let plain_options = match table_options { + CreateTableOptions::Plain(options) => options, + _ => unreachable!(), + }; + + assert!(plain_options.contains(&SqlOption::NamedParenthesizedList( + NamedParenthesizedList { + key: Ident::new("ENGINE"), + name: Some(Ident::new("InnoDB")), + values: vec![] + } + ))); + + assert!(plain_options.contains(&SqlOption::KeyValue { + key: Ident::new("COLLATE"), + value: Expr::Identifier(Ident::new("utf8mb4_0900_ai_ci".to_owned())) + })); + assert!(plain_options.contains(&SqlOption::KeyValue { + key: Ident::new("DEFAULT CHARSET"), + value: Expr::Identifier(Ident::new("utf8mb4".to_owned())) + })); + assert!(plain_options.contains(&SqlOption::KeyValue { + key: Ident::new("AUTO_INCREMENT"), + value: Expr::value(test_utils::number("123")) + })); + assert!(plain_options.contains(&SqlOption::KeyValue { + key: Ident::new("KEY_BLOCK_SIZE"), + value: Expr::value(test_utils::number("8")) + })); + assert!(plain_options.contains(&SqlOption::KeyValue { + key: Ident::new("ROW_FORMAT"), + value: Expr::Identifier(Ident::new("DYNAMIC".to_owned())) + })); + assert!(plain_options.contains(&SqlOption::KeyValue { + key: Ident::new("PACK_KEYS"), + value: Expr::value(test_utils::number("1")) + })); + assert!(plain_options.contains(&SqlOption::KeyValue { + key: Ident::new("STATS_AUTO_RECALC"), + value: Expr::value(test_utils::number("1")) + })); + assert!(plain_options.contains(&SqlOption::KeyValue { + key: Ident::new("STATS_PERSISTENT"), + value: Expr::value(test_utils::number("0")) + })); + assert!(plain_options.contains(&SqlOption::KeyValue { + key: Ident::new("STATS_SAMPLE_PAGES"), + value: Expr::value(test_utils::number("128")) + })); + assert!(plain_options.contains(&SqlOption::KeyValue { + key: Ident::new("INSERT_METHOD"), + value: Expr::Identifier(Ident::new("FIRST".to_owned())) + })); + assert!(plain_options.contains(&SqlOption::KeyValue { + key: Ident::new("COMPRESSION"), + value: Expr::value(Value::SingleQuotedString("ZLIB".to_owned())) + })); + assert!(plain_options.contains(&SqlOption::KeyValue { + key: Ident::new("ENCRYPTION"), + value: Expr::value(Value::SingleQuotedString("Y".to_owned())) + })); + assert!(plain_options.contains(&SqlOption::KeyValue { + key: Ident::new("MAX_ROWS"), + value: Expr::value(test_utils::number("10000")) + })); + assert!(plain_options.contains(&SqlOption::KeyValue { + key: Ident::new("MIN_ROWS"), + value: Expr::value(test_utils::number("10")) + })); + assert!(plain_options.contains(&SqlOption::KeyValue { + key: Ident::new("AUTOEXTEND_SIZE"), + value: Expr::value(test_utils::number("64")) + })); + assert!(plain_options.contains(&SqlOption::KeyValue { + key: Ident::new("AVG_ROW_LENGTH"), + value: Expr::value(test_utils::number("128")) + })); + assert!(plain_options.contains(&SqlOption::KeyValue { + key: Ident::new("CHECKSUM"), + value: Expr::value(test_utils::number("1")) + })); + assert!(plain_options.contains(&SqlOption::KeyValue { + key: Ident::new("CONNECTION"), + value: Expr::value(Value::SingleQuotedString("mysql://localhost".to_owned())) + })); + assert!(plain_options.contains(&SqlOption::KeyValue { + key: Ident::new("ENGINE_ATTRIBUTE"), + value: Expr::value(Value::SingleQuotedString("primary".to_owned())) + })); + assert!(plain_options.contains(&SqlOption::KeyValue { + key: Ident::new("PASSWORD"), + value: 
Expr::value(Value::SingleQuotedString("secure_password".to_owned())) + })); + assert!(plain_options.contains(&SqlOption::KeyValue { + key: Ident::new("SECONDARY_ENGINE_ATTRIBUTE"), + value: Expr::value(Value::SingleQuotedString("secondary_attr".to_owned())) + })); + assert!(plain_options.contains(&SqlOption::Ident(Ident::new( + "START TRANSACTION".to_owned() + )))); + assert!( + plain_options.contains(&SqlOption::TableSpace(TablespaceOption { + name: "my_tablespace".to_string(), + storage: Some(StorageType::Disk), + })) + ); + + assert!(plain_options.contains(&SqlOption::NamedParenthesizedList( + NamedParenthesizedList { + key: Ident::new("UNION"), + name: None, + values: vec![ + Ident::new("table1".to_string()), + Ident::new("table2".to_string()), + Ident::new("table3".to_string()) + ] + } + ))); + + assert!(plain_options.contains(&SqlOption::KeyValue { + key: Ident::new("DATA DIRECTORY"), + value: Expr::value(Value::SingleQuotedString("/var/lib/mysql/data".to_owned())) + })); + assert!(plain_options.contains(&SqlOption::KeyValue { + key: Ident::new("INDEX DIRECTORY"), + value: Expr::value(Value::SingleQuotedString("/var/lib/mysql/index".to_owned())) + })); + } + _ => unreachable!(), + } +} + #[test] fn parse_create_table_set_enum() { let sql = "CREATE TABLE foo (bar SET('a', 'b'), baz ENUM('a', 'b'))"; @@ -763,13 +1104,17 @@ fn parse_create_table_set_enum() { ColumnDef { name: Ident::new("bar"), data_type: DataType::Set(vec!["a".to_string(), "b".to_string()]), - collation: None, options: vec![], }, ColumnDef { name: Ident::new("baz"), - data_type: DataType::Enum(vec!["a".to_string(), "b".to_string()]), - collation: None, + data_type: DataType::Enum( + vec![ + EnumMember::Name("a".to_string()), + EnumMember::Name("b".to_string()) + ], + None + ), options: vec![], } ], @@ -782,13 +1127,12 @@ fn parse_create_table_set_enum() { #[test] fn parse_create_table_engine_default_charset() { - let sql = "CREATE TABLE foo (id INT(11)) ENGINE=InnoDB DEFAULT CHARSET=utf8mb3"; + let sql = "CREATE TABLE foo (id INT(11)) ENGINE = InnoDB DEFAULT CHARSET = utf8mb3"; match mysql().verified_stmt(sql) { Statement::CreateTable(CreateTable { name, columns, - engine, - default_charset, + table_options, .. }) => { assert_eq!(name.to_string(), "foo"); @@ -796,19 +1140,28 @@ fn parse_create_table_engine_default_charset() { vec![ColumnDef { name: Ident::new("id"), data_type: DataType::Int(Some(11)), - collation: None, options: vec![], },], columns ); - assert_eq!( - engine, - Some(TableEngine { - name: "InnoDB".to_string(), - parameters: None - }) - ); - assert_eq!(default_charset, Some("utf8mb3".to_string())); + + let plain_options = match table_options { + CreateTableOptions::Plain(options) => options, + _ => unreachable!(), + }; + + assert!(plain_options.contains(&SqlOption::KeyValue { + key: Ident::new("DEFAULT CHARSET"), + value: Expr::Identifier(Ident::new("utf8mb3".to_owned())) + })); + + assert!(plain_options.contains(&SqlOption::NamedParenthesizedList( + NamedParenthesizedList { + key: Ident::new("ENGINE"), + name: Some(Ident::new("InnoDB")), + values: vec![] + } + ))); } _ => unreachable!(), } @@ -816,12 +1169,12 @@ fn parse_create_table_engine_default_charset() { #[test] fn parse_create_table_collate() { - let sql = "CREATE TABLE foo (id INT(11)) COLLATE=utf8mb4_0900_ai_ci"; + let sql = "CREATE TABLE foo (id INT(11)) COLLATE = utf8mb4_0900_ai_ci"; match mysql().verified_stmt(sql) { Statement::CreateTable(CreateTable { name, columns, - collation, + table_options, .. 
}) => { assert_eq!(name.to_string(), "foo"); @@ -829,12 +1182,20 @@ fn parse_create_table_collate() { vec![ColumnDef { name: Ident::new("id"), data_type: DataType::Int(Some(11)), - collation: None, options: vec![], },], columns ); - assert_eq!(collation, Some("utf8mb4_0900_ai_ci".to_string())); + + let plain_options = match table_options { + CreateTableOptions::Plain(options) => options, + _ => unreachable!(), + }; + + assert!(plain_options.contains(&SqlOption::KeyValue { + key: Ident::new("COLLATE"), + value: Expr::Identifier(Ident::new("utf8mb4_0900_ai_ci".to_owned())) + })); } _ => unreachable!(), } @@ -842,25 +1203,38 @@ fn parse_create_table_collate() { #[test] fn parse_create_table_both_options_and_as_query() { - let sql = "CREATE TABLE foo (id INT(11)) ENGINE=InnoDB DEFAULT CHARSET=utf8mb3 COLLATE=utf8mb4_0900_ai_ci AS SELECT 1"; + let sql = "CREATE TABLE foo (id INT(11)) ENGINE = InnoDB DEFAULT CHARSET = utf8mb3 COLLATE = utf8mb4_0900_ai_ci AS SELECT 1"; match mysql_and_generic().verified_stmt(sql) { Statement::CreateTable(CreateTable { name, - collation, query, + table_options, .. }) => { assert_eq!(name.to_string(), "foo"); - assert_eq!(collation, Some("utf8mb4_0900_ai_ci".to_string())); + + let plain_options = match table_options { + CreateTableOptions::Plain(options) => options, + _ => unreachable!(), + }; + + assert!(plain_options.contains(&SqlOption::KeyValue { + key: Ident::new("COLLATE"), + value: Expr::Identifier(Ident::new("utf8mb4_0900_ai_ci".to_owned())) + })); + assert_eq!( query.unwrap().body.as_select().unwrap().projection, - vec![SelectItem::UnnamedExpr(Expr::Value(number("1")))] + vec![SelectItem::UnnamedExpr(Expr::Value( + (number("1")).with_empty_span() + ))] ); } _ => unreachable!(), } - let sql = r"CREATE TABLE foo (id INT(11)) ENGINE=InnoDB AS SELECT 1 DEFAULT CHARSET=utf8mb3"; + let sql = + r"CREATE TABLE foo (id INT(11)) ENGINE = InnoDB AS SELECT 1 DEFAULT CHARSET = utf8mb3"; assert!(matches!( mysql_and_generic().parse_sql_statements(sql), Err(ParserError::ParserError(_)) @@ -877,11 +1251,10 @@ fn parse_create_table_comment_character_set() { vec![ColumnDef { name: Ident::new("s"), data_type: DataType::Text, - collation: None, options: vec![ ColumnOptionDef { name: None, - option: ColumnOption::CharacterSet(ObjectName(vec![Ident::new( + option: ColumnOption::CharacterSet(ObjectName::from(vec![Ident::new( "utf8mb4" )])) }, @@ -924,7 +1297,6 @@ fn parse_quote_identifiers() { vec![ColumnDef { name: Ident::with_quote('`', "BEGIN"), data_type: DataType::Int(None), - collation: None, options: vec![ColumnOptionDef { name: None, option: ColumnOption::Unique { @@ -948,11 +1320,14 @@ fn parse_escaped_quote_identifiers_with_escape() { Statement::Query(Box::new(Query { with: None, body: Box::new(SetExpr::Select(Box::new(Select { + select_token: AttachedToken::empty(), distinct: None, top: None, + top_before_distinct: false, projection: vec![SelectItem::UnnamedExpr(Expr::Identifier(Ident { value: "quoted ` identifier".into(), quote_style: Some('`'), + span: Span::empty(), }))], into: None, from: vec![], @@ -969,16 +1344,16 @@ fn parse_escaped_quote_identifiers_with_escape() { window_before_qualify: false, value_table_mode: None, connect_by: None, + flavor: SelectFlavor::Standard, }))), order_by: None, - limit: None, - limit_by: vec![], - offset: None, + limit_clause: None, fetch: None, locks: vec![], for_clause: None, settings: None, format_clause: None, + pipe_operators: vec![], })) ); } @@ -998,11 +1373,14 @@ fn parse_escaped_quote_identifiers_with_no_escape() { 
Statement::Query(Box::new(Query { with: None, body: Box::new(SetExpr::Select(Box::new(Select { + select_token: AttachedToken::empty(), distinct: None, top: None, + top_before_distinct: false, projection: vec![SelectItem::UnnamedExpr(Expr::Identifier(Ident { value: "quoted `` identifier".into(), quote_style: Some('`'), + span: Span::empty(), }))], into: None, from: vec![], @@ -1019,16 +1397,16 @@ fn parse_escaped_quote_identifiers_with_no_escape() { window_before_qualify: false, value_table_mode: None, connect_by: None, + flavor: SelectFlavor::Standard, }))), order_by: None, - limit: None, - limit_by: vec![], - offset: None, + limit_clause: None, fetch: None, locks: vec![], for_clause: None, settings: None, format_clause: None, + pipe_operators: vec![], })) ); } @@ -1041,11 +1419,15 @@ fn parse_escaped_backticks_with_escape() { Statement::Query(Box::new(Query { with: None, body: Box::new(SetExpr::Select(Box::new(Select { + select_token: AttachedToken::empty(), + distinct: None, top: None, + top_before_distinct: false, projection: vec![SelectItem::UnnamedExpr(Expr::Identifier(Ident { value: "`quoted identifier`".into(), quote_style: Some('`'), + span: Span::empty(), }))], into: None, from: vec![], @@ -1062,16 +1444,16 @@ fn parse_escaped_backticks_with_escape() { window_before_qualify: false, value_table_mode: None, connect_by: None, + flavor: SelectFlavor::Standard, }))), order_by: None, - limit: None, - limit_by: vec![], - offset: None, + limit_clause: None, fetch: None, locks: vec![], for_clause: None, settings: None, format_clause: None, + pipe_operators: vec![], })) ); } @@ -1088,11 +1470,15 @@ fn parse_escaped_backticks_with_no_escape() { Statement::Query(Box::new(Query { with: None, body: Box::new(SetExpr::Select(Box::new(Select { + select_token: AttachedToken::empty(), + distinct: None, top: None, + top_before_distinct: false, projection: vec![SelectItem::UnnamedExpr(Expr::Identifier(Ident { value: "``quoted identifier``".into(), quote_style: Some('`'), + span: Span::empty(), }))], into: None, from: vec![], @@ -1109,16 +1495,16 @@ fn parse_escaped_backticks_with_no_escape() { window_before_qualify: false, value_table_mode: None, connect_by: None, + flavor: SelectFlavor::Standard, }))), order_by: None, - limit: None, - limit_by: vec![], - offset: None, + limit_clause: None, fetch: None, locks: vec![], for_clause: None, settings: None, format_clause: None, + pipe_operators: vec![], })) ); } @@ -1169,31 +1555,26 @@ fn parse_create_table_with_minimum_display_width() { ColumnDef { name: Ident::new("bar_tinyint"), data_type: DataType::TinyInt(Some(3)), - collation: None, options: vec![], }, ColumnDef { name: Ident::new("bar_smallint"), data_type: DataType::SmallInt(Some(5)), - collation: None, options: vec![], }, ColumnDef { name: Ident::new("bar_mediumint"), data_type: DataType::MediumInt(Some(6)), - collation: None, options: vec![], }, ColumnDef { name: Ident::new("bar_int"), data_type: DataType::Int(Some(11)), - collation: None, options: vec![], }, ColumnDef { name: Ident::new("bar_bigint"), data_type: DataType::BigInt(Some(20)), - collation: None, options: vec![], } ], @@ -1214,32 +1595,27 @@ fn parse_create_table_unsigned() { vec![ ColumnDef { name: Ident::new("bar_tinyint"), - data_type: DataType::UnsignedTinyInt(Some(3)), - collation: None, + data_type: DataType::TinyIntUnsigned(Some(3)), options: vec![], }, ColumnDef { name: Ident::new("bar_smallint"), - data_type: DataType::UnsignedSmallInt(Some(5)), - collation: None, + data_type: DataType::SmallIntUnsigned(Some(5)), options: 
vec![], }, ColumnDef { name: Ident::new("bar_mediumint"), - data_type: DataType::UnsignedMediumInt(Some(13)), - collation: None, + data_type: DataType::MediumIntUnsigned(Some(13)), options: vec![], }, ColumnDef { name: Ident::new("bar_int"), - data_type: DataType::UnsignedInt(Some(11)), - collation: None, + data_type: DataType::IntUnsigned(Some(11)), options: vec![], }, ColumnDef { name: Ident::new("bar_bigint"), - data_type: DataType::UnsignedBigInt(Some(20)), - collation: None, + data_type: DataType::BigIntUnsigned(Some(20)), options: vec![], }, ], @@ -1256,13 +1632,16 @@ fn parse_simple_insert() { match mysql().verified_stmt(sql) { Statement::Insert(Insert { - table_name, + table: table_name, columns, source, on, .. }) => { - assert_eq!(ObjectName(vec![Ident::new("tasks")]), table_name); + assert_eq!( + TableObject::TableName(ObjectName::from(vec![Ident::new("tasks")])), + table_name + ); assert_eq!(vec![Ident::new("title"), Ident::new("priority")], columns); assert!(on.is_none()); assert_eq!( @@ -1272,30 +1651,36 @@ fn parse_simple_insert() { explicit_row: false, rows: vec![ vec![ - Expr::Value(Value::SingleQuotedString( - "Test Some Inserts".to_string() - )), - Expr::Value(number("1")) + Expr::Value( + (Value::SingleQuotedString("Test Some Inserts".to_string())) + .with_empty_span() + ), + Expr::value(number("1")) ], vec![ - Expr::Value(Value::SingleQuotedString("Test Entry 2".to_string())), - Expr::Value(number("2")) + Expr::Value( + (Value::SingleQuotedString("Test Entry 2".to_string())) + .with_empty_span() + ), + Expr::value(number("2")) ], vec![ - Expr::Value(Value::SingleQuotedString("Test Entry 3".to_string())), - Expr::Value(number("3")) + Expr::Value( + (Value::SingleQuotedString("Test Entry 3".to_string())) + .with_empty_span() + ), + Expr::value(number("3")) ] ] })), order_by: None, - limit: None, - limit_by: vec![], - offset: None, + limit_clause: None, fetch: None, locks: vec![], for_clause: None, settings: None, format_clause: None, + pipe_operators: vec![], })), source ); @@ -1310,14 +1695,17 @@ fn parse_ignore_insert() { match mysql_and_generic().verified_stmt(sql) { Statement::Insert(Insert { - table_name, + table: table_name, columns, source, on, ignore, .. }) => { - assert_eq!(ObjectName(vec![Ident::new("tasks")]), table_name); + assert_eq!( + TableObject::TableName(ObjectName::from(vec![Ident::new("tasks")])), + table_name + ); assert_eq!(vec![Ident::new("title"), Ident::new("priority")], columns); assert!(on.is_none()); assert!(ignore); @@ -1327,19 +1715,21 @@ fn parse_ignore_insert() { body: Box::new(SetExpr::Values(Values { explicit_row: false, rows: vec![vec![ - Expr::Value(Value::SingleQuotedString("Test Some Inserts".to_string())), - Expr::Value(number("1")) + Expr::Value( + (Value::SingleQuotedString("Test Some Inserts".to_string())) + .with_empty_span() + ), + Expr::value(number("1")) ]] })), order_by: None, - limit: None, - limit_by: vec![], - offset: None, + limit_clause: None, fetch: None, locks: vec![], for_clause: None, settings: None, format_clause: None, + pipe_operators: vec![], })), source ); @@ -1354,14 +1744,17 @@ fn parse_priority_insert() { match mysql_and_generic().verified_stmt(sql) { Statement::Insert(Insert { - table_name, + table: table_name, columns, source, on, priority, .. 
}) => { - assert_eq!(ObjectName(vec![Ident::new("tasks")]), table_name); + assert_eq!( + TableObject::TableName(ObjectName::from(vec![Ident::new("tasks")])), + table_name + ); assert_eq!(vec![Ident::new("title"), Ident::new("priority")], columns); assert!(on.is_none()); assert_eq!(priority, Some(HighPriority)); @@ -1371,19 +1764,21 @@ fn parse_priority_insert() { body: Box::new(SetExpr::Values(Values { explicit_row: false, rows: vec![vec![ - Expr::Value(Value::SingleQuotedString("Test Some Inserts".to_string())), - Expr::Value(number("1")) + Expr::Value( + (Value::SingleQuotedString("Test Some Inserts".to_string())) + .with_empty_span() + ), + Expr::value(number("1")) ]] })), order_by: None, - limit: None, - limit_by: vec![], - offset: None, + limit_clause: None, fetch: None, locks: vec![], for_clause: None, settings: None, format_clause: None, + pipe_operators: vec![], })), source ); @@ -1395,14 +1790,17 @@ fn parse_priority_insert() { match mysql().verified_stmt(sql2) { Statement::Insert(Insert { - table_name, + table: table_name, columns, source, on, priority, .. }) => { - assert_eq!(ObjectName(vec![Ident::new("tasks")]), table_name); + assert_eq!( + TableObject::TableName(ObjectName::from(vec![Ident::new("tasks")])), + table_name + ); assert_eq!(vec![Ident::new("title"), Ident::new("priority")], columns); assert!(on.is_none()); assert_eq!(priority, Some(LowPriority)); @@ -1412,19 +1810,21 @@ fn parse_priority_insert() { body: Box::new(SetExpr::Values(Values { explicit_row: false, rows: vec![vec![ - Expr::Value(Value::SingleQuotedString("Test Some Inserts".to_string())), - Expr::Value(number("1")) + Expr::Value( + (Value::SingleQuotedString("Test Some Inserts".to_string())) + .with_empty_span() + ), + Expr::value(number("1")) ]] })), order_by: None, - limit: None, - limit_by: vec![], - offset: None, + limit_clause: None, fetch: None, locks: vec![], for_clause: None, settings: None, format_clause: None, + pipe_operators: vec![], })), source ); @@ -1438,21 +1838,21 @@ fn parse_insert_as() { let sql = r"INSERT INTO `table` (`date`) VALUES ('2024-01-01') AS `alias`"; match mysql_and_generic().verified_stmt(sql) { Statement::Insert(Insert { - table_name, + table: table_name, columns, source, insert_alias, .. 
}) => { assert_eq!( - ObjectName(vec![Ident::with_quote('`', "table")]), + TableObject::TableName(ObjectName::from(vec![Ident::with_quote('`', "table")])), table_name ); assert_eq!(vec![Ident::with_quote('`', "date")], columns); let insert_alias = insert_alias.unwrap(); assert_eq!( - ObjectName(vec![Ident::with_quote('`', "alias")]), + ObjectName::from(vec![Ident::with_quote('`', "alias")]), insert_alias.row_alias ); assert_eq!(Some(vec![]), insert_alias.col_aliases); @@ -1461,19 +1861,18 @@ fn parse_insert_as() { with: None, body: Box::new(SetExpr::Values(Values { explicit_row: false, - rows: vec![vec![Expr::Value(Value::SingleQuotedString( - "2024-01-01".to_string() - ))]] + rows: vec![vec![Expr::Value( + (Value::SingleQuotedString("2024-01-01".to_string())).with_empty_span() + )]] })), order_by: None, - limit: None, - limit_by: vec![], - offset: None, + limit_clause: None, fetch: None, locks: vec![], for_clause: None, settings: None, format_clause: None, + pipe_operators: vec![], })), source ); @@ -1490,14 +1889,14 @@ fn parse_insert_as() { let sql = r"INSERT INTO `table` (`id`, `date`) VALUES (1, '2024-01-01') AS `alias` (`mek_id`, `mek_date`)"; match mysql_and_generic().verified_stmt(sql) { Statement::Insert(Insert { - table_name, + table: table_name, columns, source, insert_alias, .. }) => { assert_eq!( - ObjectName(vec![Ident::with_quote('`', "table")]), + TableObject::TableName(ObjectName::from(vec![Ident::with_quote('`', "table")])), table_name ); assert_eq!( @@ -1506,7 +1905,7 @@ fn parse_insert_as() { ); let insert_alias = insert_alias.unwrap(); assert_eq!( - ObjectName(vec![Ident::with_quote('`', "alias")]), + ObjectName::from(vec![Ident::with_quote('`', "alias")]), insert_alias.row_alias ); assert_eq!( @@ -1522,19 +1921,21 @@ fn parse_insert_as() { body: Box::new(SetExpr::Values(Values { explicit_row: false, rows: vec![vec![ - Expr::Value(number("1")), - Expr::Value(Value::SingleQuotedString("2024-01-01".to_string())) + Expr::value(number("1")), + Expr::Value( + (Value::SingleQuotedString("2024-01-01".to_string())) + .with_empty_span() + ) ]] })), order_by: None, - limit: None, - limit_by: vec![], - offset: None, + limit_clause: None, fetch: None, locks: vec![], for_clause: None, settings: None, format_clause: None, + pipe_operators: vec![], })), source ); @@ -1548,7 +1949,7 @@ fn parse_replace_insert() { let sql = r"REPLACE DELAYED INTO tasks (title, priority) VALUES ('Test Some Inserts', 1)"; match mysql().verified_stmt(sql) { Statement::Insert(Insert { - table_name, + table: table_name, columns, source, on, @@ -1556,7 +1957,10 @@ fn parse_replace_insert() { priority, .. 
}) => { - assert_eq!(ObjectName(vec![Ident::new("tasks")]), table_name); + assert_eq!( + TableObject::TableName(ObjectName::from(vec![Ident::new("tasks")])), + table_name + ); assert_eq!(vec![Ident::new("title"), Ident::new("priority")], columns); assert!(on.is_none()); assert!(replace_into); @@ -1567,19 +1971,21 @@ fn parse_replace_insert() { body: Box::new(SetExpr::Values(Values { explicit_row: false, rows: vec![vec![ - Expr::Value(Value::SingleQuotedString("Test Some Inserts".to_string())), - Expr::Value(number("1")) + Expr::Value( + (Value::SingleQuotedString("Test Some Inserts".to_string())) + .with_empty_span() + ), + Expr::value(number("1")) ]] })), order_by: None, - limit: None, - limit_by: vec![], - offset: None, + limit_clause: None, fetch: None, locks: vec![], for_clause: None, settings: None, format_clause: None, + pipe_operators: vec![], })), source ); @@ -1594,13 +2000,16 @@ fn parse_empty_row_insert() { match mysql().one_statement_parses_to(sql, "INSERT INTO tb VALUES (), ()") { Statement::Insert(Insert { - table_name, + table: table_name, columns, source, on, .. }) => { - assert_eq!(ObjectName(vec![Ident::new("tb")]), table_name); + assert_eq!( + TableObject::TableName(ObjectName::from(vec![Ident::new("tb")])), + table_name + ); assert!(columns.is_empty()); assert!(on.is_none()); assert_eq!( @@ -1611,14 +2020,13 @@ fn parse_empty_row_insert() { rows: vec![vec![], vec![]] })), order_by: None, - limit: None, - limit_by: vec![], - offset: None, + limit_clause: None, fetch: None, locks: vec![], for_clause: None, settings: None, format_clause: None, + pipe_operators: vec![], })), source ); @@ -1633,14 +2041,14 @@ fn parse_insert_with_on_duplicate_update() { match mysql().verified_stmt(sql) { Statement::Insert(Insert { - table_name, + table: table_name, columns, source, on, .. 
}) => { assert_eq!( - ObjectName(vec![Ident::new("permission_groups")]), + TableObject::TableName(ObjectName::from(vec![Ident::new("permission_groups")])), table_name ); assert_eq!( @@ -1660,58 +2068,61 @@ fn parse_insert_with_on_duplicate_update() { body: Box::new(SetExpr::Values(Values { explicit_row: false, rows: vec![vec![ - Expr::Value(Value::SingleQuotedString( - "accounting_manager".to_string() - )), - Expr::Value(Value::SingleQuotedString( - "Some description about the group".to_string() - )), - Expr::Value(Value::Boolean(true)), - Expr::Value(Value::Boolean(true)), - Expr::Value(Value::Boolean(true)), - Expr::Value(Value::Boolean(true)), + Expr::Value( + (Value::SingleQuotedString("accounting_manager".to_string())) + .with_empty_span() + ), + Expr::Value( + (Value::SingleQuotedString( + "Some description about the group".to_string() + )) + .with_empty_span() + ), + Expr::Value((Value::Boolean(true)).with_empty_span()), + Expr::Value((Value::Boolean(true)).with_empty_span()), + Expr::Value((Value::Boolean(true)).with_empty_span()), + Expr::Value((Value::Boolean(true)).with_empty_span()), ]] })), order_by: None, - limit: None, - limit_by: vec![], - offset: None, + limit_clause: None, fetch: None, locks: vec![], for_clause: None, settings: None, format_clause: None, + pipe_operators: vec![], })), source ); assert_eq!( Some(OnInsert::DuplicateKeyUpdate(vec![ Assignment { - target: AssignmentTarget::ColumnName(ObjectName(vec![Ident::new( + target: AssignmentTarget::ColumnName(ObjectName::from(vec![Ident::new( "description".to_string() )])), value: call("VALUES", [Expr::Identifier(Ident::new("description"))]), }, Assignment { - target: AssignmentTarget::ColumnName(ObjectName(vec![Ident::new( + target: AssignmentTarget::ColumnName(ObjectName::from(vec![Ident::new( "perm_create".to_string() )])), value: call("VALUES", [Expr::Identifier(Ident::new("perm_create"))]), }, Assignment { - target: AssignmentTarget::ColumnName(ObjectName(vec![Ident::new( + target: AssignmentTarget::ColumnName(ObjectName::from(vec![Ident::new( "perm_read".to_string() )])), value: call("VALUES", [Expr::Identifier(Ident::new("perm_read"))]), }, Assignment { - target: AssignmentTarget::ColumnName(ObjectName(vec![Ident::new( + target: AssignmentTarget::ColumnName(ObjectName::from(vec![Ident::new( "perm_update".to_string() )])), value: call("VALUES", [Expr::Identifier(Ident::new("perm_update"))]), }, Assignment { - target: AssignmentTarget::ColumnName(ObjectName(vec![Ident::new( + target: AssignmentTarget::ColumnName(ObjectName::from(vec![Ident::new( "perm_delete".to_string() )])), value: call("VALUES", [Expr::Identifier(Ident::new("perm_delete"))]), @@ -1732,22 +2143,19 @@ fn parse_select_with_numeric_prefix_column_name() { assert_eq!( q.body, Box::new(SetExpr::Select(Box::new(Select { + select_token: AttachedToken::empty(), + distinct: None, top: None, + top_before_distinct: false, projection: vec![SelectItem::UnnamedExpr(Expr::Identifier(Ident::new( "123col_$@123abc" )))], into: None, from: vec![TableWithJoins { - relation: TableFactor::Table { - name: ObjectName(vec![Ident::with_quote('"', "table")]), - alias: None, - args: None, - with_hints: vec![], - version: None, - partitions: vec![], - with_ordinality: false, - }, + relation: table_from_name(ObjectName::from(vec![Ident::with_quote( + '"', "table" + )])), joins: vec![] }], lateral_views: vec![], @@ -1763,6 +2171,7 @@ fn parse_select_with_numeric_prefix_column_name() { window_before_qualify: false, value_table_mode: None, connect_by: None, + flavor: 
SelectFlavor::Standard, }))) ); } @@ -1770,6 +2179,128 @@ fn parse_select_with_numeric_prefix_column_name() { } } +#[test] +fn parse_qualified_identifiers_with_numeric_prefix() { + // Case 1: Qualified column name that starts with digits. + match mysql().verified_stmt("SELECT t.15to29 FROM my_table AS t") { + Statement::Query(q) => match *q.body { + SetExpr::Select(s) => match s.projection.last() { + Some(SelectItem::UnnamedExpr(Expr::CompoundIdentifier(parts))) => { + assert_eq!(&[Ident::new("t"), Ident::new("15to29")], &parts[..]); + } + proj => panic!("Unexpected projection: {:?}", proj), + }, + body => panic!("Unexpected statement body: {:?}", body), + }, + stmt => panic!("Unexpected statement: {:?}", stmt), + } + + // Case 2: Qualified column name that starts with digits and on its own represents a number. + match mysql().verified_stmt("SELECT t.15e29 FROM my_table AS t") { + Statement::Query(q) => match *q.body { + SetExpr::Select(s) => match s.projection.last() { + Some(SelectItem::UnnamedExpr(Expr::CompoundIdentifier(parts))) => { + assert_eq!(&[Ident::new("t"), Ident::new("15e29")], &parts[..]); + } + proj => panic!("Unexpected projection: {:?}", proj), + }, + body => panic!("Unexpected statement body: {:?}", body), + }, + stmt => panic!("Unexpected statement: {:?}", stmt), + } + + // Case 3: Unqualified, the same token is parsed as a number. + match mysql() + .parse_sql_statements("SELECT 15e29 FROM my_table") + .unwrap() + .pop() + { + Some(Statement::Query(q)) => match *q.body { + SetExpr::Select(s) => match s.projection.last() { + Some(SelectItem::UnnamedExpr(Expr::Value(ValueWithSpan { value, .. }))) => { + assert_eq!(&number("15e29"), value); + } + proj => panic!("Unexpected projection: {:?}", proj), + }, + body => panic!("Unexpected statement body: {:?}", body), + }, + stmt => panic!("Unexpected statement: {:?}", stmt), + } + + // Case 4: Quoted simple identifier. + match mysql().verified_stmt("SELECT `15e29` FROM my_table") { + Statement::Query(q) => match *q.body { + SetExpr::Select(s) => match s.projection.last() { + Some(SelectItem::UnnamedExpr(Expr::Identifier(name))) => { + assert_eq!(&Ident::with_quote('`', "15e29"), name); + } + proj => panic!("Unexpected projection: {:?}", proj), + }, + body => panic!("Unexpected statement body: {:?}", body), + }, + stmt => panic!("Unexpected statement: {:?}", stmt), + } + + // Case 5: Quoted compound identifier. + match mysql().verified_stmt("SELECT t.`15e29` FROM my_table AS t") { + Statement::Query(q) => match *q.body { + SetExpr::Select(s) => match s.projection.last() { + Some(SelectItem::UnnamedExpr(Expr::CompoundIdentifier(parts))) => { + assert_eq!( + &[Ident::new("t"), Ident::with_quote('`', "15e29")], + &parts[..] + ); + } + proj => panic!("Unexpected projection: {:?}", proj), + }, + body => panic!("Unexpected statement body: {:?}", body), + }, + stmt => panic!("Unexpected statement: {:?}", stmt), + } + + // Case 6: Multi-level compound identifiers. + match mysql().verified_stmt("SELECT 1db.1table.1column") { + Statement::Query(q) => match *q.body { + SetExpr::Select(s) => match s.projection.last() { + Some(SelectItem::UnnamedExpr(Expr::CompoundIdentifier(parts))) => { + assert_eq!( + &[ + Ident::new("1db"), + Ident::new("1table"), + Ident::new("1column") + ], + &parts[..] + ); + } + proj => panic!("Unexpected projection: {:?}", proj), + }, + body => panic!("Unexpected statement body: {:?}", body), + }, + stmt => panic!("Unexpected statement: {:?}", stmt), + } + + // Case 7: Multi-level compound quoted identifiers. 
+ match mysql().verified_stmt("SELECT `1`.`2`.`3`") { + Statement::Query(q) => match *q.body { + SetExpr::Select(s) => match s.projection.last() { + Some(SelectItem::UnnamedExpr(Expr::CompoundIdentifier(parts))) => { + assert_eq!( + &[ + Ident::with_quote('`', "1"), + Ident::with_quote('`', "2"), + Ident::with_quote('`', "3") + ], + &parts[..] + ); + } + proj => panic!("Unexpected projection: {:?}", proj), + }, + body => panic!("Unexpected statement body: {:?}", body), + }, + stmt => panic!("Unexpected statement: {:?}", stmt), + } +} + // Don't run with bigdecimal as it fails like this on rust beta: // // 'parse_select_with_concatenation_of_exp_number_and_numeric_prefix_column' @@ -1786,23 +2317,20 @@ fn parse_select_with_concatenation_of_exp_number_and_numeric_prefix_column() { assert_eq!( q.body, Box::new(SetExpr::Select(Box::new(Select { + select_token: AttachedToken::empty(), + distinct: None, top: None, + top_before_distinct: false, projection: vec![ - SelectItem::UnnamedExpr(Expr::Value(number("123e4"))), + SelectItem::UnnamedExpr(Expr::value(number("123e4"))), SelectItem::UnnamedExpr(Expr::Identifier(Ident::new("123col_$@123abc"))) ], into: None, from: vec![TableWithJoins { - relation: TableFactor::Table { - name: ObjectName(vec![Ident::with_quote('"', "table")]), - alias: None, - args: None, - with_hints: vec![], - version: None, - partitions: vec![], - with_ordinality: false, - }, + relation: table_from_name(ObjectName::from(vec![Ident::with_quote( + '"', "table" + )])), joins: vec![] }], lateral_views: vec![], @@ -1818,6 +2346,7 @@ fn parse_select_with_concatenation_of_exp_number_and_numeric_prefix_column() { window_before_qualify: false, value_table_mode: None, connect_by: None, + flavor: SelectFlavor::Standard, }))) ); } @@ -1830,12 +2359,12 @@ fn parse_insert_with_numeric_prefix_column_name() { let sql = "INSERT INTO s1.t1 (123col_$@length123) VALUES (67.654)"; match mysql().verified_stmt(sql) { Statement::Insert(Insert { - table_name, + table: table_name, columns, .. 
}) => { assert_eq!( - ObjectName(vec![Ident::new("s1"), Ident::new("t1")]), + TableObject::TableName(ObjectName::from(vec![Ident::new("s1"), Ident::new("t1")])), table_name ); assert_eq!(vec![Ident::new("123col_$@length123")], columns); @@ -1854,11 +2383,12 @@ fn parse_update_with_joins() { from: _from, selection, returning, + or: None, } => { assert_eq!( TableWithJoins { relation: TableFactor::Table { - name: ObjectName(vec![Ident::new("orders")]), + name: ObjectName::from(vec![Ident::new("orders")]), alias: Some(TableAlias { name: Ident::new("o"), columns: vec![] @@ -1868,10 +2398,13 @@ fn parse_update_with_joins() { version: None, partitions: vec![], with_ordinality: false, + json_path: None, + sample: None, + index_hints: vec![], }, joins: vec![Join { relation: TableFactor::Table { - name: ObjectName(vec![Ident::new("customers")]), + name: ObjectName::from(vec![Ident::new("customers")]), alias: Some(TableAlias { name: Ident::new("c"), columns: vec![] @@ -1881,9 +2414,12 @@ fn parse_update_with_joins() { version: None, partitions: vec![], with_ordinality: false, + json_path: None, + sample: None, + index_hints: vec![], }, global: false, - join_operator: JoinOperator::Inner(JoinConstraint::On(Expr::BinaryOp { + join_operator: JoinOperator::Join(JoinConstraint::On(Expr::BinaryOp { left: Box::new(Expr::CompoundIdentifier(vec![ Ident::new("o"), Ident::new("customer_id") @@ -1900,11 +2436,11 @@ fn parse_update_with_joins() { ); assert_eq!( vec![Assignment { - target: AssignmentTarget::ColumnName(ObjectName(vec![ + target: AssignmentTarget::ColumnName(ObjectName::from(vec![ Ident::new("o"), Ident::new("completed") ])), - value: Expr::Value(Value::Boolean(true)) + value: Expr::Value((Value::Boolean(true)).with_empty_span()) }], assignments ); @@ -1915,7 +2451,9 @@ fn parse_update_with_joins() { Ident::new("firstname") ])), op: BinaryOperator::Eq, - right: Box::new(Expr::Value(Value::SingleQuotedString("Peter".to_string()))) + right: Box::new(Expr::Value( + (Value::SingleQuotedString("Peter".to_string())).with_empty_span() + )) }), selection ); @@ -1934,10 +2472,13 @@ fn parse_delete_with_order_by() { vec![OrderByExpr { expr: Expr::Identifier(Ident { value: "id".to_owned(), - quote_style: None + quote_style: None, + span: Span::empty(), }), - asc: Some(false), - nulls_first: None, + options: OrderByOptions { + asc: Some(false), + nulls_first: None, + }, with_fill: None, }], order_by @@ -1952,7 +2493,7 @@ fn parse_delete_with_limit() { let sql = "DELETE FROM customers LIMIT 100"; match mysql().verified_stmt(sql) { Statement::Delete(Delete { limit, .. 
}) => { - assert_eq!(Some(Expr::Value(number("100"))), limit); + assert_eq!(Some(Expr::value(number("100"))), limit); } _ => unreachable!(), } @@ -1980,7 +2521,6 @@ fn parse_alter_table_add_column() { column_def: ColumnDef { name: "b".into(), data_type: DataType::Int(None), - collation: None, options: vec![], }, column_position: Some(MySQLColumnPosition::First), @@ -2010,12 +2550,12 @@ fn parse_alter_table_add_column() { column_def: ColumnDef { name: "b".into(), data_type: DataType::Int(None), - collation: None, options: vec![], }, column_position: Some(MySQLColumnPosition::After(Ident { value: String::from("foo"), - quote_style: None + quote_style: None, + span: Span::empty(), })), },] ); @@ -2049,7 +2589,6 @@ fn parse_alter_table_add_columns() { column_def: ColumnDef { name: "a".into(), data_type: DataType::Text, - collation: None, options: vec![], }, column_position: Some(MySQLColumnPosition::First), @@ -2060,12 +2599,12 @@ fn parse_alter_table_add_columns() { column_def: ColumnDef { name: "b".into(), data_type: DataType::Int(None), - collation: None, options: vec![], }, column_position: Some(MySQLColumnPosition::After(Ident { value: String::from("foo"), quote_style: None, + span: Span::empty(), })), }, ] @@ -2083,9 +2622,19 @@ fn parse_alter_table_drop_primary_key() { ); } +#[test] +fn parse_alter_table_drop_foreign_key() { + assert_matches!( + alter_table_op( + mysql_and_generic().verified_stmt("ALTER TABLE tab DROP FOREIGN KEY foo_ibfk_1") + ), + AlterTableOperation::DropForeignKey { name } if name.value == "foo_ibfk_1" + ); +} + #[test] fn parse_alter_table_change_column() { - let expected_name = ObjectName(vec![Ident::new("orders")]); + let expected_name = ObjectName::from(vec![Ident::new("orders")]); let expected_operation = AlterTableOperation::ChangeColumn { old_name: Ident::new("description"), new_name: Ident::new("desc"), @@ -2126,6 +2675,7 @@ fn parse_alter_table_change_column() { column_position: Some(MySQLColumnPosition::After(Ident { value: String::from("foo"), quote_style: None, + span: Span::empty(), })), }; let sql4 = "ALTER TABLE orders CHANGE COLUMN description desc TEXT NOT NULL AFTER foo"; @@ -2136,7 +2686,7 @@ fn parse_alter_table_change_column() { #[test] fn parse_alter_table_change_column_with_column_position() { - let expected_name = ObjectName(vec![Ident::new("orders")]); + let expected_name = ObjectName::from(vec![Ident::new("orders")]); let expected_operation_first = AlterTableOperation::ChangeColumn { old_name: Ident::new("description"), new_name: Ident::new("desc"), @@ -2165,6 +2715,7 @@ fn parse_alter_table_change_column_with_column_position() { column_position: Some(MySQLColumnPosition::After(Ident { value: String::from("total_count"), quote_style: None, + span: Span::empty(), })), }; @@ -2183,7 +2734,7 @@ fn parse_alter_table_change_column_with_column_position() { #[test] fn parse_alter_table_modify_column() { - let expected_name = ObjectName(vec![Ident::new("orders")]); + let expected_name = ObjectName::from(vec![Ident::new("orders")]); let expected_operation = AlterTableOperation::ModifyColumn { col_name: Ident::new("description"), data_type: DataType::Text, @@ -2221,6 +2772,7 @@ fn parse_alter_table_modify_column() { column_position: Some(MySQLColumnPosition::After(Ident { value: String::from("foo"), quote_style: None, + span: Span::empty(), })), }; let sql4 = "ALTER TABLE orders MODIFY COLUMN description TEXT NOT NULL AFTER foo"; @@ -2229,9 +2781,116 @@ fn parse_alter_table_modify_column() { assert_eq!(expected_operation, operation); } +#[test] +fn 
parse_alter_table_with_algorithm() { + let sql = "ALTER TABLE tab ALGORITHM = COPY"; + let expected_operation = AlterTableOperation::Algorithm { + equals: true, + algorithm: AlterTableAlgorithm::Copy, + }; + let operation = alter_table_op(mysql_and_generic().verified_stmt(sql)); + assert_eq!(expected_operation, operation); + + // Check order doesn't matter + let sql = + "ALTER TABLE users DROP COLUMN password_digest, ALGORITHM = COPY, RENAME COLUMN name TO username"; + let stmt = mysql_and_generic().verified_stmt(sql); + match stmt { + Statement::AlterTable { operations, .. } => { + assert_eq!( + operations, + vec![ + AlterTableOperation::DropColumn { + column_name: Ident::new("password_digest"), + if_exists: false, + drop_behavior: None, + }, + AlterTableOperation::Algorithm { + equals: true, + algorithm: AlterTableAlgorithm::Copy, + }, + AlterTableOperation::RenameColumn { + old_column_name: Ident::new("name"), + new_column_name: Ident::new("username") + }, + ] + ) + } + _ => panic!("Unexpected statement {stmt}"), + } + + mysql_and_generic().verified_stmt("ALTER TABLE `users` ALGORITHM DEFAULT"); + mysql_and_generic().verified_stmt("ALTER TABLE `users` ALGORITHM INSTANT"); + mysql_and_generic().verified_stmt("ALTER TABLE `users` ALGORITHM INPLACE"); + mysql_and_generic().verified_stmt("ALTER TABLE `users` ALGORITHM COPY"); + mysql_and_generic().verified_stmt("ALTER TABLE `users` ALGORITHM = DEFAULT"); + mysql_and_generic().verified_stmt("ALTER TABLE `users` ALGORITHM = INSTANT"); + mysql_and_generic().verified_stmt("ALTER TABLE `users` ALGORITHM = INPLACE"); + mysql_and_generic().verified_stmt("ALTER TABLE `users` ALGORITHM = COPY"); +} + +#[test] +fn parse_alter_table_with_lock() { + let sql = "ALTER TABLE tab LOCK = SHARED"; + let expected_operation = AlterTableOperation::Lock { + equals: true, + lock: AlterTableLock::Shared, + }; + let operation = alter_table_op(mysql_and_generic().verified_stmt(sql)); + assert_eq!(expected_operation, operation); + + let sql = + "ALTER TABLE users DROP COLUMN password_digest, LOCK = EXCLUSIVE, RENAME COLUMN name TO username"; + let stmt = mysql_and_generic().verified_stmt(sql); + match stmt { + Statement::AlterTable { operations, .. 
} => { + assert_eq!( + operations, + vec![ + AlterTableOperation::DropColumn { + column_name: Ident::new("password_digest"), + if_exists: false, + drop_behavior: None, + }, + AlterTableOperation::Lock { + equals: true, + lock: AlterTableLock::Exclusive, + }, + AlterTableOperation::RenameColumn { + old_column_name: Ident::new("name"), + new_column_name: Ident::new("username") + }, + ] + ) + } + _ => panic!("Unexpected statement {stmt}"), + } + mysql_and_generic().verified_stmt("ALTER TABLE `users` LOCK DEFAULT"); + mysql_and_generic().verified_stmt("ALTER TABLE `users` LOCK SHARED"); + mysql_and_generic().verified_stmt("ALTER TABLE `users` LOCK NONE"); + mysql_and_generic().verified_stmt("ALTER TABLE `users` LOCK EXCLUSIVE"); + mysql_and_generic().verified_stmt("ALTER TABLE `users` LOCK = DEFAULT"); + mysql_and_generic().verified_stmt("ALTER TABLE `users` LOCK = SHARED"); + mysql_and_generic().verified_stmt("ALTER TABLE `users` LOCK = NONE"); + mysql_and_generic().verified_stmt("ALTER TABLE `users` LOCK = EXCLUSIVE"); +} + +#[test] +fn parse_alter_table_auto_increment() { + let sql = "ALTER TABLE tab AUTO_INCREMENT = 42"; + let expected_operation = AlterTableOperation::AutoIncrement { + equals: true, + value: number("42").with_empty_span(), + }; + let operation = alter_table_op(mysql().verified_stmt(sql)); + assert_eq!(expected_operation, operation); + + mysql_and_generic().verified_stmt("ALTER TABLE `users` AUTO_INCREMENT 42"); +} + #[test] fn parse_alter_table_modify_column_with_column_position() { - let expected_name = ObjectName(vec![Ident::new("orders")]); + let expected_name = ObjectName::from(vec![Ident::new("orders")]); let expected_operation_first = AlterTableOperation::ModifyColumn { col_name: Ident::new("description"), data_type: DataType::Text, @@ -2258,6 +2917,7 @@ fn parse_alter_table_modify_column_with_column_position() { column_position: Some(MySQLColumnPosition::After(Ident { value: String::from("total_count"), quote_style: None, + span: Span::empty(), })), }; @@ -2276,6 +2936,8 @@ fn parse_alter_table_modify_column_with_column_position() { #[test] fn parse_substring_in_select() { + use sqlparser::tokenizer::Span; + let sql = "SELECT DISTINCT SUBSTRING(description, 0, 1) FROM test"; match mysql().one_statement_parses_to( sql, @@ -2286,31 +2948,32 @@ fn parse_substring_in_select() { Box::new(Query { with: None, body: Box::new(SetExpr::Select(Box::new(Select { + select_token: AttachedToken::empty(), distinct: Some(Distinct::Distinct), top: None, + top_before_distinct: false, projection: vec![SelectItem::UnnamedExpr(Expr::Substring { expr: Box::new(Expr::Identifier(Ident { value: "description".to_string(), - quote_style: None + quote_style: None, + span: Span::empty(), })), - substring_from: Some(Box::new(Expr::Value(number("0")))), - substring_for: Some(Box::new(Expr::Value(number("1")))), + substring_from: Some(Box::new(Expr::Value( + (number("0")).with_empty_span() + ))), + substring_for: Some(Box::new(Expr::Value( + (number("1")).with_empty_span() + ))), special: true, + shorthand: false, })], into: None, from: vec![TableWithJoins { - relation: TableFactor::Table { - name: ObjectName(vec![Ident { - value: "test".to_string(), - quote_style: None - }]), - alias: None, - args: None, - with_hints: vec![], - version: None, - partitions: vec![], - with_ordinality: false, - }, + relation: table_from_name(ObjectName::from(vec![Ident { + value: "test".to_string(), + quote_style: None, + span: Span::empty(), + }])), joins: vec![] }], lateral_views: vec![], @@ -2326,16 +2989,16 @@ fn 
parse_substring_in_select() { qualify: None, value_table_mode: None, connect_by: None, + flavor: SelectFlavor::Standard, }))), order_by: None, - limit: None, - limit_by: vec![], - offset: None, + limit_clause: None, fetch: None, locks: vec![], for_clause: None, settings: None, format_clause: None, + pipe_operators: vec![], }), query ); @@ -2369,6 +3032,17 @@ fn parse_rlike_and_regexp() { } } +#[test] +fn parse_like_with_escape() { + // verify backslash is not stripped for escaped wildcards + mysql().verified_only_select(r#"SELECT 'a\%c' LIKE 'a\%c'"#); + mysql().verified_only_select(r#"SELECT 'a\_c' LIKE 'a\_c'"#); + mysql().verified_only_select(r#"SELECT '%\_\%' LIKE '%\_\%'"#); + mysql().verified_only_select(r#"SELECT '\_\%' LIKE CONCAT('\_', '\%')"#); + mysql().verified_only_select(r#"SELECT 'a%c' LIKE 'a$%c' ESCAPE '$'"#); + mysql().verified_only_select(r#"SELECT 'a_c' LIKE 'a#_c' ESCAPE '#'"#); +} + #[test] fn parse_kill() { let stmt = mysql_and_generic().verified_stmt("KILL CONNECTION 5"); @@ -2400,7 +3074,7 @@ fn parse_kill() { } #[test] -fn parse_table_colum_option_on_update() { +fn parse_table_column_option_on_update() { let sql1 = "CREATE TABLE foo (`modification_time` DATETIME ON UPDATE CURRENT_TIMESTAMP())"; match mysql().verified_stmt(sql1) { Statement::CreateTable(CreateTable { name, columns, .. }) => { @@ -2409,7 +3083,6 @@ fn parse_table_colum_option_on_update() { vec![ColumnDef { name: Ident::with_quote('`', "modification_time"), data_type: DataType::Datetime(None), - collation: None, options: vec![ColumnOptionDef { name: None, option: ColumnOption::OnUpdate(call("CURRENT_TIMESTAMP", [])), @@ -2427,19 +3100,19 @@ fn parse_set_names() { let stmt = mysql_and_generic().verified_stmt("SET NAMES utf8mb4"); assert_eq!( stmt, - Statement::SetNames { - charset_name: "utf8mb4".to_string(), + Statement::Set(Set::SetNames { + charset_name: "utf8mb4".into(), collation_name: None, - } + }) ); let stmt = mysql_and_generic().verified_stmt("SET NAMES utf8mb4 COLLATE bogus"); assert_eq!( stmt, - Statement::SetNames { - charset_name: "utf8mb4".to_string(), + Statement::Set(Set::SetNames { + charset_name: "utf8mb4".into(), collation_name: Some("bogus".to_string()), - } + }) ); let stmt = mysql_and_generic() @@ -2447,22 +3120,20 @@ fn parse_set_names() { .unwrap(); assert_eq!( stmt, - vec![Statement::SetNames { - charset_name: "utf8mb4".to_string(), + vec![Statement::Set(Set::SetNames { + charset_name: "utf8mb4".into(), collation_name: Some("bogus".to_string()), - }] + })] ); let stmt = mysql_and_generic().verified_stmt("SET NAMES DEFAULT"); - assert_eq!(stmt, Statement::SetNamesDefault {}); + assert_eq!(stmt, Statement::Set(Set::SetNamesDefault {})); } #[test] fn parse_limit_my_sql_syntax() { - mysql_and_generic().one_statement_parses_to( - "SELECT id, fname, lname FROM customer LIMIT 5, 10", - "SELECT id, fname, lname FROM customer LIMIT 10 OFFSET 5", - ); + mysql_and_generic().verified_stmt("SELECT id, fname, lname FROM customer LIMIT 10 OFFSET 5"); + mysql_and_generic().verified_stmt("SELECT id, fname, lname FROM customer LIMIT 5, 10"); mysql_and_generic().verified_stmt("SELECT * FROM user LIMIT ? 
OFFSET ?"); } @@ -2586,14 +3257,6 @@ fn parse_create_table_with_fulltext_definition_should_not_accept_constraint_name mysql_and_generic().verified_stmt("CREATE TABLE tb (c1 INT, CONSTRAINT cons FULLTEXT (c1))"); } -fn mysql() -> TestedDialects { - TestedDialects::new(vec![Box::new(MySqlDialect {})]) -} - -fn mysql_and_generic() -> TestedDialects { - TestedDialects::new(vec![Box::new(MySqlDialect {}), Box::new(GenericDialect {})]) -} - #[test] fn parse_values() { mysql().verified_stmt("VALUES ROW(1, true, 'a')"); @@ -2607,11 +3270,16 @@ fn parse_hex_string_introducer() { Statement::Query(Box::new(Query { with: None, body: Box::new(SetExpr::Select(Box::new(Select { + select_token: AttachedToken::empty(), distinct: None, top: None, - projection: vec![SelectItem::UnnamedExpr(Expr::IntroducedString { - introducer: "_latin1".to_string(), - value: Value::HexStringLiteral("4D7953514C".to_string()) + top_before_distinct: false, + projection: vec![SelectItem::UnnamedExpr(Expr::Prefixed { + prefix: Ident::from("_latin1"), + value: Expr::Value( + Value::HexStringLiteral("4D7953514C".to_string()).with_empty_span() + ) + .into(), })], from: vec![], lateral_views: vec![], @@ -2628,16 +3296,16 @@ fn parse_hex_string_introducer() { value_table_mode: None, into: None, connect_by: None, + flavor: SelectFlavor::Standard, }))), order_by: None, - limit: None, - limit_by: vec![], - offset: None, + limit_clause: None, fetch: None, locks: vec![], for_clause: None, settings: None, format_clause: None, + pipe_operators: vec![], })) ) } @@ -2699,19 +3367,27 @@ fn parse_convert_using() { #[test] fn parse_create_table_with_column_collate() { let sql = "CREATE TABLE tb (id TEXT CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci)"; - let canonical = "CREATE TABLE tb (id TEXT COLLATE utf8mb4_0900_ai_ci CHARACTER SET utf8mb4)"; - match mysql().one_statement_parses_to(sql, canonical) { + match mysql().verified_stmt(sql) { Statement::CreateTable(CreateTable { name, columns, .. 
}) => { assert_eq!(name.to_string(), "tb"); assert_eq!( vec![ColumnDef { name: Ident::new("id"), data_type: DataType::Text, - collation: Some(ObjectName(vec![Ident::new("utf8mb4_0900_ai_ci")])), - options: vec![ColumnOptionDef { - name: None, - option: ColumnOption::CharacterSet(ObjectName(vec![Ident::new("utf8mb4")])) - }], + options: vec![ + ColumnOptionDef { + name: None, + option: ColumnOption::CharacterSet(ObjectName::from(vec![Ident::new( + "utf8mb4" + )])) + }, + ColumnOptionDef { + name: None, + option: ColumnOption::Collation(ObjectName::from(vec![Ident::new( + "utf8mb4_0900_ai_ci" + )])) + } + ], },], columns ); @@ -2758,6 +3434,12 @@ fn parse_json_table() { r#"SELECT * FROM JSON_TABLE('[1,2]', '$[*]' COLUMNS(x INT PATH '$' ERROR ON EMPTY)) AS t"#, ); mysql().verified_only_select(r#"SELECT * FROM JSON_TABLE('[1,2]', '$[*]' COLUMNS(x INT PATH '$' ERROR ON EMPTY DEFAULT '0' ON ERROR)) AS t"#); + mysql().verified_only_select( + r#"SELECT jt.* FROM JSON_TABLE('["Alice", "Bob", "Charlie"]', '$[*]' COLUMNS(row_num FOR ORDINALITY, name VARCHAR(50) PATH '$')) AS jt"#, + ); + mysql().verified_only_select( + r#"SELECT * FROM JSON_TABLE('[ {"a": 1, "b": [11,111]}, {"a": 2, "b": [22,222]}, {"a":3}]', '$[*]' COLUMNS(a INT PATH '$.a', NESTED PATH '$.b[*]' COLUMNS (b INT PATH '$'))) AS jt"#, + ); assert_eq!( mysql() .verified_only_select( @@ -2766,17 +3448,17 @@ fn parse_json_table() { .from[0] .relation, TableFactor::JsonTable { - json_expr: Expr::Value(Value::SingleQuotedString("[1,2]".to_string())), + json_expr: Expr::Value((Value::SingleQuotedString("[1,2]".to_string())).with_empty_span()), json_path: Value::SingleQuotedString("$[*]".to_string()), columns: vec![ - JsonTableColumn { + JsonTableColumn::Named(JsonTableNamedColumn { name: Ident::new("x"), r#type: DataType::Int(None), path: Value::SingleQuotedString("$".to_string()), exists: false, on_empty: Some(JsonTableColumnErrorHandling::Default(Value::SingleQuotedString("0".to_string()))), on_error: Some(JsonTableColumnErrorHandling::Null), - }, + }), ], alias: Some(TableAlias { name: Ident::new("t"), @@ -2796,3 +3478,504 @@ fn test_group_concat() { mysql_and_generic() .verified_expr("GROUP_CONCAT(DISTINCT test_score ORDER BY test_score DESC SEPARATOR ' ')"); } + +/// The XOR binary operator is only supported in MySQL +#[test] +fn parse_logical_xor() { + let sql = "SELECT true XOR true, false XOR false, true XOR false, false XOR true"; + let select = mysql_and_generic().verified_only_select(sql); + assert_eq!( + SelectItem::UnnamedExpr(Expr::BinaryOp { + left: Box::new(Expr::Value((Value::Boolean(true)).with_empty_span())), + op: BinaryOperator::Xor, + right: Box::new(Expr::Value((Value::Boolean(true)).with_empty_span())), + }), + select.projection[0] + ); + assert_eq!( + SelectItem::UnnamedExpr(Expr::BinaryOp { + left: Box::new(Expr::Value((Value::Boolean(false)).with_empty_span())), + op: BinaryOperator::Xor, + right: Box::new(Expr::Value((Value::Boolean(false)).with_empty_span())), + }), + select.projection[1] + ); + assert_eq!( + SelectItem::UnnamedExpr(Expr::BinaryOp { + left: Box::new(Expr::Value((Value::Boolean(true)).with_empty_span())), + op: BinaryOperator::Xor, + right: Box::new(Expr::Value((Value::Boolean(false)).with_empty_span())), + }), + select.projection[2] + ); + assert_eq!( + SelectItem::UnnamedExpr(Expr::BinaryOp { + left: Box::new(Expr::Value((Value::Boolean(false)).with_empty_span())), + op: BinaryOperator::Xor, + right: Box::new(Expr::Value((Value::Boolean(true)).with_empty_span())), + }), + select.projection[3] + ); 
+} + +#[test] +fn parse_bitstring_literal() { + let select = mysql_and_generic().verified_only_select("SELECT B'111'"); + assert_eq!( + select.projection, + vec![SelectItem::UnnamedExpr(Expr::Value( + (Value::SingleQuotedByteStringLiteral("111".to_string())).with_empty_span() + ))] + ); +} + +#[test] +fn parse_grant() { + let sql = "GRANT ALL ON *.* TO 'jeffrey'@'%'"; + let stmt = mysql().verified_stmt(sql); + if let Statement::Grant { + privileges, + objects, + grantees, + with_grant_option, + granted_by, + } = stmt + { + assert_eq!( + privileges, + Privileges::All { + with_privileges_keyword: false + } + ); + assert_eq!( + objects, + Some(GrantObjects::Tables(vec![ObjectName::from(vec![ + "*".into(), + "*".into() + ])])) + ); + assert!(!with_grant_option); + assert!(granted_by.is_none()); + if let [Grantee { + grantee_type: GranteesType::None, + name: Some(GranteeName::UserHost { user, host }), + }] = grantees.as_slice() + { + assert_eq!(user.value, "jeffrey"); + assert_eq!(user.quote_style, Some('\'')); + assert_eq!(host.value, "%"); + assert_eq!(host.quote_style, Some('\'')); + } else { + unreachable!() + } + } else { + unreachable!() + } +} + +#[test] +fn parse_revoke() { + let sql = "REVOKE ALL ON db1.* FROM 'jeffrey'@'%'"; + let stmt = mysql_and_generic().verified_stmt(sql); + if let Statement::Revoke { + privileges, + objects, + grantees, + granted_by, + cascade, + } = stmt + { + assert_eq!( + privileges, + Privileges::All { + with_privileges_keyword: false + } + ); + assert_eq!( + objects, + Some(GrantObjects::Tables(vec![ObjectName::from(vec![ + "db1".into(), + "*".into() + ])])) + ); + if let [Grantee { + grantee_type: GranteesType::None, + name: Some(GranteeName::UserHost { user, host }), + }] = grantees.as_slice() + { + assert_eq!(user.value, "jeffrey"); + assert_eq!(user.quote_style, Some('\'')); + assert_eq!(host.value, "%"); + assert_eq!(host.quote_style, Some('\'')); + } else { + unreachable!() + } + assert!(granted_by.is_none()); + assert!(cascade.is_none()); + } else { + unreachable!() + } +} + +#[test] +fn parse_create_view_algorithm_param() { + let sql = "CREATE ALGORITHM = MERGE VIEW foo AS SELECT 1"; + let stmt = mysql().verified_stmt(sql); + if let Statement::CreateView { + params: + Some(CreateViewParams { + algorithm, + definer, + security, + }), + .. + } = stmt + { + assert_eq!(algorithm, Some(CreateViewAlgorithm::Merge)); + assert!(definer.is_none()); + assert!(security.is_none()); + } else { + unreachable!() + } + mysql().verified_stmt("CREATE ALGORITHM = UNDEFINED VIEW foo AS SELECT 1"); + mysql().verified_stmt("CREATE ALGORITHM = TEMPTABLE VIEW foo AS SELECT 1"); +} + +#[test] +fn parse_create_view_definer_param() { + let sql = "CREATE DEFINER = 'jeffrey'@'localhost' VIEW foo AS SELECT 1"; + let stmt = mysql().verified_stmt(sql); + if let Statement::CreateView { + params: + Some(CreateViewParams { + algorithm, + definer, + security, + }), + .. 
+ } = stmt + { + assert!(algorithm.is_none()); + if let Some(GranteeName::UserHost { user, host }) = definer { + assert_eq!(user.value, "jeffrey"); + assert_eq!(user.quote_style, Some('\'')); + assert_eq!(host.value, "localhost"); + assert_eq!(host.quote_style, Some('\'')); + } else { + unreachable!() + } + assert!(security.is_none()); + } else { + unreachable!() + } +} + +#[test] +fn parse_create_view_security_param() { + let sql = "CREATE SQL SECURITY DEFINER VIEW foo AS SELECT 1"; + let stmt = mysql().verified_stmt(sql); + if let Statement::CreateView { + params: + Some(CreateViewParams { + algorithm, + definer, + security, + }), + .. + } = stmt + { + assert!(algorithm.is_none()); + assert!(definer.is_none()); + assert_eq!(security, Some(CreateViewSecurity::Definer)); + } else { + unreachable!() + } + mysql().verified_stmt("CREATE SQL SECURITY INVOKER VIEW foo AS SELECT 1"); +} + +#[test] +fn parse_create_view_multiple_params() { + let sql = "CREATE ALGORITHM = UNDEFINED DEFINER = `root`@`%` SQL SECURITY INVOKER VIEW foo AS SELECT 1"; + let stmt = mysql().verified_stmt(sql); + if let Statement::CreateView { + params: + Some(CreateViewParams { + algorithm, + definer, + security, + }), + .. + } = stmt + { + assert_eq!(algorithm, Some(CreateViewAlgorithm::Undefined)); + if let Some(GranteeName::UserHost { user, host }) = definer { + assert_eq!(user.value, "root"); + assert_eq!(user.quote_style, Some('`')); + assert_eq!(host.value, "%"); + assert_eq!(host.quote_style, Some('`')); + } else { + unreachable!() + } + assert_eq!(security, Some(CreateViewSecurity::Invoker)); + } else { + unreachable!() + } +} + +#[test] +fn parse_longblob_type() { + let sql = "CREATE TABLE foo (bar LONGBLOB)"; + let stmt = mysql_and_generic().verified_stmt(sql); + if let Statement::CreateTable(CreateTable { columns, .. 
}) = stmt { + assert_eq!(columns.len(), 1); + assert_eq!(columns[0].data_type, DataType::LongBlob); + } else { + unreachable!() + } + mysql_and_generic().verified_stmt("CREATE TABLE foo (bar TINYBLOB)"); + mysql_and_generic().verified_stmt("CREATE TABLE foo (bar MEDIUMBLOB)"); + mysql_and_generic().verified_stmt("CREATE TABLE foo (bar TINYTEXT)"); + mysql_and_generic().verified_stmt("CREATE TABLE foo (bar MEDIUMTEXT)"); + mysql_and_generic().verified_stmt("CREATE TABLE foo (bar LONGTEXT)"); +} + +#[test] +fn parse_begin_without_transaction() { + mysql().verified_stmt("BEGIN"); +} + +#[test] +fn parse_double_precision() { + mysql().verified_stmt("CREATE TABLE foo (bar DOUBLE)"); + mysql().verified_stmt("CREATE TABLE foo (bar DOUBLE(11,0))"); + mysql().one_statement_parses_to( + "CREATE TABLE foo (bar DOUBLE(11, 0))", + "CREATE TABLE foo (bar DOUBLE(11,0))", + ); +} + +#[test] +fn parse_looks_like_single_line_comment() { + mysql().one_statement_parses_to( + "UPDATE account SET balance=balance--1 WHERE account_id=5752", + "UPDATE account SET balance = balance - -1 WHERE account_id = 5752", + ); + mysql().one_statement_parses_to( + r#" + UPDATE account SET balance=balance-- 1 + WHERE account_id=5752 + "#, + "UPDATE account SET balance = balance WHERE account_id = 5752", + ); +} + +#[test] +fn parse_create_trigger() { + let sql_create_trigger = r#" + CREATE TRIGGER emp_stamp BEFORE INSERT ON emp + FOR EACH ROW EXECUTE FUNCTION emp_stamp(); + "#; + let create_stmt = mysql().one_statement_parses_to(sql_create_trigger, ""); + assert_eq!( + create_stmt, + Statement::CreateTrigger { + or_alter: false, + or_replace: false, + is_constraint: false, + name: ObjectName::from(vec![Ident::new("emp_stamp")]), + period: TriggerPeriod::Before, + events: vec![TriggerEvent::Insert], + table_name: ObjectName::from(vec![Ident::new("emp")]), + referenced_table_name: None, + referencing: vec![], + trigger_object: TriggerObject::Row, + include_each: true, + condition: None, + exec_body: Some(TriggerExecBody { + exec_type: TriggerExecBodyType::Function, + func_desc: FunctionDesc { + name: ObjectName::from(vec![Ident::new("emp_stamp")]), + args: None, + } + }), + statements: None, + characteristics: None, + } + ); +} + +#[test] +fn parse_drop_trigger() { + let sql_drop_trigger = "DROP TRIGGER emp_stamp;"; + let drop_stmt = mysql().one_statement_parses_to(sql_drop_trigger, ""); + assert_eq!( + drop_stmt, + Statement::DropTrigger { + if_exists: false, + trigger_name: ObjectName::from(vec![Ident::new("emp_stamp")]), + table_name: None, + option: None, + } + ); +} + +#[test] +fn parse_cast_integers() { + mysql().verified_expr("CAST(foo AS UNSIGNED)"); + mysql().verified_expr("CAST(foo AS SIGNED)"); + mysql().verified_expr("CAST(foo AS UNSIGNED INTEGER)"); + mysql().verified_expr("CAST(foo AS SIGNED INTEGER)"); + + mysql() + .run_parser_method("CAST(foo AS UNSIGNED(3))", |p| p.parse_expr()) + .expect_err("CAST doesn't allow display width"); + mysql() + .run_parser_method("CAST(foo AS UNSIGNED(3) INTEGER)", |p| p.parse_expr()) + .expect_err("CAST doesn't allow display width"); + mysql() + .run_parser_method("CAST(foo AS UNSIGNED INTEGER(3))", |p| p.parse_expr()) + .expect_err("CAST doesn't allow display width"); +} + +#[test] +fn parse_match_against_with_alias() { + let sql = "SELECT tbl.ProjectID FROM surveys.tbl1 AS tbl WHERE MATCH (tbl.ReferenceID) AGAINST ('AAA' IN BOOLEAN MODE)"; + match mysql().verified_stmt(sql) { + Statement::Query(query) => match *query.body { + SetExpr::Select(select) => match select.selection { + 
Some(Expr::MatchAgainst { + columns, + match_value, + opt_search_modifier, + }) => { + assert_eq!( + columns, + vec![ObjectName::from(vec![ + Ident::new("tbl"), + Ident::new("ReferenceID") + ])] + ); + assert_eq!(match_value, Value::SingleQuotedString("AAA".to_owned())); + assert_eq!(opt_search_modifier, Some(SearchModifier::InBooleanMode)); + } + _ => unreachable!(), + }, + _ => unreachable!(), + }, + _ => unreachable!(), + } +} + +#[test] +fn test_variable_assignment_using_colon_equal() { + let sql_select = "SELECT @price := price, @tax := price * 0.1 FROM products WHERE id = 1"; + let stmt = mysql().verified_stmt(sql_select); + match stmt { + Statement::Query(query) => { + let select = query.body.as_select().unwrap(); + + assert_eq!( + select.projection, + vec![ + SelectItem::UnnamedExpr(Expr::BinaryOp { + left: Box::new(Expr::Identifier(Ident { + value: "@price".to_string(), + quote_style: None, + span: Span::empty(), + })), + op: BinaryOperator::Assignment, + right: Box::new(Expr::Identifier(Ident { + value: "price".to_string(), + quote_style: None, + span: Span::empty(), + })), + }), + SelectItem::UnnamedExpr(Expr::BinaryOp { + left: Box::new(Expr::Identifier(Ident { + value: "@tax".to_string(), + quote_style: None, + span: Span::empty(), + })), + op: BinaryOperator::Assignment, + right: Box::new(Expr::BinaryOp { + left: Box::new(Expr::Identifier(Ident { + value: "price".to_string(), + quote_style: None, + span: Span::empty(), + })), + op: BinaryOperator::Multiply, + right: Box::new(Expr::Value( + (test_utils::number("0.1")).with_empty_span() + )), + }), + }), + ] + ); + + assert_eq!( + select.selection, + Some(Expr::BinaryOp { + left: Box::new(Expr::Identifier(Ident { + value: "id".to_string(), + quote_style: None, + span: Span::empty(), + })), + op: BinaryOperator::Eq, + right: Box::new(Expr::Value((test_utils::number("1")).with_empty_span())), + }) + ); + } + _ => panic!("Unexpected statement {stmt}"), + } + + let sql_update = + "UPDATE products SET price = @new_price := price * 1.1 WHERE category = 'Books'"; + let stmt = mysql().verified_stmt(sql_update); + + match stmt { + Statement::Update { assignments, .. 
} => { + assert_eq!( + assignments, + vec![Assignment { + target: AssignmentTarget::ColumnName(ObjectName(vec![ + ObjectNamePart::Identifier(Ident { + value: "price".to_string(), + quote_style: None, + span: Span::empty(), + }) + ])), + value: Expr::BinaryOp { + left: Box::new(Expr::Identifier(Ident { + value: "@new_price".to_string(), + quote_style: None, + span: Span::empty(), + })), + op: BinaryOperator::Assignment, + right: Box::new(Expr::BinaryOp { + left: Box::new(Expr::Identifier(Ident { + value: "price".to_string(), + quote_style: None, + span: Span::empty(), + })), + op: BinaryOperator::Multiply, + right: Box::new(Expr::Value( + (test_utils::number("1.1")).with_empty_span() + )), + }), + }, + }] + ) + } + _ => panic!("Unexpected statement {stmt}"), + } +} + +#[test] +fn parse_straight_join() { + mysql().verified_stmt( + "SELECT a.*, b.* FROM table_a AS a STRAIGHT_JOIN table_b AS b ON a.b_id = b.id", + ); + // Without table alias + mysql() + .verified_stmt("SELECT a.*, b.* FROM table_a STRAIGHT_JOIN table_b AS b ON a.b_id = b.id"); +} diff --git a/tests/sqlparser_postgres.rs b/tests/sqlparser_postgres.rs index b9b3811ba..859eca453 100644 --- a/tests/sqlparser_postgres.rs +++ b/tests/sqlparser_postgres.rs @@ -21,6 +21,8 @@ #[macro_use] mod test_utils; +use helpers::attached_token::AttachedToken; +use sqlparser::tokenizer::Span; use test_utils::*; use sqlparser::ast::*; @@ -346,7 +348,7 @@ fn parse_create_table_with_defaults() { name, columns, constraints, - with_options, + table_options, if_not_exists: false, external: false, file_format: None, @@ -361,7 +363,6 @@ fn parse_create_table_with_defaults() { ColumnDef { name: "customer_id".into(), data_type: DataType::Integer(None), - collation: None, options: vec![ColumnOptionDef { name: None, option: ColumnOption::Default( @@ -372,7 +373,6 @@ fn parse_create_table_with_defaults() { ColumnDef { name: "store_id".into(), data_type: DataType::SmallInt(None), - collation: None, options: vec![ColumnOptionDef { name: None, option: ColumnOption::NotNull, @@ -386,7 +386,6 @@ fn parse_create_table_with_defaults() { unit: None } )), - collation: None, options: vec![ColumnOptionDef { name: None, option: ColumnOption::NotNull, @@ -400,11 +399,18 @@ fn parse_create_table_with_defaults() { unit: None } )), - collation: Some(ObjectName(vec![Ident::with_quote('"', "es_ES")])), - options: vec![ColumnOptionDef { - name: None, - option: ColumnOption::NotNull, - }], + options: vec![ + ColumnOptionDef { + name: None, + option: ColumnOption::Collation(ObjectName::from(vec![ + Ident::with_quote('"', "es_ES") + ])), + }, + ColumnOptionDef { + name: None, + option: ColumnOption::NotNull, + } + ], }, ColumnDef { name: "email".into(), @@ -414,13 +420,11 @@ fn parse_create_table_with_defaults() { unit: None } )), - collation: None, options: vec![], }, ColumnDef { name: "address_id".into(), data_type: DataType::SmallInt(None), - collation: None, options: vec![ColumnOptionDef { name: None, option: ColumnOption::NotNull @@ -429,11 +433,12 @@ fn parse_create_table_with_defaults() { ColumnDef { name: "activebool".into(), data_type: DataType::Boolean, - collation: None, options: vec![ ColumnOptionDef { name: None, - option: ColumnOption::Default(Expr::Value(Value::Boolean(true))), + option: ColumnOption::Default(Expr::Value( + (Value::Boolean(true)).with_empty_span() + )), }, ColumnOptionDef { name: None, @@ -444,7 +449,6 @@ fn parse_create_table_with_defaults() { ColumnDef { name: "create_date".into(), data_type: DataType::Date, - collation: None, options: vec![ 
ColumnOptionDef { name: None, @@ -459,7 +463,6 @@ fn parse_create_table_with_defaults() { ColumnDef { name: "last_update".into(), data_type: DataType::Timestamp(None, TimezoneInfo::WithoutTimeZone), - collation: None, options: vec![ ColumnOptionDef { name: None, @@ -474,7 +477,6 @@ fn parse_create_table_with_defaults() { ColumnDef { name: "active".into(), data_type: DataType::Int(None), - collation: None, options: vec![ColumnOptionDef { name: None, option: ColumnOption::NotNull @@ -483,20 +485,25 @@ fn parse_create_table_with_defaults() { ] ); assert!(constraints.is_empty()); + + let with_options = match table_options { + CreateTableOptions::With(options) => options, + _ => unreachable!(), + }; assert_eq!( with_options, vec![ SqlOption::KeyValue { key: "fillfactor".into(), - value: Expr::Value(number("20")) + value: Expr::value(number("20")) }, SqlOption::KeyValue { key: "user_catalog_table".into(), - value: Expr::Value(Value::Boolean(true)) + value: Expr::Value((Value::Boolean(true)).with_empty_span()) }, SqlOption::KeyValue { key: "autovacuum_vacuum_threshold".into(), - value: Expr::Value(number("100")) + value: Expr::value(number("100")) }, ] ); @@ -592,6 +599,25 @@ fn parse_alter_table_constraints_rename() { } } +#[test] +fn parse_alter_table_constraints_unique_nulls_distinct() { + match pg_and_generic() + .verified_stmt("ALTER TABLE t ADD CONSTRAINT b UNIQUE NULLS NOT DISTINCT (c)") + { + Statement::AlterTable { operations, .. } => match &operations[0] { + AlterTableOperation::AddConstraint(TableConstraint::Unique { + nulls_distinct, .. + }) => { + assert_eq!(nulls_distinct, &NullsDistinctOption::NotDistinct) + } + _ => unreachable!(), + }, + _ => unreachable!(), + } + pg_and_generic().verified_stmt("ALTER TABLE t ADD CONSTRAINT b UNIQUE NULLS DISTINCT (c)"); + pg_and_generic().verified_stmt("ALTER TABLE t ADD CONSTRAINT b UNIQUE (c)"); +} + #[test] fn parse_alter_table_disable() { pg_and_generic().verified_stmt("ALTER TABLE tab DISABLE ROW LEVEL SECURITY"); @@ -641,6 +667,100 @@ fn parse_create_extension() { .verified_stmt("CREATE EXTENSION extension_name WITH SCHEMA schema_name VERSION version"); } +#[test] +fn parse_drop_extension() { + assert_eq!( + pg_and_generic().verified_stmt("DROP EXTENSION extension_name"), + Statement::DropExtension { + names: vec!["extension_name".into()], + if_exists: false, + cascade_or_restrict: None, + } + ); + assert_eq!( + pg_and_generic().verified_stmt("DROP EXTENSION extension_name CASCADE"), + Statement::DropExtension { + names: vec!["extension_name".into()], + if_exists: false, + cascade_or_restrict: Some(ReferentialAction::Cascade), + } + ); + + assert_eq!( + pg_and_generic().verified_stmt("DROP EXTENSION extension_name RESTRICT"), + Statement::DropExtension { + names: vec!["extension_name".into()], + if_exists: false, + cascade_or_restrict: Some(ReferentialAction::Restrict), + } + ); + + assert_eq!( + pg_and_generic().verified_stmt("DROP EXTENSION extension_name, extension_name2 CASCADE"), + Statement::DropExtension { + names: vec!["extension_name".into(), "extension_name2".into()], + if_exists: false, + cascade_or_restrict: Some(ReferentialAction::Cascade), + } + ); + + assert_eq!( + pg_and_generic().verified_stmt("DROP EXTENSION extension_name, extension_name2 RESTRICT"), + Statement::DropExtension { + names: vec!["extension_name".into(), "extension_name2".into()], + if_exists: false, + cascade_or_restrict: Some(ReferentialAction::Restrict), + } + ); + + assert_eq!( + pg_and_generic().verified_stmt("DROP EXTENSION IF EXISTS extension_name"), 
+ Statement::DropExtension { + names: vec!["extension_name".into()], + if_exists: true, + cascade_or_restrict: None, + } + ); + + assert_eq!( + pg_and_generic().verified_stmt("DROP EXTENSION IF EXISTS extension_name CASCADE"), + Statement::DropExtension { + names: vec!["extension_name".into()], + if_exists: true, + cascade_or_restrict: Some(ReferentialAction::Cascade), + } + ); + + assert_eq!( + pg_and_generic().verified_stmt("DROP EXTENSION IF EXISTS extension_name RESTRICT"), + Statement::DropExtension { + names: vec!["extension_name".into()], + if_exists: true, + cascade_or_restrict: Some(ReferentialAction::Restrict), + } + ); + + assert_eq!( + pg_and_generic() + .verified_stmt("DROP EXTENSION IF EXISTS extension_name1, extension_name2 CASCADE"), + Statement::DropExtension { + names: vec!["extension_name1".into(), "extension_name2".into()], + if_exists: true, + cascade_or_restrict: Some(ReferentialAction::Cascade), + } + ); + + assert_eq!( + pg_and_generic() + .verified_stmt("DROP EXTENSION IF EXISTS extension_name1, extension_name2 RESTRICT"), + Statement::DropExtension { + names: vec!["extension_name1".into(), "extension_name2".into()], + if_exists: true, + cascade_or_restrict: Some(ReferentialAction::Restrict), + } + ); +} + #[test] fn parse_alter_table_alter_column() { pg().one_statement_parses_to( @@ -655,7 +775,8 @@ fn parse_alter_table_alter_column() { ) { AlterTableOperation::AlterColumn { column_name, op } => { assert_eq!("is_active", column_name.to_string()); - let using_expr = Expr::Value(Value::SingleQuotedString("text".to_string())); + let using_expr = + Expr::Value(Value::SingleQuotedString("text".to_string()).with_empty_span()); assert_eq!( op, AlterColumnOperation::SetDataType { @@ -727,7 +848,6 @@ fn parse_alter_table_add_columns() { column_def: ColumnDef { name: "a".into(), data_type: DataType::Text, - collation: None, options: vec![], }, column_position: None, @@ -738,7 +858,6 @@ fn parse_alter_table_add_columns() { column_def: ColumnDef { name: "b".into(), data_type: DataType::Int(None), - collation: None, options: vec![], }, column_position: None, @@ -874,6 +993,7 @@ fn parse_create_schema_if_not_exists() { Statement::CreateSchema { if_not_exists: true, schema_name, + .. 
} => assert_eq!("schema_name", schema_name.to_string()), _ => unreachable!(), } @@ -925,7 +1045,7 @@ fn test_copy_from() { stmt, Statement::Copy { source: CopySource::Table { - table_name: ObjectName(vec!["users".into()]), + table_name: ObjectName::from(vec!["users".into()]), columns: vec![], }, to: false, @@ -943,7 +1063,7 @@ fn test_copy_from() { stmt, Statement::Copy { source: CopySource::Table { - table_name: ObjectName(vec!["users".into()]), + table_name: ObjectName::from(vec!["users".into()]), columns: vec![], }, to: false, @@ -961,7 +1081,7 @@ fn test_copy_from() { stmt, Statement::Copy { source: CopySource::Table { - table_name: ObjectName(vec!["users".into()]), + table_name: ObjectName::from(vec!["users".into()]), columns: vec![], }, to: false, @@ -985,7 +1105,7 @@ fn test_copy_to() { stmt, Statement::Copy { source: CopySource::Table { - table_name: ObjectName(vec!["users".into()]), + table_name: ObjectName::from(vec!["users".into()]), columns: vec![], }, to: true, @@ -1003,7 +1123,7 @@ fn test_copy_to() { stmt, Statement::Copy { source: CopySource::Table { - table_name: ObjectName(vec!["users".into()]), + table_name: ObjectName::from(vec!["users".into()]), columns: vec![], }, to: true, @@ -1021,7 +1141,7 @@ fn test_copy_to() { stmt, Statement::Copy { source: CopySource::Table { - table_name: ObjectName(vec!["users".into()]), + table_name: ObjectName::from(vec!["users".into()]), columns: vec![], }, to: true, @@ -1062,7 +1182,7 @@ fn parse_copy_from() { pg_and_generic().one_statement_parses_to(sql, ""), Statement::Copy { source: CopySource::Table { - table_name: ObjectName(vec!["table".into()]), + table_name: ObjectName::from(vec!["table".into()]), columns: vec!["a".into(), "b".into()], }, to: false, @@ -1108,7 +1228,7 @@ fn parse_copy_to() { stmt, Statement::Copy { source: CopySource::Table { - table_name: ObjectName(vec!["users".into()]), + table_name: ObjectName::from(vec!["users".into()]), columns: vec![], }, to: true, @@ -1126,7 +1246,7 @@ fn parse_copy_to() { stmt, Statement::Copy { source: CopySource::Table { - table_name: ObjectName(vec!["country".into()]), + table_name: ObjectName::from(vec!["country".into()]), columns: vec![], }, to: true, @@ -1143,7 +1263,7 @@ fn parse_copy_to() { stmt, Statement::Copy { source: CopySource::Table { - table_name: ObjectName(vec!["country".into()]), + table_name: ObjectName::from(vec!["country".into()]), columns: vec![], }, to: true, @@ -1163,21 +1283,27 @@ fn parse_copy_to() { source: CopySource::Query(Box::new(Query { with: None, body: Box::new(SetExpr::Select(Box::new(Select { + select_token: AttachedToken::empty(), distinct: None, top: None, + top_before_distinct: false, projection: vec![ SelectItem::ExprWithAlias { - expr: Expr::Value(number("42")), + expr: Expr::value(number("42")), alias: Ident { value: "a".into(), quote_style: None, + span: Span::empty(), }, }, SelectItem::ExprWithAlias { - expr: Expr::Value(Value::SingleQuotedString("hello".into())), + expr: Expr::Value( + (Value::SingleQuotedString("hello".into())).with_empty_span() + ), alias: Ident { value: "b".into(), quote_style: None, + span: Span::empty(), }, } ], @@ -1196,16 +1322,16 @@ fn parse_copy_to() { qualify: None, value_table_mode: None, connect_by: None, + flavor: SelectFlavor::Standard, }))), order_by: None, - limit: None, - limit_by: vec![], - offset: None, + limit_clause: None, fetch: None, locks: vec![], for_clause: None, settings: None, format_clause: None, + pipe_operators: vec![], })), to: true, target: CopyTarget::File { @@ -1225,7 +1351,7 @@ fn 
parse_copy_from_before_v9_0() { stmt, Statement::Copy { source: CopySource::Table { - table_name: ObjectName(vec!["users".into()]), + table_name: ObjectName::from(vec!["users".into()]), columns: vec![], }, to: false, @@ -1254,7 +1380,7 @@ fn parse_copy_from_before_v9_0() { pg_and_generic().one_statement_parses_to(sql, ""), Statement::Copy { source: CopySource::Table { - table_name: ObjectName(vec!["users".into()]), + table_name: ObjectName::from(vec!["users".into()]), columns: vec![], }, to: false, @@ -1282,7 +1408,7 @@ fn parse_copy_to_before_v9_0() { stmt, Statement::Copy { source: CopySource::Table { - table_name: ObjectName(vec!["users".into()]), + table_name: ObjectName::from(vec!["users".into()]), columns: vec![], }, to: true, @@ -1311,80 +1437,77 @@ fn parse_set() { let stmt = pg_and_generic().verified_stmt("SET a = b"); assert_eq!( stmt, - Statement::SetVariable { - local: false, + Statement::Set(Set::SingleAssignment { + scope: None, hivevar: false, - variables: OneOrManyWithParens::One(ObjectName(vec![Ident::new("a")])), - value: vec![Expr::Identifier(Ident { + variable: ObjectName::from(vec![Ident::new("a")]), + values: vec![Expr::Identifier(Ident { value: "b".into(), - quote_style: None + quote_style: None, + span: Span::empty(), })], - } + }) ); let stmt = pg_and_generic().verified_stmt("SET a = 'b'"); assert_eq!( stmt, - Statement::SetVariable { - local: false, + Statement::Set(Set::SingleAssignment { + scope: None, hivevar: false, - variables: OneOrManyWithParens::One(ObjectName(vec![Ident::new("a")])), - value: vec![Expr::Value(Value::SingleQuotedString("b".into()))], - } + variable: ObjectName::from(vec![Ident::new("a")]), + values: vec![Expr::Value( + (Value::SingleQuotedString("b".into())).with_empty_span() + )], + }) ); let stmt = pg_and_generic().verified_stmt("SET a = 0"); assert_eq!( stmt, - Statement::SetVariable { - local: false, + Statement::Set(Set::SingleAssignment { + scope: None, hivevar: false, - variables: OneOrManyWithParens::One(ObjectName(vec![Ident::new("a")])), - value: vec![Expr::Value(number("0"))], - } + variable: ObjectName::from(vec![Ident::new("a")]), + values: vec![Expr::value(number("0"))], + }) ); let stmt = pg_and_generic().verified_stmt("SET a = DEFAULT"); assert_eq!( stmt, - Statement::SetVariable { - local: false, + Statement::Set(Set::SingleAssignment { + scope: None, hivevar: false, - variables: OneOrManyWithParens::One(ObjectName(vec![Ident::new("a")])), - value: vec![Expr::Identifier(Ident { - value: "DEFAULT".into(), - quote_style: None - })], - } + variable: ObjectName::from(vec![Ident::new("a")]), + values: vec![Expr::Identifier(Ident::new("DEFAULT"))], + }) ); let stmt = pg_and_generic().verified_stmt("SET LOCAL a = b"); assert_eq!( stmt, - Statement::SetVariable { - local: true, + Statement::Set(Set::SingleAssignment { + scope: Some(ContextModifier::Local), hivevar: false, - variables: OneOrManyWithParens::One(ObjectName(vec![Ident::new("a")])), - value: vec![Expr::Identifier("b".into())], - } + variable: ObjectName::from(vec![Ident::new("a")]), + values: vec![Expr::Identifier("b".into())], + }) ); let stmt = pg_and_generic().verified_stmt("SET a.b.c = b"); assert_eq!( stmt, - Statement::SetVariable { - local: false, + Statement::Set(Set::SingleAssignment { + scope: None, hivevar: false, - variables: OneOrManyWithParens::One(ObjectName(vec![ - Ident::new("a"), - Ident::new("b"), - Ident::new("c") - ])), - value: vec![Expr::Identifier(Ident { + variable: ObjectName::from(vec![Ident::new("a"), Ident::new("b"), Ident::new("c")]), + 
values: vec![Expr::Identifier(Ident { value: "b".into(), - quote_style: None + quote_style: None, + span: Span::empty(), })], - } + }) ); let stmt = pg_and_generic().one_statement_parses_to( @@ -1393,22 +1516,21 @@ fn parse_set() { ); assert_eq!( stmt, - Statement::SetVariable { - local: false, + Statement::Set(Set::SingleAssignment { + scope: None, hivevar: false, - variables: OneOrManyWithParens::One(ObjectName(vec![ + variable: ObjectName::from(vec![ Ident::new("hive"), Ident::new("tez"), Ident::new("auto"), Ident::new("reducer"), Ident::new("parallelism") - ])), - value: vec![Expr::Value(Value::Boolean(false))], - } + ]), + values: vec![Expr::Value((Value::Boolean(false)).with_empty_span())], + }) ); pg_and_generic().one_statement_parses_to("SET a TO b", "SET a = b"); - pg_and_generic().one_statement_parses_to("SET SESSION a = b", "SET a = b"); assert_eq!( pg_and_generic().parse_sql_statements("SET"), @@ -1438,10 +1560,10 @@ fn parse_set_role() { let stmt = pg_and_generic().verified_stmt(query); assert_eq!( stmt, - Statement::SetRole { - context_modifier: ContextModifier::Session, + Statement::Set(Set::SetRole { + context_modifier: Some(ContextModifier::Session), role_name: None, - } + }) ); assert_eq!(query, stmt.to_string()); @@ -1449,13 +1571,14 @@ fn parse_set_role() { let stmt = pg_and_generic().verified_stmt(query); assert_eq!( stmt, - Statement::SetRole { - context_modifier: ContextModifier::Local, + Statement::Set(Set::SetRole { + context_modifier: Some(ContextModifier::Local), role_name: Some(Ident { value: "rolename".to_string(), quote_style: Some('\"'), + span: Span::empty(), }), - } + }) ); assert_eq!(query, stmt.to_string()); @@ -1463,13 +1586,14 @@ fn parse_set_role() { let stmt = pg_and_generic().verified_stmt(query); assert_eq!( stmt, - Statement::SetRole { - context_modifier: ContextModifier::None, + Statement::Set(Set::SetRole { + context_modifier: None, role_name: Some(Ident { value: "rolename".to_string(), quote_style: Some('\''), + span: Span::empty(), }), - } + }) ); assert_eq!(query, stmt.to_string()); } @@ -1538,9 +1662,12 @@ fn parse_execute() { assert_eq!( stmt, Statement::Execute { - name: "a".into(), + name: Some(ObjectName::from(vec!["a".into()])), parameters: vec![], - using: vec![] + has_parentheses: false, + using: vec![], + immediate: false, + into: vec![] } ); @@ -1548,12 +1675,15 @@ fn parse_execute() { assert_eq!( stmt, Statement::Execute { - name: "a".into(), + name: Some(ObjectName::from(vec!["a".into()])), parameters: vec![ - Expr::Value(number("1")), - Expr::Value(Value::SingleQuotedString("t".to_string())) + Expr::value(number("1")), + Expr::Value((Value::SingleQuotedString("t".to_string())).with_empty_span()) ], - using: vec![] + has_parentheses: true, + using: vec![], + immediate: false, + into: vec![] } ); @@ -1562,22 +1692,35 @@ fn parse_execute() { assert_eq!( stmt, Statement::Execute { - name: "a".into(), + name: Some(ObjectName::from(vec!["a".into()])), parameters: vec![], + has_parentheses: false, using: vec![ - Expr::Cast { - kind: CastKind::Cast, - expr: Box::new(Expr::Value(Value::Number("1337".parse().unwrap(), false))), - data_type: DataType::SmallInt(None), - format: None + ExprWithAlias { + expr: Expr::Cast { + kind: CastKind::Cast, + expr: Box::new(Expr::Value( + (Value::Number("1337".parse().unwrap(), false)).with_empty_span() + )), + data_type: DataType::SmallInt(None), + format: None + }, + alias: None }, - Expr::Cast { - kind: CastKind::Cast, - expr: Box::new(Expr::Value(Value::Number("7331".parse().unwrap(), false))), - 
data_type: DataType::SmallInt(None), - format: None + ExprWithAlias { + expr: Expr::Cast { + kind: CastKind::Cast, + expr: Box::new(Expr::Value( + (Value::Number("7331".parse().unwrap(), false)).with_empty_span() + )), + data_type: DataType::SmallInt(None), + format: None + }, + alias: None }, - ] + ], + immediate: false, + into: vec![] } ); } @@ -1602,7 +1745,7 @@ fn parse_prepare() { }; match sub_stmt.as_ref() { Statement::Insert(Insert { - table_name, + table: table_name, columns, source: Some(source), .. @@ -1670,7 +1813,9 @@ fn parse_pg_on_conflict() { assert_eq!( OnConflictAction::DoUpdate(DoUpdate { assignments: vec![Assignment { - target: AssignmentTarget::ColumnName(ObjectName(vec!["dname".into()])), + target: AssignmentTarget::ColumnName(ObjectName::from( + vec!["dname".into()] + )), value: Expr::CompoundIdentifier(vec!["EXCLUDED".into(), "dname".into()]) },], selection: None @@ -1701,14 +1846,18 @@ fn parse_pg_on_conflict() { OnConflictAction::DoUpdate(DoUpdate { assignments: vec![ Assignment { - target: AssignmentTarget::ColumnName(ObjectName(vec!["dname".into()])), + target: AssignmentTarget::ColumnName(ObjectName::from(vec![ + "dname".into() + ])), value: Expr::CompoundIdentifier(vec![ "EXCLUDED".into(), "dname".into() ]) }, Assignment { - target: AssignmentTarget::ColumnName(ObjectName(vec!["area".into()])), + target: AssignmentTarget::ColumnName(ObjectName::from(vec![ + "area".into() + ])), value: Expr::CompoundIdentifier(vec!["EXCLUDED".into(), "area".into()]) }, ], @@ -1758,16 +1907,23 @@ fn parse_pg_on_conflict() { assert_eq!( OnConflictAction::DoUpdate(DoUpdate { assignments: vec![Assignment { - target: AssignmentTarget::ColumnName(ObjectName(vec!["dname".into()])), - value: Expr::Value(Value::Placeholder("$1".to_string())) + target: AssignmentTarget::ColumnName(ObjectName::from( + vec!["dname".into()] + )), + value: Expr::Value( + (Value::Placeholder("$1".to_string())).with_empty_span() + ) },], selection: Some(Expr::BinaryOp { left: Box::new(Expr::Identifier(Ident { value: "dsize".to_string(), - quote_style: None + quote_style: None, + span: Span::empty(), })), op: BinaryOperator::Gt, - right: Box::new(Expr::Value(Value::Placeholder("$2".to_string()))) + right: Box::new(Expr::Value( + (Value::Placeholder("$2".to_string())).with_empty_span() + )) }) }), action @@ -1791,20 +1947,30 @@ fn parse_pg_on_conflict() { })), .. 
}) => { - assert_eq!(vec![Ident::from("distributors_did_pkey")], cname.0); + assert_eq!( + ObjectName::from(vec![Ident::from("distributors_did_pkey")]), + cname + ); assert_eq!( OnConflictAction::DoUpdate(DoUpdate { assignments: vec![Assignment { - target: AssignmentTarget::ColumnName(ObjectName(vec!["dname".into()])), - value: Expr::Value(Value::Placeholder("$1".to_string())) + target: AssignmentTarget::ColumnName(ObjectName::from( + vec!["dname".into()] + )), + value: Expr::Value( + (Value::Placeholder("$1".to_string())).with_empty_span() + ) },], selection: Some(Expr::BinaryOp { left: Box::new(Expr::Identifier(Ident { value: "dsize".to_string(), - quote_style: None + quote_style: None, + span: Span::empty(), })), op: BinaryOperator::Gt, - right: Box::new(Expr::Value(Value::Placeholder("$2".to_string()))) + right: Box::new(Expr::Value( + (Value::Placeholder("$2".to_string())).with_empty_span() + )) }) }), action @@ -1919,12 +2085,8 @@ fn parse_pg_custom_binary_ops() { let operators = [ // PostGIS "&&&", // n-D bounding boxes intersect - "&<", // (is strictly to the left of) - "&>", // (is strictly to the right of) "|=|", // distance between A and B trajectories at their closest point of approach "<<#>>", // n-D distance between A and B bounding boxes - "|>>", // A's bounding box is strictly above B's. - "~=", // bounding box is the same // PGroonga "&@", // Full text search by a keyword "&@~", // Full text search by easy to use query language @@ -2024,9 +2186,13 @@ fn parse_pg_regex_match_ops() { let select = pg().verified_only_select(&format!("SELECT 'abc' {} '^a'", &str_op)); assert_eq!( SelectItem::UnnamedExpr(Expr::BinaryOp { - left: Box::new(Expr::Value(Value::SingleQuotedString("abc".into()))), + left: Box::new(Expr::Value( + (Value::SingleQuotedString("abc".into())).with_empty_span() + )), op: op.clone(), - right: Box::new(Expr::Value(Value::SingleQuotedString("^a".into()))), + right: Box::new(Expr::Value( + (Value::SingleQuotedString("^a".into())).with_empty_span() + )), }), select.projection[0] ); @@ -2046,9 +2212,13 @@ fn parse_pg_like_match_ops() { let select = pg().verified_only_select(&format!("SELECT 'abc' {} 'a_c%'", &str_op)); assert_eq!( SelectItem::UnnamedExpr(Expr::BinaryOp { - left: Box::new(Expr::Value(Value::SingleQuotedString("abc".into()))), + left: Box::new(Expr::Value( + (Value::SingleQuotedString("abc".into())).with_empty_span() + )), op: op.clone(), - right: Box::new(Expr::Value(Value::SingleQuotedString("a_c%".into()))), + right: Box::new(Expr::Value( + (Value::SingleQuotedString("a_c%".into())).with_empty_span() + )), }), select.projection[0] ); @@ -2058,17 +2228,17 @@ fn parse_pg_like_match_ops() { #[test] fn parse_array_index_expr() { let num: Vec = (0..=10) - .map(|s| Expr::Value(number(&s.to_string()))) + .map(|s| Expr::Value(number(&s.to_string()).with_empty_span())) .collect(); let sql = "SELECT foo[0] FROM foos"; let select = pg_and_generic().verified_only_select(sql); assert_eq!( - &Expr::Subscript { - expr: Box::new(Expr::Identifier(Ident::new("foo"))), - subscript: Box::new(Subscript::Index { + &Expr::CompoundFieldAccess { + root: Box::new(Expr::Identifier(Ident::new("foo"))), + access_chain: vec![AccessExpr::Subscript(Subscript::Index { index: num[0].clone() - }), + })], }, expr_from_projection(only(&select.projection)), ); @@ -2076,16 +2246,16 @@ fn parse_array_index_expr() { let sql = "SELECT foo[0][0] FROM foos"; let select = pg_and_generic().verified_only_select(sql); assert_eq!( - &Expr::Subscript { - expr: Box::new(Expr::Subscript { - expr: 
Box::new(Expr::Identifier(Ident::new("foo"))), - subscript: Box::new(Subscript::Index { + &Expr::CompoundFieldAccess { + root: Box::new(Expr::Identifier(Ident::new("foo"))), + access_chain: vec![ + AccessExpr::Subscript(Subscript::Index { index: num[0].clone() }), - }), - subscript: Box::new(Subscript::Index { - index: num[0].clone() - }), + AccessExpr::Subscript(Subscript::Index { + index: num[0].clone() + }) + ], }, expr_from_projection(only(&select.projection)), ); @@ -2093,27 +2263,27 @@ fn parse_array_index_expr() { let sql = r#"SELECT bar[0]["baz"]["fooz"] FROM foos"#; let select = pg_and_generic().verified_only_select(sql); assert_eq!( - &Expr::Subscript { - expr: Box::new(Expr::Subscript { - expr: Box::new(Expr::Subscript { - expr: Box::new(Expr::Identifier(Ident::new("bar"))), - subscript: Box::new(Subscript::Index { - index: num[0].clone() - }) + &Expr::CompoundFieldAccess { + root: Box::new(Expr::Identifier(Ident::new("bar"))), + access_chain: vec![ + AccessExpr::Subscript(Subscript::Index { + index: num[0].clone() }), - subscript: Box::new(Subscript::Index { + AccessExpr::Subscript(Subscript::Index { index: Expr::Identifier(Ident { value: "baz".to_string(), - quote_style: Some('"') + quote_style: Some('"'), + span: Span::empty(), }) - }) - }), - subscript: Box::new(Subscript::Index { - index: Expr::Identifier(Ident { - value: "fooz".to_string(), - quote_style: Some('"') - }) - }) + }), + AccessExpr::Subscript(Subscript::Index { + index: Expr::Identifier(Ident { + value: "fooz".to_string(), + quote_style: Some('"'), + span: Span::empty(), + }) + }), + ], }, expr_from_projection(only(&select.projection)), ); @@ -2121,33 +2291,33 @@ fn parse_array_index_expr() { let sql = "SELECT (CAST(ARRAY[ARRAY[2, 3]] AS INT[][]))[1][2]"; let select = pg_and_generic().verified_only_select(sql); assert_eq!( - &Expr::Subscript { - expr: Box::new(Expr::Subscript { - expr: Box::new(Expr::Nested(Box::new(Expr::Cast { - kind: CastKind::Cast, - expr: Box::new(Expr::Array(Array { - elem: vec![Expr::Array(Array { - elem: vec![num[2].clone(), num[3].clone(),], - named: true, - })], + &Expr::CompoundFieldAccess { + root: Box::new(Expr::Nested(Box::new(Expr::Cast { + kind: CastKind::Cast, + expr: Box::new(Expr::Array(Array { + elem: vec![Expr::Array(Array { + elem: vec![num[2].clone(), num[3].clone(),], named: true, - })), - data_type: DataType::Array(ArrayElemTypeDef::SquareBracket( - Box::new(DataType::Array(ArrayElemTypeDef::SquareBracket( - Box::new(DataType::Int(None)), - None - ))), + })], + named: true, + })), + data_type: DataType::Array(ArrayElemTypeDef::SquareBracket( + Box::new(DataType::Array(ArrayElemTypeDef::SquareBracket( + Box::new(DataType::Int(None)), None - )), - format: None, - }))), - subscript: Box::new(Subscript::Index { + ))), + None + )), + format: None, + }))), + access_chain: vec![ + AccessExpr::Subscript(Subscript::Index { index: num[1].clone() }), - }), - subscript: Box::new(Subscript::Index { - index: num[2].clone() - }), + AccessExpr::Subscript(Subscript::Index { + index: num[2].clone() + }), + ], }, expr_from_projection(only(&select.projection)), ); @@ -2169,7 +2339,7 @@ fn parse_array_subscript() { ( "(ARRAY[1, 2, 3, 4, 5, 6])[2]", Subscript::Index { - index: Expr::Value(number("2")), + index: Expr::value(number("2")), }, ), ( @@ -2181,17 +2351,17 @@ fn parse_array_subscript() { ( "(ARRAY[1, 2, 3, 4, 5, 6])[2:5]", Subscript::Slice { - lower_bound: Some(Expr::Value(number("2"))), - upper_bound: Some(Expr::Value(number("5"))), + lower_bound: Some(Expr::value(number("2"))), + 
upper_bound: Some(Expr::value(number("5"))), stride: None, }, ), ( "(ARRAY[1, 2, 3, 4, 5, 6])[2:5:3]", Subscript::Slice { - lower_bound: Some(Expr::Value(number("2"))), - upper_bound: Some(Expr::Value(number("5"))), - stride: Some(Expr::Value(number("3"))), + lower_bound: Some(Expr::value(number("2"))), + upper_bound: Some(Expr::value(number("5"))), + stride: Some(Expr::value(number("3"))), }, ), ( @@ -2200,12 +2370,12 @@ fn parse_array_subscript() { lower_bound: Some(Expr::BinaryOp { left: Box::new(call("array_length", [Expr::Identifier(Ident::new("arr"))])), op: BinaryOperator::Minus, - right: Box::new(Expr::Value(number("3"))), + right: Box::new(Expr::value(number("3"))), }), upper_bound: Some(Expr::BinaryOp { left: Box::new(call("array_length", [Expr::Identifier(Ident::new("arr"))])), op: BinaryOperator::Minus, - right: Box::new(Expr::Value(number("1"))), + right: Box::new(Expr::value(number("1"))), }), stride: None, }, @@ -2214,14 +2384,14 @@ fn parse_array_subscript() { "(ARRAY[1, 2, 3, 4, 5, 6])[:5]", Subscript::Slice { lower_bound: None, - upper_bound: Some(Expr::Value(number("5"))), + upper_bound: Some(Expr::value(number("5"))), stride: None, }, ), ( "(ARRAY[1, 2, 3, 4, 5, 6])[2:]", Subscript::Slice { - lower_bound: Some(Expr::Value(number("2"))), + lower_bound: Some(Expr::value(number("2"))), upper_bound: None, stride: None, }, @@ -2236,9 +2406,13 @@ fn parse_array_subscript() { ), ]; for (sql, expect) in tests { - let Expr::Subscript { subscript, .. } = pg_and_generic().verified_expr(sql) else { + let Expr::CompoundFieldAccess { access_chain, .. } = pg_and_generic().verified_expr(sql) + else { panic!("expected subscript expr"); }; + let Some(AccessExpr::Subscript(subscript)) = access_chain.last() else { + panic!("expected subscript"); + }; assert_eq!(expect, *subscript); } @@ -2249,25 +2423,25 @@ fn parse_array_subscript() { fn parse_array_multi_subscript() { let expr = pg_and_generic().verified_expr("make_array(1, 2, 3)[1:2][2]"); assert_eq!( - Expr::Subscript { - expr: Box::new(Expr::Subscript { - expr: Box::new(call( - "make_array", - vec![ - Expr::Value(number("1")), - Expr::Value(number("2")), - Expr::Value(number("3")) - ] - )), - subscript: Box::new(Subscript::Slice { - lower_bound: Some(Expr::Value(number("1"))), - upper_bound: Some(Expr::Value(number("2"))), + Expr::CompoundFieldAccess { + root: Box::new(call( + "make_array", + vec![ + Expr::value(number("1")), + Expr::value(number("2")), + Expr::value(number("3")) + ] + )), + access_chain: vec![ + AccessExpr::Subscript(Subscript::Slice { + lower_bound: Some(Expr::value(number("1"))), + upper_bound: Some(Expr::value(number("2"))), stride: None, }), - }), - subscript: Box::new(Subscript::Index { - index: Expr::Value(number("2")), - }), + AccessExpr::Subscript(Subscript::Index { + index: Expr::value(number("2")), + }), + ], }, expr, ); @@ -2335,6 +2509,271 @@ fn parse_create_anonymous_index() { } } +#[test] +/// Test to verify the correctness of parsing the `CREATE INDEX` statement with optional operator classes. +/// +/// # Implementative details +/// +/// At this time, since the parser library is not intended to take care of the semantics of the SQL statements, +/// there is no way to verify the correctness of the operator classes, nor whether they are valid for the given +/// index type. This test is only intended to verify that the parser can correctly parse the statement. For this +/// reason, the test includes a `totally_not_valid` operator class. 
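A minimal standalone sketch of driving the operator-class support exercised by the test below through the crate's public API; the index name, table, and `gin_trgm_ops` class are illustrative values mirroring the test inputs, and the only calls assumed are `Parser::parse_sql` and `PostgreSqlDialect`:

    use sqlparser::ast::Statement;
    use sqlparser::dialect::PostgreSqlDialect;
    use sqlparser::parser::Parser;

    fn main() {
        // An expression index with an explicit operator class, mirroring the
        // single-column statement built in the test below.
        let sql = "CREATE INDEX the_index_name ON users \
                   USING GIN (concat_users_name(first_name, last_name) gin_trgm_ops)";
        let statements = Parser::parse_sql(&PostgreSqlDialect {}, sql).expect("valid SQL");
        match &statements[0] {
            // The parser records the operator class per index column; it does not
            // check whether the class is valid for the chosen index type.
            Statement::CreateIndex(index) => assert_eq!(index.columns.len(), 1),
            other => panic!("unexpected statement: {other}"),
        }
    }
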
+fn parse_create_indices_with_operator_classes() { + let indices = [ + IndexType::GIN, + IndexType::GiST, + IndexType::SPGiST, + IndexType::Custom("CustomIndexType".into()), + ]; + let operator_classes: [Option; 4] = [ + None, + Some("gin_trgm_ops".into()), + Some("gist_trgm_ops".into()), + Some("totally_not_valid".into()), + ]; + + for expected_index_type in indices { + for expected_operator_class in &operator_classes { + let single_column_sql_statement = format!( + "CREATE INDEX the_index_name ON users USING {expected_index_type} (concat_users_name(first_name, last_name){})", + expected_operator_class.as_ref().map(|oc| format!(" {}", oc)) + .unwrap_or_default() + ); + let multi_column_sql_statement = format!( + "CREATE INDEX the_index_name ON users USING {expected_index_type} (column_name,concat_users_name(first_name, last_name){})", + expected_operator_class.as_ref().map(|oc| format!(" {}", oc)) + .unwrap_or_default() + ); + + let expected_function_column = IndexColumn { + column: OrderByExpr { + expr: Expr::Function(Function { + name: ObjectName(vec![ObjectNamePart::Identifier(Ident { + value: "concat_users_name".to_owned(), + quote_style: None, + span: Span::empty(), + })]), + uses_odbc_syntax: false, + parameters: FunctionArguments::None, + args: FunctionArguments::List(FunctionArgumentList { + duplicate_treatment: None, + args: vec![ + FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::Identifier( + Ident { + value: "first_name".to_owned(), + quote_style: None, + span: Span::empty(), + }, + ))), + FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::Identifier( + Ident { + value: "last_name".to_owned(), + quote_style: None, + span: Span::empty(), + }, + ))), + ], + clauses: vec![], + }), + filter: None, + null_treatment: None, + over: None, + within_group: vec![], + }), + options: OrderByOptions { + asc: None, + nulls_first: None, + }, + with_fill: None, + }, + operator_class: expected_operator_class.clone(), + }; + + match pg().verified_stmt(&single_column_sql_statement) { + Statement::CreateIndex(CreateIndex { + name: Some(ObjectName(name)), + table_name: ObjectName(table_name), + using: Some(using), + columns, + unique: false, + concurrently: false, + if_not_exists: false, + include, + nulls_distinct: None, + with, + predicate: None, + }) => { + assert_eq_vec(&["the_index_name"], &name); + assert_eq_vec(&["users"], &table_name); + assert_eq!(expected_index_type, using); + assert_eq!(expected_function_column, columns[0],); + assert!(include.is_empty()); + assert!(with.is_empty()); + } + _ => unreachable!(), + } + + match pg().verified_stmt(&multi_column_sql_statement) { + Statement::CreateIndex(CreateIndex { + name: Some(ObjectName(name)), + table_name: ObjectName(table_name), + using: Some(using), + columns, + unique: false, + concurrently: false, + if_not_exists: false, + include, + nulls_distinct: None, + with, + predicate: None, + }) => { + assert_eq_vec(&["the_index_name"], &name); + assert_eq_vec(&["users"], &table_name); + assert_eq!(expected_index_type, using); + assert_eq!( + IndexColumn { + column: OrderByExpr { + expr: Expr::Identifier(Ident { + value: "column_name".to_owned(), + quote_style: None, + span: Span::empty() + }), + options: OrderByOptions { + asc: None, + nulls_first: None, + }, + with_fill: None, + }, + operator_class: None + }, + columns[0], + ); + assert_eq!(expected_function_column, columns[1],); + assert!(include.is_empty()); + assert!(with.is_empty()); + } + _ => unreachable!(), + } + } + } +} + +#[test] +fn parse_create_bloom() { + let sql = + "CREATE 
INDEX bloomidx ON tbloom USING BLOOM (i1,i2,i3) WITH (length = 80, col1 = 2, col2 = 2, col3 = 4)"; + match pg().verified_stmt(sql) { + Statement::CreateIndex(CreateIndex { + name: Some(ObjectName(name)), + table_name: ObjectName(table_name), + using: Some(using), + columns, + unique: false, + concurrently: false, + if_not_exists: false, + include, + nulls_distinct: None, + with, + predicate: None, + }) => { + assert_eq_vec(&["bloomidx"], &name); + assert_eq_vec(&["tbloom"], &table_name); + assert_eq!(IndexType::Bloom, using); + assert_eq_vec(&["i1", "i2", "i3"], &columns); + assert!(include.is_empty()); + assert_eq!( + vec![ + Expr::BinaryOp { + left: Box::new(Expr::Identifier(Ident::new("length"))), + op: BinaryOperator::Eq, + right: Box::new(Expr::Value(number("80").into())), + }, + Expr::BinaryOp { + left: Box::new(Expr::Identifier(Ident::new("col1"))), + op: BinaryOperator::Eq, + right: Box::new(Expr::Value(number("2").into())), + }, + Expr::BinaryOp { + left: Box::new(Expr::Identifier(Ident::new("col2"))), + op: BinaryOperator::Eq, + right: Box::new(Expr::Value(number("2").into())), + }, + Expr::BinaryOp { + left: Box::new(Expr::Identifier(Ident::new("col3"))), + op: BinaryOperator::Eq, + right: Box::new(Expr::Value(number("4").into())), + }, + ], + with + ); + } + _ => unreachable!(), + } +} + +#[test] +fn parse_create_brin() { + let sql = "CREATE INDEX brin_sensor_data_recorded_at ON sensor_data USING BRIN (recorded_at)"; + match pg().verified_stmt(sql) { + Statement::CreateIndex(CreateIndex { + name: Some(ObjectName(name)), + table_name: ObjectName(table_name), + using: Some(using), + columns, + unique: false, + concurrently: false, + if_not_exists: false, + include, + nulls_distinct: None, + with, + predicate: None, + }) => { + assert_eq_vec(&["brin_sensor_data_recorded_at"], &name); + assert_eq_vec(&["sensor_data"], &table_name); + assert_eq!(IndexType::BRIN, using); + assert_eq_vec(&["recorded_at"], &columns); + assert!(include.is_empty()); + assert!(with.is_empty()); + } + _ => unreachable!(), + } +} + +#[test] +fn parse_create_table_with_inherits() { + let single_inheritance_sql = + "CREATE TABLE child_table (child_column INT) INHERITS (public.parent_table)"; + match pg().verified_stmt(single_inheritance_sql) { + Statement::CreateTable(CreateTable { + inherits: Some(inherits), + .. + }) => { + assert_eq_vec(&["public", "parent_table"], &inherits[0].0); + } + _ => unreachable!(), + } + + let double_inheritance_sql = "CREATE TABLE child_table (child_column INT) INHERITS (public.parent_table, pg_catalog.pg_settings)"; + match pg().verified_stmt(double_inheritance_sql) { + Statement::CreateTable(CreateTable { + inherits: Some(inherits), + .. 
+ }) => { + assert_eq_vec(&["public", "parent_table"], &inherits[0].0); + assert_eq_vec(&["pg_catalog", "pg_settings"], &inherits[1].0); + } + _ => unreachable!(), + } +} + +#[test] +fn parse_create_table_with_empty_inherits_fails() { + assert!(matches!( + pg().parse_sql_statements("CREATE TABLE child_table (child_column INT) INHERITS ()"), + Err(ParserError::ParserError(_)) + )); +} + #[test] fn parse_create_index_concurrently() { let sql = "CREATE INDEX CONCURRENTLY IF NOT EXISTS my_index ON my_table(col1,col2)"; @@ -2495,7 +2934,8 @@ fn parse_array_subquery_expr() { let select = pg().verified_only_select(sql); assert_eq!( &Expr::Function(Function { - name: ObjectName(vec![Ident::new("ARRAY")]), + name: ObjectName::from(vec![Ident::new("ARRAY")]), + uses_odbc_syntax: false, parameters: FunctionArguments::None, args: FunctionArguments::Subquery(Box::new(Query { with: None, @@ -2503,9 +2943,13 @@ fn parse_array_subquery_expr() { op: SetOperator::Union, set_quantifier: SetQuantifier::None, left: Box::new(SetExpr::Select(Box::new(Select { + select_token: AttachedToken::empty(), distinct: None, top: None, - projection: vec![SelectItem::UnnamedExpr(Expr::Value(number("1")))], + top_before_distinct: false, + projection: vec![SelectItem::UnnamedExpr(Expr::Value( + (number("1")).with_empty_span() + ))], into: None, from: vec![], lateral_views: vec![], @@ -2521,11 +2965,16 @@ fn parse_array_subquery_expr() { window_before_qualify: false, value_table_mode: None, connect_by: None, + flavor: SelectFlavor::Standard, }))), right: Box::new(SetExpr::Select(Box::new(Select { + select_token: AttachedToken::empty(), distinct: None, top: None, - projection: vec![SelectItem::UnnamedExpr(Expr::Value(number("2")))], + top_before_distinct: false, + projection: vec![SelectItem::UnnamedExpr(Expr::Value( + (number("2")).with_empty_span() + ))], into: None, from: vec![], lateral_views: vec![], @@ -2541,17 +2990,17 @@ fn parse_array_subquery_expr() { window_before_qualify: false, value_table_mode: None, connect_by: None, + flavor: SelectFlavor::Standard, }))), }), order_by: None, - limit: None, - limit_by: vec![], - offset: None, + limit_clause: None, fetch: None, locks: vec![], for_clause: None, settings: None, format_clause: None, + pipe_operators: vec![], })), filter: None, null_treatment: None, @@ -2567,16 +3016,16 @@ fn test_transaction_statement() { let statement = pg().verified_stmt("SET TRANSACTION SNAPSHOT '000003A1-1'"); assert_eq!( statement, - Statement::SetTransaction { + Statement::Set(Set::SetTransaction { modes: vec![], snapshot: Some(Value::SingleQuotedString(String::from("000003A1-1"))), session: false - } + }) ); let statement = pg().verified_stmt("SET SESSION CHARACTERISTICS AS TRANSACTION READ ONLY, READ WRITE, ISOLATION LEVEL SERIALIZABLE"); assert_eq!( statement, - Statement::SetTransaction { + Statement::Set(Set::SetTransaction { modes: vec![ TransactionMode::AccessMode(TransactionAccessMode::ReadOnly), TransactionMode::AccessMode(TransactionAccessMode::ReadWrite), @@ -2584,7 +3033,7 @@ fn test_transaction_statement() { ], snapshot: None, session: true - } + }) ); } @@ -2596,7 +3045,9 @@ fn test_json() { SelectItem::UnnamedExpr(Expr::BinaryOp { left: Box::new(Expr::Identifier(Ident::new("params"))), op: BinaryOperator::LongArrow, - right: Box::new(Expr::Value(Value::SingleQuotedString("name".to_string()))), + right: Box::new(Expr::Value( + (Value::SingleQuotedString("name".to_string())).with_empty_span() + )), }), select.projection[0] ); @@ -2607,7 +3058,9 @@ fn test_json() { 
SelectItem::UnnamedExpr(Expr::BinaryOp { left: Box::new(Expr::Identifier(Ident::new("params"))), op: BinaryOperator::Arrow, - right: Box::new(Expr::Value(Value::SingleQuotedString("name".to_string()))), + right: Box::new(Expr::Value( + (Value::SingleQuotedString("name".to_string())).with_empty_span() + )), }), select.projection[0] ); @@ -2619,12 +3072,14 @@ fn test_json() { left: Box::new(Expr::BinaryOp { left: Box::new(Expr::Identifier(Ident::new("info"))), op: BinaryOperator::Arrow, - right: Box::new(Expr::Value(Value::SingleQuotedString("items".to_string()))) + right: Box::new(Expr::Value( + (Value::SingleQuotedString("items".to_string())).with_empty_span() + )) }), op: BinaryOperator::LongArrow, - right: Box::new(Expr::Value(Value::SingleQuotedString( - "product".to_string() - ))), + right: Box::new(Expr::Value( + (Value::SingleQuotedString("product".to_string())).with_empty_span() + )), }), select.projection[0] ); @@ -2636,7 +3091,7 @@ fn test_json() { SelectItem::UnnamedExpr(Expr::BinaryOp { left: Box::new(Expr::Identifier(Ident::new("obj"))), op: BinaryOperator::Arrow, - right: Box::new(Expr::Value(number("42"))), + right: Box::new(Expr::value(number("42"))), }), select.projection[0] ); @@ -2661,9 +3116,9 @@ fn test_json() { left: Box::new(Expr::Identifier(Ident::new("obj"))), op: BinaryOperator::Arrow, right: Box::new(Expr::BinaryOp { - left: Box::new(Expr::Value(number("3"))), + left: Box::new(Expr::value(number("3"))), op: BinaryOperator::Multiply, - right: Box::new(Expr::Value(number("2"))), + right: Box::new(Expr::value(number("2"))), }), }), select.projection[0] @@ -2675,9 +3130,9 @@ fn test_json() { SelectItem::UnnamedExpr(Expr::BinaryOp { left: Box::new(Expr::Identifier(Ident::new("info"))), op: BinaryOperator::HashArrow, - right: Box::new(Expr::Value(Value::SingleQuotedString( - "{a,b,c}".to_string() - ))), + right: Box::new(Expr::Value( + (Value::SingleQuotedString("{a,b,c}".to_string())).with_empty_span() + )), }), select.projection[0] ); @@ -2688,9 +3143,9 @@ fn test_json() { SelectItem::UnnamedExpr(Expr::BinaryOp { left: Box::new(Expr::Identifier(Ident::new("info"))), op: BinaryOperator::HashLongArrow, - right: Box::new(Expr::Value(Value::SingleQuotedString( - "{a,b,c}".to_string() - ))), + right: Box::new(Expr::Value( + (Value::SingleQuotedString("{a,b,c}".to_string())).with_empty_span() + )), }), select.projection[0] ); @@ -2701,9 +3156,9 @@ fn test_json() { Expr::BinaryOp { left: Box::new(Expr::Identifier(Ident::new("info"))), op: BinaryOperator::AtArrow, - right: Box::new(Expr::Value(Value::SingleQuotedString( - "{\"a\": 1}".to_string() - ))), + right: Box::new(Expr::Value( + (Value::SingleQuotedString("{\"a\": 1}".to_string())).with_empty_span() + )), }, select.selection.unwrap(), ); @@ -2712,9 +3167,9 @@ fn test_json() { let select = pg().verified_only_select(sql); assert_eq!( Expr::BinaryOp { - left: Box::new(Expr::Value(Value::SingleQuotedString( - "{\"a\": 1}".to_string() - ))), + left: Box::new(Expr::Value( + (Value::SingleQuotedString("{\"a\": 1}".to_string())).with_empty_span() + )), op: BinaryOperator::ArrowAt, right: Box::new(Expr::Identifier(Ident::new("info"))), }, @@ -2729,8 +3184,8 @@ fn test_json() { op: BinaryOperator::HashMinus, right: Box::new(Expr::Array(Array { elem: vec![ - Expr::Value(Value::SingleQuotedString("a".to_string())), - Expr::Value(Value::SingleQuotedString("b".to_string())), + Expr::Value((Value::SingleQuotedString("a".to_string())).with_empty_span()), + Expr::Value((Value::SingleQuotedString("b".to_string())).with_empty_span()), ], 
named: true, })), @@ -2744,7 +3199,9 @@ fn test_json() { Expr::BinaryOp { left: Box::new(Expr::Identifier(Ident::from("info"))), op: BinaryOperator::AtQuestion, - right: Box::new(Expr::Value(Value::SingleQuotedString("$.a".to_string())),), + right: Box::new(Expr::Value( + (Value::SingleQuotedString("$.a".to_string())).with_empty_span() + ),), }, select.selection.unwrap(), ); @@ -2755,7 +3212,9 @@ fn test_json() { Expr::BinaryOp { left: Box::new(Expr::Identifier(Ident::from("info"))), op: BinaryOperator::AtAt, - right: Box::new(Expr::Value(Value::SingleQuotedString("$.a".to_string())),), + right: Box::new(Expr::Value( + (Value::SingleQuotedString("$.a".to_string())).with_empty_span() + ),), }, select.selection.unwrap(), ); @@ -2766,7 +3225,9 @@ fn test_json() { Expr::BinaryOp { left: Box::new(Expr::Identifier(Ident::new("info"))), op: BinaryOperator::Question, - right: Box::new(Expr::Value(Value::SingleQuotedString("b".to_string()))), + right: Box::new(Expr::Value( + (Value::SingleQuotedString("b".to_string())).with_empty_span() + )), }, select.selection.unwrap(), ); @@ -2779,8 +3240,8 @@ fn test_json() { op: BinaryOperator::QuestionAnd, right: Box::new(Expr::Array(Array { elem: vec![ - Expr::Value(Value::SingleQuotedString("b".to_string())), - Expr::Value(Value::SingleQuotedString("c".to_string())) + Expr::Value((Value::SingleQuotedString("b".to_string())).with_empty_span()), + Expr::Value((Value::SingleQuotedString("c".to_string())).with_empty_span()) ], named: true })) @@ -2796,8 +3257,8 @@ fn test_json() { op: BinaryOperator::QuestionPipe, right: Box::new(Expr::Array(Array { elem: vec![ - Expr::Value(Value::SingleQuotedString("b".to_string())), - Expr::Value(Value::SingleQuotedString("c".to_string())) + Expr::Value((Value::SingleQuotedString("b".to_string())).with_empty_span()), + Expr::Value((Value::SingleQuotedString("c".to_string())).with_empty_span()) ], named: true })) @@ -2806,6 +3267,19 @@ fn test_json() { ); } +#[test] +fn test_fn_arg_with_value_operator() { + match pg().verified_expr("JSON_OBJECT('name' VALUE 'value')") { + Expr::Function(Function { args: FunctionArguments::List(FunctionArgumentList { args, .. }), .. }) => { + assert!(matches!( + &args[..], + &[FunctionArg::ExprNamed { operator: FunctionArgOperator::Value, .. }] + ), "Invalid function argument: {:?}", args); + } + other => panic!("Expected: JSON_OBJECT('name' VALUE 'value') to be parsed as a function, but got {other:?}"), + } +} + #[test] fn parse_json_table_is_not_reserved() { // JSON_TABLE is not a reserved keyword in PostgreSQL, even though it is in SQL:2023 @@ -2816,7 +3290,10 @@ fn parse_json_table_is_not_reserved() { TableFactor::Table { name: ObjectName(name), .. 
- } => assert_eq!("JSON_TABLE", name[0].value), + } => assert_eq!( + ObjectNamePart::Identifier(Ident::new("JSON_TABLE")), + name[0] + ), other => panic!("Expected: JSON_TABLE to be parsed as a table name, but got {other:?}"), } } @@ -2825,50 +3302,62 @@ fn parse_json_table_is_not_reserved() { fn test_composite_value() { let sql = "SELECT (on_hand.item).name FROM on_hand WHERE (on_hand.item).price > 9"; let select = pg().verified_only_select(sql); + + let Expr::CompoundFieldAccess { root, access_chain } = + expr_from_projection(&select.projection[0]) + else { + unreachable!("expected projection: got {:?}", &select.projection[0]); + }; assert_eq!( - SelectItem::UnnamedExpr(Expr::CompositeAccess { - key: Ident::new("name"), - expr: Box::new(Expr::Nested(Box::new(Expr::CompoundIdentifier(vec![ - Ident::new("on_hand"), - Ident::new("item") - ])))) - }), - select.projection[0] + root.as_ref(), + &Expr::Nested(Box::new(Expr::CompoundIdentifier(vec![ + Ident::new("on_hand"), + Ident::new("item") + ]))) + ); + assert_eq!( + access_chain.as_slice(), + &[AccessExpr::Dot(Expr::Identifier(Ident::new("name")))] ); assert_eq!( - select.selection, - Some(Expr::BinaryOp { - left: Box::new(Expr::CompositeAccess { - key: Ident::new("price"), - expr: Box::new(Expr::Nested(Box::new(Expr::CompoundIdentifier(vec![ + select.selection.as_ref().unwrap(), + &Expr::BinaryOp { + left: Box::new(Expr::CompoundFieldAccess { + root: Expr::Nested(Box::new(Expr::CompoundIdentifier(vec![ Ident::new("on_hand"), Ident::new("item") - ])))) + ]))) + .into(), + access_chain: vec![AccessExpr::Dot(Expr::Identifier(Ident::new("price")))] }), op: BinaryOperator::Gt, - right: Box::new(Expr::Value(number("9"))) - }) + right: Box::new(Expr::value(number("9"))) + } ); let sql = "SELECT (information_schema._pg_expandarray(ARRAY['i', 'i'])).n"; let select = pg().verified_only_select(sql); assert_eq!( - SelectItem::UnnamedExpr(Expr::CompositeAccess { - key: Ident::new("n"), - expr: Box::new(Expr::Nested(Box::new(Expr::Function(Function { - name: ObjectName(vec![ + &Expr::CompoundFieldAccess { + root: Box::new(Expr::Nested(Box::new(Expr::Function(Function { + name: ObjectName::from(vec![ Ident::new("information_schema"), Ident::new("_pg_expandarray") ]), + uses_odbc_syntax: false, parameters: FunctionArguments::None, args: FunctionArguments::List(FunctionArgumentList { duplicate_treatment: None, args: vec![FunctionArg::Unnamed(FunctionArgExpr::Expr(Expr::Array( Array { elem: vec![ - Expr::Value(Value::SingleQuotedString("i".to_string())), - Expr::Value(Value::SingleQuotedString("i".to_string())), + Expr::Value( + (Value::SingleQuotedString("i".to_string())).with_empty_span() + ), + Expr::Value( + (Value::SingleQuotedString("i".to_string())).with_empty_span() + ), ], named: true } @@ -2879,74 +3368,13 @@ fn test_composite_value() { filter: None, over: None, within_group: vec![], - })))) - }), - select.projection[0] + })))), + access_chain: vec![AccessExpr::Dot(Expr::Identifier(Ident::new("n")))], + }, + expr_from_projection(&select.projection[0]) ); } -#[test] -fn parse_comments() { - match pg().verified_stmt("COMMENT ON COLUMN tab.name IS 'comment'") { - Statement::Comment { - object_type, - object_name, - comment: Some(comment), - if_exists, - } => { - assert_eq!("comment", comment); - assert_eq!("tab.name", object_name.to_string()); - assert_eq!(CommentObject::Column, object_type); - assert!(!if_exists); - } - _ => unreachable!(), - } - - match pg().verified_stmt("COMMENT ON EXTENSION plpgsql IS 'comment'") { - Statement::Comment { - 
object_type, - object_name, - comment: Some(comment), - if_exists, - } => { - assert_eq!("comment", comment); - assert_eq!("plpgsql", object_name.to_string()); - assert_eq!(CommentObject::Extension, object_type); - assert!(!if_exists); - } - _ => unreachable!(), - } - - match pg().verified_stmt("COMMENT ON TABLE public.tab IS 'comment'") { - Statement::Comment { - object_type, - object_name, - comment: Some(comment), - if_exists, - } => { - assert_eq!("comment", comment); - assert_eq!("public.tab", object_name.to_string()); - assert_eq!(CommentObject::Table, object_type); - assert!(!if_exists); - } - _ => unreachable!(), - } - - match pg().verified_stmt("COMMENT IF EXISTS ON TABLE public.tab IS NULL") { - Statement::Comment { - object_type, - object_name, - comment: None, - if_exists, - } => { - assert_eq!("public.tab", object_name.to_string()); - assert_eq!(CommentObject::Table, object_type); - assert!(if_exists); - } - _ => unreachable!(), - } -} - #[test] fn parse_quoted_identifier() { pg_and_generic().verified_stmt(r#"SELECT "quoted "" ident""#); @@ -2989,27 +3417,27 @@ fn parse_escaped_literal_string() { let select = pg_and_generic().verified_only_select(sql); assert_eq!(6, select.projection.len()); assert_eq!( - &Expr::Value(Value::EscapedStringLiteral("s1 \n s1".to_string())), + &Expr::Value((Value::EscapedStringLiteral("s1 \n s1".to_string())).with_empty_span()), expr_from_projection(&select.projection[0]) ); assert_eq!( - &Expr::Value(Value::EscapedStringLiteral("s2 \\n s2".to_string())), + &Expr::Value((Value::EscapedStringLiteral("s2 \\n s2".to_string())).with_empty_span()), expr_from_projection(&select.projection[1]) ); assert_eq!( - &Expr::Value(Value::EscapedStringLiteral("s3 \\\n s3".to_string())), + &Expr::Value((Value::EscapedStringLiteral("s3 \\\n s3".to_string())).with_empty_span()), expr_from_projection(&select.projection[2]) ); assert_eq!( - &Expr::Value(Value::EscapedStringLiteral("s4 \\\\n s4".to_string())), + &Expr::Value((Value::EscapedStringLiteral("s4 \\\\n s4".to_string())).with_empty_span()), expr_from_projection(&select.projection[3]) ); assert_eq!( - &Expr::Value(Value::EscapedStringLiteral("'".to_string())), + &Expr::Value((Value::EscapedStringLiteral("'".to_string())).with_empty_span()), expr_from_projection(&select.projection[4]) ); assert_eq!( - &Expr::Value(Value::EscapedStringLiteral("foo \\".to_string())), + &Expr::Value((Value::EscapedStringLiteral("foo \\".to_string())).with_empty_span()), expr_from_projection(&select.projection[5]) ); @@ -3027,31 +3455,31 @@ fn parse_escaped_literal_string() { let select = pg_and_generic().verified_only_select_with_canonical(sql, canonical); assert_eq!(7, select.projection.len()); assert_eq!( - &Expr::Value(Value::EscapedStringLiteral("\u{0001}".to_string())), + &Expr::Value((Value::EscapedStringLiteral("\u{0001}".to_string())).with_empty_span()), expr_from_projection(&select.projection[0]) ); assert_eq!( - &Expr::Value(Value::EscapedStringLiteral("\u{10ffff}".to_string())), + &Expr::Value((Value::EscapedStringLiteral("\u{10ffff}".to_string())).with_empty_span()), expr_from_projection(&select.projection[1]) ); assert_eq!( - &Expr::Value(Value::EscapedStringLiteral("\u{000c}".to_string())), + &Expr::Value((Value::EscapedStringLiteral("\u{000c}".to_string())).with_empty_span()), expr_from_projection(&select.projection[2]) ); assert_eq!( - &Expr::Value(Value::EscapedStringLiteral("%".to_string())), + &Expr::Value((Value::EscapedStringLiteral("%".to_string())).with_empty_span()), 
expr_from_projection(&select.projection[3]) ); assert_eq!( - &Expr::Value(Value::EscapedStringLiteral("\u{0002}".to_string())), + &Expr::Value((Value::EscapedStringLiteral("\u{0002}".to_string())).with_empty_span()), expr_from_projection(&select.projection[4]) ); assert_eq!( - &Expr::Value(Value::EscapedStringLiteral("%".to_string())), + &Expr::Value((Value::EscapedStringLiteral("%".to_string())).with_empty_span()), expr_from_projection(&select.projection[5]) ); assert_eq!( - &Expr::Value(Value::EscapedStringLiteral("%".to_string())), + &Expr::Value((Value::EscapedStringLiteral("%".to_string())).with_empty_span()), expr_from_projection(&select.projection[6]) ); @@ -3099,7 +3527,8 @@ fn parse_current_functions() { let select = pg_and_generic().verified_only_select(sql); assert_eq!( &Expr::Function(Function { - name: ObjectName(vec![Ident::new("CURRENT_CATALOG")]), + name: ObjectName::from(vec![Ident::new("CURRENT_CATALOG")]), + uses_odbc_syntax: false, parameters: FunctionArguments::None, args: FunctionArguments::None, null_treatment: None, @@ -3111,7 +3540,8 @@ fn parse_current_functions() { ); assert_eq!( &Expr::Function(Function { - name: ObjectName(vec![Ident::new("CURRENT_USER")]), + name: ObjectName::from(vec![Ident::new("CURRENT_USER")]), + uses_odbc_syntax: false, parameters: FunctionArguments::None, args: FunctionArguments::None, null_treatment: None, @@ -3123,7 +3553,8 @@ fn parse_current_functions() { ); assert_eq!( &Expr::Function(Function { - name: ObjectName(vec![Ident::new("SESSION_USER")]), + name: ObjectName::from(vec![Ident::new("SESSION_USER")]), + uses_odbc_syntax: false, parameters: FunctionArguments::None, args: FunctionArguments::None, null_treatment: None, @@ -3135,7 +3566,8 @@ fn parse_current_functions() { ); assert_eq!( &Expr::Function(Function { - name: ObjectName(vec![Ident::new("USER")]), + name: ObjectName::from(vec![Ident::new("USER")]), + uses_odbc_syntax: false, parameters: FunctionArguments::None, args: FunctionArguments::None, null_treatment: None, @@ -3182,13 +3614,16 @@ fn parse_custom_operator() { left: Box::new(Expr::Identifier(Ident { value: "relname".into(), quote_style: None, + span: Span::empty(), })), op: BinaryOperator::PGCustomBinaryOperator(vec![ "database".into(), "pg_catalog".into(), "~".into() ]), - right: Box::new(Expr::Value(Value::SingleQuotedString("^(table)$".into()))) + right: Box::new(Expr::Value( + (Value::SingleQuotedString("^(table)$".into())).with_empty_span() + )) }) ); @@ -3201,9 +3636,12 @@ fn parse_custom_operator() { left: Box::new(Expr::Identifier(Ident { value: "relname".into(), quote_style: None, + span: Span::empty(), })), op: BinaryOperator::PGCustomBinaryOperator(vec!["pg_catalog".into(), "~".into()]), - right: Box::new(Expr::Value(Value::SingleQuotedString("^(table)$".into()))) + right: Box::new(Expr::Value( + (Value::SingleQuotedString("^(table)$".into())).with_empty_span() + )) }) ); @@ -3216,9 +3654,12 @@ fn parse_custom_operator() { left: Box::new(Expr::Identifier(Ident { value: "relname".into(), quote_style: None, + span: Span::empty(), })), op: BinaryOperator::PGCustomBinaryOperator(vec!["~".into()]), - right: Box::new(Expr::Value(Value::SingleQuotedString("^(table)$".into()))) + right: Box::new(Expr::Value( + (Value::SingleQuotedString("^(table)$".into())).with_empty_span() + )) }) ); } @@ -3304,9 +3745,9 @@ fn parse_create_role() { assert_eq!(*bypassrls, Some(true)); assert_eq!( *password, - Some(Password::Password(Expr::Value(Value::SingleQuotedString( - "abcdef".into() - )))) + 
Some(Password::Password(Expr::Value( + (Value::SingleQuotedString("abcdef".into())).with_empty_span() + ))) ); assert_eq!(*superuser, Some(true)); assert_eq!(*create_db, Some(false)); @@ -3315,7 +3756,9 @@ fn parse_create_role() { assert_eq!(*connection_limit, None); assert_eq!( *valid_until, - Some(Expr::Value(Value::SingleQuotedString("2025-01-01".into()))) + Some(Expr::Value( + (Value::SingleQuotedString("2025-01-01".into())).with_empty_span() + )) ); assert_eq_vec(&["role1", "role2"], in_role); assert!(in_group.is_empty()); @@ -3366,12 +3809,14 @@ fn parse_alter_role() { Statement::AlterRole { name: Ident { value: "old_name".into(), - quote_style: None + quote_style: None, + span: Span::empty(), }, operation: AlterRoleOperation::RenameRole { role_name: Ident { value: "new_name".into(), - quote_style: None + quote_style: None, + span: Span::empty(), } }, } @@ -3383,7 +3828,8 @@ fn parse_alter_role() { Statement::AlterRole { name: Ident { value: "role_name".into(), - quote_style: None + quote_style: None, + span: Span::empty(), }, operation: AlterRoleOperation::WithOptions { options: vec![ @@ -3394,13 +3840,15 @@ fn parse_alter_role() { RoleOption::Login(true), RoleOption::Replication(true), RoleOption::BypassRLS(true), - RoleOption::ConnectionLimit(Expr::Value(number("100"))), + RoleOption::ConnectionLimit(Expr::value(number("100"))), RoleOption::Password({ - Password::Password(Expr::Value(Value::SingleQuotedString("abcdef".into()))) + Password::Password(Expr::Value( + (Value::SingleQuotedString("abcdef".into())).with_empty_span(), + )) }), - RoleOption::ValidUntil(Expr::Value(Value::SingleQuotedString( - "2025-01-01".into(), - ))) + RoleOption::ValidUntil(Expr::Value( + (Value::SingleQuotedString("2025-01-01".into(),)).with_empty_span() + )) ] }, } @@ -3412,7 +3860,8 @@ fn parse_alter_role() { Statement::AlterRole { name: Ident { value: "role_name".into(), - quote_style: None + quote_style: None, + span: Span::empty(), }, operation: AlterRoleOperation::WithOptions { options: vec![ @@ -3435,12 +3884,14 @@ fn parse_alter_role() { Statement::AlterRole { name: Ident { value: "role_name".into(), - quote_style: None + quote_style: None, + span: Span::empty(), }, operation: AlterRoleOperation::Set { - config_name: ObjectName(vec![Ident { + config_name: ObjectName::from(vec![Ident { value: "maintenance_work_mem".into(), - quote_style: None + quote_style: None, + span: Span::empty(), }]), config_value: SetConfigValue::FromCurrent, in_database: None @@ -3454,17 +3905,22 @@ fn parse_alter_role() { [Statement::AlterRole { name: Ident { value: "role_name".into(), - quote_style: None + quote_style: None, + span: Span::empty(), }, operation: AlterRoleOperation::Set { - config_name: ObjectName(vec![Ident { + config_name: ObjectName::from(vec![Ident { value: "maintenance_work_mem".into(), - quote_style: None + quote_style: None, + span: Span::empty(), }]), - config_value: SetConfigValue::Value(Expr::Value(number("100000"))), - in_database: Some(ObjectName(vec![Ident { + config_value: SetConfigValue::Value(Expr::Value( + (number("100000")).with_empty_span() + )), + in_database: Some(ObjectName::from(vec![Ident { value: "database_name".into(), - quote_style: None + quote_style: None, + span: Span::empty(), }])) }, }] @@ -3476,17 +3932,22 @@ fn parse_alter_role() { Statement::AlterRole { name: Ident { value: "role_name".into(), - quote_style: None + quote_style: None, + span: Span::empty(), }, operation: AlterRoleOperation::Set { - config_name: ObjectName(vec![Ident { + config_name: 
ObjectName::from(vec![Ident { value: "maintenance_work_mem".into(), - quote_style: None + quote_style: None, + span: Span::empty(), }]), - config_value: SetConfigValue::Value(Expr::Value(number("100000"))), - in_database: Some(ObjectName(vec![Ident { + config_value: SetConfigValue::Value(Expr::Value( + (number("100000")).with_empty_span() + )), + in_database: Some(ObjectName::from(vec![Ident { value: "database_name".into(), - quote_style: None + quote_style: None, + span: Span::empty(), }])) }, } @@ -3498,17 +3959,20 @@ fn parse_alter_role() { Statement::AlterRole { name: Ident { value: "role_name".into(), - quote_style: None + quote_style: None, + span: Span::empty(), }, operation: AlterRoleOperation::Set { - config_name: ObjectName(vec![Ident { + config_name: ObjectName::from(vec![Ident { value: "maintenance_work_mem".into(), - quote_style: None + quote_style: None, + span: Span::empty(), }]), config_value: SetConfigValue::Default, - in_database: Some(ObjectName(vec![Ident { + in_database: Some(ObjectName::from(vec![Ident { value: "database_name".into(), - quote_style: None + quote_style: None, + span: Span::empty(), }])) }, } @@ -3520,7 +3984,8 @@ fn parse_alter_role() { Statement::AlterRole { name: Ident { value: "role_name".into(), - quote_style: None + quote_style: None, + span: Span::empty(), }, operation: AlterRoleOperation::Reset { config_name: ResetConfig::ALL, @@ -3535,16 +4000,19 @@ fn parse_alter_role() { Statement::AlterRole { name: Ident { value: "role_name".into(), - quote_style: None + quote_style: None, + span: Span::empty(), }, operation: AlterRoleOperation::Reset { - config_name: ResetConfig::ConfigName(ObjectName(vec![Ident { + config_name: ResetConfig::ConfigName(ObjectName::from(vec![Ident { value: "maintenance_work_mem".into(), - quote_style: None + quote_style: None, + span: Span::empty(), }])), - in_database: Some(ObjectName(vec![Ident { + in_database: Some(ObjectName::from(vec![Ident { value: "database_name".into(), - quote_style: None + quote_style: None, + span: Span::empty(), }])) }, } @@ -3565,10 +4033,12 @@ fn parse_delimited_identifiers() { args, with_hints, version, - with_ordinality: _, - partitions: _, + .. 
} => { - assert_eq!(vec![Ident::with_quote('"', "a table")], name.0); + assert_eq!( + ObjectName::from(vec![Ident::with_quote('"', "a table")]), + name + ); assert_eq!(Ident::with_quote('"', "alias"), alias.unwrap().name); assert!(args.is_none()); assert!(with_hints.is_empty()); @@ -3587,7 +4057,8 @@ fn parse_delimited_identifiers() { ); assert_eq!( &Expr::Function(Function { - name: ObjectName(vec![Ident::with_quote('"', "myfun")]), + name: ObjectName::from(vec![Ident::with_quote('"', "myfun")]), + uses_odbc_syntax: false, parameters: FunctionArguments::None, args: FunctionArguments::List(FunctionArgumentList { duplicate_treatment: None, @@ -3639,10 +4110,11 @@ fn parse_create_function() { let sql = "CREATE FUNCTION add(INTEGER, INTEGER) RETURNS INTEGER LANGUAGE SQL IMMUTABLE STRICT PARALLEL SAFE AS 'select $1 + $2;'"; assert_eq!( pg_and_generic().verified_stmt(sql), - Statement::CreateFunction { + Statement::CreateFunction(CreateFunction { + or_alter: false, or_replace: false, temporary: false, - name: ObjectName(vec![Ident::new("add")]), + name: ObjectName::from(vec![Ident::new("add")]), args: Some(vec![ OperateFunctionArg::unnamed(DataType::Integer(None)), OperateFunctionArg::unnamed(DataType::Integer(None)), @@ -3653,14 +4125,14 @@ fn parse_create_function() { called_on_null: Some(FunctionCalledOnNull::Strict), parallel: Some(FunctionParallel::Safe), function_body: Some(CreateFunctionBody::AsBeforeOptions(Expr::Value( - Value::SingleQuotedString("select $1 + $2;".into()) + (Value::SingleQuotedString("select $1 + $2;".into())).with_empty_span() ))), if_not_exists: false, using: None, determinism_specifier: None, options: None, remote_connection: None, - } + }) ); } @@ -3671,6 +4143,8 @@ fn parse_create_function_detailed() { pg_and_generic().verified_stmt("CREATE OR REPLACE FUNCTION add(a INTEGER, IN b INTEGER = 1) RETURNS INTEGER LANGUAGE SQL STABLE PARALLEL UNSAFE RETURN a + b"); pg_and_generic().verified_stmt("CREATE OR REPLACE FUNCTION add(a INTEGER, IN b INTEGER = 1) RETURNS INTEGER LANGUAGE SQL STABLE CALLED ON NULL INPUT PARALLEL UNSAFE RETURN a + b"); pg_and_generic().verified_stmt(r#"CREATE OR REPLACE FUNCTION increment(i INTEGER) RETURNS INTEGER LANGUAGE plpgsql AS $$ BEGIN RETURN i + 1; END; $$"#); + pg_and_generic().verified_stmt(r#"CREATE OR REPLACE FUNCTION no_arg() RETURNS VOID LANGUAGE plpgsql AS $$ BEGIN DELETE FROM my_table; END; $$"#); + pg_and_generic().verified_stmt(r#"CREATE OR REPLACE FUNCTION return_table(i INTEGER) RETURNS TABLE(id UUID, is_active BOOLEAN) LANGUAGE plpgsql AS $$ BEGIN RETURN QUERY SELECT NULL::UUID, NULL::BOOLEAN; END; $$"#); } #[test] fn parse_incorrect_create_function_parallel() { @@ -3686,13 +4160,14 @@ fn parse_drop_function() { Statement::DropFunction { if_exists: true, func_desc: vec![FunctionDesc { - name: ObjectName(vec![Ident { + name: ObjectName::from(vec![Ident { value: "test_func".to_string(), - quote_style: None + quote_style: None, + span: Span::empty(), }]), args: None }], - option: None + drop_behavior: None } ); @@ -3702,9 +4177,10 @@ fn parse_drop_function() { Statement::DropFunction { if_exists: true, func_desc: vec![FunctionDesc { - name: ObjectName(vec![Ident { + name: ObjectName::from(vec![Ident { value: "test_func".to_string(), - quote_style: None + quote_style: None, + span: Span::empty(), }]), args: Some(vec![ OperateFunctionArg::with_name("a", DataType::Integer(None)), @@ -3712,11 +4188,13 @@ fn parse_drop_function() { mode: Some(ArgMode::In), name: Some("b".into()), data_type: DataType::Integer(None), - default_expr: 
Some(Expr::Value(Value::Number("1".parse().unwrap(), false))), + default_expr: Some(Expr::Value( + (Value::Number("1".parse().unwrap(), false)).with_empty_span() + )), } ]), }], - option: None + drop_behavior: None } ); @@ -3727,9 +4205,10 @@ fn parse_drop_function() { if_exists: true, func_desc: vec![ FunctionDesc { - name: ObjectName(vec![Ident { + name: ObjectName::from(vec![Ident { value: "test_func1".to_string(), - quote_style: None + quote_style: None, + span: Span::empty(), }]), args: Some(vec![ OperateFunctionArg::with_name("a", DataType::Integer(None)), @@ -3737,17 +4216,17 @@ fn parse_drop_function() { mode: Some(ArgMode::In), name: Some("b".into()), data_type: DataType::Integer(None), - default_expr: Some(Expr::Value(Value::Number( - "1".parse().unwrap(), - false - ))), + default_expr: Some(Expr::Value( + (Value::Number("1".parse().unwrap(), false)).with_empty_span() + )), } ]), }, FunctionDesc { - name: ObjectName(vec![Ident { + name: ObjectName::from(vec![Ident { value: "test_func2".to_string(), - quote_style: None + quote_style: None, + span: Span::empty(), }]), args: Some(vec![ OperateFunctionArg::with_name("a", DataType::Varchar(None)), @@ -3755,19 +4234,78 @@ fn parse_drop_function() { mode: Some(ArgMode::In), name: Some("b".into()), data_type: DataType::Integer(None), - default_expr: Some(Expr::Value(Value::Number( - "1".parse().unwrap(), - false - ))), + default_expr: Some(Expr::Value( + (Value::Number("1".parse().unwrap(), false)).with_empty_span() + )), } ]), } ], - option: None + drop_behavior: None } ); } +#[test] +fn parse_drop_domain() { + let sql = "DROP DOMAIN IF EXISTS jpeg_domain"; + assert_eq!( + pg().verified_stmt(sql), + Statement::DropDomain(DropDomain { + if_exists: true, + name: ObjectName::from(vec![Ident { + value: "jpeg_domain".to_string(), + quote_style: None, + span: Span::empty(), + }]), + drop_behavior: None + }) + ); + + let sql = "DROP DOMAIN jpeg_domain"; + assert_eq!( + pg().verified_stmt(sql), + Statement::DropDomain(DropDomain { + if_exists: false, + name: ObjectName::from(vec![Ident { + value: "jpeg_domain".to_string(), + quote_style: None, + span: Span::empty(), + }]), + drop_behavior: None + }) + ); + + let sql = "DROP DOMAIN IF EXISTS jpeg_domain CASCADE"; + assert_eq!( + pg().verified_stmt(sql), + Statement::DropDomain(DropDomain { + if_exists: true, + name: ObjectName::from(vec![Ident { + value: "jpeg_domain".to_string(), + quote_style: None, + span: Span::empty(), + }]), + drop_behavior: Some(DropBehavior::Cascade) + }) + ); + + let sql = "DROP DOMAIN IF EXISTS jpeg_domain RESTRICT"; + + assert_eq!( + pg().verified_stmt(sql), + Statement::DropDomain(DropDomain { + if_exists: true, + name: ObjectName::from(vec![Ident { + value: "jpeg_domain".to_string(), + quote_style: None, + span: Span::empty(), + }]), + drop_behavior: Some(DropBehavior::Restrict) + }) + ); +} + #[test] fn parse_drop_procedure() { let sql = "DROP PROCEDURE IF EXISTS test_proc"; @@ -3776,13 +4314,14 @@ fn parse_drop_procedure() { Statement::DropProcedure { if_exists: true, proc_desc: vec![FunctionDesc { - name: ObjectName(vec![Ident { + name: ObjectName::from(vec![Ident { value: "test_proc".to_string(), - quote_style: None + quote_style: None, + span: Span::empty(), }]), args: None }], - option: None + drop_behavior: None } ); @@ -3792,9 +4331,10 @@ fn parse_drop_procedure() { Statement::DropProcedure { if_exists: true, proc_desc: vec![FunctionDesc { - name: ObjectName(vec![Ident { + name: ObjectName::from(vec![Ident { value: "test_proc".to_string(), - quote_style: 
None + quote_style: None, + span: Span::empty(), }]), args: Some(vec![ OperateFunctionArg::with_name("a", DataType::Integer(None)), @@ -3802,11 +4342,13 @@ fn parse_drop_procedure() { mode: Some(ArgMode::In), name: Some("b".into()), data_type: DataType::Integer(None), - default_expr: Some(Expr::Value(Value::Number("1".parse().unwrap(), false))), + default_expr: Some(Expr::Value( + (Value::Number("1".parse().unwrap(), false)).with_empty_span() + )), } ]), }], - option: None + drop_behavior: None } ); @@ -3817,9 +4359,10 @@ fn parse_drop_procedure() { if_exists: true, proc_desc: vec![ FunctionDesc { - name: ObjectName(vec![Ident { + name: ObjectName::from(vec![Ident { value: "test_proc1".to_string(), - quote_style: None + quote_style: None, + span: Span::empty(), }]), args: Some(vec![ OperateFunctionArg::with_name("a", DataType::Integer(None)), @@ -3827,17 +4370,17 @@ fn parse_drop_procedure() { mode: Some(ArgMode::In), name: Some("b".into()), data_type: DataType::Integer(None), - default_expr: Some(Expr::Value(Value::Number( - "1".parse().unwrap(), - false - ))), + default_expr: Some(Expr::Value( + (Value::Number("1".parse().unwrap(), false)).with_empty_span() + )), } ]), }, FunctionDesc { - name: ObjectName(vec![Ident { + name: ObjectName::from(vec![Ident { value: "test_proc2".to_string(), - quote_style: None + quote_style: None, + span: Span::empty(), }]), args: Some(vec![ OperateFunctionArg::with_name("a", DataType::Varchar(None)), @@ -3845,15 +4388,14 @@ fn parse_drop_procedure() { mode: Some(ArgMode::In), name: Some("b".into()), data_type: DataType::Integer(None), - default_expr: Some(Expr::Value(Value::Number( - "1".parse().unwrap(), - false - ))), + default_expr: Some(Expr::Value( + (Value::Number("1".parse().unwrap(), false)).with_empty_span() + )), } ]), } ], - option: None + drop_behavior: None } ); @@ -3885,57 +4427,76 @@ fn parse_dollar_quoted_string() { }; assert_eq!( - &Expr::Value(Value::DollarQuotedString(DollarQuotedString { - tag: None, - value: "hello".into() - })), + &Expr::Value( + (Value::DollarQuotedString(DollarQuotedString { + tag: None, + value: "hello".into() + })) + .with_empty_span() + ), expr_from_projection(&projection[0]) ); assert_eq!( - &Expr::Value(Value::DollarQuotedString(DollarQuotedString { - tag: Some("tag_name".into()), - value: "world".into() - })), + &Expr::Value( + (Value::DollarQuotedString(DollarQuotedString { + tag: Some("tag_name".into()), + value: "world".into() + })) + .with_empty_span() + ), expr_from_projection(&projection[1]) ); assert_eq!( - &Expr::Value(Value::DollarQuotedString(DollarQuotedString { - tag: None, - value: "Foo$Bar".into() - })), + &Expr::Value( + (Value::DollarQuotedString(DollarQuotedString { + tag: None, + value: "Foo$Bar".into() + })) + .with_empty_span() + ), expr_from_projection(&projection[2]) ); assert_eq!( projection[3], SelectItem::ExprWithAlias { - expr: Expr::Value(Value::DollarQuotedString(DollarQuotedString { - tag: None, - value: "Foo$Bar".into(), - })), + expr: Expr::Value( + (Value::DollarQuotedString(DollarQuotedString { + tag: None, + value: "Foo$Bar".into(), + })) + .with_empty_span() + ), alias: Ident { value: "col_name".into(), quote_style: None, + span: Span::empty(), }, } ); assert_eq!( expr_from_projection(&projection[4]), - &Expr::Value(Value::DollarQuotedString(DollarQuotedString { - tag: None, - value: "".into() - })), + &Expr::Value( + (Value::DollarQuotedString(DollarQuotedString { + tag: None, + value: "".into() + })) + .with_empty_span() + ), ); assert_eq!( 
expr_from_projection(&projection[5]), - &Expr::Value(Value::DollarQuotedString(DollarQuotedString { - tag: Some("tag_name".into()), - value: "".into() - })), + &Expr::Value( + (Value::DollarQuotedString(DollarQuotedString { + tag: Some("tag_name".into()), + value: "".into() + })) + .with_empty_span() + ), ); } @@ -4015,7 +4576,7 @@ fn parse_select_group_by_cube() { #[test] fn parse_truncate() { let truncate = pg_and_generic().verified_stmt("TRUNCATE db.table_name"); - let table_name = ObjectName(vec![Ident::new("db"), Ident::new("table_name")]); + let table_name = ObjectName::from(vec![Ident::new("db"), Ident::new("table_name")]); let table_names = vec![TruncateTableTarget { name: table_name.clone(), }]; @@ -4038,7 +4599,7 @@ fn parse_truncate_with_options() { let truncate = pg_and_generic() .verified_stmt("TRUNCATE TABLE ONLY db.table_name RESTART IDENTITY CASCADE"); - let table_name = ObjectName(vec![Ident::new("db"), Ident::new("table_name")]); + let table_name = ObjectName::from(vec![Ident::new("db"), Ident::new("table_name")]); let table_names = vec![TruncateTableTarget { name: table_name.clone(), }]; @@ -4050,7 +4611,7 @@ fn parse_truncate_with_options() { table: true, only: true, identity: Some(TruncateIdentityOption::Restart), - cascade: Some(TruncateCascadeOption::Cascade), + cascade: Some(CascadeOption::Cascade), on_cluster: None, }, truncate @@ -4063,8 +4624,8 @@ fn parse_truncate_with_table_list() { "TRUNCATE TABLE db.table_name, db.other_table_name RESTART IDENTITY CASCADE", ); - let table_name_a = ObjectName(vec![Ident::new("db"), Ident::new("table_name")]); - let table_name_b = ObjectName(vec![Ident::new("db"), Ident::new("other_table_name")]); + let table_name_a = ObjectName::from(vec![Ident::new("db"), Ident::new("table_name")]); + let table_name_b = ObjectName::from(vec![Ident::new("db"), Ident::new("other_table_name")]); let table_names = vec![ TruncateTableTarget { @@ -4082,7 +4643,7 @@ fn parse_truncate_with_table_list() { table: true, only: false, identity: Some(TruncateIdentityOption::Restart), - cascade: Some(TruncateCascadeOption::Cascade), + cascade: Some(CascadeOption::Cascade), on_cluster: None, }, truncate @@ -4112,7 +4673,6 @@ fn parse_create_table_with_alias() { name, columns, constraints, - with_options: _with_options, if_not_exists: false, external: false, file_format: None, @@ -4126,37 +4686,31 @@ fn parse_create_table_with_alias() { ColumnDef { name: "int8_col".into(), data_type: DataType::Int8(None), - collation: None, options: vec![] }, ColumnDef { name: "int4_col".into(), data_type: DataType::Int4(None), - collation: None, options: vec![] }, ColumnDef { name: "int2_col".into(), data_type: DataType::Int2(None), - collation: None, options: vec![] }, ColumnDef { name: "float8_col".into(), data_type: DataType::Float8, - collation: None, options: vec![] }, ColumnDef { name: "float4_col".into(), data_type: DataType::Float4, - collation: None, options: vec![] }, ColumnDef { name: "bool_col".into(), data_type: DataType::Bool, - collation: None, options: vec![] }, ] @@ -4178,13 +4732,11 @@ fn parse_create_table_with_partition_by() { ColumnDef { name: "a".into(), data_type: DataType::Int(None), - collation: None, options: vec![] }, ColumnDef { name: "b".into(), data_type: DataType::Text, - collation: None, options: vec![] } ], @@ -4231,7 +4783,7 @@ fn parse_join_constraint_unnest_alias() { with_ordinality: false, }, global: false, - join_operator: JoinOperator::Inner(JoinConstraint::On(Expr::BinaryOp { + join_operator: 
JoinOperator::Join(JoinConstraint::On(Expr::BinaryOp { left: Box::new(Expr::Identifier("c1".into())), op: BinaryOperator::Eq, right: Box::new(Expr::Identifier("c2".into())), @@ -4260,22 +4812,26 @@ fn test_simple_postgres_insert_with_alias() { or: None, ignore: false, into: true, - table_name: ObjectName(vec![Ident { + table: TableObject::TableName(ObjectName::from(vec![Ident { value: "test_tables".to_string(), - quote_style: None - }]), + quote_style: None, + span: Span::empty(), + }])), table_alias: Some(Ident { value: "test_table".to_string(), - quote_style: None + quote_style: None, + span: Span::empty(), }), columns: vec![ Ident { value: "id".to_string(), - quote_style: None + quote_style: None, + span: Span::empty(), }, Ident { value: "a".to_string(), - quote_style: None + quote_style: None, + span: Span::empty(), } ], overwrite: false, @@ -4284,31 +4840,30 @@ fn test_simple_postgres_insert_with_alias() { body: Box::new(SetExpr::Values(Values { explicit_row: false, rows: vec![vec![ - Expr::Identifier(Ident { - value: "DEFAULT".to_string(), - quote_style: None - }), - Expr::Value(Value::Number("123".to_string(), false)) + Expr::Identifier(Ident::new("DEFAULT")), + Expr::Value((Value::Number("123".to_string(), false)).with_empty_span()) ]] })), order_by: None, - limit: None, - limit_by: vec![], - offset: None, + limit_clause: None, fetch: None, locks: vec![], for_clause: None, settings: None, format_clause: None, + pipe_operators: vec![], })), + assignments: vec![], partitioned: None, after_columns: vec![], - table: false, + has_table_keyword: false, on: None, returning: None, replace_into: false, priority: None, - insert_alias: None + insert_alias: None, + settings: None, + format_clause: None, }) ) } @@ -4326,22 +4881,26 @@ fn test_simple_postgres_insert_with_alias() { or: None, ignore: false, into: true, - table_name: ObjectName(vec![Ident { + table: TableObject::TableName(ObjectName::from(vec![Ident { value: "test_tables".to_string(), - quote_style: None - }]), + quote_style: None, + span: Span::empty(), + }])), table_alias: Some(Ident { value: "test_table".to_string(), - quote_style: None + quote_style: None, + span: Span::empty(), }), columns: vec![ Ident { value: "id".to_string(), - quote_style: None + quote_style: None, + span: Span::empty(), }, Ident { value: "a".to_string(), - quote_style: None + quote_style: None, + span: Span::empty(), } ], overwrite: false, @@ -4350,34 +4909,33 @@ fn test_simple_postgres_insert_with_alias() { body: Box::new(SetExpr::Values(Values { explicit_row: false, rows: vec![vec![ - Expr::Identifier(Ident { - value: "DEFAULT".to_string(), - quote_style: None - }), - Expr::Value(Value::Number( - bigdecimal::BigDecimal::new(123.into(), 0), - false - )) + Expr::Identifier(Ident::new("DEFAULT")), + Expr::Value( + (Value::Number(bigdecimal::BigDecimal::new(123.into(), 0), false)) + .with_empty_span() + ) ]] })), order_by: None, - limit: None, - limit_by: vec![], - offset: None, + limit_clause: None, fetch: None, locks: vec![], for_clause: None, settings: None, format_clause: None, + pipe_operators: vec![], })), + assignments: vec![], partitioned: None, after_columns: vec![], - table: false, + has_table_keyword: false, on: None, returning: None, replace_into: false, priority: None, - insert_alias: None + insert_alias: None, + settings: None, + format_clause: None, }) ) } @@ -4394,22 +4952,26 @@ fn test_simple_insert_with_quoted_alias() { or: None, ignore: false, into: true, - table_name: ObjectName(vec![Ident { + table: 
TableObject::TableName(ObjectName::from(vec![Ident { value: "test_tables".to_string(), - quote_style: None - }]), + quote_style: None, + span: Span::empty(), + }])), table_alias: Some(Ident { value: "Test_Table".to_string(), - quote_style: Some('"') + quote_style: Some('"'), + span: Span::empty(), }), columns: vec![ Ident { value: "id".to_string(), - quote_style: None + quote_style: None, + span: Span::empty(), }, Ident { value: "a".to_string(), - quote_style: None + quote_style: None, + span: Span::empty(), } ], overwrite: false, @@ -4418,31 +4980,32 @@ fn test_simple_insert_with_quoted_alias() { body: Box::new(SetExpr::Values(Values { explicit_row: false, rows: vec![vec![ - Expr::Identifier(Ident { - value: "DEFAULT".to_string(), - quote_style: None - }), - Expr::Value(Value::SingleQuotedString("0123".to_string())) + Expr::Identifier(Ident::new("DEFAULT")), + Expr::Value( + (Value::SingleQuotedString("0123".to_string())).with_empty_span() + ) ]] })), order_by: None, - limit: None, - limit_by: vec![], - offset: None, + limit_clause: None, fetch: None, locks: vec![], for_clause: None, settings: None, format_clause: None, + pipe_operators: vec![], })), + assignments: vec![], partitioned: None, after_columns: vec![], - table: false, + has_table_keyword: false, on: None, returning: None, replace_into: false, priority: None, insert_alias: None, + settings: None, + format_clause: None, }) ) } @@ -4485,22 +5048,22 @@ fn parse_at_time_zone() { left: Box::new(Expr::AtTimeZone { timestamp: Box::new(Expr::TypedString { data_type: DataType::Timestamp(None, TimezoneInfo::None), - value: "2001-09-28 01:00".to_owned(), + value: Value::SingleQuotedString("2001-09-28 01:00".to_string()), }), time_zone: Box::new(Expr::Cast { kind: CastKind::DoubleColon, - expr: Box::new(Expr::Value(Value::SingleQuotedString( - "America/Los_Angeles".to_owned(), - ))), + expr: Box::new(Expr::Value( + Value::SingleQuotedString("America/Los_Angeles".to_owned()).with_empty_span(), + )), data_type: DataType::Text, format: None, }), }), op: BinaryOperator::Plus, right: Box::new(Expr::Interval(Interval { - value: Box::new(Expr::Value(Value::SingleQuotedString( - "23 hours".to_owned(), - ))), + value: Box::new(Expr::Value( + Value::SingleQuotedString("23 hours".to_owned()).with_empty_span(), + )), leading_field: None, leading_precision: None, last_field: None, @@ -4519,16 +5082,22 @@ fn parse_at_time_zone() { fn parse_create_table_with_options() { let sql = "CREATE TABLE t (c INT) WITH (foo = 'bar', a = 123)"; match pg().verified_stmt(sql) { - Statement::CreateTable(CreateTable { with_options, .. }) => { + Statement::CreateTable(CreateTable { table_options, .. 
}) => { + let with_options = match table_options { + CreateTableOptions::With(options) => options, + _ => unreachable!(), + }; assert_eq!( vec![ SqlOption::KeyValue { key: "foo".into(), - value: Expr::Value(Value::SingleQuotedString("bar".into())), + value: Expr::Value( + (Value::SingleQuotedString("bar".into())).with_empty_span() + ), }, SqlOption::KeyValue { key: "a".into(), - value: Expr::Value(number("123")), + value: Expr::value(number("123")), }, ], with_options @@ -4574,35 +5143,133 @@ fn test_table_unnest_with_ordinality() { #[test] fn test_escaped_string_literal() { match pg().verified_expr(r#"E'\n'"#) { - Expr::Value(Value::EscapedStringLiteral(s)) => { + Expr::Value(ValueWithSpan { + value: Value::EscapedStringLiteral(s), + span: _, + }) => { assert_eq!("\n", s); } _ => unreachable!(), } } +#[test] +fn parse_create_domain() { + let sql1 = "CREATE DOMAIN my_domain AS INTEGER CHECK (VALUE > 0)"; + let expected = Statement::CreateDomain(CreateDomain { + name: ObjectName::from(vec![Ident::new("my_domain")]), + data_type: DataType::Integer(None), + collation: None, + default: None, + constraints: vec![TableConstraint::Check { + name: None, + expr: Box::new(Expr::BinaryOp { + left: Box::new(Expr::Identifier(Ident::new("VALUE"))), + op: BinaryOperator::Gt, + right: Box::new(Expr::Value(test_utils::number("0").into())), + }), + }], + }); + + assert_eq!(pg().verified_stmt(sql1), expected); + + let sql2 = "CREATE DOMAIN my_domain AS INTEGER COLLATE \"en_US\" CHECK (VALUE > 0)"; + let expected = Statement::CreateDomain(CreateDomain { + name: ObjectName::from(vec![Ident::new("my_domain")]), + data_type: DataType::Integer(None), + collation: Some(Ident::with_quote('"', "en_US")), + default: None, + constraints: vec![TableConstraint::Check { + name: None, + expr: Box::new(Expr::BinaryOp { + left: Box::new(Expr::Identifier(Ident::new("VALUE"))), + op: BinaryOperator::Gt, + right: Box::new(Expr::Value(test_utils::number("0").into())), + }), + }], + }); + + assert_eq!(pg().verified_stmt(sql2), expected); + + let sql3 = "CREATE DOMAIN my_domain AS INTEGER DEFAULT 1 CHECK (VALUE > 0)"; + let expected = Statement::CreateDomain(CreateDomain { + name: ObjectName::from(vec![Ident::new("my_domain")]), + data_type: DataType::Integer(None), + collation: None, + default: Some(Expr::Value(test_utils::number("1").into())), + constraints: vec![TableConstraint::Check { + name: None, + expr: Box::new(Expr::BinaryOp { + left: Box::new(Expr::Identifier(Ident::new("VALUE"))), + op: BinaryOperator::Gt, + right: Box::new(Expr::Value(test_utils::number("0").into())), + }), + }], + }); + + assert_eq!(pg().verified_stmt(sql3), expected); + + let sql4 = "CREATE DOMAIN my_domain AS INTEGER COLLATE \"en_US\" DEFAULT 1 CHECK (VALUE > 0)"; + let expected = Statement::CreateDomain(CreateDomain { + name: ObjectName::from(vec![Ident::new("my_domain")]), + data_type: DataType::Integer(None), + collation: Some(Ident::with_quote('"', "en_US")), + default: Some(Expr::Value(test_utils::number("1").into())), + constraints: vec![TableConstraint::Check { + name: None, + expr: Box::new(Expr::BinaryOp { + left: Box::new(Expr::Identifier(Ident::new("VALUE"))), + op: BinaryOperator::Gt, + right: Box::new(Expr::Value(test_utils::number("0").into())), + }), + }], + }); + + assert_eq!(pg().verified_stmt(sql4), expected); + + let sql5 = "CREATE DOMAIN my_domain AS INTEGER CONSTRAINT my_constraint CHECK (VALUE > 0)"; + let expected = Statement::CreateDomain(CreateDomain { + name: ObjectName::from(vec![Ident::new("my_domain")]), + data_type: 
DataType::Integer(None), + collation: None, + default: None, + constraints: vec![TableConstraint::Check { + name: Some(Ident::new("my_constraint")), + expr: Box::new(Expr::BinaryOp { + left: Box::new(Expr::Identifier(Ident::new("VALUE"))), + op: BinaryOperator::Gt, + right: Box::new(Expr::Value(test_utils::number("0").into())), + }), + }], + }); + + assert_eq!(pg().verified_stmt(sql5), expected); +} + #[test] fn parse_create_simple_before_insert_trigger() { let sql = "CREATE TRIGGER check_insert BEFORE INSERT ON accounts FOR EACH ROW EXECUTE FUNCTION check_account_insert"; let expected = Statement::CreateTrigger { + or_alter: false, or_replace: false, is_constraint: false, - name: ObjectName(vec![Ident::new("check_insert")]), + name: ObjectName::from(vec![Ident::new("check_insert")]), period: TriggerPeriod::Before, events: vec![TriggerEvent::Insert], - table_name: ObjectName(vec![Ident::new("accounts")]), + table_name: ObjectName::from(vec![Ident::new("accounts")]), referenced_table_name: None, referencing: vec![], trigger_object: TriggerObject::Row, include_each: true, condition: None, - exec_body: TriggerExecBody { + exec_body: Some(TriggerExecBody { exec_type: TriggerExecBodyType::Function, func_desc: FunctionDesc { - name: ObjectName(vec![Ident::new("check_account_insert")]), + name: ObjectName::from(vec![Ident::new("check_account_insert")]), args: None, }, - }, + }), + statements: None, characteristics: None, }; @@ -4613,12 +5280,13 @@ fn parse_create_simple_before_insert_trigger() { fn parse_create_after_update_trigger_with_condition() { let sql = "CREATE TRIGGER check_update AFTER UPDATE ON accounts FOR EACH ROW WHEN (NEW.balance > 10000) EXECUTE FUNCTION check_account_update"; let expected = Statement::CreateTrigger { + or_alter: false, or_replace: false, is_constraint: false, - name: ObjectName(vec![Ident::new("check_update")]), + name: ObjectName::from(vec![Ident::new("check_update")]), period: TriggerPeriod::After, events: vec![TriggerEvent::Update(vec![])], - table_name: ObjectName(vec![Ident::new("accounts")]), + table_name: ObjectName::from(vec![Ident::new("accounts")]), referenced_table_name: None, referencing: vec![], trigger_object: TriggerObject::Row, @@ -4629,15 +5297,16 @@ fn parse_create_after_update_trigger_with_condition() { Ident::new("balance"), ])), op: BinaryOperator::Gt, - right: Box::new(Expr::Value(number("10000"))), + right: Box::new(Expr::value(number("10000"))), }))), - exec_body: TriggerExecBody { + exec_body: Some(TriggerExecBody { exec_type: TriggerExecBodyType::Function, func_desc: FunctionDesc { - name: ObjectName(vec![Ident::new("check_account_update")]), + name: ObjectName::from(vec![Ident::new("check_account_update")]), args: None, }, - }, + }), + statements: None, characteristics: None, }; @@ -4648,24 +5317,26 @@ fn parse_create_after_update_trigger_with_condition() { fn parse_create_instead_of_delete_trigger() { let sql = "CREATE TRIGGER check_delete INSTEAD OF DELETE ON accounts FOR EACH ROW EXECUTE FUNCTION check_account_deletes"; let expected = Statement::CreateTrigger { + or_alter: false, or_replace: false, is_constraint: false, - name: ObjectName(vec![Ident::new("check_delete")]), + name: ObjectName::from(vec![Ident::new("check_delete")]), period: TriggerPeriod::InsteadOf, events: vec![TriggerEvent::Delete], - table_name: ObjectName(vec![Ident::new("accounts")]), + table_name: ObjectName::from(vec![Ident::new("accounts")]), referenced_table_name: None, referencing: vec![], trigger_object: TriggerObject::Row, include_each: true, condition: 
None, - exec_body: TriggerExecBody { + exec_body: Some(TriggerExecBody { exec_type: TriggerExecBodyType::Function, func_desc: FunctionDesc { - name: ObjectName(vec![Ident::new("check_account_deletes")]), + name: ObjectName::from(vec![Ident::new("check_account_deletes")]), args: None, }, - }, + }), + statements: None, characteristics: None, }; @@ -4676,28 +5347,30 @@ fn parse_create_instead_of_delete_trigger() { fn parse_create_trigger_with_multiple_events_and_deferrable() { let sql = "CREATE CONSTRAINT TRIGGER check_multiple_events BEFORE INSERT OR UPDATE OR DELETE ON accounts DEFERRABLE INITIALLY DEFERRED FOR EACH ROW EXECUTE FUNCTION check_account_changes"; let expected = Statement::CreateTrigger { + or_alter: false, or_replace: false, is_constraint: true, - name: ObjectName(vec![Ident::new("check_multiple_events")]), + name: ObjectName::from(vec![Ident::new("check_multiple_events")]), period: TriggerPeriod::Before, events: vec![ TriggerEvent::Insert, TriggerEvent::Update(vec![]), TriggerEvent::Delete, ], - table_name: ObjectName(vec![Ident::new("accounts")]), + table_name: ObjectName::from(vec![Ident::new("accounts")]), referenced_table_name: None, referencing: vec![], trigger_object: TriggerObject::Row, include_each: true, condition: None, - exec_body: TriggerExecBody { + exec_body: Some(TriggerExecBody { exec_type: TriggerExecBodyType::Function, func_desc: FunctionDesc { - name: ObjectName(vec![Ident::new("check_account_changes")]), + name: ObjectName::from(vec![Ident::new("check_account_changes")]), args: None, }, - }, + }), + statements: None, characteristics: Some(ConstraintCharacteristics { deferrable: Some(true), initially: Some(DeferrableInitial::Deferred), @@ -4712,35 +5385,37 @@ fn parse_create_trigger_with_multiple_events_and_deferrable() { fn parse_create_trigger_with_referencing() { let sql = "CREATE TRIGGER check_referencing BEFORE INSERT ON accounts REFERENCING NEW TABLE AS new_accounts OLD TABLE AS old_accounts FOR EACH ROW EXECUTE FUNCTION check_account_referencing"; let expected = Statement::CreateTrigger { + or_alter: false, or_replace: false, is_constraint: false, - name: ObjectName(vec![Ident::new("check_referencing")]), + name: ObjectName::from(vec![Ident::new("check_referencing")]), period: TriggerPeriod::Before, events: vec![TriggerEvent::Insert], - table_name: ObjectName(vec![Ident::new("accounts")]), + table_name: ObjectName::from(vec![Ident::new("accounts")]), referenced_table_name: None, referencing: vec![ TriggerReferencing { refer_type: TriggerReferencingType::NewTable, is_as: true, - transition_relation_name: ObjectName(vec![Ident::new("new_accounts")]), + transition_relation_name: ObjectName::from(vec![Ident::new("new_accounts")]), }, TriggerReferencing { refer_type: TriggerReferencingType::OldTable, is_as: true, - transition_relation_name: ObjectName(vec![Ident::new("old_accounts")]), + transition_relation_name: ObjectName::from(vec![Ident::new("old_accounts")]), }, ], trigger_object: TriggerObject::Row, include_each: true, condition: None, - exec_body: TriggerExecBody { + exec_body: Some(TriggerExecBody { exec_type: TriggerExecBodyType::Function, func_desc: FunctionDesc { - name: ObjectName(vec![Ident::new("check_account_referencing")]), + name: ObjectName::from(vec![Ident::new("check_account_referencing")]), args: None, }, - }, + }), + statements: None, characteristics: None, }; @@ -4760,7 +5435,7 @@ fn parse_create_trigger_invalid_cases() { ), ( "CREATE TRIGGER check_update TOMORROW UPDATE ON accounts EXECUTE FUNCTION check_account_update", - 
"Expected: one of BEFORE or AFTER or INSTEAD, found: TOMORROW" + "Expected: one of FOR or BEFORE or AFTER or INSTEAD, found: TOMORROW" ), ( "CREATE TRIGGER check_update BEFORE SAVE ON accounts EXECUTE FUNCTION check_account_update", @@ -4796,8 +5471,8 @@ fn parse_drop_trigger() { pg().verified_stmt(sql), Statement::DropTrigger { if_exists, - trigger_name: ObjectName(vec![Ident::new("check_update")]), - table_name: ObjectName(vec![Ident::new("table_name")]), + trigger_name: ObjectName::from(vec![Ident::new("check_update")]), + table_name: Some(ObjectName::from(vec![Ident::new("table_name")])), option } ); @@ -4910,30 +5585,27 @@ fn parse_trigger_related_functions() { if_not_exists: false, transient: false, volatile: false, - name: ObjectName(vec![Ident::new("emp")]), + iceberg: false, + name: ObjectName::from(vec![Ident::new("emp")]), columns: vec![ ColumnDef { name: "empname".into(), data_type: DataType::Text, - collation: None, options: vec![], }, ColumnDef { name: "salary".into(), data_type: DataType::Integer(None), - collation: None, options: vec![], }, ColumnDef { name: "last_date".into(), data_type: DataType::Timestamp(None, TimezoneInfo::None), - collation: None, options: vec![], }, ColumnDef { name: "last_user".into(), data_type: DataType::Text, - collation: None, options: vec![], }, ], @@ -4945,19 +5617,13 @@ fn parse_trigger_related_functions() { storage: None, location: None }), - table_properties: vec![], - with_options: vec![], file_format: None, location: None, query: None, without_rowid: false, like: None, clone: None, - engine: None, comment: None, - auto_increment_offset: None, - default_charset: None, - collation: None, on_commit: None, on_cluster: None, primary_key: None, @@ -4965,7 +5631,7 @@ fn parse_trigger_related_functions() { partition_by: None, cluster_by: None, clustered_by: None, - options: None, + inherits: None, strict: false, copy_grants: false, enable_schema_evolution: None, @@ -4976,6 +5642,12 @@ fn parse_trigger_related_functions() { with_aggregation_policy: None, with_row_access_policy: None, with_tags: None, + base_location: None, + external_volume: None, + catalog: None, + catalog_sync: None, + storage_serialization_policy: None, + table_options: CreateTableOptions::None } ); @@ -4983,16 +5655,17 @@ fn parse_trigger_related_functions() { assert_eq!( create_function, - Statement::CreateFunction { + Statement::CreateFunction(CreateFunction { + or_alter: false, or_replace: false, temporary: false, if_not_exists: false, - name: ObjectName(vec![Ident::new("emp_stamp")]), - args: None, + name: ObjectName::from(vec![Ident::new("emp_stamp")]), + args: Some(vec![]), return_type: Some(DataType::Trigger), function_body: Some( CreateFunctionBody::AsBeforeOptions( - Expr::Value( + Expr::Value(( Value::DollarQuotedString( DollarQuotedString { value: "\n BEGIN\n -- Check that empname and salary are given\n IF NEW.empname IS NULL THEN\n RAISE EXCEPTION 'empname cannot be null';\n END IF;\n IF NEW.salary IS NULL THEN\n RAISE EXCEPTION '% cannot have null salary', NEW.empname;\n END IF;\n\n -- Who works for us when they must pay for it?\n IF NEW.salary < 0 THEN\n RAISE EXCEPTION '% cannot have a negative salary', NEW.empname;\n END IF;\n\n -- Remember who changed the payroll when\n NEW.last_date := current_timestamp;\n NEW.last_user := current_user;\n RETURN NEW;\n END;\n ".to_owned(), @@ -5000,8 +5673,8 @@ fn parse_trigger_related_functions() { "emp_stamp".to_owned(), ), }, - ), - ), + ) + ).with_empty_span()), ), ), behavior: None, @@ -5013,31 +5686,33 @@ fn 
parse_trigger_related_functions() { options: None, remote_connection: None } - ); + )); // Check the third statement assert_eq!( create_trigger, Statement::CreateTrigger { + or_alter: false, or_replace: false, is_constraint: false, - name: ObjectName(vec![Ident::new("emp_stamp")]), + name: ObjectName::from(vec![Ident::new("emp_stamp")]), period: TriggerPeriod::Before, events: vec![TriggerEvent::Insert, TriggerEvent::Update(vec![])], - table_name: ObjectName(vec![Ident::new("emp")]), + table_name: ObjectName::from(vec![Ident::new("emp")]), referenced_table_name: None, referencing: vec![], trigger_object: TriggerObject::Row, include_each: true, condition: None, - exec_body: TriggerExecBody { + exec_body: Some(TriggerExecBody { exec_type: TriggerExecBodyType::Function, func_desc: FunctionDesc { - name: ObjectName(vec![Ident::new("emp_stamp")]), + name: ObjectName::from(vec![Ident::new("emp_stamp")]), args: None, } - }, + }), + statements: None, characteristics: None } ); @@ -5047,8 +5722,8 @@ fn parse_trigger_related_functions() { drop_trigger, Statement::DropTrigger { if_exists: false, - trigger_name: ObjectName(vec![Ident::new("emp_stamp")]), - table_name: ObjectName(vec![Ident::new("emp")]), + trigger_name: ObjectName::from(vec![Ident::new("emp_stamp")]), + table_name: Some(ObjectName::from(vec![Ident::new("emp")])), option: None } ); @@ -5068,7 +5743,10 @@ fn test_unicode_string_literal() { ]; for (input, expected) in pairs { match pg_and_generic().verified_expr(input) { - Expr::Value(Value::UnicodeStringLiteral(s)) => { + Expr::Value(ValueWithSpan { + value: Value::UnicodeStringLiteral(s), + span: _, + }) => { assert_eq!(expected, s); } _ => unreachable!(), @@ -5084,12 +5762,17 @@ fn check_arrow_precedence(sql: &str, arrow_operator: BinaryOperator) { left: Box::new(Expr::Identifier(Ident { value: "foo".to_string(), quote_style: None, + span: Span::empty(), })), op: arrow_operator, - right: Box::new(Expr::Value(Value::SingleQuotedString("bar".to_string()))), + right: Box::new(Expr::Value( + (Value::SingleQuotedString("bar".to_string())).with_empty_span() + )), }), op: BinaryOperator::Eq, - right: Box::new(Expr::Value(Value::SingleQuotedString("spam".to_string()))), + right: Box::new(Expr::Value( + (Value::SingleQuotedString("spam".to_string())).with_empty_span() + )), } ) } @@ -5114,11 +5797,14 @@ fn arrow_cast_precedence() { left: Box::new(Expr::Identifier(Ident { value: "foo".to_string(), quote_style: None, + span: Span::empty(), })), op: BinaryOperator::Arrow, right: Box::new(Expr::Cast { kind: CastKind::DoubleColon, - expr: Box::new(Expr::Value(Value::SingleQuotedString("bar".to_string()))), + expr: Box::new(Expr::Value( + (Value::SingleQuotedString("bar".to_string())).with_empty_span() + )), data_type: DataType::Text, format: None, }), @@ -5128,15 +5814,8 @@ fn arrow_cast_precedence() { #[test] fn parse_create_type_as_enum() { - let statement = pg().one_statement_parses_to( - r#"CREATE TYPE public.my_type AS ENUM ( - 'label1', - 'label2', - 'label3', - 'label4' - );"#, - "CREATE TYPE public.my_type AS ENUM ('label1', 'label2', 'label3', 'label4')", - ); + let sql = "CREATE TYPE public.my_type AS ENUM ('label1', 'label2', 'label3', 'label4')"; + let statement = pg_and_generic().verified_stmt(sql); match statement { Statement::CreateType { name, @@ -5151,6 +5830,132 @@ fn parse_create_type_as_enum() { labels ); } + _ => unreachable!("{:?} should parse to Statement::CreateType", sql), + } +} + +#[test] +fn parse_alter_type() { + struct TestCase { + sql: &'static str, + name: &'static 
str, + operation: AlterTypeOperation, + } + vec![ + TestCase { + sql: "ALTER TYPE public.my_type RENAME TO my_new_type", + name: "public.my_type", + operation: AlterTypeOperation::Rename(AlterTypeRename { + new_name: Ident::new("my_new_type"), + }), + }, + TestCase { + sql: "ALTER TYPE public.my_type ADD VALUE IF NOT EXISTS 'label3.5' BEFORE 'label4'", + name: "public.my_type", + operation: AlterTypeOperation::AddValue(AlterTypeAddValue { + if_not_exists: true, + value: Ident::with_quote('\'', "label3.5"), + position: Some(AlterTypeAddValuePosition::Before(Ident::with_quote( + '\'', "label4", + ))), + }), + }, + TestCase { + sql: "ALTER TYPE public.my_type ADD VALUE 'label3.5' BEFORE 'label4'", + name: "public.my_type", + operation: AlterTypeOperation::AddValue(AlterTypeAddValue { + if_not_exists: false, + value: Ident::with_quote('\'', "label3.5"), + position: Some(AlterTypeAddValuePosition::Before(Ident::with_quote( + '\'', "label4", + ))), + }), + }, + TestCase { + sql: "ALTER TYPE public.my_type ADD VALUE IF NOT EXISTS 'label3.5' AFTER 'label3'", + name: "public.my_type", + operation: AlterTypeOperation::AddValue(AlterTypeAddValue { + if_not_exists: true, + value: Ident::with_quote('\'', "label3.5"), + position: Some(AlterTypeAddValuePosition::After(Ident::with_quote( + '\'', "label3", + ))), + }), + }, + TestCase { + sql: "ALTER TYPE public.my_type ADD VALUE 'label3.5' AFTER 'label3'", + name: "public.my_type", + operation: AlterTypeOperation::AddValue(AlterTypeAddValue { + if_not_exists: false, + value: Ident::with_quote('\'', "label3.5"), + position: Some(AlterTypeAddValuePosition::After(Ident::with_quote( + '\'', "label3", + ))), + }), + }, + TestCase { + sql: "ALTER TYPE public.my_type ADD VALUE IF NOT EXISTS 'label5'", + name: "public.my_type", + operation: AlterTypeOperation::AddValue(AlterTypeAddValue { + if_not_exists: true, + value: Ident::with_quote('\'', "label5"), + position: None, + }), + }, + TestCase { + sql: "ALTER TYPE public.my_type ADD VALUE 'label5'", + name: "public.my_type", + operation: AlterTypeOperation::AddValue(AlterTypeAddValue { + if_not_exists: false, + value: Ident::with_quote('\'', "label5"), + position: None, + }), + }, + ] + .into_iter() + .enumerate() + .for_each(|(index, tc)| { + let statement = pg_and_generic().verified_stmt(tc.sql); + if let Statement::AlterType(AlterType { name, operation }) = statement { + assert_eq!(tc.name, name.to_string(), "TestCase[{index}].name"); + assert_eq!(tc.operation, operation, "TestCase[{index}].operation"); + } else { + unreachable!("{:?} should parse to Statement::AlterType", tc.sql); + } + }); +} + +#[test] +fn parse_bitstring_literal() { + let select = pg_and_generic().verified_only_select("SELECT B'111'"); + assert_eq!( + select.projection, + vec![SelectItem::UnnamedExpr(Expr::Value( + (Value::SingleQuotedByteStringLiteral("111".to_string())).with_empty_span() + ))] + ); +} + +#[test] +fn parse_varbit_datatype() { + match pg_and_generic().verified_stmt("CREATE TABLE foo (x VARBIT, y VARBIT(42))") { + Statement::CreateTable(CreateTable { columns, .. 
}) => { + assert_eq!( + columns, + vec![ + ColumnDef { + name: "x".into(), + data_type: DataType::VarBit(None), + options: vec![], + }, + ColumnDef { + name: "y".into(), + data_type: DataType::VarBit(Some(42)), + options: vec![], + } + ] + ); + } _ => unreachable!(), } } diff --git a/tests/sqlparser_redshift.rs b/tests/sqlparser_redshift.rs index a25d50605..be2b67223 100644 --- a/tests/sqlparser_redshift.rs +++ b/tests/sqlparser_redshift.rs @@ -18,6 +18,7 @@ #[macro_use] mod test_utils; +use sqlparser::tokenizer::Span; use test_utils::*; use sqlparser::ast::*; @@ -31,30 +32,25 @@ fn test_square_brackets_over_db_schema_table_name() { select.projection[0], SelectItem::UnnamedExpr(Expr::Identifier(Ident { value: "col1".to_string(), - quote_style: Some('[') + quote_style: Some('['), + span: Span::empty(), })), ); assert_eq!( select.from[0], TableWithJoins { - relation: TableFactor::Table { - name: ObjectName(vec![ - Ident { - value: "test_schema".to_string(), - quote_style: Some('[') - }, - Ident { - value: "test_table".to_string(), - quote_style: Some('[') - } - ]), - alias: None, - args: None, - with_hints: vec![], - version: None, - partitions: vec![], - with_ordinality: false, - }, + relation: table_from_name(ObjectName::from(vec![ + Ident { + value: "test_schema".to_string(), + quote_style: Some('['), + span: Span::empty(), + }, + Ident { + value: "test_table".to_string(), + quote_style: Some('['), + span: Span::empty(), + } + ])), joins: vec![], } ); @@ -78,30 +74,25 @@ fn test_double_quotes_over_db_schema_table_name() { select.projection[0], SelectItem::UnnamedExpr(Expr::Identifier(Ident { value: "col1".to_string(), - quote_style: Some('"') + quote_style: Some('"'), + span: Span::empty(), })), ); assert_eq!( select.from[0], TableWithJoins { - relation: TableFactor::Table { - name: ObjectName(vec![ - Ident { - value: "test_schema".to_string(), - quote_style: Some('"') - }, - Ident { - value: "test_table".to_string(), - quote_style: Some('"') - } - ]), - alias: None, - args: None, - with_hints: vec![], - version: None, - partitions: vec![], - with_ordinality: false, - }, + relation: table_from_name(ObjectName::from(vec![ + Ident { + value: "test_schema".to_string(), + quote_style: Some('"'), + span: Span::empty(), + }, + Ident { + value: "test_table".to_string(), + quote_style: Some('"'), + span: Span::empty(), + } + ])), joins: vec![], } ); @@ -121,10 +112,12 @@ fn parse_delimited_identifiers() { args, with_hints, version, - with_ordinality: _, - partitions: _, + .. 
} => { - assert_eq!(vec![Ident::with_quote('"', "a table")], name.0); + assert_eq!( + ObjectName::from(vec![Ident::with_quote('"', "a table")]), + name + ); assert_eq!(Ident::with_quote('"', "alias"), alias.unwrap().name); assert!(args.is_none()); assert!(with_hints.is_empty()); @@ -143,7 +136,8 @@ fn parse_delimited_identifiers() { ); assert_eq!( &Expr::Function(Function { - name: ObjectName(vec![Ident::with_quote('"', "myfun")]), + name: ObjectName::from(vec![Ident::with_quote('"', "myfun")]), + uses_odbc_syntax: false, parameters: FunctionArguments::None, args: FunctionArguments::List(FunctionArgumentList { duplicate_treatment: None, @@ -166,6 +160,8 @@ fn parse_delimited_identifiers() { } redshift().verified_stmt(r#"CREATE TABLE "foo" ("bar" "int")"#); + // An alias starting with a number + redshift().verified_stmt(r#"CREATE TABLE "foo" ("1" INT)"#); redshift().verified_stmt(r#"ALTER TABLE foo ADD CONSTRAINT "bar" PRIMARY KEY (baz)"#); //TODO verified_stmt(r#"UPDATE foo SET "bar" = 5"#); } @@ -196,3 +192,213 @@ fn test_create_view_with_no_schema_binding() { redshift_and_generic() .verified_stmt("CREATE VIEW myevent AS SELECT eventname FROM event WITH NO SCHEMA BINDING"); } + +#[test] +fn test_redshift_json_path() { + let dialects = all_dialects_where(|d| d.supports_partiql()); + let sql = "SELECT cust.c_orders[0].o_orderkey FROM customer_orders_lineitem"; + let select = dialects.verified_only_select(sql); + + assert_eq!( + &Expr::JsonAccess { + value: Box::new(Expr::CompoundIdentifier(vec![ + Ident::new("cust"), + Ident::new("c_orders") + ])), + path: JsonPath { + path: vec![ + JsonPathElem::Bracket { + key: Expr::value(number("0")) + }, + JsonPathElem::Dot { + key: "o_orderkey".to_string(), + quoted: false + } + ] + } + }, + expr_from_projection(only(&select.projection)) + ); + + let sql = "SELECT cust.c_orders[0]['id'] FROM customer_orders_lineitem"; + let select = dialects.verified_only_select(sql); + assert_eq!( + &Expr::JsonAccess { + value: Box::new(Expr::CompoundIdentifier(vec![ + Ident::new("cust"), + Ident::new("c_orders") + ])), + path: JsonPath { + path: vec![ + JsonPathElem::Bracket { + key: Expr::value(number("0")) + }, + JsonPathElem::Bracket { + key: Expr::Value( + (Value::SingleQuotedString("id".to_owned())).with_empty_span() + ) + } + ] + } + }, + expr_from_projection(only(&select.projection)) + ); + + let sql = "SELECT db1.sc1.tbl1.col1[0]['id'] FROM customer_orders_lineitem"; + let select = dialects.verified_only_select(sql); + assert_eq!( + &Expr::JsonAccess { + value: Box::new(Expr::CompoundIdentifier(vec![ + Ident::new("db1"), + Ident::new("sc1"), + Ident::new("tbl1"), + Ident::new("col1") + ])), + path: JsonPath { + path: vec![ + JsonPathElem::Bracket { + key: Expr::value(number("0")) + }, + JsonPathElem::Bracket { + key: Expr::Value( + (Value::SingleQuotedString("id".to_owned())).with_empty_span() + ) + } + ] + } + }, + expr_from_projection(only(&select.projection)) + ); + + let sql = r#"SELECT db1.sc1.tbl1.col1[0]."id" FROM customer_orders_lineitem"#; + let select = dialects.verified_only_select(sql); + assert_eq!( + &Expr::JsonAccess { + value: Box::new(Expr::CompoundIdentifier(vec![ + Ident::new("db1"), + Ident::new("sc1"), + Ident::new("tbl1"), + Ident::new("col1") + ])), + path: JsonPath { + path: vec![ + JsonPathElem::Bracket { + key: Expr::value(number("0")) + }, + JsonPathElem::Dot { + key: "id".to_string(), + quoted: true, + } + ] + } + }, + expr_from_projection(only(&select.projection)) + ); +} + +#[test] +fn test_parse_json_path_from() { + let 
dialects = all_dialects_where(|d| d.supports_partiql()); + let select = dialects.verified_only_select("SELECT * FROM src[0].a AS a"); + match &select.from[0].relation { + TableFactor::Table { + name, json_path, .. + } => { + assert_eq!(name, &ObjectName::from(vec![Ident::new("src")])); + assert_eq!( + json_path, + &Some(JsonPath { + path: vec![ + JsonPathElem::Bracket { + key: Expr::value(number("0")) + }, + JsonPathElem::Dot { + key: "a".to_string(), + quoted: false + } + ] + }) + ); + } + _ => panic!(), + } + + let select = dialects.verified_only_select("SELECT * FROM src[0].a[1].b AS a"); + match &select.from[0].relation { + TableFactor::Table { + name, json_path, .. + } => { + assert_eq!(name, &ObjectName::from(vec![Ident::new("src")])); + assert_eq!( + json_path, + &Some(JsonPath { + path: vec![ + JsonPathElem::Bracket { + key: Expr::value(number("0")) + }, + JsonPathElem::Dot { + key: "a".to_string(), + quoted: false + }, + JsonPathElem::Bracket { + key: Expr::Value( + (Value::Number("1".parse().unwrap(), false)).with_empty_span() + ) + }, + JsonPathElem::Dot { + key: "b".to_string(), + quoted: false + }, + ] + }) + ); + } + _ => panic!(), + } + + let select = dialects.verified_only_select("SELECT * FROM src.a.b"); + match &select.from[0].relation { + TableFactor::Table { + name, json_path, .. + } => { + assert_eq!( + name, + &ObjectName::from(vec![Ident::new("src"), Ident::new("a"), Ident::new("b")]) + ); + assert_eq!(json_path, &None); + } + _ => panic!(), + } +} + +#[test] +fn test_parse_select_numbered_columns() { + // An alias starting with a number + redshift_and_generic().verified_stmt(r#"SELECT 1 AS "1" FROM a"#); + redshift_and_generic().verified_stmt(r#"SELECT 1 AS "1abc" FROM a"#); +} + +#[test] +fn test_parse_nested_quoted_identifier() { + redshift().verified_stmt(r#"SELECT 1 AS ["1"] FROM a"#); + redshift().verified_stmt(r#"SELECT 1 AS ["[="] FROM a"#); + redshift().verified_stmt(r#"SELECT 1 AS ["=]"] FROM a"#); + redshift().verified_stmt(r#"SELECT 1 AS ["a[b]"] FROM a"#); + // trim spaces + redshift().one_statement_parses_to(r#"SELECT 1 AS [ " 1 " ]"#, r#"SELECT 1 AS [" 1 "]"#); + // invalid query + assert!(redshift() + .parse_sql_statements(r#"SELECT 1 AS ["1]"#) + .is_err()); +} + +#[test] +fn parse_extract_single_quotes() { + let sql = "SELECT EXTRACT('month' FROM my_timestamp) FROM my_table"; + redshift().verified_stmt(sql); +} + +#[test] +fn parse_string_literal_backslash_escape() { + redshift().one_statement_parses_to(r#"SELECT 'l\'auto'"#, "SELECT 'l''auto'"); +} diff --git a/tests/sqlparser_snowflake.rs b/tests/sqlparser_snowflake.rs index c17c7b958..52be31435 100644 --- a/tests/sqlparser_snowflake.rs +++ b/tests/sqlparser_snowflake.rs @@ -19,9 +19,8 @@ //! Test SQL syntax specific to Snowflake. The parser based on the //! generic dialect is also tested (on the inputs it can handle). 
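For readers tracking the AST changes exercised throughout these test diffs, here is a minimal standalone sketch of the constructor style the suite migrates to — `ObjectName::from` for qualified names and span-carrying values via `with_empty_span` — using only `sqlparser` items that already appear in this diff:

```rust
use sqlparser::ast::{Expr, Ident, ObjectName, Value};

fn constructor_style_sketch() {
    // Qualified names are built through From<Vec<Ident>> instead of the
    // former ObjectName(vec![...]) tuple-struct constructor.
    let table = ObjectName::from(vec![Ident::new("test_schema"), Ident::new("test_table")]);
    assert_eq!(table.to_string(), "test_schema.test_table");

    // Literal values now carry a source span; the tests attach an empty one.
    let literal = Expr::Value(Value::SingleQuotedString("bar".to_string()).with_empty_span());
    assert_eq!(literal.to_string(), "'bar'");
}
```

The `span: Span::empty()` fields added to expected `Ident`s in these hunks serve the same purpose when the struct is spelled out field by field.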
-use sqlparser::ast::helpers::stmt_data_loading::{ - DataLoadingOption, DataLoadingOptionType, StageLoadSelectItem, -}; +use sqlparser::ast::helpers::key_value_options::{KeyValueOption, KeyValueOptionType}; +use sqlparser::ast::helpers::stmt_data_loading::{StageLoadSelectItem, StageLoadSelectItemKind}; use sqlparser::ast::*; use sqlparser::dialect::{Dialect, GenericDialect, SnowflakeDialect}; use sqlparser::parser::{ParserError, ParserOptions}; @@ -346,7 +345,6 @@ fn test_snowflake_create_table_column_comment() { name: None, option: ColumnOption::Comment("some comment".to_string()) }], - collation: None }], columns ) @@ -355,6 +353,15 @@ fn test_snowflake_create_table_column_comment() { } } +#[test] +fn test_snowflake_create_table_on_commit() { + snowflake().verified_stmt( + r#"CREATE LOCAL TEMPORARY TABLE "AAA"."foo" ("bar" INTEGER) ON COMMIT PRESERVE ROWS"#, + ); + snowflake().verified_stmt(r#"CREATE TABLE "AAA"."foo" ("bar" INTEGER) ON COMMIT DELETE ROWS"#); + snowflake().verified_stmt(r#"CREATE TABLE "AAA"."foo" ("bar" INTEGER) ON COMMIT DROP"#); +} + #[test] fn test_snowflake_create_local_table() { match snowflake().verified_stmt("CREATE TABLE my_table (a INT)") { @@ -463,9 +470,22 @@ fn test_snowflake_create_table_cluster_by() { #[test] fn test_snowflake_create_table_comment() { match snowflake().verified_stmt("CREATE TABLE my_table (a INT) COMMENT = 'some comment'") { - Statement::CreateTable(CreateTable { name, comment, .. }) => { + Statement::CreateTable(CreateTable { + name, + table_options, + .. + }) => { assert_eq!("my_table", name.to_string()); - assert_eq!("some comment", comment.unwrap().to_string()); + let plain_options = match table_options { + CreateTableOptions::Plain(options) => options, + _ => unreachable!(), + }; + let comment = match plain_options.first().unwrap() { + SqlOption::Comment(CommentDef::WithEq(c)) + | SqlOption::Comment(CommentDef::WithoutEq(c)) => c, + _ => unreachable!(), + }; + assert_eq!("some comment", comment); } _ => unreachable!(), } @@ -544,7 +564,6 @@ fn test_snowflake_create_table_with_autoincrement_columns() { ColumnDef { name: "a".into(), data_type: DataType::Int(None), - collation: None, options: vec![ColumnOptionDef { name: None, option: ColumnOption::Identity(IdentityPropertyKind::Autoincrement( @@ -558,15 +577,14 @@ fn test_snowflake_create_table_with_autoincrement_columns() { ColumnDef { name: "b".into(), data_type: DataType::Int(None), - collation: None, options: vec![ColumnOptionDef { name: None, option: ColumnOption::Identity(IdentityPropertyKind::Autoincrement( IdentityProperty { parameters: Some(IdentityPropertyFormatKind::FunctionCall( IdentityParameters { - seed: Expr::Value(number("100")), - increment: Expr::Value(number("1")), + seed: Expr::value(number("100")), + increment: Expr::value(number("1")), } )), order: Some(IdentityPropertyOrder::NoOrder), @@ -577,7 +595,6 @@ fn test_snowflake_create_table_with_autoincrement_columns() { ColumnDef { name: "c".into(), data_type: DataType::Int(None), - collation: None, options: vec![ColumnOptionDef { name: None, option: ColumnOption::Identity(IdentityPropertyKind::Identity( @@ -591,7 +608,6 @@ fn test_snowflake_create_table_with_autoincrement_columns() { ColumnDef { name: "d".into(), data_type: DataType::Int(None), - collation: None, options: vec![ColumnOptionDef { name: None, option: ColumnOption::Identity(IdentityPropertyKind::Identity( @@ -599,8 +615,12 @@ fn test_snowflake_create_table_with_autoincrement_columns() { parameters: Some( IdentityPropertyFormatKind::StartAndIncrement( 
IdentityParameters { - seed: Expr::Value(number("100")), - increment: Expr::Value(number("1")), + seed: Expr::Value( + (number("100")).with_empty_span() + ), + increment: Expr::Value( + (number("1")).with_empty_span() + ), } ) ), @@ -625,8 +645,12 @@ fn test_snowflake_create_table_with_collated_column() { vec![ColumnDef { name: "a".into(), data_type: DataType::Text, - collation: Some(ObjectName(vec![Ident::with_quote('\'', "de_DE")])), - options: vec![] + options: vec![ColumnOptionDef { + name: None, + option: ColumnOption::Collation(ObjectName::from(vec![Ident::with_quote( + '\'', "de_DE" + )])), + }] },] ); } @@ -665,7 +689,6 @@ fn test_snowflake_create_table_with_columns_masking_policy() { vec![ColumnDef { name: "a".into(), data_type: DataType::Int(None), - collation: None, options: vec![ColumnOptionDef { name: None, option: ColumnOption::Policy(ColumnPolicy::MaskingPolicy( @@ -700,7 +723,6 @@ fn test_snowflake_create_table_with_columns_projection_policy() { vec![ColumnDef { name: "a".into(), data_type: DataType::Int(None), - collation: None, options: vec![ColumnOptionDef { name: None, option: ColumnOption::Policy(ColumnPolicy::ProjectionPolicy( @@ -738,7 +760,6 @@ fn test_snowflake_create_table_with_columns_tags() { vec![ColumnDef { name: "a".into(), data_type: DataType::Int(None), - collation: None, options: vec![ColumnOptionDef { name: None, option: ColumnOption::Tags(TagsColumnOption { @@ -773,7 +794,6 @@ fn test_snowflake_create_table_with_several_column_options() { ColumnDef { name: "a".into(), data_type: DataType::Int(None), - collation: None, options: vec![ ColumnOptionDef { name: None, @@ -809,8 +829,13 @@ fn test_snowflake_create_table_with_several_column_options() { ColumnDef { name: "b".into(), data_type: DataType::Text, - collation: Some(ObjectName(vec![Ident::with_quote('\'', "de_DE")])), options: vec![ + ColumnOptionDef { + name: None, + option: ColumnOption::Collation(ObjectName::from(vec![ + Ident::with_quote('\'', "de_DE") + ])), + }, ColumnOptionDef { name: None, option: ColumnOption::Policy(ColumnPolicy::ProjectionPolicy( @@ -840,6 +865,81 @@ fn test_snowflake_create_table_with_several_column_options() { } } +#[test] +fn test_snowflake_create_iceberg_table_all_options() { + match snowflake().verified_stmt("CREATE ICEBERG TABLE my_table (a INT, b INT) \ + CLUSTER BY (a, b) EXTERNAL_VOLUME = 'volume' CATALOG = 'SNOWFLAKE' BASE_LOCATION = 'relative/path' CATALOG_SYNC = 'OPEN_CATALOG' \ + STORAGE_SERIALIZATION_POLICY = COMPATIBLE COPY GRANTS CHANGE_TRACKING=TRUE DATA_RETENTION_TIME_IN_DAYS=5 MAX_DATA_EXTENSION_TIME_IN_DAYS=10 \ + WITH AGGREGATION POLICY policy_name WITH ROW ACCESS POLICY policy_name ON (a) WITH TAG (A='TAG A', B='TAG B')") { + Statement::CreateTable(CreateTable { + name, cluster_by, base_location, + external_volume, catalog, catalog_sync, + storage_serialization_policy, change_tracking, + copy_grants, data_retention_time_in_days, + max_data_extension_time_in_days, with_aggregation_policy, + with_row_access_policy, with_tags, .. 
+ }) => { + assert_eq!("my_table", name.to_string()); + assert_eq!( + Some(WrappedCollection::Parentheses(vec![ + Ident::new("a"), + Ident::new("b"), + ])), + cluster_by + ); + assert_eq!("relative/path", base_location.unwrap()); + assert_eq!("volume", external_volume.unwrap()); + assert_eq!("SNOWFLAKE", catalog.unwrap()); + assert_eq!("OPEN_CATALOG", catalog_sync.unwrap()); + assert_eq!(StorageSerializationPolicy::Compatible, storage_serialization_policy.unwrap()); + assert!(change_tracking.unwrap()); + assert!(copy_grants); + assert_eq!(Some(5), data_retention_time_in_days); + assert_eq!(Some(10), max_data_extension_time_in_days); + assert_eq!( + Some("WITH ROW ACCESS POLICY policy_name ON (a)".to_string()), + with_row_access_policy.map(|policy| policy.to_string()) + ); + assert_eq!( + Some("policy_name".to_string()), + with_aggregation_policy.map(|name| name.to_string()) + ); + assert_eq!(Some(vec![ + Tag::new("A".into(), "TAG A".into()), + Tag::new("B".into(), "TAG B".into()), + ]), with_tags); + + } + _ => unreachable!(), + } +} + +#[test] +fn test_snowflake_create_iceberg_table() { + match snowflake() + .verified_stmt("CREATE ICEBERG TABLE my_table (a INT) BASE_LOCATION = 'relative_path'") + { + Statement::CreateTable(CreateTable { + name, + base_location, + .. + }) => { + assert_eq!("my_table", name.to_string()); + assert_eq!("relative_path", base_location.unwrap()); + } + _ => unreachable!(), + } +} + +#[test] +fn test_snowflake_create_iceberg_table_without_location() { + let res = snowflake().parse_sql_statements("CREATE ICEBERG TABLE my_table (a INT)"); + assert_eq!( + ParserError::ParserError("BASE_LOCATION is required for ICEBERG tables".to_string()), + res.unwrap_err() + ); +} + #[test] fn parse_sf_create_or_replace_view_with_comment_missing_equal() { assert!(snowflake_and_generic() @@ -889,6 +989,21 @@ fn parse_sf_create_or_replace_with_comment_for_snowflake() { } } +#[test] +fn parse_sf_create_table_or_view_with_dollar_quoted_comment() { + // Snowflake transforms dollar quoted comments into a common comment in DDL representation of creation + snowflake() + .one_statement_parses_to( + r#"CREATE OR REPLACE TEMPORARY VIEW foo.bar.baz ("COL_1" COMMENT $$comment 1$$) COMMENT = $$view comment$$ AS (SELECT 1)"#, + r#"CREATE OR REPLACE TEMPORARY VIEW foo.bar.baz ("COL_1" COMMENT 'comment 1') COMMENT = 'view comment' AS (SELECT 1)"# + ); + + snowflake().one_statement_parses_to( + r#"CREATE TABLE my_table (a STRING COMMENT $$comment 1$$) COMMENT = $$table comment$$"#, + r#"CREATE TABLE my_table (a STRING COMMENT 'comment 1') COMMENT = 'table comment'"#, + ); +} + #[test] fn test_sf_derived_table_in_parenthesis() { // Nesting a subquery in an extra set of parentheses is non-standard, @@ -1025,9 +1140,9 @@ fn parse_semi_structured_data_traversal() { path: JsonPath { path: vec![JsonPathElem::Bracket { key: Expr::BinaryOp { - left: Box::new(Expr::Value(number("2"))), + left: Box::new(Expr::value(number("2"))), op: BinaryOperator::Plus, - right: Box::new(Expr::Value(number("2"))) + right: Box::new(Expr::value(number("2"))) }, }] }, @@ -1105,7 +1220,7 @@ fn parse_semi_structured_data_traversal() { quoted: false, }, JsonPathElem::Bracket { - key: Expr::Value(number("0")), + key: Expr::value(number("0")), }, JsonPathElem::Dot { key: "bar".to_owned(), @@ -1127,7 +1242,7 @@ fn parse_semi_structured_data_traversal() { path: JsonPath { path: vec![ JsonPathElem::Bracket { - key: Expr::Value(number("0")), + key: Expr::value(number("0")), }, JsonPathElem::Dot { key: "foo".to_owned(), @@ -1172,6 
+1287,32 @@ fn parse_semi_structured_data_traversal() { .to_string(), "sql parser error: Expected: variant object key name, found: 42" ); + + // casting a json access and accessing an array element + assert_eq!( + snowflake().verified_expr("a:b::ARRAY[1]"), + Expr::JsonAccess { + value: Box::new(Expr::Cast { + kind: CastKind::DoubleColon, + data_type: DataType::Array(ArrayElemTypeDef::None), + format: None, + expr: Box::new(Expr::JsonAccess { + value: Box::new(Expr::Identifier(Ident::new("a"))), + path: JsonPath { + path: vec![JsonPathElem::Dot { + key: "b".to_string(), + quoted: false + }] + } + }) + }), + path: JsonPath { + path: vec![JsonPathElem::Bracket { + key: Expr::value(number("1")) + }] + } + } + ); } #[test] @@ -1188,10 +1329,12 @@ fn parse_delimited_identifiers() { args, with_hints, version, - with_ordinality: _, - partitions: _, + .. } => { - assert_eq!(vec![Ident::with_quote('"', "a table")], name.0); + assert_eq!( + ObjectName::from(vec![Ident::with_quote('"', "a table")]), + name + ); assert_eq!(Ident::with_quote('"', "alias"), alias.unwrap().name); assert!(args.is_none()); assert!(with_hints.is_empty()); @@ -1210,7 +1353,8 @@ fn parse_delimited_identifiers() { ); assert_eq!( &Expr::Function(Function { - name: ObjectName(vec![Ident::with_quote('"', "myfun")]), + name: ObjectName::from(vec![Ident::with_quote('"', "myfun")]), + uses_odbc_syntax: false, parameters: FunctionArguments::None, args: FunctionArguments::List(FunctionArgumentList { duplicate_treatment: None, @@ -1281,7 +1425,7 @@ fn test_select_wildcard_with_exclude() { let select = snowflake_and_generic() .verified_only_select("SELECT name.* EXCLUDE department_id FROM employee_table"); let expected = SelectItem::QualifiedWildcard( - ObjectName(vec![Ident::new("name")]), + SelectItemQualifiedWildcardKind::ObjectName(ObjectName::from(vec![Ident::new("name")])), WildcardAdditionalOptions { opt_exclude: Some(ExcludeSelectItem::Single(Ident::new("department_id"))), ..Default::default() @@ -1318,7 +1462,7 @@ fn test_select_wildcard_with_rename() { "SELECT name.* RENAME (department_id AS new_dep, employee_id AS new_emp) FROM employee_table", ); let expected = SelectItem::QualifiedWildcard( - ObjectName(vec![Ident::new("name")]), + SelectItemQualifiedWildcardKind::ObjectName(ObjectName::from(vec![Ident::new("name")])), WildcardAdditionalOptions { opt_rename: Some(RenameSelectItem::Multiple(vec![ IdentWithAlias { @@ -1410,6 +1554,43 @@ fn test_alter_table_swap_with() { }; } +#[test] +fn test_alter_table_clustering() { + let sql = r#"ALTER TABLE tab CLUSTER BY (c1, "c2", TO_DATE(c3))"#; + match alter_table_op(snowflake_and_generic().verified_stmt(sql)) { + AlterTableOperation::ClusterBy { exprs } => { + assert_eq!( + exprs, + [ + Expr::Identifier(Ident::new("c1")), + Expr::Identifier(Ident::with_quote('"', "c2")), + Expr::Function(Function { + name: ObjectName::from(vec![Ident::new("TO_DATE")]), + uses_odbc_syntax: false, + parameters: FunctionArguments::None, + args: FunctionArguments::List(FunctionArgumentList { + args: vec![FunctionArg::Unnamed(FunctionArgExpr::Expr( + Expr::Identifier(Ident::new("c3")) + ))], + duplicate_treatment: None, + clauses: vec![], + }), + filter: None, + null_treatment: None, + over: None, + within_group: vec![] + }) + ], + ); + } + _ => unreachable!(), + } + + snowflake_and_generic().verified_stmt("ALTER TABLE tbl DROP CLUSTERING KEY"); + snowflake_and_generic().verified_stmt("ALTER TABLE tbl SUSPEND RECLUSTER"); + snowflake_and_generic().verified_stmt("ALTER TABLE tbl RESUME RECLUSTER"); +} + 
#[test] fn test_drop_stage() { match snowflake_and_generic().verified_stmt("DROP STAGE s1") { @@ -1512,13 +1693,13 @@ fn parse_snowflake_declare_result_set() { ( "DECLARE res RESULTSET DEFAULT 42", "res", - Some(DeclareAssignment::Default(Expr::Value(number("42")).into())), + Some(DeclareAssignment::Default(Expr::value(number("42")).into())), ), ( "DECLARE res RESULTSET := 42", "res", Some(DeclareAssignment::DuckAssignment( - Expr::Value(number("42")).into(), + Expr::value(number("42")).into(), )), ), ("DECLARE res RESULTSET", "res", None), @@ -1568,8 +1749,8 @@ fn parse_snowflake_declare_exception() { "ex", Some(DeclareAssignment::Expr( Expr::Tuple(vec![ - Expr::Value(number("42")), - Expr::Value(Value::SingleQuotedString("ERROR".to_string())), + Expr::value(number("42")), + Expr::Value((Value::SingleQuotedString("ERROR".to_string())).with_empty_span()), ]) .into(), )), @@ -1605,13 +1786,13 @@ fn parse_snowflake_declare_variable() { "DECLARE profit TEXT DEFAULT 42", "profit", Some(DataType::Text), - Some(DeclareAssignment::Default(Expr::Value(number("42")).into())), + Some(DeclareAssignment::Default(Expr::value(number("42")).into())), ), ( "DECLARE profit DEFAULT 42", "profit", None, - Some(DeclareAssignment::Default(Expr::Value(number("42")).into())), + Some(DeclareAssignment::Default(Expr::value(number("42")).into())), ), ("DECLARE profit TEXT", "profit", Some(DataType::Text), None), ("DECLARE profit", "profit", None, None), @@ -1764,38 +1945,26 @@ fn test_create_stage_with_stage_params() { "", stage_params.endpoint.unwrap() ); - assert!(stage_params - .credentials - .options - .contains(&DataLoadingOption { - option_name: "AWS_KEY_ID".to_string(), - option_type: DataLoadingOptionType::STRING, - value: "1a2b3c".to_string() - })); - assert!(stage_params - .credentials - .options - .contains(&DataLoadingOption { - option_name: "AWS_SECRET_KEY".to_string(), - option_type: DataLoadingOptionType::STRING, - value: "4x5y6z".to_string() - })); - assert!(stage_params - .encryption - .options - .contains(&DataLoadingOption { - option_name: "MASTER_KEY".to_string(), - option_type: DataLoadingOptionType::STRING, - value: "key".to_string() - })); - assert!(stage_params - .encryption - .options - .contains(&DataLoadingOption { - option_name: "TYPE".to_string(), - option_type: DataLoadingOptionType::STRING, - value: "AWS_SSE_KMS".to_string() - })); + assert!(stage_params.credentials.options.contains(&KeyValueOption { + option_name: "AWS_KEY_ID".to_string(), + option_type: KeyValueOptionType::STRING, + value: "1a2b3c".to_string() + })); + assert!(stage_params.credentials.options.contains(&KeyValueOption { + option_name: "AWS_SECRET_KEY".to_string(), + option_type: KeyValueOptionType::STRING, + value: "4x5y6z".to_string() + })); + assert!(stage_params.encryption.options.contains(&KeyValueOption { + option_name: "MASTER_KEY".to_string(), + option_type: KeyValueOptionType::STRING, + value: "key".to_string() + })); + assert!(stage_params.encryption.options.contains(&KeyValueOption { + option_name: "TYPE".to_string(), + option_type: KeyValueOptionType::STRING, + value: "AWS_SSE_KMS".to_string() + })); } _ => unreachable!(), }; @@ -1816,19 +1985,19 @@ fn test_create_stage_with_directory_table_params() { directory_table_params, .. 
} => { - assert!(directory_table_params.options.contains(&DataLoadingOption { + assert!(directory_table_params.options.contains(&KeyValueOption { option_name: "ENABLE".to_string(), - option_type: DataLoadingOptionType::BOOLEAN, + option_type: KeyValueOptionType::BOOLEAN, value: "TRUE".to_string() })); - assert!(directory_table_params.options.contains(&DataLoadingOption { + assert!(directory_table_params.options.contains(&KeyValueOption { option_name: "REFRESH_ON_CREATE".to_string(), - option_type: DataLoadingOptionType::BOOLEAN, + option_type: KeyValueOptionType::BOOLEAN, value: "FALSE".to_string() })); - assert!(directory_table_params.options.contains(&DataLoadingOption { + assert!(directory_table_params.options.contains(&KeyValueOption { option_name: "NOTIFICATION_INTEGRATION".to_string(), - option_type: DataLoadingOptionType::STRING, + option_type: KeyValueOptionType::STRING, value: "some-string".to_string() })); } @@ -1847,19 +2016,19 @@ fn test_create_stage_with_file_format() { match snowflake_without_unescape().verified_stmt(sql) { Statement::CreateStage { file_format, .. } => { - assert!(file_format.options.contains(&DataLoadingOption { + assert!(file_format.options.contains(&KeyValueOption { option_name: "COMPRESSION".to_string(), - option_type: DataLoadingOptionType::ENUM, + option_type: KeyValueOptionType::ENUM, value: "AUTO".to_string() })); - assert!(file_format.options.contains(&DataLoadingOption { + assert!(file_format.options.contains(&KeyValueOption { option_name: "BINARY_FORMAT".to_string(), - option_type: DataLoadingOptionType::ENUM, + option_type: KeyValueOptionType::ENUM, value: "HEX".to_string() })); - assert!(file_format.options.contains(&DataLoadingOption { + assert!(file_format.options.contains(&KeyValueOption { option_name: "ESCAPE".to_string(), - option_type: DataLoadingOptionType::STRING, + option_type: KeyValueOptionType::STRING, value: r#"\\"#.to_string() })); } @@ -1880,14 +2049,14 @@ fn test_create_stage_with_copy_options() { ); match snowflake().verified_stmt(sql) { Statement::CreateStage { copy_options, .. } => { - assert!(copy_options.options.contains(&DataLoadingOption { + assert!(copy_options.options.contains(&KeyValueOption { option_name: "ON_ERROR".to_string(), - option_type: DataLoadingOptionType::ENUM, + option_type: KeyValueOptionType::ENUM, value: "CONTINUE".to_string() })); - assert!(copy_options.options.contains(&DataLoadingOption { + assert!(copy_options.options.contains(&KeyValueOption { option_name: "FORCE".to_string(), - option_type: DataLoadingOptionType::BOOLEAN, + option_type: KeyValueOptionType::BOOLEAN, value: "TRUE".to_string() })); } @@ -1904,20 +2073,25 @@ fn test_copy_into() { ); match snowflake().verified_stmt(sql) { Statement::CopyIntoSnowflake { + kind, into, - from_stage, + from_obj, files, pattern, validation_mode, .. 
} => { + assert_eq!(kind, CopyIntoSnowflakeKind::Table); assert_eq!( into, - ObjectName(vec![Ident::new("my_company"), Ident::new("emp_basic")]) + ObjectName::from(vec![Ident::new("my_company"), Ident::new("emp_basic")]) ); assert_eq!( - from_stage, - ObjectName(vec![Ident::with_quote('\'', "gcs://mybucket/./../a.csv")]) + from_obj, + Some(ObjectName::from(vec![Ident::with_quote( + '\'', + "gcs://mybucket/./../a.csv" + )])) ); assert!(files.is_none()); assert!(pattern.is_none()); @@ -1926,6 +2100,60 @@ fn test_copy_into() { _ => unreachable!(), }; assert_eq!(snowflake().verified_stmt(sql).to_string(), sql); + + let sql = concat!("COPY INTO 's3://a/b/c/data.parquet' ", "FROM db.sc.tbl ", "PARTITION BY ('date=' || to_varchar(dt, 'YYYY-MM-DD') || '/hour=' || to_varchar(date_part(hour, ts)))"); + match snowflake().verified_stmt(sql) { + Statement::CopyIntoSnowflake { + kind, + into, + from_obj, + from_query, + partition, + .. + } => { + assert_eq!(kind, CopyIntoSnowflakeKind::Location); + assert_eq!( + into, + ObjectName::from(vec![Ident::with_quote('\'', "s3://a/b/c/data.parquet")]) + ); + assert_eq!( + from_obj, + Some(ObjectName::from(vec![ + Ident::new("db"), + Ident::new("sc"), + Ident::new("tbl") + ])) + ); + assert!(from_query.is_none()); + assert!(partition.is_some()); + } + _ => unreachable!(), + }; + assert_eq!(snowflake().verified_stmt(sql).to_string(), sql); + + let sql = concat!( + "COPY INTO 's3://a/b/c/data.parquet' ", + "FROM (SELECT * FROM tbl)" + ); + match snowflake().verified_stmt(sql) { + Statement::CopyIntoSnowflake { + kind, + into, + from_obj, + from_query, + .. + } => { + assert_eq!(kind, CopyIntoSnowflakeKind::Location); + assert_eq!( + into, + ObjectName::from(vec![Ident::with_quote('\'', "s3://a/b/c/data.parquet")]) + ); + assert!(from_query.is_some()); + assert!(from_obj.is_none()); + } + _ => unreachable!(), + }; + assert_eq!(snowflake().verified_stmt(sql).to_string(), sql); } #[test] @@ -1941,52 +2169,43 @@ fn test_copy_into_with_stage_params() { match snowflake().verified_stmt(sql) { Statement::CopyIntoSnowflake { - from_stage, + from_obj, stage_params, .. 
} => { //assert_eq!("s3://load/files/", stage_params.url.unwrap()); assert_eq!( - from_stage, - ObjectName(vec![Ident::with_quote('\'', "s3://load/files/")]) + from_obj, + Some(ObjectName::from(vec![Ident::with_quote( + '\'', + "s3://load/files/" + )])) ); assert_eq!("myint", stage_params.storage_integration.unwrap()); assert_eq!( "", stage_params.endpoint.unwrap() ); - assert!(stage_params - .credentials - .options - .contains(&DataLoadingOption { - option_name: "AWS_KEY_ID".to_string(), - option_type: DataLoadingOptionType::STRING, - value: "1a2b3c".to_string() - })); - assert!(stage_params - .credentials - .options - .contains(&DataLoadingOption { - option_name: "AWS_SECRET_KEY".to_string(), - option_type: DataLoadingOptionType::STRING, - value: "4x5y6z".to_string() - })); - assert!(stage_params - .encryption - .options - .contains(&DataLoadingOption { - option_name: "MASTER_KEY".to_string(), - option_type: DataLoadingOptionType::STRING, - value: "key".to_string() - })); - assert!(stage_params - .encryption - .options - .contains(&DataLoadingOption { - option_name: "TYPE".to_string(), - option_type: DataLoadingOptionType::STRING, - value: "AWS_SSE_KMS".to_string() - })); + assert!(stage_params.credentials.options.contains(&KeyValueOption { + option_name: "AWS_KEY_ID".to_string(), + option_type: KeyValueOptionType::STRING, + value: "1a2b3c".to_string() + })); + assert!(stage_params.credentials.options.contains(&KeyValueOption { + option_name: "AWS_SECRET_KEY".to_string(), + option_type: KeyValueOptionType::STRING, + value: "4x5y6z".to_string() + })); + assert!(stage_params.encryption.options.contains(&KeyValueOption { + option_name: "MASTER_KEY".to_string(), + option_type: KeyValueOptionType::STRING, + value: "key".to_string() + })); + assert!(stage_params.encryption.options.contains(&KeyValueOption { + option_name: "TYPE".to_string(), + option_type: KeyValueOptionType::STRING, + value: "AWS_SSE_KMS".to_string() + })); } _ => unreachable!(), }; @@ -2001,13 +2220,16 @@ fn test_copy_into_with_stage_params() { match snowflake().verified_stmt(sql) { Statement::CopyIntoSnowflake { - from_stage, + from_obj, stage_params, .. } => { assert_eq!( - from_stage, - ObjectName(vec![Ident::with_quote('\'', "s3://load/files/")]) + from_obj, + Some(ObjectName::from(vec![Ident::with_quote( + '\'', + "s3://load/files/" + )])) ); assert_eq!("myint", stage_params.storage_integration.unwrap()); } @@ -2030,13 +2252,13 @@ fn test_copy_into_with_files_and_pattern_and_verification() { files, pattern, validation_mode, - from_stage_alias, + from_obj_alias, .. 
} => { assert_eq!(files.unwrap(), vec!["file1.json", "file2.json"]); assert_eq!(pattern.unwrap(), ".*employees0[1-5].csv.gz"); assert_eq!(validation_mode.unwrap(), "RETURN_7_ROWS"); - assert_eq!(from_stage_alias.unwrap(), Ident::new("some_alias")); + assert_eq!(from_obj_alias.unwrap(), Ident::new("some_alias")); } _ => unreachable!(), } @@ -2047,7 +2269,7 @@ fn test_copy_into_with_files_and_pattern_and_verification() { fn test_copy_into_with_transformations() { let sql = concat!( "COPY INTO my_company.emp_basic FROM ", - "(SELECT t1.$1:st AS st, $1:index, t2.$1 FROM @schema.general_finished AS T) ", + "(SELECT t1.$1:st AS st, $1:index, t2.$1, 4, '5' AS const_str FROM @schema.general_finished AS T) ", "FILES = ('file1.json', 'file2.json') ", "PATTERN = '.*employees0[1-5].csv.gz' ", "VALIDATION_MODE = RETURN_7_ROWS" @@ -2055,45 +2277,68 @@ fn test_copy_into_with_transformations() { match snowflake().verified_stmt(sql) { Statement::CopyIntoSnowflake { - from_stage, + from_obj, from_transformations, .. } => { assert_eq!( - from_stage, - ObjectName(vec![Ident::new("@schema"), Ident::new("general_finished")]) + from_obj, + Some(ObjectName::from(vec![ + Ident::new("@schema"), + Ident::new("general_finished") + ])) ); assert_eq!( from_transformations.as_ref().unwrap()[0], - StageLoadSelectItem { + StageLoadSelectItemKind::StageLoadSelectItem(StageLoadSelectItem { alias: Some(Ident::new("t1")), file_col_num: 1, element: Some(Ident::new("st")), item_as: Some(Ident::new("st")) - } + }) ); assert_eq!( from_transformations.as_ref().unwrap()[1], - StageLoadSelectItem { + StageLoadSelectItemKind::StageLoadSelectItem(StageLoadSelectItem { alias: None, file_col_num: 1, element: Some(Ident::new("index")), item_as: None - } + }) ); assert_eq!( from_transformations.as_ref().unwrap()[2], - StageLoadSelectItem { + StageLoadSelectItemKind::StageLoadSelectItem(StageLoadSelectItem { alias: Some(Ident::new("t2")), file_col_num: 1, element: None, item_as: None - } + }) + ); + assert_eq!( + from_transformations.as_ref().unwrap()[3], + StageLoadSelectItemKind::SelectItem(SelectItem::UnnamedExpr(Expr::Value( + Value::Number("4".parse().unwrap(), false).into() + ))) + ); + assert_eq!( + from_transformations.as_ref().unwrap()[4], + StageLoadSelectItemKind::SelectItem(SelectItem::ExprWithAlias { + expr: Expr::Value(Value::SingleQuotedString("5".parse().unwrap()).into()), + alias: Ident::new("const_str".to_string()) + }) ); } _ => unreachable!(), } assert_eq!(snowflake().verified_stmt(sql).to_string(), sql); + + // Test optional AS keyword to denote an alias for the stage + let sql1 = concat!( + "COPY INTO my_company.emp_basic FROM ", + "(SELECT t1.$1:st AS st, $1:index, t2.$1, 4, '5' AS const_str FROM @schema.general_finished T) " + ); + snowflake().parse_sql_statements(sql1).unwrap(); } #[test] @@ -2108,19 +2353,19 @@ fn test_copy_into_file_format() { match snowflake_without_unescape().verified_stmt(sql) { Statement::CopyIntoSnowflake { file_format, .. 
} => { - assert!(file_format.options.contains(&DataLoadingOption { + assert!(file_format.options.contains(&KeyValueOption { option_name: "COMPRESSION".to_string(), - option_type: DataLoadingOptionType::ENUM, + option_type: KeyValueOptionType::ENUM, value: "AUTO".to_string() })); - assert!(file_format.options.contains(&DataLoadingOption { + assert!(file_format.options.contains(&KeyValueOption { option_name: "BINARY_FORMAT".to_string(), - option_type: DataLoadingOptionType::ENUM, + option_type: KeyValueOptionType::ENUM, value: "HEX".to_string() })); - assert!(file_format.options.contains(&DataLoadingOption { + assert!(file_format.options.contains(&KeyValueOption { option_name: "ESCAPE".to_string(), - option_type: DataLoadingOptionType::STRING, + option_type: KeyValueOptionType::STRING, value: r#"\\"#.to_string() })); } @@ -2130,6 +2375,41 @@ fn test_copy_into_file_format() { snowflake_without_unescape().verified_stmt(sql).to_string(), sql ); + + // Test commas in file format + let sql = concat!( + "COPY INTO my_company.emp_basic ", + "FROM 'gcs://mybucket/./../a.csv' ", + "FILES = ('file1.json', 'file2.json') ", + "PATTERN = '.*employees0[1-5].csv.gz' ", + r#"FILE_FORMAT=(COMPRESSION=AUTO, BINARY_FORMAT=HEX, ESCAPE='\\')"# + ); + + match snowflake_without_unescape() + .parse_sql_statements(sql) + .unwrap() + .first() + .unwrap() + { + Statement::CopyIntoSnowflake { file_format, .. } => { + assert!(file_format.options.contains(&KeyValueOption { + option_name: "COMPRESSION".to_string(), + option_type: KeyValueOptionType::ENUM, + value: "AUTO".to_string() + })); + assert!(file_format.options.contains(&KeyValueOption { + option_name: "BINARY_FORMAT".to_string(), + option_type: KeyValueOptionType::ENUM, + value: "HEX".to_string() + })); + assert!(file_format.options.contains(&KeyValueOption { + option_name: "ESCAPE".to_string(), + option_type: KeyValueOptionType::STRING, + value: r#"\\"#.to_string() + })); + } + _ => unreachable!(), + } } #[test] @@ -2144,14 +2424,14 @@ fn test_copy_into_copy_options() { match snowflake().verified_stmt(sql) { Statement::CopyIntoSnowflake { copy_options, .. 
} => { - assert!(copy_options.options.contains(&DataLoadingOption { + assert!(copy_options.options.contains(&KeyValueOption { option_name: "ON_ERROR".to_string(), - option_type: DataLoadingOptionType::ENUM, + option_type: KeyValueOptionType::ENUM, value: "CONTINUE".to_string() })); - assert!(copy_options.options.contains(&DataLoadingOption { + assert!(copy_options.options.contains(&KeyValueOption { option_name: "FORCE".to_string(), - option_type: DataLoadingOptionType::BOOLEAN, + option_type: KeyValueOptionType::BOOLEAN, value: "TRUE".to_string() })); } @@ -2161,29 +2441,53 @@ fn test_copy_into_copy_options() { } #[test] -fn test_snowflake_stage_object_names() { - let allowed_formatted_names = [ - "my_company.emp_basic", - "@namespace.%table_name", - "@namespace.%table_name/path", - "@namespace.stage_name/path", - "@~/path", - ]; +fn test_snowflake_stage_object_names_into_location() { let mut allowed_object_names = [ - ObjectName(vec![Ident::new("my_company"), Ident::new("emp_basic")]), - ObjectName(vec![Ident::new("@namespace"), Ident::new("%table_name")]), - ObjectName(vec![ + ObjectName::from(vec![Ident::new("@namespace"), Ident::new("%table_name")]), + ObjectName::from(vec![ Ident::new("@namespace"), Ident::new("%table_name/path"), ]), - ObjectName(vec![ + ObjectName::from(vec![ Ident::new("@namespace"), Ident::new("stage_name/path"), ]), - ObjectName(vec![Ident::new("@~/path")]), + ObjectName::from(vec![Ident::new("@~/path")]), + ]; + + let allowed_names_into_location = [ + "@namespace.%table_name", + "@namespace.%table_name/path", + "@namespace.stage_name/path", + "@~/path", + ]; + for it in allowed_names_into_location + .iter() + .zip(allowed_object_names.iter_mut()) + { + let (formatted_name, object_name) = it; + let sql = format!( + "COPY INTO {} FROM 'gcs://mybucket/./../a.csv'", + formatted_name + ); + match snowflake().verified_stmt(&sql) { + Statement::CopyIntoSnowflake { into, .. } => { + assert_eq!(into.0, object_name.0) + } + _ => unreachable!(), + } + } +} + +#[test] +fn test_snowflake_stage_object_names_into_table() { + let mut allowed_object_names = [ + ObjectName::from(vec![Ident::new("my_company"), Ident::new("emp_basic")]), + ObjectName::from(vec![Ident::new("emp_basic")]), ]; - for it in allowed_formatted_names + let allowed_names_into_table = ["my_company.emp_basic", "emp_basic"]; + for it in allowed_names_into_table .iter() .zip(allowed_object_names.iter_mut()) { @@ -2206,13 +2510,17 @@ fn test_snowflake_copy_into() { let sql = "COPY INTO a.b FROM @namespace.stage_name"; assert_eq!(snowflake().verified_stmt(sql).to_string(), sql); match snowflake().verified_stmt(sql) { - Statement::CopyIntoSnowflake { - into, from_stage, .. - } => { - assert_eq!(into, ObjectName(vec![Ident::new("a"), Ident::new("b")])); + Statement::CopyIntoSnowflake { into, from_obj, .. } => { + assert_eq!( + into, + ObjectName::from(vec![Ident::new("a"), Ident::new("b")]) + ); assert_eq!( - from_stage, - ObjectName(vec![Ident::new("@namespace"), Ident::new("stage_name")]) + from_obj, + Some(ObjectName::from(vec![ + Ident::new("@namespace"), + Ident::new("stage_name") + ])) ) } _ => unreachable!(), @@ -2224,19 +2532,20 @@ fn test_snowflake_copy_into_stage_name_ends_with_parens() { let sql = "COPY INTO SCHEMA.SOME_MONITORING_SYSTEM FROM (SELECT t.$1:st AS st FROM @schema.general_finished)"; assert_eq!(snowflake().verified_stmt(sql).to_string(), sql); match snowflake().verified_stmt(sql) { - Statement::CopyIntoSnowflake { - into, from_stage, .. 
- } => { + Statement::CopyIntoSnowflake { into, from_obj, .. } => { assert_eq!( into, - ObjectName(vec![ + ObjectName::from(vec![ Ident::new("SCHEMA"), Ident::new("SOME_MONITORING_SYSTEM") ]) ); assert_eq!( - from_stage, - ObjectName(vec![Ident::new("@schema"), Ident::new("general_finished")]) + from_obj, + Some(ObjectName::from(vec![ + Ident::new("@schema"), + Ident::new("general_finished") + ])) ) } _ => unreachable!(), @@ -2252,10 +2561,14 @@ fn test_snowflake_trim() { let select = snowflake().verified_only_select(sql_only_select); assert_eq!( &Expr::Trim { - expr: Box::new(Expr::Value(Value::SingleQuotedString("xyz".to_owned()))), + expr: Box::new(Expr::Value( + (Value::SingleQuotedString("xyz".to_owned())).with_empty_span() + )), trim_where: None, trim_what: None, - trim_characters: Some(vec![Expr::Value(Value::SingleQuotedString("a".to_owned()))]), + trim_characters: Some(vec![Expr::Value( + (Value::SingleQuotedString("a".to_owned())).with_empty_span() + )]), }, expr_from_projection(only(&select.projection)) ); @@ -2273,7 +2586,7 @@ fn test_number_placeholder() { let sql_only_select = "SELECT :1"; let select = snowflake().verified_only_select(sql_only_select); assert_eq!( - &Expr::Value(Value::Placeholder(":1".into())), + &Expr::Value((Value::Placeholder(":1".into())).with_empty_span()), expr_from_projection(only(&select.projection)) ); @@ -2419,7 +2732,7 @@ fn parse_comma_outer_join() { "myudf", [Expr::UnaryOp { op: UnaryOperator::Plus, - expr: Box::new(Expr::Value(number("42"))) + expr: Box::new(Expr::value(number("42"))) }] )), }) @@ -2604,6 +2917,20 @@ fn asof_joins() { "ON s.state = p.state ", "ORDER BY s.observed", )); + + // Test without explicit aliases + #[rustfmt::skip] + snowflake_and_generic().verified_query(concat!( + "SELECT * ", + "FROM snowtime ", + "ASOF JOIN raintime ", + "MATCH_CONDITION (snowtime.observed >= raintime.observed) ", + "ON snowtime.state = raintime.state ", + "ASOF JOIN preciptime ", + "MATCH_CONDITION (showtime.observed >= preciptime.observed) ", + "ON showtime.state = preciptime.state ", + "ORDER BY showtime.observed", + )); } #[test] @@ -2648,17 +2975,17 @@ fn parse_use() { let quote_styles = ['\'', '"', '`']; for object_name in &valid_object_names { // Test single identifier without quotes - std::assert_eq!( + assert_eq!( snowflake().verified_stmt(&format!("USE {}", object_name)), - Statement::Use(Use::Object(ObjectName(vec![Ident::new( + Statement::Use(Use::Object(ObjectName::from(vec![Ident::new( object_name.to_string() )]))) ); for "e in "e_styles { // Test single identifier with different type of quotes - std::assert_eq!( + assert_eq!( snowflake().verified_stmt(&format!("USE {}{}{}", quote, object_name, quote)), - Statement::Use(Use::Object(ObjectName(vec![Ident::with_quote( + Statement::Use(Use::Object(ObjectName::from(vec![Ident::with_quote( quote, object_name.to_string(), )]))) @@ -2668,18 +2995,18 @@ fn parse_use() { for "e in "e_styles { // Test double identifier with different type of quotes - std::assert_eq!( + assert_eq!( snowflake().verified_stmt(&format!("USE {0}CATALOG{0}.{0}my_schema{0}", quote)), - Statement::Use(Use::Object(ObjectName(vec![ + Statement::Use(Use::Object(ObjectName::from(vec![ Ident::with_quote(quote, "CATALOG"), Ident::with_quote(quote, "my_schema") ]))) ); } // Test double identifier without quotes - std::assert_eq!( + assert_eq!( snowflake().verified_stmt("USE mydb.my_schema"), - Statement::Use(Use::Object(ObjectName(vec![ + Statement::Use(Use::Object(ObjectName::from(vec![ Ident::new("mydb"), 
Ident::new("my_schema") ]))) @@ -2687,37 +3014,63 @@ fn parse_use() { for "e in "e_styles { // Test single and double identifier with keyword and different type of quotes - std::assert_eq!( + assert_eq!( snowflake().verified_stmt(&format!("USE DATABASE {0}my_database{0}", quote)), - Statement::Use(Use::Database(ObjectName(vec![Ident::with_quote( + Statement::Use(Use::Database(ObjectName::from(vec![Ident::with_quote( quote, "my_database".to_string(), )]))) ); - std::assert_eq!( + assert_eq!( snowflake().verified_stmt(&format!("USE SCHEMA {0}my_schema{0}", quote)), - Statement::Use(Use::Schema(ObjectName(vec![Ident::with_quote( + Statement::Use(Use::Schema(ObjectName::from(vec![Ident::with_quote( quote, "my_schema".to_string(), )]))) ); - std::assert_eq!( + assert_eq!( snowflake().verified_stmt(&format!("USE SCHEMA {0}CATALOG{0}.{0}my_schema{0}", quote)), - Statement::Use(Use::Schema(ObjectName(vec![ + Statement::Use(Use::Schema(ObjectName::from(vec![ Ident::with_quote(quote, "CATALOG"), Ident::with_quote(quote, "my_schema") ]))) ); + assert_eq!( + snowflake().verified_stmt(&format!("USE ROLE {0}my_role{0}", quote)), + Statement::Use(Use::Role(ObjectName::from(vec![Ident::with_quote( + quote, + "my_role".to_string(), + )]))) + ); + assert_eq!( + snowflake().verified_stmt(&format!("USE WAREHOUSE {0}my_wh{0}", quote)), + Statement::Use(Use::Warehouse(ObjectName::from(vec![Ident::with_quote( + quote, + "my_wh".to_string(), + )]))) + ); } // Test invalid syntax - missing identifier let invalid_cases = ["USE SCHEMA", "USE DATABASE", "USE WAREHOUSE"]; for sql in &invalid_cases { - std::assert_eq!( + assert_eq!( snowflake().parse_sql_statements(sql).unwrap_err(), ParserError::ParserError("Expected: identifier, found: EOF".to_string()), ); } + + snowflake().verified_stmt("USE SECONDARY ROLES ALL"); + snowflake().verified_stmt("USE SECONDARY ROLES NONE"); + snowflake().verified_stmt("USE SECONDARY ROLES r1, r2, r3"); + + // The following is not documented by Snowflake but still works: + snowflake().one_statement_parses_to("USE SECONDARY ROLE ALL", "USE SECONDARY ROLES ALL"); + snowflake().one_statement_parses_to("USE SECONDARY ROLE NONE", "USE SECONDARY ROLES NONE"); + snowflake().one_statement_parses_to( + "USE SECONDARY ROLE r1, r2, r3", + "USE SECONDARY ROLES r1, r2, r3", + ); } #[test] @@ -2761,7 +3114,9 @@ fn parse_view_column_descriptions() { #[test] fn test_parentheses_overflow() { - let max_nesting_level: usize = 30; + // TODO: increase / improve after we fix the recursion limit + // for real (see https://github.com/apache/datafusion-sqlparser-rs/issues/984) + let max_nesting_level: usize = 25; // Verify the recursion check is not too wasteful... 
(num of parentheses - 2 is acceptable) let slack = 2; @@ -2781,3 +3136,905 @@ fn test_parentheses_overflow() { snowflake_with_recursion_limit(max_nesting_level).parse_sql_statements(sql.as_str()); assert_eq!(parsed.err(), Some(ParserError::RecursionLimitExceeded)); } + +#[test] +fn test_show_databases() { + snowflake().verified_stmt("SHOW DATABASES"); + snowflake().verified_stmt("SHOW TERSE DATABASES"); + snowflake().verified_stmt("SHOW DATABASES HISTORY"); + snowflake().verified_stmt("SHOW DATABASES LIKE '%abc%'"); + snowflake().verified_stmt("SHOW DATABASES STARTS WITH 'demo_db'"); + snowflake().verified_stmt("SHOW DATABASES LIMIT 12"); + snowflake() + .verified_stmt("SHOW DATABASES HISTORY LIKE '%aa' STARTS WITH 'demo' LIMIT 20 FROM 'abc'"); + snowflake().verified_stmt("SHOW DATABASES IN ACCOUNT abc"); +} + +#[test] +fn test_parse_show_schemas() { + snowflake().verified_stmt("SHOW SCHEMAS"); + snowflake().verified_stmt("SHOW TERSE SCHEMAS"); + snowflake().verified_stmt("SHOW SCHEMAS IN ACCOUNT"); + snowflake().verified_stmt("SHOW SCHEMAS IN ACCOUNT abc"); + snowflake().verified_stmt("SHOW SCHEMAS IN DATABASE"); + snowflake().verified_stmt("SHOW SCHEMAS IN DATABASE xyz"); + snowflake().verified_stmt("SHOW SCHEMAS HISTORY LIKE '%xa%'"); + snowflake().verified_stmt("SHOW SCHEMAS STARTS WITH 'abc' LIMIT 20"); + snowflake().verified_stmt("SHOW SCHEMAS IN DATABASE STARTS WITH 'abc' LIMIT 20 FROM 'xyz'"); +} + +#[test] +fn test_parse_show_objects() { + snowflake().verified_stmt("SHOW OBJECTS"); + snowflake().verified_stmt("SHOW OBJECTS IN abc"); + snowflake().verified_stmt("SHOW OBJECTS LIKE '%test%' IN abc"); + snowflake().verified_stmt("SHOW OBJECTS IN ACCOUNT"); + snowflake().verified_stmt("SHOW OBJECTS IN DATABASE"); + snowflake().verified_stmt("SHOW OBJECTS IN DATABASE abc"); + snowflake().verified_stmt("SHOW OBJECTS IN SCHEMA"); + snowflake().verified_stmt("SHOW OBJECTS IN SCHEMA abc"); + snowflake().verified_stmt("SHOW TERSE OBJECTS"); + snowflake().verified_stmt("SHOW TERSE OBJECTS IN abc"); + snowflake().verified_stmt("SHOW TERSE OBJECTS LIKE '%test%' IN abc"); + snowflake().verified_stmt("SHOW TERSE OBJECTS LIKE '%test%' IN abc STARTS WITH 'b'"); + snowflake().verified_stmt("SHOW TERSE OBJECTS LIKE '%test%' IN abc STARTS WITH 'b' LIMIT 10"); + snowflake() + .verified_stmt("SHOW TERSE OBJECTS LIKE '%test%' IN abc STARTS WITH 'b' LIMIT 10 FROM 'x'"); + match snowflake().verified_stmt("SHOW TERSE OBJECTS LIKE '%test%' IN abc") { + Statement::ShowObjects(ShowObjects { + terse, + show_options, + }) => { + assert!(terse); + let name = match show_options.show_in { + Some(ShowStatementIn { + parent_name: Some(val), + .. 
+ }) => val.to_string(), + _ => unreachable!(), + }; + assert_eq!("abc", name); + let like = match show_options.filter_position { + Some(ShowStatementFilterPosition::Infix(ShowStatementFilter::Like(val))) => val, + _ => unreachable!(), + }; + assert_eq!("%test%", like); + } + _ => unreachable!(), + } +} + +#[test] +fn test_parse_show_tables() { + snowflake().verified_stmt("SHOW TABLES"); + snowflake().verified_stmt("SHOW TERSE TABLES"); + snowflake().verified_stmt("SHOW TABLES IN ACCOUNT"); + snowflake().verified_stmt("SHOW TABLES IN DATABASE"); + snowflake().verified_stmt("SHOW TABLES IN DATABASE xyz"); + snowflake().verified_stmt("SHOW TABLES IN SCHEMA"); + snowflake().verified_stmt("SHOW TABLES IN SCHEMA xyz"); + snowflake().verified_stmt("SHOW TABLES HISTORY LIKE '%xa%'"); + snowflake().verified_stmt("SHOW TABLES STARTS WITH 'abc' LIMIT 20"); + snowflake().verified_stmt("SHOW TABLES IN SCHEMA STARTS WITH 'abc' LIMIT 20 FROM 'xyz'"); + snowflake().verified_stmt("SHOW EXTERNAL TABLES"); + snowflake().verified_stmt("SHOW EXTERNAL TABLES IN ACCOUNT"); + snowflake().verified_stmt("SHOW EXTERNAL TABLES IN DATABASE"); + snowflake().verified_stmt("SHOW EXTERNAL TABLES IN DATABASE xyz"); + snowflake().verified_stmt("SHOW EXTERNAL TABLES IN SCHEMA"); + snowflake().verified_stmt("SHOW EXTERNAL TABLES IN SCHEMA xyz"); + snowflake().verified_stmt("SHOW EXTERNAL TABLES STARTS WITH 'abc' LIMIT 20"); + snowflake() + .verified_stmt("SHOW EXTERNAL TABLES IN SCHEMA STARTS WITH 'abc' LIMIT 20 FROM 'xyz'"); +} + +#[test] +fn test_show_views() { + snowflake().verified_stmt("SHOW VIEWS"); + snowflake().verified_stmt("SHOW TERSE VIEWS"); + snowflake().verified_stmt("SHOW VIEWS IN ACCOUNT"); + snowflake().verified_stmt("SHOW VIEWS IN DATABASE"); + snowflake().verified_stmt("SHOW VIEWS IN DATABASE xyz"); + snowflake().verified_stmt("SHOW VIEWS IN SCHEMA"); + snowflake().verified_stmt("SHOW VIEWS IN SCHEMA xyz"); + snowflake().verified_stmt("SHOW VIEWS STARTS WITH 'abc' LIMIT 20"); + snowflake().verified_stmt("SHOW VIEWS IN SCHEMA STARTS WITH 'abc' LIMIT 20 FROM 'xyz'"); +} + +#[test] +fn test_parse_show_columns_sql() { + snowflake().verified_stmt("SHOW COLUMNS IN TABLE"); + snowflake().verified_stmt("SHOW COLUMNS IN TABLE abc"); + snowflake().verified_stmt("SHOW COLUMNS LIKE '%xyz%' IN TABLE abc"); +} + +#[test] +fn test_projection_with_nested_trailing_commas() { + let sql = "SELECT a, FROM b, LATERAL FLATTEN(input => events)"; + let _ = snowflake().parse_sql_statements(sql).unwrap(); + + //Single nesting + let sql = "SELECT (SELECT a, FROM b, LATERAL FLATTEN(input => events))"; + let _ = snowflake().parse_sql_statements(sql).unwrap(); + + //Double nesting + let sql = "SELECT (SELECT (SELECT a, FROM b, LATERAL FLATTEN(input => events)))"; + let _ = snowflake().parse_sql_statements(sql).unwrap(); + + let sql = "SELECT a, b, FROM c, (SELECT d, e, FROM f, LATERAL FLATTEN(input => events))"; + let _ = snowflake().parse_sql_statements(sql).unwrap(); +} + +#[test] +fn test_sf_double_dot_notation() { + snowflake().verified_stmt("SELECT * FROM db_name..table_name"); + snowflake().verified_stmt("SELECT * FROM x, y..z JOIN a..b AS b ON x.id = b.id"); + + assert_eq!( + snowflake() + .parse_sql_statements("SELECT * FROM X.Y..") + .unwrap_err() + .to_string(), + "sql parser error: Expected: identifier, found: ." + ); + assert_eq!( + snowflake() + .parse_sql_statements("SELECT * FROM X..Y..Z") + .unwrap_err() + .to_string(), + "sql parser error: Expected: identifier, found: ." 
+ ); + assert_eq!( + // Ensure we don't parse leading token + snowflake() + .parse_sql_statements("SELECT * FROM .X.Y") + .unwrap_err() + .to_string(), + "sql parser error: Expected: identifier, found: ." + ); +} + +#[test] +fn test_parse_double_dot_notation_wrong_position() {} + +#[test] +fn parse_insert_overwrite() { + let insert_overwrite_into = r#"INSERT OVERWRITE INTO schema.table SELECT a FROM b"#; + snowflake().verified_stmt(insert_overwrite_into); +} + +#[test] +fn test_table_sample() { + snowflake_and_generic().verified_stmt("SELECT * FROM testtable SAMPLE (10)"); + snowflake_and_generic().verified_stmt("SELECT * FROM testtable TABLESAMPLE (10)"); + snowflake_and_generic() + .verified_stmt("SELECT * FROM testtable AS t TABLESAMPLE BERNOULLI (10)"); + snowflake_and_generic().verified_stmt("SELECT * FROM testtable AS t TABLESAMPLE ROW (10)"); + snowflake_and_generic().verified_stmt("SELECT * FROM testtable AS t TABLESAMPLE ROW (10 ROWS)"); + snowflake_and_generic() + .verified_stmt("SELECT * FROM testtable TABLESAMPLE BLOCK (3) SEED (82)"); + snowflake_and_generic() + .verified_stmt("SELECT * FROM testtable TABLESAMPLE SYSTEM (3) REPEATABLE (82)"); + snowflake_and_generic().verified_stmt("SELECT id FROM mytable TABLESAMPLE (10) REPEATABLE (1)"); + snowflake_and_generic().verified_stmt("SELECT id FROM mytable TABLESAMPLE (10) SEED (1)"); +} + +#[test] +fn parse_ls_and_rm() { + snowflake().one_statement_parses_to("LS @~", "LIST @~"); + snowflake().one_statement_parses_to("RM @~", "REMOVE @~"); + + let statement = snowflake() + .verified_stmt("LIST @SNOWFLAKE_KAFKA_CONNECTOR_externalDataLakeSnowflakeConnector_STAGE_call_tracker_stream/"); + match statement { + Statement::List(command) => { + assert_eq!(command.stage, ObjectName::from(vec!["@SNOWFLAKE_KAFKA_CONNECTOR_externalDataLakeSnowflakeConnector_STAGE_call_tracker_stream/".into()])); + assert!(command.pattern.is_none()); + } + _ => unreachable!(), + }; + + let statement = + snowflake().verified_stmt("REMOVE @my_csv_stage/analysis/ PATTERN='.*data_0.*'"); + match statement { + Statement::Remove(command) => { + assert_eq!( + command.stage, + ObjectName::from(vec!["@my_csv_stage/analysis/".into()]) + ); + assert_eq!(command.pattern, Some(".*data_0.*".to_string())); + } + _ => unreachable!(), + }; + + snowflake().verified_stmt(r#"LIST @"STAGE_WITH_QUOTES""#); + // Semi-colon after stage name - should terminate the stage name + snowflake() + .parse_sql_statements("LIST @db1.schema1.stage1/dir1/;") + .unwrap(); +} + +#[test] +fn test_sql_keywords_as_select_item_aliases() { + // Some keywords that should be parsed as an alias + let unreserved_kws = vec!["CLUSTER", "FETCH", "RETURNING", "LIMIT", "EXCEPT"]; + for kw in unreserved_kws { + snowflake() + .one_statement_parses_to(&format!("SELECT 1 {kw}"), &format!("SELECT 1 AS {kw}")); + } + + // Some keywords that should not be parsed as an alias + let reserved_kws = vec![ + "FROM", + "GROUP", + "HAVING", + "INTERSECT", + "INTO", + "ORDER", + "SELECT", + "UNION", + "WHERE", + "WITH", + ]; + for kw in reserved_kws { + assert!(snowflake() + .parse_sql_statements(&format!("SELECT 1 {kw}")) + .is_err()); + } +} + +#[test] +fn test_timetravel_at_before() { + snowflake().verified_only_select("SELECT * FROM tbl AT(TIMESTAMP => '2024-12-15 00:00:00')"); + snowflake() + .verified_only_select("SELECT * FROM tbl BEFORE(TIMESTAMP => '2024-12-15 00:00:00')"); +} + +#[test] +fn test_grant_account_global_privileges() { + let privileges = vec![ + "ALL", + "ALL PRIVILEGES", + "ATTACH POLICY", + "AUDIT", + 
"BIND SERVICE ENDPOINT", + "IMPORT SHARE", + "OVERRIDE SHARE RESTRICTIONS", + "PURCHASE DATA EXCHANGE LISTING", + "RESOLVE ALL", + "READ SESSION", + ]; + let with_grant_options = vec!["", " WITH GRANT OPTION"]; + + for p in &privileges { + for wgo in &with_grant_options { + let sql = format!("GRANT {p} ON ACCOUNT TO ROLE role1{wgo}"); + snowflake_and_generic().verified_stmt(&sql); + } + } + + let create_object_types = vec![ + "ACCOUNT", + "APPLICATION", + "APPLICATION PACKAGE", + "COMPUTE POOL", + "DATA EXCHANGE LISTING", + "DATABASE", + "EXTERNAL VOLUME", + "FAILOVER GROUP", + "INTEGRATION", + "NETWORK POLICY", + "ORGANIZATION LISTING", + "REPLICATION GROUP", + "ROLE", + "SHARE", + "USER", + "WAREHOUSE", + ]; + for t in &create_object_types { + for wgo in &with_grant_options { + let sql = format!("GRANT CREATE {t} ON ACCOUNT TO ROLE role1{wgo}"); + snowflake_and_generic().verified_stmt(&sql); + } + } + + let apply_types = vec![ + "AGGREGATION POLICY", + "AUTHENTICATION POLICY", + "JOIN POLICY", + "MASKING POLICY", + "PACKAGES POLICY", + "PASSWORD POLICY", + "PROJECTION POLICY", + "ROW ACCESS POLICY", + "SESSION POLICY", + "TAG", + ]; + for t in &apply_types { + for wgo in &with_grant_options { + let sql = format!("GRANT APPLY {t} ON ACCOUNT TO ROLE role1{wgo}"); + snowflake_and_generic().verified_stmt(&sql); + } + } + + let execute_types = vec![ + "ALERT", + "DATA METRIC FUNCTION", + "MANAGED ALERT", + "MANAGED TASK", + "TASK", + ]; + for t in &execute_types { + for wgo in &with_grant_options { + let sql = format!("GRANT EXECUTE {t} ON ACCOUNT TO ROLE role1{wgo}"); + snowflake_and_generic().verified_stmt(&sql); + } + } + + let manage_types = vec![ + "ACCOUNT SUPPORT CASES", + "EVENT SHARING", + "GRANTS", + "LISTING AUTO FULFILLMENT", + "ORGANIZATION SUPPORT CASES", + "USER SUPPORT CASES", + "WAREHOUSES", + ]; + for t in &manage_types { + for wgo in &with_grant_options { + let sql = format!("GRANT MANAGE {t} ON ACCOUNT TO ROLE role1{wgo}"); + snowflake_and_generic().verified_stmt(&sql); + } + } + + let monitor_types = vec!["EXECUTION", "SECURITY", "USAGE"]; + for t in &monitor_types { + for wgo in &with_grant_options { + let sql = format!("GRANT MONITOR {t} ON ACCOUNT TO ROLE role1{wgo}"); + snowflake_and_generic().verified_stmt(&sql); + } + } +} + +#[test] +fn test_grant_account_object_privileges() { + let privileges = vec![ + "ALL", + "ALL PRIVILEGES", + "APPLYBUDGET", + "MODIFY", + "MONITOR", + "USAGE", + "OPERATE", + ]; + + let objects_types = vec![ + "USER", + "RESOURCE MONITOR", + "WAREHOUSE", + "COMPUTE POOL", + "DATABASE", + "INTEGRATION", + "CONNECTION", + "FAILOVER GROUP", + "REPLICATION GROUP", + "EXTERNAL VOLUME", + ]; + + let with_grant_options = vec!["", " WITH GRANT OPTION"]; + + for t in &objects_types { + for p in &privileges { + for wgo in &with_grant_options { + let sql = format!("GRANT {p} ON {t} obj1 TO ROLE role1{wgo}"); + snowflake_and_generic().verified_stmt(&sql); + } + } + } +} + +#[test] +fn test_grant_role_to() { + snowflake_and_generic().verified_stmt("GRANT ROLE r1 TO ROLE r2"); + snowflake_and_generic().verified_stmt("GRANT ROLE r1 TO USER u1"); +} + +#[test] +fn test_grant_database_role_to() { + snowflake_and_generic().verified_stmt("GRANT DATABASE ROLE r1 TO ROLE r2"); + snowflake_and_generic().verified_stmt("GRANT DATABASE ROLE db1.sc1.r1 TO ROLE db1.sc1.r2"); +} + +#[test] +fn test_alter_session() { + assert_eq!( + snowflake() + .parse_sql_statements("ALTER SESSION SET") + .unwrap_err() + .to_string(), + "sql parser error: expected at least one option" + 
); + assert_eq!( + snowflake() + .parse_sql_statements("ALTER SESSION UNSET") + .unwrap_err() + .to_string(), + "sql parser error: expected at least one option" + ); + + snowflake().verified_stmt("ALTER SESSION SET AUTOCOMMIT=TRUE"); + snowflake().verified_stmt("ALTER SESSION SET AUTOCOMMIT=FALSE QUERY_TAG='tag'"); + snowflake().verified_stmt("ALTER SESSION UNSET AUTOCOMMIT"); + snowflake().verified_stmt("ALTER SESSION UNSET AUTOCOMMIT, QUERY_TAG"); + snowflake().one_statement_parses_to( + "ALTER SESSION SET A=false, B='tag';", + "ALTER SESSION SET A=FALSE B='tag'", + ); + snowflake().one_statement_parses_to( + "ALTER SESSION SET A=true \nB='tag'", + "ALTER SESSION SET A=TRUE B='tag'", + ); + snowflake().one_statement_parses_to("ALTER SESSION UNSET a\nB", "ALTER SESSION UNSET a, B"); +} + +#[test] +fn test_alter_session_followed_by_statement() { + let stmts = snowflake() + .parse_sql_statements("ALTER SESSION SET QUERY_TAG='hello'; SELECT 42") + .unwrap(); + match stmts[..] { + [Statement::AlterSession { .. }, Statement::Query { .. }] => {} + _ => panic!("Unexpected statements: {:?}", stmts), + } +} + +#[test] +fn test_nested_join_without_parentheses() { + let query = "SELECT DISTINCT p.product_id FROM orders AS o INNER JOIN customers AS c INNER JOIN products AS p ON p.customer_id = c.customer_id ON c.order_id = o.order_id"; + assert_eq!( + only( + snowflake() + .verified_only_select_with_canonical(query, "SELECT DISTINCT p.product_id FROM orders AS o INNER JOIN (customers AS c INNER JOIN products AS p ON p.customer_id = c.customer_id) ON c.order_id = o.order_id") + .from + ) + .joins, + vec![Join { + relation: TableFactor::NestedJoin { + table_with_joins: Box::new(TableWithJoins { + relation: TableFactor::Table { + name: ObjectName::from(vec![Ident::new("customers".to_string())]), + alias: Some(TableAlias { + name: Ident { + value: "c".to_string(), + quote_style: None, + span: Span::empty(), + }, + columns: vec![], + }), + args: None, + with_hints: vec![], + version: None, + partitions: vec![], + with_ordinality: false, + json_path: None, + sample: None, + index_hints: vec![], + }, + joins: vec![Join { + relation: TableFactor::Table { + name: ObjectName::from(vec![Ident::new("products".to_string())]), + alias: Some(TableAlias { + name: Ident { + value: "p".to_string(), + quote_style: None, + span: Span::empty(), + }, + columns: vec![], + }), + args: None, + with_hints: vec![], + version: None, + partitions: vec![], + with_ordinality: false, + json_path: None, + sample: None, + index_hints: vec![], + }, + global: false, + join_operator: JoinOperator::Inner(JoinConstraint::On(Expr::BinaryOp { + left: Box::new(Expr::CompoundIdentifier(vec![ + Ident::new("p".to_string()), + Ident::new("customer_id".to_string()) + ])), + op: BinaryOperator::Eq, + right: Box::new(Expr::CompoundIdentifier(vec![ + Ident::new("c".to_string()), + Ident::new("customer_id".to_string()) + ])), + })), + }] + }), + alias: None + }, + global: false, + join_operator: JoinOperator::Inner(JoinConstraint::On(Expr::BinaryOp { + left: Box::new(Expr::CompoundIdentifier(vec![ + Ident::new("c".to_string()), + Ident::new("order_id".to_string()) + ])), + op: BinaryOperator::Eq, + right: Box::new(Expr::CompoundIdentifier(vec![ + Ident::new("o".to_string()), + Ident::new("order_id".to_string()) + ])), + })) + }], + ); + + let query = "SELECT DISTINCT p.product_id FROM orders AS o JOIN customers AS c JOIN products AS p ON p.customer_id = c.customer_id ON c.order_id = o.order_id"; + assert_eq!( + only( + snowflake() + 
.verified_only_select_with_canonical(query, "SELECT DISTINCT p.product_id FROM orders AS o JOIN (customers AS c JOIN products AS p ON p.customer_id = c.customer_id) ON c.order_id = o.order_id") + .from + ) + .joins, + vec![Join { + relation: TableFactor::NestedJoin { + table_with_joins: Box::new(TableWithJoins { + relation: TableFactor::Table { + name: ObjectName::from(vec![Ident::new("customers".to_string())]), + alias: Some(TableAlias { + name: Ident { + value: "c".to_string(), + quote_style: None, + span: Span::empty(), + }, + columns: vec![], + }), + args: None, + with_hints: vec![], + version: None, + partitions: vec![], + with_ordinality: false, + json_path: None, + sample: None, + index_hints: vec![], + }, + joins: vec![Join { + relation: TableFactor::Table { + name: ObjectName::from(vec![Ident::new("products".to_string())]), + alias: Some(TableAlias { + name: Ident { + value: "p".to_string(), + quote_style: None, + span: Span::empty(), + }, + columns: vec![], + }), + args: None, + with_hints: vec![], + version: None, + partitions: vec![], + with_ordinality: false, + json_path: None, + sample: None, + index_hints: vec![], + }, + global: false, + join_operator: JoinOperator::Join(JoinConstraint::On(Expr::BinaryOp { + left: Box::new(Expr::CompoundIdentifier(vec![ + Ident::new("p".to_string()), + Ident::new("customer_id".to_string()) + ])), + op: BinaryOperator::Eq, + right: Box::new(Expr::CompoundIdentifier(vec![ + Ident::new("c".to_string()), + Ident::new("customer_id".to_string()) + ])), + })), + }] + }), + alias: None + }, + global: false, + join_operator: JoinOperator::Join(JoinConstraint::On(Expr::BinaryOp { + left: Box::new(Expr::CompoundIdentifier(vec![ + Ident::new("c".to_string()), + Ident::new("order_id".to_string()) + ])), + op: BinaryOperator::Eq, + right: Box::new(Expr::CompoundIdentifier(vec![ + Ident::new("o".to_string()), + Ident::new("order_id".to_string()) + ])), + })) + }], + ); + + let query = "SELECT DISTINCT p.product_id FROM orders AS o LEFT JOIN customers AS c LEFT JOIN products AS p ON p.customer_id = c.customer_id ON c.order_id = o.order_id"; + assert_eq!( + only( + snowflake() + .verified_only_select_with_canonical(query, "SELECT DISTINCT p.product_id FROM orders AS o LEFT JOIN (customers AS c LEFT JOIN products AS p ON p.customer_id = c.customer_id) ON c.order_id = o.order_id") + .from + ) + .joins, + vec![Join { + relation: TableFactor::NestedJoin { + table_with_joins: Box::new(TableWithJoins { + relation: TableFactor::Table { + name: ObjectName::from(vec![Ident::new("customers".to_string())]), + alias: Some(TableAlias { + name: Ident { + value: "c".to_string(), + quote_style: None, + span: Span::empty(), + }, + columns: vec![], + }), + args: None, + with_hints: vec![], + version: None, + partitions: vec![], + with_ordinality: false, + json_path: None, + sample: None, + index_hints: vec![], + }, + joins: vec![Join { + relation: TableFactor::Table { + name: ObjectName::from(vec![Ident::new("products".to_string())]), + alias: Some(TableAlias { + name: Ident { + value: "p".to_string(), + quote_style: None, + span: Span::empty(), + }, + columns: vec![], + }), + args: None, + with_hints: vec![], + version: None, + partitions: vec![], + with_ordinality: false, + json_path: None, + sample: None, + index_hints: vec![], + }, + global: false, + join_operator: JoinOperator::Left(JoinConstraint::On(Expr::BinaryOp { + left: Box::new(Expr::CompoundIdentifier(vec![ + Ident::new("p".to_string()), + Ident::new("customer_id".to_string()) + ])), + op: BinaryOperator::Eq, + 
right: Box::new(Expr::CompoundIdentifier(vec![ + Ident::new("c".to_string()), + Ident::new("customer_id".to_string()) + ])), + })), + }] + }), + alias: None + }, + global: false, + join_operator: JoinOperator::Left(JoinConstraint::On(Expr::BinaryOp { + left: Box::new(Expr::CompoundIdentifier(vec![ + Ident::new("c".to_string()), + Ident::new("order_id".to_string()) + ])), + op: BinaryOperator::Eq, + right: Box::new(Expr::CompoundIdentifier(vec![ + Ident::new("o".to_string()), + Ident::new("order_id".to_string()) + ])), + })) + }], + ); + + let query = "SELECT DISTINCT p.product_id FROM orders AS o RIGHT JOIN customers AS c RIGHT JOIN products AS p ON p.customer_id = c.customer_id ON c.order_id = o.order_id"; + assert_eq!( + only( + snowflake() + .verified_only_select_with_canonical(query, "SELECT DISTINCT p.product_id FROM orders AS o RIGHT JOIN (customers AS c RIGHT JOIN products AS p ON p.customer_id = c.customer_id) ON c.order_id = o.order_id") + .from + ) + .joins, + vec![Join { + relation: TableFactor::NestedJoin { + table_with_joins: Box::new(TableWithJoins { + relation: TableFactor::Table { + name: ObjectName::from(vec![Ident::new("customers".to_string())]), + alias: Some(TableAlias { + name: Ident { + value: "c".to_string(), + quote_style: None, + span: Span::empty(), + }, + columns: vec![], + }), + args: None, + with_hints: vec![], + version: None, + partitions: vec![], + with_ordinality: false, + json_path: None, + sample: None, + index_hints: vec![], + }, + joins: vec![Join { + relation: TableFactor::Table { + name: ObjectName::from(vec![Ident::new("products".to_string())]), + alias: Some(TableAlias { + name: Ident { + value: "p".to_string(), + quote_style: None, + span: Span::empty(), + }, + columns: vec![], + }), + args: None, + with_hints: vec![], + version: None, + partitions: vec![], + with_ordinality: false, + json_path: None, + sample: None, + index_hints: vec![], + }, + global: false, + join_operator: JoinOperator::Right(JoinConstraint::On(Expr::BinaryOp { + left: Box::new(Expr::CompoundIdentifier(vec![ + Ident::new("p".to_string()), + Ident::new("customer_id".to_string()) + ])), + op: BinaryOperator::Eq, + right: Box::new(Expr::CompoundIdentifier(vec![ + Ident::new("c".to_string()), + Ident::new("customer_id".to_string()) + ])), + })), + }] + }), + alias: None + }, + global: false, + join_operator: JoinOperator::Right(JoinConstraint::On(Expr::BinaryOp { + left: Box::new(Expr::CompoundIdentifier(vec![ + Ident::new("c".to_string()), + Ident::new("order_id".to_string()) + ])), + op: BinaryOperator::Eq, + right: Box::new(Expr::CompoundIdentifier(vec![ + Ident::new("o".to_string()), + Ident::new("order_id".to_string()) + ])), + })) + }], + ); + + let query = "SELECT DISTINCT p.product_id FROM orders AS o FULL JOIN customers AS c FULL JOIN products AS p ON p.customer_id = c.customer_id ON c.order_id = o.order_id"; + assert_eq!( + only( + snowflake() + .verified_only_select_with_canonical(query, "SELECT DISTINCT p.product_id FROM orders AS o FULL JOIN (customers AS c FULL JOIN products AS p ON p.customer_id = c.customer_id) ON c.order_id = o.order_id") + .from + ) + .joins, + vec![Join { + relation: TableFactor::NestedJoin { + table_with_joins: Box::new(TableWithJoins { + relation: TableFactor::Table { + name: ObjectName::from(vec![Ident::new("customers".to_string())]), + alias: Some(TableAlias { + name: Ident { + value: "c".to_string(), + quote_style: None, + span: Span::empty(), + }, + columns: vec![], + }), + args: None, + with_hints: vec![], + version: None, + partitions: 
vec![], + with_ordinality: false, + json_path: None, + sample: None, + index_hints: vec![], + }, + joins: vec![Join { + relation: TableFactor::Table { + name: ObjectName::from(vec![Ident::new("products".to_string())]), + alias: Some(TableAlias { + name: Ident { + value: "p".to_string(), + quote_style: None, + span: Span::empty(), + }, + columns: vec![], + }), + args: None, + with_hints: vec![], + version: None, + partitions: vec![], + with_ordinality: false, + json_path: None, + sample: None, + index_hints: vec![], + }, + global: false, + join_operator: JoinOperator::FullOuter(JoinConstraint::On( + Expr::BinaryOp { + left: Box::new(Expr::CompoundIdentifier(vec![ + Ident::new("p".to_string()), + Ident::new("customer_id".to_string()) + ])), + op: BinaryOperator::Eq, + right: Box::new(Expr::CompoundIdentifier(vec![ + Ident::new("c".to_string()), + Ident::new("customer_id".to_string()) + ])), + } + )), + }] + }), + alias: None + }, + global: false, + join_operator: JoinOperator::FullOuter(JoinConstraint::On(Expr::BinaryOp { + left: Box::new(Expr::CompoundIdentifier(vec![ + Ident::new("c".to_string()), + Ident::new("order_id".to_string()) + ])), + op: BinaryOperator::Eq, + right: Box::new(Expr::CompoundIdentifier(vec![ + Ident::new("o".to_string()), + Ident::new("order_id".to_string()) + ])), + })) + }], + ); +} + +#[test] +fn parse_connect_by_root_operator() { + let sql = "SELECT CONNECT_BY_ROOT name AS root_name FROM Tbl1"; + + match snowflake().verified_stmt(sql) { + Statement::Query(query) => { + assert_eq!( + query.body.as_select().unwrap().projection[0], + SelectItem::ExprWithAlias { + expr: Expr::Prefixed { + prefix: Ident::new("CONNECT_BY_ROOT"), + value: Box::new(Expr::Identifier(Ident::new("name"))) + }, + alias: Ident::new("root_name"), + } + ); + } + _ => unreachable!(), + } + + let sql = "SELECT CONNECT_BY_ROOT name FROM Tbl2"; + match snowflake().verified_stmt(sql) { + Statement::Query(query) => { + assert_eq!( + query.body.as_select().unwrap().projection[0], + SelectItem::UnnamedExpr(Expr::Prefixed { + prefix: Ident::new("CONNECT_BY_ROOT"), + value: Box::new(Expr::Identifier(Ident::new("name"))) + }) + ); + } + _ => unreachable!(), + } + + let sql = "SELECT CONNECT_BY_ROOT FROM Tbl2"; + let res = snowflake().parse_sql_statements(sql); + assert_eq!( + res.unwrap_err().to_string(), + "sql parser error: Expected an expression, found: FROM" + ); +} diff --git a/tests/sqlparser_sqlite.rs b/tests/sqlparser_sqlite.rs index 6f8bbb2d8..b759065f3 100644 --- a/tests/sqlparser_sqlite.rs +++ b/tests/sqlparser_sqlite.rs @@ -214,7 +214,6 @@ fn parse_create_table_auto_increment() { vec![ColumnDef { name: "bar".into(), data_type: DataType::Int(None), - collation: None, options: vec![ ColumnOptionDef { name: None, @@ -243,7 +242,6 @@ fn parse_create_table_primary_key_asc_desc() { let expected_column_def = |kind| ColumnDef { name: "bar".into(), data_type: DataType::Int(None), - collation: None, options: vec![ ColumnOptionDef { name: None, @@ -286,13 +284,11 @@ fn parse_create_sqlite_quote() { ColumnDef { name: Ident::with_quote('"', "KEY"), data_type: DataType::Int(None), - collation: None, options: vec![], }, ColumnDef { name: Ident::with_quote('[', "INDEX"), data_type: DataType::Int(None), - collation: None, options: vec![], }, ], @@ -373,7 +369,9 @@ fn test_placeholder() { let ast = sqlite().verified_only_select(sql); assert_eq!( ast.projection[0], - UnnamedExpr(Expr::Value(Value::Placeholder("@xxx".into()))), + UnnamedExpr(Expr::Value( + (Value::Placeholder("@xxx".into())).with_empty_span() + 
)), ); } @@ -418,7 +416,8 @@ fn parse_window_function_with_filter() { assert_eq!( select.projection, vec![SelectItem::UnnamedExpr(Expr::Function(Function { - name: ObjectName(vec![Ident::new(func_name)]), + name: ObjectName::from(vec![Ident::new(func_name)]), + uses_odbc_syntax: false, parameters: FunctionArguments::None, args: FunctionArguments::List(FunctionArgumentList { duplicate_treatment: None, @@ -449,7 +448,11 @@ fn parse_attach_database() { match verified_stmt { Statement::AttachDatabase { schema_name, - database_file_name: Expr::Value(Value::SingleQuotedString(literal_name)), + database_file_name: + Expr::Value(ValueWithSpan { + value: Value::SingleQuotedString(literal_name), + span: _, + }), database: true, } => { assert_eq!(schema_name.value, "test"); @@ -465,27 +468,20 @@ fn parse_update_tuple_row_values() { assert_eq!( sqlite().verified_stmt("UPDATE x SET (a, b) = (1, 2)"), Statement::Update { + or: None, assignments: vec![Assignment { target: AssignmentTarget::Tuple(vec![ - ObjectName(vec![Ident::new("a"),]), - ObjectName(vec![Ident::new("b"),]), + ObjectName::from(vec![Ident::new("a"),]), + ObjectName::from(vec![Ident::new("b"),]), ]), value: Expr::Tuple(vec![ - Expr::Value(Value::Number("1".parse().unwrap(), false)), - Expr::Value(Value::Number("2".parse().unwrap(), false)) + Expr::Value((Value::Number("1".parse().unwrap(), false)).with_empty_span()), + Expr::Value((Value::Number("2".parse().unwrap(), false)).with_empty_span()) ]) }], selection: None, table: TableWithJoins { - relation: TableFactor::Table { - name: ObjectName(vec![Ident::new("x")]), - alias: None, - args: None, - with_hints: vec![], - version: None, - partitions: vec![], - with_ordinality: false, - }, + relation: table_from_name(ObjectName::from(vec![Ident::new("x")])), joins: vec![], }, from: None, @@ -525,32 +521,9 @@ fn parse_start_transaction_with_modifier() { sqlite_and_generic().verified_stmt("BEGIN DEFERRED TRANSACTION"); sqlite_and_generic().verified_stmt("BEGIN IMMEDIATE TRANSACTION"); sqlite_and_generic().verified_stmt("BEGIN EXCLUSIVE TRANSACTION"); - sqlite_and_generic().one_statement_parses_to("BEGIN DEFERRED", "BEGIN DEFERRED TRANSACTION"); - sqlite_and_generic().one_statement_parses_to("BEGIN IMMEDIATE", "BEGIN IMMEDIATE TRANSACTION"); - sqlite_and_generic().one_statement_parses_to("BEGIN EXCLUSIVE", "BEGIN EXCLUSIVE TRANSACTION"); - - let unsupported_dialects = TestedDialects::new( - all_dialects() - .dialects - .into_iter() - .filter(|x| !(x.is::() || x.is::())) - .collect(), - ); - let res = unsupported_dialects.parse_sql_statements("BEGIN DEFERRED"); - assert_eq!( - ParserError::ParserError("Expected: end of statement, found: DEFERRED".to_string()), - res.unwrap_err(), - ); - let res = unsupported_dialects.parse_sql_statements("BEGIN IMMEDIATE"); - assert_eq!( - ParserError::ParserError("Expected: end of statement, found: IMMEDIATE".to_string()), - res.unwrap_err(), - ); - let res = unsupported_dialects.parse_sql_statements("BEGIN EXCLUSIVE"); - assert_eq!( - ParserError::ParserError("Expected: end of statement, found: EXCLUSIVE".to_string()), - res.unwrap_err(), - ); + sqlite_and_generic().verified_stmt("BEGIN DEFERRED"); + sqlite_and_generic().verified_stmt("BEGIN IMMEDIATE"); + sqlite_and_generic().verified_stmt("BEGIN EXCLUSIVE"); } #[test] @@ -563,12 +536,62 @@ fn test_dollar_identifier_as_placeholder() { Expr::BinaryOp { op, left, right } => { assert_eq!(op, BinaryOperator::Eq); assert_eq!(left, Box::new(Expr::Identifier(Ident::new("id")))); - assert_eq!(right, 
Box::new(Expr::Value(Placeholder("$id".to_string())))); + assert_eq!( + right, + Box::new(Expr::Value( + (Placeholder("$id".to_string())).with_empty_span() + )) + ); + } + _ => unreachable!(), + } + + // $$ is a valid placeholder in SQLite + match sqlite().verified_expr("id = $$") { + Expr::BinaryOp { op, left, right } => { + assert_eq!(op, BinaryOperator::Eq); + assert_eq!(left, Box::new(Expr::Identifier(Ident::new("id")))); + assert_eq!( + right, + Box::new(Expr::Value( + (Placeholder("$$".to_string())).with_empty_span() + )) + ); } _ => unreachable!(), } } +#[test] +fn test_match_operator() { + assert_eq!( + sqlite().verified_expr("col MATCH 'pattern'"), + Expr::BinaryOp { + op: BinaryOperator::Match, + left: Box::new(Expr::Identifier(Ident::new("col"))), + right: Box::new(Expr::Value( + (Value::SingleQuotedString("pattern".to_string())).with_empty_span() + )) + } + ); + sqlite().verified_only_select("SELECT * FROM email WHERE email MATCH 'fts5'"); +} + +#[test] +fn test_regexp_operator() { + assert_eq!( + sqlite().verified_expr("col REGEXP 'pattern'"), + Expr::BinaryOp { + op: BinaryOperator::Regexp, + left: Box::new(Expr::Identifier(Ident::new("col"))), + right: Box::new(Expr::Value( + (Value::SingleQuotedString("pattern".to_string())).with_empty_span() + )) + } + ); + sqlite().verified_only_select(r#"SELECT count(*) FROM messages WHERE msg_text REGEXP '\d+'"#); +} + fn sqlite() -> TestedDialects { TestedDialects::new(vec![Box::new(SQLiteDialect {})]) }
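
A note for readers reproducing these round-trip checks outside the test suite: the `verified_stmt` helper used throughout the Snowflake and SQLite tests parses a statement and asserts that the resulting AST serializes back to the exact input text. A minimal sketch of that idea, assuming only the public `sqlparser` crate API on a version that includes these changes (the in-tree tests use the internal `TestedDialects` helper instead), looks like this:

// Minimal round-trip sketch, not part of this patch. Assumes the `sqlparser`
// crate with the Snowflake dialect as a dependency; the example statements
// are taken from the SHOW ... tests added above.
use sqlparser::dialect::SnowflakeDialect;
use sqlparser::parser::Parser;

fn assert_roundtrip(sql: &str) {
    let dialect = SnowflakeDialect {};
    // Parse the input with the Snowflake dialect.
    let statements = Parser::parse_sql(&dialect, sql).expect("statement should parse");
    assert_eq!(statements.len(), 1, "expected exactly one statement");
    // `Statement` implements `Display`, so printing the AST should reproduce
    // the canonical SQL text that the tests compare against.
    assert_eq!(statements[0].to_string(), sql);
}

fn main() {
    assert_roundtrip("SHOW OBJECTS LIKE '%test%' IN abc");
    assert_roundtrip("SHOW DATABASES HISTORY");
}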