diff --git a/src/ast/helpers/stmt_create_table.rs b/src/ast/helpers/stmt_create_table.rs
index 94af03481..e63c90dbc 100644
--- a/src/ast/helpers/stmt_create_table.rs
+++ b/src/ast/helpers/stmt_create_table.rs
@@ -504,8 +504,8 @@ impl CreateTableBuilder {
         self.require_user = require_user;
         self
     }
-    /// Consume the builder and produce a `Statement::CreateTable`.
-    pub fn build(self) -> Statement {
+    /// Consume the builder and produce a `CreateTable`.
+    pub fn build(self) -> CreateTable {
         CreateTable {
             or_replace: self.or_replace,
             temporary: self.temporary,
@@ -561,7 +561,6 @@ impl CreateTableBuilder {
             initialize: self.initialize,
             require_user: self.require_user,
         }
-        .into()
     }
 }
 
@@ -572,115 +571,7 @@ impl TryFrom<Statement> for CreateTableBuilder {
     // ownership.
     fn try_from(stmt: Statement) -> Result<Self, Self::Error> {
         match stmt {
-            Statement::CreateTable(CreateTable {
-                or_replace,
-                temporary,
-                external,
-                global,
-                if_not_exists,
-                transient,
-                volatile,
-                iceberg,
-                dynamic,
-                name,
-                columns,
-                constraints,
-                hive_distribution,
-                hive_formats,
-                file_format,
-                location,
-                query,
-                without_rowid,
-                like,
-                clone,
-                version,
-                comment,
-                on_commit,
-                on_cluster,
-                primary_key,
-                order_by,
-                partition_by,
-                cluster_by,
-                clustered_by,
-                inherits,
-                partition_of,
-                for_values,
-                strict,
-                copy_grants,
-                enable_schema_evolution,
-                change_tracking,
-                data_retention_time_in_days,
-                max_data_extension_time_in_days,
-                default_ddl_collation,
-                with_aggregation_policy,
-                with_row_access_policy,
-                with_tags,
-                base_location,
-                external_volume,
-                catalog,
-                catalog_sync,
-                storage_serialization_policy,
-                table_options,
-                target_lag,
-                warehouse,
-                refresh_mode,
-                initialize,
-                require_user,
-            }) => Ok(Self {
-                or_replace,
-                temporary,
-                external,
-                global,
-                if_not_exists,
-                transient,
-                dynamic,
-                name,
-                columns,
-                constraints,
-                hive_distribution,
-                hive_formats,
-                file_format,
-                location,
-                query,
-                without_rowid,
-                like,
-                clone,
-                version,
-                comment,
-                on_commit,
-                on_cluster,
-                primary_key,
-                order_by,
-                partition_by,
-                cluster_by,
-                clustered_by,
-                inherits,
-                partition_of,
-                for_values,
-                strict,
-                iceberg,
-                copy_grants,
-                enable_schema_evolution,
-                change_tracking,
-                data_retention_time_in_days,
-                max_data_extension_time_in_days,
-                default_ddl_collation,
-                with_aggregation_policy,
-                with_row_access_policy,
-                with_tags,
-                volatile,
-                base_location,
-                external_volume,
-                catalog,
-                catalog_sync,
-                storage_serialization_policy,
-                table_options,
-                target_lag,
-                warehouse,
-                refresh_mode,
-                initialize,
-                require_user,
-            }),
+            Statement::CreateTable(create_table) => Ok(create_table.into()),
             _ => Err(ParserError::ParserError(format!(
                 "Expected create table statement, but received: {stmt}"
             ))),
@@ -688,6 +579,66 @@ impl TryFrom<Statement> for CreateTableBuilder {
     }
 }
 
+impl From<CreateTable> for CreateTableBuilder {
+    fn from(table: CreateTable) -> Self {
+        Self {
+            or_replace: table.or_replace,
+            temporary: table.temporary,
+            external: table.external,
+            global: table.global,
+            if_not_exists: table.if_not_exists,
+            transient: table.transient,
+            volatile: table.volatile,
+            iceberg: table.iceberg,
+            dynamic: table.dynamic,
+            name: table.name,
+            columns: table.columns,
+            constraints: table.constraints,
+            hive_distribution: table.hive_distribution,
+            hive_formats: table.hive_formats,
+            file_format: table.file_format,
+            location: table.location,
+            query: table.query,
+            without_rowid: table.without_rowid,
+            like: table.like,
+            clone: table.clone,
+            version: table.version,
+            comment: table.comment,
+            on_commit: table.on_commit,
+            on_cluster: table.on_cluster,
+            primary_key: table.primary_key,
+            order_by: table.order_by,
+            partition_by: table.partition_by,
+            cluster_by: table.cluster_by,
+            clustered_by: table.clustered_by,
+            inherits: table.inherits,
+            partition_of: table.partition_of,
+            for_values: table.for_values,
+            strict: table.strict,
+            copy_grants: table.copy_grants,
+            enable_schema_evolution: table.enable_schema_evolution,
+            change_tracking: table.change_tracking,
+            data_retention_time_in_days: table.data_retention_time_in_days,
+            max_data_extension_time_in_days: table.max_data_extension_time_in_days,
+            default_ddl_collation: table.default_ddl_collation,
+            with_aggregation_policy: table.with_aggregation_policy,
+            with_row_access_policy: table.with_row_access_policy,
+            with_tags: table.with_tags,
+            base_location: table.base_location,
+            external_volume: table.external_volume,
+            catalog: table.catalog,
+            catalog_sync: table.catalog_sync,
+            storage_serialization_policy: table.storage_serialization_policy,
+            table_options: table.table_options,
+            target_lag: table.target_lag,
+            warehouse: table.warehouse,
+            refresh_mode: table.refresh_mode,
+            initialize: table.initialize,
+            require_user: table.require_user,
+        }
+    }
+}
+
 /// Helper return type when parsing configuration for a `CREATE TABLE` statement.
 #[derive(Default)]
 pub(crate) struct CreateTableConfiguration {
@@ -707,7 +658,8 @@ mod tests {
     pub fn test_from_valid_statement() {
         let builder = CreateTableBuilder::new(ObjectName::from(vec![Ident::new("table_name")]));
 
-        let stmt = builder.clone().build();
+        let create_table = builder.clone().build();
+        let stmt: Statement = create_table.into();
 
         assert_eq!(builder, CreateTableBuilder::try_from(stmt).unwrap());
     }
diff --git a/src/ast/mod.rs b/src/ast/mod.rs
index 7e0f1a104..6707c95e9 100644
--- a/src/ast/mod.rs
+++ b/src/ast/mod.rs
@@ -11774,6 +11774,24 @@ impl From for Statement {
     }
 }
 
+impl From<CreateOperator> for Statement {
+    fn from(c: CreateOperator) -> Self {
+        Self::CreateOperator(c)
+    }
+}
+
+impl From<CreateOperatorFamily> for Statement {
+    fn from(c: CreateOperatorFamily) -> Self {
+        Self::CreateOperatorFamily(c)
+    }
+}
+
+impl From<CreateOperatorClass> for Statement {
+    fn from(c: CreateOperatorClass) -> Self {
+        Self::CreateOperatorClass(c)
+    }
+}
+
 impl From<AlterSchema> for Statement {
     fn from(a: AlterSchema) -> Self {
         Self::AlterSchema(a)
@@ -11786,6 +11804,36 @@ impl From for Statement {
     }
 }
 
+impl From<AlterOperator> for Statement {
+    fn from(a: AlterOperator) -> Self {
+        Self::AlterOperator(a)
+    }
+}
+
+impl From<AlterOperatorFamily> for Statement {
+    fn from(a: AlterOperatorFamily) -> Self {
+        Self::AlterOperatorFamily(a)
+    }
+}
+
+impl From<AlterOperatorClass> for Statement {
+    fn from(a: AlterOperatorClass) -> Self {
+        Self::AlterOperatorClass(a)
+    }
+}
+
+impl From<Merge> for Statement {
+    fn from(m: Merge) -> Self {
+        Self::Merge(m)
+    }
+}
+
+impl From<AlterUser> for Statement {
+    fn from(a: AlterUser) -> Self {
+        Self::AlterUser(a)
+    }
+}
+
 impl From<DropDomain> for Statement {
     fn from(d: DropDomain) -> Self {
         Self::DropDomain(d)
@@ -11828,6 +11876,24 @@ impl From for Statement {
     }
 }
 
+impl From<DropOperator> for Statement {
+    fn from(d: DropOperator) -> Self {
+        Self::DropOperator(d)
+    }
+}
+
+impl From<DropOperatorFamily> for Statement {
+    fn from(d: DropOperatorFamily) -> Self {
+        Self::DropOperatorFamily(d)
+    }
+}
+
+impl From<DropOperatorClass> for Statement {
+    fn from(d: DropOperatorClass) -> Self {
+        Self::DropOperatorClass(d)
+    }
+}
+
 impl From<DenyStatement> for Statement {
     fn from(d: DenyStatement) -> Self {
         Self::Deny(d)
diff --git a/src/dialect/snowflake.rs b/src/dialect/snowflake.rs
index ed01c128b..eade01c04 100644
--- a/src/dialect/snowflake.rs
+++ b/src/dialect/snowflake.rs
@@ -28,11 +28,11 @@ use crate::ast::helpers::stmt_data_loading::{
 };
 use crate::ast::{
     AlterTable, AlterTableOperation, AlterTableType, CatalogSyncNamespaceMode, ColumnOption,
-    ColumnPolicy, ColumnPolicyProperty, ContactEntry, CopyIntoSnowflakeKind, CreateTableLikeKind,
-    DollarQuotedString, Ident, IdentityParameters, IdentityProperty, IdentityPropertyFormatKind,
-    IdentityPropertyKind, IdentityPropertyOrder, InitializeKind, ObjectName, ObjectNamePart,
-    RefreshModeKind, RowAccessPolicy, ShowObjects, SqlOption, Statement,
-    StorageSerializationPolicy, TagsColumnOption, Value, WrappedCollection,
+    ColumnPolicy, ColumnPolicyProperty, ContactEntry, CopyIntoSnowflakeKind, CreateTable,
+    CreateTableLikeKind, DollarQuotedString, Ident, IdentityParameters, IdentityProperty,
+    IdentityPropertyFormatKind, IdentityPropertyKind, IdentityPropertyOrder, InitializeKind,
+    ObjectName, ObjectNamePart, RefreshModeKind, RowAccessPolicy, ShowObjects, SqlOption,
+    Statement, StorageSerializationPolicy, TagsColumnOption, Value, WrappedCollection,
 };
 use crate::dialect::{Dialect, Precedence};
 use crate::keywords::Keyword;
@@ -272,9 +272,13 @@ impl Dialect for SnowflakeDialect {
             // OK - this is CREATE STAGE statement
             return Some(parse_create_stage(or_replace, temporary, parser));
         } else if parser.parse_keyword(Keyword::TABLE) {
-            return Some(parse_create_table(
-                or_replace, global, temporary, volatile, transient, iceberg, dynamic, parser,
-            ));
+            return Some(
+                parse_create_table(
+                    or_replace, global, temporary, volatile, transient, iceberg, dynamic,
+                    parser,
+                )
+                .map(Into::into),
+            );
         } else if parser.parse_keyword(Keyword::DATABASE) {
             return Some(parse_create_database(or_replace, transient, parser));
         } else {
@@ -719,7 +723,7 @@ pub fn parse_create_table(
     iceberg: bool,
     dynamic: bool,
     parser: &mut Parser,
-) -> Result<Statement, ParserError> {
+) -> Result<CreateTable, ParserError> {
     let if_not_exists = parser.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
     let table_name = parser.parse_object_name(false)?;
 
diff --git a/src/parser/alter.rs b/src/parser/alter.rs
index 935d22f8d..8ef712ef7 100644
--- a/src/parser/alter.rs
+++ b/src/parser/alter.rs
@@ -148,7 +148,7 @@ impl Parser<'_> {
     /// ```sql
     /// ALTER USER [ IF EXISTS ] [ ] [ OPTIONS ]
     /// ```
-    pub fn parse_alter_user(&mut self) -> Result<Statement, ParserError> {
+    pub fn parse_alter_user(&mut self) -> Result<AlterUser, ParserError> {
         let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
         let name = self.parse_identifier()?;
         let _ = self.parse_keyword(Keyword::WITH);
@@ -309,7 +309,7 @@ impl Parser<'_> {
             None
         };
 
-        Ok(Statement::AlterUser(AlterUser {
+        Ok(AlterUser {
             if_exists,
             name,
             rename_to,
@@ -329,7 +329,7 @@ impl Parser<'_> {
             set_props,
             unset_props,
             password,
-        }))
+        })
     }
 
     fn parse_mfa_method(&mut self) -> Result {
diff --git a/src/parser/merge.rs b/src/parser/merge.rs
index 81798c456..62da68a20 100644
--- a/src/parser/merge.rs
+++ b/src/parser/merge.rs
@@ -18,7 +18,7 @@ use alloc::{boxed::Box, format, vec, vec::Vec};
 use crate::{
     ast::{
         Merge, MergeAction, MergeClause, MergeClauseKind, MergeInsertExpr, MergeInsertKind,
-        MergeUpdateExpr, ObjectName, OutputClause, SetExpr, Statement,
+        MergeUpdateExpr, ObjectName, OutputClause, SetExpr,
     },
     dialect::{BigQueryDialect, GenericDialect, MySqlDialect},
     keywords::Keyword,
@@ -36,11 +36,13 @@ impl Parser<'_> {
         &mut self,
         merge_token: TokenWithSpan,
     ) -> Result<Box<SetExpr>, ParserError> {
-        Ok(Box::new(SetExpr::Merge(self.parse_merge(merge_token)?)))
+        Ok(Box::new(SetExpr::Merge(
+            self.parse_merge(merge_token)?.into(),
+        )))
     }
 
     /// Parse a `MERGE` statement
-    pub fn parse_merge(&mut self, merge_token: TokenWithSpan) -> Result<Statement, ParserError> {
+    pub fn parse_merge(&mut self, merge_token: TokenWithSpan) -> Result<Merge, ParserError> {
         let into = self.parse_keyword(Keyword::INTO);
 
         let table = self.parse_table_factor()?;
 
@@ -55,7 +57,7 @@ impl Parser<'_> {
             None => None,
         };
 
-        Ok(Statement::Merge(Merge {
+        Ok(Merge {
             merge_token: merge_token.into(),
             into,
             table,
@@ -63,7 +65,7 @@ impl Parser<'_> {
             on: Box::new(on),
             clauses,
             output,
-        }))
+        })
     }
 
     fn parse_merge_clauses(&mut self) -> Result<Vec<MergeClause>, ParserError> {
diff --git a/src/parser/mod.rs b/src/parser/mod.rs
index 8001611e0..8285035a6 100644
--- a/src/parser/mod.rs
+++ b/src/parser/mod.rs
@@ -604,28 +604,28 @@ impl<'a> Parser<'a> {
                 Keyword::DESC => self.parse_explain(DescribeAlias::Desc),
                 Keyword::DESCRIBE => self.parse_explain(DescribeAlias::Describe),
                 Keyword::EXPLAIN => self.parse_explain(DescribeAlias::Explain),
-                Keyword::ANALYZE => self.parse_analyze(),
+                Keyword::ANALYZE => self.parse_analyze().map(Into::into),
                 Keyword::CASE => {
                     self.prev_token();
-                    self.parse_case_stmt()
+                    self.parse_case_stmt().map(Into::into)
                 }
                 Keyword::IF => {
                     self.prev_token();
-                    self.parse_if_stmt()
+                    self.parse_if_stmt().map(Into::into)
                 }
                 Keyword::WHILE => {
                     self.prev_token();
-                    self.parse_while()
+                    self.parse_while().map(Into::into)
                 }
                 Keyword::RAISE => {
                     self.prev_token();
-                    self.parse_raise_stmt()
+                    self.parse_raise_stmt().map(Into::into)
                 }
                 Keyword::SELECT | Keyword::WITH | Keyword::VALUES | Keyword::FROM => {
                     self.prev_token();
-                    self.parse_query().map(Statement::Query)
+                    self.parse_query().map(Into::into)
                 }
-                Keyword::TRUNCATE => self.parse_truncate(),
+                Keyword::TRUNCATE => self.parse_truncate().map(Into::into),
                 Keyword::ATTACH => {
                     if dialect_of!(self is DuckDbDialect) {
                         self.parse_attach_duckdb_database()
                     } else {
@@ -636,7 +636,7 @@ impl<'a> Parser<'a> {
                 Keyword::DETACH if dialect_of!(self is DuckDbDialect | GenericDialect) => {
                     self.parse_detach_duckdb_database()
                 }
-                Keyword::MSCK => self.parse_msck(),
+                Keyword::MSCK => self.parse_msck().map(Into::into),
                 Keyword::CREATE => self.parse_create(),
                 Keyword::CACHE => self.parse_cache_table(),
                 Keyword::DROP => self.parse_drop(),
@@ -679,7 +679,7 @@ impl<'a> Parser<'a> {
                 Keyword::DEALLOCATE => self.parse_deallocate(),
                 Keyword::EXECUTE | Keyword::EXEC => self.parse_execute(),
                 Keyword::PREPARE => self.parse_prepare(),
-                Keyword::MERGE => self.parse_merge(next_token),
+                Keyword::MERGE => self.parse_merge(next_token).map(Into::into),
                 // `LISTEN`, `UNLISTEN` and `NOTIFY` are Postgres-specific
                 // syntaxes. They are used for Postgres statement.
                 Keyword::LISTEN if self.dialect.supports_listen_notify() => self.parse_listen(),
@@ -713,12 +713,12 @@ impl<'a> Parser<'a> {
                     self.prev_token();
                     self.parse_vacuum()
                 }
-                Keyword::RESET => self.parse_reset(),
+                Keyword::RESET => self.parse_reset().map(Into::into),
                 _ => self.expected("an SQL statement", next_token),
             },
             Token::LParen => {
                 self.prev_token();
-                self.parse_query().map(Statement::Query)
+                self.parse_query().map(Into::into)
             }
             _ => self.expected("an SQL statement", next_token),
         }
@@ -727,7 +727,7 @@ impl<'a> Parser<'a> {
     /// Parse a `CASE` statement.
     ///
     /// See [Statement::Case]
-    pub fn parse_case_stmt(&mut self) -> Result<Statement, ParserError> {
+    pub fn parse_case_stmt(&mut self) -> Result<CaseStatement, ParserError> {
         let case_token = self.expect_keyword(Keyword::CASE)?;
 
         let match_expr = if self.peek_keyword(Keyword::WHEN) {
@@ -752,19 +752,19 @@ impl<'a> Parser<'a> {
             end_case_token = self.expect_keyword(Keyword::CASE)?;
         }
 
-        Ok(Statement::Case(CaseStatement {
+        Ok(CaseStatement {
             case_token: AttachedToken(case_token),
             match_expr,
             when_blocks,
             else_block,
             end_case_token: AttachedToken(end_case_token),
-        }))
+        })
     }
 
     /// Parse an `IF` statement.
     ///
     /// See [Statement::If]
-    pub fn parse_if_stmt(&mut self) -> Result<Statement, ParserError> {
+    pub fn parse_if_stmt(&mut self) -> Result<IfStatement, ParserError> {
         self.expect_keyword_is(Keyword::IF)?;
         let if_block = self.parse_conditional_statement_block(&[
             Keyword::ELSE,
@@ -793,22 +793,22 @@ impl<'a> Parser<'a> {
         self.expect_keyword_is(Keyword::END)?;
         let end_token = self.expect_keyword(Keyword::IF)?;
 
-        Ok(Statement::If(IfStatement {
+        Ok(IfStatement {
             if_block,
             elseif_blocks,
             else_block,
             end_token: Some(AttachedToken(end_token)),
-        }))
+        })
     }
 
     /// Parse a `WHILE` statement.
     ///
     /// See [Statement::While]
-    fn parse_while(&mut self) -> Result<Statement, ParserError> {
+    fn parse_while(&mut self) -> Result<WhileStatement, ParserError> {
         self.expect_keyword_is(Keyword::WHILE)?;
         let while_block = self.parse_conditional_statement_block(&[Keyword::END])?;
 
-        Ok(Statement::While(WhileStatement { while_block }))
+        Ok(WhileStatement { while_block })
     }
 
     /// Parses an expression and associated list of statements
@@ -875,7 +875,7 @@ impl<'a> Parser<'a> {
     /// Parse a `RAISE` statement.
     ///
     /// See [Statement::Raise]
-    pub fn parse_raise_stmt(&mut self) -> Result<Statement, ParserError> {
+    pub fn parse_raise_stmt(&mut self) -> Result<RaiseStatement, ParserError> {
         self.expect_keyword_is(Keyword::RAISE)?;
 
         let value = if self.parse_keywords(&[Keyword::USING, Keyword::MESSAGE]) {
@@ -885,7 +885,7 @@ impl<'a> Parser<'a> {
             self.maybe_parse(|parser| parser.parse_expr().map(RaiseStatementValue::Expr))?
         };
 
-        Ok(Statement::Raise(RaiseStatement { value }))
+        Ok(RaiseStatement { value })
     }
 
     /// Parse a COMMENT statement.
     ///
@@ -1024,7 +1024,7 @@ impl<'a> Parser<'a> {
     }
 
     /// Parse `MSCK` statement.
-    pub fn parse_msck(&mut self) -> Result {
+    pub fn parse_msck(&mut self) -> Result {
         let repair = self.parse_keyword(Keyword::REPAIR);
         self.expect_keyword_is(Keyword::TABLE)?;
         let table_name = self.parse_object_name(false)?;
@@ -1048,12 +1048,11 @@ impl<'a> Parser<'a> {
             repair,
             table_name,
             partition_action,
-        }
-        .into())
+        })
     }
 
     /// Parse `TRUNCATE` statement.
-    pub fn parse_truncate(&mut self) -> Result {
+    pub fn parse_truncate(&mut self) -> Result {
         let table = self.parse_keyword(Keyword::TABLE);
 
         let table_names = self
@@ -1095,8 +1094,7 @@ impl<'a> Parser<'a> {
             identity,
             cascade,
             on_cluster,
-        }
-        .into())
+        })
     }
 
     fn parse_cascade_option(&mut self) -> Option {
@@ -1192,7 +1190,7 @@ impl<'a> Parser<'a> {
     }
 
     /// Parse `ANALYZE` statement.
-    pub fn parse_analyze(&mut self) -> Result {
+    pub fn parse_analyze(&mut self) -> Result {
         let has_table_keyword = self.parse_keyword(Keyword::TABLE);
         let table_name = self.parse_object_name(false)?;
         let mut for_columns = false;
@@ -1246,8 +1244,7 @@ impl<'a> Parser<'a> {
             cache_metadata,
             noscan,
             compute_statistics,
-        }
-        .into())
+        })
     }
 
     /// Parse a new expression including wildcard & qualified wildcard.
@@ -1432,7 +1429,7 @@ impl<'a> Parser<'a> {
                 Ok(RenameTable { old_name, new_name })
             })?;
-            Ok(Statement::RenameTable(rename_tables))
+            Ok(rename_tables.into())
         } else {
             self.expected("KEYWORD `TABLE` after RENAME", self.peek_token())
         }
@@ -4875,41 +4872,45 @@ impl<'a> Parser<'a> {
         let create_view_params = self.parse_create_view_params()?;
         if self.parse_keyword(Keyword::TABLE) {
             self.parse_create_table(or_replace, temporary, global, transient)
+                .map(Into::into)
         } else if self.peek_keyword(Keyword::MATERIALIZED)
             || self.peek_keyword(Keyword::VIEW)
             || self.peek_keywords(&[Keyword::SECURE, Keyword::MATERIALIZED, Keyword::VIEW])
             || self.peek_keywords(&[Keyword::SECURE, Keyword::VIEW])
         {
             self.parse_create_view(or_alter, or_replace, temporary, create_view_params)
+                .map(Into::into)
         } else if self.parse_keyword(Keyword::POLICY) {
             self.parse_create_policy()
         } else if self.parse_keyword(Keyword::EXTERNAL) {
-            self.parse_create_external_table(or_replace)
+            self.parse_create_external_table(or_replace).map(Into::into)
        } else if self.parse_keyword(Keyword::FUNCTION) {
             self.parse_create_function(or_alter, or_replace, temporary)
         } else if self.parse_keyword(Keyword::DOMAIN) {
-            self.parse_create_domain()
+            self.parse_create_domain().map(Into::into)
         } else if self.parse_keyword(Keyword::TRIGGER) {
             self.parse_create_trigger(temporary, or_alter, or_replace, false)
+                .map(Into::into)
         } else if self.parse_keywords(&[Keyword::CONSTRAINT, Keyword::TRIGGER]) {
             self.parse_create_trigger(temporary, or_alter, or_replace, true)
+                .map(Into::into)
         } else if self.parse_keyword(Keyword::MACRO) {
             self.parse_create_macro(or_replace, temporary)
         } else if self.parse_keyword(Keyword::SECRET) {
             self.parse_create_secret(or_replace, temporary, persistent)
         } else if self.parse_keyword(Keyword::USER) {
-            self.parse_create_user(or_replace)
+            self.parse_create_user(or_replace).map(Into::into)
         } else if or_replace {
             self.expected(
                 "[EXTERNAL] TABLE or [MATERIALIZED] VIEW or FUNCTION after CREATE OR REPLACE",
                 self.peek_token(),
             )
         } else if self.parse_keyword(Keyword::EXTENSION) {
-            self.parse_create_extension()
+            self.parse_create_extension().map(Into::into)
         } else if self.parse_keyword(Keyword::INDEX) {
-            self.parse_create_index(false)
+            self.parse_create_index(false).map(Into::into)
         } else if self.parse_keywords(&[Keyword::UNIQUE, Keyword::INDEX]) {
-            self.parse_create_index(true)
+            self.parse_create_index(true).map(Into::into)
         } else if self.parse_keyword(Keyword::VIRTUAL) {
             self.parse_create_virtual_table()
         } else if self.parse_keyword(Keyword::SCHEMA) {
@@ -4917,7 +4918,7 @@ impl<'a> Parser<'a> {
         } else if self.parse_keyword(Keyword::DATABASE) {
             self.parse_create_database()
         } else if self.parse_keyword(Keyword::ROLE) {
-            self.parse_create_role()
+            self.parse_create_role().map(Into::into)
         } else if self.parse_keyword(Keyword::SEQUENCE) {
             self.parse_create_sequence(temporary)
         } else if self.parse_keyword(Keyword::TYPE) {
@@ -4925,15 +4926,15 @@ impl<'a> Parser<'a> {
         } else if self.parse_keyword(Keyword::PROCEDURE) {
             self.parse_create_procedure(or_alter)
         } else if self.parse_keyword(Keyword::CONNECTOR) {
-            self.parse_create_connector()
+            self.parse_create_connector().map(Into::into)
         } else if self.parse_keyword(Keyword::OPERATOR) {
             // Check if this is CREATE OPERATOR FAMILY or CREATE OPERATOR CLASS
             if self.parse_keyword(Keyword::FAMILY) {
-                self.parse_create_operator_family()
+                self.parse_create_operator_family().map(Into::into)
             } else if self.parse_keyword(Keyword::CLASS) {
-                self.parse_create_operator_class()
+                self.parse_create_operator_class().map(Into::into)
             } else {
-                self.parse_create_operator()
+                self.parse_create_operator().map(Into::into)
             }
         } else if self.parse_keyword(Keyword::SERVER) {
             self.parse_pg_create_server()
@@ -4942,7 +4943,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    fn parse_create_user(&mut self, or_replace: bool) -> Result<Statement, ParserError> {
+    fn parse_create_user(&mut self, or_replace: bool) -> Result<CreateUser, ParserError> {
         let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
         let name = self.parse_identifier()?;
         let options = self
@@ -4954,7 +4955,7 @@ impl<'a> Parser<'a> {
         } else {
             vec![]
         };
-        Ok(Statement::CreateUser(CreateUser {
+        Ok(CreateUser {
             or_replace,
             if_not_exists,
             name,
@@ -4967,7 +4968,7 @@ impl<'a> Parser<'a> {
                 options: tags,
                 delimiter: KeyValueOptionsDelimiter::Comma,
             },
-        }))
+        })
     }
 
     /// See [DuckDB Docs](https://duckdb.org/docs/sql/statements/create_secret.html) for more details.
@@ -5284,14 +5285,18 @@ impl<'a> Parser<'a> {
     ) -> Result<Statement, ParserError> {
         if dialect_of!(self is HiveDialect) {
             self.parse_hive_create_function(or_replace, temporary)
+                .map(Into::into)
         } else if dialect_of!(self is PostgreSqlDialect | GenericDialect) {
             self.parse_postgres_create_function(or_replace, temporary)
+                .map(Into::into)
         } else if dialect_of!(self is DuckDbDialect) {
             self.parse_create_macro(or_replace, temporary)
         } else if dialect_of!(self is BigQueryDialect) {
             self.parse_bigquery_create_function(or_replace, temporary)
+                .map(Into::into)
         } else if dialect_of!(self is MsSqlDialect) {
             self.parse_mssql_create_function(or_alter, or_replace, temporary)
+                .map(Into::into)
         } else {
             self.prev_token();
             self.expected("an object type after CREATE", self.peek_token())
@@ -5305,7 +5310,7 @@ impl<'a> Parser<'a> {
         &mut self,
         or_replace: bool,
         temporary: bool,
-    ) -> Result<Statement, ParserError> {
+    ) -> Result<CreateFunction, ParserError> {
         let name = self.parse_object_name(false)?;
 
         self.expect_token(&Token::LParen)?;
@@ -5426,7 +5431,7 @@ impl<'a> Parser<'a> {
             }
         }
 
-        Ok(Statement::CreateFunction(CreateFunction {
+        Ok(CreateFunction {
             or_alter: false,
             or_replace,
             temporary,
@@ -5445,7 +5450,7 @@ impl<'a> Parser<'a> {
             determinism_specifier: None,
             options: None,
             remote_connection: None,
-        }))
+        })
     }
 
     /// Parse `CREATE FUNCTION` for [Hive]
@@ -5455,14 +5460,14 @@ impl<'a> Parser<'a> {
         &mut self,
         or_replace: bool,
         temporary: bool,
-    ) -> Result<Statement, ParserError> {
+    ) -> Result<CreateFunction, ParserError> {
         let name = self.parse_object_name(false)?;
         self.expect_keyword_is(Keyword::AS)?;
 
         let body = self.parse_create_function_body_string()?;
         let using = self.parse_optional_create_function_using()?;
 
-        Ok(Statement::CreateFunction(CreateFunction {
+        Ok(CreateFunction {
             or_alter: false,
             or_replace,
             temporary,
@@ -5481,7 +5486,7 @@ impl<'a> Parser<'a> {
             determinism_specifier: None,
             options: None,
             remote_connection: None,
-        }))
+        })
     }
 
     /// Parse `CREATE FUNCTION` for [BigQuery]
@@ -5491,7 +5496,7 @@ impl<'a> Parser<'a> {
         &mut self,
         or_replace: bool,
         temporary: bool,
-    ) -> Result<Statement, ParserError> {
+    ) -> Result<CreateFunction, ParserError> {
         let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
 
         let (name, args) = self.parse_create_function_name_and_params()?;
@@ -5542,7 +5547,7 @@ impl<'a> Parser<'a> {
             None
         };
 
-        Ok(Statement::CreateFunction(CreateFunction {
+        Ok(CreateFunction {
             or_alter: false,
             or_replace,
             temporary,
@@ -5561,7 +5566,7 @@ impl<'a> Parser<'a> {
             parallel: None,
             security: None,
             set_params: vec![],
-        }))
+        })
     }
 
     /// Parse `CREATE FUNCTION` for [MsSql]
@@ -5572,7 +5577,7 @@ impl<'a> Parser<'a> {
         or_alter: bool,
         or_replace: bool,
         temporary: bool,
-    ) -> Result<Statement, ParserError> {
+    ) -> Result<CreateFunction, ParserError> {
         let (name, args) = self.parse_create_function_name_and_params()?;
 
         self.expect_keyword(Keyword::RETURNS)?;
@@ -5633,7 +5638,7 @@ impl<'a> Parser<'a> {
             parser_err!("Unparsable function body", self.peek_token().span.start)?
         };
 
-        Ok(Statement::CreateFunction(CreateFunction {
+        Ok(CreateFunction {
             or_alter,
             or_replace,
             temporary,
@@ -5652,7 +5657,7 @@ impl<'a> Parser<'a> {
             parallel: None,
             security: None,
             set_params: vec![],
-        }))
+        })
     }
 
     fn parse_create_function_name_and_params(
@@ -5746,7 +5751,7 @@ impl<'a> Parser<'a> {
     /// ```sql
     /// DROP TRIGGER [ IF EXISTS ] name ON table_name [ CASCADE | RESTRICT ]
     /// ```
-    pub fn parse_drop_trigger(&mut self) -> Result<Statement, ParserError> {
+    pub fn parse_drop_trigger(&mut self) -> Result<DropTrigger, ParserError> {
         if !dialect_of!(self is PostgreSqlDialect | SQLiteDialect | GenericDialect | MySqlDialect | MsSqlDialect) {
             self.prev_token();
@@ -5767,12 +5772,12 @@ impl<'a> Parser<'a> {
             )),
             None => None,
         };
-        Ok(Statement::DropTrigger(DropTrigger {
+        Ok(DropTrigger {
             if_exists,
             trigger_name,
             table_name,
             option,
-        }))
+        })
     }
 
     /// Parse a `CREATE TRIGGER` statement.
@@ -5782,7 +5787,7 @@ impl<'a> Parser<'a> {
         or_alter: bool,
         or_replace: bool,
         is_constraint: bool,
-    ) -> Result {
+    ) -> Result {
         if !dialect_of!(self is PostgreSqlDialect | SQLiteDialect | GenericDialect | MySqlDialect | MsSqlDialect) {
             self.prev_token();
@@ -5864,8 +5869,7 @@ impl<'a> Parser<'a> {
             statements_as: false,
             statements,
             characteristics,
-        }
-        .into())
+        })
     }
 
     /// Parse the period part of a trigger (`BEFORE`, `AFTER`, etc.).
@@ -6008,7 +6012,7 @@ impl<'a> Parser<'a> {
     pub fn parse_create_external_table(
         &mut self,
         or_replace: bool,
-    ) -> Result<Statement, ParserError> {
+    ) -> Result<CreateTable, ParserError> {
         self.expect_keyword_is(Keyword::TABLE)?;
         let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
         let table_name = self.parse_object_name(false)?;
@@ -6098,7 +6102,7 @@ impl<'a> Parser<'a> {
         or_replace: bool,
         temporary: bool,
         create_view_params: Option,
-    ) -> Result {
+    ) -> Result {
         let secure = self.parse_keyword(Keyword::SECURE);
         let materialized = self.parse_keyword(Keyword::MATERIALIZED);
         self.expect_keyword_is(Keyword::VIEW)?;
@@ -6181,8 +6185,7 @@ impl<'a> Parser<'a> {
             to,
             params: create_view_params,
             name_before_not_exists,
-        }
-        .into())
+        })
     }
 
     /// Parse optional parameters for the `CREATE VIEW` statement supported by [MySQL].
@@ -6244,7 +6247,7 @@ impl<'a> Parser<'a> {
     }
 
     /// Parse a `CREATE ROLE` statement.
-    pub fn parse_create_role(&mut self) -> Result {
+    pub fn parse_create_role(&mut self) -> Result {
         let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
         let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;
 
@@ -6465,8 +6468,7 @@ impl<'a> Parser<'a> {
             user,
             admin,
             authorization_owner,
-        }
-        .into())
+        })
     }
 
     /// Parse an `OWNER` clause.
@@ -6491,7 +6493,7 @@ impl<'a> Parser<'a> {
     }
 
     /// Parses a [Statement::CreateDomain] statement.
-    fn parse_create_domain(&mut self) -> Result<Statement, ParserError> {
+    fn parse_create_domain(&mut self) -> Result<CreateDomain, ParserError> {
         let name = self.parse_object_name(false)?;
         self.expect_keyword_is(Keyword::AS)?;
         let data_type = self.parse_data_type()?;
@@ -6510,13 +6512,13 @@ impl<'a> Parser<'a> {
             constraints.push(constraint);
         }
 
-        Ok(Statement::CreateDomain(CreateDomain {
+        Ok(CreateDomain {
             name,
             data_type,
             collation,
             default,
             constraints,
-        }))
+        })
     }
 
     /// ```sql
@@ -6613,7 +6615,7 @@ impl<'a> Parser<'a> {
     /// ```
     ///
     /// [Hive Documentation](https://cwiki.apache.org/confluence/pages/viewpage.action?pageId=27362034#LanguageManualDDL-CreateDataConnectorCreateConnector)
-    pub fn parse_create_connector(&mut self) -> Result<Statement, ParserError> {
+    pub fn parse_create_connector(&mut self) -> Result<CreateConnector, ParserError> {
         let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
         let name = self.parse_identifier()?;
 
@@ -6637,14 +6639,14 @@ impl<'a> Parser<'a> {
             _ => None,
         };
 
-        Ok(Statement::CreateConnector(CreateConnector {
+        Ok(CreateConnector {
             name,
             if_not_exists,
             connector_type,
             url,
             comment,
             with_dcproperties,
-        }))
+        })
     }
 
     /// Parse an operator name, which can contain special characters like +, -, <, >, =
@@ -6668,7 +6670,7 @@ impl<'a> Parser<'a> {
     /// Parse a [Statement::CreateOperator]
     ///
     /// [PostgreSQL Documentation](https://www.postgresql.org/docs/current/sql-createoperator.html)
-    pub fn parse_create_operator(&mut self) -> Result<Statement, ParserError> {
+    pub fn parse_create_operator(&mut self) -> Result<CreateOperator, ParserError> {
         let name = self.parse_operator_name()?;
 
         self.expect_token(&Token::LParen)?;
@@ -6777,34 +6779,31 @@ impl<'a> Parser<'a> {
             ParserError::ParserError("CREATE OPERATOR requires FUNCTION parameter".to_string())
         })?;
 
-        Ok(Statement::CreateOperator(CreateOperator {
+        Ok(CreateOperator {
             name,
             function,
             is_procedure,
             left_arg,
             right_arg,
             options,
-        }))
+        })
     }
 
     /// Parse a [Statement::CreateOperatorFamily]
     ///
     /// [PostgreSQL Documentation](https://www.postgresql.org/docs/current/sql-createopfamily.html)
-    pub fn parse_create_operator_family(&mut self) -> Result<Statement, ParserError> {
+    pub fn parse_create_operator_family(&mut self) -> Result<CreateOperatorFamily, ParserError> {
         let name = self.parse_object_name(false)?;
         self.expect_keyword(Keyword::USING)?;
         let using = self.parse_identifier()?;
 
-        Ok(Statement::CreateOperatorFamily(CreateOperatorFamily {
-            name,
-            using,
-        }))
+        Ok(CreateOperatorFamily { name, using })
     }
 
     /// Parse a [Statement::CreateOperatorClass]
     ///
     /// [PostgreSQL Documentation](https://www.postgresql.org/docs/current/sql-createopclass.html)
-    pub fn parse_create_operator_class(&mut self) -> Result<Statement, ParserError> {
+    pub fn parse_create_operator_class(&mut self) -> Result<CreateOperatorClass, ParserError> {
         let name = self.parse_object_name(false)?;
         let default = self.parse_keyword(Keyword::DEFAULT);
         self.expect_keywords(&[Keyword::FOR, Keyword::TYPE])?;
@@ -6918,14 +6917,14 @@ impl<'a> Parser<'a> {
             }
         }
 
-        Ok(Statement::CreateOperatorClass(CreateOperatorClass {
+        Ok(CreateOperatorClass {
             name,
             default,
             for_type,
             using,
             family,
             items,
-        }))
+        })
     }
 
     /// Parse a `DROP` statement.
@@ -6961,19 +6960,19 @@ impl<'a> Parser<'a> {
         } else if self.parse_keyword(Keyword::STREAM) {
             ObjectType::Stream
         } else if self.parse_keyword(Keyword::FUNCTION) {
-            return self.parse_drop_function();
+            return self.parse_drop_function().map(Into::into);
         } else if self.parse_keyword(Keyword::POLICY) {
             return self.parse_drop_policy();
         } else if self.parse_keyword(Keyword::CONNECTOR) {
             return self.parse_drop_connector();
         } else if self.parse_keyword(Keyword::DOMAIN) {
-            return self.parse_drop_domain();
+            return self.parse_drop_domain().map(Into::into);
         } else if self.parse_keyword(Keyword::PROCEDURE) {
             return self.parse_drop_procedure();
         } else if self.parse_keyword(Keyword::SECRET) {
             return self.parse_drop_secret(temporary, persistent);
         } else if self.parse_keyword(Keyword::TRIGGER) {
-            return self.parse_drop_trigger();
+            return self.parse_drop_trigger().map(Into::into);
         } else if self.parse_keyword(Keyword::EXTENSION) {
             return self.parse_drop_extension();
         } else if self.parse_keyword(Keyword::OPERATOR) {
@@ -7038,15 +7037,15 @@ impl<'a> Parser<'a> {
     /// DROP FUNCTION [ IF EXISTS ] name [ ( [ [ argmode ] [ argname ] argtype [, ...] ] ) ] [, ...]
     /// [ CASCADE | RESTRICT ]
     /// ```
-    fn parse_drop_function(&mut self) -> Result<Statement, ParserError> {
+    fn parse_drop_function(&mut self) -> Result<DropFunction, ParserError> {
         let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
         let func_desc = self.parse_comma_separated(Parser::parse_function_desc)?;
         let drop_behavior = self.parse_optional_drop_behavior();
-        Ok(Statement::DropFunction(DropFunction {
+        Ok(DropFunction {
             if_exists,
             func_desc,
             drop_behavior,
-        }))
+        })
     }
 
     /// ```sql
@@ -7081,15 +7080,15 @@ impl<'a> Parser<'a> {
     /// ```sql
     /// DROP DOMAIN [ IF EXISTS ] name [ CASCADE | RESTRICT ]
     /// ```
-    fn parse_drop_domain(&mut self) -> Result<Statement, ParserError> {
+    fn parse_drop_domain(&mut self) -> Result<DropDomain, ParserError> {
         let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
         let name = self.parse_object_name(false)?;
         let drop_behavior = self.parse_optional_drop_behavior();
-        Ok(Statement::DropDomain(DropDomain {
+        Ok(DropDomain {
             if_exists,
             name,
             drop_behavior,
-        }))
+        })
     }
 
     /// ```sql
@@ -7603,7 +7602,7 @@ impl<'a> Parser<'a> {
     }
 
     /// Parse a `CREATE INDEX` statement.
-    pub fn parse_create_index(&mut self, unique: bool) -> Result<Statement, ParserError> {
+    pub fn parse_create_index(&mut self, unique: bool) -> Result<CreateIndex, ParserError> {
         let concurrently = self.parse_keyword(Keyword::CONCURRENTLY);
         let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
 
@@ -7677,7 +7676,7 @@ impl<'a> Parser<'a> {
             alter_options.push(self.parse_alter_table_operation()?)
         }
 
-        Ok(Statement::CreateIndex(CreateIndex {
+        Ok(CreateIndex {
             name: index_name,
             table_name,
             using,
@@ -7691,11 +7690,11 @@ impl<'a> Parser<'a> {
             predicate,
             index_options,
             alter_options,
-        }))
+        })
     }
 
     /// Parse a `CREATE EXTENSION` statement.
-    pub fn parse_create_extension(&mut self) -> Result {
+    pub fn parse_create_extension(&mut self) -> Result {
         let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
         let name = self.parse_identifier()?;
 
@@ -7725,8 +7724,7 @@ impl<'a> Parser<'a> {
             schema,
             version,
             cascade,
-        }
-        .into())
+        })
     }
 
     /// Parse a PostgreSQL-specific [Statement::DropExtension] statement.
@@ -8004,7 +8002,7 @@ impl<'a> Parser<'a> {
         temporary: bool,
         global: Option<bool>,
         transient: bool,
-    ) -> Result<Statement, ParserError> {
+    ) -> Result<CreateTable, ParserError> {
         let allow_unquoted_hyphen = dialect_of!(self is BigQueryDialect);
         let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
         let table_name = self.parse_object_name(allow_unquoted_hyphen)?;
@@ -10158,17 +10156,17 @@ impl<'a> Parser<'a> {
             }
             Keyword::OPERATOR => {
                 if self.parse_keyword(Keyword::FAMILY) {
-                    self.parse_alter_operator_family()
+                    self.parse_alter_operator_family().map(Into::into)
                 } else if self.parse_keyword(Keyword::CLASS) {
-                    self.parse_alter_operator_class()
+                    self.parse_alter_operator_class().map(Into::into)
                 } else {
-                    self.parse_alter_operator()
+                    self.parse_alter_operator().map(Into::into)
                 }
             }
             Keyword::ROLE => self.parse_alter_role(),
             Keyword::POLICY => self.parse_alter_policy(),
             Keyword::CONNECTOR => self.parse_alter_connector(),
-            Keyword::USER => self.parse_alter_user(),
+            Keyword::USER => self.parse_alter_user().map(Into::into),
             // unreachable because expect_one_of_keywords used above
             unexpected_keyword => Err(ParserError::ParserError(
                 format!("Internal parser error: expected any of {{VIEW, TYPE, TABLE, INDEX, ROLE, POLICY, CONNECTOR, ICEBERG, SCHEMA, USER, OPERATOR}}, got {unexpected_keyword:?}"),
@@ -10290,7 +10288,7 @@ impl<'a> Parser<'a> {
     /// Parse a [Statement::AlterOperator]
     ///
     /// [PostgreSQL Documentation](https://www.postgresql.org/docs/current/sql-alteroperator.html)
-    pub fn parse_alter_operator(&mut self) -> Result<Statement, ParserError> {
+    pub fn parse_alter_operator(&mut self) -> Result<AlterOperator, ParserError> {
         let name = self.parse_operator_name()?;
 
         // Parse (left_type, right_type)
@@ -10389,12 +10387,12 @@ impl<'a> Parser<'a> {
             );
         };
 
-        Ok(Statement::AlterOperator(AlterOperator {
+        Ok(AlterOperator {
             name,
             left_type,
             right_type,
             operation,
-        }))
+        })
     }
 
     /// Parse an operator item for ALTER OPERATOR FAMILY ADD operations
@@ -10527,7 +10525,7 @@ impl<'a> Parser<'a> {
     /// Parse a [Statement::AlterOperatorFamily]
     /// See <https://www.postgresql.org/docs/current/sql-alteropfamily.html>
-    pub fn parse_alter_operator_family(&mut self) -> Result<Statement, ParserError> {
+    pub fn parse_alter_operator_family(&mut self) -> Result<AlterOperatorFamily, ParserError> {
         let name = self.parse_object_name(false)?;
         self.expect_keyword(Keyword::USING)?;
         let using = self.parse_identifier()?;
@@ -10554,17 +10552,17 @@ impl<'a> Parser<'a> {
             );
         };
 
-        Ok(Statement::AlterOperatorFamily(AlterOperatorFamily {
+        Ok(AlterOperatorFamily {
             name,
             using,
             operation,
-        }))
+        })
     }
 
     /// Parse an `ALTER OPERATOR CLASS` statement.
     ///
     /// Handles operations like `RENAME TO`, `OWNER TO`, and `SET SCHEMA`.
-    pub fn parse_alter_operator_class(&mut self) -> Result<Statement, ParserError> {
+    pub fn parse_alter_operator_class(&mut self) -> Result<AlterOperatorClass, ParserError> {
         let name = self.parse_object_name(false)?;
         self.expect_keyword(Keyword::USING)?;
         let using = self.parse_identifier()?;
@@ -10585,11 +10583,11 @@ impl<'a> Parser<'a> {
             );
         };
 
-        Ok(Statement::AlterOperatorClass(AlterOperatorClass {
+        Ok(AlterOperatorClass {
             name,
             using,
             operation,
-        }))
+        })
     }
 
     /// Parse an `ALTER SCHEMA` statement.
@@ -16803,7 +16801,7 @@ impl<'a> Parser<'a> {
             None
         };
 
-        Ok(Statement::Insert(Insert {
+        Ok(Insert {
             insert_token: insert_token.into(),
             or,
             table: table_object,
@@ -16824,7 +16822,8 @@ impl<'a> Parser<'a> {
             insert_alias,
             settings,
             format_clause,
-        }))
+        }
+        .into())
     }
 }
 
@@ -19084,15 +19083,15 @@ impl<'a> Parser<'a> {
     }
 
     /// Parses a RESET statement
-    fn parse_reset(&mut self) -> Result<Statement, ParserError> {
+    fn parse_reset(&mut self) -> Result<ResetStatement, ParserError> {
         if self.parse_keyword(Keyword::ALL) {
-            return Ok(Statement::Reset(ResetStatement { reset: Reset::ALL }));
+            return Ok(ResetStatement { reset: Reset::ALL });
         }
         let obj = self.parse_object_name(false)?;
 
-        Ok(Statement::Reset(ResetStatement {
+        Ok(ResetStatement {
             reset: Reset::ConfigurationParameter(obj),
-        }))
+        })
     }
 }
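
Not part of the patch — a minimal sketch of how the reworked builder API composes after this change, mirroring the round-trip exercised in the updated `stmt_create_table` test. The `sqlparser` crate paths and derives (`Clone`, `PartialEq` on the builder) are assumed from the files touched above.

```rust
use sqlparser::ast::helpers::stmt_create_table::CreateTableBuilder;
use sqlparser::ast::{CreateTable, Ident, ObjectName, Statement};

fn builder_round_trip() {
    // `build()` now yields the concrete `CreateTable` node rather than a `Statement`.
    let builder = CreateTableBuilder::new(ObjectName::from(vec![Ident::new("t")]));
    let create_table: CreateTable = builder.clone().build();

    // Callers that still need a `Statement` convert explicitly, as the
    // updated parser call sites do with `.map(Into::into)`.
    let stmt: Statement = create_table.clone().into();

    // Recover the builder either infallibly from `CreateTable` (new `From` impl)
    // or fallibly from an arbitrary `Statement` (existing `TryFrom` impl).
    let from_node = CreateTableBuilder::from(create_table);
    let from_stmt = CreateTableBuilder::try_from(stmt).expect("was a CREATE TABLE");
    assert_eq!(from_node, from_stmt);
    assert_eq!(builder, from_node);
}
```

The explicit `.into()` at the call sites keeps existing consumers of `Statement` working, while new callers can stay on the typed `CreateTable`/`Merge`/`AlterUser` nodes returned by the parse functions.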