|
| 1 | +// Copyright 2021 Datafuse Labs |
| 2 | +// |
| 3 | +// Licensed under the Apache License, Version 2.0 (the "License"); |
| 4 | +// you may not use this file except in compliance with the License. |
| 5 | +// You may obtain a copy of the License at |
| 6 | +// |
| 7 | +// http://www.apache.org/licenses/LICENSE-2.0 |
| 8 | +// |
| 9 | +// Unless required by applicable law or agreed to in writing, software |
| 10 | +// distributed under the License is distributed on an "AS IS" BASIS, |
| 11 | +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| 12 | +// See the License for the specific language governing permissions and |
| 13 | +// limitations under the License. |
| 14 | + |
| 15 | +use std::sync::Arc; |
| 16 | + |
| 17 | +use databend_common_ast::ast::UriLocation; |
| 18 | +use databend_common_catalog::table::TableExt; |
| 19 | +use databend_common_exception::ErrorCode; |
| 20 | +use databend_common_exception::Result; |
| 21 | +use databend_common_meta_app::schema::DatabaseType; |
| 22 | +use databend_common_sql::binder::parse_storage_params_from_uri; |
| 23 | +use databend_common_sql::plans::ModifyTableConnectionPlan; |
| 24 | +use databend_common_storage::check_operator; |
| 25 | +use databend_common_storage::init_operator; |
| 26 | +use databend_common_storages_stream::stream_table::STREAM_ENGINE; |
| 27 | +use databend_common_storages_view::view_table::VIEW_ENGINE; |
| 28 | +use log::debug; |
| 29 | + |
| 30 | +use crate::interpreters::interpreter_table_add_column::commit_table_meta; |
| 31 | +use crate::interpreters::Interpreter; |
| 32 | +use crate::pipelines::PipelineBuildResult; |
| 33 | +use crate::sessions::QueryContext; |
| 34 | +use crate::sessions::TableContext; |
| 35 | + |
/// Interpreter for `ALTER TABLE ... CONNECTION = (...)`.
///
/// Updates the storage connection parameters (e.g. credentials, endpoint)
/// of an external table, while keeping the storage type and root unchanged.
pub struct ModifyTableConnectionInterpreter {
    // Session-scoped query context used for catalog lookup and tenant info.
    ctx: Arc<QueryContext>,
    // Bound plan carrying catalog/database/table names and the new connection options.
    plan: ModifyTableConnectionPlan,
}
| 40 | + |
| 41 | +impl ModifyTableConnectionInterpreter { |
| 42 | + pub fn try_create(ctx: Arc<QueryContext>, plan: ModifyTableConnectionPlan) -> Result<Self> { |
| 43 | + Ok(ModifyTableConnectionInterpreter { ctx, plan }) |
| 44 | + } |
| 45 | +} |
| 46 | + |
| 47 | +#[async_trait::async_trait] |
| 48 | +impl Interpreter for ModifyTableConnectionInterpreter { |
| 49 | + fn name(&self) -> &str { |
| 50 | + "ModifyTableConnectionInterpreter" |
| 51 | + } |
| 52 | + |
| 53 | + fn is_ddl(&self) -> bool { |
| 54 | + true |
| 55 | + } |
| 56 | + |
| 57 | + #[async_backtrace::framed] |
| 58 | + async fn execute2(&self) -> Result<PipelineBuildResult> { |
| 59 | + let catalog_name = self.plan.catalog.as_str(); |
| 60 | + let db_name = self.plan.database.as_str(); |
| 61 | + let tbl_name = self.plan.table.as_str(); |
| 62 | + |
| 63 | + let table = self |
| 64 | + .ctx |
| 65 | + .get_catalog(catalog_name) |
| 66 | + .await? |
| 67 | + .get_table(&self.ctx.get_tenant(), db_name, tbl_name) |
| 68 | + .await?; |
| 69 | + |
| 70 | + // check mutability |
| 71 | + table.check_mutable()?; |
| 72 | + |
| 73 | + let table_info = table.get_table_info(); |
| 74 | + let engine = table.engine(); |
| 75 | + if matches!(engine, VIEW_ENGINE | STREAM_ENGINE) { |
| 76 | + return Err(ErrorCode::TableEngineNotSupported(format!( |
| 77 | + "{}.{} engine is {} that doesn't support alter", |
| 78 | + &self.plan.database, &self.plan.table, engine |
| 79 | + ))); |
| 80 | + } |
| 81 | + if table_info.db_type != DatabaseType::NormalDB { |
| 82 | + return Err(ErrorCode::TableEngineNotSupported(format!( |
| 83 | + "{}.{} doesn't support alter", |
| 84 | + &self.plan.database, &self.plan.table |
| 85 | + ))); |
| 86 | + } |
| 87 | + let Some(old_sp) = table_info.meta.storage_params.clone() else { |
| 88 | + return Err(ErrorCode::TableEngineNotSupported(format!( |
| 89 | + "{}.{} is not an external table, cannot alter connection", |
| 90 | + &self.plan.database, &self.plan.table |
| 91 | + ))); |
| 92 | + }; |
| 93 | + |
| 94 | + debug!("old storage params before update: {old_sp:?}"); |
| 95 | + |
| 96 | + // This location is used to parse the storage parameters from the URI. |
| 97 | + // |
| 98 | + // We don't really this this location to replace the old one, we just parse it out and change the storage parameters on needs. |
| 99 | + let mut location = UriLocation::new( |
| 100 | + // The storage type is not changeable, we just use the old one. |
| 101 | + old_sp.storage_type(), |
| 102 | + // name is not changeable, we just use a dummy value here. |
| 103 | + "test".to_string(), |
| 104 | + // root is not changeable, we just use a dummy value here. |
| 105 | + "/".to_string(), |
| 106 | + self.plan.new_connection.clone(), |
| 107 | + ); |
| 108 | + // NOTE: never use this storage params directly. |
| 109 | + let updated_sp = parse_storage_params_from_uri( |
| 110 | + &mut location, |
| 111 | + Some(self.ctx.as_ref() as _), |
| 112 | + "when ALTER TABLE CONNECTION", |
| 113 | + ) |
| 114 | + .await?; |
| 115 | + |
| 116 | + debug!("storage params used for update: {updated_sp:?}"); |
| 117 | + let new_sp = old_sp.apply_update(updated_sp)?; |
| 118 | + debug!("new storage params been updated: {new_sp:?}"); |
| 119 | + |
| 120 | + // Check the storage params via init operator. |
| 121 | + let op = init_operator(&new_sp).map_err(|err| { |
| 122 | + ErrorCode::InvalidConfig(format!( |
| 123 | + "Input storage config for stage is invalid: {err:?}" |
| 124 | + )) |
| 125 | + })?; |
| 126 | + check_operator(&op, &new_sp).await?; |
| 127 | + |
| 128 | + let catalog = self.ctx.get_catalog(self.plan.catalog.as_str()).await?; |
| 129 | + let mut new_table_meta = table_info.meta.clone(); |
| 130 | + new_table_meta.storage_params = Some(new_sp); |
| 131 | + |
| 132 | + commit_table_meta( |
| 133 | + &self.ctx, |
| 134 | + table.as_ref(), |
| 135 | + table_info, |
| 136 | + new_table_meta, |
| 137 | + catalog, |
| 138 | + ) |
| 139 | + .await?; |
| 140 | + |
| 141 | + Ok(PipelineBuildResult::create()) |
| 142 | + } |
| 143 | +} |
0 commit comments