8 changes: 5 additions & 3 deletions lib/superset/dataset/update_schema.rb
@@ -2,13 +2,14 @@ module Superset
module Dataset
class UpdateSchema < Superset::Request

attr_reader :source_dataset_id, :target_database_id, :target_schema, :remove_copy_suffix
attr_reader :source_dataset_id, :target_database_id, :target_schema, :remove_copy_suffix, :catalog

def initialize(source_dataset_id: , target_database_id: , target_schema: , remove_copy_suffix: false)
def initialize(source_dataset_id: , target_database_id: , target_schema: , remove_copy_suffix: false, catalog: nil)
@source_dataset_id = source_dataset_id
@target_database_id = target_database_id
@target_schema = target_schema
@remove_copy_suffix = remove_copy_suffix
@catalog = catalog
end

def perform
@@ -34,9 +35,10 @@ def response
def params_updated
@params_updated ||= begin
new_params = source_dataset.slice(*acceptable_attributes).with_indifferent_access

# primary database and schema changes
new_params.merge!("database_id": target_database_id) # add the target database id
new_params['catalog'] = catalog
new_params['schema'] = target_schema
new_params['owners'] = new_params['owners'].map {|o| o['id'] } # expects an array of user ids
new_params['table_name'] = new_params['table_name'].gsub(/ \(COPY\)/, '') if remove_copy_suffix
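A hedged usage sketch of the updated class with the new catalog argument. The keyword arguments and the perform entry point come from the diff above; the values are illustrative (the ids happen to mirror the spec fixtures further down):

Superset::Dataset::UpdateSchema.new(
  source_dataset_id:  226,            # dataset to repoint at the new location
  target_database_id: 6,              # target database connection id in Superset
  target_schema:      'schema_three', # schema on the target database
  catalog:            nil,            # optional; defaults to nil when not supplied
  remove_copy_suffix: false           # set true to strip a trailing " (COPY)" from the table name
).perform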
2 changes: 1 addition & 1 deletion lib/superset/services/duplicate_dashboard.rb
@@ -91,7 +91,7 @@ def dataset_duplication_tracker
def duplicate_source_dashboard_datasets
source_dashboard_datasets.each do |dataset|
# duplicate the dataset, renaming to use the target_schema as a suffix
# reason: there is a bug (or feature) in the SS API where a dataset name must be unique when duplicating.
# (note however renaming in the GUI to a dup name works fine)
new_dataset_name = "#{dataset[:datasource_name]}-#{target_schema}"
existing_datasets = Superset::Dataset::List.new(title_equals: new_dataset_name, schema_equals: target_schema).result
10 changes: 8 additions & 2 deletions lib/superset/services/import_dashboard_across_environment.rb
@@ -4,8 +4,10 @@
must already exist as a database connection in the target superset environment.

Currently handles only 1 Database yaml file in the zip file. (i.e. only 1 common database connection per dashboard's datasets)
Targeted towards Dashboards for an individual Client's Database Data only.
Most often used in EXTERNAL-facing embedded client dashboards.

Required Attributes:
- target_database_yaml_file - location of the target database yaml config file
- target_database_schema - the schema name to be used in the target database
- dashboard_export_zip - location of the source dashboard export zip file to be transferred to a new superset Env
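A sketch of how the service might be invoked. The class name is inferred from the file path and the keyword arguments from the Required Attributes list above; the file locations are hypothetical and the perform entry point is assumed, so treat this as illustrative rather than the exact signature:

Superset::Services::ImportDashboardAcrossEnvironment.new(
  target_database_yaml_file: '/tmp/target_database.yaml',                  # target database yaml config (hypothetical path)
  target_database_schema:    'acme',                                       # schema name to use in the target database (hypothetical)
  dashboard_export_zip:      '/tmp/dashboard_export_20240821T001536.zip'   # source dashboard export zip (hypothetical path)
).perform                                                                  # entry point assumed to match the other services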
@@ -78,6 +80,10 @@ def update_dataset_configs
dashboard_config[:datasets].each do |dataset|
dataset[:content][:database_uuid] = dashboard_config[:databases].first[:content][:uuid]
dataset[:content][:schema] = target_database_schema

# clearing out the source's catalog allows superset to automatically fall back to the default catalog on the target
dataset[:content][:catalog] = nil
Review comment (Member, Author):
Note: at RDY we do not use catalogs at the moment for high-level namespace architecture, so for our purposes nil is currently fine.


stringified_content = deep_transform_keys_to_strings(dataset[:content])
File.open(dataset[:filename], 'w') { |f| f.write stringified_content.to_yaml }
end
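The effect of clearing the catalog is easiest to see in the rewritten dataset yaml: a nil value serialises as an empty entry, which the target environment then resolves to its own default catalog. A minimal Ruby sketch with hypothetical content (the keys and values below are illustrative only):

require 'yaml'

content = {
  'database_uuid' => '2ab6e405-dd63-45e1-a1b8-5b525d0c5a08', # uuid of the single database yaml in the zip (hypothetical)
  'schema'        => 'acme',                                 # target_database_schema (hypothetical)
  'catalog'       => nil                                     # cleared, so the target falls back to its default catalog
}

puts content.to_yaml
# ---
# database_uuid: 2ab6e405-dd63-45e1-a1b8-5b525d0c5a08
# schema: acme
# catalog: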
@@ -103,7 +109,7 @@ def new_database_yaml_file_path

def dashboard_export_root_path
# locate the unzipped dashboard_export_* directory as named by superset app, e.g. dashboard_export_20240821T001536
@dashboard_export_root_path ||= begin
pattern = File.join(dashboard_config[:tmp_uniq_dashboard_path], 'dashboard_export_*')
Dir.glob(pattern).first
end
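A quick illustration of the glob above; the tmp_uniq_dashboard_path value is hypothetical, while the dashboard_export_* naming comes from the superset export itself:

pattern = File.join('/tmp/dashboard_import_123', 'dashboard_export_*')
Dir.glob(pattern).first
# => "/tmp/dashboard_import_123/dashboard_export_20240821T001536"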
26 changes: 19 additions & 7 deletions spec/superset/dataset/update_schema_spec.rb
@@ -2,16 +2,19 @@

RSpec.describe Superset::Dataset::UpdateSchema do
subject { described_class.new(
source_dataset_id: source_dataset_id,
target_database_id: target_database_id,
target_schema: target_schema,
remove_copy_suffix: remove_copy_suffix) }
remove_copy_suffix: remove_copy_suffix,
catalog: catalog
) }

let(:source_dataset_id) { 226 }
let(:source_schema) { 'schema_one' }
let(:target_database_id) { 6 }
let(:target_schema) { 'schema_three' }
let(:remove_copy_suffix) { false }
let(:catalog) { nil }

let(:source_dataset) do
{
@@ -87,20 +90,20 @@
context 'with invalid params' do
context 'source_dataset_id is empty' do
let(:source_dataset_id) { nil }

specify do
expect { subject.perform }.to raise_error(RuntimeError, "Error: source_dataset_id integer is required")
end
end

context 'target_database_id is empty' do
let(:target_database_id) { nil }

specify do
expect { subject.perform }.to raise_error(RuntimeError, "Error: target_database_id integer is required")
end
end

context 'target_schema is empty' do
let(:target_schema) { nil }

@@ -129,9 +132,10 @@
end

describe '#params_updated' do
context 'with remove_copy_suffix true' do
context 'with remove_copy_suffix false' do
specify 'set the new target schema and target database correctly' do
expect(subject.params_updated['schema']).to eq(target_schema)
expect(subject.params_updated['catalog']).to eq(nil)
expect(subject.params_updated['database_id']).to eq(target_database_id)
expect(subject.params_updated['table_name']).to eq('JR SP Service Counts (COPY)') # unchanged if remove_copy_suffix is false
end
@@ -144,5 +148,13 @@
expect(subject.params_updated['table_name']).to eq('JR SP Service Counts') # removed (COPY) suffix
end
end

context 'with catalog set to blah' do
let(:catalog) { 'blah' }

specify do
expect(subject.params_updated['catalog']).to eq('blah')
end
end
end
end
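Tying the new specs together, a hedged sketch of what params_updated is expected to contain once a catalog is supplied. The values are the spec fixtures above; running this outside the specs assumes a reachable Superset API, since params_updated reads the source dataset first (the specs stub that call):

updater = Superset::Dataset::UpdateSchema.new(
  source_dataset_id:  226,
  target_database_id: 6,
  target_schema:      'schema_three',
  catalog:            'blah'
)

updater.params_updated['schema']      # => "schema_three"
updater.params_updated['catalog']     # => "blah" (nil when catalog is not passed)
updater.params_updated['database_id'] # => 6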