Commit af9ad4f

Merge pull request #14 from rdytech/NEP-17493-save-embedded-domain-in-duplicate-process
[NEP-17493] duplicate dashboard should also create embedded setting
2 parents 328f015 + f15392b commit af9ad4f

12 files changed: +224 / -113 lines


lib/superset/dashboard/embedded/put.rb

Lines changed: 7 additions & 4 deletions
@@ -2,19 +2,22 @@ module Superset
   module Dashboard
     module Embedded
       class Put < Superset::Request
-        attr_reader :dashboard_id, :embedded_domain
+        attr_reader :dashboard_id, :allowed_domains
 
-        def initialize(dashboard_id: , embedded_domain: )
+        def initialize(dashboard_id: , allowed_domains: )
           @dashboard_id = dashboard_id
-          @embedded_domain = embedded_domain
+          @allowed_domains = allowed_domains
         end
 
         def response
+          raise InvalidParameterError, 'dashboard_id integer is required' if dashboard_id.nil? || dashboard_id.class != Integer
+          raise InvalidParameterError, 'allowed_domains array is required' if allowed_domains.nil? || allowed_domains.class != Array
+
           @response ||= client.put(route, params)
         end
 
         def params
-          { "allowed_domains": [embedded_domain] }
+          { "allowed_domains": allowed_domains }
         end
 
         def uuid
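
For reference, a minimal usage sketch of the endpoint as it now reads; the dashboard id and domain below are illustrative, and the gem's client credentials are assumed to be configured elsewhere:

  put = Superset::Dashboard::Embedded::Put.new(
    dashboard_id:    101,                               # illustrative; must be an Integer
    allowed_domains: ['https://analytics.example.com']  # illustrative; must be an Array
  )
  put.response   # raises InvalidParameterError if either param is missing or mis-typed
  put.uuid       # uuid of the embedded dashboard config (see the spec further below)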

lib/superset/dataset/duplicate.rb

Lines changed: 9 additions & 5 deletions
@@ -15,9 +15,9 @@ def initialize(source_dataset_id: :source_dataset_id, new_dataset_name: :new_dat
      def perform
        raise "Error: source_dataset_id integer is required" unless source_dataset_id.present? && source_dataset_id.is_a?(Integer)
        raise "Error: new_dataset_name string is required" unless new_dataset_name.present? && new_dataset_name.is_a?(String)
-        raise "Error: new_dataset_name already in use" if new_dataset_name_already_in_use?
+        raise "Error: new_dataset_name already in use in this schema: #{new_dataset_name}. Suggest you add (COPY) as a suffix to the name" if new_dataset_name_already_in_use?
 
-        logger.info(" Start Duplicate Source Dataset Id: #{source_dataset_id} to New Dataset Name: #{new_dataset_name}")
+        logger.info(" Duplicating Source Dataset in schema #{source_dataset.schema}. Source Dataset Id: #{source_dataset_id} to New Dataset Name: #{new_dataset_name}")
 
        new_dataset_id
      end
@@ -34,10 +34,14 @@ def params
      end
 
      private
-
+
+      def source_dataset
+        @source_dataset ||= Dataset::Get.new(source_dataset_id).perform
+      end
+
+      # The API demands that the new_dataset_name be uniq within the schema it points to.
      def new_dataset_name_already_in_use?
-        existing_datasets = List.new(title_equals: new_dataset_name)
-        existing_datasets.result.any?
+        Dataset::List.new(title_equals: new_dataset_name, schema_equals: source_dataset.schema).result.any?
      end
 
      def new_dataset_id
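
A rough sketch of calling the duplication on its own; the id and name below are illustrative, and the new name must now be unique within the source dataset's schema:

  new_dataset_id = Superset::Dataset::Duplicate.new(
    source_dataset_id: 222,                        # illustrative source dataset id
    new_dataset_name:  'video_game_sales (COPY)'   # illustrative; must not already exist in the source schema
  ).perform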

lib/superset/dataset/get.rb

Lines changed: 9 additions & 3 deletions
@@ -12,10 +12,19 @@ def self.call(id)
        self.new(id).list
      end
 
+      def perform
+        response
+        self
+      end
+
      def rows
        [ [title, schema, database_name, database_id] ]
      end
 
+      def schema
+        result['schema']
+      end
+
      private
 
      def route
@@ -39,9 +48,6 @@ def title
        result['name']
      end
 
-      def schema
-        result['schema']
-      end
 
      def sql
        ['sql']
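
The new perform/schema pairing can be chained roughly as follows (dataset id illustrative):

  dataset = Superset::Dataset::Get.new(222).perform   # fires the GET request and returns self
  dataset.schema                                      # schema name from the API result, e.g. 'public'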

lib/superset/dataset/list.rb

Lines changed: 11 additions & 6 deletions
@@ -1,11 +1,12 @@
 module Superset
   module Dataset
     class List < Superset::Request
-      attr_reader :title_contains, :title_equals
+      attr_reader :title_contains, :title_equals, :schema_equals
 
-      def initialize(page_num: 0, title_contains: '', title_equals: '')
+      def initialize(page_num: 0, title_contains: '', title_equals: '', schema_equals: '')
        @title_contains = title_contains
        @title_equals = title_equals
+        @schema_equals = schema_equals
        super(page_num: page_num)
      end
 
@@ -20,10 +21,14 @@ def route
      end
 
      def filters
-        raise 'ERROR: only one filter supported currently' if title_contains.present? && title_equals.present?
-
-        return "filters:!((col:table_name,opr:ct,value:'#{title_contains}'))," if title_contains.present?
-        return "filters:!((col:table_name,opr:eq,value:'#{title_equals}'))," if title_equals.present?
+        # TODO filtering across all list classes can be refactored to support multiple options in a more flexible way
+        filters = []
+        filters << "(col:table_name,opr:ct,value:'#{title_contains}')" if title_contains.present?
+        filters << "(col:table_name,opr:eq,value:'#{title_equals}')" if title_equals.present?
+        filters << "(col:schema,opr:eq,value:'#{schema_equals}')" if schema_equals.present?
+        unless filters.empty?
+          "filters:!(" + filters.join(',') + "),"
+        end
      end
 
      def list_attributes
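
With both filters supplied, the query string combines them as exercised in the updated spec further below (names illustrative):

  list = Superset::Dataset::List.new(title_equals: 'birth_days', schema_equals: 'schema_one')
  list.query_params
  # => "filters:!((col:table_name,opr:eq,value:'birth_days'),(col:schema,opr:eq,value:'schema_one')),page:0,page_size:100"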

lib/superset/dataset/update_schema.rb

Lines changed: 4 additions & 2 deletions
@@ -2,12 +2,13 @@ module Superset
   module Dataset
     class UpdateSchema < Superset::Request
 
-      attr_reader :source_dataset_id, :target_database_id, :target_schema
+      attr_reader :source_dataset_id, :target_database_id, :target_schema, :remove_copy_suffix
 
-      def initialize(source_dataset_id: :source_dataset_id, target_database_id: :target_database_id, target_schema: :target_schema)
+      def initialize(source_dataset_id: :source_dataset_id, target_database_id: :target_database_id, target_schema: :target_schema, remove_copy_suffix: false)
        @source_dataset_id = source_dataset_id
        @target_database_id = target_database_id
        @target_schema = target_schema
+        @remove_copy_suffix = remove_copy_suffix
      end
 
      def perform
@@ -35,6 +36,7 @@ def params_updated
        new_params.merge!("database_id": target_database_id) # add the target database id
        new_params['schema'] = target_schema
        new_params['owners'] = new_params['owners'].map {|o| o['id'] } # expects an array of user ids
+        new_params['table_name'] = new_params['table_name'].gsub(/ \(COPY\)/, '') if remove_copy_suffix
 
        # remove unwanted fields from metrics and columns arrays
        new_params['metrics'].each {|m| m.delete('changed_on') }
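
A sketch of repointing a duplicated dataset and stripping the suffix in one call; the ids and schema are illustrative:

  Superset::Dataset::UpdateSchema.new(
    source_dataset_id:  1234,     # illustrative id of the duplicated dataset
    target_database_id: 6,        # illustrative target database id
    target_schema:      'acme',   # illustrative target schema
    remove_copy_suffix: true      # drops ' (COPY)' from table_name before the update is sent
  ).perform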

lib/superset/services/duplicate_dashboard.rb

Lines changed: 45 additions & 8 deletions
@@ -7,30 +7,35 @@ module Superset
   module Services
     class DuplicateDashboard < Superset::Request
 
-      attr_reader :source_dashboard_id, :target_schema, :target_database_id
+      DUPLICATED_DATASET_SUFFIX = ' (COPY)'
 
-      def initialize(source_dashboard_id:, target_schema:, target_database_id: )
+      attr_reader :source_dashboard_id, :target_schema, :target_database_id, :allowed_domains
+
+      def initialize(source_dashboard_id:, target_schema:, target_database_id: , allowed_domains: nil)
        @source_dashboard_id = source_dashboard_id
        @target_schema = target_schema
        @target_database_id = target_database_id
+        @allowed_domains = allowed_domains
      end
 
      def perform
        validate_params
 
+        # Pull the Datasets for all charts on the source dashboard
+        source_dashboard_datasets
+
        # create a new_dashboard by copying the source_dashboard using with 'duplicate_slices: true' to get a new set of charts.
+        # The new_dashboard will have a copy of charts from the source_dashboard, but with the same datasets as the source_dashboard
        new_dashboard
 
-        # Pull the Datasets for all charts on the source dashboard
-        # currently the new_dashboard charts(slices) all point to these same datasets from the orig source dashboard
-        source_dashboard_datasets
-
        # Duplicate these Datasets to the new target schema and target database
        duplicate_source_dashboard_datasets
 
        # Update the Charts on the New Dashboard with the New Datasets
        update_charts_with_new_datasets
 
+        created_embedded_config
+
        end_log_message
 
        # return the new dashboard id and url
@@ -41,7 +46,16 @@ def perform
        raise e
      end
 
-      # private
+      private
+
+      def created_embedded_config
+        return unless allowed_domains.present?
+
+        result = Dashboard::Embedded::Put.new(dashboard_id: new_dashboard.id, allowed_domains: allowed_domains).result
+        logger.info " Embedded Domain Added to New Dashboard #{new_dashboard.id}:"
+        logger.info " Embedded Domain allowed_domains: #{result['allowed_domains']}"
+        logger.info " Embedded Domain uuid: #{result['uuid']}"
+      end
 
      def dataset_duplication_tracker
        @dataset_duplication_tracker ||= []
@@ -68,7 +82,7 @@ def update_charts_with_new_datasets
      def duplicate_source_dashboard_datasets
        source_dashboard_datasets.each do |dataset|
          # duplicate the dataset
-          new_dataset_id = Superset::Dataset::Duplicate.new(source_dataset_id: dataset[:id], new_dataset_name: "#{dataset[:datasource_name]} #{target_schema} DUPLICATION").perform
+          new_dataset_id = Superset::Dataset::Duplicate.new(source_dataset_id: dataset[:id], new_dataset_name: "#{dataset[:datasource_name]}#{DUPLICATED_DATASET_SUFFIX}").perform
 
          # keep track of the previous dataset and the matching new dataset_id
          dataset_duplication_tracker << { source_dataset_id: dataset[:id], new_dataset_id: new_dataset_id }
@@ -111,6 +125,10 @@ def validate_params
        # schema validations
        raise ValidationError, "Schema #{target_schema} does not exist in target database: #{target_database_id}" unless target_database_available_schemas.include?(target_schema)
        raise ValidationError, "The source_dashboard_id #{source_dashboard_id} datasets are required to point to one schema only. Actual schema list is #{source_dashboard_schemas.join(',')}" if source_dashboard_has_more_than_one_schema?
+
+        # new dataset validations
+        raise ValidationError, "DATASET NAME CONFLICT: The Target Schema #{target_schema} already has existing datasets named: #{target_schema_matching_dataset_names.join(',')}" unless target_schema_matching_dataset_names.empty?
+
      end
 
      def target_database_available_schemas
@@ -121,10 +139,29 @@ def source_dashboard_has_more_than_one_schema?
        source_dashboard_schemas.count > 1
      end
 
+      # Pull the Datasets for all charts on the source dashboard
      def source_dashboard_schemas
        source_dashboard_datasets.map { |dataset| dataset[:schema] }.uniq
      end
 
+      def source_dashboard_dataset_names
+        source_dashboard_datasets.map { |dataset| dataset[:datasource_name] }.uniq
+      end
+
+      # identify any already existing datasets in the target schema that have the same name as the source dashboard datasets
+      # note this is prior to adding the (COPY) suffix
+      # here we will need to decide if we want to use the existing dataset or not see NEP-????
+      # for now we will exit with an error if we find any existing datasets of the same name
+      def target_schema_matching_dataset_names
+        source_dashboard_dataset_names.map do |source_dataset_name|
+          existing_names = Superset::Dataset::List.new(title_contains: source_dataset_name, schema_equals: target_schema).result.map{|t|t['table_name']}.uniq # contains match to cover with suffix as well
+          unless existing_names.flatten.empty?
+            logger.error " HALTING PROCESS: Schema #{target_schema} already has Dataset called #{existing_names}"
+          end
+          existing_names
+        end.flatten.compact
+      end
+
      def logger
        @logger ||= Superset::Logger.new
      end
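
Putting it together, the duplication service can now create the embedded config in the same run; a hedged sketch, with illustrative ids, schema, and domain:

  Superset::Services::DuplicateDashboard.new(
    source_dashboard_id: 101,                               # illustrative source dashboard
    target_schema:       'acme',                            # illustrative target schema
    target_database_id:  6,                                 # illustrative target database
    allowed_domains:     ['https://analytics.example.com']  # optional; when present, an embedded config is PUT on the new dashboard
  ).perform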

spec/superset/chart/update_dataset_spec.rb

Lines changed: 0 additions & 1 deletion
@@ -19,7 +19,6 @@
       "id"=>chart_id,
       "is_managed_externally"=>false,
       "owners"=>[{"first_name"=>"Jay", "id"=>9, "last_name"=>"Bee"}],
-      "query_context"=>nil,
       "slice_name"=>"JRStg DoB per Year",
       "tags"=>[{"id"=>1, "name"=>"owner:9", "type"=>3}, {"id"=>28, "name"=>"type:chart", "type"=>2}],
       "thumbnail_url"=>"/api/v1/chart/54507/thumbnail/1595a10937091faff0aed5df628a1292/",

spec/superset/dashboard/embedded/put_spec.rb

Lines changed: 48 additions & 8 deletions
@@ -1,17 +1,17 @@
 require 'spec_helper'
 
 RSpec.describe Superset::Dashboard::Embedded::Put, type: :service do
-  subject { described_class.new(dashboard_id: dashboard_id, embedded_domain: allowed_domain) }
+  subject { described_class.new(dashboard_id: dashboard_id, allowed_domains: allowed_domains) }
   let(:dashboard_id) { 1 }
 
   describe 'with a dashboard that has embedded settings, ie has a result' do
-    let(:allowed_domain) { ['http://test-domain.io/'] }
+    let(:allowed_domains) { ['http://test-domain.io/'] }
     let(:uuid) { '631bxxxx-xxxx-xxxx-xxxx-xxxxxxxxx247' }
     let(:response) do
       {
         'result' =>
           {
-            "allowed_domains" => allowed_domain,
+            "allowed_domains" => allowed_domains,
            "changed_by"=>{"first_name"=>"Jay", "id"=>9, "last_name"=>"Bee", "username"=>"4bf....3f5"},
            "changed_on" => "2023-10-30T03:06:51.437527",
            "dashboard_id" => "1",
@@ -20,13 +20,53 @@
      }.with_indifferent_access
    end
 
-    before do
-      allow(subject).to receive(:response).and_return(response)
+    context 'with params' do
+      before { allow(subject).to receive(:response).and_return(response) }
+
+      context 'that are valid' do
+        it 'returns uuid' do
+          expect(subject.uuid).to eq(uuid)
+        end
+      end
+    end
+
+    context 'where allowed_domains is not an array' do
+      let(:allowed_domains) { 'http://test-domain.io/' }
+
+      before { allow(subject).to receive(:response).and_call_original }
+
+      it 'raises error' do
+        expect { subject.response }.to raise_error(Superset::Request::InvalidParameterError, "allowed_domains array is required")
+      end
    end
 
-    describe '#uuid' do
-      it 'returns uuid' do
-        expect(subject.uuid).to eq(uuid)
+    context 'where allowed_domains is nil' do
+      let(:allowed_domains) { nil }
+
+      before { allow(subject).to receive(:response).and_call_original }
+
+      it 'raises error' do
+        expect { subject.response }.to raise_error(Superset::Request::InvalidParameterError, "allowed_domains array is required")
+      end
+    end
+
+    context 'where dashboard_id is nil' do
+      let(:dashboard_id) { nil }
+
+      before { allow(subject).to receive(:response).and_call_original }
+
+      it 'raises error' do
+        expect { subject.response }.to raise_error(Superset::Request::InvalidParameterError, "dashboard_id integer is required")
+      end
+    end
+
+    context 'where dashboard_id is not an int' do
+      let(:dashboard_id) { 'asdf' }
+
+      before { allow(subject).to receive(:response).and_call_original }
+
+      it 'raises error' do
+        expect { subject.response }.to raise_error(Superset::Request::InvalidParameterError, "dashboard_id integer is required")
      end
    end
  end

spec/superset/dataset/duplicate_spec.rb

Lines changed: 1 addition & 0 deletions
@@ -15,6 +15,7 @@
   before do
     allow(subject).to receive(:response).and_return(response)
     allow(subject).to receive(:new_dataset_name_already_in_use?).and_return(false)
+    allow(subject).to receive(:source_dataset).and_return(double(schema: 'public'))
   end
 
   describe '#perform' do

spec/superset/dataset/list_spec.rb

Lines changed: 9 additions & 0 deletions
@@ -83,5 +83,14 @@
     end
   end
 
+
+  context 'with title_contains and schema_contains' do
+    subject { described_class.new(title_equals: 'birth_days', schema_equals: 'schema_one') }
+
+    specify do
+      expect(subject.query_params).to eq("filters:!((col:table_name,opr:eq,value:'birth_days'),(col:schema,opr:eq,value:'schema_one')),page:0,page_size:100")
+    end
+  end
+
  end
 end
