Skip to content

Commit 39f8a6c

Browse files
committed
[NEP-18619]: Adjust the endpoint for Dashboard::Datasets::List to optionally include filter datasets
1 parent 98064fc commit 39f8a6c

File tree

3 files changed

+58
-10
lines changed

3 files changed

+58
-10
lines changed

lib/superset/dashboard/datasets/list.rb

Lines changed: 22 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -7,14 +7,15 @@ module Superset
77
module Dashboard
88
module Datasets
99
class List < Superset::Request
10-
attr_reader :id # dashboard id
10+
attr_reader :id, :include_filter_datasets # dashboard id
1111

1212
# Convenience entry point. Mirrors initialize's signature so the
# include_filter_datasets option is reachable via .call as well.
#
# @param id [Integer] dashboard id
# @param include_filter_datasets [Boolean] also include datasets used only by native filters
def self.call(id, include_filter_datasets = false)
  self.new(id, include_filter_datasets).list
end
1515

16-
def initialize(id)
16+
def initialize(id, include_filter_datasets = false)
1717
@id = id
18+
@include_filter_datasets = include_filter_datasets
1819
end
1920

2021
def perform
@@ -36,13 +37,31 @@ def schemas
3637
end
3738

3839
# Dataset details for all charts on the dashboard. When
# include_filter_datasets is set, also appends details for datasets that
# are referenced only by the dashboard's native filters (fetched
# individually via Dataset::Get, since they are absent from the chart
# datasets endpoint response).
#
# @return [Array<HashWithIndifferentAccess>] each with id, datasource_name,
#   schema, sql and a nested 'database' hash (id, name, backend)
def datasets_details
  chart_datasets = result.map do |details|
    details.slice('id', 'datasource_name', 'schema', 'sql').merge('database' => details['database'].slice('id', 'name', 'backend')).with_indifferent_access
  end
  return chart_datasets unless include_filter_datasets

  chart_dataset_ids = chart_datasets.map { |d| d['id'] }
  filter_dataset_ids_not_used_in_charts = filter_dataset_ids - chart_dataset_ids
  return chart_datasets if filter_dataset_ids_not_used_in_charts.empty?

  filter_datasets = filter_dataset_ids_not_used_in_charts.map do |filter_dataset_id|
    filter_dataset = Superset::Dataset::Get.new(filter_dataset_id).result
    # Dataset::Get exposes the database name as 'database_name'; normalise it
    # to 'name' so filter entries match the chart dataset entries' shape.
    database_info = {
      'id' => filter_dataset['database']['id'],
      'name' => filter_dataset['database']['database_name'],
      'backend' => filter_dataset['database']['backend']
    }
    filter_dataset.slice('id', 'datasource_name', 'schema', 'sql').merge('database' => database_info).with_indifferent_access
  end

  # implicit return; the original assigned this to an unused local
  chart_datasets + filter_datasets
end
4358

4459
private
4560

61+
# Dataset ids referenced by this dashboard's native filters (memoized).
def filter_dataset_ids
  @filter_dataset_ids ||= Superset::Dashboard::Filters::List.new(id).perform
end
64+
4665
def route
4766
"dashboard/#{id}/datasets"
4867
end
Lines changed: 27 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,27 @@
1+
module Superset
  module Dashboard
    module Filters
      # Extracts the dataset ids referenced by a dashboard's native filters,
      # parsed out of the dashboard's json_metadata.
      class List < Superset::Request
        attr_reader :id # dashboard id

        def initialize(id)
          @id = id
        end

        # @return [Array<Integer>] dataset ids used by native filters;
        #   [] when the dashboard has no native filter configuration
        def perform
          filters_configuration = JSON.parse(dashboard.result['json_metadata'])['native_filter_configuration'] || []
          # `|| []` above guarantees an array, so only emptiness needs checking
          return [] if filters_configuration.empty?

          # pull only the filters dataset ids from the dashboard
          filters_configuration.map { |c| c['targets'] }.flatten.compact.map { |c| c['datasetId'] }.flatten.compact
        end

        private

        def dashboard
          # Memoize in an instance variable: the original `dashboard ||= ...`
          # assigned a fresh local on every call, so nothing was cached.
          @dashboard ||= Superset::Dashboard::Get.new(id)
        end
      end
    end
  end
end

lib/superset/services/duplicate_dashboard.rb

Lines changed: 9 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -93,7 +93,13 @@ def duplicate_source_dashboard_datasets
9393
# duplicate the dataset, renaming to use of suffix as the target_schema
9494
# reason: there is a bug(or feature) in the SS API where a dataset name must be uniq when duplicating.
9595
# (note however renaming in the GUI to a dup name works fine)
96-
new_dataset_id = Superset::Dataset::Duplicate.new(source_dataset_id: dataset[:id], new_dataset_name: "#{dataset[:datasource_name]}-#{target_schema}").perform
96+
new_dataset_name = "#{dataset[:datasource_name]}-#{target_schema}"
97+
existing_datasets = Dataset::List.new(title_equals: new_dataset_name, schema_equals: source_dataset.schema).result # NOTE(review): `source_dataset` is not defined in this loop (the loop variable is `dataset`) — should this be dataset[:schema]? confirm
98+
if existing_datasets.any?
99+
new_dataset_id = existing_datasets[0]["id"] # assuming that we do not name multiple datasets with same name in a single schema
100+
else
101+
new_dataset_id = Superset::Dataset::Duplicate.new(source_dataset_id: dataset[:id], new_dataset_name: new_dataset_name).perform
102+
end
97103

98104
# keep track of the previous dataset and the matching new dataset_id
99105
dataset_duplication_tracker << { source_dataset_id: dataset[:id], new_dataset_id: new_dataset_id }
@@ -179,7 +185,7 @@ def new_dashboard
179185

180186
# retrieve the datasets that will be duplicated
181187
def source_dashboard_datasets
182-
@source_dashboard_datasets ||= Superset::Dashboard::Datasets::List.new(source_dashboard_id).datasets_details
188+
@source_dashboard_datasets ||= Superset::Dashboard::Datasets::List.new(source_dashboard_id, true).datasets_details
183189
rescue => e
184190
raise "Unable to retrieve datasets for source dashboard #{source_dashboard_id}: #{e.message}"
185191
end
@@ -255,11 +261,7 @@ def source_dashboard_dataset_ids
255261
end
256262

257263
def source_dashboard_filter_dataset_ids
258-
filters_configuration = JSON.parse(source_dashboard.result['json_metadata'])['native_filter_configuration'] || []
259-
return Array.new unless filters_configuration && filters_configuration.any?
260-
261-
# pull only the filters dataset ids from the dashboard
262-
filters_configuration.map { |c| c['targets'] }.flatten.compact.map { |c| c['datasetId'] }.flatten.compact
264+
@filter_dataset_ids ||= Superset::Dashboard::Filters::List.new(id).perform
263265
end
264266

265267
# Primary Assumption is that all charts datasets on the source dashboard are pointing to the same database schema

0 commit comments

Comments
 (0)