|
12 | 12 |
|
13 | 13 | logger = logging.getLogger(__name__) |
14 | 14 |
|
# Known `spark_conf.*` key prefixes that mark a cluster policy as configuring
# an external Hive metastore (HMS): Spark SQL metastore settings, Hadoop
# metastore settings, JDO/JDBC connection options, and Databricks-specific
# metastore settings. A tuple (not a list) because this is an immutable
# module-level constant and can be passed directly to `str.startswith`.
ext_hms_prefixes = (
    "spark_conf.spark.sql.hive.metastore",
    "spark_conf.spark.hadoop.hive.metastore",
    "spark_conf.spark.hadoop.javax.jdo.option",
    "spark_conf.spark.databricks.hive.metastore",
)

15 | 23 |
|
16 | 24 | class ClusterPolicyInstaller: |
17 | 25 | def __init__(self, installation: Installation, ws: WorkspaceClient, prompts: Prompts): |
@@ -142,13 +150,9 @@ def _extract_external_hive_metastore_conf(cluster_policy): |
142 | 150 | instance_profile = cluster_policy.get("aws_attributes.instance_profile_arn").get("value") |
143 | 151 | logger.info(f"Instance Profile is Set to {instance_profile}") |
144 | 152 | for key in cluster_policy.keys(): |
145 | | - if ( |
146 | | - key.startswith("spark_conf.spark.sql.hive.metastore") |
147 | | - or key.startswith("spark_conf.spark.hadoop.javax.jdo.option") |
148 | | - or key.startswith("spark_conf.spark.databricks.hive.metastore") |
149 | | - or key.startswith("spark_conf.spark.hadoop.hive.metastore.glue") |
150 | | - ): |
151 | | - spark_conf_dict[key[11:]] = cluster_policy[key]["value"] |
| 153 | + for known_prefix in ext_hms_prefixes: |
| 154 | + if key.startswith(known_prefix): |
| 155 | + spark_conf_dict[key[11:]] = cluster_policy[key]["value"] |
152 | 156 | return instance_profile, spark_conf_dict |
153 | 157 |
|
154 | 158 | def _get_cluster_policies_with_external_hive_metastores(self): |
|
0 commit comments