
Commit a3d5cd6

[generator] Removed redundant generator changes

1 parent: 75a89db

4 files changed (+13, -521 lines)

pyatlan/generator/class_generator.py (0 additions, 3 deletions)

@@ -469,9 +469,6 @@ def create(cls, relationship_defs: List[RelationshipDef]):
                     name=to_snake_case(rel_def.name), relationship_def=rel_def
                 )
             )
-            # if rel_def.name == "UserDefRelationship":
-            #     import ipdb; ipdb.set_trace()
-            #     print(to_python_class_name(rel_def.name))


 class AttributeType(Enum):
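
The three deleted lines were a leftover, commented-out debugging aid inside the generator's create() classmethod. Uncommented, they would have behaved roughly like the sketch below (reconstructed only from the removed comments; the enclosing loop over relationship_defs is implied by the hunk context and is not shown in full):

    # Hypothetical reconstruction of the removed, commented-out debugging aid:
    # drop into ipdb only while the generator processes one specific relationship.
    if rel_def.name == "UserDefRelationship":
        import ipdb; ipdb.set_trace()  # pause for interactive inspection of rel_def
        print(to_python_class_name(rel_def.name))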

pyatlan/model/assets/a_d_l_s.py (4 additions, 197 deletions)

@@ -8,17 +8,13 @@

 from pydantic.v1 import Field, validator

-from pyatlan.model.fields.atlan_fields import (
-    KeywordField,
-    KeywordTextField,
-    RelationField,
-)
+from pyatlan.model.fields.atlan_fields import KeywordField, KeywordTextField
 from pyatlan.model.structs import AzureTag

-from .azure import Azure
+from .object_store import ObjectStore


-class ADLS(Azure):
+class ADLS(ObjectStore):
     """Description"""

     type_name: str = Field(default="ADLS", allow_mutation=False)
@@ -71,66 +67,13 @@ def __setattr__(self, name, value):
     Tags that have been applied to this asset in Azure.
     """

-    INPUT_TO_SPARK_JOBS: ClassVar[RelationField] = RelationField("inputToSparkJobs")
-    """
-    TBC
-    """
-    INPUT_TO_AIRFLOW_TASKS: ClassVar[RelationField] = RelationField(
-        "inputToAirflowTasks"
-    )
-    """
-    TBC
-    """
-    INPUT_TO_PROCESSES: ClassVar[RelationField] = RelationField("inputToProcesses")
-    """
-    TBC
-    """
-    MODEL_IMPLEMENTED_ATTRIBUTES: ClassVar[RelationField] = RelationField(
-        "modelImplementedAttributes"
-    )
-    """
-    TBC
-    """
-    OUTPUT_FROM_AIRFLOW_TASKS: ClassVar[RelationField] = RelationField(
-        "outputFromAirflowTasks"
-    )
-    """
-    TBC
-    """
-    OUTPUT_FROM_SPARK_JOBS: ClassVar[RelationField] = RelationField(
-        "outputFromSparkJobs"
-    )
-    """
-    TBC
-    """
-    MODEL_IMPLEMENTED_ENTITIES: ClassVar[RelationField] = RelationField(
-        "modelImplementedEntities"
-    )
-    """
-    TBC
-    """
-    OUTPUT_FROM_PROCESSES: ClassVar[RelationField] = RelationField(
-        "outputFromProcesses"
-    )
-    """
-    TBC
-    """
-
     _convenience_properties: ClassVar[List[str]] = [
         "adls_account_qualified_name",
         "adls_account_name",
         "azure_resource_id",
         "azure_location",
         "adls_account_secondary_location",
         "azure_tags",
-        "input_to_spark_jobs",
-        "input_to_airflow_tasks",
-        "input_to_processes",
-        "model_implemented_attributes",
-        "output_from_airflow_tasks",
-        "output_from_spark_jobs",
-        "model_implemented_entities",
-        "output_from_processes",
     ]

     @property
@@ -205,113 +148,7 @@ def azure_tags(self, azure_tags: Optional[List[AzureTag]]):
             self.attributes = self.Attributes()
         self.attributes.azure_tags = azure_tags

-    @property
-    def input_to_spark_jobs(self) -> Optional[List[SparkJob]]:
-        return None if self.attributes is None else self.attributes.input_to_spark_jobs
-
-    @input_to_spark_jobs.setter
-    def input_to_spark_jobs(self, input_to_spark_jobs: Optional[List[SparkJob]]):
-        if self.attributes is None:
-            self.attributes = self.Attributes()
-        self.attributes.input_to_spark_jobs = input_to_spark_jobs
-
-    @property
-    def input_to_airflow_tasks(self) -> Optional[List[AirflowTask]]:
-        return (
-            None if self.attributes is None else self.attributes.input_to_airflow_tasks
-        )
-
-    @input_to_airflow_tasks.setter
-    def input_to_airflow_tasks(
-        self, input_to_airflow_tasks: Optional[List[AirflowTask]]
-    ):
-        if self.attributes is None:
-            self.attributes = self.Attributes()
-        self.attributes.input_to_airflow_tasks = input_to_airflow_tasks
-
-    @property
-    def input_to_processes(self) -> Optional[List[Process]]:
-        return None if self.attributes is None else self.attributes.input_to_processes
-
-    @input_to_processes.setter
-    def input_to_processes(self, input_to_processes: Optional[List[Process]]):
-        if self.attributes is None:
-            self.attributes = self.Attributes()
-        self.attributes.input_to_processes = input_to_processes
-
-    @property
-    def model_implemented_attributes(self) -> Optional[List[ModelAttribute]]:
-        return (
-            None
-            if self.attributes is None
-            else self.attributes.model_implemented_attributes
-        )
-
-    @model_implemented_attributes.setter
-    def model_implemented_attributes(
-        self, model_implemented_attributes: Optional[List[ModelAttribute]]
-    ):
-        if self.attributes is None:
-            self.attributes = self.Attributes()
-        self.attributes.model_implemented_attributes = model_implemented_attributes
-
-    @property
-    def output_from_airflow_tasks(self) -> Optional[List[AirflowTask]]:
-        return (
-            None
-            if self.attributes is None
-            else self.attributes.output_from_airflow_tasks
-        )
-
-    @output_from_airflow_tasks.setter
-    def output_from_airflow_tasks(
-        self, output_from_airflow_tasks: Optional[List[AirflowTask]]
-    ):
-        if self.attributes is None:
-            self.attributes = self.Attributes()
-        self.attributes.output_from_airflow_tasks = output_from_airflow_tasks
-
-    @property
-    def output_from_spark_jobs(self) -> Optional[List[SparkJob]]:
-        return (
-            None if self.attributes is None else self.attributes.output_from_spark_jobs
-        )
-
-    @output_from_spark_jobs.setter
-    def output_from_spark_jobs(self, output_from_spark_jobs: Optional[List[SparkJob]]):
-        if self.attributes is None:
-            self.attributes = self.Attributes()
-        self.attributes.output_from_spark_jobs = output_from_spark_jobs
-
-    @property
-    def model_implemented_entities(self) -> Optional[List[ModelEntity]]:
-        return (
-            None
-            if self.attributes is None
-            else self.attributes.model_implemented_entities
-        )
-
-    @model_implemented_entities.setter
-    def model_implemented_entities(
-        self, model_implemented_entities: Optional[List[ModelEntity]]
-    ):
-        if self.attributes is None:
-            self.attributes = self.Attributes()
-        self.attributes.model_implemented_entities = model_implemented_entities
-
-    @property
-    def output_from_processes(self) -> Optional[List[Process]]:
-        return (
-            None if self.attributes is None else self.attributes.output_from_processes
-        )
-
-    @output_from_processes.setter
-    def output_from_processes(self, output_from_processes: Optional[List[Process]]):
-        if self.attributes is None:
-            self.attributes = self.Attributes()
-        self.attributes.output_from_processes = output_from_processes
-
-    class Attributes(Azure.Attributes):
+    class Attributes(ObjectStore.Attributes):
         adls_account_qualified_name: Optional[str] = Field(default=None, description="")
         adls_account_name: Optional[str] = Field(default=None, description="")
         azure_resource_id: Optional[str] = Field(default=None, description="")
@@ -320,30 +157,6 @@ class Attributes(Azure.Attributes):
             default=None, description=""
         )
         azure_tags: Optional[List[AzureTag]] = Field(default=None, description="")
-        input_to_spark_jobs: Optional[List[SparkJob]] = Field(
-            default=None, description=""
-        )  # relationship
-        input_to_airflow_tasks: Optional[List[AirflowTask]] = Field(
-            default=None, description=""
-        )  # relationship
-        input_to_processes: Optional[List[Process]] = Field(
-            default=None, description=""
-        )  # relationship
-        model_implemented_attributes: Optional[List[ModelAttribute]] = Field(
-            default=None, description=""
-        )  # relationship
-        output_from_airflow_tasks: Optional[List[AirflowTask]] = Field(
-            default=None, description=""
-        )  # relationship
-        output_from_spark_jobs: Optional[List[SparkJob]] = Field(
-            default=None, description=""
-        )  # relationship
-        model_implemented_entities: Optional[List[ModelEntity]] = Field(
-            default=None, description=""
-        )  # relationship
-        output_from_processes: Optional[List[Process]] = Field(
-            default=None, description=""
-        )  # relationship

     attributes: ADLS.Attributes = Field(
         default_factory=lambda: ADLS.Attributes(),
@@ -355,10 +168,4 @@ class Attributes(Azure.Attributes):
     )


-from .core.airflow_task import AirflowTask  # noqa: E402, F401
-from .core.model_attribute import ModelAttribute  # noqa: E402, F401
-from .core.model_entity import ModelEntity  # noqa: E402, F401
-from .core.process import Process  # noqa: E402, F401
-from .core.spark_job import SparkJob  # noqa: E402, F401
-
 ADLS.Attributes.update_forward_refs()
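
With this change, a_d_l_s.py no longer declares the lineage relationship fields (input_to_*, output_from_*, model_implemented_*) on ADLS itself; ADLS now extends ObjectStore and keeps only its scalar Azure/ADLS attributes. A minimal sketch of how one might confirm that against a pyatlan checkout with this commit applied (it only reads class metadata visible in the diff above):

    # Inspect the generated convenience-property list on ADLS.
    from pyatlan.model.assets.a_d_l_s import ADLS

    print(ADLS._convenience_properties)
    # Expected, per the diff: ['adls_account_qualified_name', 'adls_account_name',
    #   'azure_resource_id', 'azure_location', 'adls_account_secondary_location',
    #   'azure_tags']

    # The immediate base class is now ObjectStore (was Azure), per the new import.
    print(ADLS.__mro__[1])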
