Commit da07dcb

Author: Olga Annenkova

feat: managed spark wrapper

1 parent 89baaff

File tree: 2 files changed, +40 -38 lines

yandexcloud/_wrappers/__init__.py

Lines changed: 1 addition & 3 deletions
@@ -1,9 +1,7 @@
 from typing import TYPE_CHECKING

 from yandexcloud._wrappers.dataproc import Dataproc, InitializationAction
-from yandexcloud._wrappers.spark import (
-    Spark, SparkClusterParameters, SparkJobParameters, PysparkJobParameters
-)
+from yandexcloud._wrappers.spark import Spark, SparkClusterParameters, SparkJobParameters, PysparkJobParameters

 if TYPE_CHECKING:
     from yandexcloud._sdk import SDK
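Since this `__init__.py` re-exports the Spark names, callers can import them from the package as well as from the submodule; a tiny sketch under that assumption:

    # Both paths should resolve to the same class, because the package
    # __init__ above re-exports everything from _wrappers.spark.
    from yandexcloud._wrappers import Spark
    from yandexcloud._wrappers.spark import Spark as SparkDirect

    assert Spark is SparkDirect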

yandexcloud/_wrappers/spark/__init__.py

Lines changed: 39 additions & 35 deletions
@@ -3,7 +3,7 @@
 # mypy: ignore-errors
 import logging
 import random
-from typing import Dict, List
+from typing import Dict, List, Optional
 from dataclasses import dataclass

 import yandex.cloud.spark.v1.cluster_pb2 as cluster_pb
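The mechanical change throughout this file is swapping PEP 604 unions (`str | None`) for `typing.Optional[...]`. A plausible motivation, not stated in the commit: the `X | Y` annotation syntax is only valid at runtime on Python 3.10+, and these annotations sit in dataclass bodies and function signatures, where they are evaluated eagerly at import time. A minimal repro, assuming the module has no `from __future__ import annotations`:

    from dataclasses import dataclass
    from typing import Optional

    # On Python 3.9 and older, the commented class body below raises at
    # import time, because `str | None` is evaluated while building
    # __annotations__:
    #   TypeError: unsupported operand type(s) for |: 'type' and 'NoneType'
    #
    # @dataclass
    # class Broken:
    #     name: str | None = None

    # The typing.Optional spelling works on every supported interpreter:
    @dataclass
    class Works:
        name: Optional[str] = None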
@@ -87,15 +87,16 @@ class SparkClusterParameters:
         From 1 to 24.
     :type maintenance_hour: int, optional
     """
+
     # pylint: disable=too-many-instance-attributes

     folder_id: str
     service_account_id: str
-    name: str | None = None
+    name: Optional[str] = None
     description: str = ""
-    labels: Dict[str, str] | None = None
-    subnet_ids: List[str] | None = None
-    security_group_ids: List[str] | None = None
+    labels: Optional[Dict[str, str]] = None
+    subnet_ids: Optional[List[str]] = None
+    security_group_ids: Optional[List[str]] = None
     deletion_protection: bool = False
     driver_pool_resource_preset: str = ""
     driver_pool_size: int = 0
@@ -106,14 +107,14 @@ class SparkClusterParameters:
     executor_pool_min_size: int = 0
     executor_pool_max_size: int = 0
     logging_enabled: bool = True
-    log_group_id: str | None = None
-    log_folder_id: str | None = None
+    log_group_id: Optional[str] = None
+    log_folder_id: Optional[str] = None
     history_server_enabled: bool = True
-    pip_packages: List[str] | None = None
-    deb_packages: List[str] | None = None
+    pip_packages: Optional[List[str]] = None
+    deb_packages: Optional[List[str]] = None
     metastore_cluster_id: str = ""
-    maintenance_weekday: int | None = None
-    maintenance_hour: int | None = None
+    maintenance_weekday: Optional[int] = None
+    maintenance_hour: Optional[int] = None


 @dataclass
@@ -152,19 +153,20 @@ class SparkJobParameters:
         dependency conflicts.
     :type exclude_packages: List[str], optional
     """
+
     # pylint: disable=too-many-instance-attributes

     name: str = ""
     main_jar_file_uri: str = ""
     main_class: str = ""
-    args: List[str] | None = None
-    properties: Dict[str, str] | None = None
-    packages: List[str] | None = None
-    file_uris: List[str] | None = None
-    jar_file_uris: List[str] | None = None
-    archive_uris: List[str] | None = None
-    repositories: List[str] | None = None
-    exclude_packages: List[str] | None = None
+    args: Optional[List[str]] = None
+    properties: Optional[Dict[str, str]] = None
+    packages: Optional[List[str]] = None
+    file_uris: Optional[List[str]] = None
+    jar_file_uris: Optional[List[str]] = None
+    archive_uris: Optional[List[str]] = None
+    repositories: Optional[List[str]] = None
+    exclude_packages: Optional[List[str]] = None


 @dataclass
@@ -202,19 +204,20 @@ class PysparkJobParameters:
         dependency conflicts.
     :type exclude_packages: List[str], optional
     """
+
     # pylint: disable=too-many-instance-attributes

     name: str = ""
     main_python_file_uri: str = ""
-    args: List[str] | None = None
-    properties: Dict[str, str] | None = None
-    packages: List[str] | None = None
-    file_uris: List[str] | None = None
-    python_file_uris: List[str] | None = None
-    jar_file_uris: List[str] | None = None
-    archive_uris: List[str] | None = None
-    repositories: List[str] | None = None
-    exclude_packages: List[str] | None = None
+    args: Optional[List[str]] = None
+    properties: Optional[Dict[str, str]] = None
+    packages: Optional[List[str]] = None
+    file_uris: Optional[List[str]] = None
+    python_file_uris: Optional[List[str]] = None
+    jar_file_uris: Optional[List[str]] = None
+    archive_uris: Optional[List[str]] = None
+    repositories: Optional[List[str]] = None
+    exclude_packages: Optional[List[str]] = None


 class Spark:
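A side note on the `Optional[List[str]] = None` pattern used across all three dataclasses: `None` is the conventional sentinel here because dataclasses reject mutable defaults outright, so the alternatives are a sentinel checked later or an explicit `default_factory`. A small illustration:

    from dataclasses import dataclass, field
    from typing import List, Optional

    # A bare mutable default is rejected at class-creation time:
    #   ValueError: mutable default <class 'list'> for field args
    #   is not allowed: use default_factory
    #
    # @dataclass
    # class Broken:
    #     args: List[str] = []

    # The two working spellings: a None sentinel (as in this commit) ...
    @dataclass
    class WithSentinel:
        args: Optional[List[str]] = None

    # ... or an explicit factory that builds a fresh list per instance.
    @dataclass
    class WithFactory:
        args: List[str] = field(default_factory=list)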
@@ -245,6 +248,7 @@ def create_cluster(self, spec: SparkClusterParameters) -> str:
         :return: Operation result
         :rtype: OperationResult
         """
+
         # pylint: disable=too-many-branches

         if not spec.folder_id:
@@ -380,7 +384,7 @@ def create_cluster(self, spec: SparkClusterParameters) -> str:
         self.cluster_id = result.response.id
         return result

-    def delete_cluster(self, cluster_id: str | None = None):
+    def delete_cluster(self, cluster_id: Optional[str] = None):
         """
         Delete cluster.

@@ -405,7 +409,7 @@ def delete_cluster(self, cluster_id: str | None = None):
             meta_type=cluster_service_pb.DeleteClusterMetadata,
         )

-    def stop_cluster(self, cluster_id: str | None = None):
+    def stop_cluster(self, cluster_id: Optional[str] = None):
         """
         Stop cluster.

@@ -430,7 +434,7 @@ def stop_cluster(self, cluster_id: str | None = None):
             meta_type=cluster_service_pb.StopClusterMetadata,
         )

-    def start_cluster(self, cluster_id: str | None = None):
+    def start_cluster(self, cluster_id: Optional[str] = None):
         """
         Start cluster.

@@ -455,7 +459,7 @@ def start_cluster(self, cluster_id: str | None = None):
             meta_type=cluster_service_pb.StartClusterMetadata,
         )

-    def create_spark_job(self, spec: SparkJobParameters, cluster_id: str | None = None):
+    def create_spark_job(self, spec: SparkJobParameters, cluster_id: Optional[str] = None):
         """
         Run spark job.

@@ -498,7 +502,7 @@ def create_spark_job(self, spec: SparkJobParameters, cluster_id: str | None = No
             meta_type=job_service_pb.CreateJobMetadata,
         )

-    def create_pyspark_job(self, spec: PysparkJobParameters, cluster_id: str | None = None):
+    def create_pyspark_job(self, spec: PysparkJobParameters, cluster_id: Optional[str] = None):
         """
         Run Pyspark job on the cluster.

@@ -541,7 +545,7 @@ def create_pyspark_job(self, spec: PysparkJobParameters, cluster_id: str | None
             meta_type=job_service_pb.CreateJobMetadata,
         )

-    def get_job(self, job_id: str, cluster_id: str | None = None):
+    def get_job(self, job_id: str, cluster_id: Optional[str] = None):
         """
         Get job info.

@@ -564,7 +568,7 @@ def get_job(self, job_id: str, cluster_id: str | None = None):
         job = self.sdk.client(job_service_grpc_pb.JobServiceStub).Get(request)
         return job, job_pb.Job.Status.Name(job.status)

-    def get_job_log(self, job_id: str, cluster_id: str | None = None):
+    def get_job_log(self, job_id: str, cluster_id: Optional[str] = None):
         """
         Get job log.

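For orientation, a hypothetical end-to-end use of the wrapper, based only on the names and signatures visible in this diff; the constructor arguments, authentication, and `yandexcloud.SDK` wiring are assumptions, not confirmed by the commit:

    import yandexcloud  # assumed entry point for an authenticated SDK
    from yandexcloud._wrappers.spark import (
        PysparkJobParameters,
        Spark,
        SparkClusterParameters,
    )

    sdk = yandexcloud.SDK(token="<token>")  # auth method assumed
    spark = Spark(sdk=sdk)                  # constructor signature assumed

    # create_cluster stores the new id on self.cluster_id (per the diff),
    # so the later calls may omit cluster_id.
    spark.create_cluster(
        SparkClusterParameters(
            folder_id="<folder-id>",
            service_account_id="<service-account-id>",
            name="demo",
            subnet_ids=["<subnet-id>"],
        )
    )

    spark.create_pyspark_job(
        PysparkJobParameters(
            name="example-job",
            main_python_file_uri="s3a://<bucket>/job.py",
        )
    )

    spark.delete_cluster()  # falls back to the stored cluster_id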
