
Commit 187b3ed

Release 0.14.0 (#55)
* feat: execution env config
1 parent 81f7f8f commit 187b3ed

32 files changed: +106 -252 lines changed

CHANGELOG.md

Lines changed: 5 additions & 1 deletion
@@ -2,6 +2,8 @@
 
 ## [Unreleased]
 
+## [0.14.0] - 2022-02-02
+
 ## [0.13.0] - 2022-02-01
 
 ## [0.12.0] - 2022-01-31
@@ -150,7 +152,9 @@
 - Draft of `dp init`, `dp create`, `dp template new`, `dp template list` and `dp dbt`
 - Draft of `dp compile` and `dp deploy`
 
-[Unreleased]: https://github.com/getindata/data-pipelines-cli/compare/0.13.0...HEAD
+[Unreleased]: https://github.com/getindata/data-pipelines-cli/compare/0.14.0...HEAD
+
+[0.14.0]: https://github.com/getindata/data-pipelines-cli/compare/0.13.0...0.14.0
 
 [0.13.0]: https://github.com/getindata/data-pipelines-cli/compare/0.12.0...0.13.0
 

data_pipelines_cli/__init__.py

Lines changed: 1 addition & 1 deletion
@@ -5,4 +5,4 @@
 pipelines.
 """
 
-version = "0.13.0"
+version = "0.14.0"

data_pipelines_cli/cli_commands/compile.py

Lines changed: 5 additions & 9 deletions
@@ -66,18 +66,14 @@ def _copy_dbt_manifest() -> None:
     )
 
 
-def _replace_k8s_settings(docker_args: DockerArgs) -> None:
-    k8s_config = BUILD_DIR.joinpath("dag", "config", "base", "k8s.yml")
-    echo_info(
-        f"Replacing {IMAGE_TAG_TO_REPLACE} with commit SHA = {docker_args.commit_sha}"
-    )
+def replace_image_settings(docker_args: DockerArgs) -> None:
+    k8s_config = BUILD_DIR.joinpath("dag", "config", "base", "execution_env.yml")
+    echo_info(f"Replacing {IMAGE_TAG_TO_REPLACE} with commit SHA = {docker_args.commit_sha}")
     replace(k8s_config, IMAGE_TAG_TO_REPLACE, docker_args.commit_sha)
 
 
 def _replace_datahub_with_jinja_vars(env: str) -> None:
-    datahub_config_path: pathlib.Path = BUILD_DIR.joinpath(
-        "dag", "config", "base", "datahub.yml"
-    )
+    datahub_config_path: pathlib.Path = BUILD_DIR.joinpath("dag", "config", "base", "datahub.yml")
 
     if not datahub_config_path.exists():
         echo_warning(
@@ -112,7 +108,7 @@ def compile_project(
     copy_config_dir_to_build_dir()
 
     docker_args = DockerArgs(env)
-    _replace_k8s_settings(docker_args)
+    replace_image_settings(docker_args)
     _replace_datahub_with_jinja_vars(env)
 
     _dbt_compile(env)
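
Note: the renamed replace_image_settings only swaps an image-tag placeholder in the generated execution_env.yml for the current commit SHA. A minimal stand-alone sketch of that substitution, assuming a <IMAGE_TAG> placeholder value (the real constant is IMAGE_TAG_TO_REPLACE, and the CLI uses its own replace helper and DockerArgs):

import pathlib

# Assumed placeholder value; the actual constant lives in data_pipelines_cli.
IMAGE_TAG_TO_REPLACE = "<IMAGE_TAG>"

def replace_image_tag(build_dir: pathlib.Path, commit_sha: str) -> None:
    # Rewrite build/dag/config/base/execution_env.yml in place, pinning the image
    # to the commit SHA, as replace_image_settings does during `dp compile`.
    config = build_dir.joinpath("dag", "config", "base", "execution_env.yml")
    config.write_text(config.read_text().replace(IMAGE_TAG_TO_REPLACE, commit_sha))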

data_pipelines_cli/cli_commands/create.py

Lines changed: 1 addition & 3 deletions
@@ -65,7 +65,5 @@ def create(project_path: str, template_path: Optional[str]) -> None:
 @click.argument("template-path", nargs=-1)
 def create_command(project_path: str, template_path: Sequence[str]) -> None:
     if template_path and len(template_path) > 1:
-        echo_warning(
-            "dp create expects at most two arguments -- project-path and template-path"
-        )
+        echo_warning("dp create expects at most two arguments -- project-path and template-path")
     create(project_path, template_path[0] if template_path else None)

data_pipelines_cli/cli_commands/deploy.py

Lines changed: 1 addition & 3 deletions
@@ -129,9 +129,7 @@ def _sync_bucket(self) -> None:
     name="deploy",
     help="Push and deploy the project to the remote machine",
 )
-@click.option(
-    "--env", default="base", show_default=True, type=str, help="Name of the environment"
-)
+@click.option("--env", default="base", show_default=True, type=str, help="Name of the environment")
 @click.option("--dags-path", required=False, help="Remote storage URI")
 @click.option(
     "--blob-args",

data_pipelines_cli/cli_commands/publish.py

Lines changed: 6 additions & 18 deletions
@@ -34,9 +34,7 @@ def _get_database_and_schema_name(manifest: Manifest) -> Tuple[str, str]:
     raise DataPipelinesError("There is no model in 'manifest.json' file.")
 
 
-def _parse_columns_dict_into_table_list(
-    columns: Dict[str, ColumnInfo]
-) -> List[DbtTableColumn]:
+def _parse_columns_dict_into_table_list(columns: Dict[str, ColumnInfo]) -> List[DbtTableColumn]:
     return [
         DbtTableColumn(
             name=column.name,
@@ -64,16 +62,12 @@ def _parse_models_schema(manifest: Manifest) -> List[DbtModel]:
 
 
 def _get_dag_id() -> str:
-    with open(
-        BUILD_DIR.joinpath("dag", "config", "base", "airflow.yml"), "r"
-    ) as airflow_yml:
+    with open(BUILD_DIR.joinpath("dag", "config", "base", "airflow.yml"), "r") as airflow_yml:
         return yaml.safe_load(airflow_yml)["dag"]["dag_id"]
 
 
 def _create_source(project_name: str) -> DbtSource:
-    with open(
-        pathlib.Path.cwd().joinpath("target", "manifest.json"), "r"
-    ) as manifest_json:
+    with open(pathlib.Path.cwd().joinpath("target", "manifest.json"), "r") as manifest_json:
         manifest_dict = json.load(manifest_json)
         manifest = Manifest.from_dict(manifest_dict)
 
@@ -130,9 +124,7 @@ def _clean_repo(packages_repo: pathlib.Path) -> None:
         shutil.rmtree(packages_repo)
 
 
-def _copy_publication_to_repo(
-    package_dest: pathlib.Path, package_path: pathlib.Path
-) -> None:
+def _copy_publication_to_repo(package_dest: pathlib.Path, package_path: pathlib.Path) -> None:
     if package_dest.exists():
         echo_info(f"Removing {package_dest}")
         shutil.rmtree(package_dest)
@@ -145,14 +137,10 @@ def _configure_git_env(repo: Repo, config: Dict[str, Any]) -> None:
     repo.config_writer().set_value("user", "email", config["email"]).release()
 
 
-def _commit_and_push_changes(
-    repo: Repo, project_name: str, project_version: str
-) -> None:
+def _commit_and_push_changes(repo: Repo, project_name: str, project_version: str) -> None:
     echo_info("Publishing")
     repo.git.add(all=True)
-    repo.index.commit(
-        f"Publication from project {project_name}, version: {project_version}"
-    )
+    repo.index.commit(f"Publication from project {project_name}, version: {project_version}")
     origin = repo.remote(name="origin")
     origin.push()
 
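
Note: _get_dag_id above assumes the rendered airflow.yml exposes the DAG name under dag.dag_id. A small sketch of that lookup against a made-up file, for illustration only:

import yaml

# Hypothetical contents of build/dag/config/base/airflow.yml; the real file is
# rendered from the project template, and only the dag.dag_id key is relied on here.
airflow_yml_text = """
dag:
  dag_id: my-pipeline
"""

dag_id = yaml.safe_load(airflow_yml_text)["dag"]["dag_id"]
print(dag_id)  # -> my-pipeline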

data_pipelines_cli/cli_commands/template.py

Lines changed: 1 addition & 3 deletions
@@ -13,8 +13,6 @@ def list_templates() -> None:
         click.echo(yaml.dump(tc))
 
 
-@click.command(
-    name="template-list", help="Print a list of all templates saved in the config file"
-)
+@click.command(name="template-list", help="Print a list of all templates saved in the config file")
 def list_templates_command() -> None:
     list_templates()

data_pipelines_cli/data_structures.py

Lines changed: 4 additions & 4 deletions
@@ -91,14 +91,14 @@ def _get_docker_repository_uri_from_k8s_config(env: str) -> str:
         read_dictionary_from_config_directory,
     )
 
-    k8s_config = read_dictionary_from_config_directory(
-        BUILD_DIR.joinpath("dag"), env, "k8s.yml"
+    execution_env_config = read_dictionary_from_config_directory(
+        BUILD_DIR.joinpath("dag"), env, "execution_env.yml"
     )
     try:
-        return k8s_config["image"]["repository"]
+        return execution_env_config["image"]["repository"]
     except KeyError as key_error:
         raise DataPipelinesError(
-            f"Could not find 'repository' variable in build/config/{env}/k8s.yml."
+            f"Could not find 'repository' variable in build/config/{env}/execution_env.yml."
         ) from key_error
 
 
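
Note: the lookup above expects the environment's execution_env.yml to carry the Docker repository under image.repository. A minimal sketch of the assumed file shape and the lookup it has to satisfy (the key names come from the diff; the values are illustrative):

import yaml

# Hypothetical build/dag/config/<env>/execution_env.yml; only image.repository
# is required by _get_docker_repository_uri_from_k8s_config.
execution_env_yml = """
image:
  repository: gcr.io/example-project/my-pipeline
  tag: <IMAGE_TAG>
"""

execution_env_config = yaml.safe_load(execution_env_yml)
print(execution_env_config["image"]["repository"])  # -> gcr.io/example-project/my-pipeline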

data_pipelines_cli/dbt_utils.py

Lines changed: 1 addition & 3 deletions
@@ -40,9 +40,7 @@ def _dump_dbt_vars_from_configs_to_string(env: str) -> str:
     return yaml.dump(dbt_vars, default_flow_style=True, width=sys.maxsize)
 
 
-def run_dbt_command(
-    command: Tuple[str, ...], env: str, profiles_path: pathlib.Path
-) -> None:
+def run_dbt_command(command: Tuple[str, ...], env: str, profiles_path: pathlib.Path) -> None:
     """
     Run dbt subprocess in a context of specified *env*.
 
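
Note: a hedged call sketch matching the reflowed run_dbt_command signature; the command tuple and profiles path below are made up for illustration:

import pathlib
from data_pipelines_cli.dbt_utils import run_dbt_command

# Illustrative only: run `dbt compile` for the "base" env with a local profiles dir.
run_dbt_command(("compile",), "base", pathlib.Path.cwd().joinpath("profiles"))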

data_pipelines_cli/docker_response_reader.py

Lines changed: 6 additions & 18 deletions
@@ -83,9 +83,7 @@ def click_echo_ok_responses(self) -> None:
             click.echo(response.msg)
 
     @staticmethod
-    def _prepare_status(
-        log: Dict[str, Union[str, Dict[str, str]]]
-    ) -> DockerReadResponse:
+    def _prepare_status(log: Dict[str, Union[str, Dict[str, str]]]) -> DockerReadResponse:
         status_message = cast(str, log["status"])
         progress_detail = cast(str, log.get("progressDetail", ""))
         status_id = cast(str, log.get("id", ""))
@@ -98,9 +96,7 @@ def _prepare_status(
         return DockerReadResponse(message, False)
 
     @staticmethod
-    def _prepare_stream(
-        log: Dict[str, Union[str, Dict[str, str]]]
-    ) -> List[DockerReadResponse]:
+    def _prepare_stream(log: Dict[str, Union[str, Dict[str, str]]]) -> List[DockerReadResponse]:
         stream = cast(str, log["stream"])
         return list(
             map(
@@ -110,9 +106,7 @@ def _prepare_stream(
         )
 
     @staticmethod
-    def _prepare_aux(
-        log: Dict[str, Union[str, Dict[str, str]]]
-    ) -> List[DockerReadResponse]:
+    def _prepare_aux(log: Dict[str, Union[str, Dict[str, str]]]) -> List[DockerReadResponse]:
         aux = cast(Dict[str, str], log["aux"])
         to_return = []
         if "Digest" in aux:
@@ -122,21 +116,15 @@ def _prepare_aux(
         return to_return
 
     @staticmethod
-    def _prepare_error_detail(
-        log: Dict[str, Union[str, Dict[str, str]]]
-    ) -> DockerReadResponse:
+    def _prepare_error_detail(log: Dict[str, Union[str, Dict[str, str]]]) -> DockerReadResponse:
         error_detail = cast(Dict[str, str], log["errorDetail"])
         error_message = error_detail.get("message", "")
         error_code = error_detail.get("code", None)
         return DockerReadResponse(
-            "ERROR: "
-            + error_message
-            + (f"\nError code: {error_code}" if error_code else ""),
+            "ERROR: " + error_message + (f"\nError code: {error_code}" if error_code else ""),
             True,
         )
 
     @staticmethod
-    def _prepare_error(
-        log: Dict[str, Union[str, Dict[str, str]]]
-    ) -> DockerReadResponse:
+    def _prepare_error(log: Dict[str, Union[str, Dict[str, str]]]) -> DockerReadResponse:
         return DockerReadResponse("ERROR: " + cast(str, log["error"]), True)
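
Note: the error path above only concatenates the message and optional code from a Docker JSON log entry. A small illustration of the expected input and output, assuming a log record shaped like Docker's push/build error messages:

# Illustrative Docker log entry; the key names follow the ones used above.
log = {
    "error": "denied: requested access to the resource is denied",
    "errorDetail": {"message": "denied: requested access to the resource is denied"},
}

error_detail = log["errorDetail"]
error_code = error_detail.get("code")
formatted = "ERROR: " + error_detail.get("message", "") + (
    f"\nError code: {error_code}" if error_code else ""
)
print(formatted)  # -> ERROR: denied: requested access to the resource is denied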
