Skip to content

Commit b98908f

Browse files
committed
fix coverage
Signed-off-by: Alexandros Koumparoulis <akoumparouli@nvidia.com>
1 parent 561f61a commit b98908f

File tree

4 files changed

+31
-7
lines changed

4 files changed

+31
-7
lines changed
Lines changed: 13 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,13 @@
1+
# Copyright (c) 2025, NVIDIA CORPORATION. All rights reserved.
2+
#
3+
# Licensed under the Apache License, Version 2.0 (the "License");
4+
# you may not use this file except in compliance with the License.
5+
# You may obtain a copy of the License at
6+
#
7+
# http://www.apache.org/licenses/LICENSE-2.0
8+
#
9+
# Unless required by applicable law or agreed to in writing, software
10+
# distributed under the License is distributed on an "AS IS" BASIS,
11+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12+
# See the License for the specific language governing permissions and
13+
# limitations under the License.

tests/functional_tests/llm_pretrain_and_kd/customizer_retrieval/test_biencoder_checkpoint_restoration.py

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -80,8 +80,11 @@ def _run_training(recipe_yaml: str, checkpoint_dir: str) -> Path:
8080
"""Launch biencoder training as a subprocess and return the checkpoint dir."""
8181
cmd = [
8282
sys.executable,
83-
"-m",
84-
"nemo_automodel.recipes.biencoder.train_biencoder",
83+
"-m", "coverage", "run",
84+
"--data-file=/workspace/.coverage",
85+
"--source=/workspace/",
86+
"--parallel-mode",
87+
"-m", "nemo_automodel.recipes.biencoder.train_biencoder",
8588
"--config",
8689
recipe_yaml,
8790
]

tests/functional_tests/llm_pretrain_and_kd/customizer_retrieval/test_biencoder_finetuning_inline.sh

Lines changed: 8 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -15,29 +15,33 @@
1515

1616
set -exo pipefail
1717

18+
COVERAGE_ARGS="--data-file=/workspace/.coverage --source=/workspace/ --parallel-mode"
19+
1820
# Run the biencoder recipe (uses nemo_automodel/recipes/biencoder/train_biencoder.py via module entrypoint).
19-
python3 -m nemo_automodel.recipes.biencoder.train_biencoder \
21+
python3 -m coverage run ${COVERAGE_ARGS} \
22+
-m nemo_automodel.recipes.biencoder.train_biencoder \
2023
--config \
2124
tests/functional_tests/llm_pretrain_and_kd/customizer_retrieval/recipe.yaml \
2225
--model.pretrained_model_name_or_path $TEST_DATA_DIR/llama-nemotron-embed-1b-v2/ \
2326
--tokenizer.pretrained_model_name_or_path $TEST_DATA_DIR/llama-nemotron-embed-1b-v2/ \
2427
--dataloader.dataset.data_dir_list $TEST_DATA_DIR/embedding_testdata/training.jsonl \
2528

2629
# Compare baseline vs finetuned biencoder checkpoint (pos-neg separation should not degrade).
27-
python3 \
30+
python3 -m coverage run --append ${COVERAGE_ARGS} \
2831
tests/functional_tests/llm_pretrain_and_kd/customizer_retrieval/compare_biencoder_models.py \
2932
$TEST_DATA_DIR/llama-nemotron-embed-1b-v2 \
3033
/workspace/output/biencoder_inline/checkpoints/epoch_0_step_31/ \
3134
$TEST_DATA_DIR/embedding_testdata/testing.jsonl \
3235
true
3336

34-
# ---- Checkpoint restoration tests ----
37+
# Checkpoint restoration tests
3538
# Test 1: Full-model checkpoint restoration (NeMo -> save -> transformers load)
3639
# Test 2: PEFT (LoRA) checkpoint restoration (NeMo -> save -> transformers + safetensors load)
3740
BASE_MODEL_PATH=$TEST_DATA_DIR/llama-nemotron-embed-1b-v2 \
3841
CHECKPOINT_DIR=/workspace/output/biencoder_ckpt_restore/checkpoints \
3942
PEFT_CHECKPOINT_DIR=/workspace/output/biencoder_ckpt_restore_peft/checkpoints \
4043
RECIPE_YAML=tests/functional_tests/llm_pretrain_and_kd/customizer_retrieval/recipe_ckpt_restore.yaml \
4144
PEFT_RECIPE_YAML=tests/functional_tests/llm_pretrain_and_kd/customizer_retrieval/recipe_peft.yaml \
42-
python3 -m pytest -xvs \
45+
python3 -m coverage run --append ${COVERAGE_ARGS} \
46+
-m pytest -xvs \
4347
tests/functional_tests/llm_pretrain_and_kd/customizer_retrieval/test_biencoder_checkpoint_restoration.py

tests/functional_tests/llm_pretrain_and_kd/customizer_retrieval/test_customizer_retrieval.py

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -73,7 +73,11 @@ def _run_training() -> Path:
7373
"""Launch the biencoder training recipe as a subprocess and return the
7474
checkpoint directory produced by the run."""
7575
cmd = [
76-
sys.executable, "-m", "nemo_automodel.recipes.biencoder.train_biencoder",
76+
sys.executable, "-m", "coverage", "run",
77+
"--data-file=/workspace/.coverage",
78+
"--source=/workspace/",
79+
"--parallel-mode",
80+
"-m", "nemo_automodel.recipes.biencoder.train_biencoder",
7781
"--config", RECIPE_YAML,
7882
]
7983
result = subprocess.run(cmd, cwd=str(_REPO_ROOT), check=True)

0 commit comments

Comments (0)