@@ -986,9 +986,14 @@ metric::proto::EvaluationResults TrainingLogToEvaluationResults(
   evaluation.set_loss_value(eval_set == TrainingLogEvaluationSet::kValidation
                                 ? log_entry.validation_loss()
                                 : log_entry.training_loss());
-
-  for (int metrix_idx = 0;
-       metrix_idx < training_logs.secondary_metric_names_size(); metrix_idx++) {
+  int secondary_metric_size =
+      eval_set == TrainingLogEvaluationSet::kValidation
+          ? log_entry.validation_secondary_metrics_size()
+          : log_entry.training_secondary_metrics_size();
+  secondary_metric_size = std::min(secondary_metric_size,
+                                   training_logs.secondary_metric_names_size());
+
+  for (int metrix_idx = 0; metrix_idx < secondary_metric_size; metrix_idx++) {
     const auto& metric_name = training_logs.secondary_metric_names(metrix_idx);
     const auto metric_value =
         eval_set == TrainingLogEvaluationSet::kValidation
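The hunk above guards against a size mismatch between the declared secondary metric names and the metric values actually recorded in a log entry: the loop now runs over the smaller of the two parallel arrays instead of trusting secondary_metric_names_size() alone. A minimal Python sketch of the same defensive pattern, with the hypothetical names and values standing in for secondary_metric_names and a log entry's recorded metrics:

def paired_metrics(names, values):
    # Iterate only over indices that are valid in BOTH parallel sequences,
    # mirroring the std::min(...) clamp added in the C++ hunk above.
    n = min(len(names), len(values))
    return [(names[i], values[i]) for i in range(n)]

# An entry that recorded fewer values than there are declared names no
# longer reads out of bounds; the extra name is simply skipped.
print(paired_metrics(["accuracy", "auc"], [0.91]))  # [('accuracy', 0.91)]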
@@ -92,6 +92,30 @@ def test_training_logs(self):
     self.assertAlmostEqual(training_evaluation.loss, 0.5057407)
     self.assertAlmostEqual(training_evaluation.accuracy, 0.89436144)
 
+  def test_training_logs_with_newly_trained_model(self):
+    dataset = {
+        "x": np.array([0, 0, 1, 1] * 20),
+        "y": np.array([0, 0, 1, 1] * 20),
+    }
+    model = specialized_learners.GradientBoostedTreesLearner(
+        label="y",
+        num_trees=5,
+        validation_ratio=0.5,
+    ).train(dataset)
+
+    training_logs = model.training_logs()
+    self.assertLen(training_logs, 5)
+
+    for log in training_logs:
+      # Check validation evaluation
+      self.assertIsNotNone(log.evaluation)
+      self.assertTrue(hasattr(log.evaluation, "loss"))
+      self.assertIsInstance(log.evaluation.loss, float)
+
+      # Check training evaluation
+      self.assertIsNotNone(log.training_evaluation)
+      self.assertTrue(hasattr(log.training_evaluation, "loss"))
+
   def test_empty_training_logs(self):
     # This model has no training logs.
     training_logs = self.adult_binary_class_gbdt.training_logs()
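For context, a hedged usage sketch of the API the new test exercises: train a small gradient boosted trees model with a validation split and walk the per-iteration logs. It relies only on the attributes the test asserts (evaluation, training_evaluation, and their loss fields); the ydf import path is an assumption and may differ from the test's specialized_learners module:

import numpy as np
import ydf  # assumed public package name; the test imports specialized_learners

dataset = {
    "x": np.array([0, 0, 1, 1] * 20),
    "y": np.array([0, 0, 1, 1] * 20),
}
model = ydf.GradientBoostedTreesLearner(
    label="y",
    num_trees=5,
    validation_ratio=0.5,
).train(dataset)

for log in model.training_logs():
    # log.evaluation holds the validation metrics and log.training_evaluation
    # the training-set metrics, as asserted in the test above.
    print(log.evaluation.loss, log.training_evaluation.loss)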
@@ -17,6 +17,7 @@
 
 #include <pybind11/numpy.h>
 
+#include <cmath>
 #include <cstring>
 #include <memory>
 #include <utility>
@@ -94,18 +95,21 @@ std::vector<GBTCCTrainingLogEntry> GradientBoostedTreesCCModel::training_logs()
   const auto& label_col_spec = gbt_model_->label_col_spec();
   logs.reserve(training_logs.entries_size());
   for (const auto& entry : training_logs.entries()) {
-    const auto& validation_evaluation =
-        model::gradient_boosted_trees::internal::TrainingLogToEvaluationResults(
-            entry, training_logs, gbt_model_->task(), label_col_spec,
-            gbt_model_->loss_config(), gbt_model_->GetLossName(),
-            model::gradient_boosted_trees::internal::TrainingLogEvaluationSet::
-                kValidation);
-    const auto& training_evaluation =
+    const auto training_evaluation =
         model::gradient_boosted_trees::internal::TrainingLogToEvaluationResults(
             entry, training_logs, gbt_model_->task(), label_col_spec,
             gbt_model_->loss_config(), gbt_model_->GetLossName(),
             model::gradient_boosted_trees::internal::TrainingLogEvaluationSet::
                 kTraining);
+    metric::proto::EvaluationResults validation_evaluation;
+    if (!std::isnan(gbt_model_->validation_loss())) {
+      validation_evaluation = model::gradient_boosted_trees::internal::
+          TrainingLogToEvaluationResults(
+              entry, training_logs, gbt_model_->task(), label_col_spec,
+              gbt_model_->loss_config(), gbt_model_->GetLossName(),
+              model::gradient_boosted_trees::internal::
+                  TrainingLogEvaluationSet::kValidation);
+    }
     logs.push_back({.iteration = entry.number_of_trees(),
                     .validation_evaluation = validation_evaluation,
                     .training_evaluation = training_evaluation});
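This last hunk (together with the new <cmath> include above) skips the validation-side conversion when the model was trained without a validation set, which the C++ model signals with a NaN validation loss; validation_evaluation then stays a default-constructed proto. A minimal Python sketch of the same NaN-sentinel guard, with the hypothetical compute_eval standing in for TrainingLogToEvaluationResults:

import math

def validation_evaluation(validation_loss, compute_eval):
    # No validation set: the loss sentinel is NaN, so return an empty
    # default instead of evaluating, mirroring the default-constructed
    # metric::proto::EvaluationResults in the C++ hunk above.
    if math.isnan(validation_loss):
        return {}
    return compute_eval()

print(validation_evaluation(float("nan"), lambda: {"loss": 0.41}))  # {}
print(validation_evaluation(0.41, lambda: {"loss": 0.41}))  # {'loss': 0.41}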