|
| 1 | +/*! |
| 2 | + * Copyright 2017-2019 XGBoost contributors |
| 3 | + */ |
#include <gtest/gtest.h>
#include <xgboost/context.h>
#include <xgboost/json.h>
#include <xgboost/objective.h>

#include <memory>   // std::unique_ptr
#include <string>
#include <utility>  // std::pair
#include <vector>

#include "../helpers.h"
| 9 | +namespace xgboost { |
| 10 | + |
| 11 | +TEST(SyclObjective, LinearRegressionGPair) { |
| 12 | + Context ctx; |
| 13 | + ctx.UpdateAllowUnknown(Args{{"device", "sycl"}}); |
| 14 | + std::vector<std::pair<std::string, std::string>> args; |
| 15 | + |
| 16 | + std::unique_ptr<ObjFunction> obj { |
| 17 | + ObjFunction::Create("reg:squarederror_oneapi", &ctx) |
| 18 | + }; |
| 19 | + |
| 20 | + obj->Configure(args); |
| 21 | + CheckObjFunction(obj, |
| 22 | + {0, 0.1f, 0.9f, 1, 0, 0.1f, 0.9f, 1}, |
| 23 | + {0, 0, 0, 0, 1, 1, 1, 1}, |
| 24 | + {1, 1, 1, 1, 1, 1, 1, 1}, |
| 25 | + {0, 0.1f, 0.9f, 1.0f, -1.0f, -0.9f, -0.1f, 0}, |
| 26 | + {1, 1, 1, 1, 1, 1, 1, 1}); |
| 27 | + CheckObjFunction(obj, |
| 28 | + {0, 0.1f, 0.9f, 1, 0, 0.1f, 0.9f, 1}, |
| 29 | + {0, 0, 0, 0, 1, 1, 1, 1}, |
| 30 | + {}, // empty weight |
| 31 | + {0, 0.1f, 0.9f, 1.0f, -1.0f, -0.9f, -0.1f, 0}, |
| 32 | + {1, 1, 1, 1, 1, 1, 1, 1}); |
| 33 | + ASSERT_NO_THROW(obj->DefaultEvalMetric()); |
| 34 | +} |
| 35 | + |
| 36 | +TEST(SyclObjective, SquaredLog) { |
| 37 | + Context ctx; |
| 38 | + ctx.UpdateAllowUnknown(Args{{"device", "sycl"}}); |
| 39 | + std::vector<std::pair<std::string, std::string>> args; |
| 40 | + |
| 41 | + std::unique_ptr<ObjFunction> obj { ObjFunction::Create("reg:squaredlogerror_oneapi", &ctx) }; |
| 42 | + obj->Configure(args); |
| 43 | + CheckConfigReload(obj, "reg:squaredlogerror_oneapi"); |
| 44 | + |
| 45 | + CheckObjFunction(obj, |
| 46 | + {0.1f, 0.2f, 0.4f, 0.8f, 1.6f}, // pred |
| 47 | + {1.0f, 1.0f, 1.0f, 1.0f, 1.0f}, // labels |
| 48 | + {1.0f, 1.0f, 1.0f, 1.0f, 1.0f}, // weights |
| 49 | + {-0.5435f, -0.4257f, -0.25475f, -0.05855f, 0.1009f}, |
| 50 | + { 1.3205f, 1.0492f, 0.69215f, 0.34115f, 0.1091f}); |
| 51 | + CheckObjFunction(obj, |
| 52 | + {0.1f, 0.2f, 0.4f, 0.8f, 1.6f}, // pred |
| 53 | + {1.0f, 1.0f, 1.0f, 1.0f, 1.0f}, // labels |
| 54 | + {}, // empty weights |
| 55 | + {-0.5435f, -0.4257f, -0.25475f, -0.05855f, 0.1009f}, |
| 56 | + { 1.3205f, 1.0492f, 0.69215f, 0.34115f, 0.1091f}); |
| 57 | + ASSERT_EQ(obj->DefaultEvalMetric(), std::string{"rmsle"}); |
| 58 | +} |
| 59 | + |
| 60 | +TEST(SyclObjective, LogisticRegressionGPair) { |
| 61 | + Context ctx; |
| 62 | + ctx.UpdateAllowUnknown(Args{{"device", "sycl"}}); |
| 63 | + std::vector<std::pair<std::string, std::string>> args; |
| 64 | + std::unique_ptr<ObjFunction> obj { ObjFunction::Create("reg:logistic_oneapi", &ctx) }; |
| 65 | + |
| 66 | + obj->Configure(args); |
| 67 | + CheckConfigReload(obj, "reg:logistic_oneapi"); |
| 68 | + |
| 69 | + CheckObjFunction(obj, |
| 70 | + { 0, 0.1f, 0.9f, 1, 0, 0.1f, 0.9f, 1}, // preds |
| 71 | + { 0, 0 , 0, 0, 1, 1, 1, 1}, // labels |
| 72 | + { 1, 1, 1, 1, 1, 1, 1, 1}, // weights |
| 73 | + { 0.5f, 0.52f, 0.71f, 0.73f, -0.5f, -0.47f, -0.28f, -0.26f}, // out_grad |
| 74 | + {0.25f, 0.24f, 0.20f, 0.19f, 0.25f, 0.24f, 0.20f, 0.19f}); // out_hess |
| 75 | +} |
| 76 | + |
| 77 | +TEST(SyclObjective, LogisticRegressionBasic) { |
| 78 | + Context ctx; |
| 79 | + ctx.UpdateAllowUnknown(Args{{"device", "sycl"}}); |
| 80 | + std::vector<std::pair<std::string, std::string>> args; |
| 81 | + std::unique_ptr<ObjFunction> obj { |
| 82 | + ObjFunction::Create("reg:logistic_oneapi", &ctx) |
| 83 | + }; |
| 84 | + |
| 85 | + obj->Configure(args); |
| 86 | + CheckConfigReload(obj, "reg:logistic_oneapi"); |
| 87 | + |
| 88 | + // test label validation |
| 89 | + EXPECT_ANY_THROW(CheckObjFunction(obj, {0}, {10}, {1}, {0}, {0})) |
| 90 | + << "Expected error when label not in range [0,1f] for LogisticRegression"; |
| 91 | + |
| 92 | + // test ProbToMargin |
| 93 | + EXPECT_NEAR(obj->ProbToMargin(0.1f), -2.197f, 0.01f); |
| 94 | + EXPECT_NEAR(obj->ProbToMargin(0.5f), 0, 0.01f); |
| 95 | + EXPECT_NEAR(obj->ProbToMargin(0.9f), 2.197f, 0.01f); |
| 96 | + EXPECT_ANY_THROW(obj->ProbToMargin(10)) |
| 97 | + << "Expected error when base_score not in range [0,1f] for LogisticRegression"; |
| 98 | + |
| 99 | + // test PredTransform |
| 100 | + HostDeviceVector<bst_float> io_preds = {0, 0.1f, 0.5f, 0.9f, 1}; |
| 101 | + std::vector<bst_float> out_preds = {0.5f, 0.524f, 0.622f, 0.710f, 0.731f}; |
| 102 | + obj->PredTransform(&io_preds); |
| 103 | + auto& preds = io_preds.HostVector(); |
| 104 | + for (int i = 0; i < static_cast<int>(io_preds.Size()); ++i) { |
| 105 | + EXPECT_NEAR(preds[i], out_preds[i], 0.01f); |
| 106 | + } |
| 107 | +} |
| 108 | + |
| 109 | +TEST(SyclObjective, LogisticRawGPair) { |
| 110 | + Context ctx; |
| 111 | + ctx.UpdateAllowUnknown(Args{{"device", "sycl"}}); |
| 112 | + std::vector<std::pair<std::string, std::string>> args; |
| 113 | + std::unique_ptr<ObjFunction> obj { |
| 114 | + ObjFunction::Create("binary:logitraw_oneapi", &ctx) |
| 115 | + }; |
| 116 | + |
| 117 | + obj->Configure(args); |
| 118 | + |
| 119 | + CheckObjFunction(obj, |
| 120 | + { 0, 0.1f, 0.9f, 1, 0, 0.1f, 0.9f, 1}, |
| 121 | + { 0, 0, 0, 0, 1, 1, 1, 1}, |
| 122 | + { 1, 1, 1, 1, 1, 1, 1, 1}, |
| 123 | + { 0.5f, 0.52f, 0.71f, 0.73f, -0.5f, -0.47f, -0.28f, -0.26f}, |
| 124 | + {0.25f, 0.24f, 0.20f, 0.19f, 0.25f, 0.24f, 0.20f, 0.19f}); |
| 125 | +} |
| 126 | + |
| 127 | +TEST(SyclObjective, CPUvsSycl) { |
| 128 | + Context ctx; |
| 129 | + ctx.UpdateAllowUnknown(Args{{"device", "sycl"}}); |
| 130 | + ObjFunction * obj_sycl = |
| 131 | + ObjFunction::Create("reg:squarederror_oneapi", &ctx); |
| 132 | + |
| 133 | + ctx = ctx.MakeCPU(); |
| 134 | + ObjFunction * obj_cpu = |
| 135 | + ObjFunction::Create("reg:squarederror", &ctx); |
| 136 | + |
| 137 | + HostDeviceVector<GradientPair> cpu_out_preds; |
| 138 | + HostDeviceVector<GradientPair> sycl_out_preds; |
| 139 | + |
| 140 | + constexpr size_t kRows = 400; |
| 141 | + constexpr size_t kCols = 100; |
| 142 | + auto pdmat = RandomDataGenerator(kRows, kCols, 0).Seed(0).GenerateDMatrix(); |
| 143 | + HostDeviceVector<float> preds; |
| 144 | + preds.Resize(kRows); |
| 145 | + auto& h_preds = preds.HostVector(); |
| 146 | + for (size_t i = 0; i < h_preds.size(); ++i) { |
| 147 | + h_preds[i] = static_cast<float>(i); |
| 148 | + } |
| 149 | + auto& info = pdmat->Info(); |
| 150 | + |
| 151 | + info.labels.Reshape(kRows, 1); |
| 152 | + auto& h_labels = info.labels.Data()->HostVector(); |
| 153 | + for (size_t i = 0; i < h_labels.size(); ++i) { |
| 154 | + h_labels[i] = 1 / static_cast<float>(i+1); |
| 155 | + } |
| 156 | + |
| 157 | + { |
| 158 | + // CPU |
| 159 | + obj_cpu->GetGradient(preds, info, 0, &cpu_out_preds); |
| 160 | + } |
| 161 | + { |
| 162 | + // sycl |
| 163 | + obj_sycl->GetGradient(preds, info, 0, &sycl_out_preds); |
| 164 | + } |
| 165 | + |
| 166 | + auto& h_cpu_out = cpu_out_preds.HostVector(); |
| 167 | + auto& h_sycl_out = sycl_out_preds.HostVector(); |
| 168 | + |
| 169 | + float sgrad = 0; |
| 170 | + float shess = 0; |
| 171 | + for (size_t i = 0; i < kRows; ++i) { |
| 172 | + sgrad += std::pow(h_cpu_out[i].GetGrad() - h_sycl_out[i].GetGrad(), 2); |
| 173 | + shess += std::pow(h_cpu_out[i].GetHess() - h_sycl_out[i].GetHess(), 2); |
| 174 | + } |
| 175 | + ASSERT_NEAR(sgrad, 0.0f, kRtEps); |
| 176 | + ASSERT_NEAR(shess, 0.0f, kRtEps); |
| 177 | + |
| 178 | + delete obj_cpu; |
| 179 | + delete obj_sycl; |
| 180 | +} |
| 181 | + |
| 182 | +} // namespace xgboost |