
Commit 4121ad3

fix test paths

1 parent 312f917

7 files changed: +42 −39 lines


paddle/legacy/gserver/tests/test_CompareSparse.cpp

Lines changed: 1 addition & 1 deletion
@@ -22,7 +22,7 @@ limitations under the License. */
 using namespace paddle;  // NOLINT
 using namespace std;     // NOLINT
 
-static const string& configFile1 = "gserver/tests/sequence_lstm.conf";
+static const string& configFile1 = "legacy/gserver/tests/sequence_lstm.conf";
 
 DECLARE_bool(use_gpu);
 DECLARE_string(config);

paddle/legacy/gserver/tests/test_CompareTwoNets.cpp

Lines changed: 3 additions & 2 deletions
@@ -40,9 +40,10 @@ DEFINE_double(
 DECLARE_bool(thread_local_rand_use_global_seed);
 DECLARE_int32(seed);
 
-static const string& config_file_a = "gserver/tests/sequence_recurrent.py";
+static const string& config_file_a =
+    "legacy/gserver/tests/sequence_recurrent.py";
 static const string& config_file_b =
-    "gserver/tests/sequence_recurrent_group.py";
+    "legacy/gserver/tests/sequence_recurrent_group.py";
 
 struct ComData {
   vector<Argument> outArgs;

paddle/legacy/gserver/tests/test_MKLDNN.cpp

Lines changed: 1 addition & 1 deletion
@@ -426,7 +426,7 @@ DECLARE_string(config_args);
 TEST(MKLDNNNet, net) {
   std::vector<std::string> cases = {"simple", "branch"};
   for (auto name : cases) {
-    std::string config = "./gserver/tests/mkldnn_" + name + "_net.conf";
+    std::string config = "./legacy/gserver/tests/mkldnn_" + name + "_net.conf";
     for (auto channels : {2, 32}) {
       std::ostringstream oss;
       oss << "channels=" << channels;

paddle/legacy/gserver/tests/test_NetworkCompare.cpp

Lines changed: 14 additions & 14 deletions
@@ -220,42 +220,42 @@ void compareNetwork(const std::string& config_file_a,
 }
 
 TEST(Compare, concat_dotmul) {
-  std::string config_file_a = "./gserver/tests/concat_dotmul_a.conf";
-  std::string config_file_b = "./gserver/tests/concat_dotmul_b.conf";
+  std::string config_file_a = "./legacy/gserver/tests/concat_dotmul_a.conf";
+  std::string config_file_b = "./legacy/gserver/tests/concat_dotmul_b.conf";
   compareNetwork(config_file_a, config_file_b);
 }
 
 TEST(Compare, concat_fullmatrix) {
-  std::string config_file_a = "./gserver/tests/concat_fullmatrix_a.conf";
-  std::string config_file_b = "./gserver/tests/concat_fullmatrix_b.conf";
+  std::string config_file_a = "./legacy/gserver/tests/concat_fullmatrix_a.conf";
+  std::string config_file_b = "./legacy/gserver/tests/concat_fullmatrix_b.conf";
   compareNetwork(config_file_a, config_file_b);
 }
 
 TEST(Compare, concat_table) {
-  std::string config_file_a = "./gserver/tests/concat_table_a.conf";
-  std::string config_file_b = "./gserver/tests/concat_table_b.conf";
+  std::string config_file_a = "./legacy/gserver/tests/concat_table_a.conf";
+  std::string config_file_b = "./legacy/gserver/tests/concat_table_b.conf";
   compareNetwork(config_file_a, config_file_b);
 }
 
 TEST(Compare, concat_slice) {
-  std::string config_file_a = "./gserver/tests/concat_slice_a.conf";
-  std::string config_file_b = "./gserver/tests/concat_slice_b.conf";
+  std::string config_file_a = "./legacy/gserver/tests/concat_slice_a.conf";
+  std::string config_file_b = "./legacy/gserver/tests/concat_slice_b.conf";
   compareNetwork(config_file_a, config_file_b);
 }
 
 #ifdef PADDLE_WITH_CUDA
 TEST(Compare, img_pool) {
-  std::string config_file_a = "./gserver/tests/img_pool_a.conf";
-  std::string config_file_b = "./gserver/tests/img_pool_b.conf";
+  std::string config_file_a = "./legacy/gserver/tests/img_pool_a.conf";
+  std::string config_file_b = "./legacy/gserver/tests/img_pool_b.conf";
   bool useGpu = FLAGS_use_gpu;
   FLAGS_use_gpu = true;
   compareNetwork(config_file_a, config_file_b);
   FLAGS_use_gpu = useGpu;
 }
 
 TEST(Compare, img_conv) {
-  std::string config_file_a = "./gserver/tests/img_conv_a.conf";
-  std::string config_file_b = "./gserver/tests/img_conv_b.conf";
+  std::string config_file_a = "./legacy/gserver/tests/img_conv_a.conf";
+  std::string config_file_b = "./legacy/gserver/tests/img_conv_b.conf";
   bool useGpu = FLAGS_use_gpu;
   FLAGS_use_gpu = true;
   compareNetwork(config_file_a, config_file_b);
@@ -264,8 +264,8 @@ TEST(Compare, img_conv) {
 
 // Test cudnn_conv and exconv give the same result
 TEST(Compare, img_conv2) {
-  std::string config_file_a = "./gserver/tests/img_conv_cudnn.py";
-  std::string config_file_b = "./gserver/tests/img_conv_exconv.py";
+  std::string config_file_a = "./legacy/gserver/tests/img_conv_cudnn.py";
+  std::string config_file_b = "./legacy/gserver/tests/img_conv_exconv.py";
   bool useGpu = FLAGS_use_gpu;
   double eps = FLAGS_checkgrad_eps;
   FLAGS_use_gpu = true;

paddle/legacy/gserver/tests/test_PyDataProvider.cpp

Lines changed: 4 additions & 2 deletions
@@ -35,7 +35,8 @@ TEST(PyDataProvider, py_fill_slots) {
   config.set_load_data_module(std::string("pyDataProvider"));
   config.set_load_data_object(std::string("SimpleDataProvider"));
   config.clear_files();
-  std::string dataFile = "gserver/tests/pyDataProvider/pyDataProviderList";
+  std::string dataFile =
+      "legacy/gserver/tests/pyDataProvider/pyDataProviderList";
   config.set_files(dataFile);
 #ifndef PADDLE_WITH_CUDA
   bool useGpu = false;
@@ -68,7 +69,8 @@ TEST(PyDataProvider, py_fill_nest_slots) {
   config.set_load_data_module(std::string("pyDataProvider"));
   config.set_load_data_object(std::string("SimpleNestDataProvider"));
   config.clear_files();
-  std::string dataFile = "gserver/tests/pyDataProvider/pyDataProviderList";
+  std::string dataFile =
+      "legacy/gserver/tests/pyDataProvider/pyDataProviderList";
   config.set_files(dataFile);
   EXPECT_EQ(config.IsInitialized(), true);
 #ifndef PADDLE_WITH_CUDA

paddle/legacy/gserver/tests/test_RecurrentGradientMachine.cpp

Lines changed: 12 additions & 12 deletions
@@ -102,11 +102,11 @@ void test(const string& conf1, const string& conf2, double eps, bool useGpu) {
   FLAGS_use_gpu = useGpu;
   int num_passes = 5;
   real* cost1 = new real[num_passes];
-  const string dir1 = "gserver/tests/t1";
+  const string dir1 = "legacy/gserver/tests/t1";
   CalCost(conf1, dir1, cost1, num_passes);
 
   real* cost2 = new real[num_passes];
-  const string dir2 = "gserver/tests/t2";
+  const string dir2 = "legacy/gserver/tests/t2";
   CalCost(conf2, dir2, cost2, num_passes);
 
   for (int i = 0; i < num_passes; i++) {
@@ -121,44 +121,44 @@ void test(const string& conf1, const string& conf2, double eps, bool useGpu) {
 
 TEST(RecurrentGradientMachine, HasSubSequence) {
   for (bool useGpu : {false, true}) {
-    test("gserver/tests/sequence_layer_group.conf",
-         "gserver/tests/sequence_nest_layer_group.conf",
+    test("legacy/gserver/tests/sequence_layer_group.conf",
+         "legacy/gserver/tests/sequence_nest_layer_group.conf",
          1e-5,
          useGpu);
   }
 }
 
 TEST(RecurrentGradientMachine, rnn) {
   for (bool useGpu : {false, true}) {
-    test("gserver/tests/sequence_rnn.conf",
-         "gserver/tests/sequence_nest_rnn.conf",
+    test("legacy/gserver/tests/sequence_rnn.conf",
+         "legacy/gserver/tests/sequence_nest_rnn.conf",
          1e-6,
          useGpu);
   }
 }
 
 TEST(RecurrentGradientMachine, rnn_multi_input) {
   for (bool useGpu : {false, true}) {
-    test("gserver/tests/sequence_rnn_multi_input.conf",
-         "gserver/tests/sequence_nest_rnn_multi_input.conf",
+    test("legacy/gserver/tests/sequence_rnn_multi_input.conf",
+         "legacy/gserver/tests/sequence_nest_rnn_multi_input.conf",
          1e-6,
          useGpu);
   }
 }
 
 TEST(RecurrentGradientMachine, rnn_multi_unequalength_input) {
   for (bool useGpu : {false, true}) {
-    test("gserver/tests/sequence_rnn_multi_unequalength_inputs.py",
-         "gserver/tests/sequence_nest_rnn_multi_unequalength_inputs.py",
+    test("legacy/gserver/tests/sequence_rnn_multi_unequalength_inputs.py",
+         "legacy/gserver/tests/sequence_nest_rnn_multi_unequalength_inputs.py",
          1e-6,
          useGpu);
   }
 }
 
 TEST(RecurrentGradientMachine, rnn_mixed_input) {
   for (bool useGpu : {false, true}) {
-    test("gserver/tests/sequence_rnn_mixed_inputs.py",
-         "gserver/tests/sequence_rnn_matched_inputs.py",
+    test("legacy/gserver/tests/sequence_rnn_mixed_inputs.py",
+         "legacy/gserver/tests/sequence_rnn_matched_inputs.py",
          1e-6,
          useGpu);
   }

paddle/legacy/gserver/tests/test_SelectiveFCLayer.cpp

Lines changed: 7 additions & 7 deletions
@@ -76,7 +76,7 @@ void calcOutput(ComData& comData,
   FLAGS_config = configFile;
   FLAGS_config_args = configArgs;
   FLAGS_use_gpu = useGpu;
-  FLAGS_init_model_path = "gserver/tests/SelectiveFcTest/model";
+  FLAGS_init_model_path = "legacy/gserver/tests/SelectiveFcTest/model";
   *ThreadLocalRand::getSeed() = 0;
   srand(0);
 
@@ -311,13 +311,13 @@ LayerPtr initFcLayer(LayerPtr dataLayer,
 #ifndef PADDLE_TYPE_DOUBLE
 // The parameter file used in fc.conf and selective_fc.conf is float
 TEST(Layer, SelectiveFcLayer_train_dense_mul) {
-  const string& fcConfig = "gserver/tests/SelectiveFcTest/conf/fc.conf";
+  const string& fcConfig = "legacy/gserver/tests/SelectiveFcTest/conf/fc.conf";
   const string& fcConfigArgs =
-      "filelist=gserver/tests/SelectiveFcTest/dense_mul_list";
+      "filelist=legacy/gserver/tests/SelectiveFcTest/dense_mul_list";
   const string& selFcConfig =
-      "gserver/tests/SelectiveFcTest/conf/selective_fc.conf";
+      "legacy/gserver/tests/SelectiveFcTest/conf/selective_fc.conf";
   const string& selConfigArgs =
-      "filelist=gserver/tests/SelectiveFcTest/dense_mul_list";
+      "filelist=legacy/gserver/tests/SelectiveFcTest/dense_mul_list";
 
   for (auto useGpu : {false, true}) {
 #ifndef PADDLE_WITH_CUDA
@@ -350,7 +350,7 @@ void testSelectiveFcLayerTrainSparseMul(const LayerConfig& config,
       creatDataLayer("data", batchSize, dataLayerSize, values, useGpu);
 
   const string& selfcParaFile =
-      "gserver/tests/SelectiveFcTest/model/rand_fc_param.w.transpose";
+      "legacy/gserver/tests/SelectiveFcTest/model/rand_fc_param.w.transpose";
   const string& selfcParaName = "rand_fc_param.w.transpose";
 
   std::shared_ptr<SelectiveFullyConnectedLayer> selfcLayer =
@@ -396,7 +396,7 @@ void testSelectiveFcLayerTrainSparseMul(const LayerConfig& config,
   size_t nnz = cpuOutMatSelfc->getElementCnt();
 
   const string& fcParaFile =
-      "gserver/tests/SelectiveFcTest/model/rand_fc_param.w";
+      "legacy/gserver/tests/SelectiveFcTest/model/rand_fc_param.w";
   const string& fcParaName = "rand_fc_param.w";
   LayerConfig fcLayerConfig;
   fcLayerConfig.set_name("fc_layer");
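
Every file in this commit makes the same change: the relative test config paths gain a legacy/ prefix so they still resolve after the gserver sources moved under paddle/legacy. As a reference point only, here is a minimal sketch (not part of this commit; the test name and the gtest-based existence check are hypothetical, and it assumes the tests run from the directory that contains legacy/, as the updated paths imply) that verifies a few of the relocated configs are reachable:

// Hypothetical sanity check -- not part of commit 4121ad3.
// Assumes the working directory is the one containing legacy/ and that
// googletest is available, matching how these legacy tests are driven.
#include <fstream>
#include <string>
#include <vector>

#include <gtest/gtest.h>

TEST(LegacyTestPaths, RelocatedConfigsAreReachable) {
  const std::vector<std::string> configs = {
      "legacy/gserver/tests/sequence_lstm.conf",
      "legacy/gserver/tests/sequence_recurrent.py",
      "legacy/gserver/tests/SelectiveFcTest/conf/fc.conf",
  };
  for (const auto& path : configs) {
    std::ifstream file(path);
    EXPECT_TRUE(file.good()) << "missing config: " << path;
  }
}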
