@@ -15,8 +15,6 @@ tuple<torch::Tensor, torch::Tensor, torch::Tensor, torch::Tensor>
1515sample (const torch::Tensor &colptr, const torch::Tensor &row,
1616 const torch::Tensor &input_node, const vector<int64_t > num_neighbors) {
1717
18- srand (time (NULL ) + 1000 * getpid ()); // Initialize random seed.
19-
2018 // Initialize some data structures for the sampling process:
2119 vector<int64_t > samples;
2220 unordered_map<int64_t , int64_t > to_local_node;
@@ -59,7 +57,7 @@ sample(const torch::Tensor &colptr, const torch::Tensor &row,
5957 }
6058 } else if (replace) {
6159 for (int64_t j = 0 ; j < num_samples; j++) {
62- const int64_t offset = col_start + rand () % col_count;
60+ const int64_t offset = col_start + uniform_randint ( col_count) ;
6361 const int64_t &v = row_data[offset];
6462 const auto res = to_local_node.insert ({v, samples.size ()});
6563 if (res.second )
@@ -73,7 +71,7 @@ sample(const torch::Tensor &colptr, const torch::Tensor &row,
7371 } else {
7472 unordered_set<int64_t > rnd_indices;
7573 for (int64_t j = col_count - num_samples; j < col_count; j++) {
76- int64_t rnd = rand () % j ;
74+ int64_t rnd = uniform_randint (j) ;
7775 if (!rnd_indices.insert (rnd).second ) {
7876 rnd = j;
7977 rnd_indices.insert (j);
@@ -127,8 +125,6 @@ hetero_sample(const vector<node_t> &node_types,
127125 const c10::Dict<rel_t , vector<int64_t >> &num_neighbors_dict,
128126 const int64_t num_hops) {
129127
130- srand (time (NULL ) + 1000 * getpid ()); // Initialize random seed.
131-
132128 // Create a mapping to convert single string relations to edge type triplets:
133129 unordered_map<rel_t , edge_t > to_edge_type;
134130 for (const auto &k : edge_types)
@@ -180,8 +176,10 @@ hetero_sample(const vector<node_t> &node_types,
180176 auto &src_samples = samples_dict.at (src_node_type);
181177 auto &to_local_src_node = to_local_node_dict.at (src_node_type);
182178
183- const auto *colptr_data = ((torch::Tensor)colptr_dict.at (rel_type)).data_ptr <int64_t >();
184- const auto *row_data = ((torch::Tensor)row_dict.at (rel_type)).data_ptr <int64_t >();
179+ const auto *colptr_data =
180+ ((torch::Tensor)colptr_dict.at (rel_type)).data_ptr <int64_t >();
181+ const auto *row_data =
182+ ((torch::Tensor)row_dict.at (rel_type)).data_ptr <int64_t >();
185183
186184 auto &rows = rows_dict.at (rel_type);
187185 auto &cols = cols_dict.at (rel_type);
@@ -212,7 +210,7 @@ hetero_sample(const vector<node_t> &node_types,
212210 }
213211 } else if (replace) {
214212 for (int64_t j = 0 ; j < num_samples; j++) {
215- const int64_t offset = col_start + rand () % col_count;
213+ const int64_t offset = col_start + uniform_randint ( col_count) ;
216214 const int64_t &v = row_data[offset];
217215 const auto res = to_local_src_node.insert ({v, src_samples.size ()});
218216 if (res.second )
@@ -226,7 +224,7 @@ hetero_sample(const vector<node_t> &node_types,
226224 } else {
227225 unordered_set<int64_t > rnd_indices;
228226 for (int64_t j = col_count - num_samples; j < col_count; j++) {
229- int64_t rnd = rand () % j ;
227+ int64_t rnd = uniform_randint (j) ;
230228 if (!rnd_indices.insert (rnd).second ) {
231229 rnd = j;
232230 rnd_indices.insert (j);
@@ -262,7 +260,8 @@ hetero_sample(const vector<node_t> &node_types,
262260 auto &to_local_src_node = to_local_node_dict.at (src_node_type);
263261
264262 const auto *colptr_data = ((torch::Tensor)kv.value ()).data_ptr <int64_t >();
265- const auto *row_data = ((torch::Tensor)row_dict.at (rel_type)).data_ptr <int64_t >();
263+ const auto *row_data =
264+ ((torch::Tensor)row_dict.at (rel_type)).data_ptr <int64_t >();
266265
267266 auto &rows = rows_dict.at (rel_type);
268267 auto &cols = cols_dict.at (rel_type);