@@ -136,15 +136,15 @@ namespace detail {
136136
137137inline uint64_t fetch64 (const char *p) {
138138 uint64_t result;
139- memcpy (&result, p, sizeof (result));
139+ std:: memcpy (&result, p, sizeof (result));
140140 if (sys::IsBigEndianHost)
141141 sys::swapByteOrder (result);
142142 return result;
143143}
144144
// Load an unaligned 32-bit value from `p` via std::memcpy (alignment-safe,
// no strict-aliasing violation), then byte-swap on big-endian hosts —
// presumably sys::swapByteOrder reverses the bytes, so the result is the
// little-endian interpretation of the input on every host (mirrors fetch64
// above). NOTE(review): closing brace of this function lies outside the
// visible hunk.
145145inline uint32_t fetch32 (const char *p) {
146146 uint32_t result;
147- memcpy (&result, p, sizeof (result));
147+ std:: memcpy (&result, p, sizeof (result));
148148 if (sys::IsBigEndianHost)
149149 sys::swapByteOrder (result);
150150 return result;
@@ -379,7 +379,7 @@ bool store_and_advance(char *&buffer_ptr, char *buffer_end, const T& value,
// Tail of store_and_advance (signature visible only in the hunk header):
// bounds-checks that `store_size` bytes fit before `buffer_end`; on success
// copies that many bytes of `value`'s object representation (starting at
// `offset` — per the hunk header, a parameter) into the buffer, advances
// `buffer_ptr` past the stored bytes, and returns true. Returns false,
// storing nothing, when the write would overrun the buffer.
379379 if (buffer_ptr + store_size > buffer_end)
380380 return false ;
381381 const char *value_data = reinterpret_cast <const char *>(&value);
382- memcpy (buffer_ptr, value_data + offset, store_size);
382+ std:: memcpy (buffer_ptr, value_data + offset, store_size);
383383 buffer_ptr += store_size;
384384 return true ;
385385}
@@ -513,7 +513,7 @@ struct hash_combine_recursive_helper {
// Fragment from hash_combine_recursive_helper (both ends outside the visible
// hunk): computes how many bytes remain in the staging buffer and copies that
// partial prefix of `data` into it with std::memcpy. The surrounding
// comments indicate the full store is retried after the buffer is flushed —
// NOTE(review): that flush/mix logic is not visible here; confirm against
// the full file.
513513 // with the variadic combine because that formation can have varying
514514 // argument types.
515515 size_t partial_store_size = buffer_end - buffer_ptr;
516- memcpy (buffer_ptr, &data, partial_store_size);
516+ std:: memcpy (buffer_ptr, &data, partial_store_size);
517517
518518 // If the store fails, our buffer is full and ready to hash. We have to
519519 // either initialize the hash state (on the first full buffer) or mix
0 commit comments