Skip to content

Commit d066b07

Browse files
QiJun authored and reyoung committed
change GPU memory allocating policy (#6159)
* change GPU memory allocating policy
* fix potential overflow bug
1 parent e50f357 commit d066b07

File tree

1 file changed

+9
-5
lines changed

1 file changed

+9
-5
lines changed

paddle/platform/gpu_info.cc

Lines changed: 9 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -75,15 +75,19 @@ size_t GpuMaxChunkSize() {
7575
GpuMemoryUsage(available, total);
7676

7777
// Reserving the rest memory for page tables, etc.
78-
size_t reserving = (1 - FLAGS_fraction_of_gpu_memory_to_use) * total;
78+
size_t reserving = 0.05 * total;
7979

8080
// If available less than minimum chunk size, no usable memory exists.
81-
available = std::max(available, GpuMinChunkSize()) - GpuMinChunkSize();
81+
available =
82+
std::max(std::max(available, GpuMinChunkSize()) - GpuMinChunkSize(),
83+
reserving) -
84+
reserving;
8285

83-
// If available less than reserving, no usable memory exists.
84-
size_t usable = std::max(available, reserving) - reserving;
86+
size_t allocating = FLAGS_fraction_of_gpu_memory_to_use * total;
8587

86-
return usable;
88+
PADDLE_ENFORCE_LT(allocating, available);
89+
90+
return allocating;
8791
}
8892

8993
void GpuMemcpyAsync(void *dst, const void *src, size_t count,

0 commit comments

Comments (0)