Skip to content

Commit 8287fa8

Browse files
committed
fix totalGlobalMem
1 parent dc6c4a8 commit 8287fa8

File tree

1 file changed

+1
-1
lines changed

1 file changed

+1
-1
lines changed

aten/src/ATen/cuda/CUDABlas.cpp

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -193,7 +193,7 @@ static size_t _parseChosenWorkspaceSize() {
193  193     cudaDeviceProp* p = at::cuda::getDeviceProperties(c10::cuda::current_device());
194  194     // Keep workspace_size = 1024 for small Ampere GPUs
195  195     // See https://github.com/pytorch/pytorch/pull/120925#issuecomment-1977556485
196    -     if (p->major == 8 && p->total_memory / 1073741824 >= 24) {
     196+    if (p->major == 8 && p->totalGlobalMem / 1073741824 >= 24) {
197  197       workspace_size = 4096;
198  198     } else if (p->major >= 9) {
199  199       workspace_size = 32768;

0 commit comments

Comments (0)