@@ -155,7 +155,7 @@ void GLBackend::init() {
 #if defined(Q_OS_ANDROID) || defined(USE_GLES) || defined(Q_OS_DARWIN)
     qCDebug(gpugllogging) << "Automatic texture memory not supported in this configuration";
     _videoCard = Unknown;
-    _dedicatedMemory = gpu->getMemory() * BYTES_PER_MIB;
+    _dedicatedMemory = (size_t)(gpu->getMemory()) * BYTES_PER_MIB;
     _totalMemory = _dedicatedMemory;
 #endif

@@ -171,8 +171,8 @@ void GLBackend::init() {
         qCDebug(gpugllogging) << "GPU_MEMORY_INFO_TOTAL_AVAILABLE_MEMORY_NVX: " << GPU_MEMORY_INFO_TOTAL_AVAILABLE_MEMORY_NVX;
         qCDebug(gpugllogging) << "GPU_MEMORY_INFO_CURRENT_AVAILABLE_VIDMEM_NVX: " << GPU_MEMORY_INFO_CURRENT_AVAILABLE_VIDMEM_NVX;

-        _totalMemory = GPU_MEMORY_INFO_TOTAL_AVAILABLE_MEMORY_NVX * BYTES_PER_KIB;
-        _dedicatedMemory = GPU_MEMORY_INFO_DEDICATED_VIDMEM_NVX * BYTES_PER_KIB;
+        _totalMemory = (size_t)(GPU_MEMORY_INFO_TOTAL_AVAILABLE_MEMORY_NVX) * BYTES_PER_KIB;
+        _dedicatedMemory = (size_t)(GPU_MEMORY_INFO_DEDICATED_VIDMEM_NVX) * BYTES_PER_KIB;
         _videoCard = NVIDIA;


@@ -182,20 +182,20 @@ void GLBackend::init() {
         GL_GET_INTEGER(TEXTURE_FREE_MEMORY_ATI);

         // We are actually getting free memory instead of total memory
-        _totalMemory = TEXTURE_FREE_MEMORY_ATI * BYTES_PER_KIB;
+        _totalMemory = (size_t)(TEXTURE_FREE_MEMORY_ATI) * BYTES_PER_KIB;
         _dedicatedMemory = _totalMemory;
         _videoCard = ATI;
     } else if (::gl::queryCurrentRendererIntegerMESA(GLX_RENDERER_VIDEO_MEMORY_MESA, &mem)) {
         // This works only on Linux. queryCurrentRendererIntegerMESA will return false if the
         // function is not supported because we're not on Linux, or for any other reason.
         qCDebug(gpugllogging) << "MESA card detected";
-        _totalMemory = mem * BYTES_PER_MIB;
+        _totalMemory = (size_t)(mem) * BYTES_PER_MIB;
         _dedicatedMemory = _totalMemory;
         _videoCard = MESA;
     } else {
         qCCritical(gpugllogging) << "Don't know how to get memory for OpenGL vendor " << vendor << "; renderer " << renderer << ", trying fallback";
         _videoCard = Unknown;
-        _dedicatedMemory = gpu->getMemory() * BYTES_PER_MIB;
+        _dedicatedMemory = (size_t)(gpu->getMemory()) * BYTES_PER_MIB;
         _totalMemory = _dedicatedMemory;
     }
 #endif
@@ -237,12 +237,12 @@ size_t GLBackend::getAvailableMemory() {
 #if !defined(Q_OS_ANDROID) && !defined(USE_GLES)
             glGetIntegerv(GL_GPU_MEMORY_INFO_CURRENT_AVAILABLE_VIDMEM_NVX, &mem[0]);
 #endif
-            return mem[0] * BYTES_PER_KIB;
+            return (size_t)(mem[0]) * BYTES_PER_KIB;
         case ATI:
 #if !defined(Q_OS_ANDROID) && !defined(USE_GLES)
             glGetIntegerv(GL_TEXTURE_FREE_MEMORY_ATI, &mem[0]);
 #endif
-            return mem[0] * BYTES_PER_KIB;
+            return (size_t)(mem[0]) * BYTES_PER_KIB;
         case MESA:
             return 0; // Don't know the current value
         case Unknown:
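The casts matter because the NVX and ATI memory-info queries fill a 32-bit GLint with a size in KiB (and the gpu->getMemory() fallback reports MiB in a similarly narrow integer), so multiplying by BYTES_PER_KIB or BYTES_PER_MIB in plain int arithmetic can overflow once a card has 2 GiB or more of VRAM. Widening the left operand to size_t first makes the whole product evaluate in 64 bits on 64-bit platforms. A minimal standalone sketch of the effect, assuming BYTES_PER_KIB is 1024; the 8 GiB figure and the variable names are illustrative, not taken from the patch:

// Sketch of the overflow the (size_t) casts guard against.
#include <cstddef>
#include <cstdint>
#include <iostream>

int main() {
    constexpr std::size_t BYTES_PER_KIB = 1024;  // assumed value, matching its use above

    std::int32_t reportedKib = 8 * 1024 * 1024;  // hypothetical 8 GiB card, reported in KiB

    // Widening to size_t before multiplying, as the patch does, keeps the
    // arithmetic in 64 bits: 8589934592 bytes.
    std::size_t totalBytes = (std::size_t)(reportedKib) * BYTES_PER_KIB;

    // Multiplying in 32 bits first loses the high bits (shown with unsigned
    // arithmetic so the wraparound is well defined): the result is 0.
    std::uint32_t wrapped = static_cast<std::uint32_t>(reportedKib) * 1024u;

    std::cout << "widened: " << totalBytes << "\n";
    std::cout << "wrapped: " << wrapped << "\n";
    return 0;
}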