If you use TensorFlow 2.0 (or Keras with a TensorFlow backend), you can limit the GPU memory used by your script as follows:
```python
import tensorflow as tf

lim = 2048  # memory limit in megabytes (2 GB)

# Cap the memory that can be allocated on every visible GPU at `lim` MB.
gpus = tf.config.experimental.list_physical_devices('GPU')
if gpus:
    for gpu in gpus:
        tf.config.experimental.set_virtual_device_configuration(
            gpu,
            [tf.config.experimental.VirtualDeviceConfiguration(memory_limit=lim)])
```

This code first checks whether any GPUs are visible to your script. If so, it limits the memory requested on each GPU to `lim` MB (2 GB in the example). If you are using a single GPU, you can simplify this by removing the for loop and setting the limit just for `gpus[0]`.
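As a minimal sketch of that simplification (assuming at least one GPU is detected, and reusing the same `lim` value as above), the single-GPU variant could look like this:

```python
import tensorflow as tf

lim = 2048  # megabytes, same limit as above

# Single-GPU variant: cap only the first visible GPU.
# Note: this must run before TensorFlow initializes the GPU,
# otherwise set_virtual_device_configuration raises a RuntimeError.
gpus = tf.config.experimental.list_physical_devices('GPU')
if gpus:
    tf.config.experimental.set_virtual_device_configuration(
        gpus[0],
        [tf.config.experimental.VirtualDeviceConfiguration(memory_limit=lim)])
```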
## Limiting GPU Memory in PyTorch