Skip to content

Commit dcb527d

Browse files
clee2000 and malfet
authored
data_parallel_tutorial to run on multigpu (#2477)
* data_parallel_tutorial to run on multigpu
* Collect unused memory
* set numpy seed
* empty cuda cache
* update

---------

Co-authored-by: Nikita Shulga <[email protected]>
1 parent 081f7df commit dcb527d

File tree

2 files changed

+7
-0
lines changed

2 files changed

+7
-0
lines changed

.jenkins/metadata.json

+3
Original file line numberDiff line numberDiff line change
@@ -22,6 +22,9 @@
2222
"duration": 320,
2323
"needs": "gpu.nvidia.small.multi"
2424
},
25+
"beginner_source/blitz/data_parallel_tutorial.py": {
26+
"needs": "gpu.nvidia.small.multi"
27+
},
2528
"intermediate_source/model_parallel_tutorial.py": {
2629
"needs": "gpu.nvidia.small.multi"
2730
},

conf.py

+4
Original file line numberDiff line numberDiff line change
@@ -33,6 +33,8 @@
3333
sys.path.insert(0, os.path.abspath('./.jenkins'))
3434
import pytorch_sphinx_theme
3535
import torch
36+
import numpy
37+
import gc
3638
import glob
3739
import random
3840
import shutil
@@ -91,6 +93,8 @@ def reset_seeds(gallery_conf, fname):
9193
torch.manual_seed(42)
9294
torch.set_default_device(None)
9395
random.seed(10)
96+
numpy.random.seed(10)
97+
gc.collect()
9498

9599
sphinx_gallery_conf = {
96100
'examples_dirs': ['beginner_source', 'intermediate_source',

0 commit comments

Comments (0)