Skip to content
GitLab
Menu
Projects
Groups
Snippets
Loading...
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in / Register
Toggle navigation
Menu
Open sidebar
OpenDAS
dgl
Commits
33e80452
Unverified
Commit
33e80452
authored
Nov 23, 2023
by
Ramon Zhou
Committed by
GitHub
Nov 23, 2023
Browse files
[GraphBolt] Set persistent_workers in MultiProcessDataLoader (#6592)
parent
81ac9d27
Changes
1
Show whitespace changes
Inline
Side-by-side
Showing
1 changed file
with
12 additions
and
2 deletions
+12
-2
python/dgl/graphbolt/dataloader.py
python/dgl/graphbolt/dataloader.py
+12
-2
No files found.
python/dgl/graphbolt/dataloader.py
View file @
33e80452
...
...
@@ -78,14 +78,19 @@ class MultiprocessingWrapper(dp.iter.IterDataPipe):
num_workers : int, optional
The number of worker processes. Default is 0, meaning that there
will be no multiprocessing.
persistent_workers : bool, optional
If True, the data loader will not shut down the worker processes after a
dataset has been consumed once. This keeps the worker instances alive
for reuse across epochs.
"""
def
__init__
(
self
,
datapipe
,
num_workers
=
0
):
def
__init__
(
self
,
datapipe
,
num_workers
=
0
,
persistent_workers
=
True
):
self
.
datapipe
=
datapipe
self
.
dataloader
=
torch
.
utils
.
data
.
DataLoader
(
datapipe
,
batch_size
=
None
,
num_workers
=
num_workers
,
persistent_workers
=
(
num_workers
>
0
)
and
persistent_workers
,
)
def
__iter__
(
self
):
...
...
@@ -109,9 +114,13 @@ class MultiProcessDataLoader(torch.utils.data.DataLoader):
num_workers : int, optional
Number of worker processes. Default is 0, which is identical to
:class:`SingleProcessDataLoader`.
persistent_workers : bool, optional
If True, the data loader will not shut down the worker processes after a
dataset has been consumed once. This keeps the worker instances alive
for reuse across epochs.
"""
def
__init__
(
self
,
datapipe
,
num_workers
=
0
):
def
__init__
(
self
,
datapipe
,
num_workers
=
0
,
persistent_workers
=
True
):
# Multiprocessing requires two modifications to the datapipe:
#
# 1. Insert a stage after ItemSampler to distribute the
...
...
@@ -144,6 +153,7 @@ class MultiProcessDataLoader(torch.utils.data.DataLoader):
FeatureFetcher
,
MultiprocessingWrapper
,
num_workers
=
num_workers
,
persistent_workers
=
persistent_workers
,
)
# (3) Cut datapipe at CopyTo and wrap with prefetcher. This enables the
...
...
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
.
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment