OpenDAS / dgl · Commit 894ae31f (Unverified)
Authored Jun 14, 2023 by Rhett Ying; committed by GitHub, Jun 14, 2023
Parent: add152bf

[DistDGL] keep net_type in public API even it is deprecated (#5875)
Showing 1 changed file with 8 additions and 1 deletion (+8, −1): python/dgl/distributed/dist_context.py
python/dgl/distributed/dist_context.py @ 894ae31f

@@ -12,7 +12,7 @@ import traceback
 from enum import Enum
 
 from .. import utils
-from ..base import DGLError
+from ..base import dgl_warning, DGLError
 from . import rpc
 from .constants import MAX_QUEUE_SIZE
 from .kvstore import close_kvstore, init_kvstore
@@ -208,6 +208,7 @@ class CustomPool:
 def initialize(
     ip_config,
     max_queue_size=MAX_QUEUE_SIZE,
+    net_type=None,
     num_worker_threads=1,
 ):
     """Initialize DGL's distributed module
@@ -226,6 +227,8 @@ def initialize(
         Note that the 20 GB is just an upper-bound and DGL uses zero-copy and
         it will not allocate 20GB memory at once.
+    net_type : str, optional
+        [Deprecated] Networking type, can be 'socket' only.
     num_worker_threads: int
         The number of OMP threads in each sampler process.
@@ -235,6 +238,10 @@ def initialize(
     distributed API. For example, when used with Pytorch, users have to invoke this function
     before Pytorch's `pytorch.distributed.init_process_group`.
     """
+    if net_type is not None:
+        dgl_warning(
+            "net_type is deprecated and will be removed in future release."
+        )
     if os.environ.get("DGL_ROLE", "client") == "server":
         from .dist_graph import DistGraphServer
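For context, a minimal sketch of what this change means for callers, assuming a DGL build that contains this commit and a hypothetical ip_config.txt describing the cluster: the diff re-adds the deprecated net_type keyword to the signature of dgl.distributed.initialize, so launch scripts that still pass it keep working and only get a deprecation warning via dgl_warning.

    import dgl

    # Sketch only (not part of the commit): a script that still passes the
    # deprecated keyword. After this change the call is accepted and emits
    # "net_type is deprecated and will be removed in future release."
    dgl.distributed.initialize("ip_config.txt", net_type="socket")
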