Commit 0081afb8 authored by Christian Sarofeen

Decrease default message size.

parent cae6005c
@@ -47,13 +47,13 @@ class DistributedDataParallel(Module):
     Args:
         module: Network definition to be run in multi-gpu/distributed mode.
-        message_size (Default = 100e6): Minimum number of elements in a communication bucket.
+        message_size (Default = 10e6): Minimum number of elements in a communication bucket.
         shared_param (Default = False): If your model uses shared parameters this must be true,
             it will disable bucketing of parameters which is necessary to avoid race conditions.
     """
-    def __init__(self, module, message_size=100000000, shared_param=False):
+    def __init__(self, module, message_size=10000000, shared_param=False):
         super(DistributedDataParallel, self).__init__()
         self.warn_on_half = True if dist._backend == dist.dist_backend.GLOO else False
         self.shared_param = shared_param
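For context, a minimal usage sketch of the class this diff touches. It assumes an apex-style `apex.parallel.DistributedDataParallel` import path, an NCCL backend, and a launcher that sets the usual `env://` variables; the model and tensor sizes are illustrative, not from this commit:

```python
import torch
import torch.distributed as dist
from apex.parallel import DistributedDataParallel

# Assumes RANK/WORLD_SIZE/MASTER_ADDR/MASTER_PORT are set by the launcher.
dist.init_process_group(backend='nccl', init_method='env://')

model = torch.nn.Linear(1024, 1024).cuda()

# message_size is the minimum number of gradient elements per communication
# bucket. This commit lowers the default from 100e6 to 10e6 elements: smaller
# buckets begin all-reducing earlier during the backward pass, overlapping
# more communication with computation. Passing it explicitly here only
# restates the new default.
model = DistributedDataParallel(model, message_size=10000000, shared_param=False)
```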