Commit 8c3cea7f authored by Will Feng's avatar Will Feng Committed by Francisco Massa
Browse files

Change all torch::nn::init::Nonlinearity::{name} and torch::nn::init::FanMode::{name} to torch::k{name}

Change all torch::nn::init::Nonlinearity::{name} and torch::nn::init::FanMode::{name} to torch::k{name} (#1394)

* Change all torch::nn::init::Nonlinearity::{name} and torch::nn::init::FanMode::{name} to torch::k{name}

* empty commit

* fix lint

* fix lint

* fix lint
parent 96ec0e1d
...@@ -109,10 +109,7 @@ void MNASNetImpl::_initialize_weights() { ...@@ -109,10 +109,7 @@ void MNASNetImpl::_initialize_weights() {
for (auto& module : modules(/*include_self=*/false)) { for (auto& module : modules(/*include_self=*/false)) {
if (auto M = dynamic_cast<torch::nn::Conv2dImpl*>(module.get())) if (auto M = dynamic_cast<torch::nn::Conv2dImpl*>(module.get()))
torch::nn::init::kaiming_normal_( torch::nn::init::kaiming_normal_(
M->weight, M->weight, 0, torch::kFanOut, torch::kReLU);
0,
torch::nn::init::FanMode::FanOut,
torch::nn::init::Nonlinearity::ReLU);
else if (auto M = dynamic_cast<torch::nn::BatchNormImpl*>(module.get())) { else if (auto M = dynamic_cast<torch::nn::BatchNormImpl*>(module.get())) {
torch::nn::init::ones_(M->weight); torch::nn::init::ones_(M->weight);
torch::nn::init::zeros_(M->bias); torch::nn::init::zeros_(M->bias);
......
...@@ -134,8 +134,7 @@ MobileNetV2Impl::MobileNetV2Impl( ...@@ -134,8 +134,7 @@ MobileNetV2Impl::MobileNetV2Impl(
for (auto& module : modules(/*include_self=*/false)) { for (auto& module : modules(/*include_self=*/false)) {
if (auto M = dynamic_cast<torch::nn::Conv2dImpl*>(module.get())) { if (auto M = dynamic_cast<torch::nn::Conv2dImpl*>(module.get())) {
torch::nn::init::kaiming_normal_( torch::nn::init::kaiming_normal_(M->weight, 0, torch::kFanOut);
M->weight, 0, torch::nn::init::FanMode::FanOut);
if (M->options.with_bias()) if (M->options.with_bias())
torch::nn::init::zeros_(M->bias); torch::nn::init::zeros_(M->bias);
} else if (auto M = dynamic_cast<torch::nn::BatchNormImpl*>(module.get())) { } else if (auto M = dynamic_cast<torch::nn::BatchNormImpl*>(module.get())) {
......
...@@ -146,8 +146,8 @@ ResNetImpl<Block>::ResNetImpl( ...@@ -146,8 +146,8 @@ ResNetImpl<Block>::ResNetImpl(
torch::nn::init::kaiming_normal_( torch::nn::init::kaiming_normal_(
M->weight, M->weight,
/*a=*/0, /*a=*/0,
torch::nn::init::FanMode::FanOut, torch::kFanOut,
torch::nn::init::Nonlinearity::ReLU); torch::kReLU);
else if (auto M = dynamic_cast<torch::nn::BatchNormImpl*>(module.get())) { else if (auto M = dynamic_cast<torch::nn::BatchNormImpl*>(module.get())) {
torch::nn::init::constant_(M->weight, 1); torch::nn::init::constant_(M->weight, 1);
torch::nn::init::constant_(M->bias, 0); torch::nn::init::constant_(M->bias, 0);
......
...@@ -35,8 +35,8 @@ void VGGImpl::_initialize_weights() { ...@@ -35,8 +35,8 @@ void VGGImpl::_initialize_weights() {
torch::nn::init::kaiming_normal_( torch::nn::init::kaiming_normal_(
M->weight, M->weight,
/*a=*/0, /*a=*/0,
torch::nn::init::FanMode::FanOut, torch::kFanOut,
torch::nn::init::Nonlinearity::ReLU); torch::kReLU);
torch::nn::init::constant_(M->bias, 0); torch::nn::init::constant_(M->bias, 0);
} else if (auto M = dynamic_cast<torch::nn::BatchNormImpl*>(module.get())) { } else if (auto M = dynamic_cast<torch::nn::BatchNormImpl*>(module.get())) {
torch::nn::init::constant_(M->weight, 1); torch::nn::init::constant_(M->weight, 1);
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment