Commit 71f496e5 authored by Benjamin Thomas Graham

Activations

parent a0d33d69
@@ -6,6 +6,7 @@
forward_pass_multiplyAdd_count = 0
forward_pass_hidden_states = 0
from .activations import Tanh, Sigmoid, ReLU, ELU
from .averagePooling import AveragePooling
from .batchNormalization import BatchNormalization, BatchNormReLU, BatchNormLeakyReLU
from .classificationTrainValidate import ClassificationTrainValidate
...
# Copyright 2016-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
import sparseconvnet
import torch.nn.functional as F
from torch.autograd import Function, Variable
from torch.nn import Module, Parameter
from .utils import *
from .sparseConvNetTensor import SparseConvNetTensor
from .batchNormalization import BatchNormalization
# Pointwise activation modules. Each wraps the corresponding torch.nn.functional
# activation, applying it to the feature matrix of a SparseConvNetTensor while
# passing the metadata and spatial_size through unchanged.
class Sigmoid(Module):
    def forward(self, input):
        output = SparseConvNetTensor()
        output.features = F.sigmoid(input.features)
        output.metadata = input.metadata
        output.spatial_size = input.spatial_size
        return output

class Tanh(Module):
    def forward(self, input):
        output = SparseConvNetTensor()
        output.features = F.tanh(input.features)
        output.metadata = input.metadata
        output.spatial_size = input.spatial_size
        return output

class ReLU(Module):
    def forward(self, input):
        output = SparseConvNetTensor()
        output.features = F.relu(input.features)
        output.metadata = input.metadata
        output.spatial_size = input.spatial_size
        return output

class ELU(Module):
    def forward(self, input):
        output = SparseConvNetTensor()
        output.features = F.elu(input.features)
        output.metadata = input.metadata
        output.spatial_size = input.spatial_size
        return output
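A minimal usage sketch, assuming `x` is a SparseConvNetTensor produced earlier in a network (for example by an input layer or a sparse convolution); the new activation modules are exported from the package `__init__.py` as shown in the hunk above, and only transform the features, leaving the sparsity pattern alone:

import sparseconvnet as scn

relu = scn.ReLU()   # exported via the __init__.py change above
y = relu(x)         # y.features = F.relu(x.features); metadata and spatial_size are carried over from x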