"...git@developer.sourcefind.cn:renzhc/diffusers_dcu.git" did not exist on "8eefed65bd675a6d54184b7ef269b100a6eea88d"
Unverified commit a3ea4873, authored by Sai Kandregula, committed by GitHub
Browse files

[Example] Remove Bias in Linear layer because of BatchNorm1d usage (#4071)



* typo fix in TUDataset docs

* remove bias in Linear layer because of BatchNorm1d
Co-authored-by: decoherencer <decoherencer@users.noreply.github.com>
Co-authored-by: Mufei Li <mufeili1996@gmail.com>
parent 6a91d181
@@ -60,10 +60,10 @@ class MLP(nn.Module):
         self.linears = torch.nn.ModuleList()
         self.batch_norms = torch.nn.ModuleList()
-        self.linears.append(nn.Linear(input_dim, hidden_dim))
+        self.linears.append(nn.Linear(input_dim, hidden_dim, bias=False))
         for layer in range(num_layers - 2):
-            self.linears.append(nn.Linear(hidden_dim, hidden_dim))
+            self.linears.append(nn.Linear(hidden_dim, hidden_dim, bias=False))
-        self.linears.append(nn.Linear(hidden_dim, output_dim))
+        self.linears.append(nn.Linear(hidden_dim, output_dim, bias=False))
         for layer in range(num_layers - 1):
             self.batch_norms.append(nn.BatchNorm1d((hidden_dim)))
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment.