OpenDAS / torch-harmonics
Commit d70dee87, authored Jul 16, 2025 by Andrea Paris; committed by Boris Bonev, Jul 21, 2025

removed docstrings from _init_weights

Parent: b17bfdc4
Showing 3 changed files, with 5 additions and 56 deletions (+5, -56):

examples/baseline_models/unet.py (+0, -16)
torch_harmonics/examples/models/s2segformer.py (+3, -24)
torch_harmonics/examples/models/s2unet.py (+2, -16)
examples/baseline_models/unet.py
@@ -171,14 +171,6 @@ class DownsamplingBlock(nn.Module):
         self.apply(self._init_weights)

     def _init_weights(self, m):
-        """
-        Initialize weights for the module.
-
-        Parameters
-        -----------
-        m : torch.nn.Module
-            Module to initialize weights for
-        """
         if isinstance(m, nn.Conv2d):
             nn.init.trunc_normal_(m.weight, std=.02)
             if m.bias is not None:
@@ -344,14 +336,6 @@ class UpsamplingBlock(nn.Module):
         self.apply(self._init_weights)

     def _init_weights(self, m):
-        """
-        Initialize weights for the module.
-
-        Parameters
-        -----------
-        m : torch.nn.Module
-            Module to initialize weights for
-        """
         if isinstance(m, nn.Conv2d):
             nn.init.trunc_normal_(m.weight, std=.02)
             if m.bias is not None:
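For reference, the _init_weights helpers touched by this commit all follow the standard PyTorch idiom of passing a per-module initializer to nn.Module.apply, which walks the module tree and calls the function on every submodule. The following is a minimal standalone sketch of that idiom, using a hypothetical toy class rather than the repository's DownsamplingBlock:

import torch
import torch.nn as nn

class ToyDownBlock(nn.Module):
    # Hypothetical example class; it only mirrors the initialization pattern
    # visible in the diff above, not the actual torch-harmonics model.
    def __init__(self, in_chans=3, out_chans=8):
        super().__init__()
        self.conv = nn.Conv2d(in_chans, out_chans, kernel_size=3, padding=1)
        # Recursively apply the initializer to this module and all submodules.
        self.apply(self._init_weights)

    def _init_weights(self, m):
        if isinstance(m, nn.Conv2d):
            nn.init.trunc_normal_(m.weight, std=0.02)
            if m.bias is not None:
                nn.init.constant_(m.bias, 0)

    def forward(self, x):
        return self.conv(x)

block = ToyDownBlock()
print(block(torch.randn(1, 3, 16, 16)).shape)  # torch.Size([1, 8, 16, 16])

Because apply recurses into all children, the isinstance check is what scopes the initialization to Conv2d layers, and the bias is zeroed only when one is present.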
torch_harmonics/examples/models/s2segformer.py
@@ -117,14 +117,7 @@ class OverlapPatchMerging(nn.Module):
         self.apply(self._init_weights)

     def _init_weights(self, m):
-        """
-        Initialize weights for the module.
-
-        Parameters
-        -----------
-        m : nn.Module
-            Module to initialize
-        """
         if isinstance(m, nn.LayerNorm):
             nn.init.constant_(m.bias, 0)
             nn.init.constant_(m.weight, 1.0)
@@ -230,14 +223,7 @@ class MixFFN(nn.Module):
         self.apply(self._init_weights)

     def _init_weights(self, m):
-        """
-        Initialize weights for the module.
-
-        Parameters
-        -----------
-        m : nn.Module
-            Module to initialize
-        """
         if isinstance(m, nn.Conv2d):
             nn.init.trunc_normal_(m.weight, std=0.02)
             if m.bias is not None:
@@ -792,14 +778,7 @@ class SphericalSegformer(nn.Module):
         self.apply(self._init_weights)

     def _init_weights(self, m):
-        """
-        Initialize weights for the module.
-
-        Parameters
-        -----------
-        m : nn.Module
-            Module to initialize
-        """
         if isinstance(m, nn.Conv2d):
             nn.init.trunc_normal_(m.weight, std=0.02)
             if m.bias is not None:
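The Segformer hunks above additionally handle nn.LayerNorm, resetting its affine parameters to the identity transform (weight 1.0, bias 0) alongside the truncated-normal Conv2d branch. A hedged, self-contained sketch of a combined initializer in that style (not the repository's exact helper):

import torch.nn as nn

def init_weights(m):
    # Illustrative standalone initializer; the function name and structure are
    # assumptions, only the Conv2d/LayerNorm branches mirror the diff above.
    if isinstance(m, nn.Conv2d):
        nn.init.trunc_normal_(m.weight, std=0.02)
        if m.bias is not None:
            nn.init.constant_(m.bias, 0)
    elif isinstance(m, nn.LayerNorm):
        nn.init.constant_(m.bias, 0)
        nn.init.constant_(m.weight, 1.0)

# Usage: construct the model, then call model.apply(init_weights).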
torch_harmonics/examples/models/s2unet.py
@@ -194,14 +194,7 @@ class DownsamplingBlock(nn.Module):
         self.apply(self._init_weights)

     def _init_weights(self, m):
-        """
-        Initialize weights for the module.
-
-        Parameters
-        -----------
-        m : nn.Module
-            Module to initialize
-        """
         if isinstance(m, nn.Conv2d):
             nn.init.trunc_normal_(m.weight, std=0.02)
             if m.bias is not None:
@@ -585,14 +578,7 @@ class SphericalUNet(nn.Module):
         self.apply(self._init_weights)

     def _init_weights(self, m):
-        """
-        Initialize weights for the module.
-
-        Parameters
-        -----------
-        m : nn.Module
-            Module to initialize
-        """
         if isinstance(m, nn.Conv2d):
             nn.init.trunc_normal_(m.weight, std=0.02)
             if m.bias is not None: