Skip to content
GitLab
Menu
Projects
Groups
Snippets
Loading...
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in / Register
Toggle navigation
Menu
Open sidebar
OpenDAS
OpenFold
Commits
c775cc12
Unverified
Commit
c775cc12
authored
Jan 20, 2022
by
Gustaf Ahdritz
Committed by
GitHub
Jan 20, 2022
Browse files
Merge pull request #58 from lhatsk/pr
update loss logging and stop sampling recycling iterations in validation
parents
b45f6234
8ed52b70
Changes
3
Hide whitespace changes
Inline
Side-by-side
Showing
3 changed files
with
15 additions
and
13 deletions
+15
-13
openfold/config.py
openfold/config.py
+3
-1
openfold/data/data_modules.py
openfold/data/data_modules.py
+10
-11
train_openfold.py
train_openfold.py
+2
-1
No files found.
openfold/config.py
View file @
c775cc12
...
...
@@ -174,7 +174,6 @@ config = mlc.ConfigDict(
        },
        "supervised": {
            "clamp_prob": 0.9,
-           "uniform_recycling": True,
            "supervised_features": [
                "all_atom_mask",
                "all_atom_positions",
...
...
@@ -194,6 +193,7 @@ config = mlc.ConfigDict(
            "crop_size": None,
            "supervised": False,
            "subsample_recycling": False,
+           "uniform_recycling": False,
        },
        "eval": {
            "fixed_size": True,
...
...
@@ -206,6 +206,7 @@ config = mlc.ConfigDict(
            "crop_size": None,
            "supervised": True,
            "subsample_recycling": False,
+           "uniform_recycling": False,
        },
        "train": {
            "fixed_size": True,
...
...
@@ -221,6 +222,7 @@ config = mlc.ConfigDict(
            "clamp_prob": 0.9,
            "subsample_recycling": True,
            "max_distillation_msa_clusters": 1000,
+           "uniform_recycling": True,
        },
        "data_module": {
            "use_small_bfd": False,
...
...
openfold/data/data_modules.py
View file @
c775cc12
...
...
@@ -245,7 +245,7 @@ class OpenFoldDataset(torch.utils.data.IterableDataset):
 class OpenFoldBatchCollator:
-    def __init__(self, config, generator, stage="train"):
+    def __init__(self, config, stage="train"):
         self.stage = stage
         self.feature_pipeline = feature_pipeline.FeaturePipeline(config)
...
...
@@ -283,21 +283,20 @@ class OpenFoldDataLoader(torch.utils.data.DataLoader):
         keyed_probs.append(
             ("use_clamped_fape", [1 - clamp_prob, clamp_prob])
         )

-        if(self.config.supervised.uniform_recycling):
-            recycling_probs = [
-                1. / (max_iters + 1) for _ in range(max_iters + 1)
-            ]
-            keyed_probs.append(
-                ("no_recycling_iters", recycling_probs)
-            )
+        if(stage_cfg.uniform_recycling):
+            recycling_probs = [
+                1. / (max_iters + 1) for _ in range(max_iters + 1)
+            ]
+        else:
+            recycling_probs = [0. for _ in range(max_iters + 1)]
+            recycling_probs[-1] = 1.
+        keyed_probs.append(
+            ("no_recycling_iters", recycling_probs)
+        )

         keys, probs = zip(*keyed_probs)
         max_len = max([len(p) for p in probs])
...
...
train_openfold.py
View file @
c775cc12
...
...
@@ -66,7 +66,7 @@ class OpenFoldWrapper(pl.LightningModule):
        # Compute loss
        loss = self.loss(outputs, batch)
        self.log("loss", loss)
        return {"loss": loss}

    def validation_step(self, batch, batch_idx):
...
...
@@ -79,6 +79,7 @@ class OpenFoldWrapper(pl.LightningModule):
        outputs = self(batch)
        batch = tensor_tree_map(lambda t: t[..., -1], batch)
        loss = self.loss(outputs, batch)
        self.log("val_loss", loss, prog_bar=True)
        return {"val_loss": loss}

    def validation_epoch_end(self, _):
...
...
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
.
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment