Commit b32bfeec authored by Gustaf Ahdritz's avatar Gustaf Ahdritz
Browse files

Remove redundancy in data_transforms

parent 03bb003a
...@@ -1131,12 +1131,12 @@ def random_crop_to_size( ...@@ -1131,12 +1131,12 @@ def random_crop_to_size(
templates_select_indices = torch.randperm( templates_select_indices = torch.randperm(
num_templates, device=protein["seq_length"].device, generator=g num_templates, device=protein["seq_length"].device, generator=g
) )
num_templates_crop_size = min(
num_templates - templates_crop_start, max_templates
)
else: else:
templates_crop_start = 0 templates_crop_start = 0
num_templates_crop_size = num_templates
num_templates_crop_size = min(
num_templates - templates_crop_start, max_templates
)
n = seq_length - num_res_crop_size n = seq_length - num_res_crop_size
if "use_clamped_fape" in protein and protein["use_clamped_fape"] == 1.: if "use_clamped_fape" in protein and protein["use_clamped_fape"] == 1.:
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment