Commit 1ebf61af authored by zhangqha

rm result

parent 865fa22b
Starting prediction...
/usr/local/lib/python3.7/site-packages/torch/jit/annotations.py:289: UserWarning: TorchScript will treat type annotations of Tensor dtype-specific subtypes as if they are normal Tensors. dtype constraints are not enforced in compilation either.
warnings.warn("TorchScript will treat type annotations of Tensor "
start to load params /root/Uni-Fold-main/Alphafold/monomer.unifold.pt
start to predict T1024
{
    'aatype': torch.Size([1, 1, 408]),
    'residue_index': torch.Size([1, 1, 408]),
    'seq_length': torch.Size([1, 1]),
    'template_aatype': torch.Size([1, 1, 4, 408]),
    'template_all_atom_mask': torch.Size([1, 1, 4, 408, 37]),
    'template_all_atom_positions': torch.Size([1, 1, 4, 408, 37, 3]),
    'num_recycling_iters': torch.Size([1, 1]),
    'is_distillation': torch.Size([8, 1]),
    'seq_mask': torch.Size([1, 1, 408]),
    'msa_mask': torch.Size([8, 1, 508, 408]),
    'msa_row_mask': torch.Size([8, 1, 508]),
    'template_mask': torch.Size([1, 1, 4]),
    'template_pseudo_beta': torch.Size([1, 1, 4, 408, 3]),
    'template_pseudo_beta_mask': torch.Size([1, 1, 4, 408]),
    'template_torsion_angles_sin_cos': torch.Size([1, 1, 4, 408, 7, 2]),
    'template_alt_torsion_angles_sin_cos': torch.Size([1, 1, 4, 408, 7, 2]),
    'template_torsion_angles_mask': torch.Size([1, 1, 4, 408, 7]),
    'residx_atom14_to_atom37': torch.Size([1, 1, 408, 14]),
    'residx_atom37_to_atom14': torch.Size([1, 1, 408, 37]),
    'atom14_atom_exists': torch.Size([1, 1, 408, 14]),
    'atom37_atom_exists': torch.Size([1, 1, 408, 37]),
    'target_feat': torch.Size([1, 1, 408, 22]),
    'extra_msa': torch.Size([8, 1, 1024, 408]),
    'extra_msa_mask': torch.Size([8, 1, 1024, 408]),
    'extra_msa_row_mask': torch.Size([8, 1, 1024]),
    'bert_mask': torch.Size([8, 1, 508, 408]),
    'true_msa': torch.Size([8, 1, 508, 408]),
    'extra_msa_has_deletion': torch.Size([8, 1, 1024, 408]),
    'extra_msa_deletion_value': torch.Size([8, 1, 1024, 408]),
    'msa_feat': torch.Size([8, 1, 508, 408, 49])
}
Inference time: 138.50844680101727
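
For reference, the shape dump and the timing line above can be produced with a few lines of PyTorch. This is a minimal sketch, not Uni-Fold's actual code: the checkpoint layout, the function names, and the model call are assumptions; only the checkpoint path and the printed formats come from the log.

    import time
    import torch

    # Checkpoint path copied from the log; whether the weights are nested
    # under an extra key (e.g. an EMA copy) is an assumption about the file.
    ckpt = torch.load("/root/Uni-Fold-main/Alphafold/monomer.unifold.pt",
                      map_location="cpu")

    def log_feature_shapes(batch):
        # Reproduces the dict-of-shapes dump above: the featurised batch is
        # a dict mapping feature names to torch.Tensor values.
        print({k: v.shape for k, v in batch.items() if torch.is_tensor(v)})

    def timed_forward(model, batch):
        # Reproduces the "Inference time: ..." lines (wall-clock seconds).
        start = time.perf_counter()
        with torch.no_grad():
            out = model(batch)
        if torch.cuda.is_available():
            torch.cuda.synchronize()  # flush pending GPU work before stopping the clock
        print("Inference time:", time.perf_counter() - start)
        return out
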
Starting prediction...
/usr/local/lib/python3.7/site-packages/torch/jit/annotations.py:289: UserWarning: TorchScript will treat type annotations of Tensor dtype-specific subtypes as if they are normal Tensors. dtype constraints are not enforced in compilation either.
warnings.warn("TorchScript will treat type annotations of Tensor "
start to load params /root/Uni-Fold-main/Alphafold/multimer.unifold.pt
start to predict H1036
{
    'aatype': torch.Size([1, 1, 856]),
    'residue_index': torch.Size([1, 1, 856]),
    'seq_length': torch.Size([1, 1]),
    'msa_chains': torch.Size([8, 1, 252, 1]),
    'template_aatype': torch.Size([1, 1, 4, 856]),
    'template_all_atom_mask': torch.Size([1, 1, 4, 856, 37]),
    'template_all_atom_positions': torch.Size([1, 1, 4, 856, 37, 3]),
    'asym_id': torch.Size([1, 1, 856]),
    'sym_id': torch.Size([1, 1, 856]),
    'entity_id': torch.Size([1, 1, 856]),
    'num_sym': torch.Size([1, 1, 856]),
    'assembly_num_chains': torch.Size([1, 1, 1]),
    'cluster_bias_mask': torch.Size([1, 1, 252]),
    'bert_mask': torch.Size([8, 1, 252, 856]),
    'msa_mask': torch.Size([8, 1, 252, 856]),
    'asym_len': torch.Size([1, 1, 3]),
    'num_recycling_iters': torch.Size([1, 1]),
    'is_distillation': torch.Size([8, 1]),
    'seq_mask': torch.Size([1, 1, 856]),
    'msa_row_mask': torch.Size([8, 1, 252]),
    'template_mask': torch.Size([1, 1, 4]),
    'template_pseudo_beta': torch.Size([1, 1, 4, 856, 3]),
    'template_pseudo_beta_mask': torch.Size([1, 1, 4, 856]),
    'template_torsion_angles_sin_cos': torch.Size([1, 1, 4, 856, 7, 2]),
    'template_alt_torsion_angles_sin_cos': torch.Size([1, 1, 4, 856, 7, 2]),
    'template_torsion_angles_mask': torch.Size([1, 1, 4, 856, 7]),
    'residx_atom14_to_atom37': torch.Size([1, 1, 856, 14]),
    'residx_atom37_to_atom14': torch.Size([1, 1, 856, 37]),
    'atom14_atom_exists': torch.Size([1, 1, 856, 14]),
    'atom37_atom_exists': torch.Size([1, 1, 856, 37]),
    'target_feat': torch.Size([1, 1, 856, 22]),
    'extra_msa': torch.Size([8, 1, 1152, 856]),
    'extra_msa_mask': torch.Size([8, 1, 1152, 856]),
    'extra_msa_row_mask': torch.Size([8, 1, 1152]),
    'true_msa': torch.Size([8, 1, 252, 856]),
    'msa_feat': torch.Size([8, 1, 252, 856, 49]),
    'extra_msa_has_deletion': torch.Size([8, 1, 1152, 856]),
    'extra_msa_deletion_value': torch.Size([8, 1, 1152, 856])
}
Inference time: 410.5106331880088
Inference time: 406.87861637599417
Inference time: 406.9402783330006
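
The three multimer timings above appear to be the same target H1036 run under three different random seeds; the suffixes 20281, 2806, and 55231 in the score dicts below look like those seeds. A hedged sketch of such a multi-seed loop, with all names hypothetical:

    import torch

    def predict_with_seeds(model, batch, seeds=(20281, 2806, 55231)):
        # Hypothetical driver: rerun one featurised target once per seed.
        # The seed changes the stochastic parts of inference (e.g. MSA
        # sampling and masking), so each run can yield a distinct structure.
        outputs = {}
        for seed in seeds:
            torch.manual_seed(seed)
            with torch.no_grad():
                outputs[seed] = model(batch)
        return outputs
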
plddts {'multimer_ft_multimer.unifold.pt_20281': '0.4212213', 'multimer_ft_multimer.unifold.pt_2806': '0.41139442', 'multimer_ft_multimer.unifold.pt_55231': '0.4146896'}
ptms {'multimer_ft_multimer.unifold.pt_20281': '0.99775934', 'multimer_ft_multimer.unifold.pt_2806': '0.99926674', 'multimer_ft_multimer.unifold.pt_55231': '0.99753684'}
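
The pLDDT values here are mean per-residue confidences on a 0-1 scale, and the pTM values are predicted TM-scores; a common convention is to rank the seeded runs by one of these scores and keep the top model. A minimal sketch (scores copied from the log; the ranking rule is an assumption, not necessarily what this pipeline does):

    # Scores copied verbatim from the two dicts above.
    plddts = {
        "multimer_ft_multimer.unifold.pt_20281": 0.4212213,
        "multimer_ft_multimer.unifold.pt_2806": 0.41139442,
        "multimer_ft_multimer.unifold.pt_55231": 0.4146896,
    }
    ptms = {
        "multimer_ft_multimer.unifold.pt_20281": 0.99775934,
        "multimer_ft_multimer.unifold.pt_2806": 0.99926674,
        "multimer_ft_multimer.unifold.pt_55231": 0.99753684,
    }

    # Rank by mean pLDDT and report the winner with its pTM.
    best = max(plddts, key=plddts.get)
    print(f"best model: {best} (pLDDT={plddts[best]}, pTM={ptms[best]})")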