Commit 7a36da42 authored by Angela Fan's avatar Angela Fan Committed by Myle Ott
Browse files

Modified writing-prompts model parameters to make the README cleaner

parent b59815bc
...@@ -494,9 +494,17 @@ def base_architecture(args): ...@@ -494,9 +494,17 @@ def base_architecture(args):
@register_model_architecture('fconv_self_att', 'fconv_self_att_wp')
def fconv_self_att_wp(args):
    """Register default hyperparameters for the Writing Prompts
    self-attentive convolutional architecture ('fconv_self_att_wp').

    Mutates ``args`` in place: each attribute is kept if the user already
    set it (via ``getattr`` with a default) and otherwise filled with the
    architecture's default. ``base_architecture(args)`` runs first so
    these values override/extend the base fconv_self_att defaults.

    Args:
        args: argparse.Namespace of model hyperparameters; modified in place.
    """
    base_architecture(args)
    # NOTE: layer specs and several flags are strings that the model
    # eval()s/parses later — keep them as string literals.
    args.encoder_embed_dim = getattr(args, 'encoder_embed_dim', 256)
    args.encoder_layers = getattr(args, 'encoder_layers', '[(128, 3)] * 2 + [(512,3)] * 1')
    args.decoder_embed_dim = getattr(args, 'decoder_embed_dim', 256)
    args.decoder_layers = getattr(args, 'decoder_layers', '[(512, 4)] * 4 + [(768, 4)] * 2 + [(1024, 4)] * 1')
    args.decoder_out_embed_dim = getattr(args, 'decoder_out_embed_dim', 256)
    args.decoder_attention = getattr(args, 'decoder_attention', 'True')
    args.multihead_attention_nheads = getattr(args, 'multihead_attention_nheads', 1)
    args.encoder_attention = getattr(args, 'encoder_attention', 'False')
    args.encoder_attention_nheads = getattr(args, 'encoder_attention_nheads', 1)
    args.self_attention = getattr(args, 'self_attention', 'True')
    args.gated_attention = getattr(args, 'gated_attention', 'True')
    args.downsample = getattr(args, 'downsample', 'True')
    args.project_input = getattr(args, 'project_input', 'True')
    args.multihead_self_attention_nheads = getattr(args, 'multihead_self_attention_nheads', 4)
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment