Commit f2d64c00 authored by Mohammad Shoeybi's avatar Mohammad Shoeybi
Browse files

Merge branch 'bfloat_jit' into 'main'

removed the checks for bfloat jitting

See merge request ADLR/megatron-lm!255
parents 2f3a2d68 d28716e8
......@@ -136,9 +136,6 @@ def parse_args(extra_args_provider=None, defaults={},
if args.bf16:
assert not args.fp16
args.params_dtype = torch.bfloat16
# Jitting fusion is not supported for bfloat for now
assert not args.bias_gelu_fusion
assert not args.bias_dropout_fusion
if args.rank == 0:
print('using {} for parameters ...'.format(args.params_dtype),
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment