Commit e2d98ddc authored by zhouxiang

Fix bf16 compilation issue

parent d7117b95
@@ -47,6 +47,7 @@ std::shared_ptr<AbstractTransformerModel> AbstractTransformerModel::createLlamaM
             reader.GetInteger("ft_instance_hyperparameter", "enable_custom_all_reduce", 0),
             model_dir);
     }
+#ifdef ENABLE_BF16
     else if (data_type == "bf16") {
 #ifdef ENABLE_BF16
         return std::make_shared<LlamaTritonModel<__nv_bfloat16>>(
@@ -59,6 +60,7 @@ std::shared_ptr<AbstractTransformerModel> AbstractTransformerModel::createLlamaM
         ft::FT_CHECK(false);
 #endif
     }
+#endif
     else {
         return std::make_shared<LlamaTritonModel<float>>(
             reader.GetInteger("ft_instance_hyperparameter", "tensor_para_size"),
...
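For context, below is a minimal, self-contained sketch of the preprocessor-guard pattern this commit applies. The identifiers (make_model, Model, bf16_t) are illustrative stand-ins, not the repository's own names, and the branch set is abbreviated; the point is only to show the shape of the guard, not to reproduce createLlamaModel.

// Hypothetical sketch: wrap the whole "bf16" dispatch branch in
// #ifdef ENABLE_BF16 so that builds without bf16 support never see the
// branch and never instantiate the bf16-only template.
// Names (make_model, Model, bf16_t) are stand-ins, not from the repository.
#include <memory>
#include <stdexcept>
#include <string>

template<typename T>
struct Model {};              // stand-in for LlamaTritonModel<T>

#ifdef ENABLE_BF16
struct bf16_t {};             // stand-in for __nv_bfloat16
#endif

std::shared_ptr<void> make_model(const std::string& data_type)
{
    if (data_type == "fp32") {
        return std::make_shared<Model<float>>();
    }
#ifdef ENABLE_BF16
    else if (data_type == "bf16") {
        // Only compiled when bf16 support is enabled.
        return std::make_shared<Model<bf16_t>>();
    }
#endif
    else {
        throw std::runtime_error("unsupported data_type: " + data_type);
    }
}

int main()
{
    auto m = make_model("fp32");   // always available regardless of ENABLE_BF16
    (void)m;
}

With ENABLE_BF16 undefined, the guarded branch collapses to a plain if/else and the bf16 type is never named in that translation unit, which is presumably why the commit moves the guard to enclose the whole else-if branch.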