Commit 05481617 authored by Tri Dao

Bump version to 0.2.1

parent 0fa5c0d7
@@ -14,7 +14,7 @@ from apex.transformer.tensor_parallel.utils import VocabUtility
 # `all_gather_into_tensor` and `reduce_scatter_tensor` are new placeholders for
 # `_all_gather_base` and `_reduce_scatter_base`. They require the most recent
-# version of PyTorch. The following 4 lines are for backward comparability with
+# version of PyTorch. The following 4 lines are for backward compatibility with
 # older PyTorch.
 if "all_gather_into_tensor" not in dir(torch.distributed):
     torch.distributed.all_gather_into_tensor = torch.distributed._all_gather_base
...
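The hunk above monkey-patches new-style names onto older PyTorch. Its comment refers to four lines, of which the diff shows only the first two; a minimal sketch of the full shim, assuming the elided pair mirrors the visible one for `reduce_scatter_tensor`:

```python
import torch
import torch.distributed

# Newer PyTorch renamed `_all_gather_base` -> `all_gather_into_tensor` and
# `_reduce_scatter_base` -> `reduce_scatter_tensor`. Alias the new public
# names to the old private functions so the same call sites run on older
# PyTorch. (The second `if` block is assumed; the diff truncates after the
# first one.)
if "all_gather_into_tensor" not in dir(torch.distributed):
    torch.distributed.all_gather_into_tensor = torch.distributed._all_gather_base
if "reduce_scatter_tensor" not in dir(torch.distributed):
    torch.distributed.reduce_scatter_tensor = torch.distributed._reduce_scatter_base
```

Guarding each alias with a `dir(torch.distributed)` check makes the patch a no-op on newer PyTorch, where the public names already exist.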
@@ -152,7 +152,7 @@ ext_modules.append(
 setup(
     name="flash_attn",
-    version="0.2.0",
+    version="0.2.1",
     packages=find_packages(
         exclude=("build", "csrc", "include", "tests", "dist", "docs", "benchmarks", "flash_attn.egg-info",)
     ),
...
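A quick way to confirm the bumped release after installation is to read the distribution metadata at runtime; a minimal sketch, assuming the package is installed under the `flash_attn` name given in `setup()` above:

```python
from importlib.metadata import version

# The distribution name comes from `name="flash_attn"` in setup.py above;
# after this commit an installed build should report 0.2.1.
print(version("flash_attn"))  # expected: 0.2.1
```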