Unverified Commit f1cf6eef authored by Chen Shengzhi, committed by GitHub

[Fix] Check the device backend before calling empty_cache function (#4212)

parent 0a59a465
...
@@ -1228,7 +1228,16 @@ def cleanup_dist_env_and_memory(shutdown_ray: bool = False):
         ray.shutdown()
     gc.collect()
     if not current_platform.is_cpu():
-        torch.cuda.empty_cache()
+        if hasattr(torch, "cuda") and torch.cuda.is_available():
+            torch.cuda.empty_cache()
+            if hasattr(torch._C, "_host_emptyCache"):
+                torch._C._host_emptyCache()
+            else:
+                logger.warning(
+                    "torch._C._host_emptyCache() only available in PyTorch >= 2.5"
+                )
+        elif hasattr(torch, "xpu") and torch.xpu.is_available():
+            torch.xpu.empty_cache()
 
 
 def in_the_same_node_as(pg: ProcessGroup, source_rank: int = 0) -> List[bool]:
...
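
For context, here is a minimal standalone sketch of the backend check this patch introduces. The helper name `empty_device_caches` is illustrative only and not part of the repository; the branch logic mirrors the diff above.

```python
import logging

import torch

logger = logging.getLogger(__name__)


def empty_device_caches() -> None:
    """Release cached allocator memory only for the backend that is present."""
    if hasattr(torch, "cuda") and torch.cuda.is_available():
        torch.cuda.empty_cache()
        # torch._C._host_emptyCache() frees cached pinned host memory and
        # only exists in PyTorch >= 2.5, hence the hasattr guard.
        if hasattr(torch._C, "_host_emptyCache"):
            torch._C._host_emptyCache()
        else:
            logger.warning(
                "torch._C._host_emptyCache() only available in PyTorch >= 2.5")
    elif hasattr(torch, "xpu") and torch.xpu.is_available():
        torch.xpu.empty_cache()
    # On CPU-only builds neither branch runs, so no device cache call is made.
```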