Commit 46e17eec authored by helloyongyang's avatar helloyongyang
Browse files

update logs

parent a9f0ab52
...@@ -20,36 +20,22 @@ class _ProfilingContext: ...@@ -20,36 +20,22 @@ class _ProfilingContext:
def __enter__(self): def __enter__(self):
torch.cuda.synchronize() torch.cuda.synchronize()
if torch.cuda.is_available():
torch.cuda.reset_peak_memory_stats()
self.start_time = time.perf_counter() self.start_time = time.perf_counter()
return self return self
def __exit__(self, exc_type, exc_val, exc_tb):
    """Close the profiling window and log the elapsed wall-clock time.

    Args:
        exc_type, exc_val, exc_tb: standard context-manager exception info;
            ignored here.

    Returns:
        False, so any exception raised inside the ``with`` block propagates.
    """
    # Guard the sync: torch.cuda.synchronize() raises on CPU-only builds/hosts.
    if torch.cuda.is_available():
        torch.cuda.synchronize()
    elapsed = time.perf_counter() - self.start_time
    # NOTE(review): self.rank_info / self.name are presumably set in __init__,
    # which is outside this view — confirm.
    logger.info(f"[Profile] {self.rank_info} - {self.name} cost {elapsed:.6f} seconds")
    return False
async def __aenter__(self): async def __aenter__(self):
torch.cuda.synchronize() torch.cuda.synchronize()
if torch.cuda.is_available():
torch.cuda.reset_peak_memory_stats()
self.start_time = time.perf_counter() self.start_time = time.perf_counter()
return self return self
async def __aexit__(self, exc_type, exc_val, exc_tb):
    """Async variant of ``__exit__``: log elapsed wall-clock time.

    Args:
        exc_type, exc_val, exc_tb: standard context-manager exception info;
            ignored here.

    Returns:
        False, so any exception raised inside the ``async with`` block propagates.
    """
    # Guard the sync: torch.cuda.synchronize() raises on CPU-only builds/hosts.
    if torch.cuda.is_available():
        torch.cuda.synchronize()
    elapsed = time.perf_counter() - self.start_time
    # NOTE(review): self.rank_info / self.name are presumably set in __init__,
    # which is outside this view — confirm.
    logger.info(f"[Profile] {self.rank_info} - {self.name} cost {elapsed:.6f} seconds")
    return False
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment