"...git@developer.sourcefind.cn:sugon_wxj/megatron-lm.git" did not exist on "cf7efd4f5adeb676ec30d2ff3bf2149da58ec4a3"
Commit 3e2608e1 authored by 藍+85CD

Fix auto lowvram detection on CUDA

parent 7cb924f6
@@ -22,11 +22,12 @@ set_vram_to = NORMAL_VRAM
 try:
     import torch
-    import intel_extension_for_pytorch as ipex
-    if torch.xpu.is_available():
-        xpu_available = True
-        total_vram = torch.xpu.get_device_properties(torch.xpu.current_device()).total_memory / (1024 * 1024)
-    else:
+    try:
+        import intel_extension_for_pytorch as ipex
+        if torch.xpu.is_available():
+            xpu_available = True
+            total_vram = torch.xpu.get_device_properties(torch.xpu.current_device()).total_memory / (1024 * 1024)
+    except:
         total_vram = torch.cuda.mem_get_info(torch.cuda.current_device())[1] / (1024 * 1024)
     total_ram = psutil.virtual_memory().total / (1024 * 1024)
     forced_normal_vram = "--normalvram" in sys.argv
...
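
Why this fixes auto lowvram detection on CUDA: in the old code the IPEX import sat directly inside the outer try:, so on a CUDA-only machine the ImportError skipped the else: branch entirely, total_vram was never measured, and the automatic lowvram heuristic misfired. The patch gives the IPEX probe its own try/except whose fallback is the CUDA query. Below is a minimal standalone sketch of the resulting detection order; the outer try/except and the NORMAL_VRAM/set_vram_to logic from the surrounding file are elided, and the initial defaults and the final print line are illustrative additions, not part of the commit.

import sys
import psutil
import torch

xpu_available = False
total_vram = 0  # in MB; stays 0 if IPEX imports but no XPU device is available

try:
    # Probe Intel XPU first: this import raises on machines
    # without intel_extension_for_pytorch installed.
    import intel_extension_for_pytorch as ipex
    if torch.xpu.is_available():
        xpu_available = True
        total_vram = torch.xpu.get_device_properties(torch.xpu.current_device()).total_memory / (1024 * 1024)
except:
    # Any IPEX/XPU failure lands here: fall back to CUDA.
    # torch.cuda.mem_get_info() returns (free, total) in bytes, so [1] is total VRAM.
    total_vram = torch.cuda.mem_get_info(torch.cuda.current_device())[1] / (1024 * 1024)

total_ram = psutil.virtual_memory().total / (1024 * 1024)
forced_normal_vram = "--normalvram" in sys.argv
print(f"VRAM: {total_vram:.0f} MB, RAM: {total_ram:.0f} MB, xpu: {xpu_available}")

Note the bare except: it catches not only a missing intel_extension_for_pytorch but any failure while probing the XPU, which is what makes the CUDA fallback robust here, at the cost of also swallowing unrelated errors.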