hpwang committed (verified)
Commit 8ba473c · 1 parent: d7e0376

Update tools/Fooocus/ldm_patched/modules/model_management_former.py

tools/Fooocus/ldm_patched/modules/model_management_former.py CHANGED
@@ -103,12 +103,12 @@ def get_total_memory(dev=None, torch_total_too=False):
             mem_total_torch = mem_total
         elif is_intel_xpu():
             stats = torch.xpu.memory_stats(dev)
-            mem_reserved = stats['reserved_bytes.all.current']
+            mem_reserved = 100#stats['reserved_bytes.all.current']
             mem_total = torch.xpu.get_device_properties(dev).total_memory
             mem_total_torch = mem_reserved
         else:
             stats = torch.cuda.memory_stats(dev)
-            mem_reserved = stats['reserved_bytes.all.current']
+            mem_reserved = 100#stats['reserved_bytes.all.current']
             _, mem_total_cuda = torch.cuda.mem_get_info(dev)
             mem_total_torch = mem_reserved
             mem_total = mem_total_cuda
@@ -663,15 +663,15 @@ def get_free_memory(dev=None, torch_free_too=False):
             mem_free_torch = mem_free_total
         elif is_intel_xpu():
             stats = torch.xpu.memory_stats(dev)
-            mem_active = stats['active_bytes.all.current']
-            mem_allocated = stats['allocated_bytes.all.current']
-            mem_reserved = stats['reserved_bytes.all.current']
+            mem_active = 100#stats['active_bytes.all.current']
+            mem_allocated = 100#stats['allocated_bytes.all.current']
+            mem_reserved = 100#stats['reserved_bytes.all.current']
            mem_free_torch = mem_reserved - mem_active
            mem_free_total = torch.xpu.get_device_properties(dev).total_memory - mem_allocated
         else:
             stats = torch.cuda.memory_stats(dev)
-            mem_active = stats['active_bytes.all.current']
-            mem_reserved = stats['reserved_bytes.all.current']
+            mem_active = 100#stats['active_bytes.all.current']
+            mem_reserved = 100#stats['reserved_bytes.all.current']
            mem_free_cuda, _ = torch.cuda.mem_get_info(dev)
            mem_free_torch = mem_reserved - mem_active
            mem_free_total = mem_free_cuda + mem_free_torch
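
For context, a minimal standalone sketch (not part of the Fooocus module, and not this commit's code) of the CUDA-path queries that the commit replaces with a hard-coded 100. It assumes a CUDA-capable PyTorch build and uses only documented torch.cuda calls; the helper name cuda_free_memory is made up for illustration.

    import torch

    def cuda_free_memory(dev=None):
        # Hypothetical helper mirroring the pre-change CUDA branch of get_free_memory().
        if dev is None:
            dev = torch.device("cuda", torch.cuda.current_device())
        stats = torch.cuda.memory_stats(dev)
        # Bytes in live tensors vs. bytes held (reserved) by the caching allocator.
        mem_active = stats['active_bytes.all.current']
        mem_reserved = stats['reserved_bytes.all.current']
        # Driver-reported free memory on the device.
        mem_free_cuda, _ = torch.cuda.mem_get_info(dev)
        # Same arithmetic as the original: driver-free plus reserved-but-idle allocator memory.
        mem_free_torch = mem_reserved - mem_active
        return mem_free_cuda + mem_free_torch

    if torch.cuda.is_available():
        print(f"approx. free VRAM: {cuda_free_memory() / (1024 ** 3):.2f} GiB")

After this commit, mem_reserved, mem_active, and mem_allocated are fixed at 100 bytes instead of being read from memory_stats(), so the torch-side components of the totals no longer reflect the allocator's actual state; only the driver-level mem_get_info() and get_device_properties() figures still vary.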