0x90e committed on
Commit
425da69
·
1 Parent(s): 4798451

Improved logging.

Files changed (3)
  1. app.py +0 -1
  2. inference.py +2 -0
  3. inference_manga_v2.py +2 -0
app.py CHANGED
@@ -10,7 +10,6 @@ is_colab = util.is_google_colab()
 
 def run_cmd(command):
     try:
-        print(command)
         call(command, shell=True)
     except KeyboardInterrupt:
         print("Process interrupted")
inference.py CHANGED
@@ -27,8 +27,10 @@ device = torch.device('cuda' if is_cuda() else 'cpu')
 model = arch.RRDB_Net(3, 3, 64, 23, gc=32, upscale=4, norm_type=None, act_type='leakyrelu', mode='CNA', res_scale=1, upsample_mode='upconv')
 
 if is_cuda():
+    print("Using GPU 🥶")
     model.load_state_dict(torch.load(model_path), strict=True)
 else:
+    print("Using CPU 😒")
     model.load_state_dict(torch.load(model_path, map_location=torch.device('cpu')), strict=True)
 
 model.eval()
inference_manga_v2.py CHANGED
@@ -19,8 +19,10 @@ device = torch.device('cuda' if is_cuda() else 'cpu')
 model = arch.RRDB_Net(1, 1, 64, 23, gc=32, upscale=4, norm_type=None, act_type='leakyrelu', mode='CNA', res_scale=1, upsample_mode='upconv')
 
 if is_cuda():
+    print("Using GPU 🥶")
     model.load_state_dict(torch.load(model_path), strict=True)
 else:
+    print("Using CPU 😒")
     model.load_state_dict(torch.load(model_path, map_location=torch.device('cpu')), strict=True)
 
 model.eval()
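
Both inference scripts now gain the same device check plus log line. As an illustrative sketch only, not code from this commit, the repeated pattern could be factored into a shared helper: `load_esrgan_model` is a hypothetical name, `use_cuda` stands in for the repo's `is_cuda()` check, and `model_path` is assumed to point at the ESRGAN checkpoint.

```python
import torch

def load_esrgan_model(model, model_path, use_cuda):
    # Hypothetical helper mirroring the pattern added in inference.py and
    # inference_manga_v2.py: log which device is used, then load the weights.
    if use_cuda:
        print("Using GPU 🥶")
        state = torch.load(model_path)
    else:
        print("Using CPU 😒")
        # map_location lets a CUDA-saved checkpoint load on a CPU-only host.
        state = torch.load(model_path, map_location=torch.device("cpu"))
    model.load_state_dict(state, strict=True)
    model.eval()
    return model
```

Either script could then call `model = load_esrgan_model(model, model_path, is_cuda())`, keeping the device logging in one place instead of duplicating it per file.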