sunshangquan committed
Commit 0e065e1
1 Parent(s): 387d8e9

commit from ssq

Files changed (1)
  1. app.py +5 -5
app.py CHANGED
@@ -13,7 +13,7 @@ model_restoration.eval()
 
 factor = 8
 def predict(input_img):
-    img = np.float32(load_img(file_))/255.
+    img = np.float32(load_img(input_img))/255.
     img = torch.from_numpy(img).permute(2,0,1)
     input_ = img.unsqueeze(0).cuda()
 
@@ -23,13 +23,13 @@ def predict(input_img):
     padh = H-h if h%factor!=0 else 0
     padw = W-w if w%factor!=0 else 0
     input_ = F.pad(input_, (0,padw,0,padh), 'reflect')
-    return input_
-    prediction = model_restoration(input_)
+
+    restored = model_restoration(input_)
     output_path = "restored.png"
-    result_dir = restored[:,:,:h,:w]
+    restored = restored[:,:,:h,:w]
     restored = torch.clamp(restored,0,1).cpu().detach().permute(0, 2, 3, 1).squeeze(0).numpy()
 
-    save_img(result_dir, img_as_ubyte(restored))
+    save_img(output_path, img_as_ubyte(restored))
 
 example_images = [
     "examples/example.jpeg",