jiang committed
Commit 36c6351
1 Parent(s): 16d7835
Files changed (2)
  1. demo.py +1 -1
  2. models/polyformer/swin.py +2 -2
demo.py CHANGED
@@ -227,7 +227,7 @@ def visual_grounding(image, text):
     sample = utils.move_to_cuda(sample) if use_cuda else sample
     sample = utils.apply_to_sample(apply_half, sample) if use_fp16 else sample
 
-    with torch.no_grad():
+    with torch.autocast("cpu"), torch.no_grad():
         if isinstance(models, list):
             model = models[0]
             min_len = 6
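For context, torch.autocast("cpu") runs eligible ops in a lower-precision dtype (bfloat16 by default on CPU) without converting the model or inputs by hand, which is presumably why it is wrapped around the existing torch.no_grad() inference block here. A minimal standalone sketch of the pattern; the model and sample below are hypothetical stand-ins, not the repo's objects:

import torch
import torch.nn as nn

# Hypothetical stand-ins for the loaded model and the prepared sample.
model = nn.Linear(16, 4)
sample = torch.randn(2, 16)

# CPU autocast + no_grad: eligible ops (e.g. linear/matmul) run in bfloat16,
# and no autograd graph is built during inference.
with torch.autocast("cpu"), torch.no_grad():
    out = model(sample)

print(out.dtype)  # typically torch.bfloat16 under CPU autocast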
models/polyformer/swin.py CHANGED
@@ -287,7 +287,7 @@ class WindowAttention(nn.Module):
         else:
             attn = self.softmax(attn)
 
-        attn = self.attn_drop(attn).half()
+        attn = self.attn_drop(attn)
 
         x = (attn @ v).transpose(1, 2).reshape(B_, N, C)
         x = self.proj(x)
@@ -758,4 +758,4 @@ class SwinTransformer(nn.Module):
         #outs = outs['layer2'] # [B, 512, 32, 32]
         outs = outs[f'layer{self.out_indices[0]}'] # [B, 512, 16, 16]
         #print(outs.shape)
-        return outs
+        return outs
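The removed .half() had forced the attention probabilities to float16, which likely conflicts with the new CPU autocast path added in demo.py (the surrounding tensors stay float32/bfloat16, and many fp16 kernels are not available on CPU); dropping it leaves the dtype to the ambient/autocast context. A rough standalone sketch of the affected attention tail, using assumed shapes rather than the real WindowAttention state:

import torch
import torch.nn as nn

# Assumed window-attention shapes: (batch*windows, heads, tokens, head_dim).
B_, num_heads, N, head_dim = 2, 4, 49, 32
attn = torch.randn(B_, num_heads, N, N)      # raw attention scores
v = torch.randn(B_, num_heads, N, head_dim)  # value tensor

attn = attn.softmax(dim=-1)       # normalize scores per query
attn = nn.Dropout(p=0.0)(attn)    # attn_drop, with no forced .half() afterwards

# Weighted sum over values, then merge heads back into the channel dimension.
x = (attn @ v).transpose(1, 2).reshape(B_, N, num_heads * head_dim)
print(x.shape)  # torch.Size([2, 49, 128])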