jclyo1 commited on
Commit
66a8ad0
1 Parent(s): 1a746a4
Files changed (2) hide show
  1. Dockerfile +1 -1
  2. app.py +20 -9
Dockerfile CHANGED
@@ -35,7 +35,7 @@ COPY --chown=user . $HOME/app
35
 
36
  # Download and unzip truepic-sign from Google Drive
37
  RUN pip install gdown
38
- RUN gdown --id 1MdXMu8xSuG8WClY3JSgtL3Vy1IWTmWZO
39
  RUN tar -xf truepic-sign-v0.1.0-ubuntu-latest.tar.gz
40
  RUN chmod +x truepic-sign
41
 
 
35
 
36
  # Download and unzip truepic-sign from Google Drive
37
  RUN pip install gdown
38
+ RUN gdown 1MdXMu8xSuG8WClY3JSgtL3Vy1IWTmWZO
39
  RUN tar -xf truepic-sign-v0.1.0-ubuntu-latest.tar.gz
40
  RUN chmod +x truepic-sign
41
 
app.py CHANGED
@@ -2,25 +2,36 @@ import gradio as gr
2
  import torch
3
  import requests
4
  from torchvision import transforms
 
 
5
 
6
  model = torch.hub.load("pytorch/vision:v0.6.0", "resnet18", pretrained=True).eval()
7
  response = requests.get("https://git.io/JJkYN")
8
  labels = response.text.split("\n")
9
 
10
 
11
- def predict(inp):
12
- inp = transforms.ToTensor()(inp).unsqueeze(0)
13
- with torch.no_grad():
14
- prediction = torch.nn.functional.softmax(model(inp)[0], dim=0)
15
- confidences = {labels[i]: float(prediction[i]) for i in range(1000)}
16
- return confidences
 
 
 
 
 
 
 
 
17
 
18
 
19
  def run():
20
  demo = gr.Interface(
21
- fn=predict,
22
- inputs=gr.inputs.Image(type="pil"),
23
- outputs=gr.outputs.Label(num_top_classes=3),
 
24
  )
25
 
26
  demo.launch(server_name="0.0.0.0", server_port=7860)
 
2
  import torch
3
  import requests
4
  from torchvision import transforms
5
+ from diffusers import StableDiffusionPipeline, DPMSolverMultistepScheduler, EulerDiscreteScheduler
6
+
7
 
8
  model = torch.hub.load("pytorch/vision:v0.6.0", "resnet18", pretrained=True).eval()
9
  response = requests.get("https://git.io/JJkYN")
10
  labels = response.text.split("\n")
11
 
12
 
13
def generate(inp, model_id="runwayml/stable-diffusion-v1-5"):
    """Generate a 512x512 image from a text prompt with Stable Diffusion.

    Args:
        inp: The text prompt to render.
        model_id: Hugging Face Hub id of the diffusion pipeline to load.
            The original code passed ``str(model)`` — the string repr of the
            ResNet-18 classifier loaded above — which is never a valid repo
            id, so the call could not succeed. TODO(review): confirm the
            intended model id.

    Returns:
        The first PIL image produced by the pipeline.
    """
    cuda_ok = torch.cuda.is_available()
    print(f"Is CUDA available: {cuda_ok}")
    if cuda_ok:
        # Free cached GPU memory from any previous generation.
        torch.cuda.empty_cache()

    # float16 weights are only supported on GPU; fall back to float32 on CPU
    # so the app still runs on CPU-only hosts.
    dtype = torch.float16 if cuda_ok else torch.float32
    pipeline = StableDiffusionPipeline.from_pretrained(model_id, torch_dtype=dtype)
    pipeline.scheduler = EulerDiscreteScheduler.from_config(pipeline.scheduler.config)

    # NOTE(review): loading the pipeline on every request is slow; consider
    # hoisting it to module scope once the model id is confirmed.
    pipeline = pipeline.to("cuda" if cuda_ok else "cpu")
    image = pipeline(inp, height=512, width=512).images[0]
    return image
27
 
28
 
29
def run():
    """Build and launch the Gradio text-to-image demo.

    Listens on all interfaces (required inside a container) on port 7860.
    """
    demo = gr.Interface(
        fn=generate,
        # ``gr.inputs.Text`` does not exist (the legacy class was
        # ``gr.inputs.Textbox``), and the ``gr.inputs``/``gr.outputs``
        # namespaces are deprecated; use the current top-level components.
        inputs=gr.Textbox(label="Prompt"),
        outputs=gr.Image(type="pil"),
    )

    demo.launch(server_name="0.0.0.0", server_port=7860)