hysts HF staff committed on
Commit
de28386
1 Parent(s): 191e2b7
Files changed (5) hide show
  1. .pre-commit-config.yaml +3 -12
  2. README.md +4 -1
  3. app.py +69 -102
  4. model.py +9 -14
  5. requirements.txt +5 -5
.pre-commit-config.yaml CHANGED
@@ -21,26 +21,17 @@ repos:
21
  - id: docformatter
22
  args: ['--in-place']
23
  - repo: https://github.com/pycqa/isort
24
- rev: 5.10.1
25
  hooks:
26
  - id: isort
27
  - repo: https://github.com/pre-commit/mirrors-mypy
28
- rev: v0.812
29
  hooks:
30
  - id: mypy
31
  args: ['--ignore-missing-imports']
 
32
  - repo: https://github.com/google/yapf
33
  rev: v0.32.0
34
  hooks:
35
  - id: yapf
36
  args: ['--parallel', '--in-place']
37
- - repo: https://github.com/kynan/nbstripout
38
- rev: 0.5.0
39
- hooks:
40
- - id: nbstripout
41
- args: ['--extra-keys', 'metadata.interpreter metadata.kernelspec cell.metadata.pycharm']
42
- - repo: https://github.com/nbQA-dev/nbQA
43
- rev: 1.3.1
44
- hooks:
45
- - id: nbqa-isort
46
- - id: nbqa-yapf
 
21
  - id: docformatter
22
  args: ['--in-place']
23
  - repo: https://github.com/pycqa/isort
24
+ rev: 5.12.0
25
  hooks:
26
  - id: isort
27
  - repo: https://github.com/pre-commit/mirrors-mypy
28
+ rev: v0.991
29
  hooks:
30
  - id: mypy
31
  args: ['--ignore-missing-imports']
32
+ additional_dependencies: ['types-python-slugify']
33
  - repo: https://github.com/google/yapf
34
  rev: v0.32.0
35
  hooks:
36
  - id: yapf
37
  args: ['--parallel', '--in-place']
 
 
 
 
 
 
 
 
 
 
README.md CHANGED
@@ -4,9 +4,12 @@ emoji: 🐨
4
  colorFrom: indigo
5
  colorTo: red
6
  sdk: gradio
7
- sdk_version: 3.1.3
8
  app_file: app.py
9
  pinned: false
 
10
  ---
11
 
12
  Check out the configuration reference at https://huggingface.co/docs/hub/spaces#reference
 
 
 
4
  colorFrom: indigo
5
  colorTo: red
6
  sdk: gradio
7
+ sdk_version: 3.35.2
8
  app_file: app.py
9
  pinned: false
10
+ suggested_hardware: t4-small
11
  ---
12
 
13
  Check out the configuration reference at https://huggingface.co/docs/hub/spaces#reference
14
+
15
+ https://arxiv.org/abs/2202.12211
app.py CHANGED
@@ -2,7 +2,6 @@
2
 
3
  from __future__ import annotations
4
 
5
- import argparse
6
  import pathlib
7
 
8
  import gradio as gr
@@ -10,23 +9,7 @@ import numpy as np
10
 
11
  from model import Model
12
 
13
- TITLE = '# Self-Distilled StyleGAN'
14
- DESCRIPTION = '''This is an unofficial demo for [https://github.com/self-distilled-stylegan/self-distilled-internet-photos](https://github.com/self-distilled-stylegan/self-distilled-internet-photos).
15
-
16
- Expected execution time on Hugging Face Spaces: 2s'''
17
- FOOTER = '<img id="visitor-badge" src="https://visitor-badge.glitch.me/badge?page_id=hysts.self-distilled-stylegan" alt="visitor badge" />'
18
-
19
-
20
- def parse_args() -> argparse.Namespace:
21
- parser = argparse.ArgumentParser()
22
- parser.add_argument('--device', type=str, default='cpu')
23
- parser.add_argument('--theme', type=str)
24
- parser.add_argument('--share', action='store_true')
25
- parser.add_argument('--port', type=int)
26
- parser.add_argument('--disable-queue',
27
- dest='enable_queue',
28
- action='store_false')
29
- return parser.parse_args()
30
 
31
 
32
  def get_sample_image_url(name: str) -> str:
@@ -56,87 +39,71 @@ def get_cluster_center_image_markdown(model_name: str) -> str:
56
  return f'![cluster center images]({url})'
57
 
58
 
59
- def main():
60
- args = parse_args()
61
-
62
- model = Model(args.device)
63
-
64
- with gr.Blocks(theme=args.theme, css='style.css') as demo:
65
- gr.Markdown(TITLE)
66
- gr.Markdown(DESCRIPTION)
67
-
68
- with gr.Tabs():
69
- with gr.TabItem('App'):
70
- with gr.Row():
71
- with gr.Column():
72
- with gr.Group():
73
- model_name = gr.Dropdown(
74
- model.MODEL_NAMES,
75
- value=model.MODEL_NAMES[0],
76
- label='Model')
77
- seed = gr.Slider(0,
78
- np.iinfo(np.uint32).max,
79
- value=0,
80
- step=1,
81
- label='Seed')
82
- psi = gr.Slider(0,
83
- 2,
84
- step=0.05,
85
- value=0.7,
86
- label='Truncation psi')
87
- truncation_type = gr.Dropdown(
88
- model.TRUNCATION_TYPES,
89
- value=model.TRUNCATION_TYPES[0],
90
- label='Truncation Type')
91
- run_button = gr.Button('Run')
92
- with gr.Column():
93
- result = gr.Image(label='Result', elem_id='result')
94
-
95
- with gr.TabItem('Sample Images'):
96
- with gr.Row():
97
- paths = sorted(pathlib.Path('samples').glob('*'))
98
- names = [path.stem for path in paths]
99
- model_name2 = gr.Dropdown(
100
- names,
101
- value='dogs_1024_multimodal_lpips',
102
- label='Type')
103
- with gr.Row():
104
- text = get_sample_image_markdown(model_name2.value)
105
- sample_images = gr.Markdown(text)
106
-
107
- with gr.TabItem('Cluster Center Images'):
108
- with gr.Row():
109
- model_name3 = gr.Dropdown(model.MODEL_NAMES,
110
- value=model.MODEL_NAMES[0],
111
- label='Model')
112
- with gr.Row():
113
- text = get_cluster_center_image_markdown(model_name3.value)
114
- cluster_center_images = gr.Markdown(value=text)
115
-
116
- gr.Markdown(FOOTER)
117
-
118
- model_name.change(fn=model.set_model, inputs=model_name, outputs=None)
119
- run_button.click(fn=model.set_model_and_generate_image,
120
- inputs=[
121
- model_name,
122
- seed,
123
- psi,
124
- truncation_type,
125
- ],
126
- outputs=result)
127
- model_name2.change(fn=get_sample_image_markdown,
128
- inputs=model_name2,
129
- outputs=sample_images)
130
- model_name3.change(fn=get_cluster_center_image_markdown,
131
- inputs=model_name3,
132
- outputs=cluster_center_images)
133
-
134
- demo.launch(
135
- enable_queue=args.enable_queue,
136
- server_port=args.port,
137
- share=args.share,
138
- )
139
-
140
-
141
- if __name__ == '__main__':
142
- main()
 
2
 
3
  from __future__ import annotations
4
 
 
5
  import pathlib
6
 
7
  import gradio as gr
 
9
 
10
  from model import Model
11
 
12
+ DESCRIPTION = '# [Self-Distilled StyleGAN](https://github.com/self-distilled-stylegan/self-distilled-internet-photos)'
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
13
 
14
 
15
  def get_sample_image_url(name: str) -> str:
 
39
  return f'![cluster center images]({url})'
40
 
41
 
42
+ model = Model()
43
+
44
+ with gr.Blocks(css='style.css') as demo:
45
+ gr.Markdown(DESCRIPTION)
46
+
47
+ with gr.Tabs():
48
+ with gr.TabItem('App'):
49
+ with gr.Row():
50
+ with gr.Column():
51
+ with gr.Group():
52
+ model_name = gr.Dropdown(label='Model',
53
+ choices=model.MODEL_NAMES,
54
+ value=model.MODEL_NAMES[0])
55
+ seed = gr.Slider(label='Seed',
56
+ minimum=0,
57
+ maximum=np.iinfo(np.uint32).max,
58
+ step=1,
59
+ value=0)
60
+ psi = gr.Slider(label='Truncation psi',
61
+ minimum=0,
62
+ maximum=2,
63
+ step=0.05,
64
+ value=0.7)
65
+ truncation_type = gr.Dropdown(
66
+ label='Truncation Type',
67
+ choices=model.TRUNCATION_TYPES,
68
+ value=model.TRUNCATION_TYPES[0])
69
+ run_button = gr.Button('Run')
70
+ with gr.Column():
71
+ result = gr.Image(label='Result', elem_id='result')
72
+
73
+ with gr.TabItem('Sample Images'):
74
+ with gr.Row():
75
+ paths = sorted(pathlib.Path('samples').glob('*'))
76
+ names = [path.stem for path in paths]
77
+ model_name2 = gr.Dropdown(label='Type',
78
+ choices=names,
79
+ value='dogs_1024_multimodal_lpips')
80
+ with gr.Row():
81
+ text = get_sample_image_markdown(model_name2.value)
82
+ sample_images = gr.Markdown(text)
83
+
84
+ with gr.TabItem('Cluster Center Images'):
85
+ with gr.Row():
86
+ model_name3 = gr.Dropdown(label='Model',
87
+ choices=model.MODEL_NAMES,
88
+ value=model.MODEL_NAMES[0])
89
+ with gr.Row():
90
+ text = get_cluster_center_image_markdown(model_name3.value)
91
+ cluster_center_images = gr.Markdown(value=text)
92
+
93
+ model_name.change(fn=model.set_model, inputs=model_name)
94
+ run_button.click(fn=model.set_model_and_generate_image,
95
+ inputs=[
96
+ model_name,
97
+ seed,
98
+ psi,
99
+ truncation_type,
100
+ ],
101
+ outputs=result)
102
+ model_name2.change(fn=get_sample_image_markdown,
103
+ inputs=model_name2,
104
+ outputs=sample_images)
105
+ model_name3.change(fn=get_cluster_center_image_markdown,
106
+ inputs=model_name3,
107
+ outputs=cluster_center_images)
108
+
109
+ demo.queue(max_size=10).launch()
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
model.py CHANGED
@@ -1,6 +1,5 @@
1
  from __future__ import annotations
2
 
3
- import os
4
  import pathlib
5
  import pickle
6
  import sys
@@ -15,8 +14,6 @@ current_dir = pathlib.Path(__file__).parent
15
  submodule_dir = current_dir / 'stylegan3'
16
  sys.path.insert(0, submodule_dir.as_posix())
17
 
18
- HF_TOKEN = os.environ['HF_TOKEN']
19
-
20
 
21
  class LPIPS(lpips.LPIPS):
22
  @staticmethod
@@ -60,8 +57,9 @@ class Model:
60
  'Global',
61
  ]
62
 
63
- def __init__(self, device: str | torch.device):
64
- self.device = torch.device(device)
 
65
  self._download_all_models()
66
  self._download_all_cluster_centers()
67
  self._download_all_cluster_center_images()
@@ -77,9 +75,8 @@ class Model:
77
  )
78
 
79
  def _load_model(self, model_name: str) -> nn.Module:
80
- path = hf_hub_download('hysts/Self-Distilled-StyleGAN',
81
- f'models/{model_name}_pytorch.pkl',
82
- use_auth_token=HF_TOKEN)
83
  with open(path, 'rb') as f:
84
  model = pickle.load(f)['G_ema']
85
  model.eval()
@@ -87,17 +84,15 @@ class Model:
87
  return model
88
 
89
  def _load_cluster_centers(self, model_name: str) -> torch.Tensor:
90
- path = hf_hub_download('hysts/Self-Distilled-StyleGAN',
91
- f'cluster_centers/{model_name}.npy',
92
- use_auth_token=HF_TOKEN)
93
  centers = np.load(path)
94
  centers = torch.from_numpy(centers).float().to(self.device)
95
  return centers
96
 
97
  def _load_cluster_center_images(self, model_name: str) -> np.ndarray:
98
- path = hf_hub_download('hysts/Self-Distilled-StyleGAN',
99
- f'cluster_center_images/{model_name}.npy',
100
- use_auth_token=HF_TOKEN)
101
  return np.load(path)
102
 
103
  def set_model(self, model_name: str) -> None:
 
1
  from __future__ import annotations
2
 
 
3
  import pathlib
4
  import pickle
5
  import sys
 
14
  submodule_dir = current_dir / 'stylegan3'
15
  sys.path.insert(0, submodule_dir.as_posix())
16
 
 
 
17
 
18
  class LPIPS(lpips.LPIPS):
19
  @staticmethod
 
57
  'Global',
58
  ]
59
 
60
+ def __init__(self):
61
+ self.device = torch.device(
62
+ 'cuda:0' if torch.cuda.is_available() else 'cpu')
63
  self._download_all_models()
64
  self._download_all_cluster_centers()
65
  self._download_all_cluster_center_images()
 
75
  )
76
 
77
  def _load_model(self, model_name: str) -> nn.Module:
78
+ path = hf_hub_download('public-data/Self-Distilled-StyleGAN',
79
+ f'models/{model_name}_pytorch.pkl')
 
80
  with open(path, 'rb') as f:
81
  model = pickle.load(f)['G_ema']
82
  model.eval()
 
84
  return model
85
 
86
  def _load_cluster_centers(self, model_name: str) -> torch.Tensor:
87
+ path = hf_hub_download('public-data/Self-Distilled-StyleGAN',
88
+ f'cluster_centers/{model_name}.npy')
 
89
  centers = np.load(path)
90
  centers = torch.from_numpy(centers).float().to(self.device)
91
  return centers
92
 
93
  def _load_cluster_center_images(self, model_name: str) -> np.ndarray:
94
+ path = hf_hub_download('public-data/Self-Distilled-StyleGAN',
95
+ f'cluster_center_images/{model_name}.npy')
 
96
  return np.load(path)
97
 
98
  def set_model(self, model_name: str) -> None:
requirements.txt CHANGED
@@ -1,6 +1,6 @@
1
  lpips==0.1.4
2
- numpy==1.22.3
3
- Pillow==9.0.1
4
- scipy==1.8.0
5
- torch==1.11.0
6
- torchvision==0.12.0
 
1
  lpips==0.1.4
2
+ numpy==1.23.5
3
+ Pillow==10.0.0
4
+ scipy==1.10.1
5
+ torch==2.0.1
6
+ torchvision==0.15.2