r3gm committed on
Commit 8f68514
1 Parent(s): c24dce8

Update app.py

Files changed (1)
  1. app.py +25 -2
app.py CHANGED
@@ -54,6 +54,8 @@ from utils import (
     extract_exif_data,
     create_mask_now,
     download_diffuser_repo,
+    get_used_storage_gb,
+    delete_model,
     progress_step_bar,
     html_template_message,
     escape_html,
@@ -95,8 +97,8 @@ embed_list = get_model_list(DIRECTORY_EMBEDS)
 embed_list = [
     (os.path.splitext(os.path.basename(emb))[0], emb) for emb in embed_list
 ]
-model_list = get_model_list(DIRECTORY_MODELS)
-model_list = LOAD_DIFFUSERS_FORMAT_MODEL + model_list
+single_file_model_list = get_model_list(DIRECTORY_MODELS)
+model_list = LOAD_DIFFUSERS_FORMAT_MODEL + single_file_model_list
 lora_model_list = get_model_list(DIRECTORY_LORAS)
 lora_model_list.insert(0, "None")
 lora_model_list = lora_model_list + DIFFUSERS_FORMAT_LORAS
@@ -130,9 +132,28 @@ class GuiSD:
         self.status_loading = False
         self.sleep_loading = 4
         self.last_load = datetime.now()
+        self.inventory = []
+
+    def update_storage_models(self, storage_floor_gb=42, required_inventory_for_purge=3):
+        while get_used_storage_gb() > storage_floor_gb:
+            if len(self.inventory) < required_inventory_for_purge:
+                break
+            removal_candidate = self.inventory.pop(0)
+            delete_model(removal_candidate)
+
+    def update_inventory(self, model_name):
+        if model_name not in single_file_model_list:
+            self.inventory = [
+                m for m in self.inventory if m != model_name
+            ] + [model_name]
+        print(self.inventory)
 
     def load_new_model(self, model_name, vae_model, task, progress=gr.Progress(track_tqdm=True)):
 
+        self.update_storage_models()
+
+        # download link model > model_name
+
         vae_model = vae_model if vae_model != "None" else None
         model_type = get_model_type(model_name)
         dtype_model = torch.bfloat16 if model_type == "FLUX" else torch.float16
@@ -145,6 +166,8 @@
             token=True,
         )
 
+        self.update_inventory(model_name)
+
         for i in range(68):
             if not self.status_loading:
                 self.status_loading = True
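
For context: the two helpers this commit starts importing from utils, get_used_storage_gb and delete_model, are not shown in the diff itself. Below is a minimal sketch of what such helpers could look like, assuming downloaded models live as files or directories on local disk; the default scan path and the directory-vs-file handling are illustrative assumptions, not the repository's actual utils.py.

# Hypothetical sketch of the helpers imported in this commit; the real
# implementations in utils.py are not part of this diff.
import os
import shutil


def get_used_storage_gb(path="."):
    # Walk `path` and return the total size of all regular files in GiB.
    total_bytes = 0
    for root, _, files in os.walk(path):
        for name in files:
            file_path = os.path.join(root, name)
            if os.path.isfile(file_path):
                total_bytes += os.path.getsize(file_path)
    return total_bytes / (1024 ** 3)


def delete_model(model_path):
    # Remove a single-file checkpoint or a diffusers-format model directory.
    if os.path.isdir(model_path):
        shutil.rmtree(model_path, ignore_errors=True)
    elif os.path.isfile(model_path):
        os.remove(model_path)

With helpers along these lines, update_storage_models keeps deleting the oldest entry in self.inventory while disk usage stays above storage_floor_gb (42 GB by default), and stops as soon as fewer than required_inventory_for_purge models are tracked, so the most recently used entries are never purged. update_inventory only tracks models that are not in single_file_model_list, i.e. the diffusers-format entries from LOAD_DIFFUSERS_FORMAT_MODEL, moving the current model to the end of the list like a simple LRU.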