Kit-Lemonfoot committed on
Commit d2ffab4
Parent: 6b281b8

The great model purge is complete.


TONS of model changes; added Muu, Nasa, and Nodoka.
The Space has had its size reduced from 45 GB to 14 GB. Massive!

Files changed (1)
  1. app.py +4 -2
app.py CHANGED

@@ -202,6 +202,8 @@ def load_model():
             model_index = f"weights/{category_folder}/{character_name}/{info['feature_retrieval_library']}"
             if info['feature_retrieval_library'] == "None":
                 model_index = None
+            if model_index:
+                assert os.path.exists(model_index), f"Model {model_title} failed to load index."
             if not (model_author in authors or "/" in model_author or "&" in model_author):
                 authors.append(model_author)
             model_path = f"weights/{category_folder}/{character_name}/{model_name}"
@@ -485,7 +487,7 @@ if __name__ == '__main__':
             "<center>Do no evil.\n\n"
             "[![image](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/drive/1Til3SY7-X0x3Wss3YXlgfq8go39DzWHk)\n\n"
         )
-        gr.Markdown("## <center> On March 31 2024, several models will be <b>removed</b> from this Space. <a href=\"https://rentry.co/vtrvcm_changes\">Please visit this link for more information.</a>")
+        gr.Markdown("<center> Looking for more models? <a href=\"https://docs.google.com/spreadsheets/d/1tvZSggOsZGAPjbMrWOAAaoJJFpJuQlwUEQCf5x1ssO8\">Check out the VTuber AI Model Tracking spreadsheet!</a>")
         for (folder_title, folder, models) in categories:
             with gr.TabItem(folder_title):
                 with gr.Tabs():
@@ -599,4 +601,4 @@ if __name__ == '__main__':
     if limitation is True:
         app.queue(concurrency_count=1, max_size=20, api_open=config.api).launch(share=config.colab)
     else:
-        app.queue(concurrency_count=1, max_size=20, api_open=config.api).launch(share=True)
+        app.queue(concurrency_count=1, max_size=20, api_open=config.api).launch(share=False)
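
For context on the first hunk: the new lines assert that each model's feature-retrieval index actually exists on disk, so an entry in model_info.json that points at files removed during the purge fails loudly at startup instead of erroring mid-inference. The sketch below restates that guard as a standalone helper; the helper name `resolve_model_index` and the `weights_root` parameter are hypothetical, while the dict keys and path layout mirror app.py.

```python
import os

def resolve_model_index(weights_root, category_folder, character_name, info, model_title):
    """Return the path of a model's feature-retrieval index, or None if it has none.

    Hypothetical helper mirroring the check added in this commit: if the
    model_info.json entry points at an index file that is no longer on disk
    (e.g. a model removed during the purge), fail fast with a clear message.
    """
    if info['feature_retrieval_library'] == "None":
        return None
    model_index = f"{weights_root}/{category_folder}/{character_name}/{info['feature_retrieval_library']}"
    # Fail fast at load time rather than during inference.
    assert os.path.exists(model_index), f"Model {model_title} failed to load index."
    return model_index
```

Under this assumption, a purged model whose entry still lists an `.index` file raises an AssertionError naming the offending model when the Space starts, which makes cleanup mistakes easy to spot after a large reorganization like this one.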