davanstrien HF staff commited on
Commit
c66e8f9
1 Parent(s): 054abb2

filter zero

Browse files
Files changed (1) hide show
  1. app.py +14 -3
app.py CHANGED
@@ -8,8 +8,15 @@ from cachetools import cached, TTLCache

 @cached(TTLCache(maxsize=10, ttl=60 * 60 * 3))
 def get_all_models():
-    models = list(tqdm(iter(list_models(cardData=True, limit=None))))
-    return [model for model in models if model is not None]

  def has_base_model_info(model):
@@ -69,7 +76,10 @@ def return_models_for_base_model(base_model):
     # sort models by downloads
     models = sorted(models, key=lambda x: x.downloads, reverse=True)
     results = ""
-    results += f"## Models fine-tuned from [`{base_model}`](https://huggingface.co/{base_model}) \n\n"
     results += f"`{base_model}` has {len(models)} children\n\n"
     total_download_number = sum(model.downloads for model in models)
     results += (
@@ -131,6 +141,7 @@ with gr.Blocks() as demo:
     You can also optionally filter by task to see rankings for a particular machine learning task.
     Don't forget to ❤ if you like this space 🤗"""
     )
     gr.Markdown(produce_summary())
     gr.Markdown("## Find all models trained from a base model")
     base_model = gr.Dropdown(all_base_models, label="Base Model")
 
 @cached(TTLCache(maxsize=10, ttl=60 * 60 * 3))
 def get_all_models():
+    models = list(
+        tqdm(
+            iter(list_models(cardData=True, limit=None, sort="downloads", direction=-1))
+        )
+    )
+    models = [model for model in models if model is not None]
+    return [
+        model for model in models if model.downloads > 1
+    ]  # filter out models with 0 downloads


 def has_base_model_info(model):
 
     # sort models by downloads
     models = sorted(models, key=lambda x: x.downloads, reverse=True)
     results = ""
+    results += (
+        "## Models fine-tuned from"
+        f" [`{base_model}`](https://huggingface.co/{base_model}) \n\n"
+    )
     results += f"`{base_model}` has {len(models)} children\n\n"
     total_download_number = sum(model.downloads for model in models)
     results += (
 
     You can also optionally filter by task to see rankings for a particular machine learning task.
     Don't forget to ❤ if you like this space 🤗"""
     )
+
     gr.Markdown(produce_summary())
     gr.Markdown("## Find all models trained from a base model")
     base_model = gr.Dropdown(all_base_models, label="Base Model")