jsulz HF staff committed on
Commit
9c5987b
1 Parent(s): f6db30c

adding intro and futzing with some charts

Browse files
Files changed (1) hide show
  1. app.py +22 -4
app.py CHANGED
@@ -172,7 +172,7 @@ def cumulative_growth_plot_analysis(df, df_compressed):
172
 
173
  # Update layout
174
  fig.update_layout(
175
- title="Cumulative Growth of Models, Spaces, and Datasets Over Time",
176
  xaxis_title="Date",
177
  yaxis_title="Cumulative Size (PBs)",
178
  legend_title="Type",
@@ -242,13 +242,22 @@ def filter_by_extension_month(_df, _extension):
242
  fig.add_trace(
243
  go.Scatter(
244
  x=pivot_df.index,
245
- y=pivot_df[column] / 1e15, # Convert to petabytes
246
  mode="lines",
247
- name=column.capitalize(),
248
  line=dict(color=px.colors.qualitative.Alphabet[i]),
249
  )
250
  )
251
 
 
 
 
 
 
 
 
 
 
252
  return fig
253
 
254
 
@@ -258,6 +267,13 @@ with gr.Blocks() as demo:
258
 
259
  # Add a heading
260
  gr.Markdown("# Git LFS Analysis Across the Hub")
 
 
 
 
 
 
 
261
  with gr.Row():
262
  # scale so that
263
  # group the data by month and year and compute a cumulative sum of the total_size column
@@ -265,7 +281,7 @@ with gr.Blocks() as demo:
265
  with gr.Column(scale=1):
266
  gr.Markdown("# Repository Growth")
267
  gr.Markdown(
268
- "The cumulative growth of models, spaces, and datasets over time can be seen in the adjacent chart. Beside that is a view of the total change, month to month, of LFS files stored on the hub over 2024. We're averaging nearly **2.3 PBs uploaded to LFS per month!**"
269
  )
270
  gr.Dataframe(last_10_months, height=250)
271
  with gr.Column(scale=3):
@@ -328,4 +344,6 @@ with gr.Blocks() as demo:
328
  )
329
  _by_extension_month = gr.State(by_extension_month)
330
  gr.Plot(filter_by_extension_month, inputs=[_by_extension_month, extension])
 
 
331
  demo.launch()
 
172
 
173
  # Update layout
174
  fig.update_layout(
175
+ title="Cumulative Growth of Models, Spaces, and Datasets Over Time<br><sup>Dotted lines represent growth with file-level deduplication</sup>",
176
  xaxis_title="Date",
177
  yaxis_title="Cumulative Size (PBs)",
178
  legend_title="Type",
 
242
  fig.add_trace(
243
  go.Scatter(
244
  x=pivot_df.index,
245
+ y=pivot_df[column] / 1e12, # Convert to terabytes
246
  mode="lines",
247
+ name=column,
248
  line=dict(color=px.colors.qualitative.Alphabet[i]),
249
  )
250
  )
251
 
252
+ # Update layout
253
+ fig.update_layout(
254
+ title="Monthly Additions of LFS Files by Extension (in TBs)",
255
+ xaxis_title="Date",
256
+ yaxis_title="Size (TBs)",
257
+ legend_title="Type",
258
+ yaxis=dict(tickformat=".2f"), # Format y-axis labels to 2 decimal places
259
+ )
260
+
261
  return fig
262
 
263
 
 
267
 
268
  # Add a heading
269
  gr.Markdown("# Git LFS Analysis Across the Hub")
270
+ gr.Markdown(
271
+ "The Hugging Face Hub has just crossed 1,000,000 models - but where is all that data stored? The short answer is Git LFS. This analysis dives into the LFS storage on the Hub, breaking down the data by repository type, file extension, and growth over time."
272
+ )
273
+
274
+ gr.Markdown(
275
+ "Now, you might ask yourself, 'Why are you doing this?' Well, the [Xet Team](https://huggingface.co/xet-team) is a [new addition to Hugging Face](https://huggingface.co/blog/xethub-joins-hf), bringing a new way to store massive datasets and models to enable ML teams to operate like software teams: Quickly and without friction. Because this story all starts with storage, that's where we've begun with our own deep dives into what the Hub holds. As part of this, we've included a look at what happens with just one simple deduplication strategy - deduplicating at the file level. Read on to see more!"
276
+ )
277
  with gr.Row():
278
  # scale so that
279
  # group the data by month and year and compute a cumulative sum of the total_size column
 
281
  with gr.Column(scale=1):
282
  gr.Markdown("# Repository Growth")
283
  gr.Markdown(
284
+ "The cumulative growth of models, spaces, and datasets over time can be seen in the adjacent chart. Beside that is a view of the total change, from the previous month to the current one, of LFS files stored on the Hub over 2024. We're averaging nearly **2.3 PBs uploaded to LFS per month!**"
285
  )
286
  gr.Dataframe(last_10_months, height=250)
287
  with gr.Column(scale=3):
 
344
  )
345
  _by_extension_month = gr.State(by_extension_month)
346
  gr.Plot(filter_by_extension_month, inputs=[_by_extension_month, extension])
347
+
348
+ # launch the dang thing
349
  demo.launch()