ozo committed on
Commit
490c4aa
1 Parent(s): e96e6ad

neurips note 4m-21

Files changed (1)
  1. app.py +1 -1
app.py CHANGED
@@ -107,7 +107,7 @@ with gr.Blocks(css=css, theme=gr.themes.Base()) as demo:
     gr.Markdown(f"""
     *A framework for training any-to-any multimodal foundation models. Scalable. Open-sourced. Across tens of modalities and tasks.*
 
-    [`Website`](https://4m.epfl.ch) | [`GitHub`](https://github.com/apple/ml-4m) <br>[`4M Paper (NeurIPS'23)`](https://arxiv.org/abs/2312.06647) | [`4M-21 Paper (arXiv'24)`](https://arxiv.org/abs/2406.09406)
+    [`Website`](https://4m.epfl.ch) | [`GitHub`](https://github.com/apple/ml-4m) <br>[`4M Paper (NeurIPS'23)`](https://arxiv.org/abs/2312.06647) | [`4M-21 Paper (NeurIPS'24)`](https://arxiv.org/abs/2406.09406)
 
     This demo predicts all modalities from a given RGB input, using [{FM_MODEL_ID}](https://huggingface.co/{FM_MODEL_ID}), running on *{power_device}*.
     For more generative any-to-any examples, please see our [GitHub repo](https://github.com/apple/ml-4m#generation).
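For context, a minimal sketch of how this header is rendered: the edited markdown sits inside a `gr.Blocks` layout as an f-string, with `FM_MODEL_ID`, `power_device`, and `css` defined elsewhere in app.py. The values below are placeholders for illustration, not the app's real configuration.

```python
# Minimal sketch of the header block touched by this commit (not the full app.py).
# FM_MODEL_ID, power_device, and css are placeholders; the real app defines them elsewhere.
import gradio as gr

FM_MODEL_ID = "EPFL-VILAB/4M-21_XL"  # hypothetical placeholder model ID
power_device = "GPU"                 # hypothetical placeholder; detected at runtime in the real app
css = ""

with gr.Blocks(css=css, theme=gr.themes.Base()) as demo:
    gr.Markdown(f"""
    *A framework for training any-to-any multimodal foundation models. Scalable. Open-sourced. Across tens of modalities and tasks.*

    [`Website`](https://4m.epfl.ch) | [`GitHub`](https://github.com/apple/ml-4m) <br>[`4M Paper (NeurIPS'23)`](https://arxiv.org/abs/2312.06647) | [`4M-21 Paper (NeurIPS'24)`](https://arxiv.org/abs/2406.09406)

    This demo predicts all modalities from a given RGB input, using [{FM_MODEL_ID}](https://huggingface.co/{FM_MODEL_ID}), running on *{power_device}*.
    For more generative any-to-any examples, please see our [GitHub repo](https://github.com/apple/ml-4m#generation).
    """)

if __name__ == "__main__":
    demo.launch()
```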