Fraser committed on
Commit
0997afc
1 Parent(s): 7bbddfb
Files changed (2) hide show
  1. app.py +12 -3
  2. info.py +0 -5
app.py CHANGED
@@ -3,7 +3,6 @@ import jax.numpy as jnp
3
  from transformers import AutoTokenizer
4
  from transformers.models.t5.modeling_flax_t5 import shift_tokens_right
5
  from t5_vae_flax_alt.src.t5_vae import FlaxT5VaeForAutoencoding
6
- import info
7
 
8
 
9
  st.set_page_config(
@@ -25,7 +24,9 @@ See how it works [here](http://fras.uk/ml/large%20prior-free%20models/transforme
25
  ''')
26
 
27
  st.text('''
28
- Try interpolating between lines of Python code using this T5-VAE.
 
 
29
  ''')
30
 
31
 
@@ -119,7 +120,11 @@ st.write(out)
119
 
120
 
121
  st.text('''
122
- Try interpolating between sentences from wikipedia using this T5-VAE.
 
 
 
 
123
  ''')
124
 
125
 
@@ -146,6 +151,10 @@ st.write(out)
146
 
147
  st.text('''
148
  Try arithmetic in latent space.
 
 
 
 
149
  ''')
150
 
151
 
 
3
  from transformers import AutoTokenizer
4
  from transformers.models.t5.modeling_flax_t5 import shift_tokens_right
5
  from t5_vae_flax_alt.src.t5_vae import FlaxT5VaeForAutoencoding
 
6
 
7
 
8
  st.set_page_config(
 
24
  ''')
25
 
26
  st.text('''
27
+ ### t5-vae-python
28
+
29
+ This model is trained on lines of Python code from GitHub ([dataset](https://huggingface.co/datasets/Fraser/python-lines)).
30
  ''')
31
 
32
 
 
120
 
121
 
122
  st.text('''
123
+ ### t5-vae-wiki
124
+
125
+ This model is trained on just 5% of the sentences on Wikipedia.
126
+
127
+ Hopefully we'll release another model trained on the whole dataset soon.
128
  ''')
129
 
130
 
 
151
 
152
  st.text('''
153
  Try arithmetic in latent space.
154
+
155
+ Here latent codes for each sentence are found and arithmetic is done with them.
156
+
157
+ Here it runs the sum `C + (B - A) = ?`
158
  ''')
159
 
160
 
info.py DELETED
@@ -1,5 +0,0 @@
1
-
2
- BACKGROUND = """
3
-
4
-
5
- """