Fraser committed on
Commit 51051f5 • 1 Parent(s): 0997afc
Files changed (1)
  1. app.py +7 -7
app.py CHANGED
@@ -15,7 +15,7 @@ st.set_page_config(
 
 st.title('T5-VAE 🙂😐🙁')
 
-st.text('''
+st.markdown('''
 This is a variational autoencoder trained on text.
 
 It allows interpolating on text at a high level, try it out!
@@ -23,7 +23,7 @@ It allows interpolating on text at a high level, try it out!
 See how it works [here](http://fras.uk/ml/large%20prior-free%20models/transformer-vae/2020/08/13/Transformers-as-Variational-Autoencoders.html).
 ''')
 
-st.text('''
+st.markdown('''
 ### t5-vae-python
 
 This model is trained on lines of Python code from GitHub ([dataset](https://huggingface.co/datasets/Fraser/python-lines).
@@ -111,7 +111,7 @@ def decode(cnt, ratio, txt_1, txt_2):
 
 in_1 = st.text_input("A line of Python code.", "x = a - 1")
 in_2 = st.text_input("Another line of Python code.", "x = a + 10 * 2")
-r = st.slider('Interpolation Ratio', min_value=0.0, max_value=1.0, value=0.5)
+r = st.slider('Python Interpolation Ratio', min_value=0.0, max_value=1.0, value=0.5)
 container = st.empty()
 container.write('Loading...')
 out = decode(container, r, in_1, in_2)
@@ -119,7 +119,7 @@ container.empty()
 st.write(out)
 
 
-st.text('''
+st.markdown('''
 ### t5-vae-wiki
 
 This model is trained on just 5% of the sentences on wikipedia.
@@ -141,7 +141,7 @@ model, tokenizer = get_wiki_model()
 
 in_1 = st.text_input("A sentence.", "Children are looking for the water to be clear.")
 in_2 = st.text_input("Another sentence.", "There are two people playing soccer.")
-r = st.slider('Interpolation Ratio', min_value=0.0, max_value=1.0, value=0.5)
+r = st.slider('English Interpolation Ratio', min_value=0.0, max_value=1.0, value=0.5)
 container = st.empty()
 container.write('Loading...')
 out = decode(r, in_1, in_2)
@@ -149,7 +149,7 @@ container.empty()
 st.write(out)
 
 
-st.text('''
+st.markdown('''
 Try arithmetic in latent space.
 
 Here latent codes for each sentence are found and arithmetic is done with them.
@@ -173,7 +173,7 @@ in_a = st.text_input("A", "Children are looking for the water to be clear.")
 in_b = st.text_input("B", "There are two people playing soccer.")
 in_c = st.text_input("C", "Children are looking for the water to be clear.")
 
-st.text('''
+st.markdown('''
 A is to B as C is to...
 ''')
 container = st.empty()
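
Note on the change: switching these blocks from st.text to st.markdown lets the '###' headings and the links render as formatted Markdown instead of raw characters, and giving the two sliders distinct labels keeps their auto-generated widget IDs unique, which avoids Streamlit's DuplicateWidgetID error for widgets created with identical parameters. A minimal sketch of an alternative fix, keeping the visible label the same but passing an explicit key (the key argument is standard Streamlit; the variable names here are illustrative):

import streamlit as st

# Two sliders with identical labels and parameters would normally collide;
# a unique `key` disambiguates the widgets without changing the visible label.
python_ratio = st.slider('Interpolation Ratio', min_value=0.0, max_value=1.0,
                         value=0.5, key='python_ratio')
wiki_ratio = st.slider('Interpolation Ratio', min_value=0.0, max_value=1.0,
                       value=0.5, key='wiki_ratio')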
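
The interpolation and "A is to B as C is to..." demos described in the markdown blocks both reduce to arithmetic on the VAE's latent codes: blend two codes for interpolation, or apply the offset between A and B to C for the analogy. A rough sketch of that arithmetic on plain tensors (the latent size of 32 and the function names are illustrative stand-ins, not the app's actual decode helpers):

import torch

def interpolate_latents(z_1: torch.Tensor, z_2: torch.Tensor, ratio: float) -> torch.Tensor:
    # Linear blend of two latent codes: ratio=0.0 gives z_1, ratio=1.0 gives z_2.
    return (1.0 - ratio) * z_1 + ratio * z_2

def latent_analogy(z_a: torch.Tensor, z_b: torch.Tensor, z_c: torch.Tensor) -> torch.Tensor:
    # "A is to B as C is to D": move C by the offset that takes A to B.
    return z_c + (z_b - z_a)

z_1, z_2 = torch.randn(32), torch.randn(32)  # stand-ins for encoder outputs
z_mid = interpolate_latents(z_1, z_2, 0.5)   # the model would decode this back to text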