pizzagatakasugi committed on
Commit
d1a91c6
β€’
1 Parent(s): 8717155

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +2 -2
app.py CHANGED
@@ -119,7 +119,7 @@ if num in [str(x) for x in list(range(10))]:
119
 
120
  #st.write(idx,"入力",input)
121
  with st.spinner("ζŽ¨θ«–δΈ­γ§γ™..."):
122
- input = kifs+"。"+best+"。"+best2
123
  tokenized_inputs = tokenizer.encode(
124
  input, max_length= 512, truncation=True,
125
  padding="max_length", return_tensors="pt"
@@ -136,7 +136,7 @@ if num in [str(x) for x in list(range(10))]:
136
  num_return_sequences = 5,
137
  )
138
  output_list = []
139
- st.write(input)
140
  for x in range(5):
141
  output_text = tokenizer.decode(output_ids[x], skip_special_tokens=True,
142
  clean_up_tokenization_spaces=False)
 
119
 
120
  #st.write(idx,"入力",input)
121
  with st.spinner("ζŽ¨θ«–δΈ­γ§γ™..."):
122
+ input = kifs+"γ€‚ζœ€ε–„ζ‰‹γ―"+best+"。欑善手は"+best2
123
  tokenized_inputs = tokenizer.encode(
124
  input, max_length= 512, truncation=True,
125
  padding="max_length", return_tensors="pt"
 
136
  num_return_sequences = 5,
137
  )
138
  output_list = []
139
+ # st.write(input)
140
  for x in range(5):
141
  output_text = tokenizer.decode(output_ids[x], skip_special_tokens=True,
142
  clean_up_tokenization_spaces=False)