import tensorflow as tf
from tensorflow import keras
import gradio as gr

def generate_text(model, temperature, start_string):
  # Character vocabulary: tab, newline, space, and the Arabic letters seen during training.
  char2idx={'\t': 0, '\n': 1, ' ': 2, 'ء': 3, 'آ': 4, 'أ': 5, 'ؤ': 6, 'إ': 7, 'ئ': 8, 'ا': 9, 'ب': 10, 'ة': 11, 'ت': 12, 'ث': 13, 'ج': 14, 'ح': 15, 'خ': 16, 'د': 17, 'ذ': 18, 'ر': 19, 'ز': 20, 'س': 21, 'ش': 22, 'ص': 23, 'ض': 24, 'ط': 25, 'ظ': 26, 'ع': 27, 'غ': 28, 'ف': 29, 'ق': 30, 'ك': 31, 'ل': 32, 'م': 33, 'ن': 34, 'ه': 35, 'و': 36, 'ى': 37, 'ي': 38}
  idx2char=['\t', '\n', ' ', 'ء', 'آ', 'أ', 'ؤ', 'إ', 'ئ', 'ا', 'ب', 'ة', 'ت',
       'ث', 'ج', 'ح', 'خ', 'د', 'ذ', 'ر', 'ز', 'س', 'ش', 'ص', 'ض', 'ط',
       'ظ', 'ع', 'غ', 'ف', 'ق', 'ك', 'ل', 'م', 'ن', 'ه', 'و', 'ى', 'ي']
  # Evaluation step (generating text using the learned model)

  # Number of characters to generate
  num_generate = 1000

  # Converting our start string to numbers (vectorizing)
  input_eval = [char2idx[s] for s in start_string]
  input_eval = tf.expand_dims(input_eval, 0)

  # Empty string to store our results
  text_generated = []

  # Low temperatures result in more predictable text.
  # Higher temperatures result in more surprising text.
  # Experiment to find the best setting.

  # Here batch size == 1
  model.reset_states()
  for i in range(num_generate):
      predictions = model(input_eval)
      # remove the batch dimension
      predictions = tf.squeeze(predictions, 0)

      # use a categorical distribution to sample the next character returned by the model
      predictions = predictions / temperature
      predicted_id = tf.random.categorical(predictions, num_samples=1)[-1,0].numpy()
      
      input_eval = tf.expand_dims([predicted_id], 0)

      text_generated.append(idx2char[predicted_id])

  return (start_string + ''.join(text_generated))

# Load the trained character-level GRU model saved in HDF5 format.
reconstructed_model = keras.models.load_model("poems_generation_GRU (1).h5")

def generate_poem(start, temperature):
    return generate_text(reconstructed_model, temperature, start_string=start)

iface = gr.Interface(
    fn=generate_poem,
    inputs=["text", gr.Slider(0, 1, value=1, label="Temperature")],
    outputs=["text"],
    examples=[['حبيبتى ليلى']],
)
iface.launch()
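
# A minimal sketch (not part of the original app) for trying the generator
# without the Gradio UI; it assumes the model has been loaded above as
# reconstructed_model. Left commented out because iface.launch() blocks.
# print(generate_poem("حبيبتى ليلى", 0.8))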