Spaces:
Runtime error
Runtime error
Update app.py
Browse files
app.py
CHANGED
@@ -1,28 +1,39 @@
|
|
1 |
-
import gradio as gr
|
2 |
from huggingface_hub import from_pretrained_keras
|
3 |
-
|
|
|
4 |
|
5 |
-
#
|
6 |
-
|
7 |
|
8 |
-
#
|
9 |
-
|
|
|
10 |
|
11 |
-
#
|
12 |
-
|
13 |
-
|
14 |
-
inputs = tokenizer(text, return_tensors="tf", padding=True, truncation=True)
|
15 |
|
16 |
-
|
17 |
-
|
18 |
|
19 |
-
|
20 |
-
|
21 |
|
22 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
23 |
|
24 |
-
#
|
25 |
inputs = gr.inputs.Textbox(label="Enter Malayalam Text")
|
26 |
outputs = gr.outputs.Textbox(label="Transliteration to English")
|
27 |
-
interface = gr.Interface(
|
28 |
interface.launch()
|
|
|
|
|
from huggingface_hub import from_pretrained_keras, hf_hub_download
import gradio as gr
import json

# Hugging Face Hub repo that holds the Keras model and both tokenizer configs.
model_directory = "Bajiyo/Malayalam_transliteration"

# BUG FIX: the original did open(f"{model_directory}/source_tokenizer_config.json"),
# treating the Hub repo id as a local directory — that path does not exist in the
# Space container, so startup failed with FileNotFoundError. Download the files
# from the Hub instead; hf_hub_download returns a local cached path, so the
# module-level *_path names keep their original meaning (a readable file path).
source_tokenizer_config_path = hf_hub_download(model_directory, "source_tokenizer_config.json")
target_tokenizer_config_path = hf_hub_download(model_directory, "target_tokenizer_config.json")

# Load tokenizer configurations (plain JSON dicts).
with open(source_tokenizer_config_path, "r") as source_config_file:
    source_tokenizer_config = json.load(source_config_file)

with open(target_tokenizer_config_path, "r") as target_config_file:
    target_tokenizer_config = json.load(target_config_file)

# Load the model from Hugging Face. Consistency fix: reuse model_directory
# instead of repeating the repo-id literal a second time.
model = from_pretrained_keras(model_directory)
def transliterate(input_text):
    """Transliterate Malayalam *input_text* to English with the loaded model.

    Parameters
    ----------
    input_text : str
        Malayalam text typed into the Gradio textbox.

    Returns
    -------
    str
        The predicted English transliteration ("" for empty input).

    Reads the module-level ``model``, ``source_tokenizer_config`` and
    ``target_tokenizer_config`` loaded above.
    """
    # BUG FIX: the original body called tokenize_input() and
    # post_process_predictions(), neither of which is defined anywhere in
    # this file, so every request raised NameError (the Space's
    # "Runtime error"). The private helper below supplies a character-level
    # encode/decode built from the saved tokenizer configs.
    if not input_text:
        return ""

    import numpy as np  # local import: keeps the module header untouched

    # Encode: one integer id per character; unknown characters map to 0,
    # the index Keras reserves for padding.
    char_to_id = _tokenizer_word_index(source_tokenizer_config)
    encoded = np.array([[char_to_id.get(ch, 0) for ch in input_text]])

    # Make predictions using the model.
    predictions = model.predict(encoded)

    # Decode: argmax over the vocabulary axis, then map ids back to characters.
    id_to_char = {i: ch for ch, i in _tokenizer_word_index(target_tokenizer_config).items()}
    ids = np.argmax(predictions[0], axis=-1)
    return "".join(id_to_char.get(int(i), "") for i in ids).strip()


def _tokenizer_word_index(tokenizer_config):
    """Return the token -> integer-id mapping from a saved Keras Tokenizer config.

    Keras' ``Tokenizer.to_json()`` stores ``word_index`` as a JSON-encoded
    string inside a nested ``config`` dict; handle both that layout and an
    already-decoded plain dict. NOTE(review): assumes the
    *_tokenizer_config.json files were produced by ``Tokenizer.to_json()`` —
    confirm against the actual files in the repo.
    """
    config = tokenizer_config.get("config", tokenizer_config)
    word_index = config.get("word_index", {})
    if isinstance(word_index, str):
        word_index = json.loads(word_index)
    return dict(word_index)
|
35 |
+
# Define Gradio interface
|
36 |
inputs = gr.inputs.Textbox(label="Enter Malayalam Text")
|
37 |
outputs = gr.outputs.Textbox(label="Transliteration to English")
|
38 |
+
interface = gr.Interface(transliterate, inputs, outputs, title="Malayalam to English Transliteration")
|
39 |
interface.launch()
|