Spaces: nmk
- app.py +41 -28
- requirements.txt +2 -1
app.py
CHANGED
@@ -1,34 +1,47 @@
-import
-import
-
-# Hub Model configuration. https://huggingface.co/models
-hub = {
-    'HF_MODEL_ID':'microsoft/speecht5_tts',
-    'HF_TASK':'text-to-speech'
-}
-
-# create Hugging Face Model Class
-huggingface_model = HuggingFaceModel(
-    transformers_version='4.26.0',
-    pytorch_version='1.13.1',
-    py_version='py39',
-    env=hub,
-    role=role,
-)
-
-#
-)
-
-})
+import gradio as gr
+import requests
+import os
+
+API_URL = "https://api-inference.huggingface.co/models/openai-gpt"
+API_TOKEN = os.environ.get("API_TOKEN")
+
+headers = {"Authorization": f"Bearer {API_TOKEN}"}
+
+# Function to translate code using the Hugging Face model API
+def translate_code(input_text, source_lang, target_lang):
+    payload = {
+        "inputs": f"convert the below {source_lang} code to {target_lang} code: {input_text}"
+    }
+
+    response = requests.post(API_URL, headers=headers, json=payload)
+    response_data = response.json()  # Store the entire response for inspection
+    print("API Response:", response_data)  # Print the response for inspection
+
+    # Extract the translated code from the response
+    translated_code = "No translation available"  # Default value
+
+    if response_data:
+        if isinstance(response_data, list) and len(response_data) > 0:
+            translated_code = response_data[0].get("generated_text", "").strip()
+
+    return translated_code
+
+# Interface for the Gradio app
+iface = gr.Interface(
+    fn=translate_code,
+    inputs=[
+        gr.inputs.Textbox(label="Enter code to translate"),
+        gr.inputs.Textbox(label="Source Language (e.g., English)"),
+        gr.inputs.Textbox(label="Target Language (e.g., German)")
+    ],
+    outputs=gr.outputs.Textbox(label="Translated Code"),
+    title="Code Translator",
+    description="Translate code snippets between programming languages"
+)
+
+# Launch the Gradio app
+iface.launch()
requirements.txt
CHANGED
@@ -1 +1,2 @@
-
+gradio
+requests
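
Note on the new app.py: it uses the Gradio 3.x component namespaces (gr.inputs.Textbox, gr.outputs.Textbox), which were removed in Gradio 4, and requirements.txt does not pin a version. Below is a minimal sketch of the same app against the current gr.Textbox API; the endpoint, prompt, and interface labels are taken from the commit, while the component names and the compacted response handling are adjustments rather than part of the committed code. Pinning gradio<4 in requirements.txt would be the alternative to porting.

import os

import gradio as gr
import requests

API_URL = "https://api-inference.huggingface.co/models/openai-gpt"
API_TOKEN = os.environ.get("API_TOKEN", "")

headers = {"Authorization": f"Bearer {API_TOKEN}"}


def translate_code(input_text, source_lang, target_lang):
    # Same prompt and endpoint as the committed app.py.
    payload = {
        "inputs": f"convert the below {source_lang} code to {target_lang} code: {input_text}"
    }
    response = requests.post(API_URL, headers=headers, json=payload)
    data = response.json()

    # The Inference API returns a list of {"generated_text": ...} dicts for
    # text-generation models; anything else falls through to the default.
    if isinstance(data, list) and data:
        return data[0].get("generated_text", "").strip()
    return "No translation available"


# Gradio 4.x: gr.Textbox replaces gr.inputs.Textbox / gr.outputs.Textbox.
iface = gr.Interface(
    fn=translate_code,
    inputs=[
        gr.Textbox(label="Enter code to translate"),
        gr.Textbox(label="Source Language (e.g., English)"),
        gr.Textbox(label="Target Language (e.g., German)"),
    ],
    outputs=gr.Textbox(label="Translated Code"),
    title="Code Translator",
    description="Translate code snippets between programming languages",
)

iface.launch()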