Version 0.15.1 #4
by milanszulc29 - opened
README.md CHANGED
@@ -9,4 +9,4 @@ app_file: app.py
 pinned: false
 ---
 
-Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
+Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
app.py CHANGED
@@ -2,11 +2,9 @@ import gradio as gr
 from transformers import AutoTokenizer, AutoModelForSeq2SeqLM, pipeline
 import torch
 
-
-
-model = AutoModelForSeq2SeqLM.from_pretrained("Jayyydyyy/m2m100_418m_tokipona").to(device)
+model = AutoModelForSeq2SeqLM.from_pretrained("Jayyydyyy/m2m100_418m_tokipona")
 tokenizer = AutoTokenizer.from_pretrained("facebook/m2m100_418M")
-
+device = "cuda:0" if torch.cuda.is_available() else "cpu"
 LANG_CODES = {
     "English":"en",
     "toki pona":"tl"
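With this change, device is computed from torch.cuda.is_available() after the model is loaded, and the model is no longer moved with .to(device). Below is a minimal sketch of how these pieces could be wired together, assuming the rest of app.py exposes a translate() helper to the Gradio UI; that helper, the pipeline wiring, and the sample sentence are illustrative, not taken from this diff.

# Minimal sketch, not part of this PR: the translate() helper and the sample
# call are assumptions about how the loaded model and tokenizer might be used.
import torch
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM, pipeline

device = "cuda:0" if torch.cuda.is_available() else "cpu"
model = AutoModelForSeq2SeqLM.from_pretrained("Jayyydyyy/m2m100_418m_tokipona")
tokenizer = AutoTokenizer.from_pretrained("facebook/m2m100_418M")

LANG_CODES = {
    "English": "en",
    "toki pona": "tl",  # toki pona reuses the "tl" code in this fine-tuned M2M100
}

# Build the pipeline once; M2M100 is multilingual, so the source and target
# language codes are supplied per call.
translator = pipeline("translation", model=model, tokenizer=tokenizer, device=device)

def translate(text, src_lang, tgt_lang):
    result = translator(
        text,
        src_lang=LANG_CODES[src_lang],
        tgt_lang=LANG_CODES[tgt_lang],
    )
    return result[0]["translation_text"]

print(translate("Hello, friend!", "English", "toki pona"))

Handing device to pipeline() lets the pipeline place the model itself, which would keep the GPU path working even though the explicit .to(device) call was dropped in this diff.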