BeveledCube committed · Commit a89ce24
1 Parent(s): 5517f9c
Update main.py
main.py CHANGED

@@ -1,8 +1,15 @@
+import os
 from flask import Flask, request, jsonify
 from transformers import GPT2LMHeadModel, GPT2Tokenizer
 import torch
 
 app = Flask("Response API")
+size = "small"
+# microsoft/DialoGPT-small
+# microsoft/DialoGPT-medium
+# microsoft/DialoGPT-large
+
+os.environ["TRANSFORMERS_CACHE"] = r"./cache"
 
 # Load the Hugging Face GPT-2 model and tokenizer
 model = GPT2LMHeadModel.from_pretrained("microsoft/DialoGPT-medium")
@@ -28,4 +35,4 @@ def receive_data():
     print("Answered with:", answer_data)
     return jsonify(answer_data)
 
-app.run(debug=False, port=7860)
+app.run(debug=False, port=7860)
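
Note that the new size variable is defined but not yet used in this hunk: the checkpoint is still hard-coded to microsoft/DialoGPT-medium, while the added comments list the three available sizes. A minimal sketch (hypothetical, not part of this commit) of how size could drive the checkpoint selection:

# Hypothetical follow-up sketch, not part of this commit: derive the DialoGPT
# checkpoint name from the `size` variable instead of hard-coding "medium".
import os

# Set the cache directory before importing transformers, so we do not depend on
# when the library reads the variable.
os.environ["TRANSFORMERS_CACHE"] = r"./cache"

from transformers import GPT2LMHeadModel, GPT2Tokenizer

size = "small"  # one of: "small", "medium", "large"
model_name = f"microsoft/DialoGPT-{size}"

# DialoGPT is GPT-2 based, so the same classes used in main.py apply here.
tokenizer = GPT2Tokenizer.from_pretrained(model_name)
model = GPT2LMHeadModel.from_pretrained(model_name)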