# Flask service exposing a fine-tuned image-classification model
# (deployed as a Hugging Face Spaces app; see app.run at the bottom).
from flask import Flask, request
from transformers import AutoModelForImageClassification
from transformers import AutoImageProcessor
from PIL import Image
from io import BytesIO
import os
import torch
# Module-level setup: creating the Flask app and loading the model happen
# once, at import time.
app = Flask(__name__)

# Fine-tuned classifier loaded from a local checkpoint directory;
# assumes './myModel' ships alongside this script — TODO confirm deploy layout.
model = AutoModelForImageClassification.from_pretrained(
    './myModel')

# Preprocessor (resize/normalize) for the ViT base checkpoint — presumably
# the backbone './myModel' was fine-tuned from; verify they match.
image_processor = AutoImageProcessor.from_pretrained(
    "google/vit-base-patch16-224-in21k")
@app.route('/upload_image', methods=['POST'])
def upload_image():
    """Classify an uploaded image and return the predicted label string.

    Expects a multipart/form-data POST with the file under the key
    'image'. Returns the label from the model's id2label mapping, or an
    explicit 400 response when no image part is attached.
    """
    image_file = request.files.get('image')
    if image_file is None:
        # Explicit 400 instead of relying on the KeyError raised by
        # request.files['image'] when the client omits the part.
        return "missing 'image' file in request", 400

    # Normalize to RGB: palette/grayscale/RGBA uploads would otherwise
    # reach the processor with an unexpected channel count.
    image = Image.open(image_file.stream).convert("RGB")

    inputs = image_processor(image, return_tensors="pt")
    with torch.no_grad():  # inference only — skip autograd bookkeeping
        logits = model(**inputs).logits
    predicted_label = logits.argmax(-1).item()
    disease = model.config.id2label[predicted_label]
    return disease
@app.route('/', methods=['GET'])
def hi():
    """Root endpoint: simple landing/liveness response identifying the app."""
    banner = "NAPTAH Mobile Application"
    return banner
app.run(host='0.0.0.0', port=7860) |