"""Flask app that serves behavioral-interview feedback from a fine-tuned GPT-2 model.

Routes:
    GET  /              -> render the answer-entry form (behavioral.html)
    POST /submit_answer -> generate model feedback for the submitted answer
"""

import torch
from flask import Flask, render_template, request
from transformers import GPT2LMHeadModel, GPT2Tokenizer

app = Flask(__name__)

# Load the fine-tuned model and tokenizer once at startup (loading per-request
# would be prohibitively slow).
model = GPT2LMHeadModel.from_pretrained("./fine_tuned_model")
tokenizer = GPT2Tokenizer.from_pretrained("./fine_tuned_model")

# GPT-2 ships with no pad token; without this, tokenizing with padding=True
# raises "Asking to pad but the tokenizer does not have a padding token".
if tokenizer.pad_token is None:
    tokenizer.pad_token = tokenizer.eos_token

# Inference-only server: disable dropout etc. once at startup.
model.eval()


def generate_feedback(user_input: str) -> str:
    """Generate feedback text for *user_input* using the fine-tuned model.

    Args:
        user_input: The candidate's free-text answer.

    Returns:
        The decoded model output (special tokens stripped).
    """
    inputs = tokenizer(
        user_input,
        return_tensors="pt",
        truncation=True,
        padding=True,
        max_length=512,
    )
    # no_grad: avoid building autograd graphs for pure inference.
    # attention_mask + pad_token_id: required for correct generation when the
    # input is padded, and silences the corresponding transformers warnings.
    with torch.no_grad():
        outputs = model.generate(
            inputs["input_ids"],
            attention_mask=inputs["attention_mask"],
            pad_token_id=tokenizer.eos_token_id,
            max_length=150,
            num_return_sequences=1,
        )
    return tokenizer.decode(outputs[0], skip_special_tokens=True)


@app.route('/')
def index():
    """Render the behavioral-interview answer form."""
    return render_template('behavioral.html')


@app.route('/submit_answer', methods=['POST'])
def submit_answer():
    """Handle a submitted answer and render the generated feedback."""
    # .get avoids an opaque 400 BadRequest when the field is missing; the form
    # is expected to name its input field 'answer'.
    user_input = request.form.get('answer', '').strip()
    if not user_input:
        return render_template(
            'behavioral.html',
            feedback="Please enter an answer before submitting.",
        )

    feedback = generate_feedback(user_input)
    return render_template('behavioral.html', feedback=feedback)


if __name__ == '__main__':
    # NOTE(security): debug=True enables the Werkzeug interactive debugger,
    # which allows arbitrary code execution — never use in production.
    app.run(debug=True)