import gradio as gr
from transformers import AutoModelForSeq2SeqLM, AutoTokenizer

# Load the model and tokenizer
model_name = "google/flan-t5-base"  # Free LLM from Hugging Face
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForSeq2SeqLM.from_pretrained(model_name)

def solve_math_problem(problem):
    # Ask the model explicitly for a step-by-step solution
    prompt = "Solve the following maths problem step by step: " + problem
    inputs = tokenizer.encode(prompt, return_tensors="pt")
    outputs = model.generate(inputs, max_length=500)
    result = tokenizer.decode(outputs[0], skip_special_tokens=True)
    # Label the model output as the step-by-step solution
    steps = "Step-by-Step: " + result
    return steps

# Gradio interface
iface = gr.Interface(
    fn=solve_math_problem,
    inputs="text",
    outputs="text",
    title="Maths Step-by-Step Solver with LLM",
    description="Enter a maths problem and get a step-by-step solution using an LLM."
)

iface.launch()
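
# Optional sanity check (a minimal sketch, not part of the app itself): call the
# solver function directly in a REPL or notebook before launching the UI. The
# example problem below is only an illustration; flan-t5-base is a small model,
# so its answers may be brief or imperfect.
#
#   print(solve_math_problem("A shop sells pens at 3 for $2. How much do 12 pens cost?"))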