Anupam251272 committed
Commit ea19bf1 · verified · 1 Parent(s): 014b718

Create app.py

Files changed (1)
  1. app.py +87 -0
app.py ADDED
@@ -0,0 +1,87 @@
+ import gradio as gr
+ import random
+ import torch
+ from datasets import load_dataset
+ from transformers import AutoTokenizer, T5ForConditionalGeneration  # Changed model class
+
+ # Check if a GPU is available
+ device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
+ print(f"Using device: {device}")
+
+ # Load the NuminaMath-CoT dataset (full train split; the download is sizeable)
+ ds = load_dataset("AI-MO/NuminaMath-CoT")
+
+ # Load model and tokenizer
+ model_name = "google/flan-t5-base"
+ tokenizer = AutoTokenizer.from_pretrained(model_name)
+ model = T5ForConditionalGeneration.from_pretrained(model_name).to(device)  # Changed model class
+
+ def process_example(example):
+     """Format a single example from the dataset"""
+     # NuminaMath-CoT rows expose 'problem' and 'solution' columns (there is no separate 'answer' field)
+     problem = example['problem']
+     solution = example['solution']
+     return f"Problem: {problem}\nSolution: {solution}"
+
+ def get_random_example():
+     """Get a random example from the dataset"""
+     idx = random.randint(0, len(ds['train']) - 1)
+     return process_example(ds['train'][idx])
+
+ def solve_math_problem(question):
+     """Generate a solution for a given math problem"""
+     # Add a task prefix for T5
+     input_text = "solve math: " + question
+     inputs = tokenizer(input_text, return_tensors="pt", max_length=512, truncation=True).to(device)
+
+     # Generate a response (sampling with temperature and nucleus filtering)
+     outputs = model.generate(
+         **inputs,
+         max_length=200,
+         num_return_sequences=1,
+         temperature=0.7,
+         do_sample=True,
+         top_p=0.9,
+     )
+
+     response = tokenizer.decode(outputs[0], skip_special_tokens=True)
+     return response
+
+ # Create the Gradio interface
+ with gr.Blocks() as demo:
+     gr.Markdown("# Math Problem Solver")
+     gr.Markdown("Using the FLAN-T5 model to solve mathematical problems with step-by-step solutions.")
+
+     with gr.Row():
+         with gr.Column():
+             input_text = gr.Textbox(
+                 label="Enter your math problem",
+                 placeholder="Type your math problem here...",
+                 lines=3
+             )
+             with gr.Row():
+                 submit_btn = gr.Button("Solve Problem", variant="primary")
+                 example_btn = gr.Button("Show Random Example")
+
+         with gr.Column():
+             output_text = gr.Textbox(
+                 label="Solution",
+                 lines=8,
+                 show_copy_button=True
+             )
+
+     # Set up event handlers
+     submit_btn.click(
+         fn=solve_math_problem,
+         inputs=input_text,
+         outputs=output_text
+     )
+
+     example_btn.click(
+         fn=get_random_example,
+         inputs=None,
+         outputs=input_text
+     )
+
+ # Launch the interface
+ demo.launch(share=True)
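
Usage note (not part of the commit, so treat the specifics as assumptions): running this app requires the packages it imports, roughly pip install gradio datasets torch transformers (plus sentencepiece if the slow T5 tokenizer is loaded); python app.py then starts the Gradio server, and share=True prints a temporary public link alongside the local URL. For a quick sanity check without the UI, the solver can be called directly, e.g. print(solve_math_problem("What is 12 * 7?")).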