Update interface.py
interface.py +3 -2
interface.py CHANGED
@@ -12,14 +12,14 @@ from transformers import AutoTokenizer, AutoModelForCausalLM
 from sympy import symbols, sympify, lambdify
 import copy
 from config import DEVICE, MODEL_PATH, MAX_LENGTH, TEMPERATURE
+from decorators import spaces  # Import the spaces class
 
 device = DEVICE
 model_path = MODEL_PATH
 tokenizer = AutoTokenizer.from_pretrained(model_path)
 model = AutoModelForCausalLM.from_pretrained(model_path).to(device).eval()
 
-
-@spaces.GPU(duration=100)
+@spaces.GPU(duration=100)  # Apply the GPU decorator
 def generate_analysis(prompt, max_length=MAX_LENGTH):
     try:
         input_ids = tokenizer.encode(prompt, return_tensors='pt').to(device)
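For context on the entry point being decorated here: the body of generate_analysis past the encode() call is not part of this hunk, so the following is only a minimal sketch, assuming a standard transformers generate() call driven by the MAX_LENGTH and TEMPERATURE values imported from config. The sampling arguments and the error message are assumptions, not code taken from interface.py.

import torch  # torch is already a dependency of the AutoModelForCausalLM usage above

@spaces.GPU(duration=100)
def generate_analysis(prompt, max_length=MAX_LENGTH):
    # Sketch only: the real body beyond the encode() call is outside this diff.
    try:
        input_ids = tokenizer.encode(prompt, return_tensors='pt').to(device)
        with torch.no_grad():
            output_ids = model.generate(
                input_ids,
                max_length=max_length,
                temperature=TEMPERATURE,
                do_sample=True,
            )
        return tokenizer.decode(output_ids[0], skip_special_tokens=True)
    except Exception as exc:
        return f"Generation failed: {exc}"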
@@ -50,6 +50,7 @@ def parse_bounds(bounds_str, num_params):
     upper_bounds = [np.inf] * num_params
     return lower_bounds, upper_bounds
 
+@spaces.GPU(duration=100)
 def process_and_plot(
     file,
     biomass_eq1, biomass_eq2, biomass_eq3,
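Both GPU-bound entry points (generate_analysis and process_and_plot) now carry @spaces.GPU(duration=100), but the decorators module the commit imports from is not included in this diff. On a ZeroGPU Space the decorator would normally come from the installed spaces package, so the local module presumably re-exports or shims that API. A hypothetical sketch of such a shim follows; the no-op fallback behaviour is an assumption, not code from this repository.

# decorators.py -- hypothetical sketch; this file is not part of the diff.
import functools

try:
    import spaces  # the real ZeroGPU package, present when running on a Space
except ImportError:
    class _NoOpSpaces:
        @staticmethod
        def GPU(duration=60):
            # Stand-in for spaces.GPU: returns the function unchanged so the
            # app still runs on machines without the ZeroGPU runtime.
            def decorator(func):
                @functools.wraps(func)
                def wrapper(*args, **kwargs):
                    return func(*args, **kwargs)
                return wrapper
            return decorator

    spaces = _NoOpSpaces()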