EnariGmbH committed on
Commit 3c1dae0
1 Parent(s): 369a189

Update handler.py

Files changed (1)
handler.py +6 -6
handler.py CHANGED
@@ -38,20 +38,20 @@ class EndpointHandler:
         # Extract inputs from the data dictionary
         clip = data.get("clip")
         prompt = data.get("prompt")
-
+
         if clip is None or prompt is None:
             return [{"error": "Missing 'clip' or 'prompt' in input data"}]
-
+
         # Prepare the inputs for the model
         inputs_video = self.processor(text=prompt, videos=clip, padding=True, return_tensors="pt").to(self.model.device)
-
+
         # Generate output from the model
         generate_kwargs = {"max_new_tokens": 512, "do_sample": True, "top_p": 0.9}
         output = self.model.generate(**inputs_video, **generate_kwargs)
         generated_text = self.processor.batch_decode(output, skip_special_tokens=True)
-
+
         # Extract the relevant part of the assistant's answer
         assistant_answer_start = generated_text[0].find("ASSISTANT:") + len("ASSISTANT:")
         assistant_answer = generated_text[0][assistant_answer_start:].strip()
-
-        return assistant_answer
+
+        return [{"generated_text": assistant_answer}]