Decorate the right function for ZeroGPU inference

#1
by fffiloni - opened
Files changed (1)
  1. app.py +1 -1
app.py CHANGED
@@ -5,7 +5,6 @@ from PIL import Image
 import spaces
 from transformers import MllamaForConditionalGeneration, AutoProcessor
 
-@spaces.GPU
 # Load the Llama 3.2 Vision Model
 def load_llama_model():
     model_id = "meta-llama/Llama-3.2-11B-Vision"
@@ -21,6 +20,7 @@ def load_llama_model():
     return model, processor
 
 # Function to generate predictions for text and image
+@spaces.GPU
 def process_input(text, image=None):
     model, processor = load_llama_model()
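
For context: on a ZeroGPU Space, a GPU is attached only while a function decorated with `@spaces.GPU` is executing, so the decorator belongs on the function that actually runs inference (`process_input`), not on the loader. Below is a minimal sketch of the resulting pattern; the `from_pretrained` arguments and the generation call are assumptions about a typical Mllama setup, not the Space's exact code.

```python
import spaces
import torch
from transformers import MllamaForConditionalGeneration, AutoProcessor

MODEL_ID = "meta-llama/Llama-3.2-11B-Vision"

# Plain loader -- no decorator; it only builds the model and processor objects.
def load_llama_model():
    model = MllamaForConditionalGeneration.from_pretrained(
        MODEL_ID, torch_dtype=torch.bfloat16, device_map="auto"
    )
    processor = AutoProcessor.from_pretrained(MODEL_ID)
    return model, processor

# ZeroGPU grants a GPU only while this decorated function is running,
# so the decorator goes on the inference entry point, not on the loader.
@spaces.GPU
def process_input(text, image=None):
    model, processor = load_llama_model()
    # Illustrative inference flow (assumed, not copied from app.py).
    inputs = processor(images=image, text=text, return_tensors="pt").to(model.device)
    output_ids = model.generate(**inputs, max_new_tokens=128)
    return processor.decode(output_ids[0], skip_special_tokens=True)
```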