tykiww committed on
Commit
6db0299
·
verified ·
1 Parent(s): d10012f

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +16 -1
app.py CHANGED
@@ -31,6 +31,21 @@ Given the question, chart analysis output answer and additional context, come up
31
  """
32
 
33
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
34
  @spaces.GPU
35
  def predict(image, input_text, input_context):
36
  device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
@@ -44,7 +59,7 @@ def predict(image, input_text, input_context):
44
  prompt_length = inputs['input_ids'].shape[1]
45
 
46
  # Generate chart text
47
- generate_ids = model.generate(image=image, input_text=input_text, max_new_tokens=512)
48
  output_text = processor.batch_decode(generate_ids[:, prompt_length:], skip_special_tokens=True, clean_up_tokenization_spaces=False)[0]
49
 
50
  # Generate next layer prompt (WIP)
 
31
  """
32
 
33
 
34
+
35
def function_that_does_something(**kwargs):
    """Forward the first two keyword arguments to ``function_of_interest``.

    Keyword arguments preserve the caller's order (guaranteed since
    Python 3.7), so "first two" means the first two keywords supplied.

    Returns:
        tuple: ``(result, three)`` where ``result`` is the return value of
        ``function_of_interest`` called with the first two keyword
        arguments, and ``three`` is the value of the ``'three'`` keyword
        (or 3 when the caller did not pass one).
    """
    # Materialize the keyword pairs once so we can slice them positionally.
    pairs = list(kwargs.items())
    head = {key: value for key, value in pairs[:2]}

    # 'three' is read from the full kwargs, independent of the slice above.
    three = kwargs.get('three', 3)

    # Delegate to the function of interest with only the leading pair of
    # keywords; everything else is dropped.
    outcome = function_of_interest(**head)

    return outcome, three
47
+
48
+
49
  @spaces.GPU
50
  def predict(image, input_text, input_context):
51
  device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
 
59
  prompt_length = inputs['input_ids'].shape[1]
60
 
61
  # Generate chart text
62
+ generate_ids = model.generate(**inputs, max_new_tokens=512)
63
  output_text = processor.batch_decode(generate_ids[:, prompt_length:], skip_special_tokens=True, clean_up_tokenization_spaces=False)[0]
64
 
65
  # Generate next layer prompt (WIP)