Tonic committed on
Commit 7b7377a · verified · 1 Parent(s): ce60cd6

Update app.py

Files changed (1)
  app.py  +8 -5
app.py CHANGED
@@ -49,12 +49,15 @@ def last_token_pool(last_hidden_states: Tensor, attention_mask: Tensor) -> Tensor:
 
 @spaces.GPU
 def compute_embeddings(selected_task, *input_texts):
-    max_length = 2042
-    if selected_task:
-        task = tasks[selected_task]
-        processed_texts = [f'Instruct: {task}\nQuery: {text}' for text in input_texts]
+
+    if selected_task == "None":
+        # Use the system prompt if 'None' is selected
+        processed_texts = [f'Instruct: {system_prompt}\nQuery: {input_text}']
     else:
-        processed_texts = [f'Instruct: {system_prompt}\nQuerry: {text}' for text in input_texts]
+        # Use the task description from the tasks dictionary
+        task_description = tasks[selected_task]
+        processed_texts = [f'Instruct: {task_description}\nQuery: {input_text}']
+
     task = tasks[selected_task]
     batch_dict = tokenizer(processed_texts, max_length=max_length - 1, return_attention_mask=False, padding=False, truncation=True)
     batch_dict['input_ids'] = [input_ids + [tokenizer.eos_token_id] for input_ids in batch_dict['input_ids']]
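
For context on what the updated hunk does, the sketch below reproduces its preprocessing path as a self-contained function. Everything defined above the function (system_prompt, tasks, max_length, the tokenizer and its model id) is a placeholder standing in for values that app.py defines outside this hunk; the sketch also loops over *input_texts where the hunk formats a single input_text, and the final tokenizer.pad step is an assumption about how the padded batch is produced further down in the file. Treat it as an illustration of the pattern, not the Space's actual code.

from transformers import AutoTokenizer

# --- Placeholders for values defined elsewhere in app.py (assumptions) ---
system_prompt = "Given a query, retrieve relevant passages."                   # placeholder text
tasks = {"Retrieval": "Given a web search query, retrieve relevant passages"}  # placeholder mapping
max_length = 2048                                                              # the old hunk hard-coded 2042
tokenizer = AutoTokenizer.from_pretrained("intfloat/e5-mistral-7b-instruct")   # placeholder model id


def build_batch(selected_task, *input_texts):
    # Pick the instruction: the system prompt when "None" is selected,
    # otherwise the description looked up in the tasks dictionary.
    if selected_task == "None":
        processed_texts = [f"Instruct: {system_prompt}\nQuery: {text}" for text in input_texts]
    else:
        task_description = tasks[selected_task]
        processed_texts = [f"Instruct: {task_description}\nQuery: {text}" for text in input_texts]

    # Tokenize without padding, reserving one position for a manually appended EOS token.
    batch_dict = tokenizer(
        processed_texts,
        max_length=max_length - 1,
        return_attention_mask=False,
        padding=False,
        truncation=True,
    )
    batch_dict["input_ids"] = [ids + [tokenizer.eos_token_id] for ids in batch_dict["input_ids"]]

    # Pad to a uniform length and return PyTorch tensors ready for the model.
    return tokenizer.pad(batch_dict, padding=True, return_attention_mask=True, return_tensors="pt")

Truncating to max_length - 1 and appending eos_token_id by hand keeps every sequence ending in EOS even after truncation, which is what a last-token pooling step such as last_token_pool relies on when it reads the final non-padding hidden state.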