AhmadT198 committed on
Commit
5293d29
·
1 Parent(s): dde3493

Putting the pipe outside - Trial

Browse files
Files changed (1) hide show
  1. app.py +4 -3
app.py CHANGED
@@ -7,6 +7,9 @@ import torch
7
  # Use a pipeline as a high-level helper
8
  from transformers import pipeline
9
 
 
 
 
10
 
11
  @spaces.GPU(duration=120)
12
  def llama3_1_8B(question):
@@ -22,9 +25,7 @@ def llama3_1_8B(question):
22
 
23
  else:
24
  print("CUDA is not available.")
25
- print("RUNNING PIPE")
26
- pipe = pipeline("text-generation", model="NousResearch/Hermes-3-Llama-3.1-8B", max_new_tokens=200, device=0)
27
-
28
  print("GATHERING RESPONSES")
29
  responses = pipe(messages)
30
 
 
7
  # Use a pipeline as a high-level helper
8
  from transformers import pipeline
9
 
10
+ print("RUNNING PIPE")
11
+ pipe = pipeline("text-generation", model="NousResearch/Hermes-3-Llama-3.1-8B", max_new_tokens=200, device=0)
12
+
13
 
14
  @spaces.GPU(duration=120)
15
  def llama3_1_8B(question):
 
25
 
26
  else:
27
  print("CUDA is not available.")
28
+
 
 
29
  print("GATHERING RESPONSES")
30
  responses = pipe(messages)
31