testing vscode
app.py CHANGED
@@ -20,7 +20,7 @@ pipe = pipeline("text-generation", "meta-llama/Meta-Llama-3-8B-Instruct", torch_
 def generate(prompt):
     response = pipe(prompt, max_new_tokens=512)
     # r = response[0]['generated_text'][-1]['content']
-    print(f'
+    print(f'Response received!')
     r = response[0]['generated_text']
     return r
 
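
For context, a minimal sketch of what app.py might look like after this change, assuming pipe is the transformers text-generation pipeline named in the hunk header; the torch_dtype value and the usage at the bottom are assumptions, since the header is truncated after "torch_" and the rest of the file is not shown.

import torch
from transformers import pipeline

# Assumed setup: the hunk header is cut off after "torch_", so bfloat16 is a guess.
pipe = pipeline(
    "text-generation",
    "meta-llama/Meta-Llama-3-8B-Instruct",
    torch_dtype=torch.bfloat16,
)

def generate(prompt):
    # Run the model, then log that a response came back (the print added in this commit).
    response = pipe(prompt, max_new_tokens=512)
    # r = response[0]['generated_text'][-1]['content']
    print(f'Response received!')
    r = response[0]['generated_text']
    return r

# Hypothetical usage, for illustration only:
if __name__ == "__main__":
    print(generate("Hello, Llama!"))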