Spaces:
Runtime error
Update app.py
app.py CHANGED
@@ -1,25 +1,8 @@
-
-
+# Use a pipeline as a high-level helper
+from transformers import pipeline
 
-model = AutoModelForCausalLM.from_pretrained(
-    "NTQAI/Nxcode-CQ-7B-orpo",
-    torch_dtype="auto",
-    device_map="auto"
-)
-tokenizer = AutoTokenizer.from_pretrained("NTQAI/Nxcode-CQ-7B-orpo")
-
-prompt = """Complete the following Python function:
-from typing import List
-
-
-def has_close_elements(numbers: List[float], threshold: float) -> bool:
-
-
-"""
 messages = [
-    {"role": "user", "content":
+    {"role": "user", "content": "Who are you?"},
 ]
-
-
-outputs = model.generate(inputs, max_new_tokens=512, do_sample=True, top_k=50, num_return_sequences=1, eos_token_id=tokenizer.eos_token_id)
-res = tokenizer.decode(outputs[0][len(inputs[0]):], skip_special_tokens=True)
+pipe = pipeline("text-generation", model="defog/llama-3-sqlcoder-8b")
+pipe(messages)
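
The removed code follows the standard transformers generate() flow for NTQAI/Nxcode-CQ-7B-orpo. Below is a minimal runnable sketch of that flow; the import line, the `prompt` argument in the chat message, and the `apply_chat_template` step that produces `inputs` are assumptions, since those lines are not visible in the diff above:

# Minimal sketch of the removed generate() flow. The import, the prompt
# argument in the chat message, and the apply_chat_template step are
# assumptions -- they do not appear in the diff above.
from transformers import AutoModelForCausalLM, AutoTokenizer

model = AutoModelForCausalLM.from_pretrained(
    "NTQAI/Nxcode-CQ-7B-orpo",
    torch_dtype="auto",
    device_map="auto",
)
tokenizer = AutoTokenizer.from_pretrained("NTQAI/Nxcode-CQ-7B-orpo")

prompt = """Complete the following Python function:
from typing import List


def has_close_elements(numbers: List[float], threshold: float) -> bool:
"""

messages = [
    {"role": "user", "content": prompt}  # assumed: this line is truncated in the diff
]

# Assumed tokenization step: the removed code uses `inputs` without showing
# where it is built.
inputs = tokenizer.apply_chat_template(
    messages, add_generation_prompt=True, return_tensors="pt"
).to(model.device)

outputs = model.generate(
    inputs,
    max_new_tokens=512,
    do_sample=True,
    top_k=50,
    num_return_sequences=1,
    eos_token_id=tokenizer.eos_token_id,
)
res = tokenizer.decode(outputs[0][len(inputs[0]):], skip_special_tokens=True)
print(res)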
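
The replacement app.py is the generic "use a pipeline as a high-level helper" snippet pointed at defog/llama-3-sqlcoder-8b. A sketch of how its output would typically be read, assuming a recent transformers version where chat-style input returns the conversation under "generated_text"; the max_new_tokens cap is an added assumption:

from transformers import pipeline

pipe = pipeline("text-generation", model="defog/llama-3-sqlcoder-8b")
messages = [
    {"role": "user", "content": "Who are you?"},
]
out = pipe(messages, max_new_tokens=128)  # assumed optional cap on generation length
# With chat-style input, recent transformers versions return the conversation,
# including the assistant reply, under "generated_text".
print(out[0]["generated_text"][-1]["content"])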