Shahrokhpk committed on
Commit
5185adf
·
verified ·
1 Parent(s): c1e85af

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +30 -0
app.py ADDED
@@ -0,0 +1,30 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
"""Demo script: run the NTQAI/Nxcode-CQ-7B-orpo code model on a
HumanEval-style completion prompt and decode the generated continuation."""

from transformers import AutoModelForCausalLM, AutoTokenizer

# device_map="auto" lets accelerate place the weights (GPU when available),
# so no manual device variable or .to(device) call is needed.
model = AutoModelForCausalLM.from_pretrained(
    "NTQAI/Nxcode-CQ-7B-orpo",
    torch_dtype="auto",
    device_map="auto",
)
tokenizer = AutoTokenizer.from_pretrained("NTQAI/Nxcode-CQ-7B-orpo")

# NOTE: the prompt embeds a docstring delimited by triple *double* quotes, so
# the outer literal must use triple *single* quotes — wrapping it in `"""`
# (as the original did) is a SyntaxError.
prompt = '''Complete the following Python function:
from typing import List


def has_close_elements(numbers: List[float], threshold: float) -> bool:
    """ Check if in given list of numbers, are any two numbers closer to each other than
    given threshold.
    >>> has_close_elements([1.0, 2.0, 3.0], 0.5)
    False
    >>> has_close_elements([1.0, 2.8, 3.0, 4.0, 5.0, 2.0], 0.3)
    True
    """
'''

messages = [
    {"role": "user", "content": prompt},
]

# Render the chat template to input ids, moved to wherever the model lives.
inputs = tokenizer.apply_chat_template(
    messages,
    add_generation_prompt=True,
    return_tensors="pt",
).to(model.device)

# Greedy decoding: do_sample=False makes the original top_k/top_p arguments
# inert (they only apply when sampling), so they have been dropped.
outputs = model.generate(
    inputs,
    max_new_tokens=512,
    do_sample=False,
    num_return_sequences=1,
    eos_token_id=tokenizer.eos_token_id,
)

# Slice off the prompt tokens so only the newly generated text is decoded.
res = tokenizer.decode(outputs[0][len(inputs[0]):], skip_special_tokens=True)