NicoNico committed on
Commit
c5948cb
·
verified ·
1 Parent(s): a6b123d

Update README.md

Browse files
Files changed (1) hide show
  1. README.md +17 -7
README.md CHANGED
@@ -1,9 +1,9 @@
1
- ---
2
- base_model: GreenBitAI/DeepSeek-R1-Distill-Llama-70B-layer-mix-bpw-4.0
3
- license: apache-2.0
4
- tags:
5
- - mlx
6
- ---
7
 
8
  # GreenBitAI/DeepSeek-R1-Distill-Llama-70B-layer-mix-bpw-4.0-mlx
9
 
@@ -20,5 +20,15 @@ pip install gbx-lm
20
  from gbx_lm import load, generate
21
 
22
  model, tokenizer = load("GreenBitAI/DeepSeek-R1-Distill-Llama-70B-layer-mix-bpw-4.0-mlx")
23
- response = generate(model, tokenizer, prompt="hello", verbose=True)
24
  ```
 
1
+ ---
2
+ base_model: GreenBitAI/DeepSeek-R1-Distill-Llama-70B-layer-mix-bpw-4.0
3
+ license: apache-2.0
4
+ tags:
5
+ - mlx
6
+ ---
7
 
8
  # GreenBitAI/DeepSeek-R1-Distill-Llama-70B-layer-mix-bpw-4.0-mlx
9
 
 
20
  from gbx_lm import load, generate
21
 
22
  model, tokenizer = load("GreenBitAI/DeepSeek-R1-Distill-Llama-70B-layer-mix-bpw-4.0-mlx")
23
+
24
+ prompt = "hello"
25
+
26
+ if tokenizer.chat_template is not None:
27
+ messages = [{"role": "user", "content": prompt}]
28
+ prompt = tokenizer.apply_chat_template(
29
+ messages, add_generation_prompt=True
30
+ )
31
+
32
+ response = generate(model, tokenizer, prompt=prompt, verbose=True)
33
+
34
  ```