apepkuss79 committed
Commit 2989685 • 1 Parent(s): fbfddbe
Update README.md
README.md CHANGED
@@ -66,7 +66,7 @@ tags:
 - Run as LlamaEdge service
 
 ```bash
-wasmedge --dir .:. --nn-preload default:GGML:AUTO:Meta-Llama-3.1-8B-Instruct-
+wasmedge --dir .:. --nn-preload default:GGML:AUTO:Meta-Llama-3.1-8B-Instruct-Q5_K_M.gguf \
 llama-api-server.wasm \
 --prompt-template llama-3-chat \
 --ctx-size 128000 \
@@ -76,7 +76,7 @@ tags:
 - Run as LlamaEdge command app
 
 ```bash
-wasmedge --dir .:. --nn-preload default:GGML:AUTO:Meta-Llama-3.1-8B-Instruct-
+wasmedge --dir .:. --nn-preload default:GGML:AUTO:Meta-Llama-3.1-8B-Instruct-Q5_K_M.gguf \
 llama-chat.wasm \
 --prompt-template llama-3-chat \
 --ctx-size 128000 \
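For reference, one way to check that the updated service command works: once `llama-api-server.wasm` is running, it serves an OpenAI-compatible API. The sketch below assumes the server's default listen address of `localhost:8080` and the `/v1/chat/completions` route; the `model` value is a placeholder, not something specified in this commit.

```bash
# Minimal smoke test for the updated service command.
# Assumptions: default LlamaEdge listen address localhost:8080, the
# OpenAI-compatible /v1/chat/completions route, and a placeholder "model" name.
curl -s -X POST http://localhost:8080/v1/chat/completions \
  -H 'Content-Type: application/json' \
  -d '{
        "messages": [
          {"role": "system", "content": "You are a helpful assistant."},
          {"role": "user", "content": "What is the capital of France?"}
        ],
        "model": "Meta-Llama-3.1-8B-Instruct"
      }'
```

The `llama-chat.wasm` command in the second hunk needs no such check; it starts an interactive chat session directly in the terminal.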