README.md
CHANGED
@@ -12057,7 +12057,7 @@ transformers==4.37.2
|
|
12057 |
from transformers import AutoModel
|
12058 |
import torch
|
12059 |
|
12060 |
-
model_name = "
|
12061 |
model = AutoModel.from_pretrained(model_name, trust_remote_code=True, torch_dtype=torch.float16).to("cuda")
|
12062 |
|
12063 |
# you can use flash_attention_2 for faster inference
|
@@ -12114,7 +12114,7 @@ from infinity_emb import AsyncEngineArray, EngineArgs, AsyncEmbeddingEngine
|
|
12114 |
import numpy as np
|
12115 |
|
12116 |
array = AsyncEngineArray.from_args([
|
12117 |
-
EngineArgs(model_name_or_path = "
|
12118 |
])
|
12119 |
queries = ["中国的首都是哪里?"] # "What is the capital of China?"
|
12120 |
passages = ["beijing", "shanghai"] # "北京", "上海"
|
@@ -12140,7 +12140,7 @@ print(scores.tolist()) # [[0.40356746315956116, 0.36183443665504456]]
|
|
12140 |
from FlagEmbedding import FlagModel
|
12141 |
|
12142 |
|
12143 |
-
model = FlagModel("
|
12144 |
query_instruction_for_retrieval="Query: ",
|
12145 |
pooling_method="mean",
|
12146 |
trust_remote_code=True,
|
@@ -12204,10 +12204,10 @@ print(scores.tolist()) # [[0.40356746315956116, 0.36183440685272217]]
|
|
12204 |
|
12205 |
## 许可证 License
|
12206 |
|
12207 |
-
- 本仓库中代码依照 [Apache-2.0 协议](https://github.com/
|
12208 |
-
- MiniCPM-Embedding-Light 模型权重的使用则需要遵循 [MiniCPM 模型协议](https://github.com/
|
12209 |
- MiniCPM-Embedding-Light 模型权重对学术研究完全开放。如需将模型用于商业用途,请填写[此问卷](https://modelbest.feishu.cn/share/base/form/shrcnpV5ZT9EJ6xYjh3Kx0J6v8g)。
|
12210 |
|
12211 |
-
* The code in this repo is released under the [Apache-2.0](https://github.com/
|
12212 |
-
* The usage of MiniCPM-Embedding-Light model weights must strictly follow [MiniCPM Model License.md](https://github.com/
|
12213 |
* The models and weights of MiniCPM-Embedding-Light are completely free for academic research. After filling out a [questionnaire](https://modelbest.feishu.cn/share/base/form/shrcnpV5ZT9EJ6xYjh3Kx0J6v8g) for registration, MiniCPM-Embedding-Light weights are also available for free commercial use.
|
|
|
12057 |
from transformers import AutoModel
|
12058 |
import torch
|
12059 |
|
12060 |
+
model_name = "openbmb/MiniCPM-Embedding-Light"
|
12061 |
model = AutoModel.from_pretrained(model_name, trust_remote_code=True, torch_dtype=torch.float16).to("cuda")
|
12062 |
|
12063 |
# you can use flash_attention_2 for faster inference
|
|
|
12114 |
import numpy as np
|
12115 |
|
12116 |
array = AsyncEngineArray.from_args([
|
12117 |
+
EngineArgs(model_name_or_path = "openbmb/MiniCPM-Embedding-Light", engine="torch", dtype="float16", bettertransformer=False, pooling_method="mean", trust_remote_code=True),
|
12118 |
])
|
12119 |
queries = ["中国的首都是哪里?"] # "What is the capital of China?"
|
12120 |
passages = ["beijing", "shanghai"] # "北京", "上海"
|
|
|
12140 |
from FlagEmbedding import FlagModel
|
12141 |
|
12142 |
|
12143 |
+
model = FlagModel("openbmb/MiniCPM-Embedding-Light",
|
12144 |
query_instruction_for_retrieval="Query: ",
|
12145 |
pooling_method="mean",
|
12146 |
trust_remote_code=True,
|
|
|
12204 |
|
12205 |
## 许可证 License
|
12206 |
|
12207 |
+
- 本仓库中代码依照 [Apache-2.0 协议](https://github.com/openbmb/MiniCPM/blob/main/LICENSE)开源。
|
12208 |
+
- MiniCPM-Embedding-Light 模型权重的使用则需要遵循 [MiniCPM 模型协议](https://github.com/openbmb/MiniCPM/blob/main/MiniCPM%20Model%20License.md)。
|
12209 |
- MiniCPM-Embedding-Light 模型权重对学术研究完全开放。如需将模型用于商业用途,请填写[此问卷](https://modelbest.feishu.cn/share/base/form/shrcnpV5ZT9EJ6xYjh3Kx0J6v8g)。
|
12210 |
|
12211 |
+
* The code in this repo is released under the [Apache-2.0](https://github.com/openbmb/MiniCPM/blob/main/LICENSE) License.
|
12212 |
+
* The usage of MiniCPM-Embedding-Light model weights must strictly follow [MiniCPM Model License.md](https://github.com/openbmb/MiniCPM/blob/main/MiniCPM%20Model%20License.md).
|
12213 |
* The models and weights of MiniCPM-Embedding-Light are completely free for academic research. After filling out a [questionnaire](https://modelbest.feishu.cn/share/base/form/shrcnpV5ZT9EJ6xYjh3Kx0J6v8g) for registration, MiniCPM-Embedding-Light weights are also available for free commercial use.
|