juliuslipp committed: Update README.md
README.md CHANGED
````diff
@@ -2690,38 +2690,10 @@ similarities = cos_sim(embeddings[0], embeddings[1:])
 print('similarities:', similarities)
 ```
 
-### 
+### Using API
 
 You’ll be able to use the models through our API as well. The API is coming soon and will have some exciting features. Stay tuned!
 
-### Transformers.js
-
-If you haven't already, you can install the [Transformers.js](https://huggingface.co/docs/transformers.js) JavaScript library from [NPM](https://www.npmjs.com/package/@xenova/transformers) using:
-```bash
-npm i @xenova/transformers
-```
-
-You can then use the model to compute embeddings as follows:
-
-```js
-import { pipeline, cos_sim } from '@xenova/transformers';
-
-// Create a feature-extraction pipeline
-const extractor = await pipeline('feature-extraction', 'mixedbread-ai/mxbai-embed-2d-large-v1', {
-  quantized: false, // (Optional) remove this line to use the 8-bit quantized model
-});
-
-// Compute sentence embeddings (with `cls` pooling)
-const sentences = ['Who is german and likes bread?', 'Everybody in Germany.' ];
-const output = await extractor(sentences, { pooling: 'cls' });
-
-// Set embedding size and truncate embeddings
-const new_embedding_size = 768;
-const truncated = output.slice(null, [0, new_embedding_size]);
-
-// Compute cosine similarity
-console.log(cos_sim(truncated[0].data, truncated[1].data)); // 0.6979532021425204
-```
 
 ## Evaluation
 
````
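For context, the hunk's anchor line, `similarities = cos_sim(embeddings[0], embeddings[1:])`, belongs to the Python quickstart that sits just above this change in the README. The diff does not show that block itself, so the sketch below is an assumption-based reconstruction: it presumes the embeddings come from `sentence-transformers` (the `SentenceTransformer` class and its `cos_sim` utility) and reuses the model id, sentences, and 768-dimension truncation from the removed Transformers.js snippet.

```python
# Sketch only: an assumed reconstruction of the Python example the diff context points at.
# Assumptions: sentence-transformers is installed, the model id matches the removed
# Transformers.js snippet, and embeddings are truncated to the first 768 dimensions.
from sentence_transformers import SentenceTransformer
from sentence_transformers.util import cos_sim

model = SentenceTransformer("mixedbread-ai/mxbai-embed-2d-large-v1")

sentences = ["Who is german and likes bread?", "Everybody in Germany."]

# Encode, then keep only the first 768 dimensions (2D-Matryoshka-style truncation)
embeddings = model.encode(sentences)[:, :768]

# Cosine similarity between the first sentence and the rest,
# matching the context lines shown in the hunk above
similarities = cos_sim(embeddings[0], embeddings[1:])
print('similarities:', similarities)
```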