gmastrapas and Xenova committed
Commit dbc4a34
1 parent: 4f4251a

Add Transformers.js example code (#14)


- Add Transformers.js example code (76f6a542c7d43583f90869e8d47e1f52b6dfcf0c)


Co-authored-by: Joshua <[email protected]>

Files changed (1):
  1. README.md +60 -0
README.md CHANGED
@@ -351,6 +351,66 @@ query_embeddings = model.encode(
  ```
  </details>
 
+ <details>
+ <summary>via <a href="https://huggingface.co/docs/transformers.js/en/index">Transformers.js</a></summary>
+
+ > [!NOTE]
+ > JinaCLIP was added in Transformers.js v3.1.0, so make sure you're using a compatible version!
+ > See the [release notes](https://github.com/huggingface/transformers.js/releases/tag/3.1.0) for more information.
+
+ If you haven't already, you can install the [Transformers.js](https://huggingface.co/docs/transformers.js) JavaScript library from [NPM](https://www.npmjs.com/package/@huggingface/transformers) using:
+ ```bash
+ npm i @huggingface/transformers
+ ```
+
+ **Example:** Compute text and/or image embeddings with `jinaai/jina-clip-v2`:
+ ```js
+ import { AutoModel, AutoProcessor, RawImage, matmul } from "@huggingface/transformers";
+
+ // Load processor and model
+ const model_id = "jinaai/jina-clip-v2";
+ const processor = await AutoProcessor.from_pretrained(model_id);
+ const model = await AutoModel.from_pretrained(model_id, { dtype: "q4" /* e.g., "fp16", "q8", or "q4" */ });
+
+ // Prepare inputs
+ const urls = ["https://i.ibb.co/nQNGqL0/beach1.jpg", "https://i.ibb.co/r5w8hG8/beach2.jpg"];
+ const images = await Promise.all(urls.map(url => RawImage.read(url)));
+ const sentences = [
+   "غروب جميل على الشاطئ", // Arabic
+   "海滩上美丽的日落", // Chinese
+   "Un beau coucher de soleil sur la plage", // French
+   "Ein wunderschöner Sonnenuntergang am Strand", // German
+   "Ένα όμορφο ηλιοβασίλεμα πάνω από την παραλία", // Greek
+   "समुद्र तट पर एक खूबसूरत सूर्यास्त", // Hindi
+   "Un bellissimo tramonto sulla spiaggia", // Italian
+   "浜辺に沈む美しい夕日", // Japanese
+   "해변 위로 아름다운 일몰", // Korean
+ ];
+
+ // Encode text and images
+ const inputs = await processor(sentences, images, { padding: true, truncation: true });
+ const { l2norm_text_embeddings, l2norm_image_embeddings } = await model(inputs);
+
+ // Encode query (text-only)
+ const query_prefix = "Represent the query for retrieving evidence documents: ";
+ const query_inputs = await processor(query_prefix + "beautiful sunset over the beach");
+ const { l2norm_text_embeddings: query_embeddings } = await model(query_inputs);
+
+ // Compute text-image similarity scores
+ const text_to_image_scores = await matmul(query_embeddings, l2norm_image_embeddings.transpose(1, 0));
+ console.log("text-image similarity scores", text_to_image_scores.tolist()[0]); // [0.29530206322669983, 0.3183615803718567]
+
+ // Compute image-image similarity score
+ const image_to_image_score = await matmul(l2norm_image_embeddings[0], l2norm_image_embeddings[1]);
+ console.log("image-image similarity score", image_to_image_score.item()); // 0.9344457387924194
+
+ // Compute text-text similarity scores
+ const text_to_text_scores = await matmul(query_embeddings, l2norm_text_embeddings.transpose(1, 0));
+ console.log("text-text similarity scores", text_to_text_scores.tolist()[0]); // [0.5566609501838684, 0.7028406858444214, 0.582255482673645, 0.6648036241531372, 0.5462006330490112, 0.6791588068008423, 0.6192430257797241, 0.6258729100227356, 0.6453716158866882]
+ ```
+ </details>
+
+
  <details>
  <summary>via the <a href="https://onnxruntime.ai/">ONNX Runtime</a></summary>
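
Usage note (not part of the diff above): the example's `l2norm_*` outputs are already L2-normalized, so each `matmul` call returns cosine similarities directly. The snippet also uses ES module imports and top-level `await`, so in Node.js it needs to run as an ES module (an `.mjs` file, or a package with `"type": "module"`). Below is a minimal sketch of turning the text-text scores into a ranked list; it assumes the `sentences`, `query_embeddings`, and `l2norm_text_embeddings` variables from the example are still in scope and uses no API beyond what the example already imports.

```js
// Rank the multilingual sentences against the English query.
// The dot products are cosine similarities because the embeddings are L2-normalized.
const scores = (await matmul(query_embeddings, l2norm_text_embeddings.transpose(1, 0))).tolist()[0];
const ranked = sentences
  .map((sentence, i) => ({ sentence, score: scores[i] }))
  .sort((a, b) => b.score - a.score);
console.table(ranked); // highest-scoring sentence first
```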