using System.Collections.Generic;
using UnityEngine;
using Unity.Sentis;
using System.IO;
using System.Text;
/*
 * MiniLM Sentence-Similarity Inference Code
 * =========================================
 *
 * Put this script on the Main Camera.
 *
 * In Assets/StreamingAssets put:
 *
 *   MiniLMv6.sentis
 *   vocab.txt
 *
 * Install the package com.unity.sentis
 *
 */
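// The script tokenizes two sentences, runs each through the MiniLM model,
// mean-pools the token embeddings into one vector per sentence, and logs
// the cosine similarity between the two vectors.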
public class MiniLM : MonoBehaviour
{
    const BackendType backend = BackendType.GPUCompute;

    string string1 = "That is a happy person"; // similarity = 1

    //Choose a string to compare string1 to:
    string string2 = "That is a happy dog";            // similarity = 0.695
    //string string2 = "That is a very happy person";  // similarity = 0.943
    //string string2 = "Today is a sunny day";         // similarity = 0.257

    //Special tokens ([CLS] and [SEP] in the BERT vocab.txt)
    const int START_TOKEN = 101;
    const int END_TOKEN = 102;

    Ops ops;
    ITensorAllocator allocator;

    //Store the vocabulary
    string[] tokens;

    IWorker engine;
    void Start()
    {
        allocator = new TensorCachingAllocator();
        ops = WorkerFactory.CreateOps(backend, allocator);

        //Load the vocabulary and the model, then create the inference engine
        tokens = File.ReadAllLines(Application.streamingAssetsPath + "/vocab.txt");
        Model model = ModelLoader.Load(Application.streamingAssetsPath + "/MiniLMv6.sentis");
        engine = WorkerFactory.CreateWorker(backend, model);

        var tokens1 = GetTokens(string1);
        var tokens2 = GetTokens(string2);

        TensorFloat embedding1 = GetEmbedding(tokens1);
        TensorFloat embedding2 = GetEmbedding(tokens2);

        Debug.Log("Similarity Score: " + DotScore(embedding1, embedding2));
    }
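    // Because MeanPooling L2-normalises its output, the dot product of two
    // embeddings below is their cosine similarity (1 = identical direction).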
    float DotScore(TensorFloat embedding1, TensorFloat embedding2)
    {
        using var prod = ops.Mul(embedding1, embedding2);
        using var dot = ops.ReduceSum(prod, new int[] { 1 }, false);
        dot.MakeReadable(); //download the result to the CPU before indexing it
        return dot[0];
    }
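    // The model takes the three standard BERT-style inputs: the token ids,
    // the token type ids (all zeros, since there is only one sentence per
    // inference) and the attention mask (all ones, since nothing is padded).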
    TensorFloat GetEmbedding(List<int> tokens)
    {
        int N = tokens.Count;
        using var input_ids = new TensorInt(new TensorShape(1, N), tokens.ToArray());
        using var token_type_ids = new TensorInt(new TensorShape(1, N), new int[N]);

        int[] mask = new int[N];
        for (int i = 0; i < mask.Length; i++)
        {
            mask[i] = 1;
        }
        using var attention_mask = new TensorInt(new TensorShape(1, N), mask);

        var inputs = new Dictionary<string, Tensor>
        {
            {"input_ids", input_ids},
            {"token_type_ids", token_type_ids},
            {"attention_mask", attention_mask}
        };
        engine.Execute(inputs);

        //one embedding per token; shape = (1, N, embeddingSize)
        var tokenEmbeddings = engine.PeekOutput("output") as TensorFloat;
        return MeanPooling(tokenEmbeddings, attention_mask);
    }
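    // Mean pooling computes, for each sentence,
    //   pooled = sum_i(mask_i * embedding_i) / max(sum_i(mask_i), 1e-9)
    // i.e. the average of the token embeddings where the attention mask is 1,
    // and then L2-normalises the result to a unit vector.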
    //Get the average of the token embeddings, taking the attention mask into account
    TensorFloat MeanPooling(TensorFloat tokenEmbeddings, TensorInt attentionMask)
    {
        //Broadcast the (1, N) mask to the shape of the (1, N, embeddingSize) embeddings
        using var mask0 = attentionMask.ShallowReshape(attentionMask.shape.Unsqueeze(-1)) as TensorInt;
        using var maskExpanded = ops.Expand(mask0, tokenEmbeddings.shape);
        using var maskExpandedF = ops.Cast(maskExpanded, DataType.Float) as TensorFloat;

        //Sum the unmasked embeddings and divide by the number of unmasked tokens
        using var maskedEmbeddings = ops.Mul(tokenEmbeddings, maskExpandedF);
        using var embeddingSum = ops.ReduceSum(maskedEmbeddings, new[] { 1 }, false);
        using var maskSum = ops.ReduceSum(maskExpandedF, new[] { 1 }, false);
        using var maskSumSafe = ops.Clip(maskSum, 1e-9f, float.MaxValue); //guard against division by zero
        using var mean = ops.Div(embeddingSum, maskSumSafe);

        //L2-normalise so two embeddings can be compared with a plain dot product
        using var norm = ops.ReduceL2(mean, new[] { 1 }, true);
        return ops.Div(mean, norm);
    }
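    // Greedy WordPiece tokenization: for each whitespace-separated word, find
    // the longest prefix that is in the vocabulary, emit its id, then repeat
    // on the remainder (with the "##" continuation prefix) until the word is
    // consumed.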
    List<int> GetTokens(string text)
    {
        //split over whitespace
        string[] words = text.ToLower().Split(null);

        var ids = new List<int>
        {
            START_TOKEN
        };

        string s = "";

        foreach (var word in words)
        {
            int start = 0;
            //i > start (rather than i >= 0) stops before empty or negative-length
            //subwords when a word cannot be fully matched against the vocabulary
            for (int i = word.Length; i > start; i--)
            {
                string subword = start == 0 ? word.Substring(start, i) : "##" + word.Substring(start, i - start);
                int index = System.Array.IndexOf(tokens, subword);
                if (index >= 0)
                {
                    ids.Add(index);
                    s += subword + " ";
                    if (i == word.Length) break; //whole remainder matched; next word
                    start = i;                   //continue from where the match ended
                    i = word.Length + 1;         //reset i (the loop decrements it to word.Length)
                }
            }
        }

        ids.Add(END_TOKEN);

        Debug.Log("Tokenized sentence = " + s);

        return ids;
    }
    private void OnDestroy()
    {
        engine?.Dispose();
        ops?.Dispose();
        allocator?.Dispose();
    }
}