fliou2 committed (verified) · Commit c17a211 · 1 Parent(s): 8122953

Upload folder using huggingface_hub

Files changed (3):
  1. README.md +193 -293
  2. config_sentence_transformers.json +2 -2
  3. model.safetensors +1 -1
README.md CHANGED
@@ -1,7 +1,5 @@
  ---
  base_model: BAAI/bge-small-en-v1.5
- datasets: []
- language: []
  library_name: sentence-transformers
  metrics:
  - cosine_accuracy@1
@@ -19,78 +17,69 @@ metrics:
  - cosine_ndcg@10
  - cosine_mrr@10
  - cosine_map@100
- - dot_accuracy@1
- - dot_accuracy@3
- - dot_accuracy@5
- - dot_accuracy@10
- - dot_precision@1
- - dot_precision@3
- - dot_precision@5
- - dot_precision@10
- - dot_recall@1
- - dot_recall@3
- - dot_recall@5
- - dot_recall@10
- - dot_ndcg@10
- - dot_mrr@10
- - dot_map@100
  pipeline_tag: sentence-similarity
  tags:
  - sentence-transformers
  - sentence-similarity
  - feature-extraction
  - generated_from_trainer
- - dataset_size:1089
  - loss:MultipleNegativesRankingLoss
  widget:
- - source_sentence: sort my holdings based on lowest fees
  sentences:
- - '[{"get_portfolio(None,None)": "portfolio"}, {"factor_contribution(''portfolio'',''<DATES>'',''asset_class'',''us
- equity'',''portfolio'')": "portfolio"}]'
- - '[{"get_portfolio(None,None)": "portfolio"}, {"get_attribute(''portfolio'',[''expense
- ratio''],''<DATES>'')": "portfolio"}, {"sort(''portfolio'',''expense ratio'',''asc'')":
- "portfolio"}]'
- - '[{"get_all_portfolios(''virtual'')": "virtual_portfolios"}]'
- - source_sentence: what is the volatility of each of my holdings?
- sentences:
- - '[{"get_portfolio(None,None)": "portfolio"}, {"factor_contribution(''portfolio'',''<DATES>'',''sector'',''sector
- industrials'',''portfolio'')": "portfolio"}]'
- - '[{"get_portfolio([''type''],None)": "portfolio"}, {"get_attribute(''portfolio'',[''risk''],''<DATES>'')":
- "portfolio"}, {"sort(''portfolio'',''risk'',''asc'')": "portfolio"}]'
  - '[{"get_portfolio([''type''],None)": "portfolio"}, {"filter(''portfolio'',''type'',''=='',''ETF'')":
  "portfolio"}, {"get_attribute(''portfolio'',[''losses''],''<DATES>'')": "portfolio"},
  {"filter(''portfolio'',''losses'',''<'',''0'')": "portfolio"}, {"sort(''portfolio'',''losses'',''asc'')":
  "portfolio"}]'
- - source_sentence: show region breakdown of my returns
  sentences:
  - '[{"get_portfolio(None,None)": "portfolio"}, {"get_attribute(''portfolio'',[''gains''],''<DATES>'')":
  "portfolio"}, {"sort(''portfolio'',''gains'',''desc'')": "portfolio"}, {"get_attribute([''<TICKER1>''],[''returns''],''<DATES>'')":
  "<TICKER1>_performance_data"}]'
  - '[{"get_portfolio(None,None)": "portfolio"}, {"factor_contribution(''portfolio'',''<DATES>'',''asset_class'',''global
  bonds'',''returns'')": "portfolio"}]'
- - '[{"get_portfolio(None,None)": "portfolio"}, {"factor_contribution(''portfolio'',''<DATES>'',''region'',None,''returns'')":
- "portfolio"}]'
- - source_sentence: show me my single stocks
  sentences:
- - '[{"get_portfolio([''type''],None)": "portfolio"}, {"filter(''portfolio'',''type'',''=='',''SHARE'')":
- "portfolio"}, {"aggregate(''portfolio'',''ticker'',''marketValue'',''sum'',None)":
- "stocks_amount"}]'
  - '[{"get_portfolio([''averageCost''],None)": "portfolio"}, {"get_attribute(''portfolio'',[''price''],''<DATES>'')":
  "portfolio"}, {"calculate(''portfolio'',[''price'', ''averageCost''],''difference'',''price_delta'')":
  "portfolio"}, {"filter(''portfolio'',''price_delta'',''>'',''0'')": "portfolio"},
  {"sort(''portfolio'',''price_delta'',''desc'')": "portfolio"}]'
- - '[{"get_portfolio(None,None)": "portfolio"}, {"factor_contribution(''portfolio'',''<DATES>'',''sector'',''sector
- information technology'',''portfolio'')": "portfolio"}]'
- - source_sentence: how much do I have [TICKER] all accounts
  sentences:
- - '[{"get_portfolio([''marketValue''],None)": "portfolio"}, {"get_attribute(''portfolio'',[''<TICKER1>''],''<DATES>'')":
- "portfolio"}, {"calculate(''portfolio'',[''marketValue'', ''<TICKER1>''],''multiply'',''expo_<TICKER1>'')":
- "portfolio"}, {"sort(''portfolio'',''expo_<TICKER1>'',''desc'')": "portfolio"},
- {"aggregate(''portfolio'',''ticker'',''expo_<TICKER1>'',''sum'',None)": "port_expo_<TICKER1>"}]'
- - '[{"get_portfolio(None,None)": "portfolio"}, {"analyze_impact(''portfolio'',''<TICKER1>'',''sell'')":
- "impact_of_selling_<TICKER1>"}]'
  - '[{"get_portfolio(None,None)": "portfolio"}, {"factor_contribution(''portfolio'',''<DATES>'',''sector'',''sector
- automobiles'',''portfolio'')": "portfolio"}]'
  model-index:
  - name: SentenceTransformer based on BAAI/bge-small-en-v1.5
  results:
@@ -102,95 +91,50 @@ model-index:
  type: unknown
  metrics:
  - type: cosine_accuracy@1
- value: 0.6712328767123288
  name: Cosine Accuracy@1
  - type: cosine_accuracy@3
- value: 0.815068493150685
  name: Cosine Accuracy@3
  - type: cosine_accuracy@5
- value: 0.8561643835616438
  name: Cosine Accuracy@5
  - type: cosine_accuracy@10
- value: 0.9178082191780822
  name: Cosine Accuracy@10
  - type: cosine_precision@1
- value: 0.6712328767123288
  name: Cosine Precision@1
  - type: cosine_precision@3
- value: 0.27168949771689493
  name: Cosine Precision@3
  - type: cosine_precision@5
- value: 0.17123287671232873
  name: Cosine Precision@5
  - type: cosine_precision@10
- value: 0.0917808219178082
  name: Cosine Precision@10
  - type: cosine_recall@1
- value: 0.018645357686453576
  name: Cosine Recall@1
  - type: cosine_recall@3
- value: 0.02264079147640792
  name: Cosine Recall@3
  - type: cosine_recall@5
- value: 0.023782343987823442
  name: Cosine Recall@5
  - type: cosine_recall@10
- value: 0.02549467275494673
  name: Cosine Recall@10
  - type: cosine_ndcg@10
- value: 0.1737871975139111
  name: Cosine Ndcg@10
  - type: cosine_mrr@10
- value: 0.7488530115242443
  name: Cosine Mrr@10
  - type: cosine_map@100
- value: 0.020899452334742528
  name: Cosine Map@100
- - type: dot_accuracy@1
- value: 0.6712328767123288
- name: Dot Accuracy@1
- - type: dot_accuracy@3
- value: 0.815068493150685
- name: Dot Accuracy@3
- - type: dot_accuracy@5
- value: 0.8561643835616438
- name: Dot Accuracy@5
- - type: dot_accuracy@10
- value: 0.9178082191780822
- name: Dot Accuracy@10
- - type: dot_precision@1
- value: 0.6712328767123288
- name: Dot Precision@1
- - type: dot_precision@3
- value: 0.27168949771689493
- name: Dot Precision@3
- - type: dot_precision@5
- value: 0.17123287671232873
- name: Dot Precision@5
- - type: dot_precision@10
- value: 0.0917808219178082
- name: Dot Precision@10
- - type: dot_recall@1
- value: 0.018645357686453576
- name: Dot Recall@1
- - type: dot_recall@3
- value: 0.02264079147640792
- name: Dot Recall@3
- - type: dot_recall@5
- value: 0.023782343987823442
- name: Dot Recall@5
- - type: dot_recall@10
- value: 0.02549467275494673
- name: Dot Recall@10
- - type: dot_ndcg@10
- value: 0.1737871975139111
- name: Dot Ndcg@10
- - type: dot_mrr@10
- value: 0.7488530115242443
- name: Dot Mrr@10
- - type: dot_map@100
- value: 0.020899452334742528
- name: Dot Map@100
  ---

  # SentenceTransformer based on BAAI/bge-small-en-v1.5
@@ -203,7 +147,7 @@ This is a [sentence-transformers](https://www.SBERT.net) model finetuned from [B
  - **Model Type:** Sentence Transformer
  - **Base model:** [BAAI/bge-small-en-v1.5](https://huggingface.co/BAAI/bge-small-en-v1.5) <!-- at revision 5c38ec7c405ec4b44b94cc5a9bb96e735b38267a -->
  - **Maximum Sequence Length:** 512 tokens
- - **Output Dimensionality:** 384 tokens
  - **Similarity Function:** Cosine Similarity
  <!-- - **Training Dataset:** Unknown -->
  <!-- - **Language:** Unknown -->
@@ -243,9 +187,9 @@ from sentence_transformers import SentenceTransformer
  model = SentenceTransformer("sentence_transformers_model_id")
  # Run inference
  sentences = [
- 'how much do I have [TICKER] all accounts',
- '[{"get_portfolio([\'marketValue\'],None)": "portfolio"}, {"get_attribute(\'portfolio\',[\'<TICKER1>\'],\'<DATES>\')": "portfolio"}, {"calculate(\'portfolio\',[\'marketValue\', \'<TICKER1>\'],\'multiply\',\'expo_<TICKER1>\')": "portfolio"}, {"sort(\'portfolio\',\'expo_<TICKER1>\',\'desc\')": "portfolio"}, {"aggregate(\'portfolio\',\'ticker\',\'expo_<TICKER1>\',\'sum\',None)": "port_expo_<TICKER1>"}]',
- '[{"get_portfolio(None,None)": "portfolio"}, {"analyze_impact(\'portfolio\',\'<TICKER1>\',\'sell\')": "impact_of_selling_<TICKER1>"}]',
  ]
  embeddings = model.encode(sentences)
  print(embeddings.shape)
@@ -289,38 +233,23 @@ You can finetune this model on your own dataset.

  * Evaluated with [<code>InformationRetrievalEvaluator</code>](https://sbert.net/docs/package_reference/sentence_transformer/evaluation.html#sentence_transformers.evaluation.InformationRetrievalEvaluator)

- | Metric              | Value      |
- |:--------------------|:-----------|
- | cosine_accuracy@1   | 0.6712     |
- | cosine_accuracy@3   | 0.8151     |
- | cosine_accuracy@5   | 0.8562     |
- | cosine_accuracy@10  | 0.9178     |
- | cosine_precision@1  | 0.6712     |
- | cosine_precision@3  | 0.2717     |
- | cosine_precision@5  | 0.1712     |
- | cosine_precision@10 | 0.0918     |
- | cosine_recall@1     | 0.0186     |
- | cosine_recall@3     | 0.0226     |
- | cosine_recall@5     | 0.0238     |
- | cosine_recall@10    | 0.0255     |
- | cosine_ndcg@10      | 0.1738     |
- | cosine_mrr@10       | 0.7489     |
- | **cosine_map@100**  | **0.0209** |
- | dot_accuracy@1      | 0.6712     |
- | dot_accuracy@3      | 0.8151     |
- | dot_accuracy@5      | 0.8562     |
- | dot_accuracy@10     | 0.9178     |
- | dot_precision@1     | 0.6712     |
- | dot_precision@3     | 0.2717     |
- | dot_precision@5     | 0.1712     |
- | dot_precision@10    | 0.0918     |
- | dot_recall@1        | 0.0186     |
- | dot_recall@3        | 0.0226     |
- | dot_recall@5        | 0.0238     |
- | dot_recall@10       | 0.0255     |
- | dot_ndcg@10         | 0.1738     |
- | dot_mrr@10          | 0.7489     |
- | dot_map@100         | 0.0209     |

  <!--
  ## Bias, Risks and Limitations
@@ -341,13 +270,13 @@ You can finetune this model on your own dataset.
  #### Unnamed Dataset


- * Size: 1,089 training samples
  * Columns: <code>sentence_0</code> and <code>sentence_1</code>
  * Approximate statistics based on the first 1000 samples:
- |         | sentence_0                                                                        | sentence_1                                                                           |
- |:--------|:---------------------------------------------------------------------------------|:------------------------------------------------------------------------------------|
- | type    | string                                                                            | string                                                                               |
- | details | <ul><li>min: 5 tokens</li><li>mean: 13.3 tokens</li><li>max: 27 tokens</li></ul> | <ul><li>min: 26 tokens</li><li>mean: 87.73 tokens</li><li>max: 196 tokens</li></ul> |
  * Samples:
  | sentence_0 | sentence_1 |
  |:------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
@@ -483,6 +412,7 @@ You can finetune this model on your own dataset.
  - `batch_eval_metrics`: False
  - `eval_on_start`: False
  - `eval_use_gather_object`: False
  - `batch_sampler`: batch_sampler
  - `multi_dataset_batch_sampler`: round_robin

@@ -491,155 +421,125 @@ You can finetune this model on your own dataset.
  ### Training Logs
  <details><summary>Click to expand</summary>

- | Epoch | Step | cosine_map@100 |
  |:------:|:----:|:--------------:|
- | 0.0183 | 2 | 0.0136 |
- | 0.0367 | 4 | 0.0136 |
- | 0.0550 | 6 | 0.0137 |
- | 0.0734 | 8 | 0.0140 |
- | 0.0917 | 10 | 0.0141 |
- | 0.1101 | 12 | 0.0142 |
- | 0.1284 | 14 | 0.0144 |
- | 0.1468 | 16 | 0.0146 |
- | 0.1651 | 18 | 0.0147 |
- | 0.1835 | 20 | 0.0151 |
- | 0.2018 | 22 | 0.0152 |
- | 0.2202 | 24 | 0.0157 |
- | 0.2385 | 26 | 0.0159 |
- | 0.2569 | 28 | 0.0163 |
- | 0.2752 | 30 | 0.0164 |
- | 0.2936 | 32 | 0.0165 |
- | 0.3119 | 34 | 0.0165 |
- | 0.3303 | 36 | 0.0168 |
- | 0.3486 | 38 | 0.0174 |
- | 0.3670 | 40 | 0.0176 |
- | 0.3853 | 42 | 0.0180 |
- | 0.4037 | 44 | 0.0183 |
- | 0.4220 | 46 | 0.0182 |
- | 0.4404 | 48 | 0.0184 |
- | 0.4587 | 50 | 0.0184 |
- | 0.4771 | 52 | 0.0185 |
- | 0.4954 | 54 | 0.0187 |
- | 0.5138 | 56 | 0.0188 |
- | 0.5321 | 58 | 0.0190 |
- | 0.5505 | 60 | 0.0191 |
- | 0.5688 | 62 | 0.0191 |
- | 0.5872 | 64 | 0.0193 |
- | 0.6055 | 66 | 0.0192 |
- | 0.6239 | 68 | 0.0190 |
- | 0.6422 | 70 | 0.0190 |
- | 0.6606 | 72 | 0.0186 |
- | 0.6789 | 74 | 0.0185 |
- | 0.6972 | 76 | 0.0185 |
- | 0.7156 | 78 | 0.0185 |
- | 0.7339 | 80 | 0.0184 |
- | 0.7523 | 82 | 0.0185 |
- | 0.7706 | 84 | 0.0189 |
- | 0.7890 | 86 | 0.0191 |
- | 0.8073 | 88 | 0.0192 |
- | 0.8257 | 90 | 0.0195 |
- | 0.8440 | 92 | 0.0196 |
- | 0.8624 | 94 | 0.0198 |
- | 0.8807 | 96 | 0.0199 |
- | 0.8991 | 98 | 0.0200 |
- | 0.9174 | 100 | 0.0202 |
- | 0.9358 | 102 | 0.0202 |
- | 0.9541 | 104 | 0.0202 |
- | 0.9725 | 106 | 0.0203 |
- | 0.9908 | 108 | 0.0202 |
- | 1.0 | 109 | 0.0201 |
- | 1.0092 | 110 | 0.0202 |
- | 1.0275 | 112 | 0.0202 |
- | 1.0459 | 114 | 0.0202 |
- | 1.0642 | 116 | 0.0202 |
- | 1.0826 | 118 | 0.0205 |
- | 1.1009 | 120 | 0.0205 |
- | 1.1193 | 122 | 0.0205 |
- | 1.1376 | 124 | 0.0207 |
- | 1.1560 | 126 | 0.0206 |
- | 1.1743 | 128 | 0.0205 |
- | 1.1927 | 130 | 0.0204 |
- | 1.2110 | 132 | 0.0205 |
- | 1.2294 | 134 | 0.0205 |
- | 1.2477 | 136 | 0.0205 |
- | 1.2661 | 138 | 0.0203 |
- | 1.2844 | 140 | 0.0204 |
- | 1.3028 | 142 | 0.0205 |
- | 1.3211 | 144 | 0.0205 |
- | 1.3394 | 146 | 0.0206 |
- | 1.3578 | 148 | 0.0205 |
- | 1.3761 | 150 | 0.0205 |
- | 1.3945 | 152 | 0.0205 |
- | 1.4128 | 154 | 0.0205 |
- | 1.4312 | 156 | 0.0204 |
- | 1.4495 | 158 | 0.0204 |
- | 1.4679 | 160 | 0.0205 |
- | 1.4862 | 162 | 0.0205 |
- | 1.5046 | 164 | 0.0205 |
- | 1.5229 | 166 | 0.0205 |
- | 1.5413 | 168 | 0.0206 |
- | 1.5596 | 170 | 0.0206 |
- | 1.5780 | 172 | 0.0206 |
- | 1.5963 | 174 | 0.0206 |
- | 1.6147 | 176 | 0.0206 |
- | 1.6330 | 178 | 0.0205 |
- | 1.6514 | 180 | 0.0205 |
- | 1.6697 | 182 | 0.0205 |
- | 1.6881 | 184 | 0.0205 |
- | 1.7064 | 186 | 0.0205 |
- | 1.7248 | 188 | 0.0205 |
- | 1.7431 | 190 | 0.0206 |
- | 1.7615 | 192 | 0.0207 |
- | 1.7798 | 194 | 0.0207 |
- | 1.7982 | 196 | 0.0205 |
- | 1.8165 | 198 | 0.0203 |
- | 1.8349 | 200 | 0.0202 |
- | 1.8532 | 202 | 0.0201 |
- | 1.8716 | 204 | 0.0203 |
- | 1.8899 | 206 | 0.0204 |
- | 1.9083 | 208 | 0.0204 |
- | 1.9266 | 210 | 0.0204 |
- | 1.9450 | 212 | 0.0203 |
- | 1.9633 | 214 | 0.0203 |
- | 1.9817 | 216 | 0.0206 |
- | 2.0 | 218 | 0.0205 |
- | 2.0183 | 220 | 0.0206 |
- | 2.0367 | 222 | 0.0207 |
- | 2.0550 | 224 | 0.0207 |
- | 2.0734 | 226 | 0.0207 |
- | 2.0917 | 228 | 0.0206 |
- | 2.1101 | 230 | 0.0206 |
- | 2.1284 | 232 | 0.0206 |
- | 2.1468 | 234 | 0.0205 |
- | 2.1651 | 236 | 0.0205 |
- | 2.1835 | 238 | 0.0205 |
- | 2.2018 | 240 | 0.0204 |
- | 2.2202 | 242 | 0.0203 |
- | 2.2385 | 244 | 0.0203 |
- | 2.2569 | 246 | 0.0203 |
- | 2.2752 | 248 | 0.0206 |
- | 2.2936 | 250 | 0.0206 |
- | 2.3119 | 252 | 0.0206 |
- | 2.3303 | 254 | 0.0205 |
- | 2.3486 | 256 | 0.0205 |
- | 2.3670 | 258 | 0.0205 |
- | 2.3853 | 260 | 0.0204 |
- | 2.4037 | 262 | 0.0204 |
- | 2.4220 | 264 | 0.0205 |
- | 2.4404 | 266 | 0.0207 |
- | 2.4587 | 268 | 0.0207 |
- | 2.4771 | 270 | 0.0208 |
- | 2.4954 | 272 | 0.0206 |
- | 2.5138 | 274 | 0.0207 |
- | 2.5321 | 276 | 0.0208 |
- | 2.5505 | 278 | 0.0208 |
- | 2.5688 | 280 | 0.0209 |

  </details>

  ### Framework Versions
  - Python: 3.10.9
- - Sentence Transformers: 3.0.1
  - Transformers: 4.44.0
  - PyTorch: 2.4.0+cu121
  - Accelerate: 0.33.0
@@ -666,7 +566,7 @@ You can finetune this model on your own dataset.
  #### MultipleNegativesRankingLoss
  ```bibtex
  @misc{henderson2017efficient,
- title={Efficient Natural Language Response Suggestion for Smart Reply},
  author={Matthew Henderson and Rami Al-Rfou and Brian Strope and Yun-hsuan Sung and Laszlo Lukacs and Ruiqi Guo and Sanjiv Kumar and Balint Miklos and Ray Kurzweil},
  year={2017},
  eprint={1705.00652},
 
  ---
  base_model: BAAI/bge-small-en-v1.5
  library_name: sentence-transformers
  metrics:
  - cosine_accuracy@1

  - cosine_ndcg@10
  - cosine_mrr@10
  - cosine_map@100
  pipeline_tag: sentence-similarity
  tags:
  - sentence-transformers
  - sentence-similarity
  - feature-extraction
  - generated_from_trainer
+ - dataset_size:1090
  - loss:MultipleNegativesRankingLoss
  widget:
+ - source_sentence: how do different regions contribute to my returns
  sentences:
  - '[{"get_portfolio([''type''],None)": "portfolio"}, {"filter(''portfolio'',''type'',''=='',''ETF'')":
  "portfolio"}, {"get_attribute(''portfolio'',[''losses''],''<DATES>'')": "portfolio"},
  {"filter(''portfolio'',''losses'',''<'',''0'')": "portfolio"}, {"sort(''portfolio'',''losses'',''asc'')":
  "portfolio"}]'
+ - '[{"get_portfolio(None,None)": "portfolio"}, {"factor_contribution(''portfolio'',''<DATES>'',''region'',None,''returns'')":
+ "portfolio"}]'
+ - '[{"get_portfolio([''marketValue''],None)": "portfolio"}, {"get_attribute(''portfolio'',[''<TICKER1>''],''<DATES>'')":
+ "portfolio"}, {"calculate(''portfolio'',[''marketValue'', ''<TICKER1>''],''multiply'',''expo_<TICKER1>'')":
+ "portfolio"}, {"sort(''portfolio'',''expo_<TICKER1>'',''desc'')": "portfolio"},
+ {"aggregate(''portfolio'',''ticker'',''expo_<TICKER1>'',''sum'',None)": "port_expo_<TICKER1>"}]'
+ - source_sentence: which percent of my portfolio is in single stocks?
+ sentences:
+ - '[{"get_portfolio([''quantity'', ''averageCost'', ''marketValue''],None)": "portfolio"},
+ {"filter(''portfolio'',''ticker'',''=='',''<TICKER1>'')": "portfolio"}, {"calculate(''portfolio'',[''quantity'',
+ ''averageCost''],''multiply'',''cost_basis'')": "portfolio"}, {"calculate(''portfolio'',[''marketValue'',
+ ''cost_basis''],''difference'',''profit'')": "profit_<TICKER1>"}, {"aggregate(''portfolio'',''ticker'',''profit'',''sum'',None)":
+ "profit_<TICKER1>"}]'
+ - '[{"get_portfolio([''type''],None)": "portfolio"}, {"filter(''portfolio'',''type'',''=='',''SHARE'')":
+ "portfolio"}, {"aggregate(''portfolio'',''ticker'',''marketValue'',''sum'',None)":
+ "stocks_amount"}]'
+ - '[{"get_portfolio(None,None)": "portfolio"}, {"get_attribute(''portfolio'',[''dividend
+ yield''],''<DATES>'')": "portfolio"}, {"filter(''portfolio'',''dividend yield'',''>'',''0'')":
+ "portfolio"}, {"sort(''portfolio'',''dividend yield'',''desc'')": "portfolio"}]'
+ - source_sentence: what is the volatility of each of my holdings?
  sentences:
  - '[{"get_portfolio(None,None)": "portfolio"}, {"get_attribute(''portfolio'',[''gains''],''<DATES>'')":
  "portfolio"}, {"sort(''portfolio'',''gains'',''desc'')": "portfolio"}, {"get_attribute([''<TICKER1>''],[''returns''],''<DATES>'')":
  "<TICKER1>_performance_data"}]'
  - '[{"get_portfolio(None,None)": "portfolio"}, {"factor_contribution(''portfolio'',''<DATES>'',''asset_class'',''global
  bonds'',''returns'')": "portfolio"}]'
+ - '[{"get_portfolio([''type''],None)": "portfolio"}, {"get_attribute(''portfolio'',[''risk''],''<DATES>'')":
+ "portfolio"}, {"sort(''portfolio'',''risk'',''asc'')": "portfolio"}]'
+ - source_sentence: list all paper trading portfolios
  sentences:
+ - '[{"get_all_portfolios(''virtual'')": "virtual_portfolios"}]'
  - '[{"get_portfolio([''averageCost''],None)": "portfolio"}, {"get_attribute(''portfolio'',[''price''],''<DATES>'')":
  "portfolio"}, {"calculate(''portfolio'',[''price'', ''averageCost''],''difference'',''price_delta'')":
  "portfolio"}, {"filter(''portfolio'',''price_delta'',''>'',''0'')": "portfolio"},
  {"sort(''portfolio'',''price_delta'',''desc'')": "portfolio"}]'
+ - '[{"get_portfolio(None,<PORTFOLIO_NAME_1>)": "portfolio"}, {"get_attribute(''portfolio'',[''gains''],''<DATES>'')":
+ "portfolio"}, {"filter(''portfolio'',''gains'',''>'',''0'')": "portfolio"}, {"sort(''portfolio'',''gains'',''desc'')":
+ "portfolio"}]'
+ - source_sentence: what is my exposure to US Equities?
  sentences:
+ - '[{"get_portfolio(None,None)": "portfolio"}, {"factor_contribution(''portfolio'',''<DATES>'',''asset_class'',''us
+ equity'',''portfolio'')": "portfolio"}]'
  - '[{"get_portfolio(None,None)": "portfolio"}, {"factor_contribution(''portfolio'',''<DATES>'',''sector'',''sector
+ industrials'',''portfolio'')": "portfolio"}]'
+ - '[{"get_portfolio([''type''],None)": "portfolio"}, {"filter(''portfolio'',''type'',''=='',''ETF'')":
+ "portfolio"}, {"get_attribute(''portfolio'',[''gains''],''<DATES>'')": "portfolio"},
+ {"filter(''portfolio'',''gains'',''>'',''0'')": "portfolio"}, {"sort(''portfolio'',''gains'',''desc'')":
+ "portfolio"}]'
  model-index:
  - name: SentenceTransformer based on BAAI/bge-small-en-v1.5
  results:
 
  type: unknown
  metrics:
  - type: cosine_accuracy@1
+ value: 0.678082191780822
  name: Cosine Accuracy@1
  - type: cosine_accuracy@3
+ value: 0.8082191780821918
  name: Cosine Accuracy@3
  - type: cosine_accuracy@5
+ value: 0.863013698630137
  name: Cosine Accuracy@5
  - type: cosine_accuracy@10
+ value: 0.9315068493150684
  name: Cosine Accuracy@10
  - type: cosine_precision@1
+ value: 0.678082191780822
  name: Cosine Precision@1
  - type: cosine_precision@3
+ value: 0.2694063926940639
  name: Cosine Precision@3
  - type: cosine_precision@5
+ value: 0.17260273972602735
  name: Cosine Precision@5
  - type: cosine_precision@10
+ value: 0.09315068493150684
  name: Cosine Precision@10
  - type: cosine_recall@1
+ value: 0.018835616438356163
  name: Cosine Recall@1
  - type: cosine_recall@3
+ value: 0.02245053272450533
  name: Cosine Recall@3
  - type: cosine_recall@5
+ value: 0.02397260273972603
  name: Cosine Recall@5
  - type: cosine_recall@10
+ value: 0.025875190258751908
  name: Cosine Recall@10
  - type: cosine_ndcg@10
+ value: 0.17595381476268288
  name: Cosine Ndcg@10
  - type: cosine_mrr@10
+ value: 0.7579120460969775
  name: Cosine Mrr@10
  - type: cosine_map@100
+ value: 0.02111814463536371
  name: Cosine Map@100
  ---

  # SentenceTransformer based on BAAI/bge-small-en-v1.5

  - **Model Type:** Sentence Transformer
  - **Base model:** [BAAI/bge-small-en-v1.5](https://huggingface.co/BAAI/bge-small-en-v1.5) <!-- at revision 5c38ec7c405ec4b44b94cc5a9bb96e735b38267a -->
  - **Maximum Sequence Length:** 512 tokens
+ - **Output Dimensionality:** 384 dimensions
  - **Similarity Function:** Cosine Similarity
  <!-- - **Training Dataset:** Unknown -->
  <!-- - **Language:** Unknown -->
 
  model = SentenceTransformer("sentence_transformers_model_id")
  # Run inference
  sentences = [
+ 'what is my exposure to US Equities?',
+ '[{"get_portfolio(None,None)": "portfolio"}, {"factor_contribution(\'portfolio\',\'<DATES>\',\'asset_class\',\'us equity\',\'portfolio\')": "portfolio"}]',
+ '[{"get_portfolio(None,None)": "portfolio"}, {"factor_contribution(\'portfolio\',\'<DATES>\',\'sector\',\'sector industrials\',\'portfolio\')": "portfolio"}]',
  ]
  embeddings = model.encode(sentences)
  print(embeddings.shape)
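The snippet above stops at the embedding shape. A minimal follow-up sketch (assuming the card's placeholder model id and the Sentence Transformers 3.x `model.similarity` API, which honors the cosine similarity function configured in this commit) shows how the embeddings are typically scored against candidate function-call plans:

```python
# Sketch only: "sentence_transformers_model_id" is the card's placeholder, not a real repo id.
from sentence_transformers import SentenceTransformer

model = SentenceTransformer("sentence_transformers_model_id")

query = "what is my exposure to US Equities?"
candidate_plans = [
    '[{"get_portfolio(None,None)": "portfolio"}, {"factor_contribution(\'portfolio\',\'<DATES>\',\'asset_class\',\'us equity\',\'portfolio\')": "portfolio"}]',
    '[{"get_portfolio(None,None)": "portfolio"}, {"factor_contribution(\'portfolio\',\'<DATES>\',\'sector\',\'sector industrials\',\'portfolio\')": "portfolio"}]',
]

# Encode the query and the serialized plans into 384-dimensional vectors.
embeddings = model.encode([query] + candidate_plans)

# model.similarity applies the configured similarity function (cosine here).
scores = model.similarity(embeddings[:1], embeddings[1:])
print(scores)                # 1 x 2 tensor of cosine similarities
print(int(scores.argmax()))  # index of the best-matching plan
```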
 
  * Evaluated with [<code>InformationRetrievalEvaluator</code>](https://sbert.net/docs/package_reference/sentence_transformer/evaluation.html#sentence_transformers.evaluation.InformationRetrievalEvaluator)

+ | Metric              | Value     |
+ |:--------------------|:----------|
+ | cosine_accuracy@1   | 0.6781    |
+ | cosine_accuracy@3   | 0.8082    |
+ | cosine_accuracy@5   | 0.863     |
+ | cosine_accuracy@10  | 0.9315    |
+ | cosine_precision@1  | 0.6781    |
+ | cosine_precision@3  | 0.2694    |
+ | cosine_precision@5  | 0.1726    |
+ | cosine_precision@10 | 0.0932    |
+ | cosine_recall@1     | 0.0188    |
+ | cosine_recall@3     | 0.0225    |
+ | cosine_recall@5     | 0.024     |
+ | cosine_recall@10    | 0.0259    |
+ | **cosine_ndcg@10**  | **0.176** |
+ | cosine_mrr@10       | 0.7579    |
+ | cosine_map@100      | 0.0211    |
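For reference, here is a toy sketch of how such an evaluation could be reproduced. The queries, corpus, and relevance judgments below are hypothetical stand-ins, not the card's actual evaluation set:

```python
# Hypothetical toy data; the real evaluation set behind the table above is not published here.
from sentence_transformers import SentenceTransformer
from sentence_transformers.evaluation import InformationRetrievalEvaluator

model = SentenceTransformer("sentence_transformers_model_id")  # placeholder id from the card

queries = {"q1": "what is my exposure to US Equities?"}  # query_id -> query text
corpus = {
    "d1": '[{"get_portfolio(None,None)": "portfolio"}, {"factor_contribution(\'portfolio\',\'<DATES>\',\'asset_class\',\'us equity\',\'portfolio\')": "portfolio"}]',
    "d2": '[{"get_all_portfolios(\'virtual\')": "virtual_portfolios"}]',
}  # doc_id -> document text
relevant_docs = {"q1": {"d1"}}  # query_id -> set of relevant doc_ids

evaluator = InformationRetrievalEvaluator(queries, corpus, relevant_docs, name="toy_eval")
results = evaluator(model)  # dict of metrics, e.g. cosine_ndcg@10, cosine_map@100
print(results)
```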

  <!--
  ## Bias, Risks and Limitations
 
  #### Unnamed Dataset


+ * Size: 1,090 training samples
  * Columns: <code>sentence_0</code> and <code>sentence_1</code>
  * Approximate statistics based on the first 1000 samples:
+ |         | sentence_0                                                                         | sentence_1                                                                           |
+ |:--------|:-----------------------------------------------------------------------------------|:------------------------------------------------------------------------------------|
+ | type    | string                                                                             | string                                                                               |
+ | details | <ul><li>min: 5 tokens</li><li>mean: 13.28 tokens</li><li>max: 27 tokens</li></ul> | <ul><li>min: 26 tokens</li><li>mean: 87.73 tokens</li><li>max: 196 tokens</li></ul> |
  * Samples:
  | sentence_0 | sentence_1 |
  |:------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
 
  - `batch_eval_metrics`: False
  - `eval_on_start`: False
  - `eval_use_gather_object`: False
+ - `prompts`: None
  - `batch_sampler`: batch_sampler
  - `multi_dataset_batch_sampler`: round_robin
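Putting the training details above together, a minimal fine-tuning sketch (assuming the Sentence Transformers 3.x trainer API; the pairs here are illustrative, not the card's actual 1,090-sample dataset):

```python
# Illustrative pairs only; the actual training data is the unnamed 1,090-sample dataset above.
from datasets import Dataset
from sentence_transformers import SentenceTransformer, SentenceTransformerTrainer
from sentence_transformers.losses import MultipleNegativesRankingLoss

model = SentenceTransformer("BAAI/bge-small-en-v1.5")

train_dataset = Dataset.from_dict({
    "sentence_0": [
        "list all paper trading portfolios",
        "what is my exposure to US Equities?",
    ],
    "sentence_1": [
        '[{"get_all_portfolios(\'virtual\')": "virtual_portfolios"}]',
        '[{"get_portfolio(None,None)": "portfolio"}, {"factor_contribution(\'portfolio\',\'<DATES>\',\'asset_class\',\'us equity\',\'portfolio\')": "portfolio"}]',
    ],
})

# MultipleNegativesRankingLoss treats every other in-batch pair as a negative,
# so larger batches generally give a stronger training signal.
loss = MultipleNegativesRankingLoss(model)

trainer = SentenceTransformerTrainer(model=model, train_dataset=train_dataset, loss=loss)
trainer.train()
```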
 
 
  ### Training Logs
  <details><summary>Click to expand</summary>

+ | Epoch | Step | cosine_ndcg@10 |
  |:------:|:----:|:--------------:|
+ | 0.0183 | 2 | 0.1179 |
+ | 0.0367 | 4 | 0.1184 |
+ | 0.0550 | 6 | 0.1193 |
+ | 0.0734 | 8 | 0.1201 |
+ | 0.0917 | 10 | 0.1227 |
+ | 0.1101 | 12 | 0.1235 |
+ | 0.1284 | 14 | 0.1255 |
+ | 0.1468 | 16 | 0.1267 |
+ | 0.1651 | 18 | 0.1299 |
+ | 0.1835 | 20 | 0.1320 |
+ | 0.2018 | 22 | 0.1348 |
+ | 0.2202 | 24 | 0.1367 |
+ | 0.2385 | 26 | 0.1383 |
+ | 0.2569 | 28 | 0.1413 |
+ | 0.2752 | 30 | 0.1420 |
+ | 0.2936 | 32 | 0.1432 |
+ | 0.3119 | 34 | 0.1435 |
+ | 0.3303 | 36 | 0.1451 |
+ | 0.3486 | 38 | 0.1471 |
+ | 0.3670 | 40 | 0.1491 |
+ | 0.3853 | 42 | 0.1503 |
+ | 0.4037 | 44 | 0.1523 |
+ | 0.4220 | 46 | 0.1525 |
+ | 0.4404 | 48 | 0.1531 |
+ | 0.4587 | 50 | 0.1535 |
+ | 0.4771 | 52 | 0.1534 |
+ | 0.4954 | 54 | 0.1529 |
+ | 0.5138 | 56 | 0.1528 |
+ | 0.5321 | 58 | 0.1556 |
+ | 0.5505 | 60 | 0.1568 |
+ | 0.5688 | 62 | 0.1576 |
+ | 0.5872 | 64 | 0.1577 |
+ | 0.6055 | 66 | 0.1577 |
+ | 0.6239 | 68 | 0.1575 |
+ | 0.6422 | 70 | 0.1586 |
+ | 0.6606 | 72 | 0.1596 |
+ | 0.6789 | 74 | 0.1612 |
+ | 0.6972 | 76 | 0.1617 |
+ | 0.7156 | 78 | 0.1637 |
+ | 0.7339 | 80 | 0.1638 |
+ | 0.7523 | 82 | 0.1637 |
+ | 0.7706 | 84 | 0.1635 |
+ | 0.7890 | 86 | 0.1634 |
+ | 0.8073 | 88 | 0.1640 |
+ | 0.8257 | 90 | 0.1641 |
+ | 0.8440 | 92 | 0.1652 |
+ | 0.8624 | 94 | 0.1652 |
+ | 0.8807 | 96 | 0.1657 |
+ | 0.8991 | 98 | 0.1650 |
+ | 0.9174 | 100 | 0.1664 |
+ | 0.9358 | 102 | 0.1668 |
+ | 0.9541 | 104 | 0.1671 |
+ | 0.9725 | 106 | 0.1683 |
+ | 0.9908 | 108 | 0.1689 |
+ | 1.0 | 109 | 0.1684 |
+ | 1.0092 | 110 | 0.1673 |
+ | 1.0275 | 112 | 0.1686 |
+ | 1.0459 | 114 | 0.1680 |
+ | 1.0642 | 116 | 0.1676 |
+ | 1.0826 | 118 | 0.1668 |
+ | 1.1009 | 120 | 0.1668 |
+ | 1.1193 | 122 | 0.1671 |
+ | 1.1376 | 124 | 0.1673 |
+ | 1.1560 | 126 | 0.1666 |
+ | 1.1743 | 128 | 0.1669 |
+ | 1.1927 | 130 | 0.1668 |
+ | 1.2110 | 132 | 0.1669 |
+ | 1.2294 | 134 | 0.1673 |
+ | 1.2477 | 136 | 0.1681 |
+ | 1.2661 | 138 | 0.1683 |
+ | 1.2844 | 140 | 0.1681 |
+ | 1.3028 | 142 | 0.1674 |
+ | 1.3211 | 144 | 0.1672 |
+ | 1.3394 | 146 | 0.1668 |
+ | 1.3578 | 148 | 0.1682 |
+ | 1.3761 | 150 | 0.1689 |
+ | 1.3945 | 152 | 0.1690 |
+ | 1.4128 | 154 | 0.1693 |
+ | 1.4312 | 156 | 0.1683 |
+ | 1.4495 | 158 | 0.1683 |
+ | 1.4679 | 160 | 0.1678 |
+ | 1.4862 | 162 | 0.1695 |
+ | 1.5046 | 164 | 0.1710 |
+ | 1.5229 | 166 | 0.1717 |
+ | 1.5413 | 168 | 0.1715 |
+ | 1.5596 | 170 | 0.1698 |
+ | 1.5780 | 172 | 0.1699 |
+ | 1.5963 | 174 | 0.1694 |
+ | 1.6147 | 176 | 0.1701 |
+ | 1.6330 | 178 | 0.1693 |
+ | 1.6514 | 180 | 0.1683 |
+ | 1.6697 | 182 | 0.1692 |
+ | 1.6881 | 184 | 0.1689 |
+ | 1.7064 | 186 | 0.1696 |
+ | 1.7248 | 188 | 0.1696 |
+ | 1.7431 | 190 | 0.1700 |
+ | 1.7615 | 192 | 0.1705 |
+ | 1.7798 | 194 | 0.1718 |
+ | 1.7982 | 196 | 0.1719 |
+ | 1.8165 | 198 | 0.1723 |
+ | 1.8349 | 200 | 0.1721 |
+ | 1.8532 | 202 | 0.1717 |
+ | 1.8716 | 204 | 0.1722 |
+ | 1.8899 | 206 | 0.1722 |
+ | 1.9083 | 208 | 0.1728 |
+ | 1.9266 | 210 | 0.1734 |
+ | 1.9450 | 212 | 0.1733 |
+ | 1.9633 | 214 | 0.1742 |
+ | 1.9817 | 216 | 0.1749 |
+ | 2.0 | 218 | 0.1750 |
+ | 2.0183 | 220 | 0.1760 |

  </details>

  ### Framework Versions
  - Python: 3.10.9
+ - Sentence Transformers: 3.3.1
  - Transformers: 4.44.0
  - PyTorch: 2.4.0+cu121
  - Accelerate: 0.33.0

  #### MultipleNegativesRankingLoss
  ```bibtex
  @misc{henderson2017efficient,
+ title={Efficient Natural Language Response Suggestion for Smart Reply},
  author={Matthew Henderson and Rami Al-Rfou and Brian Strope and Yun-hsuan Sung and Laszlo Lukacs and Ruiqi Guo and Sanjiv Kumar and Balint Miklos and Ray Kurzweil},
  year={2017},
  eprint={1705.00652},
config_sentence_transformers.json CHANGED
@@ -1,10 +1,10 @@
  {
  "__version__": {
- "sentence_transformers": "3.0.1",
  "transformers": "4.44.0",
  "pytorch": "2.4.0+cu121"
  },
  "prompts": {},
  "default_prompt_name": null,
- "similarity_fn_name": null
  }

  {
  "__version__": {
+ "sentence_transformers": "3.3.1",
  "transformers": "4.44.0",
  "pytorch": "2.4.0+cu121"
  },
  "prompts": {},
  "default_prompt_name": null,
+ "similarity_fn_name": "cosine"
  }
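The practical effect of this config change is that the model now declares cosine as its similarity function rather than leaving it unset. A quick check, sketched with the card's placeholder model id:

```python
from sentence_transformers import SentenceTransformer

# "sentence_transformers_model_id" is the card's placeholder, not a real repo id.
model = SentenceTransformer("sentence_transformers_model_id")
print(model.similarity_fn_name)  # expected: "cosine"
```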
model.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:b9a877018680fb61a472627e468678bc0b81a90b2b989d167914ab65eba0ed13
  size 133462128

  version https://git-lfs.github.com/spec/v1
+ oid sha256:1600be1ae30bbefa7dba2d3fb026b69a5459d9e461cfa33ea1d7b4f64e0f77fd
  size 133462128