Update README.md
Browse files
README.md
CHANGED
@@ -114,8 +114,7 @@ INDEX.medium_index_TDT
|
|
114 |
'data_repo': 'https://huggingface.co/datasets/BroDeadlines/TEST.edu_tdt_proposition_data',
|
115 |
'data_split': 'train',
|
116 |
'vec_index': 'vec-index.medium_index_tdt',
|
117 |
-
'algo': 'HyDE',
|
118 |
-
'search_algo': 'vector search',
|
119 |
'size': 143,
|
120 |
'precision': 0.2823529411764706,
|
121 |
'recall': 0.16783216783216784,
|
@@ -131,11 +130,68 @@ INDEX.medium_index_TDT
|
|
131 |
'data_split': 'train',
|
132 |
'vec_index': 'vec-sentence-propositon_medium_edu_tdt',
|
133 |
'text_index': 'text-sentence-propositon_medium_edu_tdt',
|
134 |
-
'algo': 'HyDE',
|
135 |
-
'search_algo': 'hybrid search',
|
136 |
'size': 144,
|
|
|
137 |
'precision': 0.6650485436893204,
|
138 |
'recall': 0.9513888888888888,
|
139 |
'map_score': 0.16559069113756608
|
140 |
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
141 |
```
|
|
|
114 |
'data_repo': 'https://huggingface.co/datasets/BroDeadlines/TEST.edu_tdt_proposition_data',
|
115 |
'data_split': 'train',
|
116 |
'vec_index': 'vec-index.medium_index_tdt',
|
117 |
+
'algo': ['HyDE', 'proposition', 'vector search'],
|
|
|
118 |
'size': 143,
|
119 |
'precision': 0.2823529411764706,
|
120 |
'recall': 0.16783216783216784,
|
|
|
130 |
'data_split': 'train',
|
131 |
'vec_index': 'vec-sentence-propositon_medium_edu_tdt',
|
132 |
'text_index': 'text-sentence-propositon_medium_edu_tdt',
|
133 |
+
'algo': ['HyDE', 'hybrid search', 'proposition', 'sentence-encoding', 'parent document retrieval'],
|
|
|
134 |
'size': 144,
|
135 |
+
'hybrid_weigths': [0.4, 0.6],
|
136 |
'precision': 0.6650485436893204,
|
137 |
'recall': 0.9513888888888888,
|
138 |
'map_score': 0.16559069113756608
|
139 |
}
|
140 |
+
```
|
141 |
+
|
142 |
+
INDEX.medium_index_TDT
|
143 |
+
```json
|
144 |
+
{
|
145 |
+
'split': 'INDEX.medium_index_TDT.fulltext.clean.proposition.sentence.hybrid',
|
146 |
+
'data_repo': 'https://huggingface.co/datasets/BroDeadlines/TEST.edu_tdt_proposition_data',
|
147 |
+
'data_split': 'INDEX.medium_index_TDT_clean',
|
148 |
+
'vec_index': 'vec-sentence-index.medium_index_tdt_clean',
|
149 |
+
'text_index': 'text-sentence-index.medium_index_tdt_clean',
|
150 |
+
'algo': ['HyDE', 'hybrid search', 'proposition', 'full-text-encoding'],
|
151 |
+
'total_k': 20,
|
152 |
+
'relevant': 0.7569444444444444,
|
153 |
+
'precision': 0.4052044609665427,
|
154 |
+
'recall': 0.7569444444444444,
|
155 |
+
'map_score': 0.19089647598822646,
|
156 |
+
'relevant_retrieved': 109,
|
157 |
+
'num_retrieved': 269
|
158 |
+
}
|
159 |
+
```
|
160 |
+
|
161 |
+
INDEX.medium_index_TDT
|
162 |
+
```json
|
163 |
+
{
|
164 |
+
'split': 'INDEX.medium_index_TDT.fulltext.clean.8.proposition.sentence.hybrid',
|
165 |
+
'data_repo': 'https://huggingface.co/datasets/BroDeadlines/TEST.edu_tdt_proposition_data',
|
166 |
+
'data_split': 'INDEX.medium_index_TDT_clean',
|
167 |
+
'vec_index': 'vec-sentence-index.medium_index_tdt_clean',
|
168 |
+
'text_index': 'text-sentence-index.medium_index_tdt_clean',
|
169 |
+
'algo': ['HyDE', 'hybrid search', 'proposition', 'full-text-encoding'],
|
170 |
+
'total_k': 8,
|
171 |
+
'relevant': 0.5902777777777778,
|
172 |
+
'precision': 0.425,
|
173 |
+
'recall': 0.5902777777777778,
|
174 |
+
'map_score': 0.3272982804232805,
|
175 |
+
'relevant_retrieved': 85,
|
176 |
+
'num_retrieved': 200
|
177 |
+
}
|
178 |
+
```
|
179 |
+
|
180 |
+
INDEX.medium_index_TDT
|
181 |
+
```json
|
182 |
+
{
|
183 |
+
'split': 'INDEX.medium_index_TDT.fulltext.clean.2.proposition.sentence.hybrid',
|
184 |
+
'data_repo': 'https://huggingface.co/datasets/BroDeadlines/TEST.edu_tdt_proposition_data',
|
185 |
+
'data_split': 'INDEX.medium_index_TDT_clean',
|
186 |
+
'vec_index': 'vec-sentence-index.medium_index_tdt_clean',
|
187 |
+
'text_index': 'text-sentence-index.medium_index_tdt_clean',
|
188 |
+
'algo': ['HyDE', 'hybrid search', 'proposition', 'full-text-encoding'],
|
189 |
+
'total_k': 4,
|
190 |
+
'relevant': 0.4652777777777778,
|
191 |
+
'precision': 0.46206896551724136,
|
192 |
+
'recall': 0.4652777777777778,
|
193 |
+
'map_score': 0.2957175925925926,
|
194 |
+
'relevant_retrieved': 67,
|
195 |
+
'num_retrieved': 145
|
196 |
+
}
|
197 |
```
|