|
--- |
|
license: cc-by-sa-4.0 |
|
datasets: |
|
- procesaur/kisobran |
|
- procesaur/STARS |
|
- procesaur/Vikipedija |
|
- procesaur/Vikizvornik |
|
- jerteh/SrpELTeC |
|
language: |
|
- sr |
|
library_name: fasttext |
|
--- |
|
|
|
<table style="width:100%;height:100%"> |
|
<tr> |
|
<td colspan=2> |
|
<h4><i class="highlight-container"><b class="highlight">FastText Sr</b></i></h4> |
|
</td> |
|
</tr> |
|
<tr style="width:100%;height:100%"> |
|
<td width=50%> |
|
<p>Обучаван над корпусом српског језика - 9.5 милијарди речи</p> |
|
<p>Међу датотекама се налазе модели у Gensim, али и оригиналном формату</p> |
|
</td> |
|
<td> |
|
<p>Trained on a Serbian language corpus of 9.5 billion words.</p>

<p>The files include models in both the Gensim and the original fastText format.</p>
|
</td> |
|
</tr> |
|
</table> |
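
The usage snippets below assume the model files are already on disk. A minimal sketch for fetching them from the Hub first; the repository id here is a placeholder, and only `TeslaFT.bin` is taken from the examples below:

```python
from huggingface_hub import hf_hub_download

# Placeholder repository id -- replace with the actual id of this model repository
repo_id = "procesaur/fastText-sr"

# "TeslaFT.bin" is the original-format file used in the usage examples;
# the Gensim-format file name may differ in the actual repository
ft_bin_path = hf_hub_download(repo_id=repo_id, filename="TeslaFT.bin")
print(ft_bin_path)  # local path to the cached file
```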
|
|
|
|
|
```python
from gensim.models import FastText

# Load the Gensim-format model
model = FastText.load("TeslaFT")

examples = [
    ("dim", "zavesa"),
    ("staklo", "zavesa"),
    ("ormar", "zavesa"),
    ("prozor", "zavesa"),
    ("draperija", "zavesa")
]

for e in examples:
    print(model.wv.cosine_similarities(model.wv[e[0]], model.wv[[e[1]]])[0])
```
|
```
0.5305264
0.7095266
0.6041575
0.5771946
0.8870213
```
|
```python
from gensim.models.fasttext import load_facebook_model

# Load the model in the original (Facebook fastText) .bin format
model = load_facebook_model("TeslaFT.bin")

examples = [
    ("dim", "zavesa"),
    ("staklo", "zavesa"),
    ("ormar", "zavesa"),
    ("prozor", "zavesa"),
    ("draperija", "zavesa")
]

for e in examples:
    print(model.wv.cosine_similarities(model.wv[e[0]], model.wv[[e[1]]])[0])
```
|
```
0.5305264
0.7095266
0.6041575
0.5771946
0.8870213
```
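
Since fastText builds word vectors from character n-grams, the model can also embed words that are not in its vocabulary. A minimal sketch of such queries with the Gensim API; the example words, including the deliberately misspelled one, are illustrative:

```python
from gensim.models.fasttext import load_facebook_model

model = load_facebook_model("TeslaFT.bin")

# Nearest neighbours of an in-vocabulary word
print(model.wv.most_similar("zavesa", topn=5))

# A misspelled word is (most likely) out of the vocabulary...
oov = "zavessa"
print(oov in model.wv.key_to_index)

# ...but fastText still composes a vector for it from its character n-grams
print(model.wv.similarity("zavesa", oov))
```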
|
|
|
|
|
<div class="inline-flex flex-col" style="line-height: 1.5;padding-right:50px"> |
|
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800">Author</div> |
|
<a href="https://huggingface.co/procesaur"> |
|
<div class="flex"> |
|
<div |
|
style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; |
|
background-size: cover; background-image: url('https://cdn-uploads.huggingface.co/production/uploads/1673534533167-63bc254fb8c61b8aa496a39b.jpeg?w=200&h=200&f=face')"> |
|
</div> |
|
</div> |
|
</a> |
|
<div style="text-align: center; font-size: 16px; font-weight: 800">Mihailo Škorić</div> |
|
<div> |
|
<a href="https://huggingface.co/procesaur"> |
|
<div style="text-align: center; font-size: 14px;">@procesaur</div> |
|
</a> |
|
</div> |
|
</div> |
|
|
|
|
<div class="inline-flex flex-col" style="line-height: 1.5;"> |
|
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800">Computation</div> |
|
<a href="https://tesla.rgf.bg.ac.rs"> |
|
<div class="flex"> |
|
<div |
|
style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; |
|
background-size: cover; background-image: url(https://cdn-avatars.huggingface.co/v1/production/uploads/63bc254fb8c61b8aa496a39b/TfM_-sc8-b34ddfhHBGTA.png?w=200&h=200&f=face)"> |
|
</div> |
|
</div> |
|
</a> |
|
<div style="text-align: center; font-size: 16px; font-weight: 800">TESLA project</div> |
|
<div> |
|
<a href="https://huggingface.co/te-sla"> |
|
<div style="text-align: center; font-size: 14px;">@te-sla</div> |
|
</a> |
|
</div> |
|
</div> |
|
|
<br/> |
|
|
|
```bibtex |
|
@inproceedings{stankovic-dict2vec,
    author    = {Ranka Stanković and Jovana Rađenović and Mihailo Škorić and Marko Putniković},
    title     = {Learning Word Embeddings using Lexical Resources and Corpora},
    booktitle = {15th International Conference on Information Society and Technology, ISIST 2025, Kopaonik},
    year      = {2025},
    address   = {Kopaonik, Belgrade},
    publisher = {SASA, Belgrade},
    url       = {https://doi.org/10.5281/zenodo.15093900}
}
|
``` |
|
|
|
<div id="zastava"> |
|
<div class="grb"> |
|
<img src="https://www.ai.gov.rs/img/logo_60x120-2.png" style="position:relative; left:30px; z-index:10; height:85px"> |
|
</div> |
|
<table width=100% style="border:0px"> |
|
<tr style="background-color:#C6363C;width:100%;border:0px;height:30px"><td style="width:100vw"></td></tr> |
|
<tr style="background-color:#0C4076;width:100%;border:0px;height:30px"><td></td></tr> |
|
<tr style="background-color:#ffffff;width:100%;border:0px;height:30px"><td></td></tr> |
|
</table> |
|
</div> |
|
|
|
<table style="width:100%;height:100%"> |
|
<tr style="width:100%;height:100%"> |
|
<td width=50%> |
|
<p>Истраживање jе спроведено уз подршку Фонда за науку Републике Србиjе, #7276, Text Embeddings – Serbian Language Applications – TESLA</p> |
|
</td> |
|
<td> |
|
<p>This research was supported by the Science Fund of the Republic of Serbia, #7276, Text Embeddings - Serbian Language Applications - TESLA</p> |
|
</td> |
|
</tr> |
|
</table> |
|
|
|
|
|
|
|
<style> |
|
.ffeat {
|
color:red |
|
} |
|
|
|
.cover { |
|
width: 100%; |
|
margin-bottom: 5pt |
|
} |
|
|
|
.highlight-container, .highlight { |
|
position: relative; |
|
text-decoration:none |
|
} |
|
|
|
.highlight-container { |
|
display: inline-block; |
|
|
|
} |
|
|
|
.highlight{ |
|
color:white; |
|
text-transform:uppercase; |
|
font-size: 16pt; |
|
} |
|
|
|
.highlight-container{ |
|
padding:5px 10px |
|
} |
|
|
|
.highlight-container:before { |
|
content: " "; |
|
display: block; |
|
height: 100%; |
|
width: 100%; |
|
margin-left: 0px; |
|
margin-right: 0px; |
|
position: absolute; |
|
background: #e80909; |
|
transform: rotate(2deg); |
|
top: -1px; |
|
left: -1px; |
|
border-radius: 20% 25% 20% 24%; |
|
padding: 10px 18px 18px 10px; |
|
} |
|
|
|
div.grb, #zastava>table { |
|
position:absolute; |
|
top:0px; |
|
left: 0px; |
|
margin:0px |
|
} |
|
|
|
div.grb>img, #zastava>table{ |
|
margin:0px |
|
} |
|
|
|
#zastava { |
|
position: relative; |
|
margin-bottom:120px |
|
} |
|
|
|
p { |
|
font-size:14pt |
|
} |
|
</style> |