lfs added

This view is limited to 50 files because it contains too many changes.
- .gitattributes +16 -0
- __init__.py +0 -0
- app.py +70 -0
- models/__init__.py +0 -0
- models/adaboost_clf.report +27 -0
- models/adaboost_clf.sav +3 -0
- models/b_nb.report +27 -0
- models/b_nb.sav +3 -0
- models/count_vectorizer.sav +3 -0
- models/decision_tree.report +27 -0
- models/decision_tree.sav +3 -0
- models/flair-sentiment-classifier/best-model.pt +3 -0
- models/flair-sentiment-classifier/dev.tsv +0 -0
- models/flair-sentiment-classifier/final-model.pt +3 -0
- models/flair-sentiment-classifier/loss.tsv +36 -0
- models/flair-sentiment-classifier/test.tsv +1563 -0
- models/flair-sentiment-classifier/training.log +644 -0
- models/flair-sentiment-classifier/weights.txt +0 -0
- models/logistic_regression.report +27 -0
- models/logistic_regression.sav +3 -0
- models/m_nb.report +27 -0
- models/m_nb.sav +3 -0
- models/random_forest.report +27 -0
- models/random_forest.sav +3 -0
- models/setfit-classifier/1_Pooling/config.json +7 -0
- models/setfit-classifier/README.md +126 -0
- models/setfit-classifier/config.json +24 -0
- models/setfit-classifier/config_sentence_transformers.json +7 -0
- models/setfit-classifier/model_head.pkl +3 -0
- models/setfit-classifier/modules.json +14 -0
- models/setfit-classifier/pytorch_model.bin +3 -0
- models/setfit-classifier/sentence_bert_config.json +4 -0
- models/setfit-classifier/special_tokens_map.json +15 -0
- models/setfit-classifier/tokenizer.json +0 -0
- models/setfit-classifier/tokenizer_config.json +67 -0
- models/setfit-classifier/vocab.txt +0 -0
- models/spacy-classifier/model-best/config.cfg +121 -0
- models/spacy-classifier/model-best/meta.json +59 -0
- models/spacy-classifier/model-best/textcat/cfg +3 -0
- models/spacy-classifier/model-best/textcat/model +3 -0
- models/spacy-classifier/model-best/tokenizer +3 -0
- models/spacy-classifier/model-best/vocab/key2row +3 -0
- models/spacy-classifier/model-best/vocab/lookups.bin +3 -0
- models/spacy-classifier/model-best/vocab/strings.json +0 -0
- models/spacy-classifier/model-best/vocab/vectors +0 -0
- models/spacy-classifier/model-best/vocab/vectors.cfg +3 -0
- models/tfidf_vectorizer.sav +3 -0
- models/xgb_clf.report +27 -0
- models/xgb_clf.sav +3 -0
- paths.py +12 -0
.gitattributes
CHANGED
@@ -32,3 +32,19 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+models/adaboost_clf.sav filter=lfs diff=lfs merge=lfs -text
+models/m_nb.sav filter=lfs diff=lfs merge=lfs -text
+models/random_forest.sav filter=lfs diff=lfs merge=lfs -text
+models/tfidf_vectorizer.sav filter=lfs diff=lfs merge=lfs -text
+models/b_nb.sav filter=lfs diff=lfs merge=lfs -text
+models/count_vectorizer.sav filter=lfs diff=lfs merge=lfs -text
+models/decision_tree.sav filter=lfs diff=lfs merge=lfs -text
+models/logistic_regression.sav filter=lfs diff=lfs merge=lfs -text
+models/xgb_clf.sav filter=lfs diff=lfs merge=lfs -text
+models/flair-sentiment-classifier/final-model.pt filter=lfs diff=lfs merge=lfs -text
+models/flair-sentiment-classifier/best-model.pt filter=lfs diff=lfs merge=lfs -text
+models/setfit-classifier/pytorch_model.bin filter=lfs diff=lfs merge=lfs -text
+models/spacy-classifier/model-best/textcat/cfg filter=lfs diff=lfs merge=lfs -text
+models/spacy-classifier/model-best/textcat/model filter=lfs diff=lfs merge=lfs -text
+models/spacy-classifier/model-best/vocab/key2row filter=lfs diff=lfs merge=lfs -text
+models/setfit-classifier/model_head.pkl filter=lfs diff=lfs merge=lfs -text
__init__.py
ADDED
File without changes
app.py
ADDED
@@ -0,0 +1,70 @@
"""
@author : Sakshi Tatak
"""

# Imports
import pandas as pd
import streamlit as st

from predict_flair import SentimentClassifier as FlairSentimentClassifier
from predict_ml import predict as predict_ml
from predict_setfit import SentimentClassifier as SetFitSentimentClassifier
from predict_spacy import SentimentClassifier as SpacySentimentClassifier

st.set_page_config(layout = 'wide')
st.title('SetFit, Flair, SpaCy, Naive Bayes Sentiment Classifiers')

if 'flair_model' not in st.session_state:
    st.session_state['flair_model'] = None

if 'spacy_model' not in st.session_state:
    st.session_state['spacy_model'] = None

if 'setfit_model' not in st.session_state:
    st.session_state['setfit_model'] = None

if 'results' not in st.session_state:
    st.session_state['results'] = pd.DataFrame(columns = ['model', 'query', 'sentiment', 'confidence'])

def main():
    model_name = st.selectbox('Select Model', options = ['SetFit', 'Naive Bayes', 'Flair', 'SpaCy'])
    if model_name == 'SetFit':
        if st.session_state.setfit_model is None:
            with st.spinner('Loading SetFit classifier ...'):
                st.session_state.setfit_model = SetFitSentimentClassifier()
                st.success('SetFit classifier loaded successfully!')
        model = st.session_state.setfit_model

    if model_name == 'Flair':
        if st.session_state.flair_model is None:
            with st.spinner('Loading Flair classifier ...'):
                st.session_state.flair_model = FlairSentimentClassifier()
                st.success('Flair classifier loaded successfully!')
        model = st.session_state.flair_model

    if model_name == 'SpaCy':
        if st.session_state.spacy_model is None:
            with st.spinner('Loading SpaCy classifier'):
                st.session_state.spacy_model = SpacySentimentClassifier()
                st.success('Spacy classifier loaded successfully!')
        model = st.session_state.spacy_model

    text = st.text_area('Input text', value = 'This is insane haha!')

    if st.button('Compute sentiment'):
        if model_name != 'Naive Bayes':
            with st.spinner(f'Predicting with {model_name} ...'):
                sentiment, conf = model.predict(text)
        else:
            with st.spinner('Predicting with Naive Bayes ...'):
                sentiment, conf = predict_ml(text)

        st.success(sentiment + ', ' + str(conf))
        df = st.session_state.results
        df.loc[len(df)] = [model_name, text, sentiment, conf]
        st.table(df)


if __name__ == '__main__':
    main()
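Note: the four predict_* modules imported by app.py are not part of this commit, so their loader code is unknown; from the call sites, each wrapper must expose a predict(text) method returning a (sentiment, confidence) pair, and predict_ml.predict must return the same tuple. A minimal sketch of that assumed contract for the Flair wrapper, wired to the LFS-tracked checkpoint committed below (the module body and loading details are assumptions, not from the diff):

# predict_flair.py - hypothetical sketch; the real module is not in this diff.
from flair.data import Sentence
from flair.models import TextClassifier


class SentimentClassifier:
    def __init__(self, model_path = 'models/flair-sentiment-classifier/best-model.pt'):
        # Assumed: load the LFS-tracked Flair checkpoint committed in this change.
        self.model = TextClassifier.load(model_path)

    def predict(self, text):
        # app.py unpacks the return value as `sentiment, conf = model.predict(text)`.
        sentence = Sentence(text)
        self.model.predict(sentence)
        label = sentence.labels[0]
        return label.value, round(label.score, 4)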
models/__init__.py
ADDED
File without changes
models/adaboost_clf.report
ADDED
@@ -0,0 +1,27 @@
{
    "0": {
        "precision": 0.6629213483146067,
        "recall": 0.3333333333333333,
        "f1-score": 0.44360902255639095,
        "support": 177
    },
    "1": {
        "precision": 0.562962962962963,
        "recall": 0.8351648351648352,
        "f1-score": 0.672566371681416,
        "support": 182
    },
    "accuracy": 0.5877437325905293,
    "macro avg": {
        "precision": 0.6129421556387848,
        "recall": 0.5842490842490843,
        "f1-score": 0.5580876971189035,
        "support": 359
    },
    "weighted avg": {
        "precision": 0.6122460666043027,
        "recall": 0.5877437325905293,
        "f1-score": 0.5596821076281306,
        "support": 359
    }
}
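Each .report file in this commit has exactly the shape of scikit-learn's classification_report(..., output_dict=True) over a 359-example test split (177 label-0, 182 label-1). A plausible way such a file is produced (the label arrays below are stand-ins; the real split and predictions are not in the diff):

import json

from sklearn.metrics import classification_report

# Stand-in labels; in the real pipeline these would be the held-out test
# labels and the fitted AdaBoost classifier's predictions on that split.
y_test = [0, 0, 1, 1]
y_pred = [0, 1, 1, 1]

report = classification_report(y_test, y_pred, output_dict = True)
with open('models/adaboost_clf.report', 'w') as f:
    json.dump(report, f, indent = 4)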
models/adaboost_clf.sav
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:3b2316de374d560815947cb064d7501907410600e6eb85d278cb1e45e5793dd8
size 27677
models/b_nb.report
ADDED
@@ -0,0 +1,27 @@
{
    "0": {
        "precision": 0.7714285714285715,
        "recall": 0.6101694915254238,
        "f1-score": 0.6813880126182965,
        "support": 177
    },
    "1": {
        "precision": 0.684931506849315,
        "recall": 0.8241758241758241,
        "f1-score": 0.7481296758104738,
        "support": 182
    },
    "accuracy": 0.7186629526462396,
    "macro avg": {
        "precision": 0.7281800391389432,
        "recall": 0.7171726578506239,
        "f1-score": 0.7147588442143852,
        "support": 359
    },
    "weighted avg": {
        "precision": 0.7275776918925696,
        "recall": 0.7186629526462396,
        "f1-score": 0.7152236190277012,
        "support": 359
    }
}
models/b_nb.sav
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:dd1b99c12014f9d884a89b277656be79e8b388abdd730dd6f16612ea50728e7f
size 168235
models/count_vectorizer.sav
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:61d3872fa292150a5c4bd12a808effc15f89349046134a02f187fd2ce0b778f9
size 63275
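The .sav entries above are Git LFS pointer files (version, oid, size) matching the filter rules added to .gitattributes; the model bytes themselves live in LFS storage. In a checked-out Space they are presumably pickled scikit-learn objects — the .sav naming suggests it, but the saving code is not in this diff. A hedged loading sketch under that assumption:

import pickle

# Assumption: each .sav is a pickled scikit-learn object (estimator or vectorizer).
with open('models/count_vectorizer.sav', 'rb') as f:
    count_vectorizer = pickle.load(f)

with open('models/b_nb.sav', 'rb') as f:
    bernoulli_nb = pickle.load(f)

features = count_vectorizer.transform(['This is insane haha!'])
print(bernoulli_nb.predict(features))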
models/decision_tree.report
ADDED
@@ -0,0 +1,27 @@
{
    "0": {
        "precision": 0.6329113924050633,
        "recall": 0.5649717514124294,
        "f1-score": 0.5970149253731344,
        "support": 177
    },
    "1": {
        "precision": 0.6169154228855721,
        "recall": 0.6813186813186813,
        "f1-score": 0.6475195822454307,
        "support": 182
    },
    "accuracy": 0.6239554317548747,
    "macro avg": {
        "precision": 0.6249134076453178,
        "recall": 0.6231452163655553,
        "f1-score": 0.6222672538092826,
        "support": 359
    },
    "weighted avg": {
        "precision": 0.6248020150999173,
        "recall": 0.6239554317548747,
        "f1-score": 0.6226189575479476,
        "support": 359
    }
}
models/decision_tree.sav
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:b36c894e4b5994b313cef8a0c4ec2ce720d6277d891e7b9f46ddac6a685d1c38
size 59172
models/flair-sentiment-classifier/best-model.pt
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:9be35bc3d4459a9527a4a1ea8d1fc206d6a823fe50c8fffbb23ba7dc5013701f
size 25499436
models/flair-sentiment-classifier/dev.tsv
ADDED
The diff for this file is too large to render.
models/flair-sentiment-classifier/final-model.pt
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:8f04ab1e00b8c811cf946c3702c12a055e27be5e037a376b9a5f0e85ff7f2b1b
size 25499447
models/flair-sentiment-classifier/loss.tsv
ADDED
@@ -0,0 +1,36 @@
EPOCH	TIMESTAMP	BAD_EPOCHS	LEARNING_RATE	TRAIN_LOSS	DEV_LOSS	DEV_PRECISION	DEV_RECALL	DEV_F1	DEV_ACCURACY
1	08:29:02	0	0.0100	0.01253135909052456	0.01149754598736763	0.5393	0.5393	0.5393	0.5393
2	08:29:20	0	0.0100	0.011174857321907492	0.011094754561781883	0.586	0.586	0.586	0.586
3	08:29:38	1	0.0100	0.010936222427031574	0.010945815593004227	0.5833	0.5833	0.5833	0.5833
4	08:29:55	0	0.0100	0.010812628795118892	0.010844088159501553	0.5867	0.5867	0.5867	0.5867
5	08:30:12	0	0.0100	0.010711871532832875	0.010764073580503464	0.59	0.59	0.59	0.59
6	08:30:29	0	0.0100	0.010672345210524166	0.010709869675338268	0.5953	0.5953	0.5953	0.5953
7	08:30:46	0	0.0100	0.010636886722901287	0.010663843713700771	0.604	0.604	0.604	0.604
8	08:31:04	1	0.0100	0.010562960512497846	0.010619796812534332	0.602	0.602	0.602	0.602
9	08:31:21	0	0.0100	0.010548884174403023	0.010611701756715775	0.6127	0.6127	0.6127	0.6127
10	08:31:38	1	0.0100	0.010505827714415158	0.010555021464824677	0.612	0.612	0.612	0.612
11	08:31:55	0	0.0100	0.010489398612695582	0.010583124123513699	0.618	0.618	0.618	0.618
12	08:32:12	0	0.0100	0.010428840700317831	0.010490193963050842	0.6233	0.6233	0.6233	0.6233
13	08:32:30	1	0.0100	0.010391102524364695	0.010483094491064548	0.6207	0.6207	0.6207	0.6207
14	08:32:47	0	0.0100	0.010375254006946787	0.010432829149067402	0.626	0.626	0.626	0.626
15	08:33:05	1	0.0100	0.010306658197851742	0.010397534817457199	0.6247	0.6247	0.6247	0.6247
16	08:33:22	2	0.0100	0.010284092398250804	0.010406638495624065	0.6253	0.6253	0.6253	0.6253
17	08:33:38	0	0.0100	0.010297371801208047	0.01034807600080967	0.63	0.63	0.63	0.63
18	08:33:55	0	0.0100	0.010228792071342469	0.010320308618247509	0.6313	0.6313	0.6313	0.6313
19	08:34:13	0	0.0100	0.010214742527288549	0.010296817868947983	0.64	0.64	0.64	0.64
20	08:34:30	0	0.0100	0.010179497108739966	0.010265583172440529	0.6413	0.6413	0.6413	0.6413
21	08:34:48	1	0.0100	0.010137383573195513	0.010291438549757004	0.6373	0.6373	0.6373	0.6373
22	08:35:05	0	0.0100	0.010133044719696045	0.010224188677966595	0.6413	0.6413	0.6413	0.6413
23	08:35:22	0	0.0100	0.010098109722137451	0.010190014727413654	0.644	0.644	0.644	0.644
24	08:35:39	0	0.0100	0.010065501472529242	0.010158772580325603	0.6487	0.6487	0.6487	0.6487
25	08:35:56	0	0.0100	0.010004523277282715	0.010137598030269146	0.6507	0.6507	0.6507	0.6507
26	08:36:13	1	0.0100	0.00997911853650037	0.010154918767511845	0.6453	0.6453	0.6453	0.6453
27	08:36:31	0	0.0100	0.009967831822002636	0.010085121728479862	0.6567	0.6567	0.6567	0.6567
28	08:36:48	0	0.0100	0.009977477255989523	0.010063917376101017	0.6567	0.6567	0.6567	0.6567
29	08:37:05	1	0.0100	0.009934876932817346	0.010063448920845985	0.6533	0.6533	0.6533	0.6533
30	08:37:22	0	0.0100	0.009909990450915169	0.010032457299530506	0.6627	0.6627	0.6627	0.6627
31	08:37:39	1	0.0100	0.00987828400555779	0.010030188597738743	0.6507	0.6507	0.6507	0.6507
32	08:37:56	2	0.0100	0.009848919868469239	0.009983059018850327	0.662	0.662	0.662	0.662
33	08:38:13	3	0.0100	0.009884989871698268	0.009982590563595295	0.66	0.66	0.66	0.66
34	08:38:30	0	0.0100	0.009801310083445381	0.009954135864973068	0.6647	0.6647	0.6647	0.6647
35	08:38:47	1	0.0100	0.009773709009675419	0.009962853975594044	0.658	0.658	0.658	0.658
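loss.tsv is Flair's per-epoch training log; dev accuracy climbs from 0.5393 at epoch 1 to a peak of 0.6647 at epoch 34. Since the file is tab-separated, it loads directly with pandas (this inspection snippet is an addition, not part of the commit):

import pandas as pd

# Flair writes loss.tsv tab-separated, one row per training epoch.
log = pd.read_csv('models/flair-sentiment-classifier/loss.tsv', sep = '\t')
print(log[['EPOCH', 'TRAIN_LOSS', 'DEV_LOSS', 'DEV_ACCURACY']].tail())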
models/flair-sentiment-classifier/test.tsv
ADDED
@@ -0,0 +1,1563 @@
i loooooooovvvvvveee my kindle2. not that the dx is cool, but the 2 is fantastic in its own right.
- Gold: 1
- Pred: 1

reading my kindle2. love it. lee childs is good read.
- Gold: 1
- Pred: 1

ok, first assesment of the #kindle2 .it fucking rocks!
- Gold: 1
- Pred: 1

you will love your kindle2. i have had mine for a few months and never looked back. the new big one is huge! no need for remorse! :)
- Gold: 1
- Pred: 0
-> MISMATCH!

fair enough. but i have the kindle2 and i think it is perfect :)
- Gold: 1
- Pred: 1

no. it is too big. i am quite happy with the kindle2.
- Gold: 1
- Pred: 1

fuck this economy. i hate aig and their non loan given asses.
- Gold: 0
- Pred: 0

jquery is my new best friend.
- Gold: 1
- Pred: 1

loves twitter
- Gold: 1
- Pred: 1

how can you not love obama! he makes jokes about himself.
- Gold: 1
- Pred: 1

i firmly believe that obama/pelosi have zero desire to be civil. it is a charade and a slogan, but they want to destroy conservatism
- Gold: 0
- Pred: 1
-> MISMATCH!

house correspondents dinner was last night whoopi, barbara & sherri went, obama got a standing ovation
- Gold: 1
- Pred: 1

watchin espn.jus seen this new merical with a puppet lebron.sh*t was hilarious.lmao!
- Gold: 1
- Pred: 1

dear nike, stop with the flywire. that shit is a waste of science. and ugly. love,
- Gold: 0
- Pred: 1
-> MISMATCH!

#lebron best athlete of our generation, if not all time (basketball related) i don not want to get into inter-sport debates about __1/2
- Gold: 1
- Pred: 0
-> MISMATCH!

i was talking to this guy last night and he was telling me that he is a die hard spurs fan. he also told me that he hates lebron james.
- Gold: 0
- Pred: 0

i love lebron.
- Gold: 1
- Pred: 1

lebron is a beast, but i am still cheering 4 the a.til the end.
- Gold: 0
- Pred: 0

lebron is the boss
- Gold: 1
- Pred: 1

lebron is a hometown hero to me, lol i love the lakers but let is go cavs, lol
- Gold: 1
- Pred: 1

lebron and zydrunas are such an awesome duo
- Gold: 1
- Pred: 1

lebron is a beast. nobody in the es even close.
- Gold: 1
- Pred: 0
-> MISMATCH!

downloading apps for my iphone! so much fun :-) there literally is an app for just about anything.
- Gold: 1
- Pred: 0
-> MISMATCH!

good news, just had a call from the visa office, saying everything is fine.what a relief! i am sick of scams out there! stealing!
- Gold: 1
- Pred: 0
-> MISMATCH!

- e back from (via )
- Gold: 1
- Pred: 1

in montreal for a long weekend of r&r. much needed.
- Gold: 1
- Pred: 0
-> MISMATCH!

booz allen hamilton has a bad ass homegrown social collaboration platform. way cool! #ttiv
- Gold: 1
- Pred: 1

[#mluc09] customer innovation award winner: booz allen hamilton --
- Gold: 1
- Pred: 1

i current use the nikon d90 and love it, but not as much as the canon 40d/50d. i chose the d90 for the video feature. my mistake.
- Gold: 1
- Pred: 1

google is always a good place to look. should have mentioned i worked on the mustang w/ my dad, .
- Gold: 1
- Pred: 1

played with an android google phone. the slide out screen scares me i would break that fucker so fast. still prefer my iphone.
- Gold: 0
- Pred: 0

us planning to resume the military tribunals at guantanamo bay. only this time those on trial will be aig execs and chrysler debt holders
- Gold: 0
- Pred: 0

omg so bored & my tattoooos are so itchy! help! aha =)
- Gold: 0
- Pred: 0

i am itchy and miserable!
- Gold: 0
- Pred: 1
-> MISMATCH!

no. i am not itchy for now. maybe later, lol.
- Gold: 0
- Pred: 0

rt i love the nerdy stanford human biology videos - makes me miss school.
- Gold: 1
- Pred: 1

: has been a bit crazy, with steep learning curve, but lyx is really good for long docs. for anything shorter, it would be insane.
- Gold: 1
- Pred: 0
-> MISMATCH!

i am listening to "p.y.t" by danny gokey <3 <3 <3 aww, he is so amazing. i <3 him so much :)
- Gold: 1
- Pred: 1

is going to sleep then on a bike ride:]
- Gold: 1
- Pred: 0
-> MISMATCH!

cant sleep. my tooth is aching.
- Gold: 0
- Pred: 0

blah, blah, blah same old same old. no plans today, going back to sleep i guess.
- Gold: 0
- Pred: 0

glad i didnt do bay to breakers today, it is 1000 freaking degrees in san francisco wtf
- Gold: 0
- Pred: 0

!obama administration must stop bonuses to aig ponzi schemers .
- Gold: 0
- Pred: 0

started to think that citi is in really deep s&^t. are they gonna survive the turmoil or are they gonna be the next aig!
- Gold: 0
- Pred: 1
-> MISMATCH!

shaunwoo hate'n on aig
- Gold: 0
- Pred: 1
-> MISMATCH!

you will not regret going to see star trek. it was awesome!
- Gold: 1
- Pred: 0
-> MISMATCH!

annoying new trend on the internets: people picking apart michael lewis and malcolm gladwell. nobody wants to read that.
- Gold: 0
- Pred: 1
-> MISMATCH!

highly mend: by malcolm gladwell
- Gold: 1
- Pred: 1

blink by malcolm gladwell amazing book and the tipping point!
- Gold: 1
- Pred: 1

malcolm gladwell might be my new man crush
- Gold: 1
- Pred: 1

omg. mercials alone on espn are going to drive me nuts.
- Gold: 0
- Pred: 0

playing with twitter api sounds fun. may need to take a class or find a new friend who like to generate results with api code.
- Gold: 1
- Pred: 0
-> MISMATCH!

hello twitter api ;)
- Gold: 1
- Pred: 1

because the twitter api is slow and most client is aren not good.
- Gold: 0
- Pred: 0

yahoo answers can be a butt sometimes
- Gold: 0
- Pred: 0

is scrapbooking with nic =d
- Gold: 1
- Pred: 1

rt : five things wolfram alpha does better (and vastly different) than google -
- Gold: 1
- Pred: 1

just changed my default pic to a nike basketball cause bball is awesome!
- Gold: 1
- Pred: 1

rt : was just told that nike layoffs started today :-(
- Gold: 0
- Pred: 0

back when i worked for nike we had one fav word : just do it! :)
- Gold: 1
- Pred: 0
-> MISMATCH!

by the way, i am totally inspired by this freaky mercial:
- Gold: 1
- Pred: 1

class. the 50d is supposed e today :)
- Gold: 1
- Pred: 1

needs someone to explain lambda calculus to him! :(
- Gold: 0
- Pred: 1
-> MISMATCH!

took the graduate field exam puter science today. nothing makes you feel like more of an idiot than lambda calculus.
- Gold: 0
- Pred: 0

shout outs to all east palo alto for being in the buildin karizmakaze 50cal gta! also thanks to profits of doom universal hempz cracka.
- Gold: 1
- Pred: 0
-> MISMATCH!

yeahhhhhhhhh, i wouldn not really have lived in east palo alto if i could have avoided it. i guess it is only for the summer.
- Gold: 0
- Pred: 0

great stanford course. thanks for making it available to the public! really helpful and informative for starting off!
- Gold: 1
- Pred: 1

@ work til 6pm. lets go lakers!
- Gold: 1
- Pred: 1

damn you north korea.
- Gold: 0
- Pred: 1
-> MISMATCH!

can we just go ahead and blow north korea off the map already!
- Gold: 0
- Pred: 0

north korea, please cease this douchebaggery. china doesn not even like you anymore.
- Gold: 0
- Pred: 1
-> MISMATCH!

why the hell is pelosi in freakin china! and on whose dime!
- Gold: 0
- Pred: 1
-> MISMATCH!

are you burning more cash $$$ than chrysler and gm! stop the financial tsunami. where "bailout" means taking a handout!
- Gold: 0
- Pred: 1
-> MISMATCH!

insects have infected my spinach plant :(
- Gold: 0
- Pred: 0

wish i could catch every mosquito in the world n burn em slowly.they been bitin the shit outta me 2day.mosquitos are the assholes of insects
- Gold: 0
- Pred: 1
-> MISMATCH!

just got back from church, and i totally hate insects.
- Gold: 0
- Pred: 0

just got mcdonalds goddam those eggs make me sick. o yeah laker up date go lakers. not much of an update! well it is true so suck it
- Gold: 0
- Pred: 0

omgg i ohhdee want mcdonalds damn i wonder if its open lol =]
- Gold: 1
- Pred: 0
-> MISMATCH!

history exam studying ugh
- Gold: 0
- Pred: 1
-> MISMATCH!

i hate revision, it is so boring! i am totally unprepared for my exam tomorrow :( things are not looking good.
- Gold: 0
- Pred: 0

higher physics exam tommorow, not lookin forward to it much :(
- Gold: 0
- Pred: 1
-> MISMATCH!

it is a bank holiday, yet i am only out of work now. exam season sucks:(
- Gold: 0
- Pred: 1
-> MISMATCH!

cheney and bush are the real culprits -
- Gold: 0
- Pred: 1
-> MISMATCH!

life!s a bitch! and so is dick cheney. #p2 #bipart #tlot #tcot #hhrs #gop #dnc
- Gold: 0
- Pred: 1
-> MISMATCH!

dick cheney is dishonest speech about torture, terror, and obama. -fred kaplan slate.
- Gold: 0
- Pred: 0

"the republican party is a bunch of anti-abortion zealots who couldn not draw flies to a dump." -- neal boortz (just now, on the radio)
- Gold: 0
- Pred: 0

is twitter is connections api broken! some tweets didn not make it to twitter.
- Gold: 0
- Pred: 0

i srsly hate the stupid twitter api timeout thing, soooo annoying! :(
- Gold: 0
- Pred: 1
-> MISMATCH!

i really liked is "learning jquery" book. is worth a look too
- Gold: 1
- Pred: 1

very interesting ad from adobe by goodby, silverstein & partners - youtube - adobe cs4: le sens propre
- Gold: 1
- Pred: 1

goodby silverstein agency new site! / great!
- Gold: 1
- Pred: 1

rt goodby, silverstein is new site: / i enjoy it. *nice find!*
- Gold: 1
- Pred: 1

the ever amazing psyop and goodby silverstein & partners for hp! have to go play with after effects now!
- Gold: 1
- Pred: 1

top ten most watched on viral-video chart. love the nike #mostvaluablepuppets campaign from wieden & kennedy
- Gold: 1
- Pred: 1

zomg! i have a g2!
- Gold: 1
- Pred: 1

ok so lots of buzz from io2009 but how lucky are they - a free g2!
- Gold: 1
- Pred: 1

just got a free g2 android at google i/o!
- Gold: 1
- Pred: 0
-> MISMATCH!

guess i will be retiring my g1 and start using my developer g2 woot #googleio
- Gold: 1
- Pred: 1

i am happy for philip being at googleio today
- Gold: 1
- Pred: 1

lakers played great! cannot wait for thursday night lakers vs. !
- Gold: 1
- Pred: 1

judd apatow creates fake on to market his new movie. viral marketing at its best.
- Gold: 1
- Pred: 1

viral marketing fail. this acia pills brand oughta get shut down for hacking into people is messenger is. i get 5-6 msgs in a day! arrrgh!
- Gold: 0
- Pred: 1
-> MISMATCH!

watching night at the museum . lmao
- Gold: 1
- Pred: 1

i loved night at the museum!
- Gold: 1
- Pred: 1

just got back from the movies. went to see the new night at the museum with rachel. it was good
- Gold: 1
- Pred: 1

i will take you on a date to see night at the museum 2 whenever you want.it looks soooooo good
- Gold: 1
- Pred: 1

no watching the night at the museum. getting really good
- Gold: 1
- Pred: 1

night at the museum, wolverine and junk food - perfect monday!
- Gold: 1
- Pred: 1

saw night at the museum 2 last night. pretty crazy movie. but the cast was awesome so it was well worth it. robin williams forever!
- Gold: 1
- Pred: 1

night at the museum tonite instead of up. :( oh well. that 4 yr old better enjoy it. lol
- Gold: 0
- Pred: 1
-> MISMATCH!

it is unfortunate that after the stimulus plan was put in place twice to help gm on the back of the american people has led to the inevitable
- Gold: 0
- Pred: 0

tell me again why we are giving more $$ to gm! we should use that $ for all the programs that support the unemployed.
- Gold: 0
- Pred: 1
-> MISMATCH!

oh yes but if gm dies it will only be worth more boo hahaha
- Gold: 0
- Pred: 0

time warner cable is down again 3rd time since memorial day bummer!
- Gold: 0
- Pred: 0

i would rather pay reasonable yearly taxes for "free" fast internet, than get gouged by time warner for a slow connection.
- Gold: 0
- Pred: 0

nooooooo my dvr just died and i was only half way through the ea presser. hate you time warner
- Gold: 0
- Pred: 0

f*ck time warner cable! you f*cking suck balls! i have a $700 hd tv & my damn hd channels hardly e in. bullshit!
- Gold: 0
- Pred: 1
-> MISMATCH!

time warner has the worse customer service ever. i will never use them again
- Gold: 0
- Pred: 0

time warner is the devil. worst possible time for the internet to go out.
- Gold: 0
- Pred: 0

fuck no internet damn time warner!
- Gold: 0
- Pred: 1
-> MISMATCH!

time warner really picks the worst time to not work. all i want to do is get to so i can watch the hills. wtfffff.
- Gold: 0
- Pred: 1
-> MISMATCH!

i hate time warner! soooo wish i had vios. cant watch the fricken mets game w/o buffering. i feel like im watching free internet porn.
- Gold: 0
- Pred: 1
-> MISMATCH!

ahh.got rid of stupid time warner today & now taking a nap while the roomies cook for me. pretty good end for a monday :)
- Gold: 0
- Pred: 1
-> MISMATCH!

time warner is hd line up is crap.
- Gold: 0
- Pred: 0

is being fucked by time warner cable. didnt know modems could explode. and susan boyle sucks too!
- Gold: 0
- Pred: 1
-> MISMATCH!

time warner cable slogan: where calling it a day at 2pm happens.
- Gold: 0
- Pred: 1
-> MISMATCH!

recovering from surgery.wishing was here :(
- Gold: 0
- Pred: 0

my wrist still hurts. i have to get it looked at. i hate the dr/dentist/scary places. :( time to watch eagle eye. if you want to join, txt!
- Gold: 1
- Pred: 1

the dentist lied! " u won not feel any ort! prob won not even need pain pills" man u twippin this shit hurt! how many pills can i take!
- Gold: 0
- Pred: 0

my dentist is great but she is expensive.=(
- Gold: 0
- Pred: 0

is studing math ;) tomorrow exam and dentist :)
- Gold: 1
- Pred: 1

my dentist was wrong. wrong
- Gold: 0
- Pred: 0

going to the dentist later.:|
- Gold: 0
- Pred: 1
-> MISMATCH!

son has me looking at cars online. i hate car shopping. would rather go to the dentist! anyone with a good car at a good price to sell!
- Gold: 0
- Pred: 0

luke and i got stopped walking out of safeway and asked to empty our pockets and lift our shirts. how jacked up is that!
- Gold: 0
- Pred: 0

safeway is very rock n roll tonight
- Gold: 1
- Pred: 1

the safeway bathroom still smells like ass!
- Gold: 0
- Pred: 0

at safeway on elkhorn, they move like they are dead!
- Gold: 0
- Pred: 0

i love dwight howard is vitamin mercial. now i wish he was with nike and not adidas. lol.
- Gold: 1
- Pred: 0
-> MISMATCH!

found nothing at nike factory :/ off to banana republic outlet!
- Gold: 0
- Pred: 1
-> MISMATCH!

is lovin his nike already and that is only from running on the spot in his bedroom
- Gold: 1
- Pred: 0
-> MISMATCH!

i finally got around to using jquery to make my bio collapse. yay for slide animations.
- Gold: 1
- Pred: 0
-> MISMATCH!

right! lol we will get there! i have high expectations, warren buffet style.
- Gold: 1
- Pred: 1

rt : rt great "someone is sitting in the shade today because someone planted a tree a long time ago."- warren buffet
- Gold: 1
- Pred: 0
-> MISMATCH!

warren buffet became (for a time) the richest man in the united states, not by working but investing in 1 big idea which lead to the fortune
- Gold: 1
- Pred: 1

according to the create a school, notre dame will have 7 receivers in ncaa 10 at 84 or higher rating :) *sweet*
- Gold: 1
- Pred: 1

it is definitely under warranty & my experience is the amazon support for kindle is great! had to contact them about my kindle2
- Gold: 1
- Pred: 0
-> MISMATCH!

time warner road runner customer support here absolutely blows. i hate not having other high-speed net options. i am ready to go nuclear.
- Gold: 0
- Pred: 0

time warner cable phone reps r dumber than nails! ugh! cable was working 10 mins ago now its not wtf!
- Gold: 0
- Pred: 1
-> MISMATCH!

we tried but time warner wasn not being nice so we recorded today. :)
- Gold: 0
- Pred: 0

omg - time warner f'ed up my internet install - instead of today its now next saturday - another week w/o internet! &$*ehfa^v9fhg[*# fml.
- Gold: 0
- Pred: 1
-> MISMATCH!

wth.i have never seen a line this loooong at time warner before, ugh.
- Gold: 0
- Pred: 0

impatiently awaiting the arrival of the time warner guy. it is way too pretty to be inside all afternoon
- Gold: 0
- Pred: 0

naive bayes using em for text classification. really frustrating.
- Gold: 0
- Pred: 1
-> MISMATCH!

we went to stanford university today. got a tour. made me want to go back to college. it is also decided all of our kids will go there.
- Gold: 1
- Pred: 0
-> MISMATCH!

if you are being harassed by calls about your car warranty, changing your number won not fix that. they call every number. #d-bags
- Gold: 0
- Pred: 0

just blocked united blood services using google voice. they call more than those car warranty guys.
- Gold: 0
- Pred: 0

#at&t plete fail.
- Gold: 0
- Pred: 1
-> MISMATCH!

oh snap you work at at&t don not you
- Gold: 0
- Pred: 1
-> MISMATCH!

i really dont want at&t phone service.they suck when es to having a signal
- Gold: 0
- Pred: 0

i say we just cut out the small talk: at&t is new slogan: f__k you, give us your money. (apologies to bob geldof.)
- Gold: 0
- Pred: 0

pissed about at&t is mid-contract upgrade price for the iphone (it is $200 more) i am not going to pay $499 for something i thought was $299
- Gold: 0
- Pred: 0

safari 4 is fast :) even on my shitty at&t tethering.
- Gold: 0
- Pred: 1
-> MISMATCH!

what is at&t fucking up!
- Gold: 0
- Pred: 1
-> MISMATCH!

at&t dropped the ball and isn not supporting crap with the new iphone 3.0. fail #att sucks!
- Gold: 0
- Pred: 1
-> MISMATCH!

yay, glad you got the phone! still, damn you, at&t.
- Gold: 0
- Pred: 1
-> MISMATCH!

talk is cheap: bing that, i!ll stick with google.
- Gold: 0
- Pred: 1
-> MISMATCH!

wtf is the point of deleting tweets if they can still be found in summize and searches! twitter, please fix that. thanks and bye
- Gold: 0
- Pred: 1
-> MISMATCH!

i love google translator too ! :d good day mate !
- Gold: 1
- Pred: 1

reading on my new kindle2!
- Gold: 1
- Pred: 1

my kindle2 came and i love it! :)
- Gold: 1
- Pred: 0
-> MISMATCH!

loving my new kindle2. named her kendra in case u were wondering. the "cookbook" is the tool cuz it tells u all the tricks! best gift evr!
- Gold: 1
- Pred: 1

the real aig scandal /
- Gold: 0
- Pred: 1
-> MISMATCH!

obama is quite a edian! check out his dinner speech on cnn :) very funny jokes.
- Gold: 1
- Pred: 1

' barack obama shows his funny side " >> ! great speech.
- Gold: 1
- Pred: 1

i like this guy : ' barack obama shows his funny side " >> !
- Gold: 1
- Pred: 1

obama is speech was pretty awesome last night!
- Gold: 1
- Pred: 1

reading "bill clinton fail - obama win!"
- Gold: 1
- Pred: 1

obama more popular than u.s. among arabs: survey: president barack obama is popularity in leading arab countries .
- Gold: 1
- Pred: 1

obama is got jokes! haha just got to watch a bit of his after dinner speech from last night. i am in love with mr. president ;)
- Gold: 1
- Pred: 1

lebron james got in a car accident i guess.just heard it on evening news.wow i cant believe it.will he be ok !
- Gold: 0
- Pred: 0

is it me or is this the best the playoffs have been in years oh yea lebron and melo in the finals
- Gold: 1
- Pred: 1

no, lebron is the best
- Gold: 1
- Pred: 1

lebron is cool. i like his personality.he has good character.
- Gold: 1
- Pred: 1

watching lebron highlights. damn that niggas good
- Gold: 1
- Pred: 1

lebron is murdering shit.
- Gold: 1
- Pred: 0
-> MISMATCH!

lebron is a monsta and he is only 24. smh the world ain not ready.
- Gold: 1
- Pred: 0
-> MISMATCH!

when lebron is done in the nba he will probably be greater than kobe. like u said kobe is good but there alot of 'good' players.
- Gold: 1
- Pred: 0
-> MISMATCH!

kobe is good bt lebron has my vote
- Gold: 1
- Pred: 0
-> MISMATCH!

kobe is the best in the world not lebron .
- Gold: 0
- Pred: 0

world cup 2010 access! damn, that is a good look!
- Gold: 1
- Pred: 1

just bought my tickets for the 2010 fifa world cup in south africa. its going to be a great summer.
- Gold: 1
- Pred: 1

i have to go to booz allen hamilton for a 2hr meeting :( but then i get to go home :)
- Gold: 0
- Pred: 0

the great indian tamasha truly will unfold from may 16, the result day for indian general election.
- Gold: 1
- Pred: 1

i have the kindle2. i have seen pictures of the dx, but haven not seen it in person. i love my kindle - i am on it everyday.
- Gold: 1
- Pred: 0
-> MISMATCH!

such an awesome idea - the continual learning program with a kindle2
- Gold: 1
- Pred: 1

ooooh, what model are you getting! i have the 40d and love love love love it!
- Gold: 1
- Pred: 1

the times of india: the wonder that is india is election.
- Gold: 1
- Pred: 1

good video from google on using search options.
- Gold: 1
- Pred: 1

lol. ah my skin is itchy :( damn lawnmowing.
- Gold: 0
- Pred: 0

itchy back! dont ya hate it!
- Gold: 0
- Pred: 1
-> MISMATCH!

stanford charity fashion show a top draw
- Gold: 1
- Pred: 1

stanford university!s facebook profile is one of the most popular official university pages -
- Gold: 1
- Pred: 1

lyx is cool.
- Gold: 1
- Pred: 1

sooo dissapointed they sent danny gokey home. you still rock .danny . my hometown hero ! yeah milrockee!
- Gold: 1
- Pred: 1

rt 'american idol' fashion: adam lambert tones down, danny gokey cute .
- Gold: 1
- Pred: 1

i love you danny gokey! :)
- Gold: 1
- Pred: 1

so tired. i didn not sleep well at all last night.
- Gold: 0
- Pred: 0

boarding plane for san francisco in 1 hour; 6 hr flight. blech.
- Gold: 0
- Pred: 0

bonjour san francisco. my back hurts from last night.
- Gold: 0
- Pred: 0

with my best girl for a few more hours in san francisco. mmmmmfamily is wonderful!
- Gold: 1
- Pred: 1

f*** up big, or go home - aig
- Gold: 0
- Pred: 1
-> MISMATCH!

went to see the star trek movie last night. very satisfying.
- Gold: 1
- Pred: 1

i can not wait, going to see star trek tonight!
- Gold: 1
- Pred: 0
-> MISMATCH!

star trek was as good as everyone said!
- Gold: 1
- Pred: 0
-> MISMATCH!

am loving new malcolm gladwell book - outliers
- Gold: 1
- Pred: 1

i highly mend malcolm gladwell is 'the tipping point.' my next audiobook will probably be one of his as well.
- Gold: 1
- Pred: 1

malcolm gladwell is a genius at tricking people into not realizing he is a fucking idiot
- Gold: 0
- Pred: 0

hey no offense but malcolm gladwell is a pretenious, annoying cunt and he brings you down. cant read his shit
- Gold: 0
- Pred: 1
-> MISMATCH!

rt : great article by malcolm gladwell.
- Gold: 1
- Pred: 1

i seriously underestimated malcolm gladwell. i want to meet this dude.
- Gold: 1
- Pred: 0
-> MISMATCH!

i cast right now. everything is down cable internet & phone.ughh what am i to do
- Gold: 0
- Pred: 1
-> MISMATCH!

comcast sucks.
- Gold: 0
- Pred: 1
-> MISMATCH!

the day i never have to deal cast again will rank as one of the best days of my life.
- Gold: 0
- Pred: 0

cast fail again!
- Gold: 0
- Pred: 1
-> MISMATCH!

curses the twitter api limit
- Gold: 0
- Pred: 1
-> MISMATCH!

now i can see why dave winer screams about lack of twitter api, its limitations and access throttles!
- Gold: 0
- Pred: 1
-> MISMATCH!

arg. twitter api is making me crazy.
- Gold: 0
- Pred: 1
-> MISMATCH!

i am really loving the new search site wolfram/alpha. makes google seem so . quaint. /
- Gold: 1
- Pred: 1

#wolfram alpha sucks! even for researchers the information provided is less than you can get from #google or #wikipedia, totally useless!
- Gold: 0
- Pred: 1
-> MISMATCH!

off to the nike factory!
- Gold: 1
- Pred: 1

new nike mercials are pretty cute. why do we live together again!
- Gold: 1
- Pred: 1

oh those are awesome! i so wish they weren not owned by nike :(
- Gold: 0
- Pred: 0

- awesome! seeing the show friday at the shoreline amphitheatre. never seen nin before. can not wait. .
- Gold: 1
- Pred: 0
-> MISMATCH!

arhh, it is weka bug. = =" and i spent almost two hours to find that out. crappy me
- Gold: 0
- Pred: 0

hey bud :) np i do so love my 50d, although i would love a 5d mkii more
- Gold: 1
- Pred: 1

just got us a 50d for the office. :d
- Gold: 1
- Pred: 1

just picked up my new canon 50d.it is beautiful! prepare for some seriously awesome photography!
- Gold: 1
- Pred: 1

just got my new toy. canon 50d. love love love it!
- Gold: 1
- Pred: 1

learning about lambda calculus :)
- Gold: 1
- Pred: 1

i am moving to east palo alto!
- Gold: 1
- Pred: 0
-> MISMATCH!

@ atebits i just finished watching your stanford iphone class session. i really appreciate it. you rock!
- Gold: 1
- Pred: 1

hi! just saw your stanford talk and really liked your advice. just saying hi from singapore (yes the videos do get around)
- Gold: 1
- Pred: 0
-> MISMATCH!

lakers tonight let is go!
- Gold: 1
- Pred: 0
-> MISMATCH!

will the lakers kick the nuggets ass tonight!
- Gold: 1
- Pred: 1

oooooooh. north korea is in troubleeeee!
- Gold: 0
- Pred: 1
-> MISMATCH!

wat the heck is north korea doing!!! they just conducted powerful nuclear tests! follow the link:
- Gold: 0
- Pred: 1
-> MISMATCH!

listening to obama. friggin north korea.
- Gold: 0
- Pred: 1
-> MISMATCH!

i just realized we three monkeys in the white obama.biden,pelosi . sarah palin 2012
- Gold: 0
- Pred: 0

pelosi should stay in china and e back.
- Gold: 0
- Pred: 0

nancy pelosi gave the mencement speech i have ever heard. yes i am still bitter about this
- Gold: 0
- Pred: 0

ugh. the amount of times these stupid insects have bitten me. grr.
- Gold: 0
- Pred: 0

prettiest insects ever - pink katydids:
- Gold: 1
- Pred: 0
-> MISMATCH!

just got barraged by a horde of insects hungry for my kitchen light. so scary.
- Gold: 0
- Pred: 0

just had mcdonalds for dinner. :d it was goooood. big mac meal. ;)
- Gold: 1
- Pred: 1

ahh yes lol ima tell my hubby to go get me sum mcdonalds =]
- Gold: 1
- Pred: 0
-> MISMATCH!

stopped to have lunch at mcdonalds. chicken nuggetssss! :) yummmmmy.
- Gold: 1
- Pred: 1

could go for a lot of mcdonalds. i mean a lot.
- Gold: 1
- Pred: 0
-> MISMATCH!

my exam went good. : your prayers worked (:
- Gold: 1
- Pred: 0
-> MISMATCH!

only one exam left, and i am so happy for it :d
- Gold: 1
- Pred: 0
-> MISMATCH!

math review. im going to fail the exam.
- Gold: 0
- Pred: 1
-> MISMATCH!

colin powell rocked yesterday on cbs. cheney needs to shut the hell up and go home.powell is a man of honor and served our country proudly
- Gold: 0
- Pred: 1
-> MISMATCH!

obviously not siding with cheney here:
- Gold: 0
- Pred: 1
-> MISMATCH!

absolutely hilarious! from :
- Gold: 1
- Pred: 1

i never did thank you for including me in your top 100 twitter authors! you rock! (& i new wave :-d)
- Gold: 1
- Pred: 1

rt : awesome jquery reference book for coda! / #webdesign
- Gold: 1
- Pred: 1

i have been sending e-mails like crazy today to my contacts.does anyone have a contact at goodby silverstein.i would love to speak to them
- Gold: 1
- Pred: 0
-> MISMATCH!

goodby, silverstein is new site. / i enjoy it.
- Gold: 1
- Pred: 1

wow everyone at the google i/o conference got free g2 is with a month of unlimited service
- Gold: 1
- Pred: 1

dood i got a free google android phone at the i/o conference. the g2!
- Gold: 1
- Pred: 1

the g2 is amazing btw, a huge improvement over the g1
- Gold: 1
- Pred: 1

html 5 demos! lots of great stuff e! yes, i am excited. :) #io2009 #googleio
- Gold: 1
- Pred: 1

- yay! happy place! place place! i love google!
- Gold: 1
- Pred: 1
+
|
1190 |
+
#googleio | o3d - bringing 3d graphics to the browser. very nice tbh. funfun.
|
1191 |
+
- Gold: 1
|
1192 |
+
- Pred: 1
|
1193 |
+
|
1194 |
+
awesome viral marketing for "funny people" -teach/
|
1195 |
+
- Gold: 1
|
1196 |
+
- Pred: 1
|
1197 |
+
|
1198 |
+
saw night at the museum out of sheer desperation. who is funding these movies!
|
1199 |
+
- Gold: 0
|
1200 |
+
- Pred: 1
|
1201 |
+
-> MISMATCH!
|
1202 |
+
|
1203 |
+
night at the museum 2! pretty furkin good.
|
1204 |
+
- Gold: 1
|
1205 |
+
- Pred: 1
|
1206 |
+
|
1207 |
+
watching night at the museum - giggling.
|
1208 |
+
- Gold: 1
|
1209 |
+
- Pred: 1
|
1210 |
+
|
1211 |
+
back from seeing istar trek' and 'night at the museum.' istar trek' was amazing, but 'night at the museum' was; eh.
|
1212 |
+
- Gold: 0
|
1213 |
+
- Pred: 1
|
1214 |
+
-> MISMATCH!
|
1215 |
+
|
1216 |
+
just watched night at the museum 2! so stinkin cute!
|
1217 |
+
- Gold: 1
|
1218 |
+
- Pred: 1
|
1219 |
+
|
1220 |
+
so, night at the museum 2 was awesome! much better than part 1. next weekend we will see up.
|
1221 |
+
- Gold: 1
|
1222 |
+
- Pred: 0
|
1223 |
+
-> MISMATCH!
|
1224 |
+
|
1225 |
+
saw the new night at the museum and i loved it. next is to go see up in 3d
|
1226 |
+
- Gold: 1
|
1227 |
+
- Pred: 0
|
1228 |
+
-> MISMATCH!
|
1229 |
+
|
1230 |
+
it is a shame about gm. what if they are forced to make only cars the white house thinks will sell! what do you think!
|
1231 |
+
- Gold: 0
|
1232 |
+
- Pred: 1
|
1233 |
+
-> MISMATCH!
|
1234 |
+
|
1235 |
+
as u may have noticed, not too happy about the gm situation, nor aig, lehman, et al
|
1236 |
+
- Gold: 0
|
1237 |
+
- Pred: 1
|
1238 |
+
-> MISMATCH!
|
1239 |
+
|
1240 |
+
$gm good riddance. sad though.
|
1241 |
+
- Gold: 0
|
1242 |
+
- Pred: 1
|
1243 |
+
-> MISMATCH!
|
1244 |
+
|
1245 |
+
i will never buy a government motors vehicle: until just recently, i drove gm cars. since 1988, when i bought a .
|
1246 |
+
- Gold: 0
|
1247 |
+
- Pred: 0
|
1248 |
+
|
1249 |
+
having the old coca-cola guy on the gm board is stupid has heck! #tcot #ala
|
1250 |
+
- Gold: 0
|
1251 |
+
- Pred: 1
|
1252 |
+
-> MISMATCH!
|
1253 |
+
|
1254 |
+
#rantsandraves the worst thing about gm (concord / pleasant hill / martinez): is the fucking uaw. .
|
1255 |
+
- Gold: 0
|
1256 |
+
- Pred: 1
|
1257 |
+
-> MISMATCH!
|
1258 |
+
|
1259 |
+
give a man a fish, u feed him for the day. teach him to fish, u feed him for life. buy him gm, and u f**k him over for good.
|
1260 |
+
- Gold: 0
|
1261 |
+
- Pred: 0
|
1262 |
+
|
1263 |
+
the more i hear about this gm thing the more angry i get. billions wasted, more bullshit. all for something like 40k employees and all the.
|
1264 |
+
- Gold: 0
|
1265 |
+
- Pred: 0
|
1266 |
+
|
1267 |
+
i own a gm car and it is junk as far as pared to a honda
|
1268 |
+
- Gold: 0
|
1269 |
+
- Pred: 0
|
1270 |
+
|
1271 |
+
sad day.bankrupt gm
|
1272 |
+
- Gold: 0
|
1273 |
+
- Pred: 1
|
1274 |
+
-> MISMATCH!
|
1275 |
+
|
1276 |
+
is upset about the whole gm thing. life as i know it is so screwed up
|
1277 |
+
- Gold: 0
|
1278 |
+
- Pred: 0
|
1279 |
+
|
1280 |
+
whoever is running time warner needs to be repeatedly raped by a rhino so they understand the consequences of putting out shitty cable svcs
|
1281 |
+
- Gold: 0
|
1282 |
+
- Pred: 0
|
1283 |
+
|
1284 |
+
#wftb joining a bit late. my connection was down (boo time warner)
|
1285 |
+
- Gold: 0
|
1286 |
+
- Pred: 0
|
1287 |
+
|
1288 |
+
cox or time warner! cox is cheaper and gets a b on dslreports. tw is more expensive and gets a c.
|
1289 |
+
- Gold: 0
|
1290 |
+
- Pred: 0
|
1291 |
+
|
1292 |
+
i am furious with time warner and their phone promotions!
|
1293 |
+
- Gold: 0
|
1294 |
+
- Pred: 1
|
1295 |
+
-> MISMATCH!
|
1296 |
+
|
1297 |
+
just got home from chick-fil-a with the boys. damn my internets down =( stupid time warner
|
1298 |
+
- Gold: 0
|
1299 |
+
- Pred: 1
|
1300 |
+
-> MISMATCH!
|
1301 |
+
|
1302 |
+
could time-warner cable suck more! no.
|
1303 |
+
- Gold: 0
|
1304 |
+
- Pred: 0
|
1305 |
+
|
1306 |
+
pissed at time warner for causin me to have slow internet problems
|
1307 |
+
- Gold: 0
|
1308 |
+
- Pred: 0
|
1309 |
+
|
1310 |
+
ummm, having some time warner problems!
|
1311 |
+
- Gold: 0
|
1312 |
+
- Pred: 0
|
1313 |
+
|
1314 |
+
you guys see this! why does time warner have to suck so much ass! really wish i could get u-verse at my apartment.
|
1315 |
+
- Gold: 0
|
1316 |
+
- Pred: 0
|
1317 |
+
|
1318 |
+
rt the upside to time warner: unhelpful phone operators superslow on-site service. crap, that is not an upside.
|
1319 |
+
- Gold: 0
|
1320 |
+
- Pred: 0
|
1321 |
+
|
1322 |
+
rt : new time warner slogan: "time warner, where we make you long for the days before cable."
|
1323 |
+
- Gold: 0
|
1324 |
+
- Pred: 1
|
1325 |
+
-> MISMATCH!
|
1326 |
+
|
1327 |
+
confirmed: it is time warner is fault, not facebook is, that fb is taking about 3 minutes to load. so tempted to switch to verizon =/
|
1328 |
+
- Gold: 0
|
1329 |
+
- Pred: 0
|
1330 |
+
|
1331 |
+
time warner = epic fail
|
1332 |
+
- Gold: 0
|
1333 |
+
- Pred: 1
|
1334 |
+
-> MISMATCH!
|
1335 |
+
|
1336 |
+
i know. how sad is that! rt : 1st day of hurricane season. that is less scarey than govt taking over gm.
|
1337 |
+
- Gold: 0
|
1338 |
+
- Pred: 0
|
1339 |
+
|
1340 |
+
gm files bankruptcy, not a good sign.
|
1341 |
+
- Gold: 0
|
1342 |
+
- Pred: 0
|
1343 |
+
|
1344 |
+
yankees won mets lost. its a good day.
|
1345 |
+
- Gold: 1
|
1346 |
+
- Pred: 1
|
1347 |
+
|
1348 |
+
my dentist appt today was actually quite enjoyable.
|
1349 |
+
- Gold: 1
|
1350 |
+
- Pred: 0
|
1351 |
+
-> MISMATCH!
|
1352 |
+
|
1353 |
+
i hate the effing dentist.
|
1354 |
+
- Gold: 0
|
1355 |
+
- Pred: 0
|
1356 |
+
|
1357 |
+
i hate going to the dentist. !
|
1358 |
+
- Gold: 0
|
1359 |
+
- Pred: 0
|
1360 |
+
|
1361 |
+
i hate the dentist.who invented them anyways!
|
1362 |
+
- Gold: 0
|
1363 |
+
- Pred: 1
|
1364 |
+
-> MISMATCH!
|
1365 |
+
|
1366 |
+
this dentist is office is cold :/
|
1367 |
+
- Gold: 0
|
1368 |
+
- Pred: 0
|
1369 |
+
|
1370 |
+
just applied at safeway!(: yeeeee!
|
1371 |
+
- Gold: 1
|
1372 |
+
- Pred: 1
|
1373 |
+
|
1374 |
+
@ safeway. place is a nightmare right now. bumming.
|
1375 |
+
- Gold: 0
|
1376 |
+
- Pred: 1
|
1377 |
+
-> MISMATCH!
|
1378 |
+
|
1379 |
+
hate safeway select green tea icecream! bought two cartons, what a waste of money. >_<
|
1380 |
+
- Gold: 0
|
1381 |
+
- Pred: 1
|
1382 |
+
-> MISMATCH!
|
1383 |
+
|
1384 |
+
nike rocks. i am super grateful for what i have done with them :) & the european division of nike is beyond!
|
1385 |
+
- Gold: 1
|
1386 |
+
- Pred: 1
|
1387 |
+
|
1388 |
+
have you tried nike ! v. addictive.
|
1389 |
+
- Gold: 1
|
1390 |
+
- Pred: 0
|
1391 |
+
-> MISMATCH!
|
1392 |
+
|
1393 |
+
the nike training club (beta) iphone app looks very interesting.
|
1394 |
+
- Gold: 1
|
1395 |
+
- Pred: 1
|
1396 |
+
|
1397 |
+
argghhhh why won not my jquery appear in safari bad safari !
|
1398 |
+
- Gold: 0
|
1399 |
+
- Pred: 1
|
1400 |
+
-> MISMATCH!
|
1401 |
+
|
1402 |
+
i am ready to drop the pretenses, i am forever in love with jquery, and i want to marry it. sorry ladies, this nerd is jquery.spokenfor.js
|
1403 |
+
- Gold: 1
|
1404 |
+
- Pred: 1
|
1405 |
+
|
1406 |
+
super investors: a great weekend read here from warren buffet. oldie, but a goodie.
|
1407 |
+
- Gold: 1
|
1408 |
+
- Pred: 1
|
1409 |
+
|
1410 |
+
reading michael palin book, the python years.great book. i also mend warren buffet & nelson mandela is bio
|
1411 |
+
- Gold: 1
|
1412 |
+
- Pred: 1
|
1413 |
+
|
1414 |
+
i mean, i am down with notre dame if i have to. it is a good school, i would be closer to dan, i would enjoy it.
|
1415 |
+
- Gold: 1
|
1416 |
+
- Pred: 0
|
1417 |
+
-> MISMATCH!
|
1418 |
+
|
1419 |
+
i can not watch tv without a tivo. and after all these years, the time/warner dvr still sucks.
|
1420 |
+
- Gold: 0
|
1421 |
+
- Pred: 0
|
1422 |
+
|
1423 |
+
i would say some sports writers are idiots for saying roger federer is one of the best ever in tennis. roger federer is the best ever in tennis
|
1424 |
+
- Gold: 1
|
1425 |
+
- Pred: 1
|
1426 |
+
|
1427 |
+
i still love my kindle2 but reading the new york times on it does not feel natural. i miss the bloomingdale ads.
|
1428 |
+
- Gold: 0
|
1429 |
+
- Pred: 0
|
1430 |
+
|
1431 |
+
i love my kindle2. no more stacks of books to trip over on the way to the loo.
|
1432 |
+
- Gold: 1
|
1433 |
+
- Pred: 0
|
1434 |
+
-> MISMATCH!
|
1435 |
+
|
1436 |
+
although today is keynote rocked, for every great announcement, at&t shit on us just a little bit more.
|
1437 |
+
- Gold: 0
|
1438 |
+
- Pred: 0
|
1439 |
+
|
1440 |
+
- its not so much my obsession with cell phones, but the iphone! i am a slave to at&t forever because of it. :)
|
1441 |
+
- Gold: 0
|
1442 |
+
- Pred: 1
|
1443 |
+
-> MISMATCH!
|
1444 |
+
|
1445 |
+
fuzzball is more fun than at&t ;p -/twitter
|
1446 |
+
- Gold: 0
|
1447 |
+
- Pred: 1
|
1448 |
+
-> MISMATCH!
|
1449 |
+
|
1450 |
+
today is a good day to dislike at&t. vote out of office indeed,
|
1451 |
+
- Gold: 0
|
1452 |
+
- Pred: 1
|
1453 |
+
-> MISMATCH!
|
1454 |
+
|
1455 |
+
got my wave sandbox invite! extra excited! too bad i have class now. but i will play with it soon enough! #io2009 #wave
|
1456 |
+
- Gold: 1
|
1457 |
+
- Pred: 1
|
1458 |
+
|
1459 |
+
looks like summize has gone down. too many tweets from wwdc perhaps!
|
1460 |
+
- Gold: 0
|
1461 |
+
- Pred: 0
|
1462 |
+
|
1463 |
+
thanks so much! .from one of your *very* happy kindle2 winners ; ) i was so surprised, fabulous. thank you! best, kathleen
|
1464 |
+
- Gold: 1
|
1465 |
+
- Pred: 1
|
1466 |
+
|
1467 |
+
man i kinda dislike apple right now. case in point: the iphone 3gs. wish there was a video recorder app. please!
|
1468 |
+
- Gold: 0
|
1469 |
+
- Pred: 1
|
1470 |
+
-> MISMATCH!
|
1471 |
+
|
1472 |
+
i have a kindle2 (& sony prs-500). like it! physical device feels good. font is nice. pg turns are snappy enuf. ui a little klunky.
|
1473 |
+
- Gold: 1
|
1474 |
+
- Pred: 1
|
1475 |
+
|
1476 |
+
the #kindle2 seems the best ereader, but will it work in the uk and where can i get one!
|
1477 |
+
- Gold: 1
|
1478 |
+
- Pred: 1
|
1479 |
+
|
1480 |
+
i have a google addiction. thank you for pointing that out, . hahaha.
|
1481 |
+
- Gold: 1
|
1482 |
+
- Pred: 0
|
1483 |
+
-> MISMATCH!
|
1484 |
+
|
1485 |
+
dearest , you rich bastards! the visa card you sent me doesn not work. why screw a little guy like me!
|
1486 |
+
- Gold: 0
|
1487 |
+
- Pred: 0
|
1488 |
+
|
1489 |
+
excited about seeing bobby flay and guy fieri tomorrow at the great american food & music fest!
|
1490 |
+
- Gold: 1
|
1491 |
+
- Pred: 1
|
1492 |
+
|
1493 |
+
gonna go see bobby flay 2moro at shoreline. eat and drink. gonna be good.
|
1494 |
+
- Gold: 1
|
1495 |
+
- Pred: 1
|
1496 |
+
|
1497 |
+
can not wait for the great american food and music festival at shoreline tomorrow. mmm.katz pastrami and bobby flay. yes please.
|
1498 |
+
- Gold: 1
|
1499 |
+
- Pred: 1
|
1500 |
+
|
1501 |
+
my dad was in ny for a day, we ate at mesa grill last night and met bobby flay. so much fun, except pletely lost my voice today.
|
1502 |
+
- Gold: 1
|
1503 |
+
- Pred: 0
|
1504 |
+
-> MISMATCH!
|
1505 |
+
|
1506 |
+
fighting with latex. again.
|
1507 |
+
- Gold: 0
|
1508 |
+
- Pred: 0
|
1509 |
+
|
1510 |
+
we love you too and don not want you to die! latex = the devil
|
1511 |
+
- Gold: 0
|
1512 |
+
- Pred: 0
|
1513 |
+
|
1514 |
+
7 hours. 7 hours of inkscape crashing, normally solid as a rock. 7 hours of plaining at the slightest thing. i can not take any more.
|
1515 |
+
- Gold: 0
|
1516 |
+
- Pred: 0
|
1517 |
+
|
1518 |
+
shit is hitting the fan in iran.craziness indeed #iranelection
|
1519 |
+
- Gold: 0
|
1520 |
+
- Pred: 0
|
1521 |
+
|
1522 |
+
monday already. iran may implode. kitchen is a disaster. seems happy. had a nice weekend and is great. whoop.
|
1523 |
+
- Gold: 0
|
1524 |
+
- Pred: 1
|
1525 |
+
-> MISMATCH!
|
1526 |
+
|
1527 |
+
getting ready to test out some burger receipes this weekend. bobby flay has some great receipes to try. thanks bobby.
|
1528 |
+
- Gold: 1
|
1529 |
+
- Pred: 1
|
1530 |
+
|
1531 |
+
i lam so in love with bobby flay. he is my favorite. rt : you need a place in phoenix. we have great peppers here!
|
1532 |
+
- Gold: 1
|
1533 |
+
- Pred: 1
|
1534 |
+
|
1535 |
+
i just created my first latex file from scratch. that didn not work out very well. (see , it is a great time waster)
|
1536 |
+
- Gold: 0
|
1537 |
+
- Pred: 0
|
1538 |
+
|
1539 |
+
using linux and loving it - so much nicer than windows. looking forward to using the wysiwyg latex editor!
|
1540 |
+
- Gold: 1
|
1541 |
+
- Pred: 1
|
1542 |
+
|
1543 |
+
after using latex a lot, any other typeset mathematics just looks hideous.
|
1544 |
+
- Gold: 1
|
1545 |
+
- Pred: 0
|
1546 |
+
-> MISMATCH!
|
1547 |
+
|
1548 |
+
on that note, i hate word. i hate pages. i hate latex. there, i said it. i hate latex. all you texn3rds e kill me now.
|
1549 |
+
- Gold: 0
|
1550 |
+
- Pred: 0
|
1551 |
+
|
1552 |
+
ahhh. back in a *real* text editing environment. i <3 latex.
|
1553 |
+
- Gold: 1
|
1554 |
+
- Pred: 1
|
1555 |
+
|
1556 |
+
trouble in iran, i see. hmm. iran. iran so far away. #flockofseagullsweregeopoliticallycorrect
|
1557 |
+
- Gold: 0
|
1558 |
+
- Pred: 0
|
1559 |
+
|
1560 |
+
reading the ing out of iran. the whole thing is terrifying and incredibly sad.
|
1561 |
+
- Gold: 0
|
1562 |
+
- Pred: 0
|
1563 |
+
|
models/flair-sentiment-classifier/training.log
ADDED
@@ -0,0 +1,644 @@
+2023-02-06 08:28:45,031 ----------------------------------------------------------------------------------------------------
+2023-02-06 08:28:45,036 Model: "TextClassifier(
+(decoder): Linear(in_features=512, out_features=3, bias=True)
+(dropout): Dropout(p=0.0, inplace=False)
+(locked_dropout): LockedDropout(p=0.0)
+(word_dropout): WordDropout(p=0.0)
+(loss_function): CrossEntropyLoss()
+(document_embeddings): DocumentLSTMEmbeddings(
+(embeddings): StackedEmbeddings(
+(list_embedding_0): FlairEmbeddings(
+(lm): LanguageModel(
+(drop): Dropout(p=0.25, inplace=False)
+(encoder): Embedding(275, 100)
+(rnn): LSTM(100, 1024)
+(decoder): Linear(in_features=1024, out_features=275, bias=True)
+)
+)
+)
+(word_reprojection_map): Linear(in_features=1024, out_features=256, bias=True)
+(rnn): GRU(256, 512)
+(dropout): Dropout(p=0.5, inplace=False)
+)
+(weights): None
+(weight_tensor) None
+)"
+2023-02-06 08:28:45,039 ----------------------------------------------------------------------------------------------------
+2023-02-06 08:28:45,042 Corpus: "Corpus: 8500 train + 1500 dev + 359 test sentences"
+2023-02-06 08:28:45,045 ----------------------------------------------------------------------------------------------------
+2023-02-06 08:28:45,048 Parameters:
+2023-02-06 08:28:45,051 - learning_rate: "0.010000"
+2023-02-06 08:28:45,052 - mini_batch_size: "64"
+2023-02-06 08:28:45,056 - patience: "3"
+2023-02-06 08:28:45,057 - anneal_factor: "0.5"
+2023-02-06 08:28:45,061 - max_epochs: "35"
+2023-02-06 08:28:45,063 - shuffle: "True"
+2023-02-06 08:28:45,069 - train_with_dev: "False"
+2023-02-06 08:28:45,071 - batch_growth_annealing: "False"
+2023-02-06 08:28:45,075 ----------------------------------------------------------------------------------------------------
+2023-02-06 08:28:45,078 Model training base path: "/content/drive/MyDrive/Colab Notebooks/models/flair-sentiment-classifier"
+2023-02-06 08:28:45,081 ----------------------------------------------------------------------------------------------------
+2023-02-06 08:28:45,083 Device: cuda:0
+2023-02-06 08:28:45,085 ----------------------------------------------------------------------------------------------------
+2023-02-06 08:28:45,089 Embeddings storage mode: gpu
+2023-02-06 08:28:45,091 ----------------------------------------------------------------------------------------------------
+2023-02-06 08:28:46,882 epoch 1 - iter 13/133 - loss 0.01562834 - samples/sec: 514.51 - lr: 0.010000
+2023-02-06 08:28:48,397 epoch 1 - iter 26/133 - loss 0.01481466 - samples/sec: 748.73 - lr: 0.010000
+2023-02-06 08:28:49,638 epoch 1 - iter 39/133 - loss 0.01423043 - samples/sec: 772.85 - lr: 0.010000
+2023-02-06 08:28:51,189 epoch 1 - iter 52/133 - loss 0.01381341 - samples/sec: 600.83 - lr: 0.010000
+2023-02-06 08:28:52,402 epoch 1 - iter 65/133 - loss 0.01349711 - samples/sec: 796.47 - lr: 0.010000
+2023-02-06 08:28:53,877 epoch 1 - iter 78/133 - loss 0.01323276 - samples/sec: 765.73 - lr: 0.010000
+2023-02-06 08:28:55,144 epoch 1 - iter 91/133 - loss 0.01301969 - samples/sec: 759.35 - lr: 0.010000
+2023-02-06 08:28:56,401 epoch 1 - iter 104/133 - loss 0.01283645 - samples/sec: 765.85 - lr: 0.010000
+2023-02-06 08:28:57,895 epoch 1 - iter 117/133 - loss 0.01267868 - samples/sec: 760.35 - lr: 0.010000
+2023-02-06 08:28:59,155 epoch 1 - iter 130/133 - loss 0.01254156 - samples/sec: 766.70 - lr: 0.010000
+2023-02-06 08:28:59,435 ----------------------------------------------------------------------------------------------------
+2023-02-06 08:28:59,440 EPOCH 1 done: loss 0.0125 - lr 0.010000
+2023-02-06 08:29:02,345 Evaluating as a multi-label problem: False
+2023-02-06 08:29:02,360 DEV : loss 0.01149754598736763 - f1-score (micro avg) 0.5393
+2023-02-06 08:29:02,938 BAD EPOCHS (no improvement): 0
+2023-02-06 08:29:02,944 saving best model
+2023-02-06 08:29:03,019 ----------------------------------------------------------------------------------------------------
+2023-02-06 08:29:04,350 epoch 2 - iter 13/133 - loss 0.01134125 - samples/sec: 713.12 - lr: 0.010000
+2023-02-06 08:29:05,867 epoch 2 - iter 26/133 - loss 0.01133783 - samples/sec: 745.31 - lr: 0.010000
+2023-02-06 08:29:07,096 epoch 2 - iter 39/133 - loss 0.01130036 - samples/sec: 785.18 - lr: 0.010000
+2023-02-06 08:29:08,398 epoch 2 - iter 52/133 - loss 0.01126930 - samples/sec: 739.05 - lr: 0.010000
+2023-02-06 08:29:10,210 epoch 2 - iter 65/133 - loss 0.01123679 - samples/sec: 611.49 - lr: 0.010000
+2023-02-06 08:29:12,013 epoch 2 - iter 78/133 - loss 0.01119918 - samples/sec: 563.65 - lr: 0.010000
+2023-02-06 08:29:13,567 epoch 2 - iter 91/133 - loss 0.01119660 - samples/sec: 722.13 - lr: 0.010000
+2023-02-06 08:29:14,863 epoch 2 - iter 104/133 - loss 0.01118887 - samples/sec: 739.97 - lr: 0.010000
+2023-02-06 08:29:16,413 epoch 2 - iter 117/133 - loss 0.01117338 - samples/sec: 736.63 - lr: 0.010000
+2023-02-06 08:29:17,679 epoch 2 - iter 130/133 - loss 0.01115977 - samples/sec: 754.47 - lr: 0.010000
+2023-02-06 08:29:17,964 ----------------------------------------------------------------------------------------------------
+2023-02-06 08:29:17,967 EPOCH 2 done: loss 0.0112 - lr 0.010000
+2023-02-06 08:29:20,384 Evaluating as a multi-label problem: False
+2023-02-06 08:29:20,400 DEV : loss 0.011094754561781883 - f1-score (micro avg) 0.586
+2023-02-06 08:29:20,760 BAD EPOCHS (no improvement): 0
+2023-02-06 08:29:20,767 saving best model
+2023-02-06 08:29:20,836 ----------------------------------------------------------------------------------------------------
+2023-02-06 08:29:22,455 epoch 3 - iter 13/133 - loss 0.01100920 - samples/sec: 577.11 - lr: 0.010000
+2023-02-06 08:29:23,739 epoch 3 - iter 26/133 - loss 0.01100166 - samples/sec: 748.26 - lr: 0.010000
+2023-02-06 08:29:25,256 epoch 3 - iter 39/133 - loss 0.01099206 - samples/sec: 614.87 - lr: 0.010000
+2023-02-06 08:29:26,543 epoch 3 - iter 52/133 - loss 0.01096206 - samples/sec: 747.35 - lr: 0.010000
+2023-02-06 08:29:28,045 epoch 3 - iter 65/133 - loss 0.01093502 - samples/sec: 756.93 - lr: 0.010000
+2023-02-06 08:29:29,300 epoch 3 - iter 78/133 - loss 0.01092986 - samples/sec: 767.76 - lr: 0.010000
+2023-02-06 08:29:30,899 epoch 3 - iter 91/133 - loss 0.01094036 - samples/sec: 700.02 - lr: 0.010000
+2023-02-06 08:29:32,167 epoch 3 - iter 104/133 - loss 0.01093898 - samples/sec: 759.88 - lr: 0.010000
+2023-02-06 08:29:33,482 epoch 3 - iter 117/133 - loss 0.01092654 - samples/sec: 724.74 - lr: 0.010000
+2023-02-06 08:29:34,996 epoch 3 - iter 130/133 - loss 0.01091851 - samples/sec: 747.17 - lr: 0.010000
+2023-02-06 08:29:35,285 ----------------------------------------------------------------------------------------------------
+2023-02-06 08:29:35,288 EPOCH 3 done: loss 0.0109 - lr 0.010000
+2023-02-06 08:29:37,771 Evaluating as a multi-label problem: False
+2023-02-06 08:29:37,787 DEV : loss 0.010945815593004227 - f1-score (micro avg) 0.5833
+2023-02-06 08:29:38,166 BAD EPOCHS (no improvement): 1
+2023-02-06 08:29:38,171 ----------------------------------------------------------------------------------------------------
+2023-02-06 08:29:39,771 epoch 4 - iter 13/133 - loss 0.01088269 - samples/sec: 703.41 - lr: 0.010000
+2023-02-06 08:29:41,093 epoch 4 - iter 26/133 - loss 0.01079631 - samples/sec: 727.27 - lr: 0.010000
+2023-02-06 08:29:42,388 epoch 4 - iter 39/133 - loss 0.01080397 - samples/sec: 731.61 - lr: 0.010000
+2023-02-06 08:29:43,901 epoch 4 - iter 52/133 - loss 0.01079299 - samples/sec: 748.17 - lr: 0.010000
+2023-02-06 08:29:45,173 epoch 4 - iter 65/133 - loss 0.01080453 - samples/sec: 756.93 - lr: 0.010000
+2023-02-06 08:29:46,704 epoch 4 - iter 78/133 - loss 0.01078781 - samples/sec: 742.45 - lr: 0.010000
+2023-02-06 08:29:48,004 epoch 4 - iter 91/133 - loss 0.01077505 - samples/sec: 739.13 - lr: 0.010000
+2023-02-06 08:29:49,566 epoch 4 - iter 104/133 - loss 0.01077253 - samples/sec: 719.85 - lr: 0.010000
+2023-02-06 08:29:50,799 epoch 4 - iter 117/133 - loss 0.01077099 - samples/sec: 782.87 - lr: 0.010000
+2023-02-06 08:29:52,309 epoch 4 - iter 130/133 - loss 0.01079835 - samples/sec: 751.86 - lr: 0.010000
+2023-02-06 08:29:52,598 ----------------------------------------------------------------------------------------------------
+2023-02-06 08:29:52,600 EPOCH 4 done: loss 0.0108 - lr 0.010000
+2023-02-06 08:29:54,806 Evaluating as a multi-label problem: False
+2023-02-06 08:29:54,823 DEV : loss 0.010844088159501553 - f1-score (micro avg) 0.5867
+2023-02-06 08:29:55,416 BAD EPOCHS (no improvement): 0
+2023-02-06 08:29:55,422 saving best model
+2023-02-06 08:29:55,494 ----------------------------------------------------------------------------------------------------
+2023-02-06 08:29:56,823 epoch 5 - iter 13/133 - loss 0.01067723 - samples/sec: 718.01 - lr: 0.010000
+2023-02-06 08:29:58,344 epoch 5 - iter 26/133 - loss 0.01059902 - samples/sec: 743.92 - lr: 0.010000
+2023-02-06 08:29:59,632 epoch 5 - iter 39/133 - loss 0.01065991 - samples/sec: 752.37 - lr: 0.010000
+2023-02-06 08:30:01,148 epoch 5 - iter 52/133 - loss 0.01066396 - samples/sec: 742.67 - lr: 0.010000
+2023-02-06 08:30:02,462 epoch 5 - iter 65/133 - loss 0.01067246 - samples/sec: 721.77 - lr: 0.010000
+2023-02-06 08:30:03,991 epoch 5 - iter 78/133 - loss 0.01067908 - samples/sec: 764.43 - lr: 0.010000
+2023-02-06 08:30:05,279 epoch 5 - iter 91/133 - loss 0.01070105 - samples/sec: 746.56 - lr: 0.010000
+2023-02-06 08:30:06,504 epoch 5 - iter 104/133 - loss 0.01071328 - samples/sec: 788.19 - lr: 0.010000
+2023-02-06 08:30:07,981 epoch 5 - iter 117/133 - loss 0.01069141 - samples/sec: 768.06 - lr: 0.010000
+2023-02-06 08:30:09,278 epoch 5 - iter 130/133 - loss 0.01069258 - samples/sec: 736.98 - lr: 0.010000
+2023-02-06 08:30:09,584 ----------------------------------------------------------------------------------------------------
+2023-02-06 08:30:09,591 EPOCH 5 done: loss 0.0107 - lr 0.010000
+2023-02-06 08:30:12,079 Evaluating as a multi-label problem: False
+2023-02-06 08:30:12,096 DEV : loss 0.010764073580503464 - f1-score (micro avg) 0.59
+2023-02-06 08:30:12,666 BAD EPOCHS (no improvement): 0
+2023-02-06 08:30:12,684 saving best model
+2023-02-06 08:30:12,758 ----------------------------------------------------------------------------------------------------
+2023-02-06 08:30:14,153 epoch 6 - iter 13/133 - loss 0.01071924 - samples/sec: 682.06 - lr: 0.010000
+2023-02-06 08:30:15,468 epoch 6 - iter 26/133 - loss 0.01071274 - samples/sec: 726.60 - lr: 0.010000
+2023-02-06 08:30:16,995 epoch 6 - iter 39/133 - loss 0.01072073 - samples/sec: 745.06 - lr: 0.010000
+2023-02-06 08:30:18,256 epoch 6 - iter 52/133 - loss 0.01069609 - samples/sec: 766.36 - lr: 0.010000
+2023-02-06 08:30:19,760 epoch 6 - iter 65/133 - loss 0.01067637 - samples/sec: 621.63 - lr: 0.010000
+2023-02-06 08:30:21,150 epoch 6 - iter 78/133 - loss 0.01069997 - samples/sec: 698.03 - lr: 0.010000
+2023-02-06 08:30:22,666 epoch 6 - iter 91/133 - loss 0.01067124 - samples/sec: 747.15 - lr: 0.010000
+2023-02-06 08:30:23,937 epoch 6 - iter 104/133 - loss 0.01065074 - samples/sec: 756.77 - lr: 0.010000
+2023-02-06 08:30:25,196 epoch 6 - iter 117/133 - loss 0.01065426 - samples/sec: 759.07 - lr: 0.010000
+2023-02-06 08:30:26,726 epoch 6 - iter 130/133 - loss 0.01065274 - samples/sec: 739.37 - lr: 0.010000
+2023-02-06 08:30:27,011 ----------------------------------------------------------------------------------------------------
+2023-02-06 08:30:27,016 EPOCH 6 done: loss 0.0107 - lr 0.010000
+2023-02-06 08:30:29,488 Evaluating as a multi-label problem: False
+2023-02-06 08:30:29,504 DEV : loss 0.010709869675338268 - f1-score (micro avg) 0.5953
+2023-02-06 08:30:29,884 BAD EPOCHS (no improvement): 0
+2023-02-06 08:30:29,891 saving best model
+2023-02-06 08:30:29,962 ----------------------------------------------------------------------------------------------------
+2023-02-06 08:30:31,568 epoch 7 - iter 13/133 - loss 0.01074821 - samples/sec: 588.47 - lr: 0.010000
+2023-02-06 08:30:32,838 epoch 7 - iter 26/133 - loss 0.01067320 - samples/sec: 752.01 - lr: 0.010000
+2023-02-06 08:30:34,326 epoch 7 - iter 39/133 - loss 0.01067867 - samples/sec: 764.30 - lr: 0.010000
+2023-02-06 08:30:35,675 epoch 7 - iter 52/133 - loss 0.01066511 - samples/sec: 715.33 - lr: 0.010000
+2023-02-06 08:30:37,160 epoch 7 - iter 65/133 - loss 0.01066500 - samples/sec: 774.66 - lr: 0.010000
+2023-02-06 08:30:38,384 epoch 7 - iter 78/133 - loss 0.01064703 - samples/sec: 786.21 - lr: 0.010000
+2023-02-06 08:30:39,656 epoch 7 - iter 91/133 - loss 0.01064059 - samples/sec: 752.36 - lr: 0.010000
+2023-02-06 08:30:41,183 epoch 7 - iter 104/133 - loss 0.01064299 - samples/sec: 741.82 - lr: 0.010000
+2023-02-06 08:30:42,411 epoch 7 - iter 117/133 - loss 0.01063663 - samples/sec: 782.15 - lr: 0.010000
+2023-02-06 08:30:43,918 epoch 7 - iter 130/133 - loss 0.01062067 - samples/sec: 746.86 - lr: 0.010000
+2023-02-06 08:30:44,207 ----------------------------------------------------------------------------------------------------
+2023-02-06 08:30:44,212 EPOCH 7 done: loss 0.0106 - lr 0.010000
+2023-02-06 08:30:46,609 Evaluating as a multi-label problem: False
+2023-02-06 08:30:46,625 DEV : loss 0.010663843713700771 - f1-score (micro avg) 0.604
+2023-02-06 08:30:46,984 BAD EPOCHS (no improvement): 0
+2023-02-06 08:30:46,991 saving best model
+2023-02-06 08:30:47,072 ----------------------------------------------------------------------------------------------------
+2023-02-06 08:30:48,638 epoch 8 - iter 13/133 - loss 0.01043837 - samples/sec: 601.87 - lr: 0.010000
+2023-02-06 08:30:49,893 epoch 8 - iter 26/133 - loss 0.01051882 - samples/sec: 760.45 - lr: 0.010000
+2023-02-06 08:30:51,166 epoch 8 - iter 39/133 - loss 0.01054135 - samples/sec: 761.95 - lr: 0.010000
+2023-02-06 08:30:52,624 epoch 8 - iter 52/133 - loss 0.01056330 - samples/sec: 784.40 - lr: 0.010000
+2023-02-06 08:30:53,864 epoch 8 - iter 65/133 - loss 0.01058674 - samples/sec: 788.25 - lr: 0.010000
+2023-02-06 08:30:55,348 epoch 8 - iter 78/133 - loss 0.01057453 - samples/sec: 632.25 - lr: 0.010000
+2023-02-06 08:30:56,643 epoch 8 - iter 91/133 - loss 0.01056818 - samples/sec: 743.01 - lr: 0.010000
+2023-02-06 08:30:58,165 epoch 8 - iter 104/133 - loss 0.01056378 - samples/sec: 754.99 - lr: 0.010000
+2023-02-06 08:30:59,473 epoch 8 - iter 117/133 - loss 0.01055726 - samples/sec: 740.74 - lr: 0.010000
+2023-02-06 08:31:00,790 epoch 8 - iter 130/133 - loss 0.01054234 - samples/sec: 730.40 - lr: 0.010000
+2023-02-06 08:31:01,299 ----------------------------------------------------------------------------------------------------
+2023-02-06 08:31:01,301 EPOCH 8 done: loss 0.0106 - lr 0.010000
+2023-02-06 08:31:03,556 Evaluating as a multi-label problem: False
+2023-02-06 08:31:03,573 DEV : loss 0.010619796812534332 - f1-score (micro avg) 0.602
+2023-02-06 08:31:04,148 BAD EPOCHS (no improvement): 1
+2023-02-06 08:31:04,153 ----------------------------------------------------------------------------------------------------
+2023-02-06 08:31:05,493 epoch 9 - iter 13/133 - loss 0.01055787 - samples/sec: 714.38 - lr: 0.010000
+2023-02-06 08:31:06,991 epoch 9 - iter 26/133 - loss 0.01055880 - samples/sec: 756.84 - lr: 0.010000
+2023-02-06 08:31:08,220 epoch 9 - iter 39/133 - loss 0.01058857 - samples/sec: 784.11 - lr: 0.010000
+2023-02-06 08:31:09,733 epoch 9 - iter 52/133 - loss 0.01050204 - samples/sec: 743.77 - lr: 0.010000
+2023-02-06 08:31:11,006 epoch 9 - iter 65/133 - loss 0.01049462 - samples/sec: 757.49 - lr: 0.010000
+2023-02-06 08:31:12,320 epoch 9 - iter 78/133 - loss 0.01049927 - samples/sec: 726.38 - lr: 0.010000
+2023-02-06 08:31:13,836 epoch 9 - iter 91/133 - loss 0.01053675 - samples/sec: 753.53 - lr: 0.010000
+2023-02-06 08:31:15,093 epoch 9 - iter 104/133 - loss 0.01051643 - samples/sec: 763.35 - lr: 0.010000
+2023-02-06 08:31:16,571 epoch 9 - iter 117/133 - loss 0.01051333 - samples/sec: 770.43 - lr: 0.010000
+2023-02-06 08:31:17,840 epoch 9 - iter 130/133 - loss 0.01052863 - samples/sec: 764.73 - lr: 0.010000
+2023-02-06 08:31:18,111 ----------------------------------------------------------------------------------------------------
+2023-02-06 08:31:18,112 EPOCH 9 done: loss 0.0105 - lr 0.010000
+2023-02-06 08:31:20,513 Evaluating as a multi-label problem: False
+2023-02-06 08:31:20,529 DEV : loss 0.010611701756715775 - f1-score (micro avg) 0.6127
+2023-02-06 08:31:21,099 BAD EPOCHS (no improvement): 0
+2023-02-06 08:31:21,105 saving best model
+2023-02-06 08:31:21,180 ----------------------------------------------------------------------------------------------------
+2023-02-06 08:31:22,488 epoch 10 - iter 13/133 - loss 0.01068018 - samples/sec: 735.19 - lr: 0.010000
+2023-02-06 08:31:23,814 epoch 10 - iter 26/133 - loss 0.01048881 - samples/sec: 719.40 - lr: 0.010000
+2023-02-06 08:31:25,348 epoch 10 - iter 39/133 - loss 0.01057341 - samples/sec: 607.48 - lr: 0.010000
+2023-02-06 08:31:26,612 epoch 10 - iter 52/133 - loss 0.01053287 - samples/sec: 762.66 - lr: 0.010000
+2023-02-06 08:31:28,080 epoch 10 - iter 65/133 - loss 0.01053009 - samples/sec: 780.80 - lr: 0.010000
+2023-02-06 08:31:29,348 epoch 10 - iter 78/133 - loss 0.01049424 - samples/sec: 765.03 - lr: 0.010000
+2023-02-06 08:31:30,907 epoch 10 - iter 91/133 - loss 0.01049714 - samples/sec: 602.01 - lr: 0.010000
+2023-02-06 08:31:32,299 epoch 10 - iter 104/133 - loss 0.01051002 - samples/sec: 688.23 - lr: 0.010000
+2023-02-06 08:31:33,585 epoch 10 - iter 117/133 - loss 0.01050991 - samples/sec: 751.61 - lr: 0.010000
+2023-02-06 08:31:35,136 epoch 10 - iter 130/133 - loss 0.01049037 - samples/sec: 721.95 - lr: 0.010000
+2023-02-06 08:31:35,429 ----------------------------------------------------------------------------------------------------
+2023-02-06 08:31:35,431 EPOCH 10 done: loss 0.0105 - lr 0.010000
+2023-02-06 08:31:37,895 Evaluating as a multi-label problem: False
+2023-02-06 08:31:37,910 DEV : loss 0.010555021464824677 - f1-score (micro avg) 0.612
+2023-02-06 08:31:38,250 BAD EPOCHS (no improvement): 1
+2023-02-06 08:31:38,266 ----------------------------------------------------------------------------------------------------
+2023-02-06 08:31:39,815 epoch 11 - iter 13/133 - loss 0.01014661 - samples/sec: 717.23 - lr: 0.010000
+2023-02-06 08:31:41,095 epoch 11 - iter 26/133 - loss 0.01037157 - samples/sec: 747.91 - lr: 0.010000
+2023-02-06 08:31:42,350 epoch 11 - iter 39/133 - loss 0.01039031 - samples/sec: 771.90 - lr: 0.010000
+2023-02-06 08:31:43,850 epoch 11 - iter 52/133 - loss 0.01048393 - samples/sec: 763.72 - lr: 0.010000
+2023-02-06 08:31:45,153 epoch 11 - iter 65/133 - loss 0.01050528 - samples/sec: 737.89 - lr: 0.010000
+2023-02-06 08:31:46,661 epoch 11 - iter 78/133 - loss 0.01048936 - samples/sec: 752.50 - lr: 0.010000
+2023-02-06 08:31:47,936 epoch 11 - iter 91/133 - loss 0.01046032 - samples/sec: 756.43 - lr: 0.010000
+2023-02-06 08:31:49,443 epoch 11 - iter 104/133 - loss 0.01049842 - samples/sec: 750.46 - lr: 0.010000
+2023-02-06 08:31:50,667 epoch 11 - iter 117/133 - loss 0.01048207 - samples/sec: 788.09 - lr: 0.010000
+2023-02-06 08:31:51,915 epoch 11 - iter 130/133 - loss 0.01047867 - samples/sec: 764.34 - lr: 0.010000
+2023-02-06 08:31:52,443 ----------------------------------------------------------------------------------------------------
+2023-02-06 08:31:52,447 EPOCH 11 done: loss 0.0105 - lr 0.010000
+2023-02-06 08:31:54,642 Evaluating as a multi-label problem: False
+2023-02-06 08:31:54,659 DEV : loss 0.010583124123513699 - f1-score (micro avg) 0.618
+2023-02-06 08:31:55,235 BAD EPOCHS (no improvement): 0
+2023-02-06 08:31:55,243 saving best model
+2023-02-06 08:31:55,324 ----------------------------------------------------------------------------------------------------
+2023-02-06 08:31:56,599 epoch 12 - iter 13/133 - loss 0.01034765 - samples/sec: 758.67 - lr: 0.010000
+2023-02-06 08:31:58,049 epoch 12 - iter 26/133 - loss 0.01038689 - samples/sec: 649.83 - lr: 0.010000
+2023-02-06 08:31:59,293 epoch 12 - iter 39/133 - loss 0.01041491 - samples/sec: 771.48 - lr: 0.010000
+2023-02-06 08:32:00,827 epoch 12 - iter 52/133 - loss 0.01039636 - samples/sec: 749.22 - lr: 0.010000
+2023-02-06 08:32:02,125 epoch 12 - iter 65/133 - loss 0.01039765 - samples/sec: 734.58 - lr: 0.010000
+2023-02-06 08:32:03,651 epoch 12 - iter 78/133 - loss 0.01037797 - samples/sec: 742.60 - lr: 0.010000
+2023-02-06 08:32:04,912 epoch 12 - iter 91/133 - loss 0.01037713 - samples/sec: 767.06 - lr: 0.010000
+2023-02-06 08:32:06,155 epoch 12 - iter 104/133 - loss 0.01039333 - samples/sec: 778.01 - lr: 0.010000
+2023-02-06 08:32:07,679 epoch 12 - iter 117/133 - loss 0.01039785 - samples/sec: 612.76 - lr: 0.010000
+2023-02-06 08:32:08,958 epoch 12 - iter 130/133 - loss 0.01041191 - samples/sec: 749.49 - lr: 0.010000
+2023-02-06 08:32:09,221 ----------------------------------------------------------------------------------------------------
+2023-02-06 08:32:09,225 EPOCH 12 done: loss 0.0104 - lr 0.010000
+2023-02-06 08:32:11,743 Evaluating as a multi-label problem: False
+2023-02-06 08:32:11,761 DEV : loss 0.010490193963050842 - f1-score (micro avg) 0.6233
+2023-02-06 08:32:12,343 BAD EPOCHS (no improvement): 0
+2023-02-06 08:32:12,347 saving best model
+2023-02-06 08:32:12,425 ----------------------------------------------------------------------------------------------------
+2023-02-06 08:32:13,768 epoch 13 - iter 13/133 - loss 0.01044893 - samples/sec: 711.51 - lr: 0.010000
+2023-02-06 08:32:15,331 epoch 13 - iter 26/133 - loss 0.01037701 - samples/sec: 597.05 - lr: 0.010000
+2023-02-06 08:32:16,636 epoch 13 - iter 39/133 - loss 0.01041042 - samples/sec: 729.64 - lr: 0.010000
+2023-02-06 08:32:18,179 epoch 13 - iter 52/133 - loss 0.01039558 - samples/sec: 639.59 - lr: 0.010000
+2023-02-06 08:32:20,357 epoch 13 - iter 65/133 - loss 0.01036643 - samples/sec: 453.47 - lr: 0.010000
+2023-02-06 08:32:22,076 epoch 13 - iter 78/133 - loss 0.01034448 - samples/sec: 576.82 - lr: 0.010000
+2023-02-06 08:32:23,544 epoch 13 - iter 91/133 - loss 0.01037494 - samples/sec: 776.14 - lr: 0.010000
+2023-02-06 08:32:24,857 epoch 13 - iter 104/133 - loss 0.01038988 - samples/sec: 723.98 - lr: 0.010000
+2023-02-06 08:32:26,302 epoch 13 - iter 117/133 - loss 0.01037204 - samples/sec: 803.02 - lr: 0.010000
+2023-02-06 08:32:27,579 epoch 13 - iter 130/133 - loss 0.01037025 - samples/sec: 756.57 - lr: 0.010000
+2023-02-06 08:32:27,857 ----------------------------------------------------------------------------------------------------
+2023-02-06 08:32:27,858 EPOCH 13 done: loss 0.0104 - lr 0.010000
+2023-02-06 08:32:30,369 Evaluating as a multi-label problem: False
+2023-02-06 08:32:30,385 DEV : loss 0.010483094491064548 - f1-score (micro avg) 0.6207
+2023-02-06 08:32:30,744 BAD EPOCHS (no improvement): 1
+2023-02-06 08:32:30,752 ----------------------------------------------------------------------------------------------------
+2023-02-06 08:32:32,325 epoch 14 - iter 13/133 - loss 0.01026158 - samples/sec: 717.41 - lr: 0.010000
+2023-02-06 08:32:33,586 epoch 14 - iter 26/133 - loss 0.01037027 - samples/sec: 762.17 - lr: 0.010000
+2023-02-06 08:32:35,070 epoch 14 - iter 39/133 - loss 0.01036379 - samples/sec: 774.03 - lr: 0.010000
+2023-02-06 08:32:36,383 epoch 14 - iter 52/133 - loss 0.01035813 - samples/sec: 729.67 - lr: 0.010000
+2023-02-06 08:32:37,889 epoch 14 - iter 65/133 - loss 0.01031572 - samples/sec: 748.65 - lr: 0.010000
+2023-02-06 08:32:39,182 epoch 14 - iter 78/133 - loss 0.01034433 - samples/sec: 743.14 - lr: 0.010000
+2023-02-06 08:32:40,475 epoch 14 - iter 91/133 - loss 0.01037816 - samples/sec: 742.25 - lr: 0.010000
+2023-02-06 08:32:41,954 epoch 14 - iter 104/133 - loss 0.01039795 - samples/sec: 782.25 - lr: 0.010000
+2023-02-06 08:32:43,225 epoch 14 - iter 117/133 - loss 0.01038837 - samples/sec: 761.71 - lr: 0.010000
+2023-02-06 08:32:44,737 epoch 14 - iter 130/133 - loss 0.01036175 - samples/sec: 745.28 - lr: 0.010000
+2023-02-06 08:32:45,035 ----------------------------------------------------------------------------------------------------
+2023-02-06 08:32:45,040 EPOCH 14 done: loss 0.0104 - lr 0.010000
+2023-02-06 08:32:47,510 Evaluating as a multi-label problem: False
+2023-02-06 08:32:47,528 DEV : loss 0.010432829149067402 - f1-score (micro avg) 0.626
+2023-02-06 08:32:47,863 BAD EPOCHS (no improvement): 0
+2023-02-06 08:32:47,871 saving best model
+2023-02-06 08:32:47,944 ----------------------------------------------------------------------------------------------------
+2023-02-06 08:32:49,537 epoch 15 - iter 13/133 - loss 0.01019386 - samples/sec: 698.75 - lr: 0.010000
+2023-02-06 08:32:50,800 epoch 15 - iter 26/133 - loss 0.01019158 - samples/sec: 758.25 - lr: 0.010000
+2023-02-06 08:32:52,065 epoch 15 - iter 39/133 - loss 0.01015538 - samples/sec: 759.68 - lr: 0.010000
+2023-02-06 08:32:53,509 epoch 15 - iter 52/133 - loss 0.01023823 - samples/sec: 804.56 - lr: 0.010000
+2023-02-06 08:32:54,834 epoch 15 - iter 65/133 - loss 0.01026660 - samples/sec: 722.21 - lr: 0.010000
+2023-02-06 08:32:56,442 epoch 15 - iter 78/133 - loss 0.01025501 - samples/sec: 717.72 - lr: 0.010000
+2023-02-06 08:32:57,718 epoch 15 - iter 91/133 - loss 0.01024956 - samples/sec: 756.16 - lr: 0.010000
+2023-02-06 08:32:59,173 epoch 15 - iter 104/133 - loss 0.01028768 - samples/sec: 783.99 - lr: 0.010000
+2023-02-06 08:33:00,444 epoch 15 - iter 117/133 - loss 0.01027921 - samples/sec: 758.72 - lr: 0.010000
+2023-02-06 08:33:01,708 epoch 15 - iter 130/133 - loss 0.01030246 - samples/sec: 760.06 - lr: 0.010000
+2023-02-06 08:33:02,242 ----------------------------------------------------------------------------------------------------
+2023-02-06 08:33:02,248 EPOCH 15 done: loss 0.0103 - lr 0.010000
+2023-02-06 08:33:04,549 Evaluating as a multi-label problem: False
+2023-02-06 08:33:04,568 DEV : loss 0.010397534817457199 - f1-score (micro avg) 0.6247
+2023-02-06 08:33:05,171 BAD EPOCHS (no improvement): 1
+2023-02-06 08:33:05,181 ----------------------------------------------------------------------------------------------------
+2023-02-06 08:33:06,547 epoch 16 - iter 13/133 - loss 0.01041342 - samples/sec: 696.13 - lr: 0.010000
+2023-02-06 08:33:08,089 epoch 16 - iter 26/133 - loss 0.01036971 - samples/sec: 726.77 - lr: 0.010000
+2023-02-06 08:33:09,375 epoch 16 - iter 39/133 - loss 0.01029897 - samples/sec: 745.27 - lr: 0.010000
+2023-02-06 08:33:10,662 epoch 16 - iter 52/133 - loss 0.01030162 - samples/sec: 746.88 - lr: 0.010000
+2023-02-06 08:33:12,125 epoch 16 - iter 65/133 - loss 0.01031824 - samples/sec: 780.89 - lr: 0.010000
+2023-02-06 08:33:13,387 epoch 16 - iter 78/133 - loss 0.01031728 - samples/sec: 754.65 - lr: 0.010000
+2023-02-06 08:33:14,895 epoch 16 - iter 91/133 - loss 0.01027123 - samples/sec: 757.20 - lr: 0.010000
+2023-02-06 08:33:16,162 epoch 16 - iter 104/133 - loss 0.01027054 - samples/sec: 764.32 - lr: 0.010000
+2023-02-06 08:33:17,639 epoch 16 - iter 117/133 - loss 0.01024219 - samples/sec: 633.84 - lr: 0.010000
+2023-02-06 08:33:18,899 epoch 16 - iter 130/133 - loss 0.01026610 - samples/sec: 758.90 - lr: 0.010000
+2023-02-06 08:33:19,184 ----------------------------------------------------------------------------------------------------
+2023-02-06 08:33:19,188 EPOCH 16 done: loss 0.0103 - lr 0.010000
+2023-02-06 08:33:21,632 Evaluating as a multi-label problem: False
+2023-02-06 08:33:21,649 DEV : loss 0.010406638495624065 - f1-score (micro avg) 0.6253
+2023-02-06 08:33:22,222 BAD EPOCHS (no improvement): 2
+2023-02-06 08:33:22,234 ----------------------------------------------------------------------------------------------------
+2023-02-06 08:33:23,528 epoch 17 - iter 13/133 - loss 0.01033387 - samples/sec: 738.38 - lr: 0.010000
+2023-02-06 08:33:24,760 epoch 17 - iter 26/133 - loss 0.01031095 - samples/sec: 781.92 - lr: 0.010000
+2023-02-06 08:33:26,300 epoch 17 - iter 39/133 - loss 0.01028919 - samples/sec: 746.79 - lr: 0.010000
+2023-02-06 08:33:27,590 epoch 17 - iter 52/133 - loss 0.01030368 - samples/sec: 739.39 - lr: 0.010000
+2023-02-06 08:33:29,070 epoch 17 - iter 65/133 - loss 0.01029647 - samples/sec: 634.36 - lr: 0.010000
+2023-02-06 08:33:30,298 epoch 17 - iter 78/133 - loss 0.01029278 - samples/sec: 782.85 - lr: 0.010000
+2023-02-06 08:33:31,598 epoch 17 - iter 91/133 - loss 0.01028871 - samples/sec: 734.99 - lr: 0.010000
+2023-02-06 08:33:33,082 epoch 17 - iter 104/133 - loss 0.01029995 - samples/sec: 762.60 - lr: 0.010000
+2023-02-06 08:33:34,331 epoch 17 - iter 117/133 - loss 0.01028484 - samples/sec: 776.27 - lr: 0.010000
+2023-02-06 08:33:35,784 epoch 17 - iter 130/133 - loss 0.01027576 - samples/sec: 785.80 - lr: 0.010000
+2023-02-06 08:33:36,070 ----------------------------------------------------------------------------------------------------
+2023-02-06 08:33:36,071 EPOCH 17 done: loss 0.0103 - lr 0.010000
+2023-02-06 08:33:38,498 Evaluating as a multi-label problem: False
+2023-02-06 08:33:38,515 DEV : loss 0.01034807600080967 - f1-score (micro avg) 0.63
+2023-02-06 08:33:38,856 BAD EPOCHS (no improvement): 0
+2023-02-06 08:33:38,862 saving best model
+2023-02-06 08:33:38,939 ----------------------------------------------------------------------------------------------------
+2023-02-06 08:33:40,462 epoch 18 - iter 13/133 - loss 0.01017348 - samples/sec: 740.64 - lr: 0.010000
+2023-02-06 08:33:41,723 epoch 18 - iter 26/133 - loss 0.01019914 - samples/sec: 764.13 - lr: 0.010000
+2023-02-06 08:33:43,017 epoch 18 - iter 39/133 - loss 0.01021330 - samples/sec: 737.55 - lr: 0.010000
+2023-02-06 08:33:44,471 epoch 18 - iter 52/133 - loss 0.01018504 - samples/sec: 782.18 - lr: 0.010000
+2023-02-06 08:33:45,771 epoch 18 - iter 65/133 - loss 0.01017944 - samples/sec: 740.07 - lr: 0.010000
+2023-02-06 08:33:47,306 epoch 18 - iter 78/133 - loss 0.01020170 - samples/sec: 734.11 - lr: 0.010000
+2023-02-06 08:33:48,590 epoch 18 - iter 91/133 - loss 0.01021375 - samples/sec: 743.50 - lr: 0.010000
+2023-02-06 08:33:50,106 epoch 18 - iter 104/133 - loss 0.01019670 - samples/sec: 745.19 - lr: 0.010000
+2023-02-06 08:33:51,371 epoch 18 - iter 117/133 - loss 0.01023334 - samples/sec: 762.98 - lr: 0.010000
+2023-02-06 08:33:52,871 epoch 18 - iter 130/133 - loss 0.01022335 - samples/sec: 764.73 - lr: 0.010000
+2023-02-06 08:33:53,154 ----------------------------------------------------------------------------------------------------
+2023-02-06 08:33:53,156 EPOCH 18 done: loss 0.0102 - lr 0.010000
+2023-02-06 08:33:55,611 Evaluating as a multi-label problem: False
+2023-02-06 08:33:55,629 DEV : loss 0.010320308618247509 - f1-score (micro avg) 0.6313
+2023-02-06 08:33:55,977 BAD EPOCHS (no improvement): 0
+2023-02-06 08:33:55,987 saving best model
+2023-02-06 08:33:56,065 ----------------------------------------------------------------------------------------------------
+2023-02-06 08:33:57,370 epoch 19 - iter 13/133 - loss 0.01029091 - samples/sec: 738.06 - lr: 0.010000
+2023-02-06 08:33:58,890 epoch 19 - iter 26/133 - loss 0.01023014 - samples/sec: 750.92 - lr: 0.010000
+2023-02-06 08:34:00,150 epoch 19 - iter 39/133 - loss 0.01020864 - samples/sec: 762.09 - lr: 0.010000
+2023-02-06 08:34:01,681 epoch 19 - iter 52/133 - loss 0.01024439 - samples/sec: 741.73 - lr: 0.010000
+2023-02-06 08:34:02,906 epoch 19 - iter 65/133 - loss 0.01023181 - samples/sec: 790.97 - lr: 0.010000
+2023-02-06 08:34:04,433 epoch 19 - iter 78/133 - loss 0.01022412 - samples/sec: 613.31 - lr: 0.010000
+2023-02-06 08:34:05,780 epoch 19 - iter 91/133 - loss 0.01019693 - samples/sec: 712.09 - lr: 0.010000
+2023-02-06 08:34:07,187 epoch 19 - iter 104/133 - loss 0.01022081 - samples/sec: 673.66 - lr: 0.010000
+2023-02-06 08:34:08,668 epoch 19 - iter 117/133 - loss 0.01022890 - samples/sec: 785.83 - lr: 0.010000
+2023-02-06 08:34:09,910 epoch 19 - iter 130/133 - loss 0.01020723 - samples/sec: 777.53 - lr: 0.010000
+2023-02-06 08:34:10,200 ----------------------------------------------------------------------------------------------------
+2023-02-06 08:34:10,207 EPOCH 19 done: loss 0.0102 - lr 0.010000
+2023-02-06 08:34:12,679 Evaluating as a multi-label problem: False
+2023-02-06 08:34:12,700 DEV : loss 0.010296817868947983 - f1-score (micro avg) 0.64
+2023-02-06 08:34:13,311 BAD EPOCHS (no improvement): 0
+2023-02-06 08:34:13,316 saving best model
+2023-02-06 08:34:13,390 ----------------------------------------------------------------------------------------------------
+2023-02-06 08:34:14,758 epoch 20 - iter 13/133 - loss 0.01021864 - samples/sec: 703.40 - lr: 0.010000
+2023-02-06 08:34:16,077 epoch 20 - iter 26/133 - loss 0.01028711 - samples/sec: 725.84 - lr: 0.010000
+2023-02-06 08:34:17,634 epoch 20 - iter 39/133 - loss 0.01022971 - samples/sec: 745.04 - lr: 0.010000
+2023-02-06 08:34:18,921 epoch 20 - iter 52/133 - loss 0.01026002 - samples/sec: 750.90 - lr: 0.010000
+2023-02-06 08:34:20,467 epoch 20 - iter 65/133 - loss 0.01026057 - samples/sec: 602.55 - lr: 0.010000
+2023-02-06 08:34:21,753 epoch 20 - iter 78/133 - loss 0.01026928 - samples/sec: 751.33 - lr: 0.010000
+2023-02-06 08:34:23,292 epoch 20 - iter 91/133 - loss 0.01025076 - samples/sec: 736.16 - lr: 0.010000
+2023-02-06 08:34:24,579 epoch 20 - iter 104/133 - loss 0.01020640 - samples/sec: 746.82 - lr: 0.010000
+2023-02-06 08:34:25,909 epoch 20 - iter 117/133 - loss 0.01019171 - samples/sec: 719.60 - lr: 0.010000
+2023-02-06 08:34:27,438 epoch 20 - iter 130/133 - loss 0.01017225 - samples/sec: 738.30 - lr: 0.010000
+2023-02-06 08:34:27,739 ----------------------------------------------------------------------------------------------------
+2023-02-06 08:34:27,744 EPOCH 20 done: loss 0.0102 - lr 0.010000
+2023-02-06 08:34:30,257 Evaluating as a multi-label problem: False
+2023-02-06 08:34:30,274 DEV : loss 0.010265583172440529 - f1-score (micro avg) 0.6413
+2023-02-06 08:34:30,654 BAD EPOCHS (no improvement): 0
+2023-02-06 08:34:30,660 saving best model
+2023-02-06 08:34:30,739 ----------------------------------------------------------------------------------------------------
+2023-02-06 08:34:32,330 epoch 21 - iter 13/133 - loss 0.01014521 - samples/sec: 710.15 - lr: 0.010000
+2023-02-06 08:34:33,625 epoch 21 - iter 26/133 - loss 0.01018544 - samples/sec: 743.43 - lr: 0.010000
+2023-02-06 08:34:35,128 epoch 21 - iter 39/133 - loss 0.01014949 - samples/sec: 755.29 - lr: 0.010000
+2023-02-06 08:34:36,388 epoch 21 - iter 52/133 - loss 0.01015701 - samples/sec: 758.49 - lr: 0.010000
+2023-02-06 08:34:37,625 epoch 21 - iter 65/133 - loss 0.01014343 - samples/sec: 781.03 - lr: 0.010000
+2023-02-06 08:34:39,127 epoch 21 - iter 78/133 - loss 0.01011384 - samples/sec: 758.01 - lr: 0.010000
+2023-02-06 08:34:40,496 epoch 21 - iter 91/133 - loss 0.01012422 - samples/sec: 690.35 - lr: 0.010000
+2023-02-06 08:34:42,023 epoch 21 - iter 104/133 - loss 0.01014283 - samples/sec: 761.00 - lr: 0.010000
+2023-02-06 08:34:43,304 epoch 21 - iter 117/133 - loss 0.01013442 - samples/sec: 748.07 - lr: 0.010000
+2023-02-06 08:34:44,869 epoch 21 - iter 130/133 - loss 0.01012862 - samples/sec: 730.48 - lr: 0.010000
+2023-02-06 08:34:45,169 ----------------------------------------------------------------------------------------------------
+2023-02-06 08:34:45,175 EPOCH 21 done: loss 0.0101 - lr 0.010000
+2023-02-06 08:34:47,720 Evaluating as a multi-label problem: False
+2023-02-06 08:34:47,743 DEV : loss 0.010291438549757004 - f1-score (micro avg) 0.6373
+2023-02-06 08:34:48,104 BAD EPOCHS (no improvement): 1
+2023-02-06 08:34:48,124 ----------------------------------------------------------------------------------------------------
+2023-02-06 08:34:49,559 epoch 22 - iter 13/133 - loss 0.00996709 - samples/sec: 657.64 - lr: 0.010000
+2023-02-06 08:34:51,095 epoch 22 - iter 26/133 - loss 0.01001206 - samples/sec: 739.55 - lr: 0.010000
+2023-02-06 08:34:52,347 epoch 22 - iter 39/133 - loss 0.01004049 - samples/sec: 767.48 - lr: 0.010000
+2023-02-06 08:34:53,920 epoch 22 - iter 52/133 - loss 0.01005317 - samples/sec: 722.99 - lr: 0.010000
+2023-02-06 08:34:55,196 epoch 22 - iter 65/133 - loss 0.01010477 - samples/sec: 747.76 - lr: 0.010000
+2023-02-06 08:34:56,737 epoch 22 - iter 78/133 - loss 0.01010079 - samples/sec: 608.24 - lr: 0.010000
+2023-02-06 08:34:58,059 epoch 22 - iter 91/133 - loss 0.01013500 - samples/sec: 727.48 - lr: 0.010000
+2023-02-06 08:34:59,404 epoch 22 - iter 104/133 - loss 0.01013831 - samples/sec: 712.32 - lr: 0.010000
+2023-02-06 08:35:00,950 epoch 22 - iter 117/133 - loss 0.01014236 - samples/sec: 741.47 - lr: 0.010000
+2023-02-06 08:35:02,201 epoch 22 - iter 130/133 - loss 0.01013044 - samples/sec: 762.45 - lr: 0.010000
+2023-02-06 08:35:02,483 ----------------------------------------------------------------------------------------------------
+2023-02-06 08:35:02,485 EPOCH 22 done: loss 0.0101 - lr 0.010000
+2023-02-06 08:35:04,928 Evaluating as a multi-label problem: False
+2023-02-06 08:35:04,944 DEV : loss 0.010224188677966595 - f1-score (micro avg) 0.6413
+2023-02-06 08:35:05,519 BAD EPOCHS (no improvement): 0
+2023-02-06 08:35:05,528 ----------------------------------------------------------------------------------------------------
|
411 |
+
2023-02-06 08:35:06,856 epoch 23 - iter 13/133 - loss 0.01005563 - samples/sec: 720.28 - lr: 0.010000
|
412 |
+
2023-02-06 08:35:08,375 epoch 23 - iter 26/133 - loss 0.01002448 - samples/sec: 616.23 - lr: 0.010000
|
413 |
+
2023-02-06 08:35:09,661 epoch 23 - iter 39/133 - loss 0.01015809 - samples/sec: 739.97 - lr: 0.010000
|
414 |
+
2023-02-06 08:35:10,916 epoch 23 - iter 52/133 - loss 0.01016788 - samples/sec: 764.29 - lr: 0.010000
|
415 |
+
2023-02-06 08:35:12,458 epoch 23 - iter 65/133 - loss 0.01009484 - samples/sec: 739.26 - lr: 0.010000
|
416 |
+
2023-02-06 08:35:13,705 epoch 23 - iter 78/133 - loss 0.01008399 - samples/sec: 770.62 - lr: 0.010000
|
417 |
+
2023-02-06 08:35:15,187 epoch 23 - iter 91/133 - loss 0.01007382 - samples/sec: 769.82 - lr: 0.010000
|
418 |
+
2023-02-06 08:35:16,466 epoch 23 - iter 104/133 - loss 0.01006859 - samples/sec: 747.87 - lr: 0.010000
|
419 |
+
2023-02-06 08:35:18,033 epoch 23 - iter 117/133 - loss 0.01007952 - samples/sec: 718.89 - lr: 0.010000
|
420 |
+
2023-02-06 08:35:19,305 epoch 23 - iter 130/133 - loss 0.01007932 - samples/sec: 752.56 - lr: 0.010000
|
421 |
+
2023-02-06 08:35:19,591 ----------------------------------------------------------------------------------------------------
|
422 |
+
2023-02-06 08:35:19,593 EPOCH 23 done: loss 0.0101 - lr 0.010000
|
423 |
+
2023-02-06 08:35:22,099 Evaluating as a multi-label problem: False
|
424 |
+
2023-02-06 08:35:22,116 DEV : loss 0.010190014727413654 - f1-score (micro avg) 0.644
|
425 |
+
2023-02-06 08:35:22,476 BAD EPOCHS (no improvement): 0
|
426 |
+
2023-02-06 08:35:22,484 saving best model
|
427 |
+
2023-02-06 08:35:22,562 ----------------------------------------------------------------------------------------------------
|
428 |
+
2023-02-06 08:35:24,159 epoch 24 - iter 13/133 - loss 0.01033764 - samples/sec: 707.79 - lr: 0.010000
|
429 |
+
2023-02-06 08:35:25,436 epoch 24 - iter 26/133 - loss 0.01039924 - samples/sec: 753.06 - lr: 0.010000
|
430 |
+
2023-02-06 08:35:26,953 epoch 24 - iter 39/133 - loss 0.01019226 - samples/sec: 761.33 - lr: 0.010000
|
431 |
+
2023-02-06 08:35:28,217 epoch 24 - iter 52/133 - loss 0.01010421 - samples/sec: 759.92 - lr: 0.010000
|
432 |
+
2023-02-06 08:35:29,503 epoch 24 - iter 65/133 - loss 0.01008049 - samples/sec: 751.64 - lr: 0.010000
|
433 |
+
2023-02-06 08:35:31,044 epoch 24 - iter 78/133 - loss 0.01005312 - samples/sec: 735.12 - lr: 0.010000
|
434 |
+
2023-02-06 08:35:32,319 epoch 24 - iter 91/133 - loss 0.01005299 - samples/sec: 759.51 - lr: 0.010000
|
435 |
+
2023-02-06 08:35:33,822 epoch 24 - iter 104/133 - loss 0.01003790 - samples/sec: 760.19 - lr: 0.010000
|
436 |
+
2023-02-06 08:35:35,097 epoch 24 - iter 117/133 - loss 0.01003322 - samples/sec: 752.83 - lr: 0.010000
|
437 |
+
2023-02-06 08:35:36,616 epoch 24 - iter 130/133 - loss 0.01004728 - samples/sec: 748.80 - lr: 0.010000
|
438 |
+
2023-02-06 08:35:36,902 ----------------------------------------------------------------------------------------------------
|
439 |
+
2023-02-06 08:35:36,903 EPOCH 24 done: loss 0.0101 - lr 0.010000
|
440 |
+
2023-02-06 08:35:39,354 Evaluating as a multi-label problem: False
|
441 |
+
2023-02-06 08:35:39,371 DEV : loss 0.010158772580325603 - f1-score (micro avg) 0.6487
|
442 |
+
2023-02-06 08:35:39,720 BAD EPOCHS (no improvement): 0
|
443 |
+
2023-02-06 08:35:39,727 saving best model
|
444 |
+
2023-02-06 08:35:39,802 ----------------------------------------------------------------------------------------------------
|
445 |
+
2023-02-06 08:35:41,120 epoch 25 - iter 13/133 - loss 0.00981064 - samples/sec: 733.84 - lr: 0.010000
|
446 |
+
2023-02-06 08:35:42,650 epoch 25 - iter 26/133 - loss 0.00984903 - samples/sec: 745.83 - lr: 0.010000
|
447 |
+
2023-02-06 08:35:43,929 epoch 25 - iter 39/133 - loss 0.00991121 - samples/sec: 755.02 - lr: 0.010000
|
448 |
+
2023-02-06 08:35:45,467 epoch 25 - iter 52/133 - loss 0.01000175 - samples/sec: 740.89 - lr: 0.010000
|
449 |
+
2023-02-06 08:35:46,750 epoch 25 - iter 65/133 - loss 0.01000978 - samples/sec: 746.54 - lr: 0.010000
|
450 |
+
2023-02-06 08:35:48,252 epoch 25 - iter 78/133 - loss 0.01001222 - samples/sec: 770.84 - lr: 0.010000
|
451 |
+
2023-02-06 08:35:49,515 epoch 25 - iter 91/133 - loss 0.01000999 - samples/sec: 759.97 - lr: 0.010000
|
452 |
+
2023-02-06 08:35:51,036 epoch 25 - iter 104/133 - loss 0.00999015 - samples/sec: 616.30 - lr: 0.010000
|
453 |
+
2023-02-06 08:35:52,290 epoch 25 - iter 117/133 - loss 0.00998576 - samples/sec: 770.57 - lr: 0.010000
|
454 |
+
2023-02-06 08:35:53,576 epoch 25 - iter 130/133 - loss 0.00998035 - samples/sec: 745.63 - lr: 0.010000
|
455 |
+
2023-02-06 08:35:54,094 ----------------------------------------------------------------------------------------------------
|
456 |
+
2023-02-06 08:35:54,096 EPOCH 25 done: loss 0.0100 - lr 0.010000
|
457 |
+
2023-02-06 08:35:56,283 Evaluating as a multi-label problem: False
|
458 |
+
2023-02-06 08:35:56,301 DEV : loss 0.010137598030269146 - f1-score (micro avg) 0.6507
|
459 |
+
2023-02-06 08:35:56,866 BAD EPOCHS (no improvement): 0
|
460 |
+
2023-02-06 08:35:56,874 saving best model
|
461 |
+
2023-02-06 08:35:56,952 ----------------------------------------------------------------------------------------------------
|
462 |
+
2023-02-06 08:35:58,254 epoch 26 - iter 13/133 - loss 0.00981231 - samples/sec: 744.59 - lr: 0.010000
|
463 |
+
2023-02-06 08:35:59,716 epoch 26 - iter 26/133 - loss 0.00985499 - samples/sec: 779.93 - lr: 0.010000
|
464 |
+
2023-02-06 08:36:00,934 epoch 26 - iter 39/133 - loss 0.00986997 - samples/sec: 794.56 - lr: 0.010000
|
465 |
+
2023-02-06 08:36:02,450 epoch 26 - iter 52/133 - loss 0.00985057 - samples/sec: 750.90 - lr: 0.010000
|
466 |
+
2023-02-06 08:36:03,705 epoch 26 - iter 65/133 - loss 0.00991802 - samples/sec: 770.23 - lr: 0.010000
|
467 |
+
2023-02-06 08:36:04,983 epoch 26 - iter 78/133 - loss 0.00995249 - samples/sec: 752.52 - lr: 0.010000
|
468 |
+
2023-02-06 08:36:06,610 epoch 26 - iter 91/133 - loss 0.00994670 - samples/sec: 695.85 - lr: 0.010000
|
469 |
+
2023-02-06 08:36:07,854 epoch 26 - iter 104/133 - loss 0.00992897 - samples/sec: 766.60 - lr: 0.010000
|
470 |
+
2023-02-06 08:36:09,332 epoch 26 - iter 117/133 - loss 0.00995839 - samples/sec: 771.15 - lr: 0.010000
|
471 |
+
2023-02-06 08:36:10,585 epoch 26 - iter 130/133 - loss 0.00997863 - samples/sec: 763.77 - lr: 0.010000
|
472 |
+
2023-02-06 08:36:10,866 ----------------------------------------------------------------------------------------------------
|
473 |
+
2023-02-06 08:36:10,871 EPOCH 26 done: loss 0.0100 - lr 0.010000
|
474 |
+
2023-02-06 08:36:13,272 Evaluating as a multi-label problem: False
|
475 |
+
2023-02-06 08:36:13,289 DEV : loss 0.010154918767511845 - f1-score (micro avg) 0.6453
|
476 |
+
2023-02-06 08:36:13,875 BAD EPOCHS (no improvement): 1
|
477 |
+
2023-02-06 08:36:13,882 ----------------------------------------------------------------------------------------------------
|
478 |
+
2023-02-06 08:36:15,140 epoch 27 - iter 13/133 - loss 0.00982359 - samples/sec: 758.36 - lr: 0.010000
|
479 |
+
2023-02-06 08:36:16,393 epoch 27 - iter 26/133 - loss 0.00995263 - samples/sec: 765.88 - lr: 0.010000
|
480 |
+
2023-02-06 08:36:17,990 epoch 27 - iter 39/133 - loss 0.00994264 - samples/sec: 724.70 - lr: 0.010000
|
481 |
+
2023-02-06 08:36:19,734 epoch 27 - iter 52/133 - loss 0.00990056 - samples/sec: 584.51 - lr: 0.010000
|
482 |
+
2023-02-06 08:36:21,871 epoch 27 - iter 65/133 - loss 0.00989842 - samples/sec: 547.01 - lr: 0.010000
|
483 |
+
2023-02-06 08:36:23,233 epoch 27 - iter 78/133 - loss 0.00990904 - samples/sec: 698.41 - lr: 0.010000
|
484 |
+
2023-02-06 08:36:24,750 epoch 27 - iter 91/133 - loss 0.00997472 - samples/sec: 741.54 - lr: 0.010000
|
485 |
+
2023-02-06 08:36:25,989 epoch 27 - iter 104/133 - loss 0.00996257 - samples/sec: 774.43 - lr: 0.010000
|
486 |
+
2023-02-06 08:36:27,217 epoch 27 - iter 117/133 - loss 0.00995437 - samples/sec: 781.46 - lr: 0.010000
|
487 |
+
2023-02-06 08:36:28,665 epoch 27 - iter 130/133 - loss 0.00995591 - samples/sec: 787.10 - lr: 0.010000
|
488 |
+
2023-02-06 08:36:28,936 ----------------------------------------------------------------------------------------------------
|
489 |
+
2023-02-06 08:36:28,941 EPOCH 27 done: loss 0.0100 - lr 0.010000
|
490 |
+
2023-02-06 08:36:31,339 Evaluating as a multi-label problem: False
|
491 |
+
2023-02-06 08:36:31,357 DEV : loss 0.010085121728479862 - f1-score (micro avg) 0.6567
|
492 |
+
2023-02-06 08:36:31,722 BAD EPOCHS (no improvement): 0
|
493 |
+
2023-02-06 08:36:31,732 saving best model
|
494 |
+
2023-02-06 08:36:31,827 ----------------------------------------------------------------------------------------------------
|
495 |
+
2023-02-06 08:36:33,404 epoch 28 - iter 13/133 - loss 0.01020064 - samples/sec: 708.07 - lr: 0.010000
|
496 |
+
2023-02-06 08:36:34,623 epoch 28 - iter 26/133 - loss 0.01013967 - samples/sec: 786.34 - lr: 0.010000
|
497 |
+
2023-02-06 08:36:36,131 epoch 28 - iter 39/133 - loss 0.01003563 - samples/sec: 758.40 - lr: 0.010000
|
498 |
+
2023-02-06 08:36:37,442 epoch 28 - iter 52/133 - loss 0.00998388 - samples/sec: 730.28 - lr: 0.010000
|
499 |
+
2023-02-06 08:36:38,722 epoch 28 - iter 65/133 - loss 0.00999888 - samples/sec: 748.06 - lr: 0.010000
|
500 |
+
2023-02-06 08:36:40,168 epoch 28 - iter 78/133 - loss 0.01003122 - samples/sec: 786.57 - lr: 0.010000
|
501 |
+
2023-02-06 08:36:41,456 epoch 28 - iter 91/133 - loss 0.01000777 - samples/sec: 744.48 - lr: 0.010000
|
502 |
+
2023-02-06 08:36:42,925 epoch 28 - iter 104/133 - loss 0.00998209 - samples/sec: 783.40 - lr: 0.010000
|
503 |
+
2023-02-06 08:36:44,160 epoch 28 - iter 117/133 - loss 0.00999413 - samples/sec: 777.17 - lr: 0.010000
|
504 |
+
2023-02-06 08:36:45,652 epoch 28 - iter 130/133 - loss 0.00996400 - samples/sec: 627.73 - lr: 0.010000
|
505 |
+
2023-02-06 08:36:45,936 ----------------------------------------------------------------------------------------------------
|
506 |
+
2023-02-06 08:36:45,937 EPOCH 28 done: loss 0.0100 - lr 0.010000
|
507 |
+
2023-02-06 08:36:48,370 Evaluating as a multi-label problem: False
|
508 |
+
2023-02-06 08:36:48,390 DEV : loss 0.010063917376101017 - f1-score (micro avg) 0.6567
|
509 |
+
2023-02-06 08:36:48,774 BAD EPOCHS (no improvement): 0
|
510 |
+
2023-02-06 08:36:48,783 ----------------------------------------------------------------------------------------------------
|
511 |
+
2023-02-06 08:36:50,112 epoch 29 - iter 13/133 - loss 0.01007614 - samples/sec: 715.39 - lr: 0.010000
|
512 |
+
2023-02-06 08:36:51,579 epoch 29 - iter 26/133 - loss 0.00997458 - samples/sec: 766.42 - lr: 0.010000
|
513 |
+
2023-02-06 08:36:52,814 epoch 29 - iter 39/133 - loss 0.00989408 - samples/sec: 781.38 - lr: 0.010000
|
514 |
+
2023-02-06 08:36:54,264 epoch 29 - iter 52/133 - loss 0.00989758 - samples/sec: 784.84 - lr: 0.010000
|
515 |
+
2023-02-06 08:36:55,532 epoch 29 - iter 65/133 - loss 0.00991688 - samples/sec: 756.52 - lr: 0.010000
|
516 |
+
2023-02-06 08:36:57,026 epoch 29 - iter 78/133 - loss 0.00991818 - samples/sec: 758.17 - lr: 0.010000
|
517 |
+
2023-02-06 08:36:58,307 epoch 29 - iter 91/133 - loss 0.00989315 - samples/sec: 756.41 - lr: 0.010000
|
518 |
+
2023-02-06 08:36:59,562 epoch 29 - iter 104/133 - loss 0.00987888 - samples/sec: 767.41 - lr: 0.010000
|
519 |
+
2023-02-06 08:37:01,023 epoch 29 - iter 117/133 - loss 0.00989864 - samples/sec: 642.50 - lr: 0.010000
|
520 |
+
2023-02-06 08:37:02,264 epoch 29 - iter 130/133 - loss 0.00992279 - samples/sec: 772.60 - lr: 0.010000
|
521 |
+
2023-02-06 08:37:02,549 ----------------------------------------------------------------------------------------------------
|
522 |
+
2023-02-06 08:37:02,550 EPOCH 29 done: loss 0.0099 - lr 0.010000
|
523 |
+
2023-02-06 08:37:05,071 Evaluating as a multi-label problem: False
|
524 |
+
2023-02-06 08:37:05,088 DEV : loss 0.010063448920845985 - f1-score (micro avg) 0.6533
|
525 |
+
2023-02-06 08:37:05,677 BAD EPOCHS (no improvement): 1
|
526 |
+
2023-02-06 08:37:05,684 ----------------------------------------------------------------------------------------------------
|
527 |
+
2023-02-06 08:37:06,997 epoch 30 - iter 13/133 - loss 0.01003949 - samples/sec: 732.41 - lr: 0.010000
|
528 |
+
2023-02-06 08:37:08,331 epoch 30 - iter 26/133 - loss 0.01015033 - samples/sec: 719.55 - lr: 0.010000
|
529 |
+
2023-02-06 08:37:09,806 epoch 30 - iter 39/133 - loss 0.01011311 - samples/sec: 636.91 - lr: 0.010000
|
530 |
+
2023-02-06 08:37:11,050 epoch 30 - iter 52/133 - loss 0.00997797 - samples/sec: 769.41 - lr: 0.010000
|
531 |
+
2023-02-06 08:37:12,552 epoch 30 - iter 65/133 - loss 0.00995678 - samples/sec: 763.55 - lr: 0.010000
|
532 |
+
2023-02-06 08:37:13,798 epoch 30 - iter 78/133 - loss 0.00991438 - samples/sec: 771.97 - lr: 0.010000
|
533 |
+
2023-02-06 08:37:15,297 epoch 30 - iter 91/133 - loss 0.00988681 - samples/sec: 753.64 - lr: 0.010000
|
534 |
+
2023-02-06 08:37:16,543 epoch 30 - iter 104/133 - loss 0.00986966 - samples/sec: 770.92 - lr: 0.010000
|
535 |
+
2023-02-06 08:37:17,762 epoch 30 - iter 117/133 - loss 0.00987746 - samples/sec: 785.58 - lr: 0.010000
|
536 |
+
2023-02-06 08:37:19,280 epoch 30 - iter 130/133 - loss 0.00989263 - samples/sec: 738.22 - lr: 0.010000
|
537 |
+
2023-02-06 08:37:19,569 ----------------------------------------------------------------------------------------------------
|
538 |
+
2023-02-06 08:37:19,571 EPOCH 30 done: loss 0.0099 - lr 0.010000
|
539 |
+
2023-02-06 08:37:22,147 Evaluating as a multi-label problem: False
|
540 |
+
2023-02-06 08:37:22,165 DEV : loss 0.010032457299530506 - f1-score (micro avg) 0.6627
|
541 |
+
2023-02-06 08:37:22,507 BAD EPOCHS (no improvement): 0
|
542 |
+
2023-02-06 08:37:22,515 saving best model
|
543 |
+
2023-02-06 08:37:22,593 ----------------------------------------------------------------------------------------------------
|
544 |
+
2023-02-06 08:37:24,034 epoch 31 - iter 13/133 - loss 0.00986991 - samples/sec: 795.89 - lr: 0.010000
|
545 |
+
2023-02-06 08:37:25,350 epoch 31 - iter 26/133 - loss 0.00980032 - samples/sec: 722.12 - lr: 0.010000
|
546 |
+
2023-02-06 08:37:26,804 epoch 31 - iter 39/133 - loss 0.00981070 - samples/sec: 788.11 - lr: 0.010000
|
547 |
+
2023-02-06 08:37:28,032 epoch 31 - iter 52/133 - loss 0.00978956 - samples/sec: 782.72 - lr: 0.010000
|
548 |
+
2023-02-06 08:37:29,262 epoch 31 - iter 65/133 - loss 0.00985205 - samples/sec: 776.53 - lr: 0.010000
|
549 |
+
2023-02-06 08:37:30,801 epoch 31 - iter 78/133 - loss 0.00986377 - samples/sec: 734.35 - lr: 0.010000
|
550 |
+
2023-02-06 08:37:32,105 epoch 31 - iter 91/133 - loss 0.00986039 - samples/sec: 737.43 - lr: 0.010000
|
551 |
+
2023-02-06 08:37:33,586 epoch 31 - iter 104/133 - loss 0.00986586 - samples/sec: 766.82 - lr: 0.010000
|
552 |
+
2023-02-06 08:37:34,867 epoch 31 - iter 117/133 - loss 0.00988794 - samples/sec: 748.41 - lr: 0.010000
|
553 |
+
2023-02-06 08:37:36,405 epoch 31 - iter 130/133 - loss 0.00986537 - samples/sec: 732.70 - lr: 0.010000
|
554 |
+
2023-02-06 08:37:36,707 ----------------------------------------------------------------------------------------------------
|
555 |
+
2023-02-06 08:37:36,713 EPOCH 31 done: loss 0.0099 - lr 0.010000
|
556 |
+
2023-02-06 08:37:39,148 Evaluating as a multi-label problem: False
|
557 |
+
2023-02-06 08:37:39,165 DEV : loss 0.010030188597738743 - f1-score (micro avg) 0.6507
|
558 |
+
2023-02-06 08:37:39,503 BAD EPOCHS (no improvement): 1
|
559 |
+
2023-02-06 08:37:39,509 ----------------------------------------------------------------------------------------------------
|
560 |
+
2023-02-06 08:37:40,819 epoch 32 - iter 13/133 - loss 0.00972815 - samples/sec: 729.87 - lr: 0.010000
|
561 |
+
2023-02-06 08:37:42,293 epoch 32 - iter 26/133 - loss 0.00993442 - samples/sec: 767.42 - lr: 0.010000
|
562 |
+
2023-02-06 08:37:43,527 epoch 32 - iter 39/133 - loss 0.00989563 - samples/sec: 777.81 - lr: 0.010000
|
563 |
+
2023-02-06 08:37:45,030 epoch 32 - iter 52/133 - loss 0.00987169 - samples/sec: 762.50 - lr: 0.010000
|
564 |
+
2023-02-06 08:37:46,312 epoch 32 - iter 65/133 - loss 0.00982198 - samples/sec: 744.56 - lr: 0.010000
|
565 |
+
2023-02-06 08:37:47,772 epoch 32 - iter 78/133 - loss 0.00984166 - samples/sec: 787.72 - lr: 0.010000
|
566 |
+
2023-02-06 08:37:49,052 epoch 32 - iter 91/133 - loss 0.00985835 - samples/sec: 753.47 - lr: 0.010000
|
567 |
+
2023-02-06 08:37:50,603 epoch 32 - iter 104/133 - loss 0.00985180 - samples/sec: 729.35 - lr: 0.010000
|
568 |
+
2023-02-06 08:37:51,833 epoch 32 - iter 117/133 - loss 0.00984472 - samples/sec: 780.52 - lr: 0.010000
|
569 |
+
2023-02-06 08:37:53,083 epoch 32 - iter 130/133 - loss 0.00982573 - samples/sec: 764.01 - lr: 0.010000
|
570 |
+
2023-02-06 08:37:53,402 ----------------------------------------------------------------------------------------------------
|
571 |
+
2023-02-06 08:37:53,407 EPOCH 32 done: loss 0.0098 - lr 0.010000
|
572 |
+
2023-02-06 08:37:55,865 Evaluating as a multi-label problem: False
|
573 |
+
2023-02-06 08:37:55,882 DEV : loss 0.009983059018850327 - f1-score (micro avg) 0.662
|
574 |
+
2023-02-06 08:37:56,448 BAD EPOCHS (no improvement): 2
|
575 |
+
2023-02-06 08:37:56,456 ----------------------------------------------------------------------------------------------------
|
576 |
+
2023-02-06 08:37:57,753 epoch 33 - iter 13/133 - loss 0.00986220 - samples/sec: 736.25 - lr: 0.010000
|
577 |
+
2023-02-06 08:37:59,189 epoch 33 - iter 26/133 - loss 0.00975062 - samples/sec: 802.53 - lr: 0.010000
|
578 |
+
2023-02-06 08:38:00,474 epoch 33 - iter 39/133 - loss 0.00986705 - samples/sec: 745.66 - lr: 0.010000
|
579 |
+
2023-02-06 08:38:01,738 epoch 33 - iter 52/133 - loss 0.00984679 - samples/sec: 754.60 - lr: 0.010000
|
580 |
+
2023-02-06 08:38:03,179 epoch 33 - iter 65/133 - loss 0.00978551 - samples/sec: 794.68 - lr: 0.010000
|
581 |
+
2023-02-06 08:38:04,429 epoch 33 - iter 78/133 - loss 0.00975048 - samples/sec: 765.26 - lr: 0.010000
|
582 |
+
2023-02-06 08:38:05,913 epoch 33 - iter 91/133 - loss 0.00976211 - samples/sec: 634.13 - lr: 0.010000
|
583 |
+
2023-02-06 08:38:07,200 epoch 33 - iter 104/133 - loss 0.00978878 - samples/sec: 740.16 - lr: 0.010000
|
584 |
+
2023-02-06 08:38:08,709 epoch 33 - iter 117/133 - loss 0.00982863 - samples/sec: 747.73 - lr: 0.010000
|
585 |
+
2023-02-06 08:38:10,026 epoch 33 - iter 130/133 - loss 0.00987417 - samples/sec: 722.74 - lr: 0.010000
|
586 |
+
2023-02-06 08:38:10,302 ----------------------------------------------------------------------------------------------------
|
587 |
+
2023-02-06 08:38:10,303 EPOCH 33 done: loss 0.0099 - lr 0.010000
|
588 |
+
2023-02-06 08:38:12,844 Evaluating as a multi-label problem: False
|
589 |
+
2023-02-06 08:38:12,862 DEV : loss 0.009982590563595295 - f1-score (micro avg) 0.66
|
590 |
+
2023-02-06 08:38:13,436 BAD EPOCHS (no improvement): 3
|
591 |
+
2023-02-06 08:38:13,443 ----------------------------------------------------------------------------------------------------
|
592 |
+
2023-02-06 08:38:14,816 epoch 34 - iter 13/133 - loss 0.00992892 - samples/sec: 690.92 - lr: 0.010000
|
593 |
+
2023-02-06 08:38:16,115 epoch 34 - iter 26/133 - loss 0.00987621 - samples/sec: 736.73 - lr: 0.010000
|
594 |
+
2023-02-06 08:38:17,696 epoch 34 - iter 39/133 - loss 0.00987597 - samples/sec: 734.20 - lr: 0.010000
|
595 |
+
2023-02-06 08:38:18,994 epoch 34 - iter 52/133 - loss 0.00991974 - samples/sec: 737.16 - lr: 0.010000
|
596 |
+
2023-02-06 08:38:20,513 epoch 34 - iter 65/133 - loss 0.00990955 - samples/sec: 619.03 - lr: 0.010000
|
597 |
+
2023-02-06 08:38:21,810 epoch 34 - iter 78/133 - loss 0.00985254 - samples/sec: 739.38 - lr: 0.010000
|
598 |
+
2023-02-06 08:38:23,273 epoch 34 - iter 91/133 - loss 0.00982011 - samples/sec: 779.25 - lr: 0.010000
|
599 |
+
2023-02-06 08:38:24,511 epoch 34 - iter 104/133 - loss 0.00980783 - samples/sec: 773.71 - lr: 0.010000
|
600 |
+
2023-02-06 08:38:25,739 epoch 34 - iter 117/133 - loss 0.00976338 - samples/sec: 780.95 - lr: 0.010000
|
601 |
+
2023-02-06 08:38:27,199 epoch 34 - iter 130/133 - loss 0.00978109 - samples/sec: 777.03 - lr: 0.010000
|
602 |
+
2023-02-06 08:38:27,468 ----------------------------------------------------------------------------------------------------
|
603 |
+
2023-02-06 08:38:27,472 EPOCH 34 done: loss 0.0098 - lr 0.010000
|
604 |
+
2023-02-06 08:38:29,920 Evaluating as a multi-label problem: False
|
605 |
+
2023-02-06 08:38:29,937 DEV : loss 0.009954135864973068 - f1-score (micro avg) 0.6647
|
606 |
+
2023-02-06 08:38:30,291 BAD EPOCHS (no improvement): 0
|
607 |
+
2023-02-06 08:38:30,298 saving best model
|
608 |
+
2023-02-06 08:38:30,373 ----------------------------------------------------------------------------------------------------
|
609 |
+
2023-02-06 08:38:31,950 epoch 35 - iter 13/133 - loss 0.00960910 - samples/sec: 708.10 - lr: 0.010000
|
610 |
+
2023-02-06 08:38:33,281 epoch 35 - iter 26/133 - loss 0.00965913 - samples/sec: 722.62 - lr: 0.010000
|
611 |
+
2023-02-06 08:38:34,761 epoch 35 - iter 39/133 - loss 0.00961002 - samples/sec: 636.49 - lr: 0.010000
|
612 |
+
2023-02-06 08:38:36,130 epoch 35 - iter 52/133 - loss 0.00968180 - samples/sec: 698.70 - lr: 0.010000
|
613 |
+
2023-02-06 08:38:37,349 epoch 35 - iter 65/133 - loss 0.00970834 - samples/sec: 786.80 - lr: 0.010000
|
614 |
+
2023-02-06 08:38:38,815 epoch 35 - iter 78/133 - loss 0.00976811 - samples/sec: 770.86 - lr: 0.010000
|
615 |
+
2023-02-06 08:38:40,015 epoch 35 - iter 91/133 - loss 0.00977029 - samples/sec: 805.65 - lr: 0.010000
|
616 |
+
2023-02-06 08:38:41,480 epoch 35 - iter 104/133 - loss 0.00976745 - samples/sec: 773.86 - lr: 0.010000
|
617 |
+
2023-02-06 08:38:42,701 epoch 35 - iter 117/133 - loss 0.00977891 - samples/sec: 787.95 - lr: 0.010000
|
618 |
+
2023-02-06 08:38:44,189 epoch 35 - iter 130/133 - loss 0.00974154 - samples/sec: 759.68 - lr: 0.010000
|
619 |
+
2023-02-06 08:38:44,483 ----------------------------------------------------------------------------------------------------
|
620 |
+
2023-02-06 08:38:44,489 EPOCH 35 done: loss 0.0098 - lr 0.010000
|
621 |
+
2023-02-06 08:38:46,913 Evaluating as a multi-label problem: False
|
622 |
+
2023-02-06 08:38:46,930 DEV : loss 0.009962853975594044 - f1-score (micro avg) 0.658
|
623 |
+
2023-02-06 08:38:47,267 BAD EPOCHS (no improvement): 1
|
624 |
+
2023-02-06 08:38:47,361 ----------------------------------------------------------------------------------------------------
|
625 |
+
2023-02-06 08:38:47,365 loading file /content/drive/MyDrive/Colab Notebooks/models/flair-sentiment-classifier/best-model.pt
|
626 |
+
2023-02-06 08:38:48,126 Evaluating as a multi-label problem: False
|
627 |
+
2023-02-06 08:38:48,139 0.6462 0.6462 0.6462 0.6462
|
628 |
+
2023-02-06 08:38:48,144
|
629 |
+
Results:
|
630 |
+
- F-score (micro) 0.6462
|
631 |
+
- F-score (macro) 0.6426
|
632 |
+
- Accuracy 0.6462
|
633 |
+
|
634 |
+
By class:
|
635 |
+
precision recall f1-score support
|
636 |
+
|
637 |
+
1 0.6291 0.7363 0.6785 182
|
638 |
+
0 0.6712 0.5537 0.6068 177
|
639 |
+
|
640 |
+
accuracy 0.6462 359
|
641 |
+
macro avg 0.6502 0.6450 0.6426 359
|
642 |
+
weighted avg 0.6499 0.6462 0.6431 359
|
643 |
+
|
644 |
+
2023-02-06 08:38:48,150 ----------------------------------------------------------------------------------------------------
|
models/flair-sentiment-classifier/weights.txt
ADDED
File without changes
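Reading the log above: in the portion shown, the dev micro-F1 peaks at 0.6647 in epoch 34, training stops after epoch 35, and reloading best-model.pt yields 0.6462 micro-F1 (0.6426 macro) on the 359-example test set. A minimal sketch of querying that checkpoint with Flair (the relative path is an assumption; point it at wherever best-model.pt lives in your checkout):

```python
# Minimal sketch: load the Flair checkpoint produced by the run logged above
# and classify one sentence. The local path is an assumption.
from flair.data import Sentence
from flair.models import TextClassifier

classifier = TextClassifier.load("models/flair-sentiment-classifier/best-model.pt")

sentence = Sentence("The service was surprisingly good.")
classifier.predict(sentence)   # attaches a predicted label with a confidence
print(sentence.labels)         # e.g. the '1' (positive) class with its score
```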
models/logistic_regression.report
ADDED
@@ -0,0 +1,27 @@
+{
+    "0": {
+        "precision": 0.7279411764705882,
+        "recall": 0.559322033898305,
+        "f1-score": 0.6325878594249201,
+        "support": 177
+    },
+    "1": {
+        "precision": 0.6502242152466368,
+        "recall": 0.7967032967032966,
+        "f1-score": 0.7160493827160495,
+        "support": 182
+    },
+    "accuracy": 0.6796657381615598,
+    "macro avg": {
+        "precision": 0.6890826958586125,
+        "recall": 0.6780126653008008,
+        "f1-score": 0.6743186210704848,
+        "support": 359
+    },
+    "weighted avg": {
+        "precision": 0.6885414913932646,
+        "recall": 0.6796657381615598,
+        "f1-score": 0.6748998294499494,
+        "support": 359
+    }
+}
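The .report files in this commit share the dictionary layout produced by scikit-learn's classification_report(..., output_dict=True): per-class precision/recall/F1 with support, plus accuracy and macro/weighted averages over the 359 test examples. A sketch of how such a file is regenerated (y_true and y_pred are placeholders for the real held-out labels and model predictions):

```python
# Sketch: write a report in the same JSON layout as the .report files above.
# y_true and y_pred are placeholders, not the project's actual test data.
import json
from sklearn.metrics import classification_report

y_true = [0, 1, 1, 0, 1]
y_pred = [0, 1, 0, 0, 1]

report = classification_report(y_true, y_pred, output_dict=True)
with open("logistic_regression.report", "w") as f:
    json.dump(report, f, indent=4)
```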
models/logistic_regression.sav
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:747c70897e578a4b19404e306bfd8a86bfd1e996cd6abf4fc560178049dff332
+size 42617
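The .sav files are tracked with Git LFS, so the diff only shows pointer metadata (content hash and byte size), not the model itself. Assuming they are ordinary pickled scikit-learn estimators, loading one looks like the sketch below; the vectorizer path is hypothetical, but the classifier does need the same feature transform that was fitted at training time:

```python
# Sketch, assuming the .sav files are plain pickles of fitted sklearn objects.
import pickle

with open("models/logistic_regression.sav", "rb") as f:
    clf = pickle.load(f)

# Hypothetical: a vectorizer pickled the same way during training.
with open("models/count_vectorizer.sav", "rb") as f:
    vectorizer = pickle.load(f)

X = vectorizer.transform(["The service was surprisingly good."])
print(clf.predict(X))  # e.g. array([1]) for the positive class
```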
models/m_nb.report
ADDED
@@ -0,0 +1,27 @@
+{
+    "0": {
+        "precision": 0.711340206185567,
+        "recall": 0.7796610169491526,
+        "f1-score": 0.7439353099730459,
+        "support": 177
+    },
+    "1": {
+        "precision": 0.7636363636363637,
+        "recall": 0.6923076923076923,
+        "f1-score": 0.7262247838616716,
+        "support": 182
+    },
+    "accuracy": 0.7353760445682451,
+    "macro avg": {
+        "precision": 0.7374882849109654,
+        "recall": 0.7359843546284224,
+        "f1-score": 0.7350800469173587,
+        "support": 359
+    },
+    "weighted avg": {
+        "precision": 0.7378524642804,
+        "recall": 0.7353760445682451,
+        "f1-score": 0.7349567145628226,
+        "support": 359
+    }
+}
models/m_nb.sav
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c3b03616b403205fb262bdd439ad8fb5da91235f80d3536a5917ece7cf5c97e8
+size 168217
models/random_forest.report
ADDED
@@ -0,0 +1,27 @@
+{
+    "0": {
+        "precision": 0.7317073170731707,
+        "recall": 0.5084745762711864,
+        "f1-score": 0.6,
+        "support": 177
+    },
+    "1": {
+        "precision": 0.6313559322033898,
+        "recall": 0.8186813186813187,
+        "f1-score": 0.7129186602870812,
+        "support": 182
+    },
+    "accuracy": 0.6657381615598886,
+    "macro avg": {
+        "precision": 0.6815316246382803,
+        "recall": 0.6635779474762525,
+        "f1-score": 0.6564593301435406,
+        "support": 359
+    },
+    "weighted avg": {
+        "precision": 0.6808327988383513,
+        "recall": 0.6657381615598886,
+        "f1-score": 0.6572456717889938,
+        "support": 359
+    }
+}
models/random_forest.sav
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9e4746ce69b95ab3059efa16fa4f337714b4cbd502044ae14b72fe029d78d4ba
+size 9949938
models/setfit-classifier/1_Pooling/config.json
ADDED
@@ -0,0 +1,7 @@
+{
+  "word_embedding_dimension": 768,
+  "pooling_mode_cls_token": false,
+  "pooling_mode_mean_tokens": true,
+  "pooling_mode_max_tokens": false,
+  "pooling_mode_mean_sqrt_len_tokens": false
+}
models/setfit-classifier/README.md
ADDED
@@ -0,0 +1,126 @@
+---
+pipeline_tag: sentence-similarity
+tags:
+- sentence-transformers
+- feature-extraction
+- sentence-similarity
+- transformers
+
+---
+
+# {MODEL_NAME}
+
+This is a [sentence-transformers](https://www.SBERT.net) model: It maps sentences & paragraphs to a 768 dimensional dense vector space and can be used for tasks like clustering or semantic search.
+
+<!--- Describe your model here -->
+
+## Usage (Sentence-Transformers)
+
+Using this model becomes easy when you have [sentence-transformers](https://www.SBERT.net) installed:
+
+```
+pip install -U sentence-transformers
+```
+
+Then you can use the model like this:
+
+```python
+from sentence_transformers import SentenceTransformer
+sentences = ["This is an example sentence", "Each sentence is converted"]
+
+model = SentenceTransformer('{MODEL_NAME}')
+embeddings = model.encode(sentences)
+print(embeddings)
+```
+
+
+
+## Usage (HuggingFace Transformers)
+Without [sentence-transformers](https://www.SBERT.net), you can use the model like this: First, you pass your input through the transformer model, then you have to apply the right pooling-operation on-top of the contextualized word embeddings.
+
+```python
+from transformers import AutoTokenizer, AutoModel
+import torch
+
+
+#Mean Pooling - Take attention mask into account for correct averaging
+def mean_pooling(model_output, attention_mask):
+    token_embeddings = model_output[0] #First element of model_output contains all token embeddings
+    input_mask_expanded = attention_mask.unsqueeze(-1).expand(token_embeddings.size()).float()
+    return torch.sum(token_embeddings * input_mask_expanded, 1) / torch.clamp(input_mask_expanded.sum(1), min=1e-9)
+
+
+# Sentences we want sentence embeddings for
+sentences = ['This is an example sentence', 'Each sentence is converted']
+
+# Load model from HuggingFace Hub
+tokenizer = AutoTokenizer.from_pretrained('{MODEL_NAME}')
+model = AutoModel.from_pretrained('{MODEL_NAME}')
+
+# Tokenize sentences
+encoded_input = tokenizer(sentences, padding=True, truncation=True, return_tensors='pt')
+
+# Compute token embeddings
+with torch.no_grad():
+    model_output = model(**encoded_input)
+
+# Perform pooling. In this case, mean pooling.
+sentence_embeddings = mean_pooling(model_output, encoded_input['attention_mask'])
+
+print("Sentence embeddings:")
+print(sentence_embeddings)
+```
+
+
+
+## Evaluation Results
+
+<!--- Describe how your model was evaluated -->
+
+For an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: [https://seb.sbert.net](https://seb.sbert.net?model_name={MODEL_NAME})
+
+
+## Training
+The model was trained with the parameters:
+
+**DataLoader**:
+
+`torch.utils.data.dataloader.DataLoader` of length 160 with parameters:
+```
+{'batch_size': 16, 'sampler': 'torch.utils.data.sampler.RandomSampler', 'batch_sampler': 'torch.utils.data.sampler.BatchSampler'}
+```
+
+**Loss**:
+
+`sentence_transformers.losses.CosineSimilarityLoss.CosineSimilarityLoss`
+
+Parameters of the fit()-Method:
+```
+{
+    "epochs": 1,
+    "evaluation_steps": 0,
+    "evaluator": "NoneType",
+    "max_grad_norm": 1,
+    "optimizer_class": "<class 'torch.optim.adamw.AdamW'>",
+    "optimizer_params": {
+        "lr": 2e-05
+    },
+    "scheduler": "WarmupLinear",
+    "steps_per_epoch": 160,
+    "warmup_steps": 16,
+    "weight_decay": 0.01
+}
+```
+
+
+## Full Model Architecture
+```
+SentenceTransformer(
+  (0): Transformer({'max_seq_length': 512, 'do_lower_case': False}) with Transformer model: MPNetModel
+  (1): Pooling({'word_embedding_dimension': 768, 'pooling_mode_cls_token': False, 'pooling_mode_mean_tokens': True, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False})
+)
+```
+
+## Citing & Authors
+
+<!--- Describe where people can find more information -->
models/setfit-classifier/config.json
ADDED
@@ -0,0 +1,24 @@
+{
+  "_name_or_path": "/root/.cache/torch/sentence_transformers/sentence-transformers_paraphrase-mpnet-base-v2/",
+  "architectures": [
+    "MPNetModel"
+  ],
+  "attention_probs_dropout_prob": 0.1,
+  "bos_token_id": 0,
+  "eos_token_id": 2,
+  "hidden_act": "gelu",
+  "hidden_dropout_prob": 0.1,
+  "hidden_size": 768,
+  "initializer_range": 0.02,
+  "intermediate_size": 3072,
+  "layer_norm_eps": 1e-05,
+  "max_position_embeddings": 514,
+  "model_type": "mpnet",
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "pad_token_id": 1,
+  "relative_attention_num_buckets": 32,
+  "torch_dtype": "float32",
+  "transformers_version": "4.26.0",
+  "vocab_size": 30527
+}
models/setfit-classifier/config_sentence_transformers.json
ADDED
@@ -0,0 +1,7 @@
+{
+  "__version__": {
+    "sentence_transformers": "2.0.0",
+    "transformers": "4.7.0",
+    "pytorch": "1.9.0+cu102"
+  }
+}
models/setfit-classifier/model_head.pkl
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:76829b46e5a74f9c45c0d6dbfe0df882d9449645ca328521108969ed2d4e05f1
+size 6991
models/setfit-classifier/modules.json
ADDED
@@ -0,0 +1,14 @@
+[
+  {
+    "idx": 0,
+    "name": "0",
+    "path": "",
+    "type": "sentence_transformers.models.Transformer"
+  },
+  {
+    "idx": 1,
+    "name": "1",
+    "path": "1_Pooling",
+    "type": "sentence_transformers.models.Pooling"
+  }
+]
models/setfit-classifier/pytorch_model.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:664b194f0ad607fb2f9558906539270bd6f4eae1705efab47f2d505fbba2e22a
+size 438016493
models/setfit-classifier/sentence_bert_config.json
ADDED
@@ -0,0 +1,4 @@
+{
+  "max_seq_length": 512,
+  "do_lower_case": false
+}
models/setfit-classifier/special_tokens_map.json
ADDED
@@ -0,0 +1,15 @@
+{
+  "bos_token": "<s>",
+  "cls_token": "<s>",
+  "eos_token": "</s>",
+  "mask_token": {
+    "content": "<mask>",
+    "lstrip": true,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "pad_token": "<pad>",
+  "sep_token": "</s>",
+  "unk_token": "[UNK]"
+}
models/setfit-classifier/tokenizer.json
ADDED
The diff for this file is too large to render.
See raw diff
models/setfit-classifier/tokenizer_config.json
ADDED
@@ -0,0 +1,67 @@
+{
+  "bos_token": {
+    "__type": "AddedToken",
+    "content": "<s>",
+    "lstrip": false,
+    "normalized": true,
+    "rstrip": false,
+    "single_word": false
+  },
+  "cls_token": {
+    "__type": "AddedToken",
+    "content": "<s>",
+    "lstrip": false,
+    "normalized": true,
+    "rstrip": false,
+    "single_word": false
+  },
+  "do_basic_tokenize": true,
+  "do_lower_case": true,
+  "eos_token": {
+    "__type": "AddedToken",
+    "content": "</s>",
+    "lstrip": false,
+    "normalized": true,
+    "rstrip": false,
+    "single_word": false
+  },
+  "mask_token": {
+    "__type": "AddedToken",
+    "content": "<mask>",
+    "lstrip": true,
+    "normalized": true,
+    "rstrip": false,
+    "single_word": false
+  },
+  "model_max_length": 512,
+  "name_or_path": "/root/.cache/torch/sentence_transformers/sentence-transformers_paraphrase-mpnet-base-v2/",
+  "never_split": null,
+  "pad_token": {
+    "__type": "AddedToken",
+    "content": "<pad>",
+    "lstrip": false,
+    "normalized": true,
+    "rstrip": false,
+    "single_word": false
+  },
+  "sep_token": {
+    "__type": "AddedToken",
+    "content": "</s>",
+    "lstrip": false,
+    "normalized": true,
+    "rstrip": false,
+    "single_word": false
+  },
+  "special_tokens_map_file": null,
+  "strip_accents": null,
+  "tokenize_chinese_chars": true,
+  "tokenizer_class": "MPNetTokenizer",
+  "unk_token": {
+    "__type": "AddedToken",
+    "content": "[UNK]",
+    "lstrip": false,
+    "normalized": true,
+    "rstrip": false,
+    "single_word": false
+  }
+}
models/setfit-classifier/vocab.txt
ADDED
The diff for this file is too large to render.
See raw diff
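Taken together, the files above form a complete SetFit checkpoint: the MPNet sentence-transformer body (config.json, tokenizer files, pytorch_model.bin, the 1_Pooling module) plus the fitted classification head in model_head.pkl. A minimal loading sketch with the setfit library (the local path is an assumption based on the directory layout above):

```python
# Sketch: load the SetFit checkpoint committed above and classify one text.
from setfit import SetFitModel

model = SetFitModel.from_pretrained("models/setfit-classifier")
preds = model.predict(["The service was surprisingly good."])
print(preds)  # class ids, e.g. [1]
```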
models/spacy-classifier/model-best/config.cfg
ADDED
@@ -0,0 +1,121 @@
+[paths]
+train = "/content/drive/MyDrive/Colab Notebooks/train.spacy"
+dev = "/content/drive/MyDrive/Colab Notebooks/dev.spacy"
+vectors = null
+init_tok2vec = null
+
+[system]
+gpu_allocator = null
+seed = 0
+
+[nlp]
+lang = "en"
+pipeline = ["textcat"]
+batch_size = 1000
+disabled = []
+before_creation = null
+after_creation = null
+after_pipeline_creation = null
+tokenizer = {"@tokenizers":"spacy.Tokenizer.v1"}
+
+[components]
+
+[components.textcat]
+factory = "textcat"
+scorer = {"@scorers":"spacy.textcat_scorer.v1"}
+threshold = 0.5
+
+[components.textcat.model]
+@architectures = "spacy.TextCatBOW.v2"
+exclusive_classes = true
+ngram_size = 1
+no_output_layer = false
+nO = null
+
+[corpora]
+
+[corpora.dev]
+@readers = "spacy.Corpus.v1"
+path = ${paths.dev}
+max_length = 0
+gold_preproc = false
+limit = 0
+augmenter = null
+
+[corpora.train]
+@readers = "spacy.Corpus.v1"
+path = ${paths.train}
+max_length = 0
+gold_preproc = false
+limit = 0
+augmenter = null
+
+[training]
+dev_corpus = "corpora.dev"
+train_corpus = "corpora.train"
+seed = ${system.seed}
+gpu_allocator = ${system.gpu_allocator}
+dropout = 0.1
+accumulate_gradient = 1
+patience = 3200
+max_epochs = 0
+max_steps = 20000
+eval_frequency = 200
+frozen_components = []
+annotating_components = []
+before_to_disk = null
+
+[training.batcher]
+@batchers = "spacy.batch_by_words.v1"
+discard_oversize = false
+tolerance = 0.2
+get_length = null
+
+[training.batcher.size]
+@schedules = "compounding.v1"
+start = 100
+stop = 1000
+compound = 1.001
+t = 0.0
+
+[training.logger]
+@loggers = "spacy.ConsoleLogger.v1"
+progress_bar = false
+
+[training.optimizer]
+@optimizers = "Adam.v1"
+beta1 = 0.9
+beta2 = 0.999
+L2_is_weight_decay = true
+L2 = 0.01
+grad_clip = 1.0
+use_averages = false
+eps = 0.00000001
+learn_rate = 0.00005
+
+[training.score_weights]
+cats_score = 1.0
+cats_score_desc = null
+cats_micro_p = null
+cats_micro_r = null
+cats_micro_f = null
+cats_macro_p = null
+cats_macro_r = null
+cats_macro_f = null
+cats_macro_auc = null
+cats_f_per_type = null
+cats_macro_auc_per_type = null
+
+[pretraining]
+
+[initialize]
+vectors = ${paths.vectors}
+init_tok2vec = ${paths.init_tok2vec}
+vocab_data = null
+lookups = null
+before_init = null
+after_init = null
+
+[initialize.components]
+
+[initialize.tokenizer]
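This config describes a bag-of-words text classifier (spacy.TextCatBOW.v2, unigrams, exclusive classes) trained with Adam at learn_rate 5e-5, evaluated every 200 steps with an early-stopping patience of 3200 steps. The [paths] entries are hard-coded Colab paths, so a rerun needs overrides; a sketch using spaCy 3.x's Python entry point (output path and local corpus paths are assumptions, and `python -m spacy train config.cfg` is the equivalent CLI):

```python
# Sketch: rerun training from the committed config with local path overrides.
from spacy.cli.train import train

train(
    "models/spacy-classifier/model-best/config.cfg",
    output_path="spacy-output",  # assumption: any local output directory
    overrides={"paths.train": "train.spacy", "paths.dev": "dev.spacy"},
)
```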
models/spacy-classifier/model-best/meta.json
ADDED
@@ -0,0 +1,59 @@
+{
+  "lang":"en",
+  "name":"pipeline",
+  "version":"0.0.0",
+  "spacy_version":">=3.4.4,<3.5.0",
+  "description":"",
+  "author":"",
+  "email":"",
+  "url":"",
+  "license":"",
+  "spacy_git_version":"Unknown",
+  "vectors":{
+    "width":0,
+    "vectors":0,
+    "keys":0,
+    "name":null,
+    "mode":"default"
+  },
+  "labels":{
+    "textcat":[
+      "positive",
+      "negative"
+    ]
+  },
+  "pipeline":[
+    "textcat"
+  ],
+  "components":[
+    "textcat"
+  ],
+  "disabled":[
+
+  ],
+  "performance":{
+    "cats_score":0.7499305363,
+    "cats_score_desc":"macro F",
+    "cats_micro_p":0.75,
+    "cats_micro_r":0.75,
+    "cats_micro_f":0.75,
+    "cats_macro_p":0.7500320184,
+    "cats_macro_r":0.7499097699,
+    "cats_macro_f":0.7499305363,
+    "cats_macro_auc":0.8201950037,
+    "cats_f_per_type":{
+      "positive":{
+        "p":0.7513661202,
+        "r":0.7402422611,
+        "f":0.7457627119
+      },
+      "negative":{
+        "p":0.7486979167,
+        "r":0.7595772787,
+        "f":0.7540983607
+      }
+    },
+    "cats_macro_auc_per_type":0.0,
+    "textcat_loss":23.4422574639
+  }
+}
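Per this meta.json, the exported pipeline scores macro-F 0.75 (micro-F 0.75, macro AUC 0.82) on its dev split, over the labels positive and negative. Using the committed model-best directory is one spacy.load call; the relative path is an assumption:

```python
# Sketch: load the committed best model and read the textcat scores.
import spacy

nlp = spacy.load("models/spacy-classifier/model-best")
doc = nlp("The service was surprisingly good.")
print(doc.cats)  # e.g. {'positive': 0.83, 'negative': 0.17}
```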
models/spacy-classifier/model-best/textcat/cfg
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e7822fc6181a943012f1395d8b5f12dee231cc5d4485bfe2c8dc890874534bcc
+size 95
models/spacy-classifier/model-best/textcat/model
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:146d3a912f816e603af38e174ef506e868fad16fd32ebcbf46a99dd5c06fcd3c
+size 2097895
models/spacy-classifier/model-best/tokenizer
ADDED
@@ -0,0 +1,3 @@
+[binary content: serialized spaCy tokenizer settings (packed prefix_search/suffix_search/infix regexes); not rendered here]
EBA\u1EBC\u1EBE\u1EC0\u1EC2\u1EC4\u1EC6\u1EC8\u1ECA\u1ECC\u1ECE\u1ED0\u1ED2\u1ED4\u1ED6\u1ED8\u1EDA\u1EDC\u1EDE\u1EE0\u1EE2\u1EE4\u1EE6\u1EE8\u1EEA\u1EEC\u1EEE\u1EF0\u1EF2\u1EF4\u1EF6\u1EF8\u1EFA\u1EFC\u1EFEЁА-ЯӘӨҮҖҢҺΑ-ΩΆΈΊΌΏΉΎА-ЩЮЯІЇЄҐЃЅЈЉЊЌЀЍ\u1200-\u137F\u0980-\u09FF\u0591-\u05F4\uFB1D-\uFB4F\u0620-\u064A\u066E-\u06D5\u06E5-\u06FF\u0750-\u077F\u08A0-\u08BD\uFB50-\uFBB1\uFBD3-\uFD3D\uFD50-\uFDC7\uFDF0-\uFDFB\uFE70-\uFEFC\U0001EE00-\U0001EEBB\u0D80-\u0DFF\u0900-\u097F\u0C80-\u0CFF\u0B80-\u0BFF\u0C00-\u0C7F\uAC00-\uD7AF\u1100-\u11FF\u3040-\u309F\u30A0-\u30FFー\u4E00-\u62FF\u6300-\u77FF\u7800-\u8CFF\u8D00-\u9FFF\u3400-\u4DBF\U00020000-\U000215FF\U00021600-\U000230FF\U00023100-\U000245FF\U00024600-\U000260FF\U00026100-\U000275FF\U00027600-\U000290FF\U00029100-\U0002A6DF\U0002A700-\U0002B73F\U0002B740-\U0002B81F\U0002B820-\U0002CEAF\U0002CEB0-\U0002EBEF\u2E80-\u2EFF\u2F00-\u2FDF\u2FF0-\u2FFF\u3000-\u303F\u31C0-\u31EF\u3200-\u32FF\u3300-\u33FF\uF900-\uFAFF\uFE30-\uFE4F\U0001F200-\U0001F2FF\U0002F800-\U0002FA1F])\.$�infix_finditer�>�\.\.+|…|[\u00A6\u00A9\u00AE\u00B0\u0482\u058D\u058E\u060E\u060F\u06DE\u06E9\u06FD\u06FE\u07F6\u09FA\u0B70\u0BF3-\u0BF8\u0BFA\u0C7F\u0D4F\u0D79\u0F01-\u0F03\u0F13\u0F15-\u0F17\u0F1A-\u0F1F\u0F34\u0F36\u0F38\u0FBE-\u0FC5\u0FC7-\u0FCC\u0FCE\u0FCF\u0FD5-\u0FD8\u109E\u109F\u1390-\u1399\u1940\u19DE-\u19FF\u1B61-\u1B6A\u1B74-\u1B7C\u2100\u2101\u2103-\u2106\u2108\u2109\u2114\u2116\u2117\u211E-\u2123\u2125\u2127\u2129\u212E\u213A\u213B\u214A\u214C\u214D\u214F\u218A\u218B\u2195-\u2199\u219C-\u219F\u21A1\u21A2\u21A4\u21A5\u21A7-\u21AD\u21AF-\u21CD\u21D0\u21D1\u21D3\u21D5-\u21F3\u2300-\u2307\u230C-\u231F\u2322-\u2328\u232B-\u237B\u237D-\u239A\u23B4-\u23DB\u23E2-\u2426\u2440-\u244A\u249C-\u24E9\u2500-\u25B6\u25B8-\u25C0\u25C2-\u25F7\u2600-\u266E\u2670-\u2767\u2794-\u27BF\u2800-\u28FF\u2B00-\u2B2F\u2B45\u2B46\u2B4D-\u2B73\u2B76-\u2B95\u2B98-\u2BC8\u2BCA-\u2BFE\u2CE5-\u2CEA\u2E80-\u2E99\u2E9B-\u2EF3\u2F00-\u2FD5\u2FF0-\u2FFB\u3004\u3012\u3013\u3020\u3036\u3037\u303E\u303F\u3190\u3191\u3196-\u319F\u31C0-\u31E3\u3200-\u321E\u322A-\u3247\u3250\u3260-\u327F\u328A-\u32B0\u32C0-\u32FE\u3300-\u33FF\u4DC0-\u4DFF\uA490-\uA4C6\uA828-\uA82B\uA836\uA837\uA839\uAA77-\uAA79\uFDFD\uFFE4\uFFE8\uFFED\uFFEE\uFFFC\uFFFD\U00010137-\U0001013F\U00010179-\U00010189\U0001018C-\U0001018E\U00010190-\U0001019B\U000101A0\U000101D0-\U000101FC\U00010877\U00010878\U00010AC8\U0001173F\U00016B3C-\U00016B3F\U00016B45\U0001BC9C\U0001D000-\U0001D0F5\U0001D100-\U0001D126\U0001D129-\U0001D164\U0001D16A-\U0001D16C\U0001D183\U0001D184\U0001D18C-\U0001D1A9\U0001D1AE-\U0001D1E8\U0001D200-\U0001D241\U0001D245\U0001D300-\U0001D356\U0001D800-\U0001D9FF\U0001DA37-\U0001DA3A\U0001DA6D-\U0001DA74\U0001DA76-\U0001DA83\U0001DA85\U0001DA86\U0001ECAC\U0001F000-\U0001F02B\U0001F030-\U0001F093\U0001F0A0-\U0001F0AE\U0001F0B1-\U0001F0BF\U0001F0C1-\U0001F0CF\U0001F0D1-\U0001F0F5\U0001F110-\U0001F16B\U0001F170-\U0001F1AC\U0001F1E6-\U0001F202\U0001F210-\U0001F23B\U0001F240-\U0001F248\U0001F250\U0001F251\U0001F260-\U0001F265\U0001F300-\U0001F3FA\U0001F400-\U0001F6D4\U0001F6E0-\U0001F6EC\U0001F6F0-\U0001F6F9\U0001F700-\U0001F773\U0001F780-\U0001F7D8\U0001F800-\U0001F80B\U0001F810-\U0001F847\U0001F850-\U0001F859\U0001F860-\U0001F887\U0001F890-\U0001F8AD\U0001F900-\U0001F90B\U0001F910-\U0001F93E\U0001F940-\U0001F970\U0001F973-\U0001F976\U0001F97A\U0001F97C-\U0001F9A2\U0001F9B0-\U0001F9B9\U0001F9C0-\U0001F9C2\U0001F9D0-\U0001F9FF\U0001FA60-\U0001FA6D]|(?<=[0-9])[+\-\*^](?=[0-9-])|(?<=[a-z\uFF41-\uFF5A\u00DF-\u00F6\u00F8-\u00FF\u0101\u0103\u0105\
u0107\u0109\u010B\u010D\u010F\u0111\u0113\u0115\u0117\u0119\u011B\u011D\u011F\u0121\u0123\u0125\u0127\u0129\u012B\u012D\u012F\u0131\u0133\u0135\u0137\u0138\u013A\u013C\u013E\u0140\u0142\u0144\u0146\u0148\u0149\u014B\u014D\u014F\u0151\u0153\u0155\u0157\u0159\u015B\u015D\u015F\u0161\u0163\u0165\u0167\u0169\u016B\u016D\u016F\u0171\u0173\u0175\u0177\u017A\u017C\u017E\u017F\u0180\u0183\u0185\u0188\u018C\u018D\u0192\u0195\u0199-\u019B\u019E\u01A1\u01A3\u01A5\u01A8\u01AA\u01AB\u01AD\u01B0\u01B4\u01B6\u01B9\u01BA\u01BD-\u01BF\u01C6\u01C9\u01CC\u01CE\u01D0\u01D2\u01D4\u01D6\u01D8\u01DA\u01DC\u01DD\u01DF\u01E1\u01E3\u01E5\u01E7\u01E9\u01EB\u01ED\u01EF\u01F0\u01F3\u01F5\u01F9\u01FB\u01FD\u01FF\u0201\u0203\u0205\u0207\u0209\u020B\u020D\u020F\u0211\u0213\u0215\u0217\u0219\u021B\u021D\u021F\u0221\u0223\u0225\u0227\u0229\u022B\u022D\u022F\u0231\u0233-\u0239\u023C\u023F\u0240\u0242\u0247\u0249\u024B\u024D\u024F\u2C61\u2C65\u2C66\u2C68\u2C6A\u2C6C\u2C71\u2C73\u2C74\u2C76-\u2C7B\uA723\uA725\uA727\uA729\uA72B\uA72D\uA72F-\uA731\uA733\uA735\uA737\uA739\uA73B\uA73D\uA73F\uA741\uA743\uA745\uA747\uA749\uA74B\uA74D\uA74F\uA751\uA753\uA755\uA757\uA759\uA75B\uA75D\uA75F\uA761\uA763\uA765\uA767\uA769\uA76B\uA76D\uA76F\uA771-\uA778\uA77A\uA77C\uA77F\uA781\uA783\uA785\uA787\uA78C\uA78E\uA791\uA793-\uA795\uA797\uA799\uA79B\uA79D\uA79F\uA7A1\uA7A3\uA7A5\uA7A7\uA7A9\uA7AF\uA7B5\uA7B7\uA7B9\uA7FA\uAB30-\uAB5A\uAB60-\uAB64\u0250-\u02AF\u1D00-\u1D25\u1D6B-\u1D77\u1D79-\u1D9A\u1E01\u1E03\u1E05\u1E07\u1E09\u1E0B\u1E0D\u1E0F\u1E11\u1E13\u1E15\u1E17\u1E19\u1E1B\u1E1D\u1E1F\u1E21\u1E23\u1E25\u1E27\u1E29\u1E2B\u1E2D\u1E2F\u1E31\u1E33\u1E35\u1E37\u1E39\u1E3B\u1E3D\u1E3F\u1E41\u1E43\u1E45\u1E47\u1E49\u1E4B\u1E4D\u1E4F\u1E51\u1E53\u1E55\u1E57\u1E59\u1E5B\u1E5D\u1E5F\u1E61\u1E63\u1E65\u1E67\u1E69\u1E6B\u1E6D\u1E6F\u1E71\u1E73\u1E75\u1E77\u1E79\u1E7B\u1E7D\u1E7F\u1E81\u1E83\u1E85\u1E87\u1E89\u1E8B\u1E8D\u1E8F\u1E91\u1E93\u1E95-\u1E9D\u1E9F\u1EA1\u1EA3\u1EA5\u1EA7\u1EA9\u1EAB\u1EAD\u1EAF\u1EB1\u1EB3\u1EB5\u1EB7\u1EB9\u1EBB\u1EBD\u1EBF\u1EC1\u1EC3\u1EC5\u1EC7\u1EC9\u1ECB\u1ECD\u1ECF\u1ED1\u1ED3\u1ED5\u1ED7\u1ED9\u1EDB\u1EDD\u1EDF\u1EE1\u1EE3\u1EE5\u1EE7\u1EE9\u1EEB\u1EED\u1EEF\u1EF1\u1EF3\u1EF5\u1EF7\u1EF9\u1EFB\u1EFD\u1EFFёа-яәөүҗңһα-ωάέίόώήύа-щюяіїєґѓѕјљњќѐѝ\u1200-\u137F\u0980-\u09FF\u0591-\u05F4\uFB1D-\uFB4F\u0620-\u064A\u066E-\u06D5\u06E5-\u06FF\u0750-\u077F\u08A0-\u08BD\uFB50-\uFBB1\uFBD3-\uFD3D\uFD50-\uFDC7\uFDF0-\uFDFB\uFE70-\uFEFC\U0001EE00-\U0001EEBB\u0D80-\u0DFF\u0900-\u097F\u0C80-\u0CFF\u0B80-\u0BFF\u0C00-\u0C7F\uAC00-\uD7AF\u1100-\u11FF\u3040-\u309F\u30A0-\u30FFー\u4E00-\u62FF\u6300-\u77FF\u7800-\u8CFF\u8D00-\u9FFF\u3400-\u4DBF\U00020000-\U000215FF\U00021600-\U000230FF\U00023100-\U000245FF\U00024600-\U000260FF\U00026100-\U000275FF\U00027600-\U000290FF\U00029100-\U0002A6DF\U0002A700-\U0002B73F\U0002B740-\U0002B81F\U0002B820-\U0002CEAF\U0002CEB0-\U0002EBEF\u2E80-\u2EFF\u2F00-\u2FDF\u2FF0-\u2FFF\u3000-\u303F\u31C0-\u31EF\u3200-\u32FF\u3300-\u33FF\uF900-\uFAFF\uFE30-\uFE4F\U0001F200-\U0001F2FF\U0002F800-\U0002FA1F\'"”“`‘´’‚,„»«「」『』()〔〕【】《》〈〉])\.(?=[A-Z\uFF21-\uFF3A\u00C0-\u00D6\u00D8-\u00DE\u0100\u0102\u0104\u0106\u0108\u010A\u010C\u010E\u0110\u0112\u0114\u0116\u0118\u011A\u011C\u011E\u0120\u0122\u0124\u0126\u0128\u012A\u012C\u012E\u0130\u0132\u0134\u0136\u0139\u013B\u013D\u013F\u0141\u0143\u0145\u0147\u014A\u014C\u014E\u0150\u0152\u0154\u0156\u0158\u015A\u015C\u015E\u0160\u0162\u0164\u0166\u0168\u016A\u016C\u016E\u0170\u0172\u0174\u0176\u0178\u0179\u017B\u017D\u0181\u0182\u0184\u0186\u0187\u0189-\u018B\u018E-\u0191\u0193\u0194\u0
196-\u0198\u019C\u019D\u019F\u01A0\u01A2\u01A4\u01A6\u01A7\u01A9\u01AC\u01AE\u01AF\u01B1-\u01B3\u01B5\u01B7\u01B8\u01BC\u01C4\u01C7\u01CA\u01CD\u01CF\u01D1\u01D3\u01D5\u01D7\u01D9\u01DB\u01DE\u01E0\u01E2\u01E4\u01E6\u01E8\u01EA\u01EC\u01EE\u01F1\u01F4\u01F6-\u01F8\u01FA\u01FC\u01FE\u0200\u0202\u0204\u0206\u0208\u020A\u020C\u020E\u0210\u0212\u0214\u0216\u0218\u021A\u021C\u021E\u0220\u0222\u0224\u0226\u0228\u022A\u022C\u022E\u0230\u0232\u023A\u023B\u023D\u023E\u0241\u0243-\u0246\u0248\u024A\u024C\u024E\u2C60\u2C62-\u2C64\u2C67\u2C69\u2C6B\u2C6D-\u2C70\u2C72\u2C75\u2C7E\u2C7F\uA722\uA724\uA726\uA728\uA72A\uA72C\uA72E\uA732\uA734\uA736\uA738\uA73A\uA73C\uA73E\uA740\uA742\uA744\uA746\uA748\uA74A\uA74C\uA74E\uA750\uA752\uA754\uA756\uA758\uA75A\uA75C\uA75E\uA760\uA762\uA764\uA766\uA768\uA76A\uA76C\uA76E\uA779\uA77B\uA77D\uA77E\uA780\uA782\uA784\uA786\uA78B\uA78D\uA790\uA792\uA796\uA798\uA79A\uA79C\uA79E\uA7A0\uA7A2\uA7A4\uA7A6\uA7A8\uA7AA-\uA7AE\uA7B0-\uA7B4\uA7B6\uA7B8\u1E00\u1E02\u1E04\u1E06\u1E08\u1E0A\u1E0C\u1E0E\u1E10\u1E12\u1E14\u1E16\u1E18\u1E1A\u1E1C\u1E1E\u1E20\u1E22\u1E24\u1E26\u1E28\u1E2A\u1E2C\u1E2E\u1E30\u1E32\u1E34\u1E36\u1E38\u1E3A\u1E3C\u1E3E\u1E40\u1E42\u1E44\u1E46\u1E48\u1E4A\u1E4C\u1E4E\u1E50\u1E52\u1E54\u1E56\u1E58\u1E5A\u1E5C\u1E5E\u1E60\u1E62\u1E64\u1E66\u1E68\u1E6A\u1E6C\u1E6E\u1E70\u1E72\u1E74\u1E76\u1E78\u1E7A\u1E7C\u1E7E\u1E80\u1E82\u1E84\u1E86\u1E88\u1E8A\u1E8C\u1E8E\u1E90\u1E92\u1E94\u1E9E\u1EA0\u1EA2\u1EA4\u1EA6\u1EA8\u1EAA\u1EAC\u1EAE\u1EB0\u1EB2\u1EB4\u1EB6\u1EB8\u1EBA\u1EBC\u1EBE\u1EC0\u1EC2\u1EC4\u1EC6\u1EC8\u1ECA\u1ECC\u1ECE\u1ED0\u1ED2\u1ED4\u1ED6\u1ED8\u1EDA\u1EDC\u1EDE\u1EE0\u1EE2\u1EE4\u1EE6\u1EE8\u1EEA\u1EEC\u1EEE\u1EF0\u1EF2\u1EF4\u1EF6\u1EF8\u1EFA\u1EFC\u1EFEЁА-ЯӘӨҮҖҢҺΑ-ΩΆΈΊΌΏΉΎА-ЩЮЯІЇЄҐЃЅЈЉЊЌЀЍ\u1200-\u137F\u0980-\u09FF\u0591-\u05F4\uFB1D-\uFB4F\u0620-\u064A\u066E-\u06D5\u06E5-\u06FF\u0750-\u077F\u08A0-\u08BD\uFB50-\uFBB1\uFBD3-\uFD3D\uFD50-\uFDC7\uFDF0-\uFDFB\uFE70-\uFEFC\U0001EE00-\U0001EEBB\u0D80-\u0DFF\u0900-\u097F\u0C80-\u0CFF\u0B80-\u0BFF\u0C00-\u0C7F\uAC00-\uD7AF\u1100-\u11FF\u3040-\u309F\u30A0-\u30FFー\u4E00-\u62FF\u6300-\u77FF\u7800-\u8CFF\u8D00-\u9FFF\u3400-\u4DBF\U00020000-\U000215FF\U00021600-\U000230FF\U00023100-\U000245FF\U00024600-\U000260FF\U00026100-\U000275FF\U00027600-\U000290FF\U00029100-\U0002A6DF\U0002A700-\U0002B73F\U0002B740-\U0002B81F\U0002B820-\U0002CEAF\U0002CEB0-\U0002EBEF\u2E80-\u2EFF\u2F00-\u2FDF\u2FF0-\u2FFF\u3000-\u303F\u31C0-\u31EF\u3200-\u32FF\u3300-\u33FF\uF900-\uFAFF\uFE30-\uFE4F\U0001F200-\U0001F2FF\U0002F800-\U0002FA1F\'"”“`‘´’‚,„»«「」『』()〔〕【】《》〈〉])|(?<=[A-Za-z\uFF21-\uFF3A\uFF41-\uFF5A\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u00FF\u0100-\u017F\u0180-\u01BF\u01C4-\u024F\u2C60-\u2C7B\u2C7E\u2C7F\uA722-\uA76F\uA771-\uA787\uA78B-\uA78E\uA790-\uA7B9\uA7FA\uAB30-\uAB5A\uAB60-\uAB64\u0250-\u02AF\u1D00-\u1D25\u1D6B-\u1D77\u1D79-\u1D9A\u1E00-\u1EFFёа-яЁА-ЯәөүҗңһӘӨҮҖҢҺα-ωάέίόώήύΑ-ΩΆΈΊΌΏΉΎа-щюяіїєґА-ЩЮЯІЇЄҐѓѕјљњќѐѝЃЅЈЉЊЌЀЍ\u1200-\u137F\u0980-\u09FF\u0591-\u05F4\uFB1D-\uFB4F\u0620-\u064A\u066E-\u06D5\u06E5-\u06FF\u0750-\u077F\u08A0-\u08BD\uFB50-\uFBB1\uFBD3-\uFD3D\uFD50-\uFDC7\uFDF0-\uFDFB\uFE70-\uFEFC\U0001EE00-\U0001EEBB\u0D80-\u0DFF\u0900-\u097F\u0C80-\u0CFF\u0B80-\u0BFF\u0C00-\u0C7F\uAC00-\uD7AF\u1100-\u11FF\u3040-\u309F\u30A0-\u30FFー\u4E00-\u62FF\u6300-\u77FF\u7800-\u8CFF\u8D00-\u9FFF\u3400-\u4DBF\U00020000-\U000215FF\U00021600-\U000230FF\U00023100-\U000245FF\U00024600-\U000260FF\U00026100-\U000275FF\U00027600-\U000290FF\U00029100-\U0002A6DF\U0002A700-\U0002B73F\U0002B740-\U0002B81F\U0002B820-\U0002CEAF\U0002CEB0-\U0002EBEF\u2E80-\u2
EFF\u2F00-\u2FDF\u2FF0-\u2FFF\u3000-\u303F\u31C0-\u31EF\u3200-\u32FF\u3300-\u33FF\uF900-\uFAFF\uFE30-\uFE4F\U0001F200-\U0001F2FF\U0002F800-\U0002FA1F]),(?=[A-Za-z\uFF21-\uFF3A\uFF41-\uFF5A\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u00FF\u0100-\u017F\u0180-\u01BF\u01C4-\u024F\u2C60-\u2C7B\u2C7E\u2C7F\uA722-\uA76F\uA771-\uA787\uA78B-\uA78E\uA790-\uA7B9\uA7FA\uAB30-\uAB5A\uAB60-\uAB64\u0250-\u02AF\u1D00-\u1D25\u1D6B-\u1D77\u1D79-\u1D9A\u1E00-\u1EFFёа-яЁА-ЯәөүҗңһӘӨҮҖҢҺα-ωάέίόώήύΑ-ΩΆΈΊΌΏΉΎа-щюяіїєґА-ЩЮЯІЇЄҐѓѕјљњќѐѝЃЅЈЉЊЌЀЍ\u1200-\u137F\u0980-\u09FF\u0591-\u05F4\uFB1D-\uFB4F\u0620-\u064A\u066E-\u06D5\u06E5-\u06FF\u0750-\u077F\u08A0-\u08BD\uFB50-\uFBB1\uFBD3-\uFD3D\uFD50-\uFDC7\uFDF0-\uFDFB\uFE70-\uFEFC\U0001EE00-\U0001EEBB\u0D80-\u0DFF\u0900-\u097F\u0C80-\u0CFF\u0B80-\u0BFF\u0C00-\u0C7F\uAC00-\uD7AF\u1100-\u11FF\u3040-\u309F\u30A0-\u30FFー\u4E00-\u62FF\u6300-\u77FF\u7800-\u8CFF\u8D00-\u9FFF\u3400-\u4DBF\U00020000-\U000215FF\U00021600-\U000230FF\U00023100-\U000245FF\U00024600-\U000260FF\U00026100-\U000275FF\U00027600-\U000290FF\U00029100-\U0002A6DF\U0002A700-\U0002B73F\U0002B740-\U0002B81F\U0002B820-\U0002CEAF\U0002CEB0-\U0002EBEF\u2E80-\u2EFF\u2F00-\u2FDF\u2FF0-\u2FFF\u3000-\u303F\u31C0-\u31EF\u3200-\u32FF\u3300-\u33FF\uF900-\uFAFF\uFE30-\uFE4F\U0001F200-\U0001F2FF\U0002F800-\U0002FA1F])|(?<=[A-Za-z\uFF21-\uFF3A\uFF41-\uFF5A\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u00FF\u0100-\u017F\u0180-\u01BF\u01C4-\u024F\u2C60-\u2C7B\u2C7E\u2C7F\uA722-\uA76F\uA771-\uA787\uA78B-\uA78E\uA790-\uA7B9\uA7FA\uAB30-\uAB5A\uAB60-\uAB64\u0250-\u02AF\u1D00-\u1D25\u1D6B-\u1D77\u1D79-\u1D9A\u1E00-\u1EFFёа-яЁА-ЯәөүҗңһӘӨҮҖҢҺα-ωάέίόώήύΑ-ΩΆΈΊΌΏΉΎа-щюяіїєґА-ЩЮЯІЇЄҐѓѕјљњќѐѝЃЅЈЉЊЌЀЍ\u1200-\u137F\u0980-\u09FF\u0591-\u05F4\uFB1D-\uFB4F\u0620-\u064A\u066E-\u06D5\u06E5-\u06FF\u0750-\u077F\u08A0-\u08BD\uFB50-\uFBB1\uFBD3-\uFD3D\uFD50-\uFDC7\uFDF0-\uFDFB\uFE70-\uFEFC\U0001EE00-\U0001EEBB\u0D80-\u0DFF\u0900-\u097F\u0C80-\u0CFF\u0B80-\u0BFF\u0C00-\u0C7F\uAC00-\uD7AF\u1100-\u11FF\u3040-\u309F\u30A0-\u30FFー\u4E00-\u62FF\u6300-\u77FF\u7800-\u8CFF\u8D00-\u9FFF\u3400-\u4DBF\U00020000-\U000215FF\U00021600-\U000230FF\U00023100-\U000245FF\U00024600-\U000260FF\U00026100-\U000275FF\U00027600-\U000290FF\U00029100-\U0002A6DF\U0002A700-\U0002B73F\U0002B740-\U0002B81F\U0002B820-\U0002CEAF\U0002CEB0-\U0002EBEF\u2E80-\u2EFF\u2F00-\u2FDF\u2FF0-\u2FFF\u3000-\u303F\u31C0-\u31EF\u3200-\u32FF\u3300-\u33FF\uF900-\uFAFF\uFE30-\uFE4F\U0001F200-\U0001F2FF\U0002F800-\U0002FA1F0-9])(?:-|–|—|--|---|——|~)(?=[A-Za-z\uFF21-\uFF3A\uFF41-\uFF5A\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u00FF\u0100-\u017F\u0180-\u01BF\u01C4-\u024F\u2C60-\u2C7B\u2C7E\u2C7F\uA722-\uA76F\uA771-\uA787\uA78B-\uA78E\uA790-\uA7B9\uA7FA\uAB30-\uAB5A\uAB60-\uAB64\u0250-\u02AF\u1D00-\u1D25\u1D6B-\u1D77\u1D79-\u1D9A\u1E00-\u1EFFёа-яЁА-ЯәөүҗңһӘӨҮҖҢҺα-ωάέίόώήύΑ-ΩΆΈΊΌΏΉΎа-щюяіїєґА-ЩЮЯІЇЄҐѓѕјљњќѐѝЃЅЈЉЊЌЀЍ\u1200-\u137F\u0980-\u09FF\u0591-\u05F4\uFB1D-\uFB4F\u0620-\u064A\u066E-\u06D5\u06E5-\u06FF\u0750-\u077F\u08A0-\u08BD\uFB50-\uFBB1\uFBD3-\uFD3D\uFD50-\uFDC7\uFDF0-\uFDFB\uFE70-\uFEFC\U0001EE00-\U0001EEBB\u0D80-\u0DFF\u0900-\u097F\u0C80-\u0CFF\u0B80-\u0BFF\u0C00-\u0C7F\uAC00-\uD7AF\u1100-\u11FF\u3040-\u309F\u30A0-\u30FFー\u4E00-\u62FF\u6300-\u77FF\u7800-\u8CFF\u8D00-\u9FFF\u3400-\u4DBF\U00020000-\U000215FF\U00021600-\U000230FF\U00023100-\U000245FF\U00024600-\U000260FF\U00026100-\U000275FF\U00027600-\U000290FF\U00029100-\U0002A6DF\U0002A700-\U0002B73F\U0002B740-\U0002B81F\U0002B820-\U0002CEAF\U0002CEB0-\U0002EBEF\u2E80-\u2EFF\u2F00-\u2FDF\u2FF0-\u2FFF\u3000-\u303F\u31C0-\u31EF\u3200-\u32FF\u3300-\u33FF\uF900-\uFAFF\uFE30-\uFE4F\
U0001F200-\U0001F2FF\U0002F800-\U0002FA1F])|(?<=[A-Za-z\uFF21-\uFF3A\uFF41-\uFF5A\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u00FF\u0100-\u017F\u0180-\u01BF\u01C4-\u024F\u2C60-\u2C7B\u2C7E\u2C7F\uA722-\uA76F\uA771-\uA787\uA78B-\uA78E\uA790-\uA7B9\uA7FA\uAB30-\uAB5A\uAB60-\uAB64\u0250-\u02AF\u1D00-\u1D25\u1D6B-\u1D77\u1D79-\u1D9A\u1E00-\u1EFFёа-яЁА-ЯәөүҗңһӘӨҮҖҢҺα-ωάέίόώήύΑ-ΩΆΈΊΌΏΉΎа-щюяіїєґА-ЩЮЯІЇЄҐѓѕјљњќѐѝЃЅЈЉЊЌЀЍ\u1200-\u137F\u0980-\u09FF\u0591-\u05F4\uFB1D-\uFB4F\u0620-\u064A\u066E-\u06D5\u06E5-\u06FF\u0750-\u077F\u08A0-\u08BD\uFB50-\uFBB1\uFBD3-\uFD3D\uFD50-\uFDC7\uFDF0-\uFDFB\uFE70-\uFEFC\U0001EE00-\U0001EEBB\u0D80-\u0DFF\u0900-\u097F\u0C80-\u0CFF\u0B80-\u0BFF\u0C00-\u0C7F\uAC00-\uD7AF\u1100-\u11FF\u3040-\u309F\u30A0-\u30FFー\u4E00-\u62FF\u6300-\u77FF\u7800-\u8CFF\u8D00-\u9FFF\u3400-\u4DBF\U00020000-\U000215FF\U00021600-\U000230FF\U00023100-\U000245FF\U00024600-\U000260FF\U00026100-\U000275FF\U00027600-\U000290FF\U00029100-\U0002A6DF\U0002A700-\U0002B73F\U0002B740-\U0002B81F\U0002B820-\U0002CEAF\U0002CEB0-\U0002EBEF\u2E80-\u2EFF\u2F00-\u2FDF\u2FF0-\u2FFF\u3000-\u303F\u31C0-\u31EF\u3200-\u32FF\u3300-\u33FF\uF900-\uFAFF\uFE30-\uFE4F\U0001F200-\U0001F2FF\U0002F800-\U0002FA1F0-9])[:<>=/](?=[A-Za-z\uFF21-\uFF3A\uFF41-\uFF5A\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u00FF\u0100-\u017F\u0180-\u01BF\u01C4-\u024F\u2C60-\u2C7B\u2C7E\u2C7F\uA722-\uA76F\uA771-\uA787\uA78B-\uA78E\uA790-\uA7B9\uA7FA\uAB30-\uAB5A\uAB60-\uAB64\u0250-\u02AF\u1D00-\u1D25\u1D6B-\u1D77\u1D79-\u1D9A\u1E00-\u1EFFёа-яЁА-ЯәөүҗңһӘӨҮҖҢҺα-ωάέίόώήύΑ-ΩΆΈΊΌΏΉΎа-щюяіїєґА-ЩЮЯІЇЄҐѓѕјљњќѐѝЃЅЈЉЊЌЀЍ\u1200-\u137F\u0980-\u09FF\u0591-\u05F4\uFB1D-\uFB4F\u0620-\u064A\u066E-\u06D5\u06E5-\u06FF\u0750-\u077F\u08A0-\u08BD\uFB50-\uFBB1\uFBD3-\uFD3D\uFD50-\uFDC7\uFDF0-\uFDFB\uFE70-\uFEFC\U0001EE00-\U0001EEBB\u0D80-\u0DFF\u0900-\u097F\u0C80-\u0CFF\u0B80-\u0BFF\u0C00-\u0C7F\uAC00-\uD7AF\u1100-\u11FF\u3040-\u309F\u30A0-\u30FFー\u4E00-\u62FF\u6300-\u77FF\u7800-\u8CFF\u8D00-\u9FFF\u3400-\u4DBF\U00020000-\U000215FF\U00021600-\U000230FF\U00023100-\U000245FF\U00024600-\U000260FF\U00026100-\U000275FF\U00027600-\U000290FF\U00029100-\U0002A6DF\U0002A700-\U0002B73F\U0002B740-\U0002B81F\U0002B820-\U0002CEAF\U0002CEB0-\U0002EBEF\u2E80-\u2EFF\u2F00-\u2FDF\u2FF0-\u2FFF\u3000-\u303F\u31C0-\u31EF\u3200-\u32FF\u3300-\u33FF\uF900-\uFAFF\uFE30-\uFE4F\U0001F200-\U0001F2FF\U0002F800-\U0002FA1F])�token_match��url_match�
|
2 |
+
��A�
|
3 |
+
� ��A� �'��A�'�''��A�''�'Cause��A�'CauseC�because�'Cos��A�'CosC�because�'Coz��A�'CozC�because�'Cuz��A�'CuzC�because�'S��A�'SC�'s�'bout��A�'boutC�about�'cause��A�'causeC�because�'cos��A�'cosC�because�'coz��A�'cozC�because�'cuz��A�'cuzC�because�'d��A�'d�'em��A�'emC�them�'ll��A�'llC�will�'nuff��A�'nuffC�enough�'re��A�'reC�are�'s��A�'sC�'s�(*_*)��A�(*_*)�(-8��A�(-8�(-:��A�(-:�(-;��A�(-;�(-_-)��A�(-_-)�(._.)��A�(._.)�(:��A�(:�(;��A�(;�(=��A�(=�(>_<)��A�(>_<)�(^_^)��A�(^_^)�(o:��A�(o:�(¬_¬)��A�(¬_¬)�(ಠ_ಠ)��A�(ಠ_ಠ)�(╯°□°)╯︵┻━┻��A�(╯°□°)╯︵┻━┻�)-:��A�)-:�):��A�):�-_-��A�-_-�-__-��A�-__-�._.��A�._.�0.0��A�0.0�0.o��A�0.o�0_0��A�0_0�0_o��A�0_o�10a.m.��A�10�A�a.m.C�a.m.�10am��A�10�A�amC�a.m.�10p.m.��A�10�A�p.m.C�p.m.�10pm��A�10�A�pmC�p.m.�11a.m.��A�11�A�a.m.C�a.m.�11am��A�11�A�amC�a.m.�11p.m.��A�11�A�p.m.C�p.m.�11pm��A�11�A�pmC�p.m.�12a.m.��A�12�A�a.m.C�a.m.�12am��A�12�A�amC�a.m.�12p.m.��A�12�A�p.m.C�p.m.�12pm��A�12�A�pmC�p.m.�1a.m.��A�1�A�a.m.C�a.m.�1am��A�1�A�amC�a.m.�1p.m.��A�1�A�p.m.C�p.m.�1pm��A�1�A�pmC�p.m.�2a.m.��A�2�A�a.m.C�a.m.�2am��A�2�A�amC�a.m.�2p.m.��A�2�A�p.m.C�p.m.�2pm��A�2�A�pmC�p.m.�3a.m.��A�3�A�a.m.C�a.m.�3am��A�3�A�amC�a.m.�3p.m.��A�3�A�p.m.C�p.m.�3pm��A�3�A�pmC�p.m.�4a.m.��A�4�A�a.m.C�a.m.�4am��A�4�A�amC�a.m.�4p.m.��A�4�A�p.m.C�p.m.�4pm��A�4�A�pmC�p.m.�5a.m.��A�5�A�a.m.C�a.m.�5am��A�5�A�amC�a.m.�5p.m.��A�5�A�p.m.C�p.m.�5pm��A�5�A�pmC�p.m.�6a.m.��A�6�A�a.m.C�a.m.�6am��A�6�A�amC�a.m.�6p.m.��A�6�A�p.m.C�p.m.�6pm��A�6�A�pmC�p.m.�7a.m.��A�7�A�a.m.C�a.m.�7am��A�7�A�amC�a.m.�7p.m.��A�7�A�p.m.C�p.m.�7pm��A�7�A�pmC�p.m.�8)��A�8)�8-)��A�8-)�8-D��A�8-D�8D��A�8D�8a.m.��A�8�A�a.m.C�a.m.�8am��A�8�A�amC�a.m.�8p.m.��A�8�A�p.m.C�p.m.�8pm��A�8�A�pmC�p.m.�9a.m.��A�9�A�a.m.C�a.m.�9am��A�9�A�amC�a.m.�9p.m.��A�9�A�p.m.C�p.m.�9pm��A�9�A�pmC�p.m.�:'(��A�:'(�:')��A�:')�:'-(��A�:'-(�:'-)��A�:'-)�:(��A�:(�:((��A�:((�:(((��A�:(((�:()��A�:()�:)��A�:)�:))��A�:))�:)))��A�:)))�:*��A�:*�:-(��A�:-(�:-((��A�:-((�:-(((��A�:-(((�:-)��A�:-)�:-))��A�:-))�:-)))��A�:-)))�:-*��A�:-*�:-/��A�:-/�:-0��A�:-0�:-3��A�:-3�:->��A�:->�:-D��A�:-D�:-O��A�:-O�:-P��A�:-P�:-X��A�:-X�:-]��A�:-]�:-o��A�:-o�:-p��A�:-p�:-x��A�:-x�:-|��A�:-|�:-}��A�:-}�:/��A�:/�:0��A�:0�:1��A�:1�:3��A�:3�:>��A�:>�:D��A�:D�:O��A�:O�:P��A�:P�:X��A�:X�:]��A�:]�:o��A�:o�:o)��A�:o)�:p��A�:p�:x��A�:x�:|��A�:|�:}��A�:}�:’(��A�:’(�:’)��A�:’)�:’-(��A�:’-(�:’-)��A�:’-)�;)��A�;)�;-)��A�;-)�;-D��A�;-D�;D��A�;D�;_;��A�;_;�<.<��A�<.<�</3��A�</3�<3��A�<3�<33��A�<33�<333��A�<333�<space>��A�<space>�=(��A�=(�=)��A�=)�=/��A�=/�=3��A�=3�=D��A�=D�=[��A�=[�=]��A�=]�=|��A�=|�>.<��A�>.<�>.>��A�>.>�>:(��A�>:(�>:o��A�>:o�><(((*>��A�><(((*>�@_@��A�@_@�Adm.��A�Adm.�Ain't��A�Ai�A�n'tC�not�Aint��A�Ai�A�ntC�not�Ain’t��A�Ai�A�n’tC�not�Ak.��A�Ak.C�Alaska�Ala.��A�Ala.C�Alabama�Apr.��A�Apr.C�April�Aren't��A�AreC�are�A�n'tC�not�Arent��A�AreC�are�A�ntC�not�Aren’t��A�AreC�are�A�n’tC�not�Ariz.��A�Ariz.C�Arizona�Ark.��A�Ark.C�Arkansas�Aug.��A�Aug.C�August�Bros.��A�Bros.�C'mon��A�C'mC�come�A�on�C++��A�C++�Calif.��A�Calif.C�California�Can't��A�CaC�can�A�n'tC�not�Can't've��A�CaC�can�A�n'tC�not�A�'veC�have�Cannot��A�CanC�can�A�not�Cant��A�CaC�can�A�ntC�not�Cantve��A�CaC�can�A�ntC�not�A�veC�have�Can’t��A�CaC�can�A�n’tC�not�Can’t’ve��A�CaC�can�A�n’tC�not�A�’veC�have�Co.��A�Co.�Colo.��A�Colo.C�Colorado�Conn.��A�Conn.C�Connecticut�Corp.��A�Corp.�Could've��A�CouldC�could�A�'ve�Couldn't��A�CouldC�could�A�n'tC�not�Couldn't've��A�CouldC�could�A�n'tC�not�A�'veC�have�Couldnt��A�CouldC�could�A�ntC�not�Couldntve��A�CouldC�could�A�ntC�not�A�veC�have�Couldn’t��A�CouldC�could�A�n’tC�not�Couldn’t’ve��A�CouldC�could�A�n
’tC�not�A�’veC�have�Couldve��A�CouldC�could�A�ve�Could’ve��A�CouldC�could�A�’ve�C’mon��A�C’mC�come�A�on�D.C.��A�D.C.�Daren't��A�DareC�dare�A�n'tC�not�Darent��A�DareC�dare�A�ntC�not�Daren’t��A�DareC�dare�A�n’tC�not�Dec.��A�Dec.C�December�Del.��A�Del.C�Delaware�Didn't��A�DidC�do�A�n'tC�not�Didn't've��A�DidC�do�A�n'tC�not�A�'veC�have�Didnt��A�DidC�do�A�ntC�not�Didntve��A�DidC�do�A�ntC�not�A�veC�have�Didn’t��A�DidC�do�A�n’tC�not�Didn’t’ve��A�DidC�do�A�n’tC�not�A�’veC�have�Doesn't��A�DoesC�does�A�n'tC�not�Doesn't've��A�DoesC�does�A�n'tC�not�A�'veC�have�Doesnt��A�DoesC�does�A�ntC�not�Doesntve��A�DoesC�does�A�ntC�not�A�veC�have�Doesn’t��A�DoesC�does�A�n’tC�not�Doesn’t’ve��A�DoesC�does�A�n’tC�not�A�’veC�have�Doin��A�DoinC�doing�Doin'��A�Doin'C�doing�Doin’��A�Doin’C�doing�Don't��A�DoC�do�A�n'tC�not�Don't've��A�DoC�do�A�n'tC�not�A�'veC�have�Dont��A�DoC�do�A�ntC�not�Dontve��A�DoC�do�A�ntC�not�A�veC�have�Don’t��A�DoC�do�A�n’tC�not�Don’t’ve��A�DoC�do�A�n’tC�not�A�’veC�have�Dr.��A�Dr.�E.G.��A�E.G.�E.g.��A�E.g.�Feb.��A�Feb.C�February�Fla.��A�Fla.C�Florida�Ga.��A�Ga.C�Georgia�Gen.��A�Gen.�Goin��A�GoinC�going�Goin'��A�Goin'C�going�Goin’��A�Goin’C�going�Gonna��A�GonC�going�A�naC�to�Gotta��A�GotC�got�A�taC�to�Gov.��A�Gov.�Hadn't��A�HadC�have�A�n'tC�not�Hadn't've��A�HadC�have�A�n'tC�not�A�'veC�have�Hadnt��A�HadC�have�A�ntC�not�Hadntve��A�HadC�have�A�ntC�not�A�veC�have�Hadn’t��A�HadC�have�A�n’tC�not�Hadn’t’ve��A�HadC�have�A�n’tC�not�A�’veC�have�Hasn't��A�HasC�has�A�n'tC�not�Hasnt��A�HasC�has�A�ntC�not�Hasn’t��A�HasC�has�A�n’tC�not�Haven't��A�HaveC�have�A�n'tC�not�Havent��A�HaveC�have�A�ntC�not�Haven��t��A�HaveC�have�A�n’tC�not�Havin��A�HavinC�having�Havin'��A�Havin'C�having�Havin’��A�Havin’C�having�He'd��A�HeC�he�A�'dC�'d�He'd've��A�HeC�he�A�'dC�would�A�'veC�have�He'll��A�HeC�he�A�'llC�will�He'll've��A�HeC�he�A�'llC�will�A�'veC�have�He's��A�HeC�he�A�'sC�'s�Hed��A�HeC�he�A�dC�'d�Hedve��A�HeC�he�A�dC�would�A�veC�have�Hellve��A�HeC�he�A�llC�will�A�veC�have�Hes��A�HeC�he�A�s�He’d��A�HeC�he�A�’dC�'d�He’d’ve��A�HeC�he�A�’dC�would�A�’veC�have�He’ll��A�HeC�he�A�’llC�will�He’ll’ve��A�HeC�he�A�’llC�will�A�’veC�have�He’s��A�HeC�he�A�’sC�'s�How'd��A�HowC�how�A�'dC�'d�How'd've��A�HowC�how�A�'dC�would�A�'veC�have�How'd'y��A�HowC�how�A�'d�A�'yC�you�How'll��A�HowC�how�A�'llC�will�How'll've��A�HowC�how�A�'llC�will�A�'veC�have�How're��A�HowC�how�A�'reC�are�How's��A�HowC�how�A�'sC�'s�How've��A�HowC�how�A�'ve�Howd��A�HowC�how�A�dC�'d�Howdve��A�HowC�how�A�dC�would�A�veC�have�Howll��A�HowC�how�A�llC�will�Howllve��A�HowC�how�A�llC�will�A�veC�have�Howre��A�HowC�how�A�reC�are�Hows��A�HowC�how�A�s�Howve��A�How�A�veC�have�How’d��A�HowC�how�A�’dC�'d�How’d’ve��A�HowC�how�A�’dC�would�A�’veC�have�How’d’y��A�HowC�how�A�’d�A�’yC�you�How’ll��A�HowC�how�A�’llC�will�How’ll’ve��A�HowC�how�A�’llC�will�A�’veC�have�How’re��A�HowC�how�A�’reC�are�How’s��A�HowC�how�A�’sC�'s�How’ve��A�HowC�how�A�’ve�I'd��A�IC�i�A�'dC�'d�I'd've��A�IC�i�A�'dC�would�A�'veC�have�I'll��A�IC�i�A�'llC�will�I'll've��A�IC�i�A�'llC�will�A�'veC�have�I'm��A�IC�i�A�'mC�am�I'ma��A�IC�i�A�'mC�am�A�aC�gonna�I've��A�IC�i�A�'veC�have�I.E.��A�I.E.�I.e.��A�I.e.�Ia.��A�Ia.C�Iowa�Id��A�IC�i�A�dC�'d�Id.��A�Id.C�Idaho�Idve��A�IC�i�A�dC�would�A�veC�have�Ill.��A�Ill.C�Illinois�Illve��A�IC�i�A�llC�will�A�veC�have�Im��A�IC�i�A�m�Ima��A�IC�i�A�mC�am�A�aC�gonna�Inc.��A�Inc.�Ind.��A�Ind.C�Indiana�Isn't��A�IsC�is�A�n'tC�not�Isnt��A�IsC�is�A�ntC�not�Isn’t��A�IsC�is�A�n’tC�not�It'd��A�ItC�it�A�'dC�'d�It'd've��A�ItC�it�A�'dC�would�A�'veC�have�It'll��A�ItC�it�A�'llC�will�It'll've��A�ItC�it�A�'llC�will�A�'v
eC�have�It's��A�ItC�it�A�'sC�'s�Itd��A�ItC�it�A�dC�'d�Itdve��A�ItC�it�A�dC�would�A�veC�have�Itll��A�ItC�it�A�llC�will�Itllve��A�ItC�it�A�llC�will�A�veC�have�It’d��A�ItC�it�A�’dC�'d�It’d’ve��A�ItC�it�A�’dC�would�A�’veC�have�It’ll��A�ItC�it�A�’llC�will�It’ll’ve��A�ItC�it�A�’llC�will�A�’veC�have�It’s��A�ItC�it�A�’sC�'s�Ive��A�IC�i�A�veC�have�I’d��A�IC�i�A�’dC�'d�I’d’ve��A�IC�i�A�’dC�would�A�’veC�have�I’ll��A�IC�i�A�’llC�will�I’ll’ve��A�IC�i�A�’llC�will�A�’veC�have�I’m��A�IC�i�A�’mC�am�I’ma��A�IC�i�A�’mC�am�A�aC�gonna�I’ve��A�IC�i�A�’veC�have�Jan.��A�Jan.C�January�Jr.��A�Jr.�Jul.��A�Jul.C�July�Jun.��A�Jun.C�June�Kan.��A�Kan.C�Kansas�Kans.��A�Kans.C�Kansas�Ky.��A�Ky.C�Kentucky�La.��A�La.C�Louisiana�Let's��A�LetC�let�A�'sC�us�Let’s��A�LetC�let�A�’sC�us�Lovin��A�LovinC�loving�Lovin'��A�Lovin'C�loving�Lovin’��A�Lovin’C�loving�Ltd.��A�Ltd.�Ma'am��A�Ma'amC�madam�Mar.��A�Mar.C�March�Mass.��A�Mass.C�Massachusetts�Mayn't��A�MayC�may�A�n'tC�not�Mayn't've��A�MayC�may�A�n'tC�not�A�'veC�have�Maynt��A�MayC�may�A�ntC�not�Mayntve��A�MayC�may�A�ntC�not�A�veC�have�Mayn’t��A�MayC�may�A�n’tC�not�Mayn’t’ve��A�MayC�may�A�n’tC�not�A�’veC�have�Ma’am��A�Ma’amC�madam�Md.��A�Md.�Messrs.��A�Messrs.�Mich.��A�Mich.C�Michigan�Might've��A�MightC�might�A�'ve�Mightn't��A�MightC�might�A�n'tC�not�Mightn't've��A�MightC�might�A�n'tC�not�A�'veC�have�Mightnt��A�MightC�might�A�ntC�not�Mightntve��A�MightC�might�A�ntC�not�A�veC�have�Mightn’t��A�MightC�might�A�n’tC�not�Mightn’t’ve��A�MightC�might�A�n’tC�not�A�’veC�have�Mightve��A�MightC�might�A�ve�Might’ve��A�MightC�might�A�’ve�Minn.��A�Minn.C�Minnesota�Miss.��A�Miss.C�Mississippi�Mo.��A�Mo.�Mont.��A�Mont.�Mr.��A�Mr.�Mrs.��A�Mrs.�Ms.��A�Ms.�Mt.��A�Mt.C�Mount�Must've��A�MustC�must�A�'ve�Mustn't��A�MustC�must�A�n'tC�not�Mustn't've��A�MustC�must�A�n'tC�not�A�'veC�have�Mustnt��A�MustC�must�A�ntC�not�Mustntve��A�MustC�must�A�ntC�not�A�veC�have�Mustn’t��A�MustC�must�A�n’tC�not�Mustn’t’ve��A�MustC�must�A�n’tC�not�A�’veC�have�Mustve��A�MustC�must�A�ve�Must’ve��A�MustC�must�A�’ve�N.C.��A�N.C.C�North Carolina�N.D.��A�N.D.C�North Dakota�N.H.��A�N.H.C�New Hampshire�N.J.��A�N.J.C�New Jersey�N.M.��A�N.M.C�New Mexico�N.Y.��A�N.Y.C�New York�Neb.��A�Neb.C�Nebraska�Nebr.��A�Nebr.C�Nebraska�Needn't��A�NeedC�need�A�n'tC�not�Needn't've��A�NeedC�need�A�n'tC�not�A�'veC�have�Neednt��A�NeedC�need�A�ntC�not�Needntve��A�NeedC�need�A�ntC�not�A�veC�have�Needn’t��A�NeedC�need�A�n’tC�not�Needn’t’ve��A�NeedC�need�A�n’tC�not�A�’veC�have�Nev.��A�Nev.C�Nevada�Not've��A�NotC�not�A�'veC�have�Nothin��A�NothinC�nothing�Nothin'��A�Nothin'C�nothing�Nothin’��A�Nothin’C�nothing�Notve��A�NotC�not�A�veC�have�Not’ve��A�NotC�not�A�’veC�have�Nov.��A�Nov.C�November�Nuthin��A�NuthinC�nothing�Nuthin'��A�Nuthin'C�nothing�Nuthin’��A�Nuthin’C�nothing�O'clock��A�O'clockC�o'clock�O.O��A�O.O�O.o��A�O.o�O_O��A�O_O�O_o��A�O_o�Oct.��A�Oct.C�October�Okla.��A�Okla.C�Oklahoma�Ol��A�OlC�old�Ol'��A�Ol'C�old�Ol’��A�Ol’C�old�Ore.��A�Ore.C�Oregon�Oughtn't��A�OughtC�ought�A�n'tC�not�Oughtn't've��A�OughtC�ought�A�n'tC�not�A�'veC�have�Oughtnt��A�OughtC�ought�A�ntC�not�Oughtntve��A�OughtC�ought�A�ntC�not�A�veC�have�Oughtn’t��A�OughtC�ought�A�n’tC�not�Oughtn’t’ve��A�OughtC�ought�A�n’tC�not�A�’veC�have�O’clock��A�O’clockC�o'clock�Pa.��A�Pa.C�Pennsylvania�Ph.D.��A�Ph.D.�Prof.��A�Prof.�Rep.��A�Rep.�Rev.��A�Rev.�S.C.��A�S.C.C�South 
Carolina�Sen.��A�Sen.�Sep.��A�Sep.C�September�Sept.��A�Sept.C�September�Shan't��A�ShaC�shall�A�n'tC�not�Shan't've��A�ShaC�shall�A�n'tC�not�A�'veC�have�Shant��A�ShaC�shall�A�ntC�not�Shantve��A�ShaC�shall�A�ntC�not�A�veC�have�Shan’t��A�ShaC�shall�A�n’tC�not�Shan’t’ve��A�ShaC�shall�A�n’tC�not�A�’veC�have�She'd��A�SheC�she�A�'dC�'d�She'd've��A�SheC�she�A�'dC�would�A�'veC�have�She'll��A�SheC�she�A�'llC�will�She'll've��A�SheC�she�A�'llC�will�A�'veC�have�She's��A�SheC�she�A�'sC�'s�Shedve��A�SheC�she�A�dC�would�A�veC�have�Shellve��A�SheC�she�A�llC�will�A�veC�have�Shes��A�SheC�she�A�s�She’d��A�SheC�she�A�’dC�'d�She’d’ve��A�SheC�she�A�’dC�would�A�’veC�have�She’ll��A�SheC�she�A�’llC�will�She’ll’ve��A�SheC�she�A�’llC�will�A�’veC�have�She’s��A�SheC�she�A�’sC�'s�Should've��A�ShouldC�should�A�'ve�Shouldn't��A�ShouldC�should�A�n'tC�not�Shouldn't've��A�ShouldC�should�A�n'tC�not�A�'veC�have�Shouldnt��A�ShouldC�should�A�ntC�not�Shouldntve��A�ShouldC�should�A�ntC�not�A�veC�have�Shouldn’t��A�ShouldC�should�A�n’tC�not�Shouldn’t’ve��A�ShouldC�should�A�n’tC�not�A�’veC�have�Shouldve��A�ShouldC�should�A�ve�Should’ve��A�ShouldC�should�A�’ve�Somethin��A�SomethinC�something�Somethin'��A�Somethin'C�something�Somethin’��A�Somethin’C�something�St.��A�St.�Tenn.��A�Tenn.C�Tennessee�That'd��A�ThatC�that�A�'dC�'d�That'd've��A�ThatC�that�A�'dC�would�A�'veC�have�That'll��A�ThatC�that�A�'llC�will�That'll've��A�ThatC�that�A�'llC�will�A�'veC�have�That's��A�ThatC�that�A�'sC�'s�Thatd��A�ThatC�that�A�dC�'d�Thatdve��A�ThatC�that�A�dC�would�A�veC�have�Thatll��A�ThatC�that�A�llC�will�Thatllve��A�ThatC�that�A�llC�will�A�veC�have�Thats��A�ThatC�that�A�s�That’d��A�ThatC�that�A�’dC�'d�That’d’ve��A�ThatC�that�A�’dC�would�A�’veC�have�That’ll��A�ThatC�that�A�’llC�will�That’ll’ve��A�ThatC�that�A�’llC�will�A�’veC�have�That’s��A�ThatC�that�A�’sC�'s�There'd��A�ThereC�there�A�'dC�'d�There'd've��A�ThereC�there�A�'dC�would�A�'veC�have�There'll��A�ThereC�there�A�'llC�will�There'll've��A�ThereC�there�A�'llC�will�A�'veC�have�There're��A�ThereC�there�A�'reC�are�There's��A�ThereC�there�A�'sC�'s�There've��A�ThereC�there�A�'ve�Thered��A�ThereC�there�A�dC�'d�Theredve��A�ThereC�there�A�dC�would�A�veC�have�Therell��A�ThereC�there�A�llC�will�Therellve��A�ThereC�there�A�llC�will�A�veC�have�Therere��A�ThereC�there�A�reC�are�Theres��A�ThereC�there�A�s�Thereve��A�There�A�veC�have�There’d��A�ThereC�there�A�’dC�'d�There’d’ve��A�ThereC�there�A�’dC�would�A�’veC�have�There’ll��A�ThereC�there�A�’llC�will�There’ll’ve��A�ThereC�there�A�’llC�will�A�’veC�have�There’re��A�ThereC�there�A�’reC�are�There’s��A�ThereC�there�A�’sC�'s�There’ve��A�ThereC�there�A�’ve�These'd��A�TheseC�these�A�'dC�'d�These'd've��A�TheseC�these�A�'dC�would�A�'veC�have�These'll��A�TheseC�these�A�'llC�will�These'll've��A�TheseC�these�A�'llC�will�A�'veC�have�These're��A�TheseC�these�A�'reC�are�These've��A�TheseC�these�A�'ve�Thesed��A�TheseC�these�A�dC�'d�Thesedve��A�TheseC�these�A�dC�would�A�veC�have�Thesell��A�TheseC�these�A�llC�will�Thesellve��A�TheseC�these�A�llC�will�A�veC�have�Thesere��A�TheseC�these�A�reC�are�Theseve��A�These�A�veC�have�These’d��A�TheseC�these�A�’dC�'d�These’d’ve��A�TheseC�these�A�’dC�would�A�’veC�have�These’ll��A�TheseC�these�A�’llC�will�These’ll’ve��A�TheseC�these�A�’llC�will�A�’veC�have�These’re��A�TheseC�these�A�’reC�are�These’ve��A�TheseC�these�A�’ve�They'd��A�TheyC�they�A�'dC�'d�They'd've��A�TheyC�they�A�'dC�would�A�'veC�have�They'll��A�TheyC�they�A�'llC�will�They'll've��A�TheyC�they�A�'llC�will�A�'veC�have�They're��A�TheyC�they�A�'reC�are�They've��A�TheyC�they�A�'veC�have�They
d��A�TheyC�they�A�dC�'d�Theydve��A�TheyC�they�A�dC�would�A�veC�have�Theyll��A�TheyC�they�A�llC�will�Theyllve��A�TheyC�they�A�llC�will�A�veC�have�Theyre��A�TheyC�they�A�reC�are�Theyve��A�TheyC�they�A�veC�have�They’d��A�TheyC�they�A�’dC�'d�They’d’ve��A�TheyC�they�A�’dC�would�A�’veC�have�They’ll��A�TheyC�they�A�’llC�will�They’ll’ve��A�TheyC�they�A�’llC�will�A�’veC�have�They’re��A�TheyC�they�A�’reC�are�They’ve��A�TheyC�they�A�’veC�have�This'd��A�ThisC�this�A�'dC�'d�This'd've��A�ThisC�this�A�'dC�would�A�'veC�have�This'll��A�ThisC�this�A�'llC�will�This'll've��A�ThisC�this�A�'llC�will�A�'veC�have�This's��A�ThisC�this�A�'sC�'s�Thisd��A�ThisC�this�A�dC�'d�Thisdve��A�ThisC�this�A�dC�would�A�veC�have�Thisll��A�ThisC�this�A�llC�will�Thisllve��A�ThisC�this�A�llC�will�A�veC�have�Thiss��A�ThisC�this�A�s�This’d��A�ThisC�this�A�’dC�'d�This’d’ve��A�ThisC�this�A�’dC�would�A�’veC�have�This’ll��A�ThisC�this�A�’llC�will�This’ll’ve��A�ThisC�this�A�’llC�will�A�’veC�have�This’s��A�ThisC�this�A�’sC�'s�Those'd��A�ThoseC�those�A�'dC�'d�Those'd've��A�ThoseC�those�A�'dC�would�A�'veC�have�Those'll��A�ThoseC�those�A�'llC�will�Those'll've��A�ThoseC�those�A�'llC�will�A�'veC�have�Those're��A�ThoseC�those�A�'reC�are�Those've��A�ThoseC�those�A�'ve�Thosed��A�ThoseC�those�A�dC�'d�Thosedve��A�ThoseC�those�A�dC�would�A�veC�have�Thosell��A�ThoseC�those�A�llC�will�Thosellve��A�ThoseC�those�A�llC�will�A�veC�have�Thosere��A�ThoseC�those�A�reC�are�Thoseve��A�Those�A�veC�have�Those’d��A�ThoseC�those�A�’dC�'d�Those’d’ve��A�ThoseC�those�A�’dC�would�A�’veC�have�Those’ll��A�ThoseC�those�A�’llC�will�Those’ll’ve��A�ThoseC�those�A�’llC�will�A�’veC�have�Those’re��A�ThoseC�those�A�’reC�are�Those’ve��A�ThoseC�those�A�’ve�V.V��A�V.V�V_V��A�V_V�Va.��A�Va.C�Virginia�Wash.��A�Wash.C�Washington�Wasn't��A�WasC�was�A�n'tC�not�Wasnt��A�WasC�was�A�ntC�not�Wasn’t��A�WasC�was�A�n’tC�not�We'd��A�WeC�we�A�'dC�'d�We'd've��A�WeC�we�A�'dC�would�A�'veC�have�We'll��A�WeC�we�A�'llC�will�We'll've��A�WeC�we�A�'llC�will�A�'veC�have�We're��A�WeC�we�A�'reC�are�We've��A�WeC�we�A�'veC�have�Wed��A�WeC�we�A�dC�'d�Wedve��A�WeC�we�A�dC�would�A�veC�have�Wellve��A�WeC�we�A�llC�will�A�veC�have�Weren't��A�WereC�were�A�n'tC�not�Werent��A�WereC�were�A�ntC�not�Weren’t��A�WereC�were�A�n’tC�not�Weve��A�WeC�we�A�veC�have�We’d��A�WeC�we�A�’dC�'d�We’d’ve��A�WeC�we�A�’dC�would�A�’veC�have�We’ll��A�WeC�we�A�’llC�will�We’ll’ve��A�WeC�we�A�’llC�will�A�’veC�have�We’re��A�WeC�we�A�’reC�are�We’ve��A�WeC�we�A�’veC�have�What'd��A�WhatC�what�A�'dC�'d�What'd've��A�WhatC�what�A�'dC�would�A�'veC�have�What'll��A�WhatC�what�A�'llC�will�What'll've��A�WhatC�what�A�'llC�will�A�'veC�have�What're��A�WhatC�what�A�'reC�are�What's��A�WhatC�what�A�'sC�'s�What've��A�WhatC�what�A�'ve�Whatd��A�WhatC�what�A�dC�'d�Whatdve��A�WhatC�what�A�dC�would�A�veC�have�Whatll��A�WhatC�what�A�llC�will�Whatllve��A�WhatC�what�A�llC�will�A�veC�have�Whatre��A�WhatC�what�A�reC�are�Whats��A�WhatC�what�A�s�Whatve��A�What�A�veC�have�What’d��A�WhatC�what�A�’dC�'d�What’d’ve��A�WhatC�what�A�’dC�would�A�’veC�have�What’ll��A�WhatC�what�A�’llC�will�What’ll’ve��A�WhatC�what�A�’llC�will�A�’veC�have�What’re��A�WhatC�what�A�’reC�are�What’s��A�WhatC�what�A�’sC�'s�What’ve��A�WhatC�what�A�’ve�When'd��A�WhenC�when�A�'dC�'d�When'd've��A�WhenC�when�A�'dC�would�A�'veC�have�When'll��A�WhenC�when�A�'llC�will�When'll've��A�WhenC�when�A�'llC�will�A�'veC�have�When're��A�WhenC�when�A�'reC�are�When's��A�WhenC�when�A�'sC�'s�When've��A�WhenC�when�A�'ve�Whend��A�WhenC�when�A�dC�'d�Whendve��A�WhenC�when�A�dC�would�A�veC�have�Whenll��A�WhenC�when�A�llC�will�Whenllv
e��A�WhenC�when�A�llC�will�A�veC�have�Whenre��A�WhenC�when�A�reC�are�Whens��A�WhenC�when�A�s�Whenve��A�When�A�veC�have�When’d��A�WhenC�when�A�’dC�'d�When’d’ve��A�WhenC�when�A�’dC�would�A�’veC�have�When’ll��A�WhenC�when�A�’llC�will�When’ll’ve��A�WhenC�when�A�’llC�will�A�’veC�have�When’re��A�WhenC�when�A�’reC�are�When’s��A�WhenC�when�A�’sC�'s�When’ve��A�WhenC�when�A�’ve�Where'd��A�WhereC�where�A�'dC�'d�Where'd've��A�WhereC�where�A�'dC�would�A�'veC�have�Where'll��A�WhereC�where�A�'llC�will�Where'll've��A�WhereC�where�A�'llC�will�A�'veC�have�Where're��A�WhereC�where�A�'reC�are�Where's��A�WhereC�where�A�'sC�'s�Where've��A�WhereC�where�A�'ve�Whered��A�WhereC�where�A�dC�'d�Wheredve��A�WhereC�where�A�dC�would�A�veC�have�Wherell��A�WhereC�where�A�llC�will�Wherellve��A�WhereC�where�A�llC�will�A�veC�have�Wherere��A�WhereC�where�A�reC�are�Wheres��A�WhereC�where�A�s�Whereve��A�Where�A�veC�have�Where’d��A�WhereC�where�A�’dC�'d�Where’d’ve��A�WhereC�where�A�’dC�would�A�’veC�have�Where’ll��A�WhereC�where�A�’llC�will�Where’ll’ve��A�WhereC�where�A�’llC�will�A�’veC�have�Where’re��A�WhereC�where�A�’reC�are�Where’s��A�WhereC�where�A�’sC�'s�Where’ve��A�WhereC�where�A�’ve�Who'd��A�WhoC�who�A�'dC�'d�Who'd've��A�WhoC�who�A�'dC�would�A�'veC�have�Who'll��A�WhoC�who�A�'llC�will�Who'll've��A�WhoC�who�A�'llC�will�A�'veC�have�Who're��A�WhoC�who�A�'reC�are�Who's��A�WhoC�who�A�'sC�'s�Who've��A�WhoC�who�A�'ve�Whod��A�WhoC�who�A�dC�'d�Whodve��A�WhoC�who�A�dC�would�A�veC�have�Wholl��A�WhoC�who�A�llC�will�Whollve��A�WhoC�who�A�llC�will�A�veC�have�Whos��A�WhoC�who�A�s�Whove��A�Who�A�veC�have�Who’d��A�WhoC�who�A�’dC�'d�Who’d’ve��A�WhoC�who�A�’dC�would�A�’veC�have�Who’ll��A�WhoC�who�A�’llC�will�Who’ll’ve��A�WhoC�who�A�’llC�will�A�’veC�have�Who’re��A�WhoC�who�A�’reC�are�Who’s��A�WhoC�who�A�’sC�'s�Who’ve��A�WhoC�who�A�’ve�Why'd��A�WhyC�why�A�'dC�'d�Why'd've��A�WhyC�why�A�'dC�would�A�'veC�have�Why'll��A�WhyC�why�A�'llC�will�Why'll've��A�WhyC�why�A�'llC�will�A�'veC�have�Why're��A�WhyC�why�A�'reC�are�Why's��A�WhyC�why�A�'sC�'s�Why've��A�WhyC�why�A�'ve�Whyd��A�WhyC�why�A�dC�'d�Whydve��A�WhyC�why�A�dC�would�A�veC�have�Whyll��A�WhyC�why�A�llC�will�Whyllve��A�WhyC�why�A�llC�will�A�veC�have�Whyre��A�WhyC�why�A�reC�are�Whys��A�WhyC�why�A�s�Whyve��A�Why�A�veC�have�Why’d��A�WhyC�why�A�’dC�'d�Why’d’ve��A�WhyC�why�A�’dC�would�A�’veC�have�Why’ll��A�WhyC�why�A�’llC�will�Why’ll’ve��A�WhyC�why�A�’llC�will�A�’veC�have�Why’re��A�WhyC�why�A�’reC�are�Why’s��A�WhyC�why�A�’sC�'s�Why’ve��A�WhyC�why�A�’ve�Wis.��A�Wis.C�Wisconsin�Won't��A�WoC�will�A�n'tC�not�Won't've��A�WoC�will�A�n'tC�not�A�'veC�have�Wont��A�WoC�will�A�ntC�not�Wontve��A�WoC�will�A�ntC�not�A�veC�have�Won’t��A�WoC�will�A�n’tC�not�Won’t’ve��A�WoC�will�A�n’tC�not�A�’veC�have�Would've��A�WouldC�would�A�'ve�Wouldn't��A�WouldC�would�A�n'tC�not�Wouldn't've��A�WouldC�would�A�n'tC�not�A�'veC�have�Wouldnt��A�WouldC�would�A�ntC�not�Wouldntve��A�WouldC�would�A�ntC�not�A�veC�have�Wouldn’t��A�WouldC�would�A�n’tC�not�Wouldn’t’ve��A�WouldC�would�A�n’tC�not�A�’veC�have�Wouldve��A�WouldC�would�A�ve�Would’ve��A�WouldC�would�A�’ve�XD��A�XD�XDD��A�XDD�You'd��A�YouC�you�A�'dC�'d�You'd've��A�YouC�you�A�'dC�would�A�'veC�have�You'll��A�YouC�you�A�'llC�will�You'll've��A�YouC�you�A�'llC�will�A�'veC�have�You're��A�YouC�you�A�'reC�are�You've��A�YouC�you�A�'veC�have�Youd��A�YouC�you�A�dC�'d�Youdve��A�YouC�you�A�dC�would�A�veC�have�Youll��A�YouC�you�A�llC�will�Youllve��A�YouC�you�A�llC�will�A�veC�have�Youre��A�YouC�you�A�reC�are�Youve��A�YouC�you�A�veC�have�You’d��A�YouC�you�A�’dC�'d�You’d’ve��A�YouC�you�A�’dC�would�A�’ve
C�have�You’ll��A�YouC�you�A�’llC�will�You’ll’ve��A�YouC�you�A�’llC�will�A�’veC�have�You’re��A�YouC�you�A�’reC�are�You’ve��A�YouC�you�A�’veC�have�[-:��A�[-:�[:��A�[:�[=��A�[=�\")��A�\")�\n��A�\n�\t��A�\t�]=��A�]=�^_^��A�^_^�^__^��A�^__^�^___^��A�^___^�a.��A�a.�a.m.��A�a.m.�ain't��A�ai�A�n'tC�not�aint��A�ai�A�ntC�not�ain’t��A�ai�A�n’tC�not�and/or��A�and/orC�and/or�aren't��A�areC�are�A�n'tC�not�arent��A�areC�are�A�ntC�not�aren’t��A�areC�are�A�n’tC�not�b.��A�b.�c'mon��A�c'mC�come�A�on�c.��A�c.�can't��A�caC�can�A�n'tC�not�can't've��A�caC�can�A�n'tC�not�A�'veC�have�cannot��A�can�A�not�cant��A�caC�can�A�ntC�not�cantve��A�caC�can�A�ntC�not�A�veC�have�can’t��A�caC�can�A�n’tC�not�can’t’ve��A�caC�can�A�n’tC�not�A�’veC�have�co.��A�co.�could've��A�couldC�could�A�'ve�couldn't��A�couldC�could�A�n'tC�not�couldn't've��A�couldC�could�A�n'tC�not�A�'veC�have�couldnt��A�couldC�could�A�ntC�not�couldntve��A�couldC�could�A�ntC�not�A�veC�have�couldn’t��A�couldC�could�A�n’tC�not�couldn’t’ve��A�couldC�could�A�n’tC�not�A�’veC�have�couldve��A�couldC�could�A�ve�could’ve��A�couldC�could�A�’ve�c’mon��A�c’mC�come�A�on�d.��A�d.�daren't��A�dareC�dare�A�n'tC�not�darent��A�dareC�dare�A�ntC�not�daren’t��A�dareC�dare�A�n’tC�not�didn't��A�didC�do�A�n'tC�not�didn't've��A�didC�do�A�n'tC�not�A�'veC�have�didnt��A�didC�do�A�ntC�not�didntve��A�didC�do�A�ntC�not�A�veC�have�didn’t��A�didC�do�A�n’tC�not�didn’t’ve��A�didC�do�A�n’tC�not�A�’veC�have�doesn't��A�doesC�does�A�n'tC�not�doesn't've��A�doesC�does�A�n'tC�not�A�'veC�have�doesnt��A�doesC�does�A�ntC�not�doesntve��A�doesC�does�A�ntC�not�A�veC�have�doesn’t��A�doesC�does�A�n’tC�not�doesn’t’ve��A�doesC�does�A�n’tC�not�A�’veC�have�doin��A�doinC�doing�doin'��A�doin'C�doing�doin’��A�doin’C�doing�don't��A�doC�do�A�n'tC�not�don't've��A�doC�do�A�n'tC�not�A�'veC�have�dont��A�doC�do�A�ntC�not�dontve��A�doC�do�A�ntC�not�A�veC�have�don’t��A�doC�do�A�n’tC�not�don’t’ve��A�doC�do�A�n’tC�not�A�’veC�have�e.��A�e.�e.g.��A�e.g.�em��A�emC�them�f.��A�f.�g.��A�g.�goin��A�goinC�going�goin'��A�goin'C�going�goin’��A�goin’C�going�gonna��A�gonC�going�A�naC�to�gotta��A�got�A�taC�to�h.��A�h.�hadn't��A�hadC�have�A�n'tC�not�hadn't've��A�hadC�have�A�n'tC�not�A�'veC�have�hadnt��A�hadC�have�A�ntC�not�hadntve��A�hadC�have�A�ntC�not�A�veC�have�hadn’t��A�hadC�have�A�n’tC�not�hadn’t’ve��A�hadC�have�A�n’tC�not�A�’veC�have�hasn't��A�hasC�has�A�n'tC�not�hasnt��A�hasC�has�A�ntC�not�hasn’t��A�hasC�has�A�n’tC�not�haven't��A�haveC�have�A�n'tC�not�havent��A�haveC�have�A�ntC�not�haven’t��A�haveC�have�A�n’tC�not�havin��A�havinC�having�havin'��A�havin'C�having�havin’��A�havin’C�having�he'd��A�heC�he�A�'dC�'d�he'd've��A�heC�he�A�'dC�would�A�'veC�have�he'll��A�heC�he�A�'llC�will�he'll've��A�heC�he�A�'llC�will�A�'veC�have�he's��A�heC�he�A�'sC�'s�hed��A�heC�he�A�dC�'d�hedve��A�heC�he�A�dC�would�A�veC�have�hellve��A�heC�he�A�llC�will�A�veC�have�hes��A�heC�he�A�s�he’d��A�heC�he�A�’dC�'d�he’d’ve��A�heC�he�A�’dC�would�A�’veC�have�he’ll��A�heC�he�A�’llC�will�he’ll’ve��A�heC�he�A�’llC�will�A�’veC�have�he’s��A�heC�he�A�’sC�'s�how'd��A�howC�how�A�'dC�'d�how'd've��A�howC�how�A�'dC�would�A�'veC�have�how'd'y��A�how�A�'d�A�'yC�you�how'll��A�howC�how�A�'llC�will�how'll've��A�howC�how�A�'llC�will�A�'veC�have�how're��A�howC�how�A�'reC�are�how's��A�howC�how�A�'sC�'s�how've��A�howC�how�A�'ve�howd��A�howC�how�A�dC�'d�howdve��A�howC�how�A�dC�would�A�veC�have�howll��A�howC�how�A�llC�will�howllve��A�howC�how�A�llC�will�A�veC�have�howre��A�howC�how�A�reC�are�hows��A�howC�how�A�s�howve��A�how�A�veC�have�how’d��A�howC�how�A�’dC�'d�how’d’ve��A�howC�how�A�’dC�would
�A�’veC�have�how’d’y��A�how�A�’d�A�’yC�you�how’ll��A�howC�how�A�’llC�will�how’ll’ve��A�howC�how�A�’llC�will�A�’veC�have�how’re��A�howC�how�A�’reC�are�how’s��A�howC�how�A�’sC�'s�how’ve��A�howC�how�A�’ve�i'd��A�iC�i�A�'dC�'d�i'd've��A�iC�i�A�'dC�would�A�'veC�have�i'll��A�iC�i�A�'llC�will�i'll've��A�iC�i�A�'llC�will�A�'veC�have�i'm��A�iC�i�A�'mC�am�i'ma��A�iC�i�A�'mC�am�A�aC�gonna�i've��A�iC�i�A�'veC�have�i.��A�i.�i.e.��A�i.e.�id��A�iC�i�A�dC�'d�idve��A�iC�i�A�dC�would�A�veC�have�illve��A�iC�i�A�llC�will�A�veC�have�im��A�iC�i�A�m�ima��A�iC�i�A�mC�am�A�aC�gonna�isn't��A�isC�is�A�n'tC�not�isnt��A�isC�is�A�ntC�not�isn’t��A�isC�is�A�n’tC�not�it'd��A�itC�it�A�'dC�'d�it'd've��A�itC�it�A�'dC�would�A�'veC�have�it'll��A�itC�it�A�'llC�will�it'll've��A�itC�it�A�'llC�will�A�'veC�have�it's��A�itC�it�A�'sC�'s�itd��A�itC�it�A�dC�'d�itdve��A�itC�it�A�dC�would�A�veC�have�itll��A�itC�it�A�llC�will�itllve��A�itC�it�A�llC�will�A�veC�have�it’d��A�itC�it�A�’dC�'d�it’d’ve��A�itC�it�A�’dC�would�A�’veC�have�it’ll��A�itC�it�A�’llC�will�it’ll’ve��A�itC�it�A�’llC�will�A�’veC�have�it’s��A�itC�it�A�’sC�'s�ive��A�iC�i�A�veC�have�i’d��A�iC�i�A�’dC�'d�i’d’ve��A�iC�i�A�’dC�would�A�’veC�have�i’ll��A�iC�i�A�’llC�will�i’ll’ve��A�iC�i�A�’llC�will�A�’veC�have�i’m��A�iC�i�A�’mC�am�i’ma��A�iC�i�A�’mC�am�A�aC�gonna�i’ve��A�iC�i�A�’veC�have�j.��A�j.�k.��A�k.�l.��A�l.�let's��A�let�A�'sC�us�let’s��A�let�A�’sC�us�ll��A�llC�will�lovin��A�lovinC�loving�lovin'��A�lovin'C�loving�lovin’��A�lovin’C�loving�m.��A�m.�ma'am��A�ma'amC�madam�mayn't��A�mayC�may�A�n'tC�not�mayn't've��A�mayC�may�A�n'tC�not�A�'veC�have�maynt��A�mayC�may�A�ntC�not�mayntve��A�mayC�may�A�ntC�not�A�veC�have�mayn’t��A�mayC�may�A�n’tC�not�mayn’t’ve��A�mayC�may�A�n’tC�not�A�’veC�have�ma’am��A�ma’amC�madam�might've��A�mightC�might�A�'ve�mightn't��A�mightC�might�A�n'tC�not�mightn't've��A�mightC�might�A�n'tC�not�A�'veC�have�mightnt��A�mightC�might�A�ntC�not�mightntve��A�mightC�might�A�ntC�not�A�veC�have�mightn’t��A�mightC�might�A�n’tC�not�mightn’t’ve��A�mightC�might�A�n’tC�not�A�’veC�have�mightve��A�mightC�might�A�ve�might’ve��A�mightC�might�A�’ve�must've��A�mustC�must�A�'ve�mustn't��A�mustC�must�A�n'tC�not�mustn't've��A�mustC�must�A�n'tC�not�A�'veC�have�mustnt��A�mustC�must�A�ntC�not�mustntve��A�mustC�must�A�ntC�not�A�veC�have�mustn’t��A�mustC�must�A�n’tC�not�mustn’t’ve��A�mustC�must�A�n’tC�not�A�’veC�have�mustve��A�mustC�must�A�ve�must’ve��A�mustC�must�A�’ve�n.��A�n.�needn't��A�needC�need�A�n'tC�not�needn't've��A�needC�need�A�n'tC�not�A�'veC�have�neednt��A�needC�need�A�ntC�not�needntve��A�needC�need�A�ntC�not�A�veC�have�needn’t��A�needC�need�A�n’tC�not�needn’t’ve��A�needC�need�A�n’tC�not�A�’veC�have�not've��A�not�A�'veC�have�nothin��A�nothinC�nothing�nothin'��A�nothin'C�nothing�nothin’��A�nothin’C�nothing�notve��A�not�A�veC�have�not’ve��A�not�A�’veC�have�nuff��A�nuffC�enough�nuthin��A�nuthinC�nothing�nuthin'��A�nuthin'C�nothing�nuthin’��A�nuthin’C�nothing�o'clock��A�o'clockC�o'clock�o.��A�o.�o.0��A�o.0�o.O��A�o.O�o.o��A�o.o�o_0��A�o_0�o_O��A�o_O�o_o��A�o_o�ol��A�olC�old�ol'��A�ol'C�old�ol’��A�ol’C�old�oughtn't��A�oughtC�ought�A�n'tC�not�oughtn't've��A�oughtC�ought�A�n'tC�not�A�'veC�have�oughtnt��A�oughtC�ought�A�ntC�not�oughtntve��A�oughtC�ought�A�ntC�not�A�veC�have�oughtn’t��A�oughtC�ought�A�n’tC�not�oughtn’t’ve��A�oughtC�ought�A�n’tC�not�A�’veC�have�o’clock��A�o’clockC�o'clock�p.��A�p.�p.m.��A�p.m.�q.��A�q.�r.��A�r.�s.��A�s.�shan't��A�shaC�shall�A�n'tC�not�shan't've��A�shaC�shall�A�n'tC�not�A�'veC�have�shant��A�shaC�shall�A�ntC�not�shantve��A�shaC�shall�A�ntC�not�A�veC�have�sh
an’t��A�shaC�shall�A�n’tC�not�shan’t’ve��A�shaC�shall�A�n’tC�not�A�’veC�have�she'd��A�sheC�she�A�'dC�'d�she'd've��A�sheC�she�A�'dC�would�A�'veC�have�she'll��A�sheC�she�A�'llC�will�she'll've��A�sheC�she�A�'llC�will�A�'veC�have�she's��A�sheC�she�A�'sC�'s�shedve��A�sheC�she�A�dC�would�A�veC�have�shellve��A�sheC�she�A�llC�will�A�veC�have�shes��A�sheC�she�A�s�she’d��A�sheC�she�A�’dC�'d�she’d’ve��A�sheC�she�A�’dC�would�A�’veC�have�she’ll��A�sheC�she�A�’llC�will�she’ll’ve��A�sheC�she�A�’llC�will�A�’veC�have�she’s��A�sheC�she�A�’sC�'s�should've��A�shouldC�should�A�'ve�shouldn't��A�shouldC�should�A�n'tC�not�shouldn't've��A�shouldC�should�A�n'tC�not�A�'veC�have�shouldnt��A�shouldC�should�A�ntC�not�shouldntve��A�shouldC�should�A�ntC�not�A�veC�have�shouldn’t��A�shouldC�should�A�n’tC�not�shouldn’t’ve��A�shouldC�should�A�n’tC�not�A�’veC�have�shouldve��A�shouldC�should�A�ve�should’ve��A�shouldC�should�A�’ve�somethin��A�somethinC�something�somethin'��A�somethin'C�something�somethin’��A�somethin’C�something�t.��A�t.�that'd��A�thatC�that�A�'dC�'d�that'd've��A�thatC�that�A�'dC�would�A�'veC�have�that'll��A�thatC�that�A�'llC�will�that'll've��A�thatC�that�A�'llC�will�A�'veC�have�that's��A�thatC�that�A�'sC�'s�thatd��A�thatC�that�A�dC�'d�thatdve��A�thatC�that�A�dC�would�A�veC�have�thatll��A�thatC�that�A�llC�will�thatllve��A�thatC�that�A�llC�will�A�veC�have�thats��A�thatC�that�A�s�that’d��A�thatC�that�A�’dC�'d�that’d’ve��A�thatC�that�A�’dC�would�A�’veC�have�that’ll��A�thatC�that�A�’llC�will�that’ll’ve��A�thatC�that�A�’llC�will�A�’veC�have�that’s��A�thatC�that�A�’sC�'s�there'd��A�thereC�there�A�'dC�'d�there'd've��A�thereC�there�A�'dC�would�A�'veC�have�there'll��A�thereC�there�A�'llC�will�there'll've��A�thereC�there�A�'llC�will�A�'veC�have�there're��A�thereC�there�A�'reC�are�there's��A�thereC�there�A�'sC�'s�there've��A�thereC�there�A�'ve�thered��A�thereC�there�A�dC�'d�theredve��A�thereC�there�A�dC�would�A�veC�have�therell��A�thereC�there�A�llC�will�therellve��A�thereC�there�A�llC�will�A�veC�have�therere��A�thereC�there�A�reC�are�theres��A�thereC�there�A�s�thereve��A�there�A�veC�have�there’d��A�thereC�there�A�’dC�'d�there’d’ve��A�thereC�there�A�’dC�would�A�’veC�have�there’ll��A�thereC�there�A�’llC�will�there’ll’ve��A�thereC�there�A�’llC�will�A�’veC�have�there’re��A�thereC�there�A�’reC�are�there’s��A�thereC�there�A�’sC�'s�there’ve��A�thereC�there�A�’ve�these'd��A�theseC�these�A�'dC�'d�these'd've��A�theseC�these�A�'dC�would�A�'veC�have�these'll��A�theseC�these�A�'llC�will�these'll've��A�theseC�these�A�'llC�will�A�'veC�have�these're��A�theseC�these�A�'reC�are�these've��A�theseC�these�A�'ve�thesed��A�theseC�these�A�dC�'d�thesedve��A�theseC�these�A�dC�would�A�veC�have�thesell��A�theseC�these�A�llC�will�thesellve��A�theseC�these�A�llC�will�A�veC�have�thesere��A�theseC�these�A�reC�are�theseve��A�these�A�veC�have�these’d��A�theseC�these�A�’dC�'d�these’d’ve��A�theseC�these�A�’dC�would�A�’veC�have�these’ll��A�theseC�these�A�’llC�will�these’ll’ve��A�theseC�these�A�’llC�will�A�’veC�have�these’re��A�theseC�these�A�’reC�are�these’ve��A�theseC�these�A�’ve�they'd��A�theyC�they�A�'dC�'d�they'd've��A�theyC�they�A�'dC�would�A�'veC�have�they'll��A�theyC�they�A�'llC�will�they'll've��A�theyC�they�A�'llC�will�A�'veC�have�they're��A�theyC�they�A�'reC�are�they've��A�theyC�they�A�'veC�have�theyd��A�theyC�they�A�dC�'d�theydve��A�theyC�they�A�dC�would�A�veC�have�theyll��A�theyC�they�A�llC�will�theyllve��A�theyC�they�A�llC�will�A�veC�have�theyre��A�theyC�they�A�reC�are�theyve��A�theyC�they�A�veC�have�they’d��A�theyC�they�A�’dC�'d�they’d’ve��A�the
yC�they�A�’dC�would�A�’veC�have�they’ll��A�theyC�they�A�’llC�will�they’ll’ve��A�theyC�they�A�’llC�will�A�’veC�have�they’re��A�theyC�they�A�’reC�are�they’ve��A�theyC�they�A�’veC�have�this'd��A�thisC�this�A�'dC�'d�this'd've��A�thisC�this�A�'dC�would�A�'veC�have�this'll��A�thisC�this�A�'llC�will�this'll've��A�thisC�this�A�'llC�will�A�'veC�have�this's��A�thisC�this�A�'sC�'s�thisd��A�thisC�this�A�dC�'d�thisdve��A�thisC�this�A�dC�would�A�veC�have�thisll��A�thisC�this�A�llC�will�thisllve��A�thisC�this�A�llC�will�A�veC�have�thiss��A�thisC�this�A�s�this’d��A�thisC�this�A�’dC�'d�this’d’ve��A�thisC�this�A�’dC�would�A�’veC�have�this’ll��A�thisC�this�A�’llC�will�this’ll’ve��A�thisC�this�A�’llC�will�A�’veC�have�this’s��A�thisC�this�A�’sC�'s�those'd��A�thoseC�those�A�'dC�'d�those'd've��A�thoseC�those�A�'dC�would�A�'veC�have�those'll��A�thoseC�those�A�'llC�will�those'll've��A�thoseC�those�A�'llC�will�A�'veC�have�those're��A�thoseC�those�A�'reC�are�those've��A�thoseC�those�A�'ve�thosed��A�thoseC�those�A�dC�'d�thosedve��A�thoseC�those�A�dC�would�A�veC�have�thosell��A�thoseC�those�A�llC�will�thosellve��A�thoseC�those�A�llC�will�A�veC�have�thosere��A�thoseC�those�A�reC�are�thoseve��A�those�A�veC�have�those’d��A�thoseC�those�A�’dC�'d�those’d’ve��A�thoseC�those�A�’dC�would�A�’veC�have�those’ll��A�thoseC�those�A�’llC�will�those’ll’ve��A�thoseC�those�A�’llC�will�A�’veC�have�those’re��A�thoseC�those�A�’reC�are�those’ve��A�thoseC�those�A�’ve�u.��A�u.�v.��A�v.�v.s.��A�v.s.�v.v��A�v.v�v_v��A�v_v�vs.��A�vs.�w.��A�w.�w/o��A�w/oC�without�wasn't��A�wasC�was�A�n'tC�not�wasnt��A�wasC�was�A�ntC�not�wasn’t��A�wasC�was�A�n’tC�not�we'd��A�weC�we�A�'dC�'d�we'd've��A�weC�we�A�'dC�would�A�'veC�have�we'll��A�weC�we�A�'llC�will�we'll've��A�weC�we�A�'llC�will�A�'veC�have�we're��A�weC�we�A�'reC�are�we've��A�weC�we�A�'veC�have�wed��A�weC�we�A�dC�'d�wedve��A�weC�we�A�dC�would�A�veC�have�wellve��A�weC�we�A�llC�will�A�veC�have�weren't��A�wereC�were�A�n'tC�not�werent��A�wereC�were�A�ntC�not�weren’t��A�wereC�were�A�n’tC�not�weve��A�weC�we�A�veC�have�we’d��A�weC�we�A�’dC�'d�we’d’ve��A�weC�we�A�’dC�would�A�’veC�have�we’ll��A�weC�we�A�’llC�will�we’ll’ve��A�weC�we�A�’llC�will�A�’veC�have�we’re��A�weC�we�A�’reC�are�we’ve��A�weC�we�A�’veC�have�what'd��A�whatC�what�A�'dC�'d�what'd've��A�whatC�what�A�'dC�would�A�'veC�have�what'll��A�whatC�what�A�'llC�will�what'll've��A�whatC�what�A�'llC�will�A�'veC�have�what're��A�whatC�what�A�'reC�are�what's��A�whatC�what�A�'sC�'s�what've��A�whatC�what�A�'ve�whatd��A�whatC�what�A�dC�'d�whatdve��A�whatC�what�A�dC�would�A�veC�have�whatll��A�whatC�what�A�llC�will�whatllve��A�whatC�what�A�llC�will�A�veC�have�whatre��A�whatC�what�A�reC�are�whats��A�whatC�what�A�s�whatve��A�what�A�veC�have�what’d��A�whatC�what�A�’dC�'d�what’d’ve��A�whatC�what�A�’dC�would�A�’veC�have�what’ll��A�whatC�what�A�’llC�will�what’ll’ve��A�whatC�what�A�’llC�will�A�’veC�have�what’re��A�whatC�what�A�’reC�are�what’s��A�whatC�what�A�’sC�'s�what’ve��A�whatC�what�A�’ve�when'd��A�whenC�when�A�'dC�'d�when'd've��A�whenC�when�A�'dC�would�A�'veC�have�when'll��A�whenC�when�A�'llC�will�when'll've��A�whenC�when�A�'llC�will�A�'veC�have�when're��A�whenC�when�A�'reC�are�when's��A�whenC�when�A�'sC�'s�when've��A�whenC�when�A�'ve�whend��A�whenC�when�A�dC�'d�whendve��A�whenC�when�A�dC�would�A�veC�have�whenll��A�whenC�when�A�llC�will�whenllve��A�whenC�when�A�llC�will�A�veC�have�whenre��A�whenC�when�A�reC�are�whens��A�whenC�when�A�s�whenve��A�when�A�veC�have�when’d��A�whenC�when�A�’dC�'d�when’d’ve��A�whenC�when�A�’dC�would�A�’veC�have�when’ll��A�whenC�when�A�’llC�will�
when’ll’ve��A�whenC�when�A�’llC�will�A�’veC�have�when’re��A�whenC�when�A�’reC�are�when’s��A�whenC�when�A�’sC�'s�when’ve��A�whenC�when�A�’ve�where'd��A�whereC�where�A�'dC�'d�where'd've��A�whereC�where�A�'dC�would�A�'veC�have�where'll��A�whereC�where�A�'llC�will�where'll've��A�whereC�where�A�'llC�will�A�'veC�have�where're��A�whereC�where�A�'reC�are�where's��A�whereC�where�A�'sC�'s�where've��A�whereC�where�A�'ve�whered��A�whereC�where�A�dC�'d�wheredve��A�whereC�where�A�dC�would�A�veC�have�wherell��A�whereC�where�A�llC�will�wherellve��A�whereC�where�A�llC�will�A�veC�have�wherere��A�whereC�where�A�reC�are�wheres��A�whereC�where�A�s�whereve��A�where�A�veC�have�where’d��A�whereC�where�A�’dC�'d�where’d’ve��A�whereC�where�A�’dC�would�A�’veC�have�where’ll��A�whereC�where�A�’llC�will�where’ll’ve��A�whereC�where�A�’llC�will�A�’veC�have�where’re��A�whereC�where�A�’reC�are�where’s��A�whereC�where�A�’sC�'s�where’ve��A�whereC�where�A�’ve�who'd��A�whoC�who�A�'dC�'d�who'd've��A�whoC�who�A�'dC�would�A�'veC�have�who'll��A�whoC�who�A�'llC�will�who'll've��A�whoC�who�A�'llC�will�A�'veC�have�who're��A�whoC�who�A�'reC�are�who's��A�whoC�who�A�'sC�'s�who've��A�whoC�who�A�'ve�whod��A�whoC�who�A�dC�'d�whodve��A�whoC�who�A�dC�would�A�veC�have�wholl��A�whoC�who�A�llC�will�whollve��A�whoC�who�A�llC�will�A�veC�have�whos��A�whoC�who�A�s�whove��A�who�A�veC�have�who’d��A�whoC�who�A�’dC�'d�who’d’ve��A�whoC�who�A�’dC�would�A�’veC�have�who’ll��A�whoC�who�A�’llC�will�who’ll’ve��A�whoC�who�A�’llC�will�A�’veC�have�who’re��A�whoC�who�A�’reC�are�who’s��A�whoC�who�A�’sC�'s�who’ve��A�whoC�who�A�’ve�why'd��A�whyC�why�A�'dC�'d�why'd've��A�whyC�why�A�'dC�would�A�'veC�have�why'll��A�whyC�why�A�'llC�will�why'll've��A�whyC�why�A�'llC�will�A�'veC�have�why're��A�whyC�why�A�'reC�are�why's��A�whyC�why�A�'sC�'s�why've��A�whyC�why�A�'ve�whyd��A�whyC�why�A�dC�'d�whydve��A�whyC�why�A�dC�would�A�veC�have�whyll��A�whyC�why�A�llC�will�whyllve��A�whyC�why�A�llC�will�A�veC�have�whyre��A�whyC�why�A�reC�are�whys��A�whyC�why�A�s�whyve��A�why�A�veC�have�why’d��A�whyC�why�A�’dC�'d�why’d’ve��A�whyC�why�A�’dC�would�A�’veC�have�why’ll��A�whyC�why�A�’llC�will�why’ll’ve��A�whyC�why�A�’llC�will�A�’veC�have�why’re��A�whyC�why�A�’reC�are�why’s��A�whyC�why�A�’sC�'s�why’ve��A�whyC�why�A�’ve�won't��A�woC�will�A�n'tC�not�won't've��A�woC�will�A�n'tC�not�A�'veC�have�wont��A�woC�will�A�ntC�not�wontve��A�woC�will�A�ntC�not�A�veC�have�won’t��A�woC�will�A�n’tC�not�won’t’ve��A�woC�will�A�n’tC�not�A�’veC�have�would've��A�wouldC�would�A�'ve�wouldn't��A�wouldC�would�A�n'tC�not�wouldn't've��A�wouldC�would�A�n'tC�not�A�'veC�have�wouldnt��A�wouldC�would�A�ntC�not�wouldntve��A�wouldC�would�A�ntC�not�A�veC�have�wouldn’t��A�wouldC�would�A�n’tC�not�wouldn’t’ve��A�wouldC�would�A�n’tC�not�A�’veC�have�wouldve��A�wouldC�would�A�ve�would’ve��A�wouldC�would�A�’ve�x.��A�x.�xD��A�xD�xDD��A�xDD�y'all��A�y'C�you�A�all�y.��A�y.�yall��A�yC�you�A�all�you'd��A�youC�you�A�'dC�'d�you'd've��A�youC�you�A�'dC�would�A�'veC�have�you'll��A�youC�you�A�'llC�will�you'll've��A�youC�you�A�'llC�will�A�'veC�have�you're��A�youC�you�A�'reC�are�you've��A�youC�you�A�'veC�have�youd��A�youC�you�A�dC�'d�youdve��A�youC�you�A�dC�would�A�veC�have�youll��A�youC�you�A�llC�will�youllve��A�youC�you�A�llC�will�A�veC�have�youre��A�youC�you�A�reC�are�youve��A�youC�you�A�veC�have�you’d��A�youC�you�A�’dC�'d�you’d’ve��A�youC�you�A�’dC�would�A�’veC�have�you’ll��A�youC�you�A�’llC�will�you’ll’ve��A�youC�you�A�’llC�will�A�’veC�have�you’re��A�youC�you�A�’reC�are�you’ve��A�youC�you�A�’veC�have�y’all��A�y’C�you�A�all�z.��A�z.� ��A� C� 
�¯\(ツ)/¯��A�¯\(ツ)/¯�°C.��A�°�A�C�A�.�°F.��A�°�A�F�A�.�°K.��A�°�A�K�A�.�°c.��A�°�A�c�A�.�°f.��A�°�A�f�A�.�°k.��A�°�A�k�A�.�ä.��A�ä.�ö.��A�ö.�ü.��A�ü.�ಠ_ಠ��A�ಠ_ಠ�ಠ︵ಠ��A�ಠ︵ಠ�—��A�—�‘S��A�‘SC�'s�‘s��A�‘sC�'s�’��A�’�’Cause��A�’CauseC�because�’Cos��A�’CosC�because�’Coz��A�’CozC�because�’Cuz��A�’CuzC�because�’S��A�’SC�'s�’bout��A�’boutC�about�’cause��A�’causeC�because�’cos��A�’cosC�because�’coz��A�’cozC�because�’cuz��A�’cuzC�because�’d��A�’d�’em��A�’emC�them�’ll��A�’llC�will�’nuff��A�’nuffC�enough�’re��A�’reC�are�’s��A�’sC�'s�’’��A�’’�faster_heuristics�
|
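The blob above is not meant to be read directly; spaCy restores it when the pipeline directory is loaded. A minimal sketch, assuming spaCy is installed and the directory layout shown in this diff (the sample sentence is illustrative only):

import spacy

# Loads the whole pipeline directory; spaCy deserializes the tokenizer
# blob (punctuation rules and exceptions) together with the textcat weights.
nlp = spacy.load("models/spacy-classifier/model-best")

doc = nlp("This movie wasn't bad at all!")
print([token.text for token in doc])  # tokenizer output
print(doc.cats)                       # textcat label scores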
models/spacy-classifier/model-best/vocab/key2row
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:76be8b528d0075f7aae98d6fa57a6d3c83ae480a8469e668d7b0af968995ac71
+size 1
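These three-line files are Git LFS pointers, not the blobs themselves: a spec version, a sha256 oid, and a size in bytes that git-lfs resolves at checkout. Note that key2row and lookups.bin below carry the identical oid and size 1, so both are the same one-byte placeholder. A hedged sketch of reading a pointer (the path is one example from this diff; every pointer here parses the same way):

# Minimal parse of a Git LFS pointer file: three "key value" lines.
def read_lfs_pointer(path):
    with open(path) as f:
        fields = dict(line.strip().split(" ", 1) for line in f if line.strip())
    return fields["oid"], int(fields["size"])

oid, size = read_lfs_pointer("models/spacy-classifier/model-best/vocab/key2row")
print(oid, size)  # sha256:76be8b... 1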
models/spacy-classifier/model-best/vocab/lookups.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:76be8b528d0075f7aae98d6fa57a6d3c83ae480a8469e668d7b0af968995ac71
+size 1
models/spacy-classifier/model-best/vocab/strings.json
ADDED
The diff for this file is too large to render.
See raw diff
models/spacy-classifier/model-best/vocab/vectors
ADDED
Binary file (128 Bytes). View file
models/spacy-classifier/model-best/vocab/vectors.cfg
ADDED
@@ -0,0 +1,3 @@
+{
+    "mode":"default"
+}
models/tfidf_vectorizer.sav
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a15a3c028d7a5689f8486fc1be474e44a85651bc72de1f0091071ab5562d723b
+size 84347
models/xgb_clf.report
ADDED
@@ -0,0 +1,27 @@
+{
+    "0": {
+        "precision": 0.7127659574468085,
+        "recall": 0.3785310734463277,
+        "f1-score": 0.49446494464944646,
+        "support": 177
+    },
+    "1": {
+        "precision": 0.5849056603773585,
+        "recall": 0.8516483516483516,
+        "f1-score": 0.6935123042505593,
+        "support": 182
+    },
+    "accuracy": 0.6183844011142061,
+    "macro avg": {
+        "precision": 0.6488358089120835,
+        "recall": 0.6150897125473397,
+        "f1-score": 0.5939886244500029,
+        "support": 359
+    },
+    "weighted avg": {
+        "precision": 0.6479454168712099,
+        "recall": 0.6183844011142061,
+        "f1-score": 0.5953747481241053,
+        "support": 359
+    }
+}
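The structure of xgb_clf.report matches scikit-learn's dict-form classification report: per-class precision/recall/F1, overall accuracy, and macro/weighted averages over a 359-sample held-out split. A sketch of how such a file is typically produced, assuming scikit-learn; the toy labels below are hypothetical stand-ins for the real split:

import json
from sklearn.metrics import classification_report

# Toy labels standing in for the real 359-sample held-out split
# on which the XGBoost classifier was evaluated.
y_true = [0, 0, 1, 1, 1, 0]
y_pred = [0, 1, 1, 1, 0, 0]

report = classification_report(y_true, y_pred, output_dict=True)
with open("xgb_clf.report", "w") as f:
    json.dump(report, f, indent=4)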
models/xgb_clf.sav
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:dc9803229fc28ab2acba4859e790f9a01ccafd479f9ff2250ee7fab5cafe8b9f
+size 94725
paths.py
ADDED
@@ -0,0 +1,12 @@
+import os
+
+PROJECT_DIR = os.path.dirname(os.path.abspath(__file__))
+
+MODELS_DIR = os.path.join(PROJECT_DIR, 'models')
+
+FLAIR_MODEL_PATH = os.path.join(MODELS_DIR, 'flair-sentiment-classifier/best-model.pt')
+SPACY_MODEL_PATH = os.path.join(MODELS_DIR, 'spacy-classifier/model-best')
+SETFIT_MODEL_PATH = os.path.join(MODELS_DIR, 'setfit-classifier')
+NB_MODEL_PATH = os.path.join(MODELS_DIR, 'm_nb.sav')
+COUNT_VECTORIZER_PATH = os.path.join(MODELS_DIR, 'count_vectorizer.sav')
+TFIDF_VECTORIZER_PATH = os.path.join(MODELS_DIR, 'tfidf_vectorizer.sav')
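paths.py only centralizes filesystem locations; the loading itself presumably happens in app.py. A hedged usage sketch for the pickled artifacts, assuming the .sav files are pickle dumps of fitted scikit-learn objects (a common convention for that extension):

import pickle

from paths import COUNT_VECTORIZER_PATH, NB_MODEL_PATH

# Assumption: count_vectorizer.sav and m_nb.sav are pickled fitted
# scikit-learn estimators saved during training.
with open(COUNT_VECTORIZER_PATH, "rb") as f:
    vectorizer = pickle.load(f)
with open(NB_MODEL_PATH, "rb") as f:
    model = pickle.load(f)

features = vectorizer.transform(["what a great movie"])
print(model.predict(features))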