Update human_text_detect.py
human_text_detect.py (+2 -2)
@@ -95,7 +95,7 @@ def detect_human_text(model_name, topic, text):
     pval_functions = get_survival_function(df_null, G=43)
 
     min_tokens_per_sentence = 10
-    max_tokens_per_sentence =
+    max_tokens_per_sentence = 50
 
     cache_dir_tokenizer = f"/data/cacheHuggingface/{model_name}/tokenizer"
 
@@ -131,7 +131,7 @@ def detect_human_text(model_name, topic, text):
     detector = DetectLM(sentence_detector, pval_functions,
                         min_len=min_tokens_per_sentence,
                         max_len=max_tokens_per_sentence,
-                        length_limit_policy='
+                        length_limit_policy='max_available',
                         HC_type='not stbl',
                         gamma=0.15,
                         ignore_first_sentence= False
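For context, the two changed lines set max_tokens_per_sentence = 50 and pass length_limit_policy='max_available' to the DetectLM constructor. A minimal usage sketch of the updated function follows; the import path, model name, topic string, and sample text are illustrative assumptions, not values taken from this commit.

# Sketch only: the module path, model name, topic, and text below are placeholders.
from human_text_detect import detect_human_text

sample_text = (
    "First sentence of the passage under test, written out in full. "
    "Second sentence, long enough to pass the 10-token minimum length."
)

# detect_human_text(model_name, topic, text) builds its DetectLM detector with
# max_len=50 and length_limit_policy='max_available', the two values changed here.
result = detect_human_text("gpt2", "example-topic", sample_text)
print(result)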