text (string, 5 to 22M chars) | id (string, 12 to 177 chars) | metadata (dict) | __index_level_0__ (int64, 0 to 1.37k)
---|---|---|---|
import torch
from torch import nn
from modules.BinaryTreeLstmCell import BinaryTreeLstmCell
from modules.LstmRnn import LstmRnn
class BinaryTreeBasedModule(nn.Module):
no_transformation = "no_transformation"
lstm_transformation = "lstm_transformation"
bi_lstm_transformation = "bi_lstm_transformation"
conv_transformation = "conv_transformation"
def __init__(self, input_dim, hidden_dim, leaf_transformation, trans_hidden_dim, dropout_prob):
super().__init__()
self.leaf_transformation = leaf_transformation
if leaf_transformation == BinaryTreeBasedModule.no_transformation:
self.linear = nn.Linear(in_features=input_dim, out_features=2 * hidden_dim)
elif leaf_transformation == BinaryTreeBasedModule.lstm_transformation:
self.lstm = LstmRnn(input_dim, trans_hidden_dim)
self.linear = nn.Linear(in_features=trans_hidden_dim, out_features=2 * hidden_dim)
elif leaf_transformation == BinaryTreeBasedModule.bi_lstm_transformation:
self.lstm_f = LstmRnn(input_dim, trans_hidden_dim)
self.lstm_b = LstmRnn(input_dim, trans_hidden_dim)
self.linear = nn.Linear(in_features=2 * trans_hidden_dim, out_features=2 * hidden_dim)
elif leaf_transformation == BinaryTreeBasedModule.conv_transformation:
self.conv1 = nn.Conv1d(input_dim, trans_hidden_dim, kernel_size=5, padding=2)
self.conv2 = nn.Conv1d(trans_hidden_dim, trans_hidden_dim, kernel_size=3, padding=1)
self.linear = nn.Linear(in_features=trans_hidden_dim, out_features=2 * hidden_dim)
else:
raise ValueError(f'"{leaf_transformation}" is not in the list of possible transformations!')
self.tree_lstm_cell = BinaryTreeLstmCell(hidden_dim, dropout_prob)
        # call the base-class implementation explicitly so that a subclass
        # override of reset_parameters is not invoked during base __init__
        BinaryTreeBasedModule.reset_parameters(self)
def reset_parameters(self):
nn.init.orthogonal_(self.linear.weight)
nn.init.constant_(self.linear.bias, val=0)
self.tree_lstm_cell.reset_parameters()
if self.leaf_transformation == BinaryTreeBasedModule.lstm_transformation:
self.lstm.reset_parameters()
elif self.leaf_transformation == BinaryTreeBasedModule.bi_lstm_transformation:
self.lstm_f.reset_parameters()
self.lstm_b.reset_parameters()
elif self.leaf_transformation == BinaryTreeBasedModule.conv_transformation:
self.conv1.reset_parameters()
self.conv2.reset_parameters()
def forward(self, *inputs):
raise NotImplementedError
def _transform_leafs(self, x, mask):
if self.leaf_transformation == BinaryTreeBasedModule.no_transformation:
pass
elif self.leaf_transformation == BinaryTreeBasedModule.lstm_transformation:
x = self.lstm(x, mask)
elif self.leaf_transformation == BinaryTreeBasedModule.bi_lstm_transformation:
h_f = self.lstm_f(x, mask)
h_b = self.lstm_b(x, mask, backward=True)
x = torch.cat([h_f, h_b], dim=-1)
elif self.leaf_transformation == BinaryTreeBasedModule.conv_transformation:
x = x.permute(0, 2, 1)
x = self.conv1(x)
x = torch.relu(x)
x = self.conv2(x)
x = torch.tanh(x)
x = x.permute(0, 2, 1)
return self.linear(x).tanh().chunk(chunks=2, dim=-1)
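    # Illustrative note (not in the original file): the projection maps each
    # leaf to 2 * hidden_dim features, which chunk(2, dim=-1) splits into the
    # initial TreeLSTM pair (h, c). Assuming batch-first input x of shape
    # [batch, length, input_dim], both returned tensors have shape
    # [batch, length, hidden_dim].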
@staticmethod
def _merge(actions, h_l, c_l, h_r, c_r, h_p, c_p, mask):
"""
        This method merges left and right TreeLSTM states. It reuses already precomputed states for the parent node,
        but still has to apply the correct masking.
"""
cumsum = torch.cumsum(actions, dim=-1)
mask_l = (1.0 - cumsum)[..., None]
mask_r = (cumsum - actions)[..., None]
mask = mask[..., None]
actions = actions[..., None]
h_p = (mask_l * h_l + actions * h_p + mask_r * h_r) * mask + h_l * (1. - mask)
c_p = (mask_l * c_l + actions * c_p + mask_r * c_r) * mask + c_l * (1. - mask)
return h_p, c_p
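    # Minimal sketch (illustrative, not from the original file) of how the
    # masks in _merge select children. With actions one-hot over parent
    # positions, the cumulative sum marks everything to the right of the merge
    # point, so mask_l keeps positions left of the merged pair, actions keeps
    # the new parent, and mask_r keeps positions to its right:
    #
    #   actions = [0, 1, 0]   # merge at position 1
    #   cumsum  = [0, 1, 1]
    #   mask_l  = [1, 0, 0]   # copy left context from h_l / c_l
    #   mask_r  = [0, 0, 1]   # copy right context from h_r / c_r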
|
ContextualSP/compositional_generalization/modules/BinaryTreeBasedModule.py/0
|
{
"file_path": "ContextualSP/compositional_generalization/modules/BinaryTreeBasedModule.py",
"repo_id": "ContextualSP",
"token_count": 1832
}
| 239 |
{
"random_seed": 42,
"numpy_seed": 42,
"pytorch_seed": 42,
"dataset_reader": {
"type": "rewrite",
"lazy": false,
"super_mode": "before",
"joint_encoding": true,
"extra_stop_words": [
"'s",
"besides",
"the",
"in",
"of"
]
},
"model": {
"type": "rewrite",
"word_embedder": {
"tokens": {
"type": "embedding",
"embedding_dim": 100,
"trainable": true,
"padding_index": 0
}
},
"text_encoder": {
"type": "lstm",
"input_size": 100,
"hidden_size": 200,
"bidirectional": true,
"num_layers": 1
},
"inp_drop_rate": 0.2,
"out_drop_rate": 0.2,
"feature_sel": 115,
"loss_weights": [
0.1,
0.4,
0.5
],
"super_mode": "before",
"unet_down_channel": 128,
"enable_training_log": false
},
"iterator": {
"type": "basic",
"batch_size": 4
},
"validation_iterator": {
"type": "basic",
"batch_size": 4
},
"trainer": {
"num_epochs": 100,
"cuda_device": 0,
"patience": 10,
"validation_metric": "+BLEU4",
"optimizer": {
"type": "adam",
"lr": 2e-4
},
"learning_rate_scheduler": {
"type": "reduce_on_plateau",
"factor": 0.5,
"mode": "max",
"patience": 5
},
"num_serialized_models_to_keep": 10,
"should_log_learning_rate": true
}
}
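// Usage sketch (an assumption, mirroring the training scripts under src/ in
// this repository): the config is passed to AllenNLP together with the
// custom packages, e.g.
//   allennlp train -s <model_dir> configs/canard.jsonnet \
//       --include-package data_reader --include-package model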
|
ContextualSP/incomplete_utterance_rewriting/configs/canard.jsonnet/0
|
{
"file_path": "ContextualSP/incomplete_utterance_rewriting/configs/canard.jsonnet",
"repo_id": "ContextualSP",
"token_count": 655
}
| 240 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import argparse
import json
import random
import re
import jieba
import spacy
from tqdm import tqdm
random.seed(42)
nlp_en = spacy.load('en_core_web_sm')
def is_all_chinese(word):
    # check whether the word consists solely of Chinese characters
for _char in word:
if not '\u4e00' <= _char <= '\u9fa5':
return False
return True
def cut_mixed_sentence(text):
    # for Chinese, split into single characters; for English, keep whole words
jieba_words = list(jieba.cut(text))
ret_chars = []
for word in jieba_words:
if is_all_chinese(word):
ret_chars.extend(list(word))
else:
ret_chars.append(word)
return ' '.join(ret_chars)
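# Illustrative behaviour (hedged; the exact tokens depend on jieba's
# segmentation, this is not from the original file):
#   cut_mixed_sentence('我喜欢machine learning')
# splits the Chinese span into single characters ('我', '喜', '欢') while
# 'machine' and 'learning' stay intact, and joins the tokens with spaces.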
def cut_english_sentence(text):
text = re.sub('\t\t', ' ', text)
doc = nlp_en(text)
ret_words = []
for word in doc:
if word.text.strip():
ret_words.append(word.text.lower())
return ' '.join(ret_words)
def unified_dataset_format(dataset_id):
if dataset_id == 'Rewrite':
origin_file = "corpus.txt"
with open(origin_file, "r", encoding="utf8") as f:
total_lines = [line.strip() for line in f.readlines()]
total_len = len(total_lines)
border = int(0.9 * total_len)
train_data = total_lines[:border]
dev_data = total_lines[border:]
for train_ind in range(len(train_data)):
sentences = train_data[train_ind].split('\t\t')
new_sen = []
for sentence in sentences:
new_sen.append(cut_mixed_sentence(sentence))
train_data[train_ind] = '\t\t'.join(new_sen)
for dev_ind in range(len(dev_data)):
sentences = dev_data[dev_ind].split('\t\t')
new_sen = []
for sentence in sentences:
new_sen.append(cut_mixed_sentence(sentence))
dev_data[dev_ind] = '\t\t'.join(new_sen)
with open("train.txt", "w", encoding="utf8") as train_f:
train_f.write('\n'.join(train_data))
with open("dev.txt", "w", encoding="utf8") as dev_f:
dev_f.write('\n'.join(dev_data))
elif dataset_id == 'Multi':
src_files = ["train.sr",
"valid.sr",
"test.sr"]
tgt_files = ["train.tr",
"valid.tr",
"test.tr"]
        for src_file, tgt_file in zip(src_files, tgt_files):
            with open(src_file, "r", encoding="utf8") as src_f, \
                    open(tgt_file, "r", encoding="utf8") as tgt_f:
                src_lines = src_f.readlines()
                tgt_lines = tgt_f.readlines()
# WARNING: there is an annotation bug in test.sr 3224
if 'test' in src_file:
actual_line = src_lines[3222].split("\t")[0]
src_lines[3222] = actual_line + ' 已 经 玩 过 了 |\n'
del src_lines[3223]
dataset = []
for src_line, tgt_line in zip(src_lines, tgt_lines):
src_line = src_line.strip('\n')
tgt_line = tgt_line.strip()
valid_sen = src_line[:src_line.rfind('|')].strip()
border_pos = valid_sen.rfind(' || ')
context_str, cur_str = valid_sen[:border_pos], valid_sen[border_pos + 4:]
context_str = context_str.replace(' <split> ', '\t\t')
context_str += '\t\t' + cur_str + '\t\t' + tgt_line
dataset.append(context_str)
modes = ['train', 'valid', 'test']
write_path = None
for sample_mode in modes:
if sample_mode in src_file:
write_path = sample_mode + ".txt"
break
            with open(write_path, "w", encoding="utf8") as write_f:
                write_f.write('\n'.join(dataset))
elif dataset_id == 'CANARD':
src_files = ["train.json",
"dev.json",
"test.json"]
for src_file in src_files:
content = json.load(open(src_file, "r", encoding="utf8"))
dataset = []
for example in tqdm(content):
sent_history = '\t\t'.join([cut_english_sentence(sen)
for sen in example['History']])
incomplete_sent = cut_english_sentence(example['Question'])
rewrite_sent = cut_english_sentence(example['Rewrite'])
context_str = sent_history + '\t\t' + incomplete_sent + '\t\t' + rewrite_sent
dataset.append(context_str)
modes = ['train', 'dev', 'test']
write_path = None
for sample_mode in modes:
if sample_mode in src_file:
write_path = sample_mode + ".txt"
break
            with open(write_path, "w", encoding="utf8") as write_f:
                write_f.write('\n'.join(dataset))
elif dataset_id == 'Task':
src_file = "CamRest676_annotated.json"
with open(src_file, "r", encoding="utf8") as f:
content = json.load(f)
dataset = []
example_border = 0
for dialogue in tqdm(content):
sent_history = []
for example in dialogue['dial']:
context_str = '\t\t'.join(sent_history[-2:])
if context_str == '':
                    # placeholder when there is no dialogue history yet
                    context_str = 'hello'
complete_str = cut_english_sentence(example['usr']['transcript_complete'])
cur_is_incomplete = False
case_number = 0
if example['usr']['transcript_with_ellipsis'] != "":
cur_is_incomplete = True
dataset.append('\t\t'.join([context_str,
cut_english_sentence(example['usr']['transcript_with_ellipsis']),
complete_str]))
case_number += 1
# TODO: follow the original setting which only considers part of corpus
elif example['usr']['transcript_with_coreference'] != "":
cur_is_incomplete = True
dataset.append('\t\t'.join([context_str,
cut_english_sentence(example['usr']['transcript_with_coreference']),
complete_str]))
case_number += 1
if not cur_is_incomplete:
dataset.append('\t\t'.join([context_str,
complete_str,
complete_str]))
case_number += 1
sent_history.append(cut_english_sentence(complete_str))
sent_history.append(cut_english_sentence(example['sys']['sent']))
if dialogue['dialogue_id'] < 540:
example_border += case_number
        # split into train/dev at the dialogue-id border (no shuffling is applied)
train_data = dataset[:example_border]
dev_data = dataset[example_border:]
with open("train.txt", "w", encoding="utf8") as train_f:
train_f.write('\n'.join(train_data))
with open("dev.txt", "w", encoding="utf8") as dev_f:
dev_f.write('\n'.join(dev_data))
    else:
        raise ValueError(f"Dataset '{dataset_id}' is not supported currently!")
if __name__ == '__main__':
# arg_parser = argparse.ArgumentParser()
# arg_parser.add_argument("--dataset", required=True,
# choices=['Task', 'Rewrite', 'Multi', "CANARD"], type=str,
# help="Please specify a dataset you want to process")
# parsed_args = arg_parser.parse_args()
# unified_dataset_format(parsed_args.dataset)
unified_dataset_format("Multi")
|
ContextualSP/incomplete_utterance_rewriting/preprocess.py/0
|
{
"file_path": "ContextualSP/incomplete_utterance_rewriting/preprocess.py",
"repo_id": "ContextualSP",
"token_count": 4424
}
| 241 |
#!/usr/bin/env bash
export model_file=../checkpoints/run_rewrite_bert_
export config_file=../configs/rewrite_bert.jsonnet
export train_data_path=../dataset/Rewrite/train.txt
export validation_data_path=../dataset/Rewrite/dev.txt
export seed=1
allennlp train -s ${model_file} ${config_file} \
--include-package data_reader \
--include-package model \
-o "{\"random_seed\":\"${seed}\",\"numpy_seed\":\"${seed}\",\"pytorch_seed\":\"${seed}\", \"train_data_path\":\"${train_data_path}\",\"validation_data_path\":\"${validation_data_path}\"}"
|
ContextualSP/incomplete_utterance_rewriting/src/train_rewrite_bert.sh/0
|
{
"file_path": "ContextualSP/incomplete_utterance_rewriting/src/train_rewrite_bert.sh",
"repo_id": "ContextualSP",
"token_count": 191
}
| 242 |
from typing import Dict, Set
from context.db_context import SparcDBContext
from context.utils import Table, TableColumn
Keywords = ['limit', 'des', 'asc', 'and', 'or', 'sum', 'min', 'max', 'avg', 'none', '=', '!=', '<', '>', '<=', '>=',
            'between', 'like', 'not_like', 'in', 'not_in', 'intersect', 'union', 'except', 'count', 'ins']
class GrammarType:
"""
Filter Grammar Type
"""
FilterBetween = 1
FilterEqual = 2
FilterGreater = 3
FilterLess = 4
FilterGeq = 5
FilterLeq = 6
FilterNeq = 7
FilterInNes = 8
FilterNotInNes = 9
FilterLike = 10
FilterNotLike = 11
FilterIs = 12
FilterExist = 13
    # TODO: in and like do not have a nested version
FilterNotNes = 14
FilterBetweenNes = 15
FilterEqualNes = 16
FilterGreaterNes = 17
FilterLessNes = 18
FilterGeqNes = 19
FilterLeqNes = 20
FilterNeqNes = 21
FilterIsNes = 22
FilterExistNes = 23
FilterAnd = 24
FilterOr = 25
# FilterNone = 26
"""
Statement Grammar Type
"""
StateInter = 1
StateUnion = 2
StateExcept = 3
StateNone = 4
"""
Root Grammar Type
"""
RootSFO = 1
RootSO = 2
RootSF = 3
RootS = 4
"""
Select Grammar Type depends on the length of A
"""
"""
A Grammar Type
"""
ANone = 1
AMax = 2
AMin = 3
ACount = 4
ASum = 5
AAvg = 6
"""
Order Grammar Type
"""
OrderNone = 1
OrderAsc = 2
OrderDes = 3
OrderAscLim = 4
OrderDesLim = 5
class Grammar(object):
# static property, production rule to id
productions = None
def __init__(self, db_context: SparcDBContext):
self._pro_counter = 0
self._type_counter = 0
        # lazy loading: build the class-level production list only once
        if self.productions is None:
            self.productions = []
# C and T only contain one rule so they do not need initialization
self.build_production_map(Statement)
self.build_production_map(Root)
self.build_production_map(Select)
self.build_production_map(A)
self.build_production_map(Filter)
self.build_production_map(Order)
self.db_context = db_context
self.local_grammar = self.build_instance_production()
def build_production_map(self, cls):
"""
Record the production rules of class cls into self
:param cls: son class of Action
"""
        # note: iterating grammar_dict keys yields a fixed order only because
        # dicts preserve insertion order, keeping production ids stable
prod_ids = cls.grammar_dict.keys()
for prod_id in prod_ids:
cls_obj = cls(prod_id)
self.productions.append(cls_obj)
def build_instance_production(self):
"""
Instance all possible column and table production rules using db schema
"""
db_schema: Dict[str, Table] = self.db_context.schema
# fetch table name(id)
table_names = sorted([db_schema[table_ind].name for table_ind in
list(db_schema.keys())], reverse=True)
local_grammars = [T(table_name) for table_name in table_names]
all_columns = set()
for table in db_schema.values():
# use name(id) as standard grammar
all_columns.update([C(column.name) for column in table.columns])
column_grammars = list(all_columns)
local_grammars.extend(column_grammars)
        # deduplicate, then sort for a deterministic grammar order
        local_grammars = sorted(set(local_grammars))
return local_grammars
@property
def global_grammar(self):
return sorted(self.productions)
@staticmethod
def default_sql_clause() -> Dict:
default_sql = {
"orderBy": [],
"from": {
"table_units": [
[
"table_unit",
1
]
],
"conds": []
},
"union": None,
"except": None,
"groupBy": None,
"limit": None,
"intersect": None,
"where": [],
"having": [],
"select": [
False,
[
[
3,
[
0,
[
0,
5,
False
],
None
]
]
]
]
}
return default_sql
class Action(object):
grammar_dict = {}
def __init__(self):
self.ins_id = None
self.production = None
def get_next_action(self, is_sketch=False):
actions = list()
for x in self.production.split(' ')[1:]:
if x not in Keywords:
rule_type = eval(x)
if is_sketch:
if rule_type is not A and rule_type is not T:
actions.append(rule_type)
else:
actions.append(rule_type)
return actions
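    # Illustrative example (not in the original file): for the production
    # 'Root Select Filter Order', every token after the left-hand side is a
    # non-terminal, so eval() resolves each name to its Action subclass:
    #   Root(GrammarType.RootSFO).get_next_action() == [Select, Filter, Order]
    # With is_sketch=True, A and T are skipped, since sketches abstract away
    # concrete columns and tables.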
def __repr__(self):
space_ind = self.production.find(' ')
return f'{self.production[:space_ind]} -> {self.production[space_ind + 1:]}'
def is_global(self):
"""
        Global actions apply across the whole dataset, while non-global ones
        (column C and table T) are tied to a specific database instance.
        :return: bool
"""
        return self.__class__ not in [C, T]
def __lt__(self, other):
return self.__repr__() < other.__repr__()
def __hash__(self):
return hash(self.__repr__())
def __eq__(self, other):
return self.__repr__() == other.__repr__()
@staticmethod
def from_str(action_repr: str):
"""
Build an action object from string
:param action_repr: the representation of action
:return: Action object
"""
lhs, rhs = action_repr.split(' -> ')
# eval class object
cls_obj = eval(lhs)
if cls_obj in [C, T]:
return cls_obj(rhs)
else:
# find the rule id
rule_str = ' '.join([lhs, rhs])
grammar_dict: Dict = cls_obj.grammar_dict
rule_id = list(grammar_dict.keys())[list(grammar_dict.values()).index(rule_str)]
return cls_obj(rule_id)
class Statement(Action):
grammar_dict = {
GrammarType.StateInter: 'Statement intersect Root Root',
GrammarType.StateUnion: 'Statement union Root Root',
GrammarType.StateExcept: 'Statement except Root Root',
GrammarType.StateNone: 'Statement Root'
}
def __init__(self, id_c):
super().__init__()
self.ins_id = id_c
self.production = self.grammar_dict[id_c]
class Root(Action):
grammar_dict = {
GrammarType.RootSFO: 'Root Select Filter Order',
GrammarType.RootSF: 'Root Select Filter',
GrammarType.RootSO: 'Root Select Order',
GrammarType.RootS: 'Root Select'
}
def __init__(self, id_c):
super().__init__()
self.ins_id = id_c
self.production = self.grammar_dict[id_c]
class Select(Action):
grammar_dict = {
0: 'Select A',
1: 'Select A A',
2: 'Select A A A',
3: 'Select A A A A',
4: 'Select A A A A A',
5: 'Select A A A A A A'
}
def __init__(self, id_c):
super().__init__()
self.ins_id = id_c
self.production = self.grammar_dict[id_c]
class A(Action):
grammar_dict = {
GrammarType.ANone: 'A none C T',
GrammarType.AMax: 'A max C T',
GrammarType.AMin: 'A min C T',
GrammarType.ACount: 'A count C T',
GrammarType.ASum: 'A sum C T',
GrammarType.AAvg: 'A avg C T'
}
def __init__(self, id_c):
super().__init__()
self.ins_id = id_c
self.production = self.grammar_dict[id_c]
class Filter(Action):
# TODO: why not directly predict the number of Filters
grammar_dict = {
GrammarType.FilterAnd: 'Filter Filter and Filter',
GrammarType.FilterOr: 'Filter Filter or Filter',
GrammarType.FilterEqual: 'Filter = A',
GrammarType.FilterGreater: 'Filter > A',
GrammarType.FilterLess: 'Filter < A',
GrammarType.FilterGeq: 'Filter >= A',
GrammarType.FilterLeq: 'Filter <= A',
GrammarType.FilterNeq: 'Filter != A',
GrammarType.FilterBetween: 'Filter between A',
# TODO: like/not_like only apply to string type
GrammarType.FilterLike: 'Filter like A',
GrammarType.FilterNotLike: 'Filter not_like A',
GrammarType.FilterEqualNes: 'Filter = A Root',
GrammarType.FilterGreaterNes: 'Filter > A Root',
GrammarType.FilterLessNes: 'Filter < A Root',
GrammarType.FilterGeqNes: 'Filter >= A Root',
GrammarType.FilterLeqNes: 'Filter <= A Root',
GrammarType.FilterNeqNes: 'Filter != A Root',
GrammarType.FilterBetweenNes: 'Filter between A Root',
GrammarType.FilterInNes: 'Filter in A Root',
GrammarType.FilterNotInNes: 'Filter not_in A Root',
}
def __init__(self, id_c):
super().__init__()
self.ins_id = id_c
self.production = self.grammar_dict[id_c]
class Order(Action):
grammar_dict = {
GrammarType.OrderAsc: 'Order asc A',
GrammarType.OrderDes: 'Order des A',
GrammarType.OrderAscLim: 'Order asc A limit',
GrammarType.OrderDesLim: 'Order des A limit'
}
def __init__(self, ins_id):
super().__init__()
self.ins_id = ins_id
self.production = self.grammar_dict[ins_id]
# class Ref(Action):
#
# grammar_dict = {
# GrammarType.RefStar: 'Ref *',
# GrammarType.RefCol: 'Ref C',
# }
#
# def __init__(self, ins_id):
# super().__init__()
# self.ins_id = ins_id
# self.production = self.grammar_dict[ins_id]
class C(Action):
def __init__(self, ins_id):
super().__init__()
# TODO: here we lower it because the col -> id (entities_names) in SparcWorld is the lower key-value pair.
self.ins_id = ins_id.lower()
self.production = f'C {self.ins_id}'
class T(Action):
def __init__(self, ins_id):
super().__init__()
self.ins_id = ins_id.lower()
self.production = f'T {self.ins_id}'
# TODO: consider copy value from source sentence
# class V(Action):
# def __init__(self, id_c):
# super().__init__()
# self.id_c = id_c
# self.production = 'V ins'
if __name__ == '__main__':
order_rule = Order(GrammarType.OrderDesLim)
assert order_rule.production == 'Order des A limit'
assert str(order_rule) == 'Order -> des A limit'
sel_rule = Select(1)
assert sel_rule.production == 'Select A A'
assert str(sel_rule) == 'Select -> A A'
col_rule = C('sales')
assert col_rule.production == 'C sales'
assert str(col_rule) == 'C -> sales'
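    # Hedged round-trip sketch (not in the original tests): from_str should
    # invert __repr__ for both global rules and instance rules.
    parsed_order = Action.from_str(str(order_rule))
    assert parsed_order == order_rule
    parsed_col = Action.from_str(str(col_rule))
    assert parsed_col.ins_id == 'sales'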
|
ContextualSP/interactive_text_to_sql/src/context/grammar.py/0
|
{
"file_path": "ContextualSP/interactive_text_to_sql/src/context/grammar.py",
"repo_id": "ContextualSP",
"token_count": 5484
}
| 243 |
# coding: utf-8
import json
all_examples = {
    'train': json.load(open('data/spider/train_spider.json', 'r', encoding='utf-8')),
'dev': json.load(open('data/spider/dev.json', 'r', encoding='utf-8'))
}
def search_for_id(question, split='dev'):
examples = all_examples[split]
for idx, example in enumerate(examples):
if example['question'] == question:
return idx
if __name__ == '__main__':
question = 'Find the last name of the student who has a cat that is age 3.'
print(search_for_id(question))
|
ContextualSP/interactive_text_to_sql/src/utils/tools.py/0
|
{
"file_path": "ContextualSP/interactive_text_to_sql/src/utils/tools.py",
"repo_id": "ContextualSP",
"token_count": 214
}
| 244 |
from collections import defaultdict, Counter, deque
import numpy as np
import random
from gtd import utils
# defines whether an edge is inverted or not
inverted = lambda r: r[:2] == '**'
invert = lambda r: r[2:] if inverted(r) else '**' + r
class Graph(object):
def __init__(self, triples):
self.triples = triples
neighbors = defaultdict(lambda: defaultdict(set))
relation_args = defaultdict(lambda: defaultdict(set))
for s, r, t in triples:
relation_args[r]['s'].add(s)
relation_args[r]['t'].add(t)
neighbors[s][r].add(t)
neighbors[t][invert(r)].add(s)
def freeze(d):
frozen = {}
for key, subdict in d.items():
frozen[key] = {}
for subkey, set_val in subdict.items():
frozen[key][subkey] = tuple(set_val)
return frozen
# WARNING: both neighbors and relation_args must not have default initialization.
# Default init is dangerous, because we sometimes perform uniform sampling over
# all keys in the dictionary. This distribution will get altered if a user asks about
# entities or relations that weren't present.
# self.neighbors[start][relation] = (end1, end2, ...)
# self.relation_args[relation][position] = (ent1, ent2, ...)
# position is either 's' (domain) or 't' (range)
self.neighbors = freeze(neighbors)
self.relation_args = freeze(relation_args)
self.random_entities = []
        # The C++ graph-traversal extension is disabled; methods that rely on
        # self.cpp_graph (random_walk_probs, walk_all, is_trivial_query) will
        # fail until it is provided.
        # cpp_graph = graph_traversal.Graph()
# for s, r, t in triples:
# cpp_graph.add_edge(s, r, t)
# cpp_graph.add_edge(t, invert(r), s)
# self.cpp_graph = cpp_graph
cpp_graph = None
def shortest_path(self, source, target):
# use breadth-first search
queue = deque()
explored = {} # stores backpointers
def enqueue(node, backpointer):
queue.appendleft(node)
explored[node] = backpointer
def path(node):
current = node
path = [current]
while True:
backpointer = explored[current]
if backpointer:
rel, current = backpointer
path.extend((rel, current))
else:
break # we've hit the source
return path[::-1] # reverse
enqueue(source, None)
while len(queue) != 0:
current = queue.pop()
for rel, nbrs in self.neighbors[current].items():
for nbr in nbrs:
if nbr not in explored:
enqueue(nbr, (rel, current))
if nbr == target:
return path(nbr)
def random_walk_probs(self, start, path):
return self.cpp_graph.exact_random_walk_probs(start, list(path))
def walk_all(self, start, path, positive_branch_factor=float('inf')):
if positive_branch_factor == 0:
return set()
approx = positive_branch_factor != float('inf')
if approx:
return set(self.cpp_graph.approx_path_traversal(start, list(path), positive_branch_factor))
else:
return set(self.cpp_graph.path_traversal(start, list(path)))
def is_trivial_query(self, start, path):
return self.cpp_graph.is_trivial_query(start, list(path))
def type_matching_entities(self, path, position):
if position == 's':
r = path[0]
elif position == 't':
r = path[-1]
else:
raise ValueError(position)
try:
if not inverted(r):
return self.relation_args[r][position]
else:
inv_pos = 's' if position == 't' else 't'
return self.relation_args[invert(r)][inv_pos]
except KeyError:
# nothing type-matches
return tuple()
# TODO: test this
def random_walk(self, start, length, no_return=False):
"""
If no_return, the random walk never revisits the same node. Can sometimes return None, None.
"""
max_attempts = 1000
for i in range(max_attempts):
sampled_path = []
visited = set()
current = start
for k in range(length):
visited.add(current)
r = random.choice(list(self.neighbors[current].keys()))
sampled_path.append(r)
candidates = self.neighbors[current][r]
if no_return:
current = utils.sample_excluding(candidates, visited)
else:
current = random.choice(candidates)
# no viable next step
if current is None:
break
# failed to find a viable walk. Try again.
if current is None:
continue
return tuple(sampled_path), current
return None, None
def random_walk_constrained(self, start, path):
"""
Warning! Can sometimes return None.
"""
# if start node isn't present we can't take this walk
if start not in self.neighbors:
return None
current = start
for r in path:
rels = self.neighbors[current]
if r not in rels:
# no viable next steps
return None
current = random.choice(rels[r])
return current
def random_entity(self):
if len(self.random_entities) == 0:
self.random_entities = list(np.random.choice(list(self.neighbors.keys()), size=20000, replace=True))
return self.random_entities.pop()
def relation_stats(self):
stats = defaultdict(dict)
rel_counts = Counter(r for s, r, t in self.triples)
for r, args in self.relation_args.items():
out_degrees, in_degrees = [], []
for s in args['s']:
out_degrees.append(len(self.neighbors[s][r]))
for t in args['t']:
in_degrees.append(len(self.neighbors[t][invert(r)]))
domain = float(len(args['s']))
range = float(len(args['t']))
out_degree = np.mean(out_degrees)
in_degree = np.mean(in_degrees)
stat = {'avg_out_degree': out_degree,
'avg_in_degree': in_degree,
'min_degree': min(in_degree, out_degree),
'in/out': in_degree / out_degree,
'domain': domain,
'range': range,
'r/d': range / domain,
'total': rel_counts[r],
'log(total)': np.log(rel_counts[r])
}
# include inverted relation
inv_stat = {'avg_out_degree': in_degree,
'avg_in_degree': out_degree,
'min_degree': stat['min_degree'],
'in/out': out_degree / in_degree,
'domain': range,
'range': domain,
'r/d': domain / range,
'total': stat['total'],
'log(total)': stat['log(total)']
}
stats[r] = stat
stats[invert(r)] = inv_stat
return stats
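# Minimal usage sketch (illustrative, not from the original file). Only the
# pure-Python parts are exercised; methods backed by self.cpp_graph are not.
if __name__ == '__main__':
    g = Graph([('a', 'r1', 'b'), ('b', 'r2', 'c')])
    # BFS also follows inverted ('**'-prefixed) edges.
    assert g.shortest_path('a', 'c') == ['a', 'r1', 'b', 'r2', 'c']
    assert g.type_matching_entities(['r1'], 's') == ('a',)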
|
ContextualSP/lemon/executor/gtd/graph.py/0
|
{
"file_path": "ContextualSP/lemon/executor/gtd/graph.py",
"repo_id": "ContextualSP",
"token_count": 3785
}
| 245 |
from abc import ABCMeta, abstractmethod
import numpy as np
from strongsup.utils import softmax_with_alpha_beta
from strongsup.value import check_denotation
from strongsup.value_function import ConstantValueFunction
class CaseWeighter(object, metaclass=ABCMeta):
@abstractmethod
def __call__(self, paths, example):
"""Compute update weights for all ParseCases in a batch of ParsePaths.
Args:
paths (list[ParsePath])
example (Example): the Example for which the ParsePaths were sampled
Returns:
weights (list[list[float]]): one weight for each ParseCase
"""
pass
class MMLCaseWeighter(CaseWeighter):
def __init__(self, alpha, beta, parse_model):
self._alpha = alpha
self._beta = beta
self._parse_model = parse_model
def _destroy_path_scores(self, paths):
# A bit of an information-hiding hack.
# Now that the path weighter has used the path scores, prevent anyone else from using them by overwriting
# them with None
for path in paths:
for case in path:
case.choice_logits = None
case.choice_log_probs = None
def _weight_paths(self, paths, example):
# paths may have incorrect scores, left there by some exploration policy
self._parse_model.score_paths(
paths, ignore_previous_utterances=False, caching=False)
log_probs = [] # log p(z | x) + log p(y | z)
for path in paths:
z_given_x = path.log_prob
y_given_z = 0 if check_denotation(example.answer, path.finalized_denotation) else float('-inf')
lp = z_given_x + y_given_z
log_probs.append(lp)
log_probs = np.array(log_probs)
self._destroy_path_scores(paths) # destroy scores so no one else misuses them
# if every probability is 0, the softmax downstream will compute 0/0 = NaN.
# We will assume 0/0 = 0
if np.all(log_probs == float('-inf')):
return np.zeros(len(paths))
# Reweight with alpha and beta
weights_alpha = softmax_with_alpha_beta(log_probs, self._alpha, self._beta)
assert np.all(np.isfinite(weights_alpha))
return weights_alpha
def __call__(self, paths, example):
path_weights = self._weight_paths(paths, example)
case_weights = []
for path, path_wt in zip(paths, path_weights):
case_weights.append([path_wt] * len(path))
return case_weights
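# Worked sketch of the MML weighting (illustrative numbers, not from the
# original file): suppose two sampled paths score log p(z|x) = log 0.3 and
# log 0.1, and only the first reaches the correct denotation. The second
# path's log-prob becomes -inf, so after the softmax (with alpha=1, beta=1)
# the first path carries weight 1.0 and every case on it is updated with that
# weight, while the incorrect path contributes 0.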
class REINFORCECaseWeighter(CaseWeighter):
def __init__(self, correct_weight, incorrect_weight, value_function):
"""Weights the cases according to REINFORCE
Args:
correct_weight (float): the weight that each case should get if the
denotation is correct
incorrect_weight (float): weight for incorrect denotations
value_function (StateValueFunction): assigns a value to each state to
be subtracted as a baseline
"""
self._correct_weight = correct_weight
self._incorrect_weight = incorrect_weight
self._value_function = value_function
def __call__(self, paths, example):
path_weights = self._weight_paths(paths, example)
cases = [case for path in paths for case in path]
state_values = self._value_function.values(cases)
case_weights = []
index = 0
for path, path_weight in zip(paths, path_weights):
case_weights_for_path = []
for case in path:
case_weights_for_path.append(path_weight - state_values[index])
index += 1
case_weights.append(case_weights_for_path)
return case_weights
def _weight_paths(self, paths, example):
# TODO: Destroy path scores?
return [self._correct_weight
if check_denotation(example.answer, path.finalized_denotation)
else self._incorrect_weight for path in paths]
def get_case_weighter(config, parse_model, value_function):
"""Creates the correct CaseWeighter from the Config
Args:
config (Config): the config
parse_model (ParseModel): the parse model that the case weighter
will use
value_function (ValueFunction): the value function that the case
weighter will use
Returns:
CaseWeighter
"""
if config.type == 'mml':
# Make sure we're not using a ValueFunction if it's MML
assert type(value_function) is ConstantValueFunction
assert value_function.constant_value == 0
return MMLCaseWeighter(config.alpha, config.beta, parse_model)
elif config.type == 'reinforce':
return REINFORCECaseWeighter(
config.correct_weight, config.incorrect_weight, value_function)
else:
raise ValueError('CaseWeighter {} not supported.'.format(config.type))
|
ContextualSP/lemon/executor/strongsup/case_weighter.py/0
|
{
"file_path": "ContextualSP/lemon/executor/strongsup/case_weighter.py",
"repo_id": "ContextualSP",
"token_count": 2087
}
| 246 |
from strongsup.value import Value
class RLongStateValue(Value):
"""Value based on RLongState."""
def __init__(self, state):
self._state = state
def __repr__(self):
return repr(self._state)
@property
def state(self):
return self._state
def __eq__(self, other):
return (isinstance(other, self.__class__)
and self._state == other._state)
def match(self, other):
return self._state == other._state
|
ContextualSP/lemon/executor/strongsup/rlong/value.py/0
|
{
"file_path": "ContextualSP/lemon/executor/strongsup/rlong/value.py",
"repo_id": "ContextualSP",
"token_count": 197
}
| 247 |
import copy
import os
import pytest
import shutil
from strongsup.results.tracker import LeafTracker, TopLevelTracker
from strongsup.results.entry import Entry, ExperimentType
from strongsup.results.result_value import ResultValue
class TestTracker(object):
@pytest.fixture
def filters(self):
return ["match", "other"]
@pytest.fixture
def result(self):
return ResultValue([1, 2, 3, 4, 5], [2, 3, 4, 5, 6])
@pytest.fixture
def experiment_types(self):
match = ExperimentType(["should-match", "config"], "base")
also_match = ExperimentType(["config", "other"], "base")
no_match = ExperimentType(["filter"], "base")
other = ExperimentType(["config"], "base")
return [match, also_match, no_match, other]
def _entries_equal(self, entries, expected_entries):
"""Returns if two lists of entries contain equal entries"""
return sorted(entries, key=lambda entry: str(entry)) == sorted(
expected_entries, key=lambda entry: str(entry))
class TestLeafTracker(TestTracker):
def test_merge(self, tracker, result, experiment_types):
tracker.add_result(experiment_types[0], 0, result)
expected_entry = Entry(experiment_types[0])
expected_entry.add_seed(0, result)
expected_entries = [expected_entry]
# Test merge of two seeds
other = LeafTracker("other")
other.add_result(experiment_types[0], 1, result * 2)
tracker.merge(other)
expected_entry.add_seed(1, result * 2)
assert tracker.entries() == expected_entries
# Test merge on two Entries
other = LeafTracker("other")
other.add_result(experiment_types[1], 0, result)
tracker.merge(other)
expected_entry = Entry(experiment_types[1])
expected_entry.add_seed(0, result)
expected_entries.append(expected_entry)
self._entries_equal(tracker.entries(), expected_entries)
# Test merge updates to best seed
other = LeafTracker("other")
other.add_result(experiment_types[1], 0, result * 2)
tracker.merge(other)
expected_entry.update_seed(0, result * 2)
self._entries_equal(tracker.entries(), expected_entries)
def test_entries(self, tracker, result, experiment_types, filters):
        # Make sure the tracker is empty upon construction
entries = tracker.entries()
assert len(entries) == 0
# Test filtering
# No matches
tracker.add_result(experiment_types[2], 0, result)
entries = tracker.entries(filters)
assert len(entries) == 0
# Matches both
expected_entry = Entry(experiment_types[0])
expected_entry.add_seed(0, result)
expected_entry.add_seed(1, result * 2)
expected_entries = [expected_entry]
expected_entry = Entry(experiment_types[1])
expected_entry.add_seed(0, result)
expected_entries.append(expected_entry)
tracker.add_result(experiment_types[1], 0, result)
tracker.add_result(experiment_types[0], 0, result)
tracker.add_result(experiment_types[0], 1, result * 2)
entries = tracker.entries(filters)
assert self._entries_equal(entries, expected_entries)
def test_add_entry(self, tracker, result, experiment_types):
# Test adding a single entry
tracker.add_result(experiment_types[0], 0, result)
entries = tracker.entries()
expected_entry = Entry(experiment_types[0])
expected_entry.add_seed(0, result)
expected_entries = [expected_entry]
assert entries == expected_entries
# Test adding a duplicate entry
with pytest.raises(ValueError) as excinfo:
tracker.add_result(experiment_types[0], 0, result*2)
assert excinfo.match("Seed 0 already in Entry")
# Test adding multiple seeds
tracker.add_result(experiment_types[0], 1, result * 2)
entries = tracker.entries()
expected_entry.add_seed(1, result * 2)
assert entries == expected_entries
# Test adding multiple entries
tracker.add_result(experiment_types[1], 0, result)
entries = tracker.entries()
expected_entry = Entry(experiment_types[1])
expected_entry.add_seed(0, result)
expected_entries.append(expected_entry)
assert self._entries_equal(entries, expected_entries)
@pytest.fixture
def tracker(self):
tracker = LeafTracker("name")
return tracker
class TestTopLevelTracker(TestTracker):
def test_register_result(self, result, experiment_types,
datasets, teardown_tensorboard):
# Clear out previous tracker
if os.path.exists(TopLevelTracker("test_tracker").filename):
os.remove(TopLevelTracker("test_tracker").filename)
# Register result
with TopLevelTracker("test_tracker") as tracker:
tracker.register_result(
datasets[0], experiment_types[0], 0, ".")
assert tracker.entries() == []
# Make sure that result gets loaded
with TopLevelTracker("test_tracker") as tracker:
expected_entry = Entry(experiment_types[0])
expected_entry.add_seed(0, ResultValue([0.0] * 5, [0.0] * 5))
expected_entries = [expected_entry]
assert tracker.entries() == expected_entries
# Update result
shutil.move("tensorboard", "backup")
shutil.move("other_tensorboard", "tensorboard")
result = ResultValue(
[0.9396985173225403, 0.839195966720581, 0.6281406879425049,
0.49246230721473694, 0.3467336595058441],
[0.9012500047683716, 0.8087499737739563, 0.6499999761581421,
0.4737499952316284, 0.3449999988079071])
# Make sure result gets loaded again
with TopLevelTracker("test_tracker") as tracker:
entries = tracker.entries()
expected_entry.update_seed(0, result)
assert tracker.entries() == expected_entries
# Make sure result doesn't change
with TopLevelTracker("test_tracker") as tracker:
entries = tracker.entries()
expected_entry.update_seed(0, result)
assert tracker.entries() == expected_entries
@pytest.fixture
def teardown_tensorboard(self):
yield
# Restore files to correct place
shutil.move("tensorboard", "other_tensorboard")
shutil.move("backup", "tensorboard")
def test_persist(self, result, experiment_types, datasets):
# Clear out previous tracker
if os.path.exists(TopLevelTracker("test_tracker").filename):
os.remove(TopLevelTracker("test_tracker").filename)
# Test reloading empty tracker
with TopLevelTracker("test_tracker") as tracker:
clone = copy.deepcopy(tracker)
assert clone == TopLevelTracker("test_tracker")
# Test reloading non-empty tracker
with TopLevelTracker("test_tracker") as tracker:
# Multiple datasets
tracker.add_result(datasets[0], experiment_types[0], 0, result)
tracker.add_result(datasets[1], experiment_types[0], 0, result * 2)
tracker.add_result(datasets[2], experiment_types[0], 0, result * 3)
# Multiple entries per dataset
tracker.add_result(datasets[0], experiment_types[1], 0, result)
# Multiple seeds per entry
tracker.add_result(datasets[0], experiment_types[1], 1, result * 2)
clone = copy.deepcopy(tracker)
assert clone == TopLevelTracker("test_tracker")
def test_merge(self, tracker, result, experiment_types, datasets):
# Merge two empty trackers
other = TopLevelTracker("other")
tracker.merge(other)
assert tracker.entries() == []
# Merge non-empty into empty
other.add_result(datasets[0], experiment_types[0], 0, result)
tracker.merge(other)
entries = tracker.entries()
expected_entry = Entry(experiment_types[0])
expected_entry.add_seed(0, result)
expected_entries = [expected_entry]
assert self._entries_equal(entries, expected_entries)
# Merge empty into non-empty
other = TopLevelTracker("other")
tracker.merge(other)
entries = tracker.entries()
assert self._entries_equal(entries, expected_entries)
# Merge two different datasets
other.add_result(datasets[1], experiment_types[0], 0, result)
tracker.merge(other)
entries = tracker.entries()
expected_entry = Entry(experiment_types[0])
expected_entry.add_seed(0, result)
expected_entries.append(expected_entry)
assert self._entries_equal(entries, expected_entries)
# Merge on same dataset
other = TopLevelTracker("other")
other.add_result(datasets[0], experiment_types[1], 0, result)
tracker.merge(other)
entries = tracker.entries()
expected_entry = Entry(experiment_types[1])
expected_entry.add_seed(0, result)
expected_entries.append(expected_entry)
assert self._entries_equal(entries, expected_entries)
# Merge on same Entry
other = TopLevelTracker("other")
other.add_result(datasets[0], experiment_types[0], 1, result)
tracker.merge(other)
entries = tracker.entries()
expected_entries[0].add_seed(1, result)
assert self._entries_equal(entries, expected_entries)
# Merge on same seed
other = TopLevelTracker("other")
other.add_result(datasets[0], experiment_types[0], 1, result * 2)
tracker.merge(other)
expected_entries[0].update_seed(1, result * 2)
assert self._entries_equal(entries, expected_entries)
def test_entries(self, tracker, result, experiment_types,
filters, datasets):
# Empty at beginning
assert tracker.entries() == []
# Filter on experiment type
tracker.add_result(datasets[0], experiment_types[2], 0, result)
entries = tracker.entries(experiment_type_filters=filters)
assert entries == []
# Filter on dataset
tracker.add_result(datasets[2], experiment_types[0], 0, result)
entries = tracker.entries(dataset_filters=filters)
expected_entry = Entry(experiment_types[2])
expected_entry.add_seed(0, result)
expected_entries = [expected_entry]
assert self._entries_equal(entries, expected_entries)
# Filter on experiment type and dataset
entries = tracker.entries(dataset_filters=filters,
experiment_type_filters=filters)
assert entries == []
# Match both
tracker.add_result(datasets[1], experiment_types[1], 1, result * 2)
expected_entry = Entry(experiment_types[1])
expected_entry.add_seed(1, result * 2)
expected_entries = [expected_entry]
entries = tracker.entries(dataset_filters=filters,
experiment_type_filters=filters)
assert self._entries_equal(entries, expected_entries)
def test_add_result(self, tracker, result, experiment_types, datasets):
# Add a single result
tracker.add_result(datasets[0], experiment_types[0], 0, result)
entries = tracker.entries()
expected_entry = Entry(experiment_types[0])
expected_entry.add_seed(0, result)
expected_entries = [expected_entry]
assert entries == expected_entries
# Add multiple results to same dataset
tracker.add_result(datasets[0], experiment_types[1], 0, result)
entries = tracker.entries()
expected_entry = Entry(experiment_types[1])
expected_entry.add_seed(0, result)
expected_entries.append(expected_entry)
assert self._entries_equal(entries, expected_entries)
# Add invalid result to same dataset
with pytest.raises(ValueError) as excinfo:
tracker.add_result(datasets[0], experiment_types[1], 0, result * 2)
assert excinfo.match("Seed 0 already in Entry")
assert self._entries_equal(entries, expected_entries)
# Add to multiple datasets
tracker.add_result(datasets[1], experiment_types[0], 0, result)
entries = tracker.entries()
expected_entry = Entry(experiment_types[0])
expected_entry.add_seed(0, result)
expected_entries.append(expected_entry)
assert self._entries_equal(entries, expected_entries)
@pytest.fixture
def tracker(self):
tracker = TopLevelTracker("tracker")
return tracker
@pytest.fixture
def datasets(self):
return ["match-dataset", "dataset-other", "filtered-dataset"]
|
ContextualSP/lemon/executor/strongsup/tests/results/test_tracker.py/0
|
{
"file_path": "ContextualSP/lemon/executor/strongsup/tests/results/test_tracker.py",
"repo_id": "ContextualSP",
"token_count": 5367
}
| 248 |
import copy
import random
from collections.abc import MutableMapping
import numpy as np
import tensorflow as tf
# End of utterance token
EOU = '<EOU>'
def epsilon_greedy_sample(choices, num_to_sample, epsilon=0.05):
"""Samples without replacement num_to_sample choices from choices
where the ith choice is choices[i] with prob 1 - epsilon, and
uniformly at random with prob epsilon
Args:
choices (list[Object]): a list of choices
num_to_sample (int): number of things to sample
epsilon (float): probability to deviate
Returns:
list[Object]: list of size num_to_sample choices
"""
assert(len(choices) >= num_to_sample)
assert(0 <= epsilon <= 1)
    if len(choices) == num_to_sample:
        return choices
    # fast path: with no exploration, just take the top num_to_sample directly
    if epsilon == 0:
        return choices[:num_to_sample]
sample = []
index_choices = list(range(len(choices)))
for i in range(num_to_sample):
        if random.random() <= epsilon or i not in index_choices:
choice_index = random.choice(index_choices)
else:
choice_index = i
index_choices.remove(choice_index)
sample.append(choices[choice_index])
return sample
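# Illustrative behaviour (not in the original file): with epsilon=0 the top
# candidates are returned deterministically,
#   epsilon_greedy_sample(['a', 'b', 'c'], 2, epsilon=0) == ['a', 'b']
# while with epsilon > 0 each position independently deviates to a uniformly
# random remaining choice with probability epsilon.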
def softmax(stuff):
"""Compute [exp(x) / S for x in stuff] where S = sum(exp(x) for x in stuff)"""
stuff = np.array(stuff)
stuff = np.exp(stuff - np.max(stuff))
return stuff / np.sum(stuff)
def softmax_with_alpha_beta(stuff, alpha, beta):
"""Compute [exp(x*beta) / T * S^(1-alpha) for x in stuff]
where S = sum(exp(x) for x in stuff)
and T = sum(exp(x*beta) for x in stuff)
Assume that alpha >= 0 and beta >= 0.
"""
stuff = np.array(stuff)
stuff_times_beta = np.array([
x * beta if x != float('-inf') else float('-inf')
for x in stuff])
m = np.max(stuff)
return np.exp(
stuff_times_beta
- (m * beta + np.log(np.sum(np.exp(stuff_times_beta - m * beta))))
+ (1 - alpha) * (m + np.log(np.sum(np.exp(stuff - m)))))
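# Sanity check (illustrative, not from the original file): with alpha = 1 and
# beta = 1 the alpha/beta reweighting reduces to the plain softmax.
#   x = [1.0, 2.0, 3.0]
#   np.allclose(softmax_with_alpha_beta(x, 1.0, 1.0), softmax(x))  # -> True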
def sample_with_replacement(stuff, probs, num_to_sample):
"""Samples num_to_sample total elements from stuff.
Returns:
list: list of elements
"""
indices = np.random.choice(
len(stuff), size=num_to_sample, replace=True, p=probs)
return [stuff[index] for index in indices]
class PredicateList(object):
"""list[Predicate] but with fast index lookup"""
def __init__(self, predicates):
self.predicates = predicates
self.predicate_to_index = {x.name: i for (i, x) in enumerate(predicates)}
def index(self, x):
return self.predicate_to_index[x.name]
def __iter__(self):
return iter(self.predicates)
def __len__(self):
return len(self.predicates)
def __repr__(self):
return repr(self.predicates)
def __getitem__(self, i):
return self.predicates[i]
class OptimizerOptions(object):
    """Light-weight wrapper around options for Optimizers

    Args:
        opt_str (string): the string, needs to be in VALID_OPTIONS
    """
    SGD = "sgd"
    ADAM = "adam"
    VALID_OPTIONS = [SGD, ADAM]
def __init__(self, opt_str):
if opt_str not in OptimizerOptions.VALID_OPTIONS:
raise ValueError(
"{} not a valid optimizer option".format(opt_str))
self._opt = opt_str
@property
def opt(self):
return self._opt
def get_optimizer(optimizer_opt):
assert type(optimizer_opt) is OptimizerOptions
if optimizer_opt.opt == OptimizerOptions.SGD:
return tf.train.GradientDescentOptimizer
elif optimizer_opt.opt == OptimizerOptions.ADAM:
return tf.train.AdamOptimizer
else:
raise ValueError("This should never happen")
|
ContextualSP/lemon/executor/strongsup/utils.py/0
|
{
"file_path": "ContextualSP/lemon/executor/strongsup/utils.py",
"repo_id": "ContextualSP",
"token_count": 1578
}
| 249 |
This repository contains tools for generating datasets and evaluating predictions for the following [AI2 Leaderboards](https://leaderboard.allenai.org/):
* [ARC (AI2 Reasoning Challenge)](arc/)
* [OpenBook QA](openbookqa/)
* [ProPara](propara/)
* [QASC](qasc/)
* [SciTail](scitail/)
* [eQASC](eqasc/)
|
ContextualSP/lemon/propara_evaluator/aristo-leaderboard/README.md/0
|
{
"file_path": "ContextualSP/lemon/propara_evaluator/aristo-leaderboard/README.md",
"repo_id": "ContextualSP",
"token_count": 101
}
| 250 |
{ "id": "question1", "answerKey": "C" }
{ "id": "question2", "answerKey": "B" }
{ "id": "question3", "answerKey": "C" }
{ "id": "question4", "answerKey": "D" }
{ "id": "question5", "answerKey": "D" }
|
ContextualSP/lemon/propara_evaluator/aristo-leaderboard/arc/evaluator/questions.jsonl/0
|
{
"file_path": "ContextualSP/lemon/propara_evaluator/aristo-leaderboard/arc/evaluator/questions.jsonl",
"repo_id": "ContextualSP",
"token_count": 85
}
| 251 |
{"score": 0.2023383378982544, "chain_id": "3C44YUNSI1OBFBB8D36GODNOZN9DPA_1_1"}
{"score": 0.5158032774925232, "chain_id": "3C44YUNSI1OBFBB8D36GODNOZN9DPA_1_2"}
{"score": 0.17925743758678436, "chain_id": "3C44YUNSI1OBFBB8D36GODNOZN9DPA_1_5"}
{"score": 0.8793290853500366, "chain_id": "3C44YUNSI1OBFBB8D36GODNOZN9DPA_1_7"}
{"score": 0.49962201714515686, "chain_id": "3C44YUNSI1OBFBB8D36GODNOZN9DPA_1_3"}
{"score": 0.318893164396286, "chain_id": "3C44YUNSI1OBFBB8D36GODNOZN9DPA_1_4"}
{"score": 0.042609114199876785, "chain_id": "3C44YUNSI1OBFBB8D36GODNOZN9DPA_1_6"}
{"score": 0.4866274893283844, "chain_id": "3C44YUNSI1OBFBB8D36GODNOZN9DPA_1_8"}
{"score": 0.17660178244113922, "chain_id": "3C44YUNSI1OBFBB8D36GODNOZN9DPA_1_9"}
{"score": 0.022419992834329605, "chain_id": "3C44YUNSI1OBFBB8D36GODNOZN9DPA_1_10"}
{"score": 0.9762198328971863, "chain_id": "3B1NLC6UGZVERVLZFT7OUYQLD1SGPZ_1_1"}
{"score": 0.5939199924468994, "chain_id": "3B1NLC6UGZVERVLZFT7OUYQLD1SGPZ_1_2"}
{"score": 0.13692770898342133, "chain_id": "3B1NLC6UGZVERVLZFT7OUYQLD1SGPZ_1_3"}
{"score": 0.06807658821344376, "chain_id": "3B1NLC6UGZVERVLZFT7OUYQLD1SGPZ_1_4"}
{"score": 0.3188892602920532, "chain_id": "3B1NLC6UGZVERVLZFT7OUYQLD1SGPZ_1_5"}
{"score": 0.07258988916873932, "chain_id": "3B1NLC6UGZVERVLZFT7OUYQLD1SGPZ_1_6"}
{"score": 0.046394575387239456, "chain_id": "3B1NLC6UGZVERVLZFT7OUYQLD1SGPZ_1_7"}
{"score": 0.04906206950545311, "chain_id": "3B1NLC6UGZVERVLZFT7OUYQLD1SGPZ_1_8"}
{"score": 0.046142932027578354, "chain_id": "3B1NLC6UGZVERVLZFT7OUYQLD1SGPZ_1_9"}
{"score": 0.053280651569366455, "chain_id": "3B1NLC6UGZVERVLZFT7OUYQLD1SGPZ_1_10"}
{"score": 0.09263954311609268, "chain_id": "3QRYMNZ7FYGITFVSJET3PS0F4S0NT9_1_1"}
{"score": 0.16910839080810547, "chain_id": "3QRYMNZ7FYGITFVSJET3PS0F4S0NT9_1_2"}
{"score": 0.027015184983611107, "chain_id": "3QRYMNZ7FYGITFVSJET3PS0F4S0NT9_1_3"}
{"score": 0.07709699869155884, "chain_id": "3QRYMNZ7FYGITFVSJET3PS0F4S0NT9_1_4"}
{"score": 0.0625581368803978, "chain_id": "3QRYMNZ7FYGITFVSJET3PS0F4S0NT9_1_5"}
{"score": 0.03083304688334465, "chain_id": "3QRYMNZ7FYGITFVSJET3PS0F4S0NT9_1_6"}
{"score": 0.04556988552212715, "chain_id": "3QRYMNZ7FYGITFVSJET3PS0F4S0NT9_1_7"}
{"score": 0.032626792788505554, "chain_id": "3QRYMNZ7FYGITFVSJET3PS0F4S0NT9_1_8"}
{"score": 0.2351386696100235, "chain_id": "3QRYMNZ7FYGITFVSJET3PS0F4S0NT9_1_9"}
{"score": 0.021611249074339867, "chain_id": "3QRYMNZ7FYGITFVSJET3PS0F4S0NT9_1_10"}
{"score": 0.3319288492202759, "chain_id": "336KAV9KYQRILF5T71II5LPW6IJ2YE_1_1"}
{"score": 0.3393683135509491, "chain_id": "336KAV9KYQRILF5T71II5LPW6IJ2YE_1_2"}
{"score": 0.1019323542714119, "chain_id": "336KAV9KYQRILF5T71II5LPW6IJ2YE_1_3"}
{"score": 0.17231668531894684, "chain_id": "336KAV9KYQRILF5T71II5LPW6IJ2YE_1_4"}
{"score": 0.10625903308391571, "chain_id": "336KAV9KYQRILF5T71II5LPW6IJ2YE_1_5"}
{"score": 0.3550889194011688, "chain_id": "336KAV9KYQRILF5T71II5LPW6IJ2YE_1_6"}
{"score": 0.24990414083003998, "chain_id": "336KAV9KYQRILF5T71II5LPW6IJ2YE_1_7"}
{"score": 0.49256256222724915, "chain_id": "336KAV9KYQRILF5T71II5LPW6IJ2YE_1_8"}
{"score": 0.4175323247909546, "chain_id": "336KAV9KYQRILF5T71II5LPW6IJ2YE_1_9"}
{"score": 0.289831280708313, "chain_id": "336KAV9KYQRILF5T71II5LPW6IJ2YE_1_10"}
{"score": 0.8398701548576355, "chain_id": "3NJM2BJS4W51AJ5UD7B54756E49CPJ_1_1"}
{"score": 0.752326488494873, "chain_id": "3NJM2BJS4W51AJ5UD7B54756E49CPJ_1_2"}
{"score": 0.17661374807357788, "chain_id": "3NJM2BJS4W51AJ5UD7B54756E49CPJ_1_3"}
{"score": 0.08687683194875717, "chain_id": "3NJM2BJS4W51AJ5UD7B54756E49CPJ_1_4"}
{"score": 0.07977458834648132, "chain_id": "3NJM2BJS4W51AJ5UD7B54756E49CPJ_1_5"}
{"score": 0.3049956262111664, "chain_id": "3NJM2BJS4W51AJ5UD7B54756E49CPJ_1_6"}
{"score": 0.13215121626853943, "chain_id": "3NJM2BJS4W51AJ5UD7B54756E49CPJ_1_7"}
{"score": 0.09796953946352005, "chain_id": "3NJM2BJS4W51AJ5UD7B54756E49CPJ_1_8"}
{"score": 0.3386376202106476, "chain_id": "3NJM2BJS4W51AJ5UD7B54756E49CPJ_1_9"}
{"score": 0.07817163318395615, "chain_id": "3NJM2BJS4W51AJ5UD7B54756E49CPJ_1_10"}
{"score": 0.37462639808654785, "chain_id": "33FOTY3KEMKYTRMSS50F3BN8CL3C1A_1_1"}
{"score": 0.7736762762069702, "chain_id": "33FOTY3KEMKYTRMSS50F3BN8CL3C1A_1_3"}
{"score": 0.08248872309923172, "chain_id": "33FOTY3KEMKYTRMSS50F3BN8CL3C1A_1_2"}
{"score": 0.17387132346630096, "chain_id": "33FOTY3KEMKYTRMSS50F3BN8CL3C1A_1_4"}
{"score": 0.3566812574863434, "chain_id": "33FOTY3KEMKYTRMSS50F3BN8CL3C1A_1_5"}
{"score": 0.18837140500545502, "chain_id": "33FOTY3KEMKYTRMSS50F3BN8CL3C1A_1_6"}
{"score": 0.0988221988081932, "chain_id": "33FOTY3KEMKYTRMSS50F3BN8CL3C1A_1_7"}
{"score": 0.12544329464435577, "chain_id": "33FOTY3KEMKYTRMSS50F3BN8CL3C1A_1_8"}
{"score": 0.08482809364795685, "chain_id": "33FOTY3KEMKYTRMSS50F3BN8CL3C1A_1_9"}
{"score": 0.08082888275384903, "chain_id": "33FOTY3KEMKYTRMSS50F3BN8CL3C1A_1_10"}
{"score": 0.9133855700492859, "chain_id": "33OOO72IVHKZ2BY1UOKP9H631AGCTE_1_5"}
{"score": 0.875912070274353, "chain_id": "33OOO72IVHKZ2BY1UOKP9H631AGCTE_1_9"}
{"score": 0.8060799241065979, "chain_id": "33OOO72IVHKZ2BY1UOKP9H631AGCTE_1_10"}
{"score": 0.7857192754745483, "chain_id": "33OOO72IVHKZ2BY1UOKP9H631AGCTE_1_1"}
{"score": 0.04883244261145592, "chain_id": "33OOO72IVHKZ2BY1UOKP9H631AGCTE_1_2"}
{"score": 0.13211819529533386, "chain_id": "33OOO72IVHKZ2BY1UOKP9H631AGCTE_1_3"}
{"score": 0.2612411081790924, "chain_id": "33OOO72IVHKZ2BY1UOKP9H631AGCTE_1_4"}
{"score": 0.4621364176273346, "chain_id": "33OOO72IVHKZ2BY1UOKP9H631AGCTE_1_6"}
{"score": 0.10293852537870407, "chain_id": "33OOO72IVHKZ2BY1UOKP9H631AGCTE_1_7"}
{"score": 0.09142011404037476, "chain_id": "33OOO72IVHKZ2BY1UOKP9H631AGCTE_1_8"}
{"score": 0.06857550889253616, "chain_id": "3JBT3HLQF81EICG45LVDF56RLSYPZ8_1_1"}
{"score": 0.25306063890457153, "chain_id": "3JBT3HLQF81EICG45LVDF56RLSYPZ8_1_2"}
{"score": 0.11338400840759277, "chain_id": "3JBT3HLQF81EICG45LVDF56RLSYPZ8_1_3"}
{"score": 0.11746183037757874, "chain_id": "3JBT3HLQF81EICG45LVDF56RLSYPZ8_1_4"}
{"score": 0.05603582412004471, "chain_id": "3JBT3HLQF81EICG45LVDF56RLSYPZ8_1_5"}
{"score": 0.061703041195869446, "chain_id": "3JBT3HLQF81EICG45LVDF56RLSYPZ8_1_6"}
{"score": 0.03510373458266258, "chain_id": "3JBT3HLQF81EICG45LVDF56RLSYPZ8_1_7"}
{"score": 0.12237264215946198, "chain_id": "3JBT3HLQF81EICG45LVDF56RLSYPZ8_1_8"}
{"score": 0.023876579478383064, "chain_id": "3JBT3HLQF81EICG45LVDF56RLSYPZ8_1_9"}
{"score": 0.04188814014196396, "chain_id": "3JBT3HLQF81EICG45LVDF56RLSYPZ8_1_10"}
{"score": 0.9044582843780518, "chain_id": "3QAVNHZ3EM3NQJTY11M7HV6Y7ZOLA7_1_4"}
{"score": 0.8255676627159119, "chain_id": "3QAVNHZ3EM3NQJTY11M7HV6Y7ZOLA7_1_1"}
{"score": 0.8846401572227478, "chain_id": "3QAVNHZ3EM3NQJTY11M7HV6Y7ZOLA7_1_2"}
{"score": 0.8255676627159119, "chain_id": "3QAVNHZ3EM3NQJTY11M7HV6Y7ZOLA7_1_3"}
{"score": 0.013255574740469456, "chain_id": "3QAVNHZ3EM3NQJTY11M7HV6Y7ZOLA7_1_5"}
{"score": 0.03665152192115784, "chain_id": "3QAVNHZ3EM3NQJTY11M7HV6Y7ZOLA7_1_6"}
{"score": 0.036047544330358505, "chain_id": "3QAVNHZ3EM3NQJTY11M7HV6Y7ZOLA7_1_7"}
{"score": 0.011777615174651146, "chain_id": "3QAVNHZ3EM3NQJTY11M7HV6Y7ZOLA7_1_8"}
{"score": 0.039197612553834915, "chain_id": "3QAVNHZ3EM3NQJTY11M7HV6Y7ZOLA7_1_9"}
{"score": 0.04864739999175072, "chain_id": "3QAVNHZ3EM3NQJTY11M7HV6Y7ZOLA7_1_10"}
{"score": 0.7632380127906799, "chain_id": "3TMSXRD2X6Z77PSX9W0GF5UB1E9W1E_1_3"}
{"score": 0.5352249145507812, "chain_id": "3TMSXRD2X6Z77PSX9W0GF5UB1E9W1E_1_4"}
{"score": 0.7536261081695557, "chain_id": "3TMSXRD2X6Z77PSX9W0GF5UB1E9W1E_1_1"}
{"score": 0.10466351360082626, "chain_id": "3TMSXRD2X6Z77PSX9W0GF5UB1E9W1E_1_2"}
{"score": 0.16523195803165436, "chain_id": "3TMSXRD2X6Z77PSX9W0GF5UB1E9W1E_1_5"}
{"score": 0.839786946773529, "chain_id": "3TMSXRD2X6Z77PSX9W0GF5UB1E9W1E_1_6"}
{"score": 0.21846607327461243, "chain_id": "3TMSXRD2X6Z77PSX9W0GF5UB1E9W1E_1_7"}
{"score": 0.641603410243988, "chain_id": "3TMSXRD2X6Z77PSX9W0GF5UB1E9W1E_1_8"}
{"score": 0.48928728699684143, "chain_id": "3TMSXRD2X6Z77PSX9W0GF5UB1E9W1E_1_9"}
{"score": 0.12079144269227982, "chain_id": "3TMSXRD2X6Z77PSX9W0GF5UB1E9W1E_1_10"}
{"score": 0.8353675603866577, "chain_id": "3MB8LZR5BFST2W2KDSZWB99UDQSLKR_1_2"}
{"score": 0.6096939444541931, "chain_id": "3MB8LZR5BFST2W2KDSZWB99UDQSLKR_1_3"}
{"score": 0.8686092495918274, "chain_id": "3MB8LZR5BFST2W2KDSZWB99UDQSLKR_1_4"}
{"score": 0.7437042593955994, "chain_id": "3MB8LZR5BFST2W2KDSZWB99UDQSLKR_1_6"}
{"score": 0.7972725629806519, "chain_id": "3MB8LZR5BFST2W2KDSZWB99UDQSLKR_1_7"}
{"score": 0.9376955032348633, "chain_id": "3MB8LZR5BFST2W2KDSZWB99UDQSLKR_1_9"}
{"score": 0.6657786965370178, "chain_id": "3MB8LZR5BFST2W2KDSZWB99UDQSLKR_1_1"}
{"score": 0.9382190704345703, "chain_id": "3MB8LZR5BFST2W2KDSZWB99UDQSLKR_1_5"}
{"score": 0.6713827848434448, "chain_id": "3MB8LZR5BFST2W2KDSZWB99UDQSLKR_1_8"}
{"score": 0.4746702313423157, "chain_id": "3MB8LZR5BFST2W2KDSZWB99UDQSLKR_1_10"}
{"score": 0.6366685628890991, "chain_id": "39RP059MEHSCFBGB7RNICJ5TQAZMBI_1_7"}
{"score": 0.13135340809822083, "chain_id": "39RP059MEHSCFBGB7RNICJ5TQAZMBI_1_1"}
{"score": 0.03226114809513092, "chain_id": "39RP059MEHSCFBGB7RNICJ5TQAZMBI_1_2"}
{"score": 0.046870019286870956, "chain_id": "39RP059MEHSCFBGB7RNICJ5TQAZMBI_1_3"}
{"score": 0.04489680752158165, "chain_id": "39RP059MEHSCFBGB7RNICJ5TQAZMBI_1_4"}
{"score": 0.7205605506896973, "chain_id": "39RP059MEHSCFBGB7RNICJ5TQAZMBI_1_5"}
{"score": 0.18286024034023285, "chain_id": "39RP059MEHSCFBGB7RNICJ5TQAZMBI_1_6"}
{"score": 0.4540029764175415, "chain_id": "39RP059MEHSCFBGB7RNICJ5TQAZMBI_1_8"}
{"score": 0.06966561079025269, "chain_id": "39RP059MEHSCFBGB7RNICJ5TQAZMBI_1_9"}
{"score": 0.023733172565698624, "chain_id": "39RP059MEHSCFBGB7RNICJ5TQAZMBI_1_10"}
{"score": 0.01647893898189068, "chain_id": "37UQDCYH6XU83M7U82CTUD2AYDLV7N_1_1"}
{"score": 0.016540559008717537, "chain_id": "37UQDCYH6XU83M7U82CTUD2AYDLV7N_1_2"}
{"score": 0.015403724275529385, "chain_id": "37UQDCYH6XU83M7U82CTUD2AYDLV7N_1_3"}
{"score": 0.017426196485757828, "chain_id": "37UQDCYH6XU83M7U82CTUD2AYDLV7N_1_4"}
{"score": 0.03384215012192726, "chain_id": "37UQDCYH6XU83M7U82CTUD2AYDLV7N_1_5"}
{"score": 0.02439924143254757, "chain_id": "37UQDCYH6XU83M7U82CTUD2AYDLV7N_1_6"}
{"score": 0.02264990098774433, "chain_id": "37UQDCYH6XU83M7U82CTUD2AYDLV7N_1_7"}
{"score": 0.04251828044652939, "chain_id": "37UQDCYH6XU83M7U82CTUD2AYDLV7N_1_8"}
{"score": 0.03409824147820473, "chain_id": "37UQDCYH6XU83M7U82CTUD2AYDLV7N_1_9"}
{"score": 0.059302542358636856, "chain_id": "37UQDCYH6XU83M7U82CTUD2AYDLV7N_1_10"}
{"score": 0.821366012096405, "chain_id": "3NXNZ5RS1AWA6FUR517X2VDD7TN97M_1_5"}
{"score": 0.06966358423233032, "chain_id": "3NXNZ5RS1AWA6FUR517X2VDD7TN97M_1_1"}
{"score": 0.08344791829586029, "chain_id": "3NXNZ5RS1AWA6FUR517X2VDD7TN97M_1_2"}
{"score": 0.1553470641374588, "chain_id": "3NXNZ5RS1AWA6FUR517X2VDD7TN97M_1_3"}
{"score": 0.1933247447013855, "chain_id": "3NXNZ5RS1AWA6FUR517X2VDD7TN97M_1_4"}
{"score": 0.028767941519618034, "chain_id": "3NXNZ5RS1AWA6FUR517X2VDD7TN97M_1_6"}
{"score": 0.1380046159029007, "chain_id": "3NXNZ5RS1AWA6FUR517X2VDD7TN97M_1_7"}
{"score": 0.029581714421510696, "chain_id": "3NXNZ5RS1AWA6FUR517X2VDD7TN97M_1_8"}
{"score": 0.031859882175922394, "chain_id": "3NXNZ5RS1AWA6FUR517X2VDD7TN97M_1_9"}
{"score": 0.8815996050834656, "chain_id": "3NXNZ5RS1AWA6FUR517X2VDD7TN97M_1_10"}
{"score": 0.3263559639453888, "chain_id": "358010RM5ES2I1DLQFGROCFY4NLVX5_1_1"}
{"score": 0.7512660622596741, "chain_id": "358010RM5ES2I1DLQFGROCFY4NLVX5_1_6"}
{"score": 0.406207412481308, "chain_id": "358010RM5ES2I1DLQFGROCFY4NLVX5_1_2"}
{"score": 0.06309666484594345, "chain_id": "358010RM5ES2I1DLQFGROCFY4NLVX5_1_3"}
{"score": 0.9258131980895996, "chain_id": "358010RM5ES2I1DLQFGROCFY4NLVX5_1_4"}
{"score": 0.06539591401815414, "chain_id": "358010RM5ES2I1DLQFGROCFY4NLVX5_1_5"}
{"score": 0.10767804086208344, "chain_id": "358010RM5ES2I1DLQFGROCFY4NLVX5_1_7"}
{"score": 0.10111741721630096, "chain_id": "358010RM5ES2I1DLQFGROCFY4NLVX5_1_8"}
{"score": 0.1968596875667572, "chain_id": "358010RM5ES2I1DLQFGROCFY4NLVX5_1_9"}
{"score": 0.9400942921638489, "chain_id": "358010RM5ES2I1DLQFGROCFY4NLVX5_1_10"}
{"score": 0.036755647510290146, "chain_id": "3UNH76FOCS48SJ9MHJ12KU3UGCXMY4_1_7"}
{"score": 0.025993159040808678, "chain_id": "3UNH76FOCS48SJ9MHJ12KU3UGCXMY4_1_1"}
{"score": 0.03289685398340225, "chain_id": "3UNH76FOCS48SJ9MHJ12KU3UGCXMY4_1_2"}
{"score": 0.037508487701416016, "chain_id": "3UNH76FOCS48SJ9MHJ12KU3UGCXMY4_1_3"}
{"score": 0.05785013362765312, "chain_id": "3UNH76FOCS48SJ9MHJ12KU3UGCXMY4_1_4"}
{"score": 0.020302407443523407, "chain_id": "3UNH76FOCS48SJ9MHJ12KU3UGCXMY4_1_5"}
{"score": 0.05674457922577858, "chain_id": "3UNH76FOCS48SJ9MHJ12KU3UGCXMY4_1_6"}
{"score": 0.16305245459079742, "chain_id": "3UNH76FOCS48SJ9MHJ12KU3UGCXMY4_1_8"}
{"score": 0.02168487012386322, "chain_id": "3UNH76FOCS48SJ9MHJ12KU3UGCXMY4_1_9"}
{"score": 0.012927955016493797, "chain_id": "3UNH76FOCS48SJ9MHJ12KU3UGCXMY4_1_10"}
{"score": 0.9665948748588562, "chain_id": "3K772S5NP8AOU0RKQL9VLM3IHKMEHD_1_2"}
{"score": 0.516265869140625, "chain_id": "3K772S5NP8AOU0RKQL9VLM3IHKMEHD_1_8"}
{"score": 0.9535672068595886, "chain_id": "3K772S5NP8AOU0RKQL9VLM3IHKMEHD_1_1"}
{"score": 0.968649685382843, "chain_id": "3K772S5NP8AOU0RKQL9VLM3IHKMEHD_1_3"}
{"score": 0.9684985876083374, "chain_id": "3K772S5NP8AOU0RKQL9VLM3IHKMEHD_1_4"}
{"score": 0.9550628066062927, "chain_id": "3K772S5NP8AOU0RKQL9VLM3IHKMEHD_1_5"}
{"score": 0.48977380990982056, "chain_id": "3K772S5NP8AOU0RKQL9VLM3IHKMEHD_1_6"}
{"score": 0.21187131106853485, "chain_id": "3K772S5NP8AOU0RKQL9VLM3IHKMEHD_1_7"}
{"score": 0.2241518348455429, "chain_id": "3K772S5NP8AOU0RKQL9VLM3IHKMEHD_1_9"}
{"score": 0.42254742980003357, "chain_id": "3K772S5NP8AOU0RKQL9VLM3IHKMEHD_1_10"}
{"score": 0.9370291233062744, "chain_id": "30BUDKLTXDUCE77PPJ8MWP0SCBYE5Z_1_4"}
{"score": 0.056855279952287674, "chain_id": "30BUDKLTXDUCE77PPJ8MWP0SCBYE5Z_1_9"}
{"score": 0.8608370423316956, "chain_id": "30BUDKLTXDUCE77PPJ8MWP0SCBYE5Z_1_1"}
{"score": 0.613294243812561, "chain_id": "30BUDKLTXDUCE77PPJ8MWP0SCBYE5Z_1_2"}
{"score": 0.8497787714004517, "chain_id": "30BUDKLTXDUCE77PPJ8MWP0SCBYE5Z_1_3"}
{"score": 0.9134693145751953, "chain_id": "30BUDKLTXDUCE77PPJ8MWP0SCBYE5Z_1_5"}
{"score": 0.7115750312805176, "chain_id": "30BUDKLTXDUCE77PPJ8MWP0SCBYE5Z_1_6"}
{"score": 0.04371250048279762, "chain_id": "30BUDKLTXDUCE77PPJ8MWP0SCBYE5Z_1_7"}
{"score": 0.09521152079105377, "chain_id": "30BUDKLTXDUCE77PPJ8MWP0SCBYE5Z_1_8"}
{"score": 0.3795100748538971, "chain_id": "30BUDKLTXDUCE77PPJ8MWP0SCBYE5Z_1_10"}
{"score": 0.22212649881839752, "chain_id": "3K772S5NP8AOU0RKQL9VLM3ID8VEHU_1_1"}
{"score": 0.4455289840698242, "chain_id": "3K772S5NP8AOU0RKQL9VLM3ID8VEHU_1_2"}
{"score": 0.2620682716369629, "chain_id": "3K772S5NP8AOU0RKQL9VLM3ID8VEHU_1_3"}
{"score": 0.711925745010376, "chain_id": "3K772S5NP8AOU0RKQL9VLM3ID8VEHU_1_4"}
{"score": 0.05293738842010498, "chain_id": "3K772S5NP8AOU0RKQL9VLM3ID8VEHU_1_5"}
{"score": 0.2133210301399231, "chain_id": "3K772S5NP8AOU0RKQL9VLM3ID8VEHU_1_6"}
{"score": 0.12218166142702103, "chain_id": "3K772S5NP8AOU0RKQL9VLM3ID8VEHU_1_7"}
{"score": 0.5137094855308533, "chain_id": "3K772S5NP8AOU0RKQL9VLM3ID8VEHU_1_8"}
{"score": 0.07708409428596497, "chain_id": "3K772S5NP8AOU0RKQL9VLM3ID8VEHU_1_9"}
{"score": 0.9062312841415405, "chain_id": "3K772S5NP8AOU0RKQL9VLM3ID8VEHU_1_10"}
{"score": 0.989189624786377, "chain_id": "32KTQ2V7RDETRI1E979MLDA33ETM9E_1_1"}
{"score": 0.9815819263458252, "chain_id": "32KTQ2V7RDETRI1E979MLDA33ETM9E_1_2"}
{"score": 0.9827540516853333, "chain_id": "32KTQ2V7RDETRI1E979MLDA33ETM9E_1_4"}
{"score": 0.9799287915229797, "chain_id": "32KTQ2V7RDETRI1E979MLDA33ETM9E_1_3"}
{"score": 0.12840093672275543, "chain_id": "32KTQ2V7RDETRI1E979MLDA33ETM9E_1_5"}
{"score": 0.27205967903137207, "chain_id": "32KTQ2V7RDETRI1E979MLDA33ETM9E_1_6"}
{"score": 0.33747202157974243, "chain_id": "32KTQ2V7RDETRI1E979MLDA33ETM9E_1_7"}
{"score": 0.08181705325841904, "chain_id": "32KTQ2V7RDETRI1E979MLDA33ETM9E_1_8"}
{"score": 0.020344894379377365, "chain_id": "32KTQ2V7RDETRI1E979MLDA33ETM9E_1_9"}
{"score": 0.014041785150766373, "chain_id": "32KTQ2V7RDETRI1E979MLDA33ETM9E_1_10"}
{"score": 0.9899576306343079, "chain_id": "3W92K5RLWUGTGITBK9XWWTOE9B1V5T_1_1"}
{"score": 0.8855201005935669, "chain_id": "3W92K5RLWUGTGITBK9XWWTOE9B1V5T_1_2"}
{"score": 0.24624714255332947, "chain_id": "3W92K5RLWUGTGITBK9XWWTOE9B1V5T_1_3"}
{"score": 0.7943950891494751, "chain_id": "3W92K5RLWUGTGITBK9XWWTOE9B1V5T_1_4"}
{"score": 0.33134353160858154, "chain_id": "3W92K5RLWUGTGITBK9XWWTOE9B1V5T_1_5"}
{"score": 0.3801756799221039, "chain_id": "3W92K5RLWUGTGITBK9XWWTOE9B1V5T_1_6"}
{"score": 0.20967841148376465, "chain_id": "3W92K5RLWUGTGITBK9XWWTOE9B1V5T_1_7"}
{"score": 0.06849371641874313, "chain_id": "3W92K5RLWUGTGITBK9XWWTOE9B1V5T_1_8"}
{"score": 0.7259992957115173, "chain_id": "3W92K5RLWUGTGITBK9XWWTOE9B1V5T_1_9"}
{"score": 0.2910816967487335, "chain_id": "3W92K5RLWUGTGITBK9XWWTOE9B1V5T_1_10"}
{"score": 0.9894891381263733, "chain_id": "3K2755HG5S2ZOYMEZ0ABCJ9KZJIFDB_1_1"}
{"score": 0.7463639378547668, "chain_id": "3K2755HG5S2ZOYMEZ0ABCJ9KZJIFDB_1_3"}
{"score": 0.2317049652338028, "chain_id": "3K2755HG5S2ZOYMEZ0ABCJ9KZJIFDB_1_4"}
{"score": 0.9810124635696411, "chain_id": "3K2755HG5S2ZOYMEZ0ABCJ9KZJIFDB_1_2"}
{"score": 0.09827973693609238, "chain_id": "3K2755HG5S2ZOYMEZ0ABCJ9KZJIFDB_1_5"}
{"score": 0.04844331741333008, "chain_id": "3K2755HG5S2ZOYMEZ0ABCJ9KZJIFDB_1_6"}
{"score": 0.04640039801597595, "chain_id": "3K2755HG5S2ZOYMEZ0ABCJ9KZJIFDB_1_7"}
{"score": 0.021913466975092888, "chain_id": "3K2755HG5S2ZOYMEZ0ABCJ9KZJIFDB_1_8"}
{"score": 0.14013530313968658, "chain_id": "3K2755HG5S2ZOYMEZ0ABCJ9KZJIFDB_1_9"}
{"score": 0.027687011286616325, "chain_id": "3K2755HG5S2ZOYMEZ0ABCJ9KZJIFDB_1_10"}
{"score": 0.8398764729499817, "chain_id": "3X31TUMD7XLRWVGY5ITE6UDV7KY1LD_1_2"}
{"score": 0.8683324456214905, "chain_id": "3X31TUMD7XLRWVGY5ITE6UDV7KY1LD_1_4"}
{"score": 0.9905971884727478, "chain_id": "3X31TUMD7XLRWVGY5ITE6UDV7KY1LD_1_1"}
{"score": 0.9793686866760254, "chain_id": "3X31TUMD7XLRWVGY5ITE6UDV7KY1LD_1_3"}
{"score": 0.030098237097263336, "chain_id": "3X31TUMD7XLRWVGY5ITE6UDV7KY1LD_1_5"}
{"score": 0.022287189960479736, "chain_id": "3X31TUMD7XLRWVGY5ITE6UDV7KY1LD_1_6"}
{"score": 0.022312527522444725, "chain_id": "3X31TUMD7XLRWVGY5ITE6UDV7KY1LD_1_7"}
{"score": 0.027194155380129814, "chain_id": "3X31TUMD7XLRWVGY5ITE6UDV7KY1LD_1_8"}
{"score": 0.022342057898640633, "chain_id": "3X31TUMD7XLRWVGY5ITE6UDV7KY1LD_1_9"}
{"score": 0.029684584587812424, "chain_id": "3X31TUMD7XLRWVGY5ITE6UDV7KY1LD_1_10"}
{"score": 0.8337042331695557, "chain_id": "3T111IHZ5EPKOYE6EF537C4DM0V9R3_1_1"}
{"score": 0.08962155878543854, "chain_id": "3T111IHZ5EPKOYE6EF537C4DM0V9R3_1_2"}
{"score": 0.8361915349960327, "chain_id": "3T111IHZ5EPKOYE6EF537C4DM0V9R3_1_3"}
{"score": 0.2881397306919098, "chain_id": "3T111IHZ5EPKOYE6EF537C4DM0V9R3_1_4"}
{"score": 0.8969811201095581, "chain_id": "3T111IHZ5EPKOYE6EF537C4DM0V9R3_1_5"}
{"score": 0.11486461013555527, "chain_id": "3T111IHZ5EPKOYE6EF537C4DM0V9R3_1_6"}
{"score": 0.20072662830352783, "chain_id": "3T111IHZ5EPKOYE6EF537C4DM0V9R3_1_7"}
{"score": 0.13397961854934692, "chain_id": "3T111IHZ5EPKOYE6EF537C4DM0V9R3_1_8"}
{"score": 0.6745132803916931, "chain_id": "3T111IHZ5EPKOYE6EF537C4DM0V9R3_1_9"}
{"score": 0.10437515377998352, "chain_id": "3T111IHZ5EPKOYE6EF537C4DM0V9R3_1_10"}
{"score": 0.4777771532535553, "chain_id": "31JLPPHS2UTVCJXA5ENPM4WMBNEO39_1_1"}
{"score": 0.4574280381202698, "chain_id": "31JLPPHS2UTVCJXA5ENPM4WMBNEO39_1_2"}
{"score": 0.5309774279594421, "chain_id": "31JLPPHS2UTVCJXA5ENPM4WMBNEO39_1_3"}
{"score": 0.3570319414138794, "chain_id": "31JLPPHS2UTVCJXA5ENPM4WMBNEO39_1_4"}
{"score": 0.7932518124580383, "chain_id": "31JLPPHS2UTVCJXA5ENPM4WMBNEO39_1_5"}
{"score": 0.4650282561779022, "chain_id": "31JLPPHS2UTVCJXA5ENPM4WMBNEO39_1_6"}
{"score": 0.7743787169456482, "chain_id": "31JLPPHS2UTVCJXA5ENPM4WMBNEO39_1_7"}
{"score": 0.9757777452468872, "chain_id": "31JLPPHS2UTVCJXA5ENPM4WMBNEO39_1_8"}
{"score": 0.35477060079574585, "chain_id": "31JLPPHS2UTVCJXA5ENPM4WMBNEO39_1_9"}
{"score": 0.7659540176391602, "chain_id": "31JLPPHS2UTVCJXA5ENPM4WMBNEO39_1_10"}
{"score": 0.9926707744598389, "chain_id": "3C2NJ6JBKAGO9G1F0Z97O5RPF4U2NS_1_1"}
{"score": 0.9010935425758362, "chain_id": "3C2NJ6JBKAGO9G1F0Z97O5RPF4U2NS_1_2"}
{"score": 0.9924820065498352, "chain_id": "3C2NJ6JBKAGO9G1F0Z97O5RPF4U2NS_1_3"}
{"score": 0.8428212404251099, "chain_id": "3C2NJ6JBKAGO9G1F0Z97O5RPF4U2NS_1_4"}
{"score": 0.01317331288009882, "chain_id": "3C2NJ6JBKAGO9G1F0Z97O5RPF4U2NS_1_5"}
{"score": 0.01869630627334118, "chain_id": "3C2NJ6JBKAGO9G1F0Z97O5RPF4U2NS_1_6"}
{"score": 0.01620364747941494, "chain_id": "3C2NJ6JBKAGO9G1F0Z97O5RPF4U2NS_1_7"}
{"score": 0.014697928912937641, "chain_id": "3C2NJ6JBKAGO9G1F0Z97O5RPF4U2NS_1_8"}
{"score": 0.017449138686060905, "chain_id": "3C2NJ6JBKAGO9G1F0Z97O5RPF4U2NS_1_9"}
{"score": 0.029354412108659744, "chain_id": "3C2NJ6JBKAGO9G1F0Z97O5RPF4U2NS_1_10"}
{"score": 0.9884560108184814, "chain_id": "3L2IS5HSFAHXTSAHJJJDUOMHYGTUNO_1_1"}
{"score": 0.8084842562675476, "chain_id": "3L2IS5HSFAHXTSAHJJJDUOMHYGTUNO_1_3"}
{"score": 0.8471212387084961, "chain_id": "3L2IS5HSFAHXTSAHJJJDUOMHYGTUNO_1_4"}
{"score": 0.9673542976379395, "chain_id": "3L2IS5HSFAHXTSAHJJJDUOMHYGTUNO_1_2"}
{"score": 0.10444016009569168, "chain_id": "3L2IS5HSFAHXTSAHJJJDUOMHYGTUNO_1_5"}
{"score": 0.05549401417374611, "chain_id": "3L2IS5HSFAHXTSAHJJJDUOMHYGTUNO_1_6"}
{"score": 0.0805240198969841, "chain_id": "3L2IS5HSFAHXTSAHJJJDUOMHYGTUNO_1_7"}
{"score": 0.7305310964584351, "chain_id": "3L2IS5HSFAHXTSAHJJJDUOMHYGTUNO_1_8"}
{"score": 0.7011041045188904, "chain_id": "3L2IS5HSFAHXTSAHJJJDUOMHYGTUNO_1_9"}
{"score": 0.14810819923877716, "chain_id": "3L2IS5HSFAHXTSAHJJJDUOMHYGTUNO_1_10"}
{"score": 0.9442319869995117, "chain_id": "32RIADZISS3VS787C99HGEYTM3S4S4_1_5"}
{"score": 0.5992108583450317, "chain_id": "32RIADZISS3VS787C99HGEYTM3S4S4_1_1"}
{"score": 0.5362958312034607, "chain_id": "32RIADZISS3VS787C99HGEYTM3S4S4_1_2"}
{"score": 0.16987909376621246, "chain_id": "32RIADZISS3VS787C99HGEYTM3S4S4_1_3"}
{"score": 0.35831719636917114, "chain_id": "32RIADZISS3VS787C99HGEYTM3S4S4_1_4"}
{"score": 0.04841872304677963, "chain_id": "32RIADZISS3VS787C99HGEYTM3S4S4_1_6"}
{"score": 0.6521724462509155, "chain_id": "32RIADZISS3VS787C99HGEYTM3S4S4_1_7"}
{"score": 0.5100412368774414, "chain_id": "32RIADZISS3VS787C99HGEYTM3S4S4_1_8"}
{"score": 0.4044564366340637, "chain_id": "32RIADZISS3VS787C99HGEYTM3S4S4_1_9"}
{"score": 0.047114383429288864, "chain_id": "32RIADZISS3VS787C99HGEYTM3S4S4_1_10"}
{"score": 0.9923518300056458, "chain_id": "3QIYRE09Y3GHKVJJHV9TJMHKNSTN1X_1_1"}
{"score": 0.8439410924911499, "chain_id": "3QIYRE09Y3GHKVJJHV9TJMHKNSTN1X_1_2"}
{"score": 0.9120049476623535, "chain_id": "3QIYRE09Y3GHKVJJHV9TJMHKNSTN1X_1_4"}
{"score": 0.16972576081752777, "chain_id": "3QIYRE09Y3GHKVJJHV9TJMHKNSTN1X_1_3"}
{"score": 0.3472607433795929, "chain_id": "3QIYRE09Y3GHKVJJHV9TJMHKNSTN1X_1_5"}
{"score": 0.39995625615119934, "chain_id": "3QIYRE09Y3GHKVJJHV9TJMHKNSTN1X_1_6"}
{"score": 0.2114173322916031, "chain_id": "3QIYRE09Y3GHKVJJHV9TJMHKNSTN1X_1_7"}
{"score": 0.07768794149160385, "chain_id": "3QIYRE09Y3GHKVJJHV9TJMHKNSTN1X_1_8"}
{"score": 0.09700740873813629, "chain_id": "3QIYRE09Y3GHKVJJHV9TJMHKNSTN1X_1_9"}
{"score": 0.023631563410162926, "chain_id": "3QIYRE09Y3GHKVJJHV9TJMHKNSTN1X_1_10"}
{"score": 0.21629048883914948, "chain_id": "3A1COHJ8NJU7LZHTDINVTC7WZFVH8D_1_3"}
{"score": 0.8673613667488098, "chain_id": "3A1COHJ8NJU7LZHTDINVTC7WZFVH8D_1_1"}
{"score": 0.08126679807901382, "chain_id": "3A1COHJ8NJU7LZHTDINVTC7WZFVH8D_1_2"}
{"score": 0.31888285279273987, "chain_id": "3A1COHJ8NJU7LZHTDINVTC7WZFVH8D_1_4"}
{"score": 0.18328575789928436, "chain_id": "3A1COHJ8NJU7LZHTDINVTC7WZFVH8D_1_5"}
{"score": 0.04153578728437424, "chain_id": "3A1COHJ8NJU7LZHTDINVTC7WZFVH8D_1_6"}
{"score": 0.053204283118247986, "chain_id": "3A1COHJ8NJU7LZHTDINVTC7WZFVH8D_1_7"}
{"score": 0.14582650363445282, "chain_id": "3A1COHJ8NJU7LZHTDINVTC7WZFVH8D_1_8"}
{"score": 0.16666515171527863, "chain_id": "3A1COHJ8NJU7LZHTDINVTC7WZFVH8D_1_9"}
{"score": 0.08033885806798935, "chain_id": "3A1COHJ8NJU7LZHTDINVTC7WZFVH8D_1_10"}
{"score": 0.24151524901390076, "chain_id": "373ERPL3YO738DNKCLAKYC5P24ZTR4_1_1"}
{"score": 0.31788942217826843, "chain_id": "373ERPL3YO738DNKCLAKYC5P24ZTR4_1_2"}
{"score": 0.29457253217697144, "chain_id": "373ERPL3YO738DNKCLAKYC5P24ZTR4_1_3"}
{"score": 0.24047663807868958, "chain_id": "373ERPL3YO738DNKCLAKYC5P24ZTR4_1_4"}
{"score": 0.1970454454421997, "chain_id": "373ERPL3YO738DNKCLAKYC5P24ZTR4_1_5"}
{"score": 0.04812343046069145, "chain_id": "373ERPL3YO738DNKCLAKYC5P24ZTR4_1_6"}
{"score": 0.020284727215766907, "chain_id": "373ERPL3YO738DNKCLAKYC5P24ZTR4_1_7"}
{"score": 0.08508771657943726, "chain_id": "373ERPL3YO738DNKCLAKYC5P24ZTR4_1_8"}
{"score": 0.0914626270532608, "chain_id": "373ERPL3YO738DNKCLAKYC5P24ZTR4_1_9"}
{"score": 0.041098251938819885, "chain_id": "373ERPL3YO738DNKCLAKYC5P24ZTR4_1_10"}
{"score": 0.6860213875770569, "chain_id": "3BDCF01OGXTOM1R1H70NKHO5BLWLYD_1_1"}
{"score": 0.13792087137699127, "chain_id": "3BDCF01OGXTOM1R1H70NKHO5BLWLYD_1_2"}
{"score": 0.12998756766319275, "chain_id": "3BDCF01OGXTOM1R1H70NKHO5BLWLYD_1_3"}
{"score": 0.060014039278030396, "chain_id": "3BDCF01OGXTOM1R1H70NKHO5BLWLYD_1_4"}
{"score": 0.05864468961954117, "chain_id": "3BDCF01OGXTOM1R1H70NKHO5BLWLYD_1_5"}
{"score": 0.8057789206504822, "chain_id": "3BDCF01OGXTOM1R1H70NKHO5BLWLYD_1_6"}
{"score": 0.6813462376594543, "chain_id": "3BDCF01OGXTOM1R1H70NKHO5BLWLYD_1_7"}
{"score": 0.3141630291938782, "chain_id": "3BDCF01OGXTOM1R1H70NKHO5BLWLYD_1_8"}
{"score": 0.28139379620552063, "chain_id": "3BDCF01OGXTOM1R1H70NKHO5BLWLYD_1_9"}
{"score": 0.6073617339134216, "chain_id": "3BDCF01OGXTOM1R1H70NKHO5BLWLYD_1_10"}
{"score": 0.8361778259277344, "chain_id": "39GHHAVOMFQ2T4PHPF03OD76G0KJ4U_1_1"}
{"score": 0.9656243920326233, "chain_id": "39GHHAVOMFQ2T4PHPF03OD76G0KJ4U_1_2"}
{"score": 0.5318172574043274, "chain_id": "39GHHAVOMFQ2T4PHPF03OD76G0KJ4U_1_3"}
{"score": 0.3577338457107544, "chain_id": "39GHHAVOMFQ2T4PHPF03OD76G0KJ4U_1_4"}
{"score": 0.0928681418299675, "chain_id": "39GHHAVOMFQ2T4PHPF03OD76G0KJ4U_1_5"}
{"score": 0.3555404245853424, "chain_id": "39GHHAVOMFQ2T4PHPF03OD76G0KJ4U_1_6"}
{"score": 0.21772846579551697, "chain_id": "39GHHAVOMFQ2T4PHPF03OD76G0KJ4U_1_7"}
{"score": 0.8957876563072205, "chain_id": "39GHHAVOMFQ2T4PHPF03OD76G0KJ4U_1_8"}
{"score": 0.09520802646875381, "chain_id": "39GHHAVOMFQ2T4PHPF03OD76G0KJ4U_1_9"}
{"score": 0.16789604723453522, "chain_id": "39GHHAVOMFQ2T4PHPF03OD76G0KJ4U_1_10"}
{"score": 0.1490403413772583, "chain_id": "3CP1TO84PT0KJRV9WZDLUOR3ENO25U_1_3"}
{"score": 0.907627284526825, "chain_id": "3CP1TO84PT0KJRV9WZDLUOR3ENO25U_1_7"}
{"score": 0.974826455116272, "chain_id": "3CP1TO84PT0KJRV9WZDLUOR3ENO25U_1_1"}
{"score": 0.7871871590614319, "chain_id": "3CP1TO84PT0KJRV9WZDLUOR3ENO25U_1_2"}
{"score": 0.16771647334098816, "chain_id": "3CP1TO84PT0KJRV9WZDLUOR3ENO25U_1_4"}
{"score": 0.8049781322479248, "chain_id": "3CP1TO84PT0KJRV9WZDLUOR3ENO25U_1_5"}
{"score": 0.757803738117218, "chain_id": "3CP1TO84PT0KJRV9WZDLUOR3ENO25U_1_6"}
{"score": 0.1383199244737625, "chain_id": "3CP1TO84PT0KJRV9WZDLUOR3ENO25U_1_8"}
{"score": 0.11431457847356796, "chain_id": "3CP1TO84PT0KJRV9WZDLUOR3ENO25U_1_9"}
{"score": 0.23266415297985077, "chain_id": "3CP1TO84PT0KJRV9WZDLUOR3ENO25U_1_10"}
{"score": 0.9815878868103027, "chain_id": "3TE3O8573079OET7T6QOXPWZ65FR2Z_1_1"}
{"score": 0.7921923995018005, "chain_id": "3TE3O8573079OET7T6QOXPWZ65FR2Z_1_3"}
{"score": 0.8796286582946777, "chain_id": "3TE3O8573079OET7T6QOXPWZ65FR2Z_1_4"}
{"score": 0.9847905039787292, "chain_id": "3TE3O8573079OET7T6QOXPWZ65FR2Z_1_5"}
{"score": 0.7003065347671509, "chain_id": "3TE3O8573079OET7T6QOXPWZ65FR2Z_1_2"}
{"score": 0.013195287436246872, "chain_id": "3TE3O8573079OET7T6QOXPWZ65FR2Z_1_6"}
{"score": 0.03025178797543049, "chain_id": "3TE3O8573079OET7T6QOXPWZ65FR2Z_1_7"}
{"score": 0.021012965589761734, "chain_id": "3TE3O8573079OET7T6QOXPWZ65FR2Z_1_8"}
{"score": 0.022505130618810654, "chain_id": "3TE3O8573079OET7T6QOXPWZ65FR2Z_1_9"}
{"score": 0.09474389255046844, "chain_id": "3TE3O8573079OET7T6QOXPWZ65FR2Z_1_10"}
{"score": 0.03249487280845642, "chain_id": "3K9FOBBF2HIUA2NNA5RC31QYBCJNL6_1_1"}
{"score": 0.051898859441280365, "chain_id": "3K9FOBBF2HIUA2NNA5RC31QYBCJNL6_1_2"}
{"score": 0.03924262151122093, "chain_id": "3K9FOBBF2HIUA2NNA5RC31QYBCJNL6_1_3"}
{"score": 0.03260441869497299, "chain_id": "3K9FOBBF2HIUA2NNA5RC31QYBCJNL6_1_4"}
{"score": 0.9444201588630676, "chain_id": "3K9FOBBF2HIUA2NNA5RC31QYBCJNL6_1_5"}
{"score": 0.03505226969718933, "chain_id": "3K9FOBBF2HIUA2NNA5RC31QYBCJNL6_1_6"}
{"score": 0.6888753771781921, "chain_id": "3K9FOBBF2HIUA2NNA5RC31QYBCJNL6_1_7"}
{"score": 0.2537498474121094, "chain_id": "3K9FOBBF2HIUA2NNA5RC31QYBCJNL6_1_8"}
{"score": 0.032863058149814606, "chain_id": "3K9FOBBF2HIUA2NNA5RC31QYBCJNL6_1_9"}
{"score": 0.029285961762070656, "chain_id": "3K9FOBBF2HIUA2NNA5RC31QYBCJNL6_1_10"}
{"score": 0.29215285181999207, "chain_id": "3LBXNTKX0RU4LU0INEBVWUQ1EC2X9Q_1_1"}
{"score": 0.022310519590973854, "chain_id": "3LBXNTKX0RU4LU0INEBVWUQ1EC2X9Q_1_2"}
{"score": 0.02108684927225113, "chain_id": "3LBXNTKX0RU4LU0INEBVWUQ1EC2X9Q_1_3"}
{"score": 0.2730967104434967, "chain_id": "3LBXNTKX0RU4LU0INEBVWUQ1EC2X9Q_1_4"}
{"score": 0.5497796535491943, "chain_id": "3LBXNTKX0RU4LU0INEBVWUQ1EC2X9Q_1_5"}
{"score": 0.056720659136772156, "chain_id": "3LBXNTKX0RU4LU0INEBVWUQ1EC2X9Q_1_6"}
{"score": 0.03874582052230835, "chain_id": "3LBXNTKX0RU4LU0INEBVWUQ1EC2X9Q_1_7"}
{"score": 0.11402331292629242, "chain_id": "3LBXNTKX0RU4LU0INEBVWUQ1EC2X9Q_1_8"}
{"score": 0.3611949682235718, "chain_id": "3LBXNTKX0RU4LU0INEBVWUQ1EC2X9Q_1_9"}
{"score": 0.11744117736816406, "chain_id": "3LBXNTKX0RU4LU0INEBVWUQ1EC2X9Q_1_10"}
{"score": 0.21805115044116974, "chain_id": "34HJIJKLP5VBKZPB64EMR1I05694VE_1_1"}
{"score": 0.3030031621456146, "chain_id": "34HJIJKLP5VBKZPB64EMR1I05694VE_1_2"}
{"score": 0.6193975806236267, "chain_id": "34HJIJKLP5VBKZPB64EMR1I05694VE_1_3"}
{"score": 0.5368941426277161, "chain_id": "34HJIJKLP5VBKZPB64EMR1I05694VE_1_4"}
{"score": 0.3485585153102875, "chain_id": "34HJIJKLP5VBKZPB64EMR1I05694VE_1_5"}
{"score": 0.09618660062551498, "chain_id": "34HJIJKLP5VBKZPB64EMR1I05694VE_1_6"}
{"score": 0.06169288977980614, "chain_id": "34HJIJKLP5VBKZPB64EMR1I05694VE_1_7"}
{"score": 0.1885337084531784, "chain_id": "34HJIJKLP5VBKZPB64EMR1I05694VE_1_8"}
{"score": 0.18036849796772003, "chain_id": "34HJIJKLP5VBKZPB64EMR1I05694VE_1_9"}
{"score": 0.29407379031181335, "chain_id": "34HJIJKLP5VBKZPB64EMR1I05694VE_1_10"}
{"score": 0.21894776821136475, "chain_id": "3IXEICO792IAMUP0KX7MNHET6G56T1_1_1"}
{"score": 0.16442738473415375, "chain_id": "3IXEICO792IAMUP0KX7MNHET6G56T1_1_2"}
{"score": 0.7214082479476929, "chain_id": "3IXEICO792IAMUP0KX7MNHET6G56T1_1_3"}
{"score": 0.4974067211151123, "chain_id": "3IXEICO792IAMUP0KX7MNHET6G56T1_1_4"}
{"score": 0.20332030951976776, "chain_id": "3IXEICO792IAMUP0KX7MNHET6G56T1_1_5"}
{"score": 0.719197154045105, "chain_id": "3IXEICO792IAMUP0KX7MNHET6G56T1_1_6"}
{"score": 0.7415965795516968, "chain_id": "3IXEICO792IAMUP0KX7MNHET6G56T1_1_7"}
{"score": 0.13755856454372406, "chain_id": "3IXEICO792IAMUP0KX7MNHET6G56T1_1_8"}
{"score": 0.06171469762921333, "chain_id": "3IXEICO792IAMUP0KX7MNHET6G56T1_1_9"}
{"score": 0.08286911994218826, "chain_id": "3IXEICO792IAMUP0KX7MNHET6G56T1_1_10"}
{"score": 0.9857587814331055, "chain_id": "33C7UALJVLXWHOWFBTKA1PRPLLI18A_1_2"}
{"score": 0.9617506265640259, "chain_id": "33C7UALJVLXWHOWFBTKA1PRPLLI18A_1_3"}
{"score": 0.9824371933937073, "chain_id": "33C7UALJVLXWHOWFBTKA1PRPLLI18A_1_4"}
{"score": 0.9896990656852722, "chain_id": "33C7UALJVLXWHOWFBTKA1PRPLLI18A_1_5"}
{"score": 0.9913467764854431, "chain_id": "33C7UALJVLXWHOWFBTKA1PRPLLI18A_1_6"}
{"score": 0.9875757694244385, "chain_id": "33C7UALJVLXWHOWFBTKA1PRPLLI18A_1_7"}
{"score": 0.9471564888954163, "chain_id": "33C7UALJVLXWHOWFBTKA1PRPLLI18A_1_8"}
{"score": 0.6294551491737366, "chain_id": "33C7UALJVLXWHOWFBTKA1PRPLLI18A_1_9"}
{"score": 0.9229037165641785, "chain_id": "33C7UALJVLXWHOWFBTKA1PRPLLI18A_1_10"}
{"score": 0.39781859517097473, "chain_id": "33C7UALJVLXWHOWFBTKA1PRPLLI18A_1_1"}
{"score": 0.6185726523399353, "chain_id": "3WYP994K17Q63GOUU3ULVY68MJEY6C_1_2"}
{"score": 0.9089565277099609, "chain_id": "3WYP994K17Q63GOUU3ULVY68MJEY6C_1_3"}
{"score": 0.9809193015098572, "chain_id": "3WYP994K17Q63GOUU3ULVY68MJEY6C_1_5"}
{"score": 0.8405240178108215, "chain_id": "3WYP994K17Q63GOUU3ULVY68MJEY6C_1_6"}
{"score": 0.5086553692817688, "chain_id": "3WYP994K17Q63GOUU3ULVY68MJEY6C_1_7"}
{"score": 0.9279714822769165, "chain_id": "3WYP994K17Q63GOUU3ULVY68MJEY6C_1_9"}
{"score": 0.9379281401634216, "chain_id": "3WYP994K17Q63GOUU3ULVY68MJEY6C_1_10"}
{"score": 0.8053033351898193, "chain_id": "3WYP994K17Q63GOUU3ULVY68MJEY6C_1_1"}
{"score": 0.65697181224823, "chain_id": "3WYP994K17Q63GOUU3ULVY68MJEY6C_1_4"}
{"score": 0.3813200294971466, "chain_id": "3WYP994K17Q63GOUU3ULVY68MJEY6C_1_8"}
{"score": 0.25133413076400757, "chain_id": "3TE3O8573079OET7T6QOXPWZ3U4R2Z_1_2"}
{"score": 0.2519063651561737, "chain_id": "3TE3O8573079OET7T6QOXPWZ3U4R2Z_1_9"}
{"score": 0.33763357996940613, "chain_id": "3TE3O8573079OET7T6QOXPWZ3U4R2Z_1_1"}
{"score": 0.562949001789093, "chain_id": "3TE3O8573079OET7T6QOXPWZ3U4R2Z_1_3"}
{"score": 0.42082715034484863, "chain_id": "3TE3O8573079OET7T6QOXPWZ3U4R2Z_1_4"}
{"score": 0.09157557785511017, "chain_id": "3TE3O8573079OET7T6QOXPWZ3U4R2Z_1_5"}
{"score": 0.15308985114097595, "chain_id": "3TE3O8573079OET7T6QOXPWZ3U4R2Z_1_6"}
{"score": 0.1770956963300705, "chain_id": "3TE3O8573079OET7T6QOXPWZ3U4R2Z_1_7"}
{"score": 0.14185971021652222, "chain_id": "3TE3O8573079OET7T6QOXPWZ3U4R2Z_1_8"}
{"score": 0.13762517273426056, "chain_id": "3TE3O8573079OET7T6QOXPWZ3U4R2Z_1_10"}
{"score": 0.7919415831565857, "chain_id": "3PJUZCGDJ6FE6TZAF6Z3GV98J8H98O_1_1"}
{"score": 0.5415679216384888, "chain_id": "3PJUZCGDJ6FE6TZAF6Z3GV98J8H98O_1_2"}
{"score": 0.9070650935173035, "chain_id": "3PJUZCGDJ6FE6TZAF6Z3GV98J8H98O_1_4"}
{"score": 0.31728383898735046, "chain_id": "3PJUZCGDJ6FE6TZAF6Z3GV98J8H98O_1_7"}
{"score": 0.7322466969490051, "chain_id": "3PJUZCGDJ6FE6TZAF6Z3GV98J8H98O_1_8"}
{"score": 0.609283447265625, "chain_id": "3PJUZCGDJ6FE6TZAF6Z3GV98J8H98O_1_3"}
{"score": 0.44294679164886475, "chain_id": "3PJUZCGDJ6FE6TZAF6Z3GV98J8H98O_1_5"}
{"score": 0.20068864524364471, "chain_id": "3PJUZCGDJ6FE6TZAF6Z3GV98J8H98O_1_6"}
{"score": 0.09338913857936859, "chain_id": "3PJUZCGDJ6FE6TZAF6Z3GV98J8H98O_1_9"}
{"score": 0.12569913268089294, "chain_id": "3PJUZCGDJ6FE6TZAF6Z3GV98J8H98O_1_10"}
{"score": 0.6349253058433533, "chain_id": "3R9WASFE2ZF2RZRARIZ83BSNLFBFZJ_1_3"}
{"score": 0.6726340055465698, "chain_id": "3R9WASFE2ZF2RZRARIZ83BSNLFBFZJ_1_7"}
{"score": 0.5436278581619263, "chain_id": "3R9WASFE2ZF2RZRARIZ83BSNLFBFZJ_1_10"}
{"score": 0.7731122970581055, "chain_id": "3R9WASFE2ZF2RZRARIZ83BSNLFBFZJ_1_1"}
{"score": 0.674819827079773, "chain_id": "3R9WASFE2ZF2RZRARIZ83BSNLFBFZJ_1_2"}
{"score": 0.7202927470207214, "chain_id": "3R9WASFE2ZF2RZRARIZ83BSNLFBFZJ_1_4"}
{"score": 0.6642867922782898, "chain_id": "3R9WASFE2ZF2RZRARIZ83BSNLFBFZJ_1_5"}
{"score": 0.2116093933582306, "chain_id": "3R9WASFE2ZF2RZRARIZ83BSNLFBFZJ_1_6"}
{"score": 0.48594024777412415, "chain_id": "3R9WASFE2ZF2RZRARIZ83BSNLFBFZJ_1_8"}
{"score": 0.6391252279281616, "chain_id": "3R9WASFE2ZF2RZRARIZ83BSNLFBFZJ_1_9"}
{"score": 0.9567440152168274, "chain_id": "3PH3VY7DJLW42LD5H7987ZENGMLZWT_1_1"}
{"score": 0.9891768097877502, "chain_id": "3PH3VY7DJLW42LD5H7987ZENGMLZWT_1_2"}
{"score": 0.9193685054779053, "chain_id": "3PH3VY7DJLW42LD5H7987ZENGMLZWT_1_3"}
{"score": 0.05624423921108246, "chain_id": "3PH3VY7DJLW42LD5H7987ZENGMLZWT_1_10"}
{"score": 0.8632722496986389, "chain_id": "3PH3VY7DJLW42LD5H7987ZENGMLZWT_1_4"}
{"score": 0.6382546424865723, "chain_id": "3PH3VY7DJLW42LD5H7987ZENGMLZWT_1_5"}
{"score": 0.37033188343048096, "chain_id": "3PH3VY7DJLW42LD5H7987ZENGMLZWT_1_6"}
{"score": 0.3919024169445038, "chain_id": "3PH3VY7DJLW42LD5H7987ZENGMLZWT_1_7"}
{"score": 0.20137189328670502, "chain_id": "3PH3VY7DJLW42LD5H7987ZENGMLZWT_1_8"}
{"score": 0.21231138706207275, "chain_id": "3PH3VY7DJLW42LD5H7987ZENGMLZWT_1_9"}
{"score": 0.9191380143165588, "chain_id": "3P1L2B7AD1OCSNNZBKRPIQQ32QELOI_1_2"}
{"score": 0.9710100293159485, "chain_id": "3P1L2B7AD1OCSNNZBKRPIQQ32QELOI_1_3"}
{"score": 0.9855519533157349, "chain_id": "3P1L2B7AD1OCSNNZBKRPIQQ32QELOI_1_4"}
{"score": 0.9670160412788391, "chain_id": "3P1L2B7AD1OCSNNZBKRPIQQ32QELOI_1_5"}
{"score": 0.9820963144302368, "chain_id": "3P1L2B7AD1OCSNNZBKRPIQQ32QELOI_1_6"}
{"score": 0.9920600652694702, "chain_id": "3P1L2B7AD1OCSNNZBKRPIQQ32QELOI_1_8"}
{"score": 0.7603789567947388, "chain_id": "3P1L2B7AD1OCSNNZBKRPIQQ32QELOI_1_10"}
{"score": 0.9764559268951416, "chain_id": "3P1L2B7AD1OCSNNZBKRPIQQ32QELOI_1_1"}
{"score": 0.9903674125671387, "chain_id": "3P1L2B7AD1OCSNNZBKRPIQQ32QELOI_1_7"}
{"score": 0.9654616713523865, "chain_id": "3P1L2B7AD1OCSNNZBKRPIQQ32QELOI_1_9"}
{"score": 0.9135903716087341, "chain_id": "3OXV7EAXLEP5NDR65I1V54AVH8636I_1_2"}
{"score": 0.811151921749115, "chain_id": "3OXV7EAXLEP5NDR65I1V54AVH8636I_1_3"}
{"score": 0.9354241490364075, "chain_id": "3OXV7EAXLEP5NDR65I1V54AVH8636I_1_4"}
{"score": 0.9539145231246948, "chain_id": "3OXV7EAXLEP5NDR65I1V54AVH8636I_1_6"}
{"score": 0.9629189372062683, "chain_id": "3OXV7EAXLEP5NDR65I1V54AVH8636I_1_7"}
{"score": 0.9250715374946594, "chain_id": "3OXV7EAXLEP5NDR65I1V54AVH8636I_1_1"}
{"score": 0.07251444458961487, "chain_id": "3OXV7EAXLEP5NDR65I1V54AVH8636I_1_5"}
{"score": 0.9472677111625671, "chain_id": "3OXV7EAXLEP5NDR65I1V54AVH8636I_1_8"}
{"score": 0.9475233554840088, "chain_id": "3OXV7EAXLEP5NDR65I1V54AVH8636I_1_9"}
{"score": 0.027591869235038757, "chain_id": "3OXV7EAXLEP5NDR65I1V54AVH8636I_1_10"}
{"score": 0.7711402177810669, "chain_id": "3R0T90IZ1SBVX6CVAOLIAYREDDQGCN_1_1"}
{"score": 0.33240845799446106, "chain_id": "3R0T90IZ1SBVX6CVAOLIAYREDDQGCN_1_5"}
{"score": 0.26237988471984863, "chain_id": "3R0T90IZ1SBVX6CVAOLIAYREDDQGCN_1_2"}
{"score": 0.7908608913421631, "chain_id": "3R0T90IZ1SBVX6CVAOLIAYREDDQGCN_1_3"}
{"score": 0.6892794966697693, "chain_id": "3R0T90IZ1SBVX6CVAOLIAYREDDQGCN_1_4"}
{"score": 0.1528148502111435, "chain_id": "3R0T90IZ1SBVX6CVAOLIAYREDDQGCN_1_6"}
{"score": 0.19654816389083862, "chain_id": "3R0T90IZ1SBVX6CVAOLIAYREDDQGCN_1_7"}
{"score": 0.3056890070438385, "chain_id": "3R0T90IZ1SBVX6CVAOLIAYREDDQGCN_1_8"}
{"score": 0.22666428983211517, "chain_id": "3R0T90IZ1SBVX6CVAOLIAYREDDQGCN_1_9"}
{"score": 0.27276337146759033, "chain_id": "3R0T90IZ1SBVX6CVAOLIAYREDDQGCN_1_10"}
{"score": 0.27533474564552307, "chain_id": "3CPLWGV3MOYZ90MEL8OMYSZ3BBWN9P_1_2"}
{"score": 0.26036110520362854, "chain_id": "3CPLWGV3MOYZ90MEL8OMYSZ3BBWN9P_1_4"}
{"score": 0.32290971279144287, "chain_id": "3CPLWGV3MOYZ90MEL8OMYSZ3BBWN9P_1_8"}
{"score": 0.15514114499092102, "chain_id": "3CPLWGV3MOYZ90MEL8OMYSZ3BBWN9P_1_1"}
{"score": 0.5957075953483582, "chain_id": "3CPLWGV3MOYZ90MEL8OMYSZ3BBWN9P_1_3"}
{"score": 0.6047306060791016, "chain_id": "3CPLWGV3MOYZ90MEL8OMYSZ3BBWN9P_1_5"}
{"score": 0.3283277750015259, "chain_id": "3CPLWGV3MOYZ90MEL8OMYSZ3BBWN9P_1_6"}
{"score": 0.37066057324409485, "chain_id": "3CPLWGV3MOYZ90MEL8OMYSZ3BBWN9P_1_7"}
{"score": 0.42824259400367737, "chain_id": "3CPLWGV3MOYZ90MEL8OMYSZ3BBWN9P_1_9"}
{"score": 0.16354797780513763, "chain_id": "3CPLWGV3MOYZ90MEL8OMYSZ3BBWN9P_1_10"}
{"score": 0.02398308739066124, "chain_id": "33FBRBDW6OYG4R6DRQ9UILAGTOVC8D_1_1"}
{"score": 0.08328790217638016, "chain_id": "33FBRBDW6OYG4R6DRQ9UILAGTOVC8D_1_2"}
{"score": 0.05365036055445671, "chain_id": "33FBRBDW6OYG4R6DRQ9UILAGTOVC8D_1_3"}
{"score": 0.7924655675888062, "chain_id": "33FBRBDW6OYG4R6DRQ9UILAGTOVC8D_1_4"}
{"score": 0.21402707695960999, "chain_id": "33FBRBDW6OYG4R6DRQ9UILAGTOVC8D_1_5"}
{"score": 0.09333626180887222, "chain_id": "33FBRBDW6OYG4R6DRQ9UILAGTOVC8D_1_6"}
{"score": 0.028797511011362076, "chain_id": "33FBRBDW6OYG4R6DRQ9UILAGTOVC8D_1_7"}
{"score": 0.16521812975406647, "chain_id": "33FBRBDW6OYG4R6DRQ9UILAGTOVC8D_1_8"}
{"score": 0.04496557265520096, "chain_id": "33FBRBDW6OYG4R6DRQ9UILAGTOVC8D_1_9"}
{"score": 0.1688125878572464, "chain_id": "33FBRBDW6OYG4R6DRQ9UILAGTOVC8D_1_10"}
{"score": 0.4449424147605896, "chain_id": "3Z3ZLGNNSITYXVAQKRFTB9RMHHK3QX_1_1"}
{"score": 0.902410626411438, "chain_id": "3Z3ZLGNNSITYXVAQKRFTB9RMHHK3QX_1_2"}
{"score": 0.5119417309761047, "chain_id": "3Z3ZLGNNSITYXVAQKRFTB9RMHHK3QX_1_3"}
{"score": 0.2354804426431656, "chain_id": "3Z3ZLGNNSITYXVAQKRFTB9RMHHK3QX_1_4"}
{"score": 0.3193625807762146, "chain_id": "3Z3ZLGNNSITYXVAQKRFTB9RMHHK3QX_1_5"}
{"score": 0.2050715833902359, "chain_id": "3Z3ZLGNNSITYXVAQKRFTB9RMHHK3QX_1_6"}
{"score": 0.046860307455062866, "chain_id": "3Z3ZLGNNSITYXVAQKRFTB9RMHHK3QX_1_7"}
{"score": 0.09397859871387482, "chain_id": "3Z3ZLGNNSITYXVAQKRFTB9RMHHK3QX_1_8"}
{"score": 0.1800081729888916, "chain_id": "3Z3ZLGNNSITYXVAQKRFTB9RMHHK3QX_1_9"}
{"score": 0.9025315046310425, "chain_id": "3Z3ZLGNNSITYXVAQKRFTB9RMHHK3QX_1_10"}
{"score": 0.9886892437934875, "chain_id": "3018Q3ZVOIPYTHOB6LJ337FXF57ARA_1_1"}
{"score": 0.9887287616729736, "chain_id": "3018Q3ZVOIPYTHOB6LJ337FXF57ARA_1_2"}
{"score": 0.9875671863555908, "chain_id": "3018Q3ZVOIPYTHOB6LJ337FXF57ARA_1_4"}
{"score": 0.37468233704566956, "chain_id": "3018Q3ZVOIPYTHOB6LJ337FXF57ARA_1_6"}
{"score": 0.9921157956123352, "chain_id": "3018Q3ZVOIPYTHOB6LJ337FXF57ARA_1_3"}
{"score": 0.47544118762016296, "chain_id": "3018Q3ZVOIPYTHOB6LJ337FXF57ARA_1_5"}
{"score": 0.174798384308815, "chain_id": "3018Q3ZVOIPYTHOB6LJ337FXF57ARA_1_7"}
{"score": 0.48049023747444153, "chain_id": "3018Q3ZVOIPYTHOB6LJ337FXF57ARA_1_8"}
{"score": 0.046462107449769974, "chain_id": "3018Q3ZVOIPYTHOB6LJ337FXF57ARA_1_9"}
{"score": 0.13396866619586945, "chain_id": "3018Q3ZVOIPYTHOB6LJ337FXF57ARA_1_10"}
{"score": 0.7820394039154053, "chain_id": "3U088ZLJVKS7007FDDWG10B1Y1B0WJ_1_1"}
{"score": 0.9726693630218506, "chain_id": "3U088ZLJVKS7007FDDWG10B1Y1B0WJ_1_2"}
{"score": 0.3990831971168518, "chain_id": "3U088ZLJVKS7007FDDWG10B1Y1B0WJ_1_3"}
{"score": 0.33631661534309387, "chain_id": "3U088ZLJVKS7007FDDWG10B1Y1B0WJ_1_4"}
{"score": 0.6394280195236206, "chain_id": "3U088ZLJVKS7007FDDWG10B1Y1B0WJ_1_5"}
{"score": 0.07449155300855637, "chain_id": "3U088ZLJVKS7007FDDWG10B1Y1B0WJ_1_6"}
{"score": 0.5277230739593506, "chain_id": "3U088ZLJVKS7007FDDWG10B1Y1B0WJ_1_7"}
{"score": 0.05477989837527275, "chain_id": "3U088ZLJVKS7007FDDWG10B1Y1B0WJ_1_8"}
{"score": 0.29586419463157654, "chain_id": "3U088ZLJVKS7007FDDWG10B1Y1B0WJ_1_9"}
{"score": 0.03074811026453972, "chain_id": "3U088ZLJVKS7007FDDWG10B1Y1B0WJ_1_10"}
{"score": 0.989554762840271, "chain_id": "3RANCT1ZVFGVSJLKGTE43TMN4RLBU9_1_1"}
{"score": 0.9853968024253845, "chain_id": "3RANCT1ZVFGVSJLKGTE43TMN4RLBU9_1_2"}
{"score": 0.98832768201828, "chain_id": "3RANCT1ZVFGVSJLKGTE43TMN4RLBU9_1_3"}
{"score": 0.985792338848114, "chain_id": "3RANCT1ZVFGVSJLKGTE43TMN4RLBU9_1_4"}
{"score": 0.2822820544242859, "chain_id": "3RANCT1ZVFGVSJLKGTE43TMN4RLBU9_1_5"}
{"score": 0.17776033282279968, "chain_id": "3RANCT1ZVFGVSJLKGTE43TMN4RLBU9_1_6"}
{"score": 0.041535958647727966, "chain_id": "3RANCT1ZVFGVSJLKGTE43TMN4RLBU9_1_7"}
{"score": 0.07423543184995651, "chain_id": "3RANCT1ZVFGVSJLKGTE43TMN4RLBU9_1_8"}
{"score": 0.14889110624790192, "chain_id": "3RANCT1ZVFGVSJLKGTE43TMN4RLBU9_1_9"}
{"score": 0.7218266725540161, "chain_id": "3RANCT1ZVFGVSJLKGTE43TMN4RLBU9_1_10"}
{"score": 0.9589889049530029, "chain_id": "3P529IW9KYKIMAA6CH8ZVWHP665FL2_1_1"}
{"score": 0.9707103371620178, "chain_id": "3P529IW9KYKIMAA6CH8ZVWHP665FL2_1_2"}
{"score": 0.9774790406227112, "chain_id": "3P529IW9KYKIMAA6CH8ZVWHP665FL2_1_4"}
{"score": 0.89356929063797, "chain_id": "3P529IW9KYKIMAA6CH8ZVWHP665FL2_1_5"}
{"score": 0.9793732166290283, "chain_id": "3P529IW9KYKIMAA6CH8ZVWHP665FL2_1_6"}
{"score": 0.8353902697563171, "chain_id": "3P529IW9KYKIMAA6CH8ZVWHP665FL2_1_3"}
{"score": 0.11592880636453629, "chain_id": "3P529IW9KYKIMAA6CH8ZVWHP665FL2_1_7"}
{"score": 0.44843390583992004, "chain_id": "3P529IW9KYKIMAA6CH8ZVWHP665FL2_1_8"}
{"score": 0.6351901292800903, "chain_id": "3P529IW9KYKIMAA6CH8ZVWHP665FL2_1_9"}
{"score": 0.20877891778945923, "chain_id": "3P529IW9KYKIMAA6CH8ZVWHP665FL2_1_10"}
{"score": 0.9471220374107361, "chain_id": "30LSNF239UUWVFQO3JWFJXV8H8D2IJ_1_1"}
{"score": 0.7089617252349854, "chain_id": "30LSNF239UUWVFQO3JWFJXV8H8D2IJ_1_3"}
{"score": 0.922541618347168, "chain_id": "30LSNF239UUWVFQO3JWFJXV8H8D2IJ_1_4"}
{"score": 0.9235604405403137, "chain_id": "30LSNF239UUWVFQO3JWFJXV8H8D2IJ_1_5"}
{"score": 0.5923240184783936, "chain_id": "30LSNF239UUWVFQO3JWFJXV8H8D2IJ_1_6"}
{"score": 0.7998456358909607, "chain_id": "30LSNF239UUWVFQO3JWFJXV8H8D2IJ_1_7"}
{"score": 0.8209313154220581, "chain_id": "30LSNF239UUWVFQO3JWFJXV8H8D2IJ_1_8"}
{"score": 0.8106291890144348, "chain_id": "30LSNF239UUWVFQO3JWFJXV8H8D2IJ_1_9"}
{"score": 0.7041488289833069, "chain_id": "30LSNF239UUWVFQO3JWFJXV8H8D2IJ_1_10"}
{"score": 0.9622686505317688, "chain_id": "30LSNF239UUWVFQO3JWFJXV8H8D2IJ_1_2"}
{"score": 0.32146212458610535, "chain_id": "33LKR6A5KEJFF8O3ERV5SLNC0LDT1C_1_1"}
{"score": 0.6558692455291748, "chain_id": "33LKR6A5KEJFF8O3ERV5SLNC0LDT1C_1_2"}
{"score": 0.7771638631820679, "chain_id": "33LKR6A5KEJFF8O3ERV5SLNC0LDT1C_1_3"}
{"score": 0.2446449100971222, "chain_id": "33LKR6A5KEJFF8O3ERV5SLNC0LDT1C_1_4"}
{"score": 0.46462181210517883, "chain_id": "33LKR6A5KEJFF8O3ERV5SLNC0LDT1C_1_5"}
{"score": 0.36314359307289124, "chain_id": "33LKR6A5KEJFF8O3ERV5SLNC0LDT1C_1_6"}
{"score": 0.09858331829309464, "chain_id": "33LKR6A5KEJFF8O3ERV5SLNC0LDT1C_1_7"}
{"score": 0.5265430808067322, "chain_id": "33LKR6A5KEJFF8O3ERV5SLNC0LDT1C_1_8"}
{"score": 0.6316548585891724, "chain_id": "33LKR6A5KEJFF8O3ERV5SLNC0LDT1C_1_9"}
{"score": 0.5274831652641296, "chain_id": "33LKR6A5KEJFF8O3ERV5SLNC0LDT1C_1_10"}
{"score": 0.988106369972229, "chain_id": "3IRIK4HM3AJT0DNPYBCWY7EPM7NC60_1_2"}
{"score": 0.3510138690471649, "chain_id": "3IRIK4HM3AJT0DNPYBCWY7EPM7NC60_1_7"}
{"score": 0.9861738681793213, "chain_id": "3IRIK4HM3AJT0DNPYBCWY7EPM7NC60_1_1"}
{"score": 0.9922375082969666, "chain_id": "3IRIK4HM3AJT0DNPYBCWY7EPM7NC60_1_3"}
{"score": 0.9845481514930725, "chain_id": "3IRIK4HM3AJT0DNPYBCWY7EPM7NC60_1_4"}
{"score": 0.024634627625346184, "chain_id": "3IRIK4HM3AJT0DNPYBCWY7EPM7NC60_1_5"}
{"score": 0.08273178339004517, "chain_id": "3IRIK4HM3AJT0DNPYBCWY7EPM7NC60_1_6"}
{"score": 0.10871633887290955, "chain_id": "3IRIK4HM3AJT0DNPYBCWY7EPM7NC60_1_8"}
{"score": 0.2351071834564209, "chain_id": "3IRIK4HM3AJT0DNPYBCWY7EPM7NC60_1_9"}
{"score": 0.11787020415067673, "chain_id": "3IRIK4HM3AJT0DNPYBCWY7EPM7NC60_1_10"}
{"score": 0.26732251048088074, "chain_id": "39DD6S19JPAALLREW7F2LT7NCP6ZEK_1_1"}
{"score": 0.0302118007093668, "chain_id": "39DD6S19JPAALLREW7F2LT7NCP6ZEK_1_2"}
{"score": 0.03254568949341774, "chain_id": "39DD6S19JPAALLREW7F2LT7NCP6ZEK_1_3"}
{"score": 0.03786981478333473, "chain_id": "39DD6S19JPAALLREW7F2LT7NCP6ZEK_1_4"}
{"score": 0.5467883348464966, "chain_id": "39DD6S19JPAALLREW7F2LT7NCP6ZEK_1_5"}
{"score": 0.6016547679901123, "chain_id": "39DD6S19JPAALLREW7F2LT7NCP6ZEK_1_6"}
{"score": 0.48986542224884033, "chain_id": "39DD6S19JPAALLREW7F2LT7NCP6ZEK_1_7"}
{"score": 0.25840070843696594, "chain_id": "39DD6S19JPAALLREW7F2LT7NCP6ZEK_1_8"}
{"score": 0.4257572293281555, "chain_id": "39DD6S19JPAALLREW7F2LT7NCP6ZEK_1_9"}
{"score": 0.02791052684187889, "chain_id": "39DD6S19JPAALLREW7F2LT7NCP6ZEK_1_10"}
{"score": 0.9863876104354858, "chain_id": "3R2PKQ87NW7M2JUHD1FZY696QKYIMB_1_2"}
{"score": 0.9900268316268921, "chain_id": "3R2PKQ87NW7M2JUHD1FZY696QKYIMB_1_3"}
{"score": 0.9872927665710449, "chain_id": "3R2PKQ87NW7M2JUHD1FZY696QKYIMB_1_4"}
{"score": 0.9897409677505493, "chain_id": "3R2PKQ87NW7M2JUHD1FZY696QKYIMB_1_1"}
{"score": 0.3157305121421814, "chain_id": "3R2PKQ87NW7M2JUHD1FZY696QKYIMB_1_5"}
{"score": 0.15004883706569672, "chain_id": "3R2PKQ87NW7M2JUHD1FZY696QKYIMB_1_6"}
{"score": 0.03878612071275711, "chain_id": "3R2PKQ87NW7M2JUHD1FZY696QKYIMB_1_7"}
{"score": 0.06229649856686592, "chain_id": "3R2PKQ87NW7M2JUHD1FZY696QKYIMB_1_8"}
{"score": 0.14351356029510498, "chain_id": "3R2PKQ87NW7M2JUHD1FZY696QKYIMB_1_9"}
{"score": 0.746549665927887, "chain_id": "3R2PKQ87NW7M2JUHD1FZY696QKYIMB_1_10"}
{"score": 0.9886892437934875, "chain_id": "3BWI6RSP7G8R1BL8DCNJU9EO34I7E6_1_1"}
{"score": 0.9887287616729736, "chain_id": "3BWI6RSP7G8R1BL8DCNJU9EO34I7E6_1_2"}
{"score": 0.9921157956123352, "chain_id": "3BWI6RSP7G8R1BL8DCNJU9EO34I7E6_1_3"}
{"score": 0.9875671863555908, "chain_id": "3BWI6RSP7G8R1BL8DCNJU9EO34I7E6_1_4"}
{"score": 0.47544118762016296, "chain_id": "3BWI6RSP7G8R1BL8DCNJU9EO34I7E6_1_5"}
{"score": 0.37468233704566956, "chain_id": "3BWI6RSP7G8R1BL8DCNJU9EO34I7E6_1_6"}
{"score": 0.174798384308815, "chain_id": "3BWI6RSP7G8R1BL8DCNJU9EO34I7E6_1_7"}
{"score": 0.48049023747444153, "chain_id": "3BWI6RSP7G8R1BL8DCNJU9EO34I7E6_1_8"}
{"score": 0.046462107449769974, "chain_id": "3BWI6RSP7G8R1BL8DCNJU9EO34I7E6_1_9"}
{"score": 0.13396866619586945, "chain_id": "3BWI6RSP7G8R1BL8DCNJU9EO34I7E6_1_10"}
{"score": 0.9862035512924194, "chain_id": "3OVHNO1VE605TFDE0C4IFBP2DTWDZT_1_2"}
{"score": 0.9894059300422668, "chain_id": "3OVHNO1VE605TFDE0C4IFBP2DTWDZT_1_4"}
{"score": 0.24778400361537933, "chain_id": "3OVHNO1VE605TFDE0C4IFBP2DTWDZT_1_5"}
{"score": 0.9867532849311829, "chain_id": "3OVHNO1VE605TFDE0C4IFBP2DTWDZT_1_1"}
{"score": 0.9872337579727173, "chain_id": "3OVHNO1VE605TFDE0C4IFBP2DTWDZT_1_3"}
{"score": 0.532346785068512, "chain_id": "3OVHNO1VE605TFDE0C4IFBP2DTWDZT_1_6"}
{"score": 0.12080852687358856, "chain_id": "3OVHNO1VE605TFDE0C4IFBP2DTWDZT_1_7"}
{"score": 0.7177907824516296, "chain_id": "3OVHNO1VE605TFDE0C4IFBP2DTWDZT_1_8"}
{"score": 0.49679917097091675, "chain_id": "3OVHNO1VE605TFDE0C4IFBP2DTWDZT_1_9"}
{"score": 0.6516163349151611, "chain_id": "3OVHNO1VE605TFDE0C4IFBP2DTWDZT_1_10"}
{"score": 0.6961647272109985, "chain_id": "39RP059MEHSCFBGB7RNICJ5TTXSMBO_1_6"}
{"score": 0.34531551599502563, "chain_id": "39RP059MEHSCFBGB7RNICJ5TTXSMBO_1_7"}
{"score": 0.8145788311958313, "chain_id": "39RP059MEHSCFBGB7RNICJ5TTXSMBO_1_1"}
{"score": 0.18412886559963226, "chain_id": "39RP059MEHSCFBGB7RNICJ5TTXSMBO_1_2"}
{"score": 0.34641513228416443, "chain_id": "39RP059MEHSCFBGB7RNICJ5TTXSMBO_1_3"}
{"score": 0.1592598557472229, "chain_id": "39RP059MEHSCFBGB7RNICJ5TTXSMBO_1_4"}
{"score": 0.7885825037956238, "chain_id": "39RP059MEHSCFBGB7RNICJ5TTXSMBO_1_5"}
{"score": 0.15316729247570038, "chain_id": "39RP059MEHSCFBGB7RNICJ5TTXSMBO_1_8"}
{"score": 0.04726738482713699, "chain_id": "39RP059MEHSCFBGB7RNICJ5TTXSMBO_1_9"}
{"score": 0.029300443828105927, "chain_id": "39RP059MEHSCFBGB7RNICJ5TTXSMBO_1_10"}
{"score": 0.03796778619289398, "chain_id": "3D4CH1LGEASTZ85SY4BR88Q6BAJ9GZ_1_1"}
{"score": 0.48129454255104065, "chain_id": "3D4CH1LGEASTZ85SY4BR88Q6BAJ9GZ_1_2"}
{"score": 0.7254457473754883, "chain_id": "3D4CH1LGEASTZ85SY4BR88Q6BAJ9GZ_1_3"}
{"score": 0.04879710078239441, "chain_id": "3D4CH1LGEASTZ85SY4BR88Q6BAJ9GZ_1_4"}
{"score": 0.024941807612776756, "chain_id": "3D4CH1LGEASTZ85SY4BR88Q6BAJ9GZ_1_5"}
{"score": 0.03258330747485161, "chain_id": "3D4CH1LGEASTZ85SY4BR88Q6BAJ9GZ_1_6"}
{"score": 0.04876327887177467, "chain_id": "3D4CH1LGEASTZ85SY4BR88Q6BAJ9GZ_1_7"}
{"score": 0.4317256808280945, "chain_id": "3D4CH1LGEASTZ85SY4BR88Q6BAJ9GZ_1_8"}
{"score": 0.7155023217201233, "chain_id": "3D4CH1LGEASTZ85SY4BR88Q6BAJ9GZ_1_9"}
{"score": 0.7501763105392456, "chain_id": "3D4CH1LGEASTZ85SY4BR88Q6BAJ9GZ_1_10"}
{"score": 0.632850706577301, "chain_id": "33F859I566CQNXF0GU75KEXXEHNHBI_1_1"}
{"score": 0.6497350931167603, "chain_id": "33F859I566CQNXF0GU75KEXXEHNHBI_1_2"}
{"score": 0.72269207239151, "chain_id": "33F859I566CQNXF0GU75KEXXEHNHBI_1_3"}
{"score": 0.6675340533256531, "chain_id": "33F859I566CQNXF0GU75KEXXEHNHBI_1_4"}
{"score": 0.43412351608276367, "chain_id": "33F859I566CQNXF0GU75KEXXEHNHBI_1_5"}
{"score": 0.3214554488658905, "chain_id": "33F859I566CQNXF0GU75KEXXEHNHBI_1_6"}
{"score": 0.48860055208206177, "chain_id": "33F859I566CQNXF0GU75KEXXEHNHBI_1_7"}
{"score": 0.33549749851226807, "chain_id": "33F859I566CQNXF0GU75KEXXEHNHBI_1_8"}
{"score": 0.27967673540115356, "chain_id": "33F859I566CQNXF0GU75KEXXEHNHBI_1_9"}
{"score": 0.15416158735752106, "chain_id": "33F859I566CQNXF0GU75KEXXEHNHBI_1_10"}
{"score": 0.961983859539032, "chain_id": "3BF51CHDTV9P3ACQIEAG0X1ENIEH0U_1_1"}
{"score": 0.2342749536037445, "chain_id": "3BF51CHDTV9P3ACQIEAG0X1ENIEH0U_1_2"}
{"score": 0.822347104549408, "chain_id": "3BF51CHDTV9P3ACQIEAG0X1ENIEH0U_1_3"}
{"score": 0.5893095135688782, "chain_id": "3BF51CHDTV9P3ACQIEAG0X1ENIEH0U_1_4"}
{"score": 0.10031332075595856, "chain_id": "3BF51CHDTV9P3ACQIEAG0X1ENIEH0U_1_5"}
{"score": 0.2918059229850769, "chain_id": "3BF51CHDTV9P3ACQIEAG0X1ENIEH0U_1_6"}
{"score": 0.03458784520626068, "chain_id": "3BF51CHDTV9P3ACQIEAG0X1ENIEH0U_1_7"}
{"score": 0.2212289720773697, "chain_id": "3BF51CHDTV9P3ACQIEAG0X1ENIEH0U_1_8"}
{"score": 0.10795483738183975, "chain_id": "3BF51CHDTV9P3ACQIEAG0X1ENIEH0U_1_9"}
{"score": 0.15600699186325073, "chain_id": "3BF51CHDTV9P3ACQIEAG0X1ENIEH0U_1_10"}
{"score": 0.7940883636474609, "chain_id": "3AAJC4I4FGRIW1D6A8QTI9KFE7NZJ7_1_1"}
{"score": 0.43610405921936035, "chain_id": "3AAJC4I4FGRIW1D6A8QTI9KFE7NZJ7_1_2"}
{"score": 0.9714958071708679, "chain_id": "3AAJC4I4FGRIW1D6A8QTI9KFE7NZJ7_1_3"}
{"score": 0.537977933883667, "chain_id": "3AAJC4I4FGRIW1D6A8QTI9KFE7NZJ7_1_4"}
{"score": 0.12396659702062607, "chain_id": "3AAJC4I4FGRIW1D6A8QTI9KFE7NZJ7_1_5"}
{"score": 0.14577606320381165, "chain_id": "3AAJC4I4FGRIW1D6A8QTI9KFE7NZJ7_1_6"}
{"score": 0.4405944347381592, "chain_id": "3AAJC4I4FGRIW1D6A8QTI9KFE7NZJ7_1_7"}
{"score": 0.3367394804954529, "chain_id": "3AAJC4I4FGRIW1D6A8QTI9KFE7NZJ7_1_8"}
{"score": 0.31602099537849426, "chain_id": "3AAJC4I4FGRIW1D6A8QTI9KFE7NZJ7_1_9"}
{"score": 0.039668839424848557, "chain_id": "3AAJC4I4FGRIW1D6A8QTI9KFE7NZJ7_1_10"}
{"score": 0.917353630065918, "chain_id": "3LRLIPTPEQ8C6DBGG1A62VTJKFDKAV_1_2"}
{"score": 0.9652080535888672, "chain_id": "3LRLIPTPEQ8C6DBGG1A62VTJKFDKAV_1_7"}
{"score": 0.9701718091964722, "chain_id": "3LRLIPTPEQ8C6DBGG1A62VTJKFDKAV_1_1"}
{"score": 0.9325137138366699, "chain_id": "3LRLIPTPEQ8C6DBGG1A62VTJKFDKAV_1_3"}
{"score": 0.7585666179656982, "chain_id": "3LRLIPTPEQ8C6DBGG1A62VTJKFDKAV_1_4"}
{"score": 0.4730243384838104, "chain_id": "3LRLIPTPEQ8C6DBGG1A62VTJKFDKAV_1_5"}
{"score": 0.09703920036554337, "chain_id": "3LRLIPTPEQ8C6DBGG1A62VTJKFDKAV_1_6"}
{"score": 0.04119850695133209, "chain_id": "3LRLIPTPEQ8C6DBGG1A62VTJKFDKAV_1_8"}
{"score": 0.5896634459495544, "chain_id": "3LRLIPTPEQ8C6DBGG1A62VTJKFDKAV_1_9"}
{"score": 0.6608075499534607, "chain_id": "3LRLIPTPEQ8C6DBGG1A62VTJKFDKAV_1_10"}
{"score": 0.9589043259620667, "chain_id": "3IHR8NYAM70YFFSFKS5NL9TIMNB4P9_1_1"}
{"score": 0.9349663853645325, "chain_id": "3IHR8NYAM70YFFSFKS5NL9TIMNB4P9_1_2"}
{"score": 0.9746468663215637, "chain_id": "3IHR8NYAM70YFFSFKS5NL9TIMNB4P9_1_7"}
{"score": 0.9145552515983582, "chain_id": "3IHR8NYAM70YFFSFKS5NL9TIMNB4P9_1_3"}
{"score": 0.7304132580757141, "chain_id": "3IHR8NYAM70YFFSFKS5NL9TIMNB4P9_1_4"}
{"score": 0.5302850008010864, "chain_id": "3IHR8NYAM70YFFSFKS5NL9TIMNB4P9_1_5"}
{"score": 0.09270224720239639, "chain_id": "3IHR8NYAM70YFFSFKS5NL9TIMNB4P9_1_6"}
{"score": 0.047465622425079346, "chain_id": "3IHR8NYAM70YFFSFKS5NL9TIMNB4P9_1_8"}
{"score": 0.6440563201904297, "chain_id": "3IHR8NYAM70YFFSFKS5NL9TIMNB4P9_1_9"}
{"score": 0.6757715344429016, "chain_id": "3IHR8NYAM70YFFSFKS5NL9TIMNB4P9_1_10"}
{"score": 0.3513355851173401, "chain_id": "3FTF2T8WLRHPWUVSD9F9UBCU1EJ9W0_1_1"}
{"score": 0.06857025623321533, "chain_id": "3FTF2T8WLRHPWUVSD9F9UBCU1EJ9W0_1_2"}
{"score": 0.04360519349575043, "chain_id": "3FTF2T8WLRHPWUVSD9F9UBCU1EJ9W0_1_3"}
{"score": 0.028131000697612762, "chain_id": "3FTF2T8WLRHPWUVSD9F9UBCU1EJ9W0_1_4"}
{"score": 0.028264174237847328, "chain_id": "3FTF2T8WLRHPWUVSD9F9UBCU1EJ9W0_1_5"}
{"score": 0.0721694678068161, "chain_id": "3FTF2T8WLRHPWUVSD9F9UBCU1EJ9W0_1_6"}
{"score": 0.08043333142995834, "chain_id": "3FTF2T8WLRHPWUVSD9F9UBCU1EJ9W0_1_7"}
{"score": 0.015981631353497505, "chain_id": "3FTF2T8WLRHPWUVSD9F9UBCU1EJ9W0_1_8"}
{"score": 0.10436472296714783, "chain_id": "3FTF2T8WLRHPWUVSD9F9UBCU1EJ9W0_1_9"}
{"score": 0.07220504432916641, "chain_id": "3FTF2T8WLRHPWUVSD9F9UBCU1EJ9W0_1_10"}
{"score": 0.985713005065918, "chain_id": "3N2BF7Y2VQTM6OJX7JXEYU8RNRYMH2_1_1"}
{"score": 0.07304055243730545, "chain_id": "3N2BF7Y2VQTM6OJX7JXEYU8RNRYMH2_1_2"}
{"score": 0.11324045062065125, "chain_id": "3N2BF7Y2VQTM6OJX7JXEYU8RNRYMH2_1_3"}
{"score": 0.977771520614624, "chain_id": "3N2BF7Y2VQTM6OJX7JXEYU8RNRYMH2_1_4"}
{"score": 0.044970620423555374, "chain_id": "3N2BF7Y2VQTM6OJX7JXEYU8RNRYMH2_1_5"}
{"score": 0.05280961096286774, "chain_id": "3N2BF7Y2VQTM6OJX7JXEYU8RNRYMH2_1_6"}
{"score": 0.07238810509443283, "chain_id": "3N2BF7Y2VQTM6OJX7JXEYU8RNRYMH2_1_7"}
{"score": 0.07450413703918457, "chain_id": "3N2BF7Y2VQTM6OJX7JXEYU8RNRYMH2_1_8"}
{"score": 0.5176454186439514, "chain_id": "3N2BF7Y2VQTM6OJX7JXEYU8RNRYMH2_1_9"}
{"score": 0.03446251153945923, "chain_id": "3N2BF7Y2VQTM6OJX7JXEYU8RNRYMH2_1_10"}
{"score": 0.23984076082706451, "chain_id": "358UUM7WRZ2GAFQDZI7JTGD7G047RR_1_1"}
{"score": 0.45518559217453003, "chain_id": "358UUM7WRZ2GAFQDZI7JTGD7G047RR_1_2"}
{"score": 0.49533185362815857, "chain_id": "358UUM7WRZ2GAFQDZI7JTGD7G047RR_1_3"}
{"score": 0.849315345287323, "chain_id": "358UUM7WRZ2GAFQDZI7JTGD7G047RR_1_4"}
{"score": 0.027302678674459457, "chain_id": "358UUM7WRZ2GAFQDZI7JTGD7G047RR_1_5"}
{"score": 0.3214986324310303, "chain_id": "358UUM7WRZ2GAFQDZI7JTGD7G047RR_1_6"}
{"score": 0.2792038321495056, "chain_id": "358UUM7WRZ2GAFQDZI7JTGD7G047RR_1_7"}
{"score": 0.4352929890155792, "chain_id": "358UUM7WRZ2GAFQDZI7JTGD7G047RR_1_8"}
{"score": 0.23357392847537994, "chain_id": "358UUM7WRZ2GAFQDZI7JTGD7G047RR_1_9"}
{"score": 0.08674817532300949, "chain_id": "358UUM7WRZ2GAFQDZI7JTGD7G047RR_1_10"}
{"score": 0.2388557642698288, "chain_id": "3FQ5JJ512LNJQW55P5FBO1DJJIINKG_1_1"}
{"score": 0.05814632400870323, "chain_id": "3FQ5JJ512LNJQW55P5FBO1DJJIINKG_1_2"}
{"score": 0.7087937593460083, "chain_id": "3FQ5JJ512LNJQW55P5FBO1DJJIINKG_1_3"}
{"score": 0.22205083072185516, "chain_id": "3FQ5JJ512LNJQW55P5FBO1DJJIINKG_1_4"}
{"score": 0.018464768305420876, "chain_id": "3FQ5JJ512LNJQW55P5FBO1DJJIINKG_1_5"}
{"score": 0.16113297641277313, "chain_id": "3FQ5JJ512LNJQW55P5FBO1DJJIINKG_1_6"}
{"score": 0.10597579926252365, "chain_id": "3FQ5JJ512LNJQW55P5FBO1DJJIINKG_1_7"}
{"score": 0.1242380291223526, "chain_id": "3FQ5JJ512LNJQW55P5FBO1DJJIINKG_1_8"}
{"score": 0.014278772287070751, "chain_id": "3FQ5JJ512LNJQW55P5FBO1DJJIINKG_1_9"}
{"score": 0.02727634645998478, "chain_id": "3FQ5JJ512LNJQW55P5FBO1DJJIINKG_1_10"}
{"score": 0.03728727623820305, "chain_id": "3YOH7BII096WY1EERW12YI7W5LOVK9_1_1"}
{"score": 0.048388831317424774, "chain_id": "3YOH7BII096WY1EERW12YI7W5LOVK9_1_2"}
{"score": 0.059808555990457535, "chain_id": "3YOH7BII096WY1EERW12YI7W5LOVK9_1_3"}
{"score": 0.03824532404541969, "chain_id": "3YOH7BII096WY1EERW12YI7W5LOVK9_1_4"}
{"score": 0.0204011183232069, "chain_id": "3YOH7BII096WY1EERW12YI7W5LOVK9_1_5"}
{"score": 0.03039322793483734, "chain_id": "3YOH7BII096WY1EERW12YI7W5LOVK9_1_6"}
{"score": 0.036085858941078186, "chain_id": "3YOH7BII096WY1EERW12YI7W5LOVK9_1_7"}
{"score": 0.03221205249428749, "chain_id": "3YOH7BII096WY1EERW12YI7W5LOVK9_1_8"}
{"score": 0.02531716413795948, "chain_id": "3YOH7BII096WY1EERW12YI7W5LOVK9_1_9"}
{"score": 0.012263134121894836, "chain_id": "3YOH7BII096WY1EERW12YI7W5LOVK9_1_10"}
{"score": 0.045744750648736954, "chain_id": "3SKRO2GZ71QGCPYGKIHDRU0GGVK1KL_1_8"}
{"score": 0.9244514107704163, "chain_id": "3SKRO2GZ71QGCPYGKIHDRU0GGVK1KL_1_1"}
{"score": 0.6606931090354919, "chain_id": "3SKRO2GZ71QGCPYGKIHDRU0GGVK1KL_1_2"}
{"score": 0.6859264373779297, "chain_id": "3SKRO2GZ71QGCPYGKIHDRU0GGVK1KL_1_3"}
{"score": 0.2735801637172699, "chain_id": "3SKRO2GZ71QGCPYGKIHDRU0GGVK1KL_1_4"}
{"score": 0.04954485967755318, "chain_id": "3SKRO2GZ71QGCPYGKIHDRU0GGVK1KL_1_5"}
{"score": 0.048599135130643845, "chain_id": "3SKRO2GZ71QGCPYGKIHDRU0GGVK1KL_1_6"}
{"score": 0.06125812977552414, "chain_id": "3SKRO2GZ71QGCPYGKIHDRU0GGVK1KL_1_7"}
{"score": 0.05909932404756546, "chain_id": "3SKRO2GZ71QGCPYGKIHDRU0GGVK1KL_1_9"}
{"score": 0.046580445021390915, "chain_id": "3SKRO2GZ71QGCPYGKIHDRU0GGVK1KL_1_10"}
{"score": 0.5912509560585022, "chain_id": "3XM0HYN6NKYG7HP89YH0UV59ZJEPEN_1_1"}
{"score": 0.6210793256759644, "chain_id": "3XM0HYN6NKYG7HP89YH0UV59ZJEPEN_1_2"}
{"score": 0.5707592368125916, "chain_id": "3XM0HYN6NKYG7HP89YH0UV59ZJEPEN_1_5"}
{"score": 0.10291554778814316, "chain_id": "3XM0HYN6NKYG7HP89YH0UV59ZJEPEN_1_3"}
{"score": 0.07379084080457687, "chain_id": "3XM0HYN6NKYG7HP89YH0UV59ZJEPEN_1_4"}
{"score": 0.09928205609321594, "chain_id": "3XM0HYN6NKYG7HP89YH0UV59ZJEPEN_1_6"}
{"score": 0.05089631676673889, "chain_id": "3XM0HYN6NKYG7HP89YH0UV59ZJEPEN_1_7"}
{"score": 0.37358710169792175, "chain_id": "3XM0HYN6NKYG7HP89YH0UV59ZJEPEN_1_8"}
{"score": 0.017863688990473747, "chain_id": "3XM0HYN6NKYG7HP89YH0UV59ZJEPEN_1_9"}
{"score": 0.056328218430280685, "chain_id": "3XM0HYN6NKYG7HP89YH0UV59ZJEPEN_1_10"}
{"score": 0.9717729091644287, "chain_id": "3SB4CE2TJVUIQDANFKPVSP1LIO8XAF_1_1"}
{"score": 0.9819886684417725, "chain_id": "3SB4CE2TJVUIQDANFKPVSP1LIO8XAF_1_2"}
{"score": 0.916419267654419, "chain_id": "3SB4CE2TJVUIQDANFKPVSP1LIO8XAF_1_7"}
{"score": 0.8743537664413452, "chain_id": "3SB4CE2TJVUIQDANFKPVSP1LIO8XAF_1_3"}
{"score": 0.9346250295639038, "chain_id": "3SB4CE2TJVUIQDANFKPVSP1LIO8XAF_1_4"}
{"score": 0.8854311108589172, "chain_id": "3SB4CE2TJVUIQDANFKPVSP1LIO8XAF_1_5"}
{"score": 0.7497885823249817, "chain_id": "3SB4CE2TJVUIQDANFKPVSP1LIO8XAF_1_6"}
{"score": 0.06130722910165787, "chain_id": "3SB4CE2TJVUIQDANFKPVSP1LIO8XAF_1_8"}
{"score": 0.0342496782541275, "chain_id": "3SB4CE2TJVUIQDANFKPVSP1LIO8XAF_1_9"}
{"score": 0.04679781571030617, "chain_id": "3SB4CE2TJVUIQDANFKPVSP1LIO8XAF_1_10"}
{"score": 0.9482936859130859, "chain_id": "3U4J9857OEATU89O3LLTT183WOYB7X_1_1"}
{"score": 0.89585942029953, "chain_id": "3U4J9857OEATU89O3LLTT183WOYB7X_1_2"}
{"score": 0.036150623112916946, "chain_id": "3U4J9857OEATU89O3LLTT183WOYB7X_1_3"}
{"score": 0.020232105627655983, "chain_id": "3U4J9857OEATU89O3LLTT183WOYB7X_1_4"}
{"score": 0.08667833358049393, "chain_id": "3U4J9857OEATU89O3LLTT183WOYB7X_1_5"}
{"score": 0.03115977719426155, "chain_id": "3U4J9857OEATU89O3LLTT183WOYB7X_1_6"}
{"score": 0.0582621693611145, "chain_id": "3U4J9857OEATU89O3LLTT183WOYB7X_1_7"}
{"score": 0.24668438732624054, "chain_id": "3U4J9857OEATU89O3LLTT183WOYB7X_1_8"}
{"score": 0.3072498142719269, "chain_id": "3U4J9857OEATU89O3LLTT183WOYB7X_1_9"}
{"score": 0.05097072198987007, "chain_id": "3U4J9857OEATU89O3LLTT183WOYB7X_1_10"}
{"score": 0.7977977991104126, "chain_id": "3M1CVSFP604YHG9BT6U3YH5SKVXQA6_1_1"}
{"score": 0.3768554925918579, "chain_id": "3M1CVSFP604YHG9BT6U3YH5SKVXQA6_1_2"}
{"score": 0.06257230788469315, "chain_id": "3M1CVSFP604YHG9BT6U3YH5SKVXQA6_1_3"}
{"score": 0.5234935283660889, "chain_id": "3M1CVSFP604YHG9BT6U3YH5SKVXQA6_1_4"}
{"score": 0.033009354025125504, "chain_id": "3M1CVSFP604YHG9BT6U3YH5SKVXQA6_1_5"}
{"score": 0.3268662393093109, "chain_id": "3M1CVSFP604YHG9BT6U3YH5SKVXQA6_1_6"}
{"score": 0.022826001048088074, "chain_id": "3M1CVSFP604YHG9BT6U3YH5SKVXQA6_1_7"}
{"score": 0.2432868629693985, "chain_id": "3M1CVSFP604YHG9BT6U3YH5SKVXQA6_1_8"}
{"score": 0.1956099271774292, "chain_id": "3M1CVSFP604YHG9BT6U3YH5SKVXQA6_1_9"}
{"score": 0.6840819120407104, "chain_id": "3M1CVSFP604YHG9BT6U3YH5SKVXQA6_1_10"}
{"score": 0.9460564851760864, "chain_id": "3NS0A6KXC4785ZN5225QLWSZNXCZGA_1_3"}
{"score": 0.8024733066558838, "chain_id": "3NS0A6KXC4785ZN5225QLWSZNXCZGA_1_4"}
{"score": 0.3233587145805359, "chain_id": "3NS0A6KXC4785ZN5225QLWSZNXCZGA_1_6"}
{"score": 0.2748965322971344, "chain_id": "3NS0A6KXC4785ZN5225QLWSZNXCZGA_1_1"}
{"score": 0.7318570017814636, "chain_id": "3NS0A6KXC4785ZN5225QLWSZNXCZGA_1_2"}
{"score": 0.07358869165182114, "chain_id": "3NS0A6KXC4785ZN5225QLWSZNXCZGA_1_5"}
{"score": 0.0732140764594078, "chain_id": "3NS0A6KXC4785ZN5225QLWSZNXCZGA_1_7"}
{"score": 0.48958665132522583, "chain_id": "3NS0A6KXC4785ZN5225QLWSZNXCZGA_1_8"}
{"score": 0.05375639721751213, "chain_id": "3NS0A6KXC4785ZN5225QLWSZNXCZGA_1_9"}
{"score": 0.07142043113708496, "chain_id": "3NS0A6KXC4785ZN5225QLWSZNXCZGA_1_10"}
{"score": 0.9922401905059814, "chain_id": "39PAAFCODMZV1K41L5FUZ9USOZSVTU_1_2"}
{"score": 0.6885374784469604, "chain_id": "39PAAFCODMZV1K41L5FUZ9USOZSVTU_1_4"}
{"score": 0.5763446688652039, "chain_id": "39PAAFCODMZV1K41L5FUZ9USOZSVTU_1_7"}
{"score": 0.0675802156329155, "chain_id": "39PAAFCODMZV1K41L5FUZ9USOZSVTU_1_1"}
{"score": 0.07204701751470566, "chain_id": "39PAAFCODMZV1K41L5FUZ9USOZSVTU_1_3"}
{"score": 0.2584819197654724, "chain_id": "39PAAFCODMZV1K41L5FUZ9USOZSVTU_1_5"}
{"score": 0.841670036315918, "chain_id": "39PAAFCODMZV1K41L5FUZ9USOZSVTU_1_6"}
{"score": 0.2055550366640091, "chain_id": "39PAAFCODMZV1K41L5FUZ9USOZSVTU_1_8"}
{"score": 0.685763418674469, "chain_id": "39PAAFCODMZV1K41L5FUZ9USOZSVTU_1_9"}
{"score": 0.05219142511487007, "chain_id": "39PAAFCODMZV1K41L5FUZ9USOZSVTU_1_10"}
{"score": 0.9361492991447449, "chain_id": "3Q5C1WP23M0DU6DDDVD7P5HYJ5J51Y_1_1"}
{"score": 0.9171412587165833, "chain_id": "3Q5C1WP23M0DU6DDDVD7P5HYJ5J51Y_1_3"}
{"score": 0.27948349714279175, "chain_id": "3Q5C1WP23M0DU6DDDVD7P5HYJ5J51Y_1_5"}
{"score": 0.3492262363433838, "chain_id": "3Q5C1WP23M0DU6DDDVD7P5HYJ5J51Y_1_2"}
{"score": 0.7104056477546692, "chain_id": "3Q5C1WP23M0DU6DDDVD7P5HYJ5J51Y_1_4"}
{"score": 0.6536765694618225, "chain_id": "3Q5C1WP23M0DU6DDDVD7P5HYJ5J51Y_1_6"}
{"score": 0.3090682923793793, "chain_id": "3Q5C1WP23M0DU6DDDVD7P5HYJ5J51Y_1_7"}
{"score": 0.456182599067688, "chain_id": "3Q5C1WP23M0DU6DDDVD7P5HYJ5J51Y_1_8"}
{"score": 0.1744493693113327, "chain_id": "3Q5C1WP23M0DU6DDDVD7P5HYJ5J51Y_1_9"}
{"score": 0.2736692726612091, "chain_id": "3Q5C1WP23M0DU6DDDVD7P5HYJ5J51Y_1_10"}
{"score": 0.1575784832239151, "chain_id": "3ERET4BTVM8Y1U1BOVW660IZECOK93_1_1"}
{"score": 0.10015495121479034, "chain_id": "3ERET4BTVM8Y1U1BOVW660IZECOK93_1_2"}
{"score": 0.8442806601524353, "chain_id": "3ERET4BTVM8Y1U1BOVW660IZECOK93_1_3"}
{"score": 0.7616512179374695, "chain_id": "3ERET4BTVM8Y1U1BOVW660IZECOK93_1_4"}
{"score": 0.21828573942184448, "chain_id": "3ERET4BTVM8Y1U1BOVW660IZECOK93_1_5"}
{"score": 0.07444896548986435, "chain_id": "3ERET4BTVM8Y1U1BOVW660IZECOK93_1_6"}
{"score": 0.373539537191391, "chain_id": "3ERET4BTVM8Y1U1BOVW660IZECOK93_1_7"}
{"score": 0.7773420214653015, "chain_id": "3ERET4BTVM8Y1U1BOVW660IZECOK93_1_8"}
{"score": 0.9212695360183716, "chain_id": "3ERET4BTVM8Y1U1BOVW660IZECOK93_1_9"}
{"score": 0.029797552153468132, "chain_id": "3ERET4BTVM8Y1U1BOVW660IZECOK93_1_10"}
{"score": 0.08703559637069702, "chain_id": "3K772S5NP8AOU0RKQL9VLM3IENYHEV_1_6"}
{"score": 0.02067827247083187, "chain_id": "3K772S5NP8AOU0RKQL9VLM3IENYHEV_1_1"}
{"score": 0.08127384632825851, "chain_id": "3K772S5NP8AOU0RKQL9VLM3IENYHEV_1_2"}
{"score": 0.08527060598134995, "chain_id": "3K772S5NP8AOU0RKQL9VLM3IENYHEV_1_3"}
{"score": 0.023505039513111115, "chain_id": "3K772S5NP8AOU0RKQL9VLM3IENYHEV_1_4"}
{"score": 0.017912637442350388, "chain_id": "3K772S5NP8AOU0RKQL9VLM3IENYHEV_1_5"}
{"score": 0.1066814586520195, "chain_id": "3K772S5NP8AOU0RKQL9VLM3IENYHEV_1_7"}
{"score": 0.014603731222450733, "chain_id": "3K772S5NP8AOU0RKQL9VLM3IENYHEV_1_8"}
{"score": 0.07637939602136612, "chain_id": "3K772S5NP8AOU0RKQL9VLM3IENYHEV_1_9"}
{"score": 0.03991691395640373, "chain_id": "3K772S5NP8AOU0RKQL9VLM3IENYHEV_1_10"}
{"score": 0.3363167345523834, "chain_id": "39U1BHVTDLQBPB2I1V9OGE29Y153TK_1_6"}
{"score": 0.06609123200178146, "chain_id": "39U1BHVTDLQBPB2I1V9OGE29Y153TK_1_1"}
{"score": 0.646805465221405, "chain_id": "39U1BHVTDLQBPB2I1V9OGE29Y153TK_1_2"}
{"score": 0.23487292230129242, "chain_id": "39U1BHVTDLQBPB2I1V9OGE29Y153TK_1_3"}
{"score": 0.16404545307159424, "chain_id": "39U1BHVTDLQBPB2I1V9OGE29Y153TK_1_4"}
{"score": 0.08145501464605331, "chain_id": "39U1BHVTDLQBPB2I1V9OGE29Y153TK_1_5"}
{"score": 0.07381689548492432, "chain_id": "39U1BHVTDLQBPB2I1V9OGE29Y153TK_1_7"}
{"score": 0.06440557539463043, "chain_id": "39U1BHVTDLQBPB2I1V9OGE29Y153TK_1_8"}
{"score": 0.20917852222919464, "chain_id": "39U1BHVTDLQBPB2I1V9OGE29Y153TK_1_9"}
{"score": 0.055658675730228424, "chain_id": "39U1BHVTDLQBPB2I1V9OGE29Y153TK_1_10"}
{"score": 0.5853492021560669, "chain_id": "33OOO72IVHKZ2BY1UOKP9H635OUTC5_1_1"}
{"score": 0.541861355304718, "chain_id": "33OOO72IVHKZ2BY1UOKP9H635OUTC5_1_2"}
{"score": 0.5370045900344849, "chain_id": "33OOO72IVHKZ2BY1UOKP9H635OUTC5_1_3"}
{"score": 0.172322079539299, "chain_id": "33OOO72IVHKZ2BY1UOKP9H635OUTC5_1_4"}
{"score": 0.07007192075252533, "chain_id": "33OOO72IVHKZ2BY1UOKP9H635OUTC5_1_5"}
{"score": 0.2385946363210678, "chain_id": "33OOO72IVHKZ2BY1UOKP9H635OUTC5_1_6"}
{"score": 0.06192729249596596, "chain_id": "33OOO72IVHKZ2BY1UOKP9H635OUTC5_1_7"}
{"score": 0.20865321159362793, "chain_id": "33OOO72IVHKZ2BY1UOKP9H635OUTC5_1_8"}
{"score": 0.021112041547894478, "chain_id": "33OOO72IVHKZ2BY1UOKP9H635OUTC5_1_9"}
{"score": 0.016174977645277977, "chain_id": "33OOO72IVHKZ2BY1UOKP9H635OUTC5_1_10"}
{"score": 0.9911337494850159, "chain_id": "3JMSRU9HQITTC1M4VAQZ0NURNMGVEB_1_1"}
{"score": 0.5783217549324036, "chain_id": "3JMSRU9HQITTC1M4VAQZ0NURNMGVEB_1_8"}
{"score": 0.5941488146781921, "chain_id": "3JMSRU9HQITTC1M4VAQZ0NURNMGVEB_1_2"}
{"score": 0.9090446829795837, "chain_id": "3JMSRU9HQITTC1M4VAQZ0NURNMGVEB_1_3"}
{"score": 0.6450496912002563, "chain_id": "3JMSRU9HQITTC1M4VAQZ0NURNMGVEB_1_4"}
{"score": 0.2559070587158203, "chain_id": "3JMSRU9HQITTC1M4VAQZ0NURNMGVEB_1_5"}
{"score": 0.09087260812520981, "chain_id": "3JMSRU9HQITTC1M4VAQZ0NURNMGVEB_1_6"}
{"score": 0.3520236313343048, "chain_id": "3JMSRU9HQITTC1M4VAQZ0NURNMGVEB_1_7"}
{"score": 0.020719120278954506, "chain_id": "3JMSRU9HQITTC1M4VAQZ0NURNMGVEB_1_9"}
{"score": 0.09469466656446457, "chain_id": "3JMSRU9HQITTC1M4VAQZ0NURNMGVEB_1_10"}
{"score": 0.14441326260566711, "chain_id": "3G5W44VEU7HDG4OJ212GYH4MKBVKGG_1_1"}
{"score": 0.8026977181434631, "chain_id": "3G5W44VEU7HDG4OJ212GYH4MKBVKGG_1_2"}
{"score": 0.47091931104660034, "chain_id": "3G5W44VEU7HDG4OJ212GYH4MKBVKGG_1_3"}
{"score": 0.1441594511270523, "chain_id": "3G5W44VEU7HDG4OJ212GYH4MKBVKGG_1_4"}
{"score": 0.802116870880127, "chain_id": "3G5W44VEU7HDG4OJ212GYH4MKBVKGG_1_5"}
{"score": 0.6451898217201233, "chain_id": "3G5W44VEU7HDG4OJ212GYH4MKBVKGG_1_6"}
{"score": 0.6994529962539673, "chain_id": "3G5W44VEU7HDG4OJ212GYH4MKBVKGG_1_7"}
{"score": 0.12506970763206482, "chain_id": "3G5W44VEU7HDG4OJ212GYH4MKBVKGG_1_8"}
{"score": 0.024535352364182472, "chain_id": "3G5W44VEU7HDG4OJ212GYH4MKBVKGG_1_9"}
{"score": 0.02314160391688347, "chain_id": "3G5W44VEU7HDG4OJ212GYH4MKBVKGG_1_10"}
{"score": 0.055780716240406036, "chain_id": "3FQ5JJ512LNJQW55P5FBO1DJLPNNK1_1_1"}
{"score": 0.6535148024559021, "chain_id": "3FQ5JJ512LNJQW55P5FBO1DJLPNNK1_1_2"}
{"score": 0.17904354631900787, "chain_id": "3FQ5JJ512LNJQW55P5FBO1DJLPNNK1_1_3"}
{"score": 0.02608206495642662, "chain_id": "3FQ5JJ512LNJQW55P5FBO1DJLPNNK1_1_4"}
{"score": 0.12402775883674622, "chain_id": "3FQ5JJ512LNJQW55P5FBO1DJLPNNK1_1_5"}
{"score": 0.1287272572517395, "chain_id": "3FQ5JJ512LNJQW55P5FBO1DJLPNNK1_1_6"}
{"score": 0.09997013211250305, "chain_id": "3FQ5JJ512LNJQW55P5FBO1DJLPNNK1_1_7"}
{"score": 0.43391239643096924, "chain_id": "3FQ5JJ512LNJQW55P5FBO1DJLPNNK1_1_8"}
{"score": 0.044956814497709274, "chain_id": "3FQ5JJ512LNJQW55P5FBO1DJLPNNK1_1_9"}
{"score": 0.0453813262283802, "chain_id": "3FQ5JJ512LNJQW55P5FBO1DJLPNNK1_1_10"}
{"score": 0.7421324849128723, "chain_id": "3Y9N9SS8LYA48M6LF599BAKNVHGD3E_1_1"}
{"score": 0.4418417811393738, "chain_id": "3Y9N9SS8LYA48M6LF599BAKNVHGD3E_1_2"}
{"score": 0.8315750360488892, "chain_id": "3Y9N9SS8LYA48M6LF599BAKNVHGD3E_1_3"}
{"score": 0.043185099959373474, "chain_id": "3Y9N9SS8LYA48M6LF599BAKNVHGD3E_1_4"}
{"score": 0.03799821063876152, "chain_id": "3Y9N9SS8LYA48M6LF599BAKNVHGD3E_1_5"}
{"score": 0.9104287624359131, "chain_id": "3Y9N9SS8LYA48M6LF599BAKNVHGD3E_1_6"}
{"score": 0.38110312819480896, "chain_id": "3Y9N9SS8LYA48M6LF599BAKNVHGD3E_1_7"}
{"score": 0.08163082599639893, "chain_id": "3Y9N9SS8LYA48M6LF599BAKNVHGD3E_1_8"}
{"score": 0.4066452085971832, "chain_id": "3Y9N9SS8LYA48M6LF599BAKNVHGD3E_1_9"}
{"score": 0.06569928675889969, "chain_id": "3Y9N9SS8LYA48M6LF599BAKNVHGD3E_1_10"}
{"score": 0.9689311981201172, "chain_id": "3YZ8UPK3VTLE2ODQUTAZEDS5PY5CUC_1_1"}
{"score": 0.9723588824272156, "chain_id": "3YZ8UPK3VTLE2ODQUTAZEDS5PY5CUC_1_2"}
{"score": 0.9840599298477173, "chain_id": "3YZ8UPK3VTLE2ODQUTAZEDS5PY5CUC_1_3"}
{"score": 0.9811029434204102, "chain_id": "3YZ8UPK3VTLE2ODQUTAZEDS5PY5CUC_1_4"}
{"score": 0.7921320796012878, "chain_id": "3YZ8UPK3VTLE2ODQUTAZEDS5PY5CUC_1_5"}
{"score": 0.7302311658859253, "chain_id": "3YZ8UPK3VTLE2ODQUTAZEDS5PY5CUC_1_6"}
{"score": 0.6215923428535461, "chain_id": "3YZ8UPK3VTLE2ODQUTAZEDS5PY5CUC_1_7"}
{"score": 0.6785271167755127, "chain_id": "3YZ8UPK3VTLE2ODQUTAZEDS5PY5CUC_1_8"}
{"score": 0.34298649430274963, "chain_id": "3YZ8UPK3VTLE2ODQUTAZEDS5PY5CUC_1_9"}
{"score": 0.3159642219543457, "chain_id": "3YZ8UPK3VTLE2ODQUTAZEDS5PY5CUC_1_10"}
{"score": 0.9725301265716553, "chain_id": "30IQTZXKAK5MP0C5NIS23JP8BW0X0I_1_1"}
{"score": 0.9719923734664917, "chain_id": "30IQTZXKAK5MP0C5NIS23JP8BW0X0I_1_2"}
{"score": 0.9852584004402161, "chain_id": "30IQTZXKAK5MP0C5NIS23JP8BW0X0I_1_3"}
{"score": 0.9820054769515991, "chain_id": "30IQTZXKAK5MP0C5NIS23JP8BW0X0I_1_4"}
{"score": 0.8396161198616028, "chain_id": "30IQTZXKAK5MP0C5NIS23JP8BW0X0I_1_7"}
{"score": 0.795762300491333, "chain_id": "30IQTZXKAK5MP0C5NIS23JP8BW0X0I_1_8"}
{"score": 0.8651369214057922, "chain_id": "30IQTZXKAK5MP0C5NIS23JP8BW0X0I_1_5"}
{"score": 0.737517237663269, "chain_id": "30IQTZXKAK5MP0C5NIS23JP8BW0X0I_1_6"}
{"score": 0.1252746731042862, "chain_id": "30IQTZXKAK5MP0C5NIS23JP8BW0X0I_1_9"}
{"score": 0.16538701951503754, "chain_id": "30IQTZXKAK5MP0C5NIS23JP8BW0X0I_1_10"}
{"score": 0.9798846244812012, "chain_id": "33FOTY3KEMKYTRMSS50F3BN8FDKC1E_1_1"}
{"score": 0.979206919670105, "chain_id": "33FOTY3KEMKYTRMSS50F3BN8FDKC1E_1_2"}
{"score": 0.9870153665542603, "chain_id": "33FOTY3KEMKYTRMSS50F3BN8FDKC1E_1_3"}
{"score": 0.9847086668014526, "chain_id": "33FOTY3KEMKYTRMSS50F3BN8FDKC1E_1_4"}
{"score": 0.8379072546958923, "chain_id": "33FOTY3KEMKYTRMSS50F3BN8FDKC1E_1_5"}
{"score": 0.6948103904724121, "chain_id": "33FOTY3KEMKYTRMSS50F3BN8FDKC1E_1_6"}
{"score": 0.8184589743614197, "chain_id": "33FOTY3KEMKYTRMSS50F3BN8FDKC1E_1_8"}
{"score": 0.8596130609512329, "chain_id": "33FOTY3KEMKYTRMSS50F3BN8FDKC1E_1_7"}
{"score": 0.16324222087860107, "chain_id": "33FOTY3KEMKYTRMSS50F3BN8FDKC1E_1_9"}
{"score": 0.22055529057979584, "chain_id": "33FOTY3KEMKYTRMSS50F3BN8FDKC1E_1_10"}
{"score": 0.9826927185058594, "chain_id": "3X87C8JFV6A2HCV5A6GUJHZY9Y2QSX_1_1"}
{"score": 0.9838824272155762, "chain_id": "3X87C8JFV6A2HCV5A6GUJHZY9Y2QSX_1_2"}
{"score": 0.9861837029457092, "chain_id": "3X87C8JFV6A2HCV5A6GUJHZY9Y2QSX_1_3"}
{"score": 0.9843722581863403, "chain_id": "3X87C8JFV6A2HCV5A6GUJHZY9Y2QSX_1_4"}
{"score": 0.899387776851654, "chain_id": "3X87C8JFV6A2HCV5A6GUJHZY9Y2QSX_1_5"}
{"score": 0.8447761535644531, "chain_id": "3X87C8JFV6A2HCV5A6GUJHZY9Y2QSX_1_6"}
{"score": 0.7617170214653015, "chain_id": "3X87C8JFV6A2HCV5A6GUJHZY9Y2QSX_1_7"}
{"score": 0.7964922189712524, "chain_id": "3X87C8JFV6A2HCV5A6GUJHZY9Y2QSX_1_8"}
{"score": 0.44486185908317566, "chain_id": "3X87C8JFV6A2HCV5A6GUJHZY9Y2QSX_1_9"}
{"score": 0.3933596611022949, "chain_id": "3X87C8JFV6A2HCV5A6GUJHZY9Y2QSX_1_10"}
{"score": 0.39817777276039124, "chain_id": "3R2UR8A0IAF7SH4OP3UDTKLQI8WOX6_1_3"}
{"score": 0.417366623878479, "chain_id": "3R2UR8A0IAF7SH4OP3UDTKLQI8WOX6_1_1"}
{"score": 0.7188136577606201, "chain_id": "3R2UR8A0IAF7SH4OP3UDTKLQI8WOX6_1_2"}
{"score": 0.936326265335083, "chain_id": "3R2UR8A0IAF7SH4OP3UDTKLQI8WOX6_1_4"}
{"score": 0.6922022104263306, "chain_id": "3R2UR8A0IAF7SH4OP3UDTKLQI8WOX6_1_5"}
{"score": 0.04544011130928993, "chain_id": "3R2UR8A0IAF7SH4OP3UDTKLQI8WOX6_1_6"}
{"score": 0.5888357162475586, "chain_id": "3R2UR8A0IAF7SH4OP3UDTKLQI8WOX6_1_7"}
{"score": 0.1472369134426117, "chain_id": "3R2UR8A0IAF7SH4OP3UDTKLQI8WOX6_1_8"}
{"score": 0.030938316136598587, "chain_id": "3R2UR8A0IAF7SH4OP3UDTKLQI8WOX6_1_9"}
{"score": 0.020463792607188225, "chain_id": "3R2UR8A0IAF7SH4OP3UDTKLQI8WOX6_1_10"}
{"score": 0.1267852932214737, "chain_id": "3ERMJ6L4DYRPDZDLUAB27HJXLHXM7T_1_1"}
{"score": 0.20549975335597992, "chain_id": "3ERMJ6L4DYRPDZDLUAB27HJXLHXM7T_1_2"}
{"score": 0.22313039004802704, "chain_id": "3ERMJ6L4DYRPDZDLUAB27HJXLHXM7T_1_3"}
{"score": 0.1599355787038803, "chain_id": "3ERMJ6L4DYRPDZDLUAB27HJXLHXM7T_1_4"}
{"score": 0.10924368351697922, "chain_id": "3ERMJ6L4DYRPDZDLUAB27HJXLHXM7T_1_5"}
{"score": 0.16383856534957886, "chain_id": "3ERMJ6L4DYRPDZDLUAB27HJXLHXM7T_1_6"}
{"score": 0.024262264370918274, "chain_id": "3ERMJ6L4DYRPDZDLUAB27HJXLHXM7T_1_7"}
{"score": 0.04911966994404793, "chain_id": "3ERMJ6L4DYRPDZDLUAB27HJXLHXM7T_1_8"}
{"score": 0.2265075147151947, "chain_id": "3ERMJ6L4DYRPDZDLUAB27HJXLHXM7T_1_9"}
{"score": 0.038792550563812256, "chain_id": "3ERMJ6L4DYRPDZDLUAB27HJXLHXM7T_1_10"}
{"score": 0.9830477833747864, "chain_id": "3R3YRB5GRF2Q99GSAFE88I2HZC1UAH_1_1"}
{"score": 0.9821786284446716, "chain_id": "3R3YRB5GRF2Q99GSAFE88I2HZC1UAH_1_2"}
{"score": 0.25303199887275696, "chain_id": "3R3YRB5GRF2Q99GSAFE88I2HZC1UAH_1_3"}
{"score": 0.5072196125984192, "chain_id": "3R3YRB5GRF2Q99GSAFE88I2HZC1UAH_1_4"}
{"score": 0.12299969792366028, "chain_id": "3R3YRB5GRF2Q99GSAFE88I2HZC1UAH_1_6"}
{"score": 0.36678844690322876, "chain_id": "3R3YRB5GRF2Q99GSAFE88I2HZC1UAH_1_7"}
{"score": 0.5280774235725403, "chain_id": "3R3YRB5GRF2Q99GSAFE88I2HZC1UAH_1_8"}
{"score": 0.7110729813575745, "chain_id": "3R3YRB5GRF2Q99GSAFE88I2HZC1UAH_1_5"}
{"score": 0.2841801047325134, "chain_id": "3R3YRB5GRF2Q99GSAFE88I2HZC1UAH_1_9"}
{"score": 0.05696796998381615, "chain_id": "3R3YRB5GRF2Q99GSAFE88I2HZC1UAH_1_10"}
{"score": 0.971874475479126, "chain_id": "3A7Y0R2P2ONTR6DR9Q28LO44JN8XJ0_1_1"}
{"score": 0.9849492907524109, "chain_id": "3A7Y0R2P2ONTR6DR9Q28LO44JN8XJ0_1_2"}
{"score": 0.7687680721282959, "chain_id": "3A7Y0R2P2ONTR6DR9Q28LO44JN8XJ0_1_3"}
{"score": 0.9738627076148987, "chain_id": "3A7Y0R2P2ONTR6DR9Q28LO44JN8XJ0_1_4"}
{"score": 0.7224722504615784, "chain_id": "3A7Y0R2P2ONTR6DR9Q28LO44JN8XJ0_1_6"}
{"score": 0.8157476782798767, "chain_id": "3A7Y0R2P2ONTR6DR9Q28LO44JN8XJ0_1_7"}
{"score": 0.6348943710327148, "chain_id": "3A7Y0R2P2ONTR6DR9Q28LO44JN8XJ0_1_8"}
{"score": 0.9816781878471375, "chain_id": "3A7Y0R2P2ONTR6DR9Q28LO44JN8XJ0_1_5"}
{"score": 0.04418589919805527, "chain_id": "3A7Y0R2P2ONTR6DR9Q28LO44JN8XJ0_1_9"}
{"score": 0.07054402679204941, "chain_id": "3A7Y0R2P2ONTR6DR9Q28LO44JN8XJ0_1_10"}
{"score": 0.9826927185058594, "chain_id": "37Q970SNZE7E08BOPRQFIGRQA2WS1V_1_1"}
{"score": 0.9838824272155762, "chain_id": "37Q970SNZE7E08BOPRQFIGRQA2WS1V_1_2"}
{"score": 0.9843722581863403, "chain_id": "37Q970SNZE7E08BOPRQFIGRQA2WS1V_1_4"}
{"score": 0.899387776851654, "chain_id": "37Q970SNZE7E08BOPRQFIGRQA2WS1V_1_5"}
{"score": 0.8447761535644531, "chain_id": "37Q970SNZE7E08BOPRQFIGRQA2WS1V_1_6"}
{"score": 0.7964922189712524, "chain_id": "37Q970SNZE7E08BOPRQFIGRQA2WS1V_1_8"}
{"score": 0.9861837029457092, "chain_id": "37Q970SNZE7E08BOPRQFIGRQA2WS1V_1_3"}
{"score": 0.7617170214653015, "chain_id": "37Q970SNZE7E08BOPRQFIGRQA2WS1V_1_7"}
{"score": 0.44486185908317566, "chain_id": "37Q970SNZE7E08BOPRQFIGRQA2WS1V_1_9"}
{"score": 0.3933596611022949, "chain_id": "37Q970SNZE7E08BOPRQFIGRQA2WS1V_1_10"}
{"score": 0.6510259509086609, "chain_id": "3DL65MZB8DEXDSG44TVUAV62DBACE6_1_1"}
{"score": 0.6821446418762207, "chain_id": "3DL65MZB8DEXDSG44TVUAV62DBACE6_1_2"}
{"score": 0.9299067854881287, "chain_id": "3DL65MZB8DEXDSG44TVUAV62DBACE6_1_3"}
{"score": 0.7621113061904907, "chain_id": "3DL65MZB8DEXDSG44TVUAV62DBACE6_1_4"}
{"score": 0.7393246293067932, "chain_id": "3DL65MZB8DEXDSG44TVUAV62DBACE6_1_6"}
{"score": 0.8923138976097107, "chain_id": "3DL65MZB8DEXDSG44TVUAV62DBACE6_1_5"}
{"score": 0.13905468583106995, "chain_id": "3DL65MZB8DEXDSG44TVUAV62DBACE6_1_7"}
{"score": 0.27877992391586304, "chain_id": "3DL65MZB8DEXDSG44TVUAV62DBACE6_1_8"}
{"score": 0.0536593459546566, "chain_id": "3DL65MZB8DEXDSG44TVUAV62DBACE6_1_9"}
{"score": 0.044416870921850204, "chain_id": "3DL65MZB8DEXDSG44TVUAV62DBACE6_1_10"}
{"score": 0.9886793494224548, "chain_id": "3PEIJLRY6TSFXQDQGPLNAEYCM1KWXY_1_1"}
{"score": 0.9866798520088196, "chain_id": "3PEIJLRY6TSFXQDQGPLNAEYCM1KWXY_1_2"}
{"score": 0.9375211000442505, "chain_id": "3PEIJLRY6TSFXQDQGPLNAEYCM1KWXY_1_3"}
{"score": 0.2952674627304077, "chain_id": "3PEIJLRY6TSFXQDQGPLNAEYCM1KWXY_1_4"}
{"score": 0.056572359055280685, "chain_id": "3PEIJLRY6TSFXQDQGPLNAEYCM1KWXY_1_5"}
{"score": 0.6776962876319885, "chain_id": "3PEIJLRY6TSFXQDQGPLNAEYCM1KWXY_1_6"}
{"score": 0.025635093450546265, "chain_id": "3PEIJLRY6TSFXQDQGPLNAEYCM1KWXY_1_7"}
{"score": 0.08565982431173325, "chain_id": "3PEIJLRY6TSFXQDQGPLNAEYCM1KWXY_1_8"}
{"score": 0.02493242733180523, "chain_id": "3PEIJLRY6TSFXQDQGPLNAEYCM1KWXY_1_9"}
{"score": 0.1492917686700821, "chain_id": "3PEIJLRY6TSFXQDQGPLNAEYCM1KWXY_1_10"}
{"score": 0.12331399321556091, "chain_id": "3L4D84MILZRW5GDC4MKMI2GAMP0JHF_1_2"}
{"score": 0.23562520742416382, "chain_id": "3L4D84MILZRW5GDC4MKMI2GAMP0JHF_1_3"}
{"score": 0.1437770426273346, "chain_id": "3L4D84MILZRW5GDC4MKMI2GAMP0JHF_1_1"}
{"score": 0.11710652709007263, "chain_id": "3L4D84MILZRW5GDC4MKMI2GAMP0JHF_1_4"}
{"score": 0.08975216746330261, "chain_id": "3L4D84MILZRW5GDC4MKMI2GAMP0JHF_1_5"}
{"score": 0.7492919564247131, "chain_id": "3L4D84MILZRW5GDC4MKMI2GAMP0JHF_1_6"}
{"score": 0.05050031468272209, "chain_id": "3L4D84MILZRW5GDC4MKMI2GAMP0JHF_1_7"}
{"score": 0.511273980140686, "chain_id": "3L4D84MILZRW5GDC4MKMI2GAMP0JHF_1_8"}
{"score": 0.13116973638534546, "chain_id": "3L4D84MILZRW5GDC4MKMI2GAMP0JHF_1_9"}
{"score": 0.29148194193840027, "chain_id": "3L4D84MILZRW5GDC4MKMI2GAMP0JHF_1_10"}
{"score": 0.9447976350784302, "chain_id": "3A9AA95ATWLGBYWFYXOXQ1ZWMFV5PW_1_1"}
{"score": 0.8636153936386108, "chain_id": "3A9AA95ATWLGBYWFYXOXQ1ZWMFV5PW_1_2"}
{"score": 0.5226145386695862, "chain_id": "3A9AA95ATWLGBYWFYXOXQ1ZWMFV5PW_1_3"}
{"score": 0.5634247660636902, "chain_id": "3A9AA95ATWLGBYWFYXOXQ1ZWMFV5PW_1_4"}
{"score": 0.018687859177589417, "chain_id": "3A9AA95ATWLGBYWFYXOXQ1ZWMFV5PW_1_5"}
{"score": 0.06387092918157578, "chain_id": "3A9AA95ATWLGBYWFYXOXQ1ZWMFV5PW_1_6"}
{"score": 0.40236037969589233, "chain_id": "3A9AA95ATWLGBYWFYXOXQ1ZWMFV5PW_1_7"}
{"score": 0.7815876007080078, "chain_id": "3A9AA95ATWLGBYWFYXOXQ1ZWMFV5PW_1_8"}
{"score": 0.1358879953622818, "chain_id": "3A9AA95ATWLGBYWFYXOXQ1ZWMFV5PW_1_9"}
{"score": 0.034644715487957, "chain_id": "3A9AA95ATWLGBYWFYXOXQ1ZWMFV5PW_1_10"}
{"score": 0.17932482063770294, "chain_id": "3FE7TXL1LIM9CDE7GR1OSZMU7902Q9_1_1"}
{"score": 0.11383267492055893, "chain_id": "3FE7TXL1LIM9CDE7GR1OSZMU7902Q9_1_2"}
{"score": 0.08245814591646194, "chain_id": "3FE7TXL1LIM9CDE7GR1OSZMU7902Q9_1_3"}
{"score": 0.0232784952968359, "chain_id": "3FE7TXL1LIM9CDE7GR1OSZMU7902Q9_1_4"}
{"score": 0.04408896341919899, "chain_id": "3FE7TXL1LIM9CDE7GR1OSZMU7902Q9_1_5"}
{"score": 0.1008591577410698, "chain_id": "3FE7TXL1LIM9CDE7GR1OSZMU7902Q9_1_6"}
{"score": 0.053587980568408966, "chain_id": "3FE7TXL1LIM9CDE7GR1OSZMU7902Q9_1_7"}
{"score": 0.057366810739040375, "chain_id": "3FE7TXL1LIM9CDE7GR1OSZMU7902Q9_1_8"}
{"score": 0.10097120702266693, "chain_id": "3FE7TXL1LIM9CDE7GR1OSZMU7902Q9_1_9"}
{"score": 0.4119614064693451, "chain_id": "3FE7TXL1LIM9CDE7GR1OSZMU7902Q9_1_10"}
{"score": 0.019180549308657646, "chain_id": "3137ONMDKG4AU4W96FRD0MRHZX2GEW_1_1"}
{"score": 0.03284595534205437, "chain_id": "3137ONMDKG4AU4W96FRD0MRHZX2GEW_1_2"}
{"score": 0.024737168103456497, "chain_id": "3137ONMDKG4AU4W96FRD0MRHZX2GEW_1_3"}
{"score": 0.01646568812429905, "chain_id": "3137ONMDKG4AU4W96FRD0MRHZX2GEW_1_4"}
{"score": 0.06295713037252426, "chain_id": "3137ONMDKG4AU4W96FRD0MRHZX2GEW_1_5"}
{"score": 0.026662863790988922, "chain_id": "3137ONMDKG4AU4W96FRD0MRHZX2GEW_1_6"}
{"score": 0.060882799327373505, "chain_id": "3137ONMDKG4AU4W96FRD0MRHZX2GEW_1_7"}
{"score": 0.061169661581516266, "chain_id": "3137ONMDKG4AU4W96FRD0MRHZX2GEW_1_8"}
{"score": 0.029021935537457466, "chain_id": "3137ONMDKG4AU4W96FRD0MRHZX2GEW_1_9"}
{"score": 0.027063628658652306, "chain_id": "3137ONMDKG4AU4W96FRD0MRHZX2GEW_1_10"}
{"score": 0.9590144753456116, "chain_id": "3FTOP5WARFNLUG7G6ED1CAHTXUHJ01_1_2"}
{"score": 0.029497502371668816, "chain_id": "3FTOP5WARFNLUG7G6ED1CAHTXUHJ01_1_1"}
{"score": 0.034749336540699005, "chain_id": "3FTOP5WARFNLUG7G6ED1CAHTXUHJ01_1_3"}
{"score": 0.02432866394519806, "chain_id": "3FTOP5WARFNLUG7G6ED1CAHTXUHJ01_1_4"}
{"score": 0.014827066101133823, "chain_id": "3FTOP5WARFNLUG7G6ED1CAHTXUHJ01_1_5"}
{"score": 0.8130871057510376, "chain_id": "3FTOP5WARFNLUG7G6ED1CAHTXUHJ01_1_6"}
{"score": 0.5269595980644226, "chain_id": "3FTOP5WARFNLUG7G6ED1CAHTXUHJ01_1_7"}
{"score": 0.8810989260673523, "chain_id": "3FTOP5WARFNLUG7G6ED1CAHTXUHJ01_1_8"}
{"score": 0.5461553335189819, "chain_id": "3FTOP5WARFNLUG7G6ED1CAHTXUHJ01_1_9"}
{"score": 0.020846273750066757, "chain_id": "3FTOP5WARFNLUG7G6ED1CAHTXUHJ01_1_10"}
{"score": 0.37438127398490906, "chain_id": "36V4Q8R5ZKZZJHI0Q9K8780SFUBMQC_1_5"}
{"score": 0.1110953837633133, "chain_id": "36V4Q8R5ZKZZJHI0Q9K8780SFUBMQC_1_1"}
{"score": 0.0834614709019661, "chain_id": "36V4Q8R5ZKZZJHI0Q9K8780SFUBMQC_1_2"}
{"score": 0.05585112050175667, "chain_id": "36V4Q8R5ZKZZJHI0Q9K8780SFUBMQC_1_3"}
{"score": 0.08196864277124405, "chain_id": "36V4Q8R5ZKZZJHI0Q9K8780SFUBMQC_1_4"}
{"score": 0.08831039816141129, "chain_id": "36V4Q8R5ZKZZJHI0Q9K8780SFUBMQC_1_6"}
{"score": 0.0241408571600914, "chain_id": "36V4Q8R5ZKZZJHI0Q9K8780SFUBMQC_1_7"}
{"score": 0.08854486793279648, "chain_id": "36V4Q8R5ZKZZJHI0Q9K8780SFUBMQC_1_8"}
{"score": 0.045695170760154724, "chain_id": "36V4Q8R5ZKZZJHI0Q9K8780SFUBMQC_1_9"}
{"score": 0.02276245318353176, "chain_id": "36V4Q8R5ZKZZJHI0Q9K8780SFUBMQC_1_10"}
{"score": 0.021647930145263672, "chain_id": "3BDCF01OGXTOM1R1H70NKHO5E2QYLL_1_1"}
{"score": 0.01785070076584816, "chain_id": "3BDCF01OGXTOM1R1H70NKHO5E2QYLL_1_2"}
{"score": 0.01893858052790165, "chain_id": "3BDCF01OGXTOM1R1H70NKHO5E2QYLL_1_3"}
{"score": 0.039649318903684616, "chain_id": "3BDCF01OGXTOM1R1H70NKHO5E2QYLL_1_4"}
{"score": 0.03963339328765869, "chain_id": "3BDCF01OGXTOM1R1H70NKHO5E2QYLL_1_5"}
{"score": 0.027347790077328682, "chain_id": "3BDCF01OGXTOM1R1H70NKHO5E2QYLL_1_6"}
{"score": 0.12273363023996353, "chain_id": "3BDCF01OGXTOM1R1H70NKHO5E2QYLL_1_7"}
{"score": 0.04381489008665085, "chain_id": "3BDCF01OGXTOM1R1H70NKHO5E2QYLL_1_8"}
{"score": 0.03617485985159874, "chain_id": "3BDCF01OGXTOM1R1H70NKHO5E2QYLL_1_9"}
{"score": 0.0194852277636528, "chain_id": "3BDCF01OGXTOM1R1H70NKHO5E2QYLL_1_10"}
{"score": 0.36579430103302, "chain_id": "3DZQRBDBSLEAABP3CV4Y696N82OS3X_1_1"}
{"score": 0.5549899935722351, "chain_id": "3DZQRBDBSLEAABP3CV4Y696N82OS3X_1_5"}
{"score": 0.25548288226127625, "chain_id": "3DZQRBDBSLEAABP3CV4Y696N82OS3X_1_9"}
{"score": 0.5443301796913147, "chain_id": "3DZQRBDBSLEAABP3CV4Y696N82OS3X_1_2"}
{"score": 0.030544662848114967, "chain_id": "3DZQRBDBSLEAABP3CV4Y696N82OS3X_1_3"}
{"score": 0.12914280593395233, "chain_id": "3DZQRBDBSLEAABP3CV4Y696N82OS3X_1_4"}
{"score": 0.21363021433353424, "chain_id": "3DZQRBDBSLEAABP3CV4Y696N82OS3X_1_6"}
{"score": 0.02769368700683117, "chain_id": "3DZQRBDBSLEAABP3CV4Y696N82OS3X_1_7"}
{"score": 0.030831152573227882, "chain_id": "3DZQRBDBSLEAABP3CV4Y696N82OS3X_1_8"}
{"score": 0.13292665779590607, "chain_id": "3DZQRBDBSLEAABP3CV4Y696N82OS3X_1_10"}
{"score": 0.7556447386741638, "chain_id": "3Z9WI9EOZZNRG0JUM7KYJHGNAE6KHU_1_1"}
{"score": 0.06863746792078018, "chain_id": "3Z9WI9EOZZNRG0JUM7KYJHGNAE6KHU_1_4"}
{"score": 0.07241462171077728, "chain_id": "3Z9WI9EOZZNRG0JUM7KYJHGNAE6KHU_1_9"}
{"score": 0.3259970545768738, "chain_id": "3Z9WI9EOZZNRG0JUM7KYJHGNAE6KHU_1_2"}
{"score": 0.02505413442850113, "chain_id": "3Z9WI9EOZZNRG0JUM7KYJHGNAE6KHU_1_3"}
{"score": 0.03771376982331276, "chain_id": "3Z9WI9EOZZNRG0JUM7KYJHGNAE6KHU_1_5"}
{"score": 0.14707960188388824, "chain_id": "3Z9WI9EOZZNRG0JUM7KYJHGNAE6KHU_1_6"}
{"score": 0.054836273193359375, "chain_id": "3Z9WI9EOZZNRG0JUM7KYJHGNAE6KHU_1_7"}
{"score": 0.021350186318159103, "chain_id": "3Z9WI9EOZZNRG0JUM7KYJHGNAE6KHU_1_8"}
{"score": 0.045466143637895584, "chain_id": "3Z9WI9EOZZNRG0JUM7KYJHGNAE6KHU_1_10"}
{"score": 0.02687326818704605, "chain_id": "3ATTHHXXWANXWVTLR8H89NP468MXIR_1_1"}
{"score": 0.34991341829299927, "chain_id": "3ATTHHXXWANXWVTLR8H89NP468MXIR_1_2"}
{"score": 0.1219916045665741, "chain_id": "3ATTHHXXWANXWVTLR8H89NP468MXIR_1_3"}
{"score": 0.024470467120409012, "chain_id": "3ATTHHXXWANXWVTLR8H89NP468MXIR_1_4"}
{"score": 0.050127673894166946, "chain_id": "3ATTHHXXWANXWVTLR8H89NP468MXIR_1_5"}
{"score": 0.03397301957011223, "chain_id": "3ATTHHXXWANXWVTLR8H89NP468MXIR_1_6"}
{"score": 0.029265472665429115, "chain_id": "3ATTHHXXWANXWVTLR8H89NP468MXIR_1_7"}
{"score": 0.017553145065903664, "chain_id": "3ATTHHXXWANXWVTLR8H89NP468MXIR_1_8"}
{"score": 0.0278011504560709, "chain_id": "3ATTHHXXWANXWVTLR8H89NP468MXIR_1_9"}
{"score": 0.011400977149605751, "chain_id": "3ATTHHXXWANXWVTLR8H89NP468MXIR_1_10"}
{"score": 0.9740021228790283, "chain_id": "3CN4LGXD5XNSOTKGBF16Y0MU31HY4V_1_1"}
{"score": 0.28299257159233093, "chain_id": "3CN4LGXD5XNSOTKGBF16Y0MU31HY4V_1_2"}
{"score": 0.3716403543949127, "chain_id": "3CN4LGXD5XNSOTKGBF16Y0MU31HY4V_1_3"}
{"score": 0.05187419056892395, "chain_id": "3CN4LGXD5XNSOTKGBF16Y0MU31HY4V_1_4"}
{"score": 0.01570257358253002, "chain_id": "3CN4LGXD5XNSOTKGBF16Y0MU31HY4V_1_5"}
{"score": 0.012048850767314434, "chain_id": "3CN4LGXD5XNSOTKGBF16Y0MU31HY4V_1_6"}
{"score": 0.017015589401125908, "chain_id": "3CN4LGXD5XNSOTKGBF16Y0MU31HY4V_1_7"}
{"score": 0.03047666884958744, "chain_id": "3CN4LGXD5XNSOTKGBF16Y0MU31HY4V_1_8"}
{"score": 0.0209729615598917, "chain_id": "3CN4LGXD5XNSOTKGBF16Y0MU31HY4V_1_9"}
{"score": 0.014785067178308964, "chain_id": "3CN4LGXD5XNSOTKGBF16Y0MU31HY4V_1_10"}
{"score": 0.46415838599205017, "chain_id": "3EJPLAJKEMF686YZQPW495FA5NO6ZE_1_1"}
{"score": 0.1648941934108734, "chain_id": "3EJPLAJKEMF686YZQPW495FA5NO6ZE_1_3"}
{"score": 0.8246101140975952, "chain_id": "3EJPLAJKEMF686YZQPW495FA5NO6ZE_1_2"}
{"score": 0.8624105453491211, "chain_id": "3EJPLAJKEMF686YZQPW495FA5NO6ZE_1_4"}
{"score": 0.4436875581741333, "chain_id": "3EJPLAJKEMF686YZQPW495FA5NO6ZE_1_5"}
{"score": 0.1565735787153244, "chain_id": "3EJPLAJKEMF686YZQPW495FA5NO6ZE_1_6"}
{"score": 0.1440870612859726, "chain_id": "3EJPLAJKEMF686YZQPW495FA5NO6ZE_1_7"}
{"score": 0.04054852947592735, "chain_id": "3EJPLAJKEMF686YZQPW495FA5NO6ZE_1_8"}
{"score": 0.09604694694280624, "chain_id": "3EJPLAJKEMF686YZQPW495FA5NO6ZE_1_9"}
{"score": 0.04426439478993416, "chain_id": "3EJPLAJKEMF686YZQPW495FA5NO6ZE_1_10"}
{"score": 0.12224593013525009, "chain_id": "3Y54SXRO1LKVO5F1GF5P3NS9NDRUTV_1_3"}
{"score": 0.13372394442558289, "chain_id": "3Y54SXRO1LKVO5F1GF5P3NS9NDRUTV_1_7"}
{"score": 0.06938397884368896, "chain_id": "3Y54SXRO1LKVO5F1GF5P3NS9NDRUTV_1_1"}
{"score": 0.14166666567325592, "chain_id": "3Y54SXRO1LKVO5F1GF5P3NS9NDRUTV_1_2"}
{"score": 0.07867306470870972, "chain_id": "3Y54SXRO1LKVO5F1GF5P3NS9NDRUTV_1_4"}
{"score": 0.023182064294815063, "chain_id": "3Y54SXRO1LKVO5F1GF5P3NS9NDRUTV_1_5"}
{"score": 0.04127468541264534, "chain_id": "3Y54SXRO1LKVO5F1GF5P3NS9NDRUTV_1_6"}
{"score": 0.16204895079135895, "chain_id": "3Y54SXRO1LKVO5F1GF5P3NS9NDRUTV_1_8"}
{"score": 0.04667710140347481, "chain_id": "3Y54SXRO1LKVO5F1GF5P3NS9NDRUTV_1_9"}
{"score": 0.02426709607243538, "chain_id": "3Y54SXRO1LKVO5F1GF5P3NS9NDRUTV_1_10"}
{"score": 0.05238930508494377, "chain_id": "3K2755HG5S2ZOYMEZ0ABCJ9KYYJDF3_1_6"}
{"score": 0.269187331199646, "chain_id": "3K2755HG5S2ZOYMEZ0ABCJ9KYYJDF3_1_1"}
{"score": 0.02263632044196129, "chain_id": "3K2755HG5S2ZOYMEZ0ABCJ9KYYJDF3_1_2"}
{"score": 0.014776957221329212, "chain_id": "3K2755HG5S2ZOYMEZ0ABCJ9KYYJDF3_1_3"}
{"score": 0.017036650329828262, "chain_id": "3K2755HG5S2ZOYMEZ0ABCJ9KYYJDF3_1_4"}
{"score": 0.03395211324095726, "chain_id": "3K2755HG5S2ZOYMEZ0ABCJ9KYYJDF3_1_5"}
{"score": 0.03883201256394386, "chain_id": "3K2755HG5S2ZOYMEZ0ABCJ9KYYJDF3_1_7"}
{"score": 0.07876432687044144, "chain_id": "3K2755HG5S2ZOYMEZ0ABCJ9KYYJDF3_1_8"}
{"score": 0.020019764080643654, "chain_id": "3K2755HG5S2ZOYMEZ0ABCJ9KYYJDF3_1_9"}
{"score": 0.5733287334442139, "chain_id": "3K2755HG5S2ZOYMEZ0ABCJ9KYYJDF3_1_10"}
{"score": 0.989962100982666, "chain_id": "3EJJQNKU9R4D34WPCRTVKT21QMRRH2_1_2"}
{"score": 0.9902162551879883, "chain_id": "3EJJQNKU9R4D34WPCRTVKT21QMRRH2_1_3"}
{"score": 0.9879075884819031, "chain_id": "3EJJQNKU9R4D34WPCRTVKT21QMRRH2_1_1"}
{"score": 0.9902961850166321, "chain_id": "3EJJQNKU9R4D34WPCRTVKT21QMRRH2_1_4"}
{"score": 0.4005311131477356, "chain_id": "3EJJQNKU9R4D34WPCRTVKT21QMRRH2_1_5"}
{"score": 0.8887441158294678, "chain_id": "3EJJQNKU9R4D34WPCRTVKT21QMRRH2_1_6"}
{"score": 0.14744378626346588, "chain_id": "3EJJQNKU9R4D34WPCRTVKT21QMRRH2_1_7"}
{"score": 0.6050322651863098, "chain_id": "3EJJQNKU9R4D34WPCRTVKT21QMRRH2_1_8"}
{"score": 0.21338430047035217, "chain_id": "3EJJQNKU9R4D34WPCRTVKT21QMRRH2_1_9"}
{"score": 0.132613405585289, "chain_id": "3EJJQNKU9R4D34WPCRTVKT21QMRRH2_1_10"}
{"score": 0.9529123902320862, "chain_id": "3SNVL38CI4R0ZS8E0F6X8QJ7HISKCO_1_1"}
{"score": 0.6054344773292542, "chain_id": "3SNVL38CI4R0ZS8E0F6X8QJ7HISKCO_1_5"}
{"score": 0.2436574548482895, "chain_id": "3SNVL38CI4R0ZS8E0F6X8QJ7HISKCO_1_7"}
{"score": 0.2875816524028778, "chain_id": "3SNVL38CI4R0ZS8E0F6X8QJ7HISKCO_1_2"}
{"score": 0.6463683843612671, "chain_id": "3SNVL38CI4R0ZS8E0F6X8QJ7HISKCO_1_3"}
{"score": 0.5023134350776672, "chain_id": "3SNVL38CI4R0ZS8E0F6X8QJ7HISKCO_1_4"}
{"score": 0.12036290764808655, "chain_id": "3SNVL38CI4R0ZS8E0F6X8QJ7HISKCO_1_6"}
{"score": 0.4422362446784973, "chain_id": "3SNVL38CI4R0ZS8E0F6X8QJ7HISKCO_1_8"}
{"score": 0.3303152620792389, "chain_id": "3SNVL38CI4R0ZS8E0F6X8QJ7HISKCO_1_9"}
{"score": 0.029241429641842842, "chain_id": "3SNVL38CI4R0ZS8E0F6X8QJ7HISKCO_1_10"}
{"score": 0.8468676209449768, "chain_id": "3HVVDCPGTERC5EZ6QG2E68YM3TMTYG_1_2"}
{"score": 0.02408246323466301, "chain_id": "3HVVDCPGTERC5EZ6QG2E68YM3TMTYG_1_5"}
{"score": 0.5992864966392517, "chain_id": "3HVVDCPGTERC5EZ6QG2E68YM3TMTYG_1_1"}
{"score": 0.44676584005355835, "chain_id": "3HVVDCPGTERC5EZ6QG2E68YM3TMTYG_1_3"}
{"score": 0.6533979177474976, "chain_id": "3HVVDCPGTERC5EZ6QG2E68YM3TMTYG_1_4"}
{"score": 0.03331875428557396, "chain_id": "3HVVDCPGTERC5EZ6QG2E68YM3TMTYG_1_6"}
{"score": 0.053227443248033524, "chain_id": "3HVVDCPGTERC5EZ6QG2E68YM3TMTYG_1_7"}
{"score": 0.04106702283024788, "chain_id": "3HVVDCPGTERC5EZ6QG2E68YM3TMTYG_1_8"}
{"score": 0.0529121458530426, "chain_id": "3HVVDCPGTERC5EZ6QG2E68YM3TMTYG_1_9"}
{"score": 0.05854282155632973, "chain_id": "3HVVDCPGTERC5EZ6QG2E68YM3TMTYG_1_10"}
{"score": 0.6791632771492004, "chain_id": "39LNWE0K4UV5FRZQM36LPGQ0YDPUIC_1_1"}
{"score": 0.8314403891563416, "chain_id": "39LNWE0K4UV5FRZQM36LPGQ0YDPUIC_1_3"}
{"score": 0.5188141465187073, "chain_id": "39LNWE0K4UV5FRZQM36LPGQ0YDPUIC_1_4"}
{"score": 0.41434717178344727, "chain_id": "39LNWE0K4UV5FRZQM36LPGQ0YDPUIC_1_2"}
{"score": 0.22778882086277008, "chain_id": "39LNWE0K4UV5FRZQM36LPGQ0YDPUIC_1_5"}
{"score": 0.06320878118276596, "chain_id": "39LNWE0K4UV5FRZQM36LPGQ0YDPUIC_1_6"}
{"score": 0.0725989118218422, "chain_id": "39LNWE0K4UV5FRZQM36LPGQ0YDPUIC_1_7"}
{"score": 0.03188294172286987, "chain_id": "39LNWE0K4UV5FRZQM36LPGQ0YDPUIC_1_8"}
{"score": 0.11795759201049805, "chain_id": "39LNWE0K4UV5FRZQM36LPGQ0YDPUIC_1_9"}
{"score": 0.20397841930389404, "chain_id": "39LNWE0K4UV5FRZQM36LPGQ0YDPUIC_1_10"}
{"score": 0.760866641998291, "chain_id": "3BWI6RSP7G8R1BL8DCNJU9EOYOIE7C_1_1"}
{"score": 0.053036030381917953, "chain_id": "3BWI6RSP7G8R1BL8DCNJU9EOYOIE7C_1_2"}
{"score": 0.1024915874004364, "chain_id": "3BWI6RSP7G8R1BL8DCNJU9EOYOIE7C_1_3"}
{"score": 0.1234569102525711, "chain_id": "3BWI6RSP7G8R1BL8DCNJU9EOYOIE7C_1_4"}
{"score": 0.02075238712131977, "chain_id": "3BWI6RSP7G8R1BL8DCNJU9EOYOIE7C_1_5"}
{"score": 0.1128244698047638, "chain_id": "3BWI6RSP7G8R1BL8DCNJU9EOYOIE7C_1_6"}
{"score": 0.07556141912937164, "chain_id": "3BWI6RSP7G8R1BL8DCNJU9EOYOIE7C_1_7"}
{"score": 0.4738740622997284, "chain_id": "3BWI6RSP7G8R1BL8DCNJU9EOYOIE7C_1_8"}
{"score": 0.020512260496616364, "chain_id": "3BWI6RSP7G8R1BL8DCNJU9EOYOIE7C_1_9"}
{"score": 0.02788088470697403, "chain_id": "3BWI6RSP7G8R1BL8DCNJU9EOYOIE7C_1_10"}
{"score": 0.02743513323366642, "chain_id": "3AMW0RGHOD1K1N2L2XKJKIZIGHHPNT_1_1"}
{"score": 0.9577423930168152, "chain_id": "3AMW0RGHOD1K1N2L2XKJKIZIGHHPNT_1_2"}
{"score": 0.048819586634635925, "chain_id": "3AMW0RGHOD1K1N2L2XKJKIZIGHHPNT_1_3"}
{"score": 0.09894344210624695, "chain_id": "3AMW0RGHOD1K1N2L2XKJKIZIGHHPNT_1_4"}
{"score": 0.11288746446371078, "chain_id": "3AMW0RGHOD1K1N2L2XKJKIZIGHHPNT_1_5"}
{"score": 0.4601776897907257, "chain_id": "3AMW0RGHOD1K1N2L2XKJKIZIGHHPNT_1_6"}
{"score": 0.961962103843689, "chain_id": "3AMW0RGHOD1K1N2L2XKJKIZIGHHPNT_1_7"}
{"score": 0.05011164769530296, "chain_id": "3AMW0RGHOD1K1N2L2XKJKIZIGHHPNT_1_8"}
{"score": 0.0440482534468174, "chain_id": "3AMW0RGHOD1K1N2L2XKJKIZIGHHPNT_1_9"}
{"score": 0.7941014170646667, "chain_id": "3AMW0RGHOD1K1N2L2XKJKIZIGHHPNT_1_10"}
{"score": 0.9656290411949158, "chain_id": "3CFJTT4SXTP3HGNU9VDAFOCGUPHI71_1_1"}
{"score": 0.2852121591567993, "chain_id": "3CFJTT4SXTP3HGNU9VDAFOCGUPHI71_1_2"}
{"score": 0.05741962790489197, "chain_id": "3CFJTT4SXTP3HGNU9VDAFOCGUPHI71_1_3"}
{"score": 0.09725631028413773, "chain_id": "3CFJTT4SXTP3HGNU9VDAFOCGUPHI71_1_4"}
{"score": 0.3562834858894348, "chain_id": "3CFJTT4SXTP3HGNU9VDAFOCGUPHI71_1_5"}
{"score": 0.092081718146801, "chain_id": "3CFJTT4SXTP3HGNU9VDAFOCGUPHI71_1_6"}
{"score": 0.08299963921308517, "chain_id": "3CFJTT4SXTP3HGNU9VDAFOCGUPHI71_1_7"}
{"score": 0.03498993441462517, "chain_id": "3CFJTT4SXTP3HGNU9VDAFOCGUPHI71_1_8"}
{"score": 0.0251966193318367, "chain_id": "3CFJTT4SXTP3HGNU9VDAFOCGUPHI71_1_9"}
{"score": 0.027526620775461197, "chain_id": "3CFJTT4SXTP3HGNU9VDAFOCGUPHI71_1_10"}
{"score": 0.1829860657453537, "chain_id": "39KFRKBFINUWSMUYUZGFCYSZAWTOY5_1_1"}
{"score": 0.02883126772940159, "chain_id": "39KFRKBFINUWSMUYUZGFCYSZAWTOY5_1_3"}
{"score": 0.02452206052839756, "chain_id": "39KFRKBFINUWSMUYUZGFCYSZAWTOY5_1_2"}
{"score": 0.029491521418094635, "chain_id": "39KFRKBFINUWSMUYUZGFCYSZAWTOY5_1_4"}
{"score": 0.07858294248580933, "chain_id": "39KFRKBFINUWSMUYUZGFCYSZAWTOY5_1_5"}
{"score": 0.042619358748197556, "chain_id": "39KFRKBFINUWSMUYUZGFCYSZAWTOY5_1_6"}
{"score": 0.05688486993312836, "chain_id": "39KFRKBFINUWSMUYUZGFCYSZAWTOY5_1_7"}
{"score": 0.14332136511802673, "chain_id": "39KFRKBFINUWSMUYUZGFCYSZAWTOY5_1_8"}
{"score": 0.21258454024791718, "chain_id": "39KFRKBFINUWSMUYUZGFCYSZAWTOY5_1_9"}
{"score": 0.11405698955059052, "chain_id": "39KFRKBFINUWSMUYUZGFCYSZAWTOY5_1_10"}
{"score": 0.5637102127075195, "chain_id": "34Z02EIMISCF8J3LI8R5EG42YXS0TC_1_6"}
{"score": 0.18828512728214264, "chain_id": "34Z02EIMISCF8J3LI8R5EG42YXS0TC_1_7"}
{"score": 0.4685070514678955, "chain_id": "34Z02EIMISCF8J3LI8R5EG42YXS0TC_1_8"}
{"score": 0.20570969581604004, "chain_id": "34Z02EIMISCF8J3LI8R5EG42YXS0TC_1_9"}
{"score": 0.47144585847854614, "chain_id": "34Z02EIMISCF8J3LI8R5EG42YXS0TC_1_1"}
{"score": 0.5448963046073914, "chain_id": "34Z02EIMISCF8J3LI8R5EG42YXS0TC_1_2"}
{"score": 0.36171939969062805, "chain_id": "34Z02EIMISCF8J3LI8R5EG42YXS0TC_1_3"}
{"score": 0.45903369784355164, "chain_id": "34Z02EIMISCF8J3LI8R5EG42YXS0TC_1_4"}
{"score": 0.7403878569602966, "chain_id": "34Z02EIMISCF8J3LI8R5EG42YXS0TC_1_5"}
{"score": 0.13645057380199432, "chain_id": "34Z02EIMISCF8J3LI8R5EG42YXS0TC_1_10"}
{"score": 0.9469135999679565, "chain_id": "37M28K1J0QCHVT5YYGAU1GT69PIAJT_1_5"}
{"score": 0.9765180945396423, "chain_id": "37M28K1J0QCHVT5YYGAU1GT69PIAJT_1_6"}
{"score": 0.09385234862565994, "chain_id": "37M28K1J0QCHVT5YYGAU1GT69PIAJT_1_1"}
{"score": 0.25373876094818115, "chain_id": "37M28K1J0QCHVT5YYGAU1GT69PIAJT_1_2"}
{"score": 0.06635908782482147, "chain_id": "37M28K1J0QCHVT5YYGAU1GT69PIAJT_1_3"}
{"score": 0.0553898848593235, "chain_id": "37M28K1J0QCHVT5YYGAU1GT69PIAJT_1_4"}
{"score": 0.6026286482810974, "chain_id": "37M28K1J0QCHVT5YYGAU1GT69PIAJT_1_7"}
{"score": 0.43956661224365234, "chain_id": "37M28K1J0QCHVT5YYGAU1GT69PIAJT_1_8"}
{"score": 0.17143501341342926, "chain_id": "37M28K1J0QCHVT5YYGAU1GT69PIAJT_1_9"}
{"score": 0.164560467004776, "chain_id": "37M28K1J0QCHVT5YYGAU1GT69PIAJT_1_10"}
{"score": 0.44914308190345764, "chain_id": "3R9WASFE2ZF2RZRARIZ83BSNPBLZF9_1_5"}
{"score": 0.8027976751327515, "chain_id": "3R9WASFE2ZF2RZRARIZ83BSNPBLZF9_1_7"}
{"score": 0.3072410523891449, "chain_id": "3R9WASFE2ZF2RZRARIZ83BSNPBLZF9_1_1"}
{"score": 0.35981878638267517, "chain_id": "3R9WASFE2ZF2RZRARIZ83BSNPBLZF9_1_2"}
{"score": 0.2910501956939697, "chain_id": "3R9WASFE2ZF2RZRARIZ83BSNPBLZF9_1_3"}
{"score": 0.13101322948932648, "chain_id": "3R9WASFE2ZF2RZRARIZ83BSNPBLZF9_1_4"}
{"score": 0.4483968913555145, "chain_id": "3R9WASFE2ZF2RZRARIZ83BSNPBLZF9_1_6"}
{"score": 0.6527007818222046, "chain_id": "3R9WASFE2ZF2RZRARIZ83BSNPBLZF9_1_8"}
{"score": 0.2650876045227051, "chain_id": "3R9WASFE2ZF2RZRARIZ83BSNPBLZF9_1_9"}
{"score": 0.24110595881938934, "chain_id": "3R9WASFE2ZF2RZRARIZ83BSNPBLZF9_1_10"}
{"score": 0.9893660545349121, "chain_id": "3NG53N1RLVIZYGFHWVV02L9NL97P8Y_1_1"}
{"score": 0.9192708730697632, "chain_id": "3NG53N1RLVIZYGFHWVV02L9NL97P8Y_1_2"}
{"score": 0.9282407164573669, "chain_id": "3NG53N1RLVIZYGFHWVV02L9NL97P8Y_1_3"}
{"score": 0.3738654553890228, "chain_id": "3NG53N1RLVIZYGFHWVV02L9NL97P8Y_1_4"}
{"score": 0.15605899691581726, "chain_id": "3NG53N1RLVIZYGFHWVV02L9NL97P8Y_1_5"}
{"score": 0.13286186754703522, "chain_id": "3NG53N1RLVIZYGFHWVV02L9NL97P8Y_1_6"}
{"score": 0.050005462020635605, "chain_id": "3NG53N1RLVIZYGFHWVV02L9NL97P8Y_1_7"}
{"score": 0.06511187553405762, "chain_id": "3NG53N1RLVIZYGFHWVV02L9NL97P8Y_1_8"}
{"score": 0.08812902122735977, "chain_id": "3NG53N1RLVIZYGFHWVV02L9NL97P8Y_1_9"}
{"score": 0.12139197438955307, "chain_id": "3NG53N1RLVIZYGFHWVV02L9NL97P8Y_1_10"}
{"score": 0.6513458490371704, "chain_id": "31LVTDXBL79FP0FF3C8TCLV86VJRLB_1_2"}
{"score": 0.6817856431007385, "chain_id": "31LVTDXBL79FP0FF3C8TCLV86VJRLB_1_4"}
{"score": 0.27411600947380066, "chain_id": "31LVTDXBL79FP0FF3C8TCLV86VJRLB_1_5"}
{"score": 0.1418447345495224, "chain_id": "31LVTDXBL79FP0FF3C8TCLV86VJRLB_1_9"}
{"score": 0.878687858581543, "chain_id": "31LVTDXBL79FP0FF3C8TCLV86VJRLB_1_1"}
{"score": 0.0698736160993576, "chain_id": "31LVTDXBL79FP0FF3C8TCLV86VJRLB_1_3"}
{"score": 0.29781073331832886, "chain_id": "31LVTDXBL79FP0FF3C8TCLV86VJRLB_1_6"}
{"score": 0.020387953147292137, "chain_id": "31LVTDXBL79FP0FF3C8TCLV86VJRLB_1_7"}
{"score": 0.02180406264960766, "chain_id": "31LVTDXBL79FP0FF3C8TCLV86VJRLB_1_8"}
{"score": 0.029570626094937325, "chain_id": "31LVTDXBL79FP0FF3C8TCLV86VJRLB_1_10"}
{"score": 0.151192307472229, "chain_id": "3X73LLYYQ1DSO64XJKCEB9XRQCCHNP_1_1"}
{"score": 0.08652763813734055, "chain_id": "3X73LLYYQ1DSO64XJKCEB9XRQCCHNP_1_3"}
{"score": 0.12905167043209076, "chain_id": "3X73LLYYQ1DSO64XJKCEB9XRQCCHNP_1_7"}
{"score": 0.44512248039245605, "chain_id": "3X73LLYYQ1DSO64XJKCEB9XRQCCHNP_1_2"}
{"score": 0.1413232982158661, "chain_id": "3X73LLYYQ1DSO64XJKCEB9XRQCCHNP_1_4"}
{"score": 0.04858316481113434, "chain_id": "3X73LLYYQ1DSO64XJKCEB9XRQCCHNP_1_5"}
{"score": 0.6677334904670715, "chain_id": "3X73LLYYQ1DSO64XJKCEB9XRQCCHNP_1_6"}
{"score": 0.06890372186899185, "chain_id": "3X73LLYYQ1DSO64XJKCEB9XRQCCHNP_1_8"}
{"score": 0.036103080958127975, "chain_id": "3X73LLYYQ1DSO64XJKCEB9XRQCCHNP_1_9"}
{"score": 0.040438756346702576, "chain_id": "3X73LLYYQ1DSO64XJKCEB9XRQCCHNP_1_10"}
{"score": 0.023625541478395462, "chain_id": "3A1COHJ8NJU7LZHTDINVTC7W5USH8A_1_1"}
{"score": 0.021845072507858276, "chain_id": "3A1COHJ8NJU7LZHTDINVTC7W5USH8A_1_2"}
{"score": 0.027951272204518318, "chain_id": "3A1COHJ8NJU7LZHTDINVTC7W5USH8A_1_3"}
{"score": 0.02852707915008068, "chain_id": "3A1COHJ8NJU7LZHTDINVTC7W5USH8A_1_4"}
{"score": 0.01988784410059452, "chain_id": "3A1COHJ8NJU7LZHTDINVTC7W5USH8A_1_5"}
{"score": 0.041557811200618744, "chain_id": "3A1COHJ8NJU7LZHTDINVTC7W5USH8A_1_6"}
{"score": 0.02251906879246235, "chain_id": "3A1COHJ8NJU7LZHTDINVTC7W5USH8A_1_7"}
{"score": 0.027757389470934868, "chain_id": "3A1COHJ8NJU7LZHTDINVTC7W5USH8A_1_8"}
{"score": 0.016201838850975037, "chain_id": "3A1COHJ8NJU7LZHTDINVTC7W5USH8A_1_9"}
{"score": 0.015837056562304497, "chain_id": "3A1COHJ8NJU7LZHTDINVTC7W5USH8A_1_10"}
{"score": 0.9135614633560181, "chain_id": "3HVVDCPGTERC5EZ6QG2E68YM94HYT8_1_1"}
{"score": 0.22425177693367004, "chain_id": "3HVVDCPGTERC5EZ6QG2E68YM94HYT8_1_2"}
{"score": 0.27602237462997437, "chain_id": "3HVVDCPGTERC5EZ6QG2E68YM94HYT8_1_3"}
{"score": 0.20501896739006042, "chain_id": "3HVVDCPGTERC5EZ6QG2E68YM94HYT8_1_5"}
{"score": 0.488496869802475, "chain_id": "3HVVDCPGTERC5EZ6QG2E68YM94HYT8_1_6"}
{"score": 0.4298863112926483, "chain_id": "3HVVDCPGTERC5EZ6QG2E68YM94HYT8_1_8"}
{"score": 0.047257401049137115, "chain_id": "3HVVDCPGTERC5EZ6QG2E68YM94HYT8_1_9"}
{"score": 0.08870340883731842, "chain_id": "3HVVDCPGTERC5EZ6QG2E68YM94HYT8_1_4"}
{"score": 0.37379083037376404, "chain_id": "3HVVDCPGTERC5EZ6QG2E68YM94HYT8_1_7"}
{"score": 0.2941233515739441, "chain_id": "3HVVDCPGTERC5EZ6QG2E68YM94HYT8_1_10"}
{"score": 0.7841662764549255, "chain_id": "3ZWFC4W1UU6TP85JH15VH8QOGEURF4_1_2"}
{"score": 0.3477177023887634, "chain_id": "3ZWFC4W1UU6TP85JH15VH8QOGEURF4_1_1"}
{"score": 0.11026296764612198, "chain_id": "3ZWFC4W1UU6TP85JH15VH8QOGEURF4_1_3"}
{"score": 0.02791208028793335, "chain_id": "3ZWFC4W1UU6TP85JH15VH8QOGEURF4_1_4"}
{"score": 0.19133567810058594, "chain_id": "3ZWFC4W1UU6TP85JH15VH8QOGEURF4_1_5"}
{"score": 0.041667159646749496, "chain_id": "3ZWFC4W1UU6TP85JH15VH8QOGEURF4_1_6"}
{"score": 0.04808575659990311, "chain_id": "3ZWFC4W1UU6TP85JH15VH8QOGEURF4_1_7"}
{"score": 0.060709524899721146, "chain_id": "3ZWFC4W1UU6TP85JH15VH8QOGEURF4_1_8"}
{"score": 0.41560912132263184, "chain_id": "3ZWFC4W1UU6TP85JH15VH8QOGEURF4_1_9"}
{"score": 0.25687599182128906, "chain_id": "3ZWFC4W1UU6TP85JH15VH8QOGEURF4_1_10"}
{"score": 0.43433520197868347, "chain_id": "3KV0LJBBH2KZVIX03O98CYAX76EMRQ_1_1"}
{"score": 0.7958104610443115, "chain_id": "3KV0LJBBH2KZVIX03O98CYAX76EMRQ_1_2"}
{"score": 0.8323804140090942, "chain_id": "3KV0LJBBH2KZVIX03O98CYAX76EMRQ_1_3"}
{"score": 0.8442041277885437, "chain_id": "3KV0LJBBH2KZVIX03O98CYAX76EMRQ_1_4"}
{"score": 0.03199255093932152, "chain_id": "3KV0LJBBH2KZVIX03O98CYAX76EMRQ_1_5"}
{"score": 0.020507311448454857, "chain_id": "3KV0LJBBH2KZVIX03O98CYAX76EMRQ_1_6"}
{"score": 0.05251745134592056, "chain_id": "3KV0LJBBH2KZVIX03O98CYAX76EMRQ_1_7"}
{"score": 0.05932077020406723, "chain_id": "3KV0LJBBH2KZVIX03O98CYAX76EMRQ_1_8"}
{"score": 0.07003408670425415, "chain_id": "3KV0LJBBH2KZVIX03O98CYAX76EMRQ_1_9"}
{"score": 0.07343368977308273, "chain_id": "3KV0LJBBH2KZVIX03O98CYAX76EMRQ_1_10"}
{"score": 0.9883397817611694, "chain_id": "3SEPORI8WNY7V8A2G2DGPAHWM7LZAR_1_1"}
{"score": 0.8071951270103455, "chain_id": "3SEPORI8WNY7V8A2G2DGPAHWM7LZAR_1_2"}
{"score": 0.9475386738777161, "chain_id": "3SEPORI8WNY7V8A2G2DGPAHWM7LZAR_1_3"}
{"score": 0.4030727744102478, "chain_id": "3SEPORI8WNY7V8A2G2DGPAHWM7LZAR_1_7"}
{"score": 0.1852044016122818, "chain_id": "3SEPORI8WNY7V8A2G2DGPAHWM7LZAR_1_9"}
{"score": 0.5017093420028687, "chain_id": "3SEPORI8WNY7V8A2G2DGPAHWM7LZAR_1_4"}
{"score": 0.1938103884458542, "chain_id": "3SEPORI8WNY7V8A2G2DGPAHWM7LZAR_1_5"}
{"score": 0.5518460273742676, "chain_id": "3SEPORI8WNY7V8A2G2DGPAHWM7LZAR_1_6"}
{"score": 0.29078051447868347, "chain_id": "3SEPORI8WNY7V8A2G2DGPAHWM7LZAR_1_8"}
{"score": 0.08402955532073975, "chain_id": "3SEPORI8WNY7V8A2G2DGPAHWM7LZAR_1_10"}
{"score": 0.0423220656812191, "chain_id": "3DYGAII7PL754KFDIPC0OCUNGTAQPK_1_10"}
{"score": 0.11372460424900055, "chain_id": "3DYGAII7PL754KFDIPC0OCUNGTAQPK_1_1"}
{"score": 0.0492272675037384, "chain_id": "3DYGAII7PL754KFDIPC0OCUNGTAQPK_1_2"}
{"score": 0.16280417144298553, "chain_id": "3DYGAII7PL754KFDIPC0OCUNGTAQPK_1_3"}
{"score": 0.2361663281917572, "chain_id": "3DYGAII7PL754KFDIPC0OCUNGTAQPK_1_4"}
{"score": 0.05174516141414642, "chain_id": "3DYGAII7PL754KFDIPC0OCUNGTAQPK_1_5"}
{"score": 0.07357559353113174, "chain_id": "3DYGAII7PL754KFDIPC0OCUNGTAQPK_1_6"}
{"score": 0.14826984703540802, "chain_id": "3DYGAII7PL754KFDIPC0OCUNGTAQPK_1_7"}
{"score": 0.22140690684318542, "chain_id": "3DYGAII7PL754KFDIPC0OCUNGTAQPK_1_8"}
{"score": 0.2659648656845093, "chain_id": "3DYGAII7PL754KFDIPC0OCUNGTAQPK_1_9"}
{"score": 0.04849947243928909, "chain_id": "3TK8OJTYM1KX9SBU4O6AUZTV2JYPVN_1_1"}
{"score": 0.06961842626333237, "chain_id": "3TK8OJTYM1KX9SBU4O6AUZTV2JYPVN_1_2"}
{"score": 0.06400275975465775, "chain_id": "3TK8OJTYM1KX9SBU4O6AUZTV2JYPVN_1_3"}
{"score": 0.05277493596076965, "chain_id": "3TK8OJTYM1KX9SBU4O6AUZTV2JYPVN_1_4"}
{"score": 0.28100454807281494, "chain_id": "3TK8OJTYM1KX9SBU4O6AUZTV2JYPVN_1_5"}
{"score": 0.07806456834077835, "chain_id": "3TK8OJTYM1KX9SBU4O6AUZTV2JYPVN_1_6"}
{"score": 0.1193971112370491, "chain_id": "3TK8OJTYM1KX9SBU4O6AUZTV2JYPVN_1_7"}
{"score": 0.03704743832349777, "chain_id": "3TK8OJTYM1KX9SBU4O6AUZTV2JYPVN_1_8"}
{"score": 0.1440773606300354, "chain_id": "3TK8OJTYM1KX9SBU4O6AUZTV2JYPVN_1_9"}
{"score": 0.03068503364920616, "chain_id": "3TK8OJTYM1KX9SBU4O6AUZTV2JYPVN_1_10"}
{"score": 0.44431108236312866, "chain_id": "33IZTU6J810MQ9WHWKBMDPVR6L9XSP_1_1"}
{"score": 0.05002513900399208, "chain_id": "33IZTU6J810MQ9WHWKBMDPVR6L9XSP_1_2"}
{"score": 0.3004089891910553, "chain_id": "33IZTU6J810MQ9WHWKBMDPVR6L9XSP_1_3"}
{"score": 0.7649312615394592, "chain_id": "33IZTU6J810MQ9WHWKBMDPVR6L9XSP_1_4"}
{"score": 0.025636114180088043, "chain_id": "33IZTU6J810MQ9WHWKBMDPVR6L9XSP_1_5"}
{"score": 0.223946675658226, "chain_id": "33IZTU6J810MQ9WHWKBMDPVR6L9XSP_1_6"}
{"score": 0.08568264544010162, "chain_id": "33IZTU6J810MQ9WHWKBMDPVR6L9XSP_1_7"}
{"score": 0.2609943747520447, "chain_id": "33IZTU6J810MQ9WHWKBMDPVR6L9XSP_1_8"}
{"score": 0.4417230188846588, "chain_id": "33IZTU6J810MQ9WHWKBMDPVR6L9XSP_1_9"}
{"score": 0.028761887922883034, "chain_id": "33IZTU6J810MQ9WHWKBMDPVR6L9XSP_1_10"}
{"score": 0.927692174911499, "chain_id": "378XPAWRUCCL0ILSGYPUPFE6S76AIK_1_1"}
{"score": 0.944140613079071, "chain_id": "378XPAWRUCCL0ILSGYPUPFE6S76AIK_1_2"}
{"score": 0.6519410610198975, "chain_id": "378XPAWRUCCL0ILSGYPUPFE6S76AIK_1_3"}
{"score": 0.3573085069656372, "chain_id": "378XPAWRUCCL0ILSGYPUPFE6S76AIK_1_4"}
{"score": 0.06882073730230331, "chain_id": "378XPAWRUCCL0ILSGYPUPFE6S76AIK_1_5"}
{"score": 0.08533058315515518, "chain_id": "378XPAWRUCCL0ILSGYPUPFE6S76AIK_1_6"}
{"score": 0.07935710996389389, "chain_id": "378XPAWRUCCL0ILSGYPUPFE6S76AIK_1_7"}
{"score": 0.01852201670408249, "chain_id": "378XPAWRUCCL0ILSGYPUPFE6S76AIK_1_8"}
{"score": 0.813421905040741, "chain_id": "378XPAWRUCCL0ILSGYPUPFE6S76AIK_1_9"}
{"score": 0.6580358147621155, "chain_id": "378XPAWRUCCL0ILSGYPUPFE6S76AIK_1_10"}
{"score": 0.9914211630821228, "chain_id": "3EKVH9QMEY3FN4A2B5V4S0FVOEMD2U_1_1"}
{"score": 0.9907947182655334, "chain_id": "3EKVH9QMEY3FN4A2B5V4S0FVOEMD2U_1_2"}
{"score": 0.9890899062156677, "chain_id": "3EKVH9QMEY3FN4A2B5V4S0FVOEMD2U_1_3"}
{"score": 0.982333779335022, "chain_id": "3EKVH9QMEY3FN4A2B5V4S0FVOEMD2U_1_4"}
{"score": 0.10831145942211151, "chain_id": "3EKVH9QMEY3FN4A2B5V4S0FVOEMD2U_1_5"}
{"score": 0.05701894313097, "chain_id": "3EKVH9QMEY3FN4A2B5V4S0FVOEMD2U_1_6"}
{"score": 0.015817811712622643, "chain_id": "3EKVH9QMEY3FN4A2B5V4S0FVOEMD2U_1_7"}
{"score": 0.010915873572230339, "chain_id": "3EKVH9QMEY3FN4A2B5V4S0FVOEMD2U_1_8"}
{"score": 0.20215103030204773, "chain_id": "3EKVH9QMEY3FN4A2B5V4S0FVOEMD2U_1_9"}
{"score": 0.08425173163414001, "chain_id": "3EKVH9QMEY3FN4A2B5V4S0FVOEMD2U_1_10"}
{"score": 0.16681131720542908, "chain_id": "30OG32W0SUAG4WDVTJ48Q60EZMSENV_1_5"}
{"score": 0.16681131720542908, "chain_id": "30OG32W0SUAG4WDVTJ48Q60EZMSENV_1_8"}
{"score": 0.7730688452720642, "chain_id": "30OG32W0SUAG4WDVTJ48Q60EZMSENV_1_1"}
{"score": 0.7345470190048218, "chain_id": "30OG32W0SUAG4WDVTJ48Q60EZMSENV_1_2"}
{"score": 0.3637889623641968, "chain_id": "30OG32W0SUAG4WDVTJ48Q60EZMSENV_1_3"}
{"score": 0.527812659740448, "chain_id": "30OG32W0SUAG4WDVTJ48Q60EZMSENV_1_4"}
{"score": 0.3040047883987427, "chain_id": "30OG32W0SUAG4WDVTJ48Q60EZMSENV_1_6"}
{"score": 0.0634351521730423, "chain_id": "30OG32W0SUAG4WDVTJ48Q60EZMSENV_1_7"}
{"score": 0.1477620154619217, "chain_id": "30OG32W0SUAG4WDVTJ48Q60EZMSENV_1_9"}
{"score": 0.1669739931821823, "chain_id": "30OG32W0SUAG4WDVTJ48Q60EZMSENV_1_10"}
{"score": 0.9914211630821228, "chain_id": "37QW5D2ZRGLWB8V9OCZUXQEYBL4S8U_1_1"}
{"score": 0.9907947182655334, "chain_id": "37QW5D2ZRGLWB8V9OCZUXQEYBL4S8U_1_2"}
{"score": 0.9890899062156677, "chain_id": "37QW5D2ZRGLWB8V9OCZUXQEYBL4S8U_1_3"}
{"score": 0.982333779335022, "chain_id": "37QW5D2ZRGLWB8V9OCZUXQEYBL4S8U_1_4"}
{"score": 0.10831145942211151, "chain_id": "37QW5D2ZRGLWB8V9OCZUXQEYBL4S8U_1_5"}
{"score": 0.05701894313097, "chain_id": "37QW5D2ZRGLWB8V9OCZUXQEYBL4S8U_1_6"}
{"score": 0.015817811712622643, "chain_id": "37QW5D2ZRGLWB8V9OCZUXQEYBL4S8U_1_7"}
{"score": 0.010915873572230339, "chain_id": "37QW5D2ZRGLWB8V9OCZUXQEYBL4S8U_1_8"}
{"score": 0.20215103030204773, "chain_id": "37QW5D2ZRGLWB8V9OCZUXQEYBL4S8U_1_9"}
{"score": 0.08425173163414001, "chain_id": "37QW5D2ZRGLWB8V9OCZUXQEYBL4S8U_1_10"}
{"score": 0.8846365809440613, "chain_id": "3M81GAB8A0I30QE3ZKUZTSPY13OQBM_1_1"}
{"score": 0.8013771772384644, "chain_id": "3M81GAB8A0I30QE3ZKUZTSPY13OQBM_1_2"}
{"score": 0.9307407140731812, "chain_id": "3M81GAB8A0I30QE3ZKUZTSPY13OQBM_1_3"}
{"score": 0.8500295281410217, "chain_id": "3M81GAB8A0I30QE3ZKUZTSPY13OQBM_1_5"}
{"score": 0.015389522537589073, "chain_id": "3M81GAB8A0I30QE3ZKUZTSPY13OQBM_1_8"}
{"score": 0.03803086653351784, "chain_id": "3M81GAB8A0I30QE3ZKUZTSPY13OQBM_1_4"}
{"score": 0.06085469201207161, "chain_id": "3M81GAB8A0I30QE3ZKUZTSPY13OQBM_1_6"}
{"score": 0.04415219649672508, "chain_id": "3M81GAB8A0I30QE3ZKUZTSPY13OQBM_1_7"}
{"score": 0.01940683089196682, "chain_id": "3M81GAB8A0I30QE3ZKUZTSPY13OQBM_1_9"}
{"score": 0.04102384299039841, "chain_id": "3M81GAB8A0I30QE3ZKUZTSPY13OQBM_1_10"}
{"score": 0.9871691465377808, "chain_id": "3KV0LJBBH2KZVIX03O98CYAXA3GMRP_1_1"}
{"score": 0.9866064786911011, "chain_id": "3KV0LJBBH2KZVIX03O98CYAXA3GMRP_1_2"}
{"score": 0.39612823724746704, "chain_id": "3KV0LJBBH2KZVIX03O98CYAXA3GMRP_1_5"}
{"score": 0.044094379991292953, "chain_id": "3KV0LJBBH2KZVIX03O98CYAXA3GMRP_1_3"}
{"score": 0.05171826481819153, "chain_id": "3KV0LJBBH2KZVIX03O98CYAXA3GMRP_1_4"}
{"score": 0.5033102631568909, "chain_id": "3KV0LJBBH2KZVIX03O98CYAXA3GMRP_1_6"}
{"score": 0.051921866834163666, "chain_id": "3KV0LJBBH2KZVIX03O98CYAXA3GMRP_1_7"}
{"score": 0.05089227110147476, "chain_id": "3KV0LJBBH2KZVIX03O98CYAXA3GMRP_1_8"}
{"score": 0.11375561356544495, "chain_id": "3KV0LJBBH2KZVIX03O98CYAXA3GMRP_1_9"}
{"score": 0.031196175143122673, "chain_id": "3KV0LJBBH2KZVIX03O98CYAXA3GMRP_1_10"}
{"score": 0.9344497919082642, "chain_id": "3G0WWMR1UVJ51Z302AZ8KNPSJCONQZ_1_1"}
{"score": 0.6862500309944153, "chain_id": "3G0WWMR1UVJ51Z302AZ8KNPSJCONQZ_1_3"}
{"score": 0.9817349314689636, "chain_id": "3G0WWMR1UVJ51Z302AZ8KNPSJCONQZ_1_2"}
{"score": 0.5884066224098206, "chain_id": "3G0WWMR1UVJ51Z302AZ8KNPSJCONQZ_1_4"}
{"score": 0.12706522643566132, "chain_id": "3G0WWMR1UVJ51Z302AZ8KNPSJCONQZ_1_5"}
{"score": 0.043535977602005005, "chain_id": "3G0WWMR1UVJ51Z302AZ8KNPSJCONQZ_1_6"}
{"score": 0.0350906103849411, "chain_id": "3G0WWMR1UVJ51Z302AZ8KNPSJCONQZ_1_7"}
{"score": 0.02619265764951706, "chain_id": "3G0WWMR1UVJ51Z302AZ8KNPSJCONQZ_1_8"}
{"score": 0.06575702875852585, "chain_id": "3G0WWMR1UVJ51Z302AZ8KNPSJCONQZ_1_9"}
{"score": 0.01810881681740284, "chain_id": "3G0WWMR1UVJ51Z302AZ8KNPSJCONQZ_1_10"}
{"score": 0.991456925868988, "chain_id": "31N2WW6R9RP166KH6B4ZZAN87UO3F7_1_1"}
{"score": 0.9908260703086853, "chain_id": "31N2WW6R9RP166KH6B4ZZAN87UO3F7_1_2"}
{"score": 0.9891327023506165, "chain_id": "31N2WW6R9RP166KH6B4ZZAN87UO3F7_1_3"}
{"score": 0.982300341129303, "chain_id": "31N2WW6R9RP166KH6B4ZZAN87UO3F7_1_4"}
{"score": 0.11247270554304123, "chain_id": "31N2WW6R9RP166KH6B4ZZAN87UO3F7_1_5"}
{"score": 0.05921418219804764, "chain_id": "31N2WW6R9RP166KH6B4ZZAN87UO3F7_1_6"}
{"score": 0.014954044483602047, "chain_id": "31N2WW6R9RP166KH6B4ZZAN87UO3F7_1_7"}
{"score": 0.011196529492735863, "chain_id": "31N2WW6R9RP166KH6B4ZZAN87UO3F7_1_8"}
{"score": 0.2250138223171234, "chain_id": "31N2WW6R9RP166KH6B4ZZAN87UO3F7_1_9"}
{"score": 0.07807155698537827, "chain_id": "31N2WW6R9RP166KH6B4ZZAN87UO3F7_1_10"}
{"score": 0.04651867225766182, "chain_id": "3X1FV8S5JXQRWFIV15GN0QF3013GVJ_1_1"}
{"score": 0.02487765997648239, "chain_id": "3X1FV8S5JXQRWFIV15GN0QF3013GVJ_1_2"}
{"score": 0.06960482895374298, "chain_id": "3X1FV8S5JXQRWFIV15GN0QF3013GVJ_1_3"}
{"score": 0.033308129757642746, "chain_id": "3X1FV8S5JXQRWFIV15GN0QF3013GVJ_1_4"}
{"score": 0.023415081202983856, "chain_id": "3X1FV8S5JXQRWFIV15GN0QF3013GVJ_1_5"}
{"score": 0.02083052136003971, "chain_id": "3X1FV8S5JXQRWFIV15GN0QF3013GVJ_1_6"}
{"score": 0.11134645342826843, "chain_id": "3X1FV8S5JXQRWFIV15GN0QF3013GVJ_1_7"}
{"score": 0.0314791202545166, "chain_id": "3X1FV8S5JXQRWFIV15GN0QF3013GVJ_1_8"}
{"score": 0.027757585048675537, "chain_id": "3X1FV8S5JXQRWFIV15GN0QF3013GVJ_1_9"}
{"score": 0.16283494234085083, "chain_id": "3X1FV8S5JXQRWFIV15GN0QF3013GVJ_1_10"}
{"score": 0.991431474685669, "chain_id": "3300DTYQT2G17TQN9BWPU0VJ5IYQEK_1_1"}
{"score": 0.9904035329818726, "chain_id": "3300DTYQT2G17TQN9BWPU0VJ5IYQEK_1_2"}
{"score": 0.989458441734314, "chain_id": "3300DTYQT2G17TQN9BWPU0VJ5IYQEK_1_3"}
{"score": 0.9824819564819336, "chain_id": "3300DTYQT2G17TQN9BWPU0VJ5IYQEK_1_4"}
{"score": 0.0405360646545887, "chain_id": "3300DTYQT2G17TQN9BWPU0VJ5IYQEK_1_5"}
{"score": 0.05244296044111252, "chain_id": "3300DTYQT2G17TQN9BWPU0VJ5IYQEK_1_6"}
{"score": 0.020976418629288673, "chain_id": "3300DTYQT2G17TQN9BWPU0VJ5IYQEK_1_7"}
{"score": 0.010003465227782726, "chain_id": "3300DTYQT2G17TQN9BWPU0VJ5IYQEK_1_8"}
{"score": 0.16093574464321136, "chain_id": "3300DTYQT2G17TQN9BWPU0VJ5IYQEK_1_9"}
{"score": 0.16526076197624207, "chain_id": "3300DTYQT2G17TQN9BWPU0VJ5IYQEK_1_10"}
{"score": 0.9660302996635437, "chain_id": "3E1QT0TDFP87HUSDJ05GTO8BZNM8IZ_1_1"}
{"score": 0.9809946417808533, "chain_id": "3E1QT0TDFP87HUSDJ05GTO8BZNM8IZ_1_2"}
{"score": 0.9899889230728149, "chain_id": "3E1QT0TDFP87HUSDJ05GTO8BZNM8IZ_1_5"}
{"score": 0.1960994452238083, "chain_id": "3E1QT0TDFP87HUSDJ05GTO8BZNM8IZ_1_6"}
{"score": 0.7080116271972656, "chain_id": "3E1QT0TDFP87HUSDJ05GTO8BZNM8IZ_1_3"}
{"score": 0.5918977856636047, "chain_id": "3E1QT0TDFP87HUSDJ05GTO8BZNM8IZ_1_4"}
{"score": 0.9323411583900452, "chain_id": "3E1QT0TDFP87HUSDJ05GTO8BZNM8IZ_1_7"}
{"score": 0.3008139729499817, "chain_id": "3E1QT0TDFP87HUSDJ05GTO8BZNM8IZ_1_8"}
{"score": 0.08438356220722198, "chain_id": "3E1QT0TDFP87HUSDJ05GTO8BZNM8IZ_1_9"}
{"score": 0.06222881004214287, "chain_id": "3E1QT0TDFP87HUSDJ05GTO8BZNM8IZ_1_10"}
{"score": 0.9880964756011963, "chain_id": "3YJ6NA41JBFOIXB0NZSRRBI11GLJPO_1_1"}
{"score": 0.5554947853088379, "chain_id": "3YJ6NA41JBFOIXB0NZSRRBI11GLJPO_1_5"}
{"score": 0.9505621194839478, "chain_id": "3YJ6NA41JBFOIXB0NZSRRBI11GLJPO_1_2"}
{"score": 0.8396114706993103, "chain_id": "3YJ6NA41JBFOIXB0NZSRRBI11GLJPO_1_3"}
{"score": 0.6702800393104553, "chain_id": "3YJ6NA41JBFOIXB0NZSRRBI11GLJPO_1_4"}
{"score": 0.42639869451522827, "chain_id": "3YJ6NA41JBFOIXB0NZSRRBI11GLJPO_1_6"}
{"score": 0.24067091941833496, "chain_id": "3YJ6NA41JBFOIXB0NZSRRBI11GLJPO_1_7"}
{"score": 0.08640672266483307, "chain_id": "3YJ6NA41JBFOIXB0NZSRRBI11GLJPO_1_8"}
{"score": 0.9004074335098267, "chain_id": "3YJ6NA41JBFOIXB0NZSRRBI11GLJPO_1_9"}
{"score": 0.0404350571334362, "chain_id": "3YJ6NA41JBFOIXB0NZSRRBI11GLJPO_1_10"}
{"score": 0.9459839463233948, "chain_id": "3KRVW3HTZNKBWXXDID9D28FTUPHMSX_1_2"}
{"score": 0.8334718346595764, "chain_id": "3KRVW3HTZNKBWXXDID9D28FTUPHMSX_1_4"}
{"score": 0.5989881753921509, "chain_id": "3KRVW3HTZNKBWXXDID9D28FTUPHMSX_1_10"}
{"score": 0.5063827037811279, "chain_id": "3KRVW3HTZNKBWXXDID9D28FTUPHMSX_1_1"}
{"score": 0.8325225710868835, "chain_id": "3KRVW3HTZNKBWXXDID9D28FTUPHMSX_1_3"}
{"score": 0.9382896423339844, "chain_id": "3KRVW3HTZNKBWXXDID9D28FTUPHMSX_1_5"}
{"score": 0.8678272366523743, "chain_id": "3KRVW3HTZNKBWXXDID9D28FTUPHMSX_1_6"}
{"score": 0.31510868668556213, "chain_id": "3KRVW3HTZNKBWXXDID9D28FTUPHMSX_1_7"}
{"score": 0.8886202573776245, "chain_id": "3KRVW3HTZNKBWXXDID9D28FTUPHMSX_1_8"}
{"score": 0.8950401544570923, "chain_id": "3KRVW3HTZNKBWXXDID9D28FTUPHMSX_1_9"}
{"score": 0.48880693316459656, "chain_id": "3WJ1OXY92AFSBC9F7CD3CQKSOJL8A9_1_1"}
{"score": 0.11548867076635361, "chain_id": "3WJ1OXY92AFSBC9F7CD3CQKSOJL8A9_1_2"}
{"score": 0.5312458872795105, "chain_id": "3WJ1OXY92AFSBC9F7CD3CQKSOJL8A9_1_3"}
{"score": 0.3302268385887146, "chain_id": "3WJ1OXY92AFSBC9F7CD3CQKSOJL8A9_1_4"}
{"score": 0.06025191769003868, "chain_id": "3WJ1OXY92AFSBC9F7CD3CQKSOJL8A9_1_5"}
{"score": 0.24139289557933807, "chain_id": "3WJ1OXY92AFSBC9F7CD3CQKSOJL8A9_1_6"}
{"score": 0.10222439467906952, "chain_id": "3WJ1OXY92AFSBC9F7CD3CQKSOJL8A9_1_7"}
{"score": 0.49949702620506287, "chain_id": "3WJ1OXY92AFSBC9F7CD3CQKSOJL8A9_1_8"}
{"score": 0.023583337664604187, "chain_id": "3WJ1OXY92AFSBC9F7CD3CQKSOJL8A9_1_9"}
{"score": 0.046171821653842926, "chain_id": "3WJ1OXY92AFSBC9F7CD3CQKSOJL8A9_1_10"}
{"score": 0.8795865178108215, "chain_id": "30X31N5D63PAUWOOLAJ8THKT0Q6SAX_1_1"}
{"score": 0.9790488481521606, "chain_id": "30X31N5D63PAUWOOLAJ8THKT0Q6SAX_1_2"}
{"score": 0.9260743856430054, "chain_id": "30X31N5D63PAUWOOLAJ8THKT0Q6SAX_1_4"}
{"score": 0.5915571451187134, "chain_id": "30X31N5D63PAUWOOLAJ8THKT0Q6SAX_1_5"}
{"score": 0.9189905524253845, "chain_id": "30X31N5D63PAUWOOLAJ8THKT0Q6SAX_1_3"}
{"score": 0.45578867197036743, "chain_id": "30X31N5D63PAUWOOLAJ8THKT0Q6SAX_1_6"}
{"score": 0.3400470018386841, "chain_id": "30X31N5D63PAUWOOLAJ8THKT0Q6SAX_1_7"}
{"score": 0.6245846748352051, "chain_id": "30X31N5D63PAUWOOLAJ8THKT0Q6SAX_1_8"}
{"score": 0.905224621295929, "chain_id": "30X31N5D63PAUWOOLAJ8THKT0Q6SAX_1_9"}
{"score": 0.5341776013374329, "chain_id": "30X31N5D63PAUWOOLAJ8THKT0Q6SAX_1_10"}
{"score": 0.9238304495811462, "chain_id": "3C8HJ7UOP7T8X9JRD53LY1CWI10MZG_1_1"}
{"score": 0.9792511463165283, "chain_id": "3C8HJ7UOP7T8X9JRD53LY1CWI10MZG_1_2"}
{"score": 0.9786314964294434, "chain_id": "3C8HJ7UOP7T8X9JRD53LY1CWI10MZG_1_3"}
{"score": 0.8495381474494934, "chain_id": "3C8HJ7UOP7T8X9JRD53LY1CWI10MZG_1_4"}
{"score": 0.9405500888824463, "chain_id": "3C8HJ7UOP7T8X9JRD53LY1CWI10MZG_1_5"}
{"score": 0.8878045678138733, "chain_id": "3C8HJ7UOP7T8X9JRD53LY1CWI10MZG_1_6"}
{"score": 0.1833026111125946, "chain_id": "3C8HJ7UOP7T8X9JRD53LY1CWI10MZG_1_7"}
{"score": 0.0593782402575016, "chain_id": "3C8HJ7UOP7T8X9JRD53LY1CWI10MZG_1_8"}
{"score": 0.2150518149137497, "chain_id": "3C8HJ7UOP7T8X9JRD53LY1CWI10MZG_1_9"}
{"score": 0.10216104239225388, "chain_id": "3C8HJ7UOP7T8X9JRD53LY1CWI10MZG_1_10"}
{"score": 0.928570032119751, "chain_id": "3XLBSAQ9Z4BPC6C49Z1WFJF6NSZZ78_1_1"}
{"score": 0.9315063953399658, "chain_id": "3XLBSAQ9Z4BPC6C49Z1WFJF6NSZZ78_1_2"}
{"score": 0.4112953841686249, "chain_id": "3XLBSAQ9Z4BPC6C49Z1WFJF6NSZZ78_1_3"}
{"score": 0.9436720013618469, "chain_id": "3XLBSAQ9Z4BPC6C49Z1WFJF6NSZZ78_1_7"}
{"score": 0.1725205034017563, "chain_id": "3XLBSAQ9Z4BPC6C49Z1WFJF6NSZZ78_1_4"}
{"score": 0.11424032598733902, "chain_id": "3XLBSAQ9Z4BPC6C49Z1WFJF6NSZZ78_1_5"}
{"score": 0.7151238322257996, "chain_id": "3XLBSAQ9Z4BPC6C49Z1WFJF6NSZZ78_1_6"}
{"score": 0.12212931364774704, "chain_id": "3XLBSAQ9Z4BPC6C49Z1WFJF6NSZZ78_1_8"}
{"score": 0.8445336818695068, "chain_id": "3XLBSAQ9Z4BPC6C49Z1WFJF6NSZZ78_1_9"}
{"score": 0.5087730884552002, "chain_id": "3XLBSAQ9Z4BPC6C49Z1WFJF6NSZZ78_1_10"}
{"score": 0.9139493107795715, "chain_id": "34X6J5FLPTX9I9CFNC7GRG8B9M0QJ2_1_1"}
{"score": 0.9609729647636414, "chain_id": "34X6J5FLPTX9I9CFNC7GRG8B9M0QJ2_1_2"}
{"score": 0.02400178462266922, "chain_id": "34X6J5FLPTX9I9CFNC7GRG8B9M0QJ2_1_6"}
{"score": 0.16145634651184082, "chain_id": "34X6J5FLPTX9I9CFNC7GRG8B9M0QJ2_1_3"}
{"score": 0.05033571273088455, "chain_id": "34X6J5FLPTX9I9CFNC7GRG8B9M0QJ2_1_4"}
{"score": 0.045995473861694336, "chain_id": "34X6J5FLPTX9I9CFNC7GRG8B9M0QJ2_1_5"}
{"score": 0.7975853681564331, "chain_id": "34X6J5FLPTX9I9CFNC7GRG8B9M0QJ2_1_7"}
{"score": 0.7230958938598633, "chain_id": "34X6J5FLPTX9I9CFNC7GRG8B9M0QJ2_1_8"}
{"score": 0.12243123352527618, "chain_id": "34X6J5FLPTX9I9CFNC7GRG8B9M0QJ2_1_9"}
{"score": 0.03979642316699028, "chain_id": "34X6J5FLPTX9I9CFNC7GRG8B9M0QJ2_1_10"}
{"score": 0.9416494369506836, "chain_id": "3DY4FPOOA1NIL5R9HGAZZUTA1UQVR9_1_1"}
{"score": 0.890999436378479, "chain_id": "3DY4FPOOA1NIL5R9HGAZZUTA1UQVR9_1_2"}
{"score": 0.8910493850708008, "chain_id": "3DY4FPOOA1NIL5R9HGAZZUTA1UQVR9_1_3"}
{"score": 0.7713608741760254, "chain_id": "3DY4FPOOA1NIL5R9HGAZZUTA1UQVR9_1_4"}
{"score": 0.1646842658519745, "chain_id": "3DY4FPOOA1NIL5R9HGAZZUTA1UQVR9_1_5"}
{"score": 0.09320870786905289, "chain_id": "3DY4FPOOA1NIL5R9HGAZZUTA1UQVR9_1_6"}
{"score": 0.10206934064626694, "chain_id": "3DY4FPOOA1NIL5R9HGAZZUTA1UQVR9_1_7"}
{"score": 0.055062148720026016, "chain_id": "3DY4FPOOA1NIL5R9HGAZZUTA1UQVR9_1_8"}
{"score": 0.03504324331879616, "chain_id": "3DY4FPOOA1NIL5R9HGAZZUTA1UQVR9_1_9"}
{"score": 0.07180893421173096, "chain_id": "3DY4FPOOA1NIL5R9HGAZZUTA1UQVR9_1_10"}
{"score": 0.9706440567970276, "chain_id": "3TY7ZAOG5FJG50DYOZDDDPH6C8JK0H_1_1"}
{"score": 0.16442173719406128, "chain_id": "3TY7ZAOG5FJG50DYOZDDDPH6C8JK0H_1_2"}
{"score": 0.06612637639045715, "chain_id": "3TY7ZAOG5FJG50DYOZDDDPH6C8JK0H_1_3"}
{"score": 0.06378040462732315, "chain_id": "3TY7ZAOG5FJG50DYOZDDDPH6C8JK0H_1_4"}
{"score": 0.05445870757102966, "chain_id": "3TY7ZAOG5FJG50DYOZDDDPH6C8JK0H_1_5"}
{"score": 0.1707061380147934, "chain_id": "3TY7ZAOG5FJG50DYOZDDDPH6C8JK0H_1_6"}
{"score": 0.19823598861694336, "chain_id": "3TY7ZAOG5FJG50DYOZDDDPH6C8JK0H_1_7"}
{"score": 0.13720405101776123, "chain_id": "3TY7ZAOG5FJG50DYOZDDDPH6C8JK0H_1_8"}
{"score": 0.18514658510684967, "chain_id": "3TY7ZAOG5FJG50DYOZDDDPH6C8JK0H_1_9"}
{"score": 0.21085476875305176, "chain_id": "3TY7ZAOG5FJG50DYOZDDDPH6C8JK0H_1_10"}
{"score": 0.9675098061561584, "chain_id": "3BV8HQ2ZZW057YQREXG5SCO1LCH6AM_1_1"}
{"score": 0.9686607718467712, "chain_id": "3BV8HQ2ZZW057YQREXG5SCO1LCH6AM_1_2"}
{"score": 0.9609179496765137, "chain_id": "3BV8HQ2ZZW057YQREXG5SCO1LCH6AM_1_3"}
{"score": 0.16335009038448334, "chain_id": "3BV8HQ2ZZW057YQREXG5SCO1LCH6AM_1_4"}
{"score": 0.13387320935726166, "chain_id": "3BV8HQ2ZZW057YQREXG5SCO1LCH6AM_1_5"}
{"score": 0.08047284185886383, "chain_id": "3BV8HQ2ZZW057YQREXG5SCO1LCH6AM_1_6"}
{"score": 0.05880990996956825, "chain_id": "3BV8HQ2ZZW057YQREXG5SCO1LCH6AM_1_7"}
{"score": 0.1340886801481247, "chain_id": "3BV8HQ2ZZW057YQREXG5SCO1LCH6AM_1_8"}
{"score": 0.11687721312046051, "chain_id": "3BV8HQ2ZZW057YQREXG5SCO1LCH6AM_1_9"}
{"score": 0.057836584746837616, "chain_id": "3BV8HQ2ZZW057YQREXG5SCO1LCH6AM_1_10"}
{"score": 0.9593425989151001, "chain_id": "36U2A8VAG1YD2V9JW7OM5HBQPARYK4_1_1"}
{"score": 0.38863733410835266, "chain_id": "36U2A8VAG1YD2V9JW7OM5HBQPARYK4_1_3"}
{"score": 0.4457578659057617, "chain_id": "36U2A8VAG1YD2V9JW7OM5HBQPARYK4_1_2"}
{"score": 0.5915741920471191, "chain_id": "36U2A8VAG1YD2V9JW7OM5HBQPARYK4_1_4"}
{"score": 0.06723500043153763, "chain_id": "36U2A8VAG1YD2V9JW7OM5HBQPARYK4_1_5"}
{"score": 0.5162129998207092, "chain_id": "36U2A8VAG1YD2V9JW7OM5HBQPARYK4_1_6"}
{"score": 0.04551341384649277, "chain_id": "36U2A8VAG1YD2V9JW7OM5HBQPARYK4_1_7"}
{"score": 0.04656742513179779, "chain_id": "36U2A8VAG1YD2V9JW7OM5HBQPARYK4_1_8"}
{"score": 0.35623931884765625, "chain_id": "36U2A8VAG1YD2V9JW7OM5HBQPARYK4_1_9"}
{"score": 0.07357979565858841, "chain_id": "36U2A8VAG1YD2V9JW7OM5HBQPARYK4_1_10"}
{"score": 0.7407547831535339, "chain_id": "3Y5140Z9DXFSNMRU5H7RFA31RZVIP9_1_2"}
{"score": 0.4861578941345215, "chain_id": "3Y5140Z9DXFSNMRU5H7RFA31RZVIP9_1_10"}
{"score": 0.3044706881046295, "chain_id": "3Y5140Z9DXFSNMRU5H7RFA31RZVIP9_1_1"}
{"score": 0.6495702266693115, "chain_id": "3Y5140Z9DXFSNMRU5H7RFA31RZVIP9_1_3"}
{"score": 0.2377336323261261, "chain_id": "3Y5140Z9DXFSNMRU5H7RFA31RZVIP9_1_4"}
{"score": 0.12330301105976105, "chain_id": "3Y5140Z9DXFSNMRU5H7RFA31RZVIP9_1_5"}
{"score": 0.36778655648231506, "chain_id": "3Y5140Z9DXFSNMRU5H7RFA31RZVIP9_1_6"}
{"score": 0.8032005429267883, "chain_id": "3Y5140Z9DXFSNMRU5H7RFA31RZVIP9_1_7"}
{"score": 0.0868263840675354, "chain_id": "3Y5140Z9DXFSNMRU5H7RFA31RZVIP9_1_8"}
{"score": 0.2753188908100128, "chain_id": "3Y5140Z9DXFSNMRU5H7RFA31RZVIP9_1_9"}
{"score": 0.9383996725082397, "chain_id": "37FMASSAYCQQJSQKMCPQKQYCCYMBIV_1_1"}
{"score": 0.34463560581207275, "chain_id": "37FMASSAYCQQJSQKMCPQKQYCCYMBIV_1_6"}
{"score": 0.17158176004886627, "chain_id": "37FMASSAYCQQJSQKMCPQKQYCCYMBIV_1_2"}
{"score": 0.06715737283229828, "chain_id": "37FMASSAYCQQJSQKMCPQKQYCCYMBIV_1_3"}
{"score": 0.05910707637667656, "chain_id": "37FMASSAYCQQJSQKMCPQKQYCCYMBIV_1_4"}
{"score": 0.055264923721551895, "chain_id": "37FMASSAYCQQJSQKMCPQKQYCCYMBIV_1_5"}
{"score": 0.17913715541362762, "chain_id": "37FMASSAYCQQJSQKMCPQKQYCCYMBIV_1_7"}
{"score": 0.10584507137537003, "chain_id": "37FMASSAYCQQJSQKMCPQKQYCCYMBIV_1_8"}
{"score": 0.291666179895401, "chain_id": "37FMASSAYCQQJSQKMCPQKQYCCYMBIV_1_9"}
{"score": 0.29301518201828003, "chain_id": "37FMASSAYCQQJSQKMCPQKQYCCYMBIV_1_10"}
{"score": 0.8170627951622009, "chain_id": "3IJXV6UZ1XIDZZ79I9BGK53GSA7RIK_1_3"}
{"score": 0.9394452571868896, "chain_id": "3IJXV6UZ1XIDZZ79I9BGK53GSA7RIK_1_8"}
{"score": 0.1805572360754013, "chain_id": "3IJXV6UZ1XIDZZ79I9BGK53GSA7RIK_1_10"}
{"score": 0.20426402986049652, "chain_id": "3IJXV6UZ1XIDZZ79I9BGK53GSA7RIK_1_1"}
{"score": 0.09828375279903412, "chain_id": "3IJXV6UZ1XIDZZ79I9BGK53GSA7RIK_1_2"}
{"score": 0.4984123706817627, "chain_id": "3IJXV6UZ1XIDZZ79I9BGK53GSA7RIK_1_4"}
{"score": 0.28832653164863586, "chain_id": "3IJXV6UZ1XIDZZ79I9BGK53GSA7RIK_1_5"}
{"score": 0.1638599932193756, "chain_id": "3IJXV6UZ1XIDZZ79I9BGK53GSA7RIK_1_6"}
{"score": 0.07020781934261322, "chain_id": "3IJXV6UZ1XIDZZ79I9BGK53GSA7RIK_1_7"}
{"score": 0.13325735926628113, "chain_id": "3IJXV6UZ1XIDZZ79I9BGK53GSA7RIK_1_9"}
{"score": 0.7555909156799316, "chain_id": "3KAKFY4PGU1LGXM77JAK2700M4RI3N_1_2"}
{"score": 0.9834268093109131, "chain_id": "3KAKFY4PGU1LGXM77JAK2700M4RI3N_1_4"}
{"score": 0.7451780438423157, "chain_id": "3KAKFY4PGU1LGXM77JAK2700M4RI3N_1_5"}
{"score": 0.49353858828544617, "chain_id": "3KAKFY4PGU1LGXM77JAK2700M4RI3N_1_9"}
{"score": 0.9667642712593079, "chain_id": "3KAKFY4PGU1LGXM77JAK2700M4RI3N_1_1"}
{"score": 0.4815290868282318, "chain_id": "3KAKFY4PGU1LGXM77JAK2700M4RI3N_1_3"}
{"score": 0.26032519340515137, "chain_id": "3KAKFY4PGU1LGXM77JAK2700M4RI3N_1_6"}
{"score": 0.8656952977180481, "chain_id": "3KAKFY4PGU1LGXM77JAK2700M4RI3N_1_7"}
{"score": 0.05321730300784111, "chain_id": "3KAKFY4PGU1LGXM77JAK2700M4RI3N_1_8"}
{"score": 0.05628504604101181, "chain_id": "3KAKFY4PGU1LGXM77JAK2700M4RI3N_1_10"}
{"score": 0.8304702043533325, "chain_id": "3KJYX6QCM9A1NH8W9B1QX37JQD0JV8_1_3"}
{"score": 0.9746648669242859, "chain_id": "3KJYX6QCM9A1NH8W9B1QX37JQD0JV8_1_8"}
{"score": 0.30508890748023987, "chain_id": "3KJYX6QCM9A1NH8W9B1QX37JQD0JV8_1_1"}
{"score": 0.23644742369651794, "chain_id": "3KJYX6QCM9A1NH8W9B1QX37JQD0JV8_1_2"}
{"score": 0.4908422827720642, "chain_id": "3KJYX6QCM9A1NH8W9B1QX37JQD0JV8_1_4"}
{"score": 0.29459813237190247, "chain_id": "3KJYX6QCM9A1NH8W9B1QX37JQD0JV8_1_5"}
{"score": 0.14852288365364075, "chain_id": "3KJYX6QCM9A1NH8W9B1QX37JQD0JV8_1_6"}
{"score": 0.05834552273154259, "chain_id": "3KJYX6QCM9A1NH8W9B1QX37JQD0JV8_1_7"}
{"score": 0.1843043565750122, "chain_id": "3KJYX6QCM9A1NH8W9B1QX37JQD0JV8_1_9"}
{"score": 0.44167017936706543, "chain_id": "3KJYX6QCM9A1NH8W9B1QX37JQD0JV8_1_10"}
{"score": 0.9875463843345642, "chain_id": "3TAYZSBPLL7LPTTK8VQTNZ1VP0ES2N_1_4"}
{"score": 0.9577633738517761, "chain_id": "3TAYZSBPLL7LPTTK8VQTNZ1VP0ES2N_1_9"}
{"score": 0.928462028503418, "chain_id": "3TAYZSBPLL7LPTTK8VQTNZ1VP0ES2N_1_10"}
{"score": 0.9879844188690186, "chain_id": "3TAYZSBPLL7LPTTK8VQTNZ1VP0ES2N_1_1"}
{"score": 0.9815726280212402, "chain_id": "3TAYZSBPLL7LPTTK8VQTNZ1VP0ES2N_1_2"}
{"score": 0.9843961000442505, "chain_id": "3TAYZSBPLL7LPTTK8VQTNZ1VP0ES2N_1_3"}
{"score": 0.359526664018631, "chain_id": "3TAYZSBPLL7LPTTK8VQTNZ1VP0ES2N_1_5"}
{"score": 0.38629624247550964, "chain_id": "3TAYZSBPLL7LPTTK8VQTNZ1VP0ES2N_1_6"}
{"score": 0.28117093443870544, "chain_id": "3TAYZSBPLL7LPTTK8VQTNZ1VP0ES2N_1_7"}
{"score": 0.3898228108882904, "chain_id": "3TAYZSBPLL7LPTTK8VQTNZ1VP0ES2N_1_8"}
{"score": 0.3743549585342407, "chain_id": "3ATPCQ38J897QI0XKGBXB38UI93YAF_1_1"}
{"score": 0.4139532446861267, "chain_id": "3ATPCQ38J897QI0XKGBXB38UI93YAF_1_2"}
{"score": 0.10483521223068237, "chain_id": "3ATPCQ38J897QI0XKGBXB38UI93YAF_1_3"}
{"score": 0.0248115174472332, "chain_id": "3ATPCQ38J897QI0XKGBXB38UI93YAF_1_4"}
{"score": 0.047469817101955414, "chain_id": "3ATPCQ38J897QI0XKGBXB38UI93YAF_1_5"}
{"score": 0.14452104270458221, "chain_id": "3ATPCQ38J897QI0XKGBXB38UI93YAF_1_6"}
{"score": 0.052546024322509766, "chain_id": "3ATPCQ38J897QI0XKGBXB38UI93YAF_1_7"}
{"score": 0.13885195553302765, "chain_id": "3ATPCQ38J897QI0XKGBXB38UI93YAF_1_8"}
{"score": 0.04812220484018326, "chain_id": "3ATPCQ38J897QI0XKGBXB38UI93YAF_1_9"}
{"score": 0.29920223355293274, "chain_id": "3ATPCQ38J897QI0XKGBXB38UI93YAF_1_10"}
{"score": 0.9900362491607666, "chain_id": "3LOZAJ85YDCTLAFJ25WGM7IN59JX2S_1_1"}
{"score": 0.7508453726768494, "chain_id": "3LOZAJ85YDCTLAFJ25WGM7IN59JX2S_1_6"}
{"score": 0.6330910921096802, "chain_id": "3LOZAJ85YDCTLAFJ25WGM7IN59JX2S_1_2"}
{"score": 0.8115637898445129, "chain_id": "3LOZAJ85YDCTLAFJ25WGM7IN59JX2S_1_3"}
{"score": 0.9347298741340637, "chain_id": "3LOZAJ85YDCTLAFJ25WGM7IN59JX2S_1_4"}
{"score": 0.8276845812797546, "chain_id": "3LOZAJ85YDCTLAFJ25WGM7IN59JX2S_1_5"}
{"score": 0.7628861665725708, "chain_id": "3LOZAJ85YDCTLAFJ25WGM7IN59JX2S_1_7"}
{"score": 0.4892266094684601, "chain_id": "3LOZAJ85YDCTLAFJ25WGM7IN59JX2S_1_8"}
{"score": 0.6252624988555908, "chain_id": "3LOZAJ85YDCTLAFJ25WGM7IN59JX2S_1_9"}
{"score": 0.14368008077144623, "chain_id": "3LOZAJ85YDCTLAFJ25WGM7IN59JX2S_1_10"}
{"score": 0.03833107650279999, "chain_id": "31IBVUNM9SYLIFM0QLA5I5FRY1LVFA_1_6"}
{"score": 0.30803096294403076, "chain_id": "31IBVUNM9SYLIFM0QLA5I5FRY1LVFA_1_1"}
{"score": 0.1201343834400177, "chain_id": "31IBVUNM9SYLIFM0QLA5I5FRY1LVFA_1_2"}
{"score": 0.05527867004275322, "chain_id": "31IBVUNM9SYLIFM0QLA5I5FRY1LVFA_1_3"}
{"score": 0.052195023745298386, "chain_id": "31IBVUNM9SYLIFM0QLA5I5FRY1LVFA_1_4"}
{"score": 0.03564828634262085, "chain_id": "31IBVUNM9SYLIFM0QLA5I5FRY1LVFA_1_5"}
{"score": 0.05239356309175491, "chain_id": "31IBVUNM9SYLIFM0QLA5I5FRY1LVFA_1_7"}
{"score": 0.11181193590164185, "chain_id": "31IBVUNM9SYLIFM0QLA5I5FRY1LVFA_1_8"}
{"score": 0.15709912776947021, "chain_id": "31IBVUNM9SYLIFM0QLA5I5FRY1LVFA_1_9"}
{"score": 0.06103675439953804, "chain_id": "31IBVUNM9SYLIFM0QLA5I5FRY1LVFA_1_10"}
{"score": 0.5034213066101074, "chain_id": "34PGFRQONOAE2681ZL6MJ5QXYI9JWI_1_3"}
{"score": 0.9806026816368103, "chain_id": "34PGFRQONOAE2681ZL6MJ5QXYI9JWI_1_4"}
{"score": 0.8113434314727783, "chain_id": "34PGFRQONOAE2681ZL6MJ5QXYI9JWI_1_7"}
{"score": 0.7093861699104309, "chain_id": "34PGFRQONOAE2681ZL6MJ5QXYI9JWI_1_8"}
{"score": 0.6652227640151978, "chain_id": "34PGFRQONOAE2681ZL6MJ5QXYI9JWI_1_1"}
{"score": 0.32578927278518677, "chain_id": "34PGFRQONOAE2681ZL6MJ5QXYI9JWI_1_2"}
{"score": 0.254615843296051, "chain_id": "34PGFRQONOAE2681ZL6MJ5QXYI9JWI_1_5"}
{"score": 0.8010282516479492, "chain_id": "34PGFRQONOAE2681ZL6MJ5QXYI9JWI_1_6"}
{"score": 0.378629207611084, "chain_id": "34PGFRQONOAE2681ZL6MJ5QXYI9JWI_1_9"}
{"score": 0.15612992644309998, "chain_id": "34PGFRQONOAE2681ZL6MJ5QXYI9JWI_1_10"}
{"score": 0.9713186621665955, "chain_id": "3PIWWX1FJJ5SWM82SMN7UFWPWRLJJI_1_5"}
{"score": 0.16512881219387054, "chain_id": "3PIWWX1FJJ5SWM82SMN7UFWPWRLJJI_1_1"}
{"score": 0.18072976171970367, "chain_id": "3PIWWX1FJJ5SWM82SMN7UFWPWRLJJI_1_2"}
{"score": 0.9177088737487793, "chain_id": "3PIWWX1FJJ5SWM82SMN7UFWPWRLJJI_1_3"}
{"score": 0.35466116666793823, "chain_id": "3PIWWX1FJJ5SWM82SMN7UFWPWRLJJI_1_4"}
{"score": 0.4687047004699707, "chain_id": "3PIWWX1FJJ5SWM82SMN7UFWPWRLJJI_1_6"}
{"score": 0.24508905410766602, "chain_id": "3PIWWX1FJJ5SWM82SMN7UFWPWRLJJI_1_7"}
{"score": 0.036983512341976166, "chain_id": "3PIWWX1FJJ5SWM82SMN7UFWPWRLJJI_1_8"}
{"score": 0.21746404469013214, "chain_id": "3PIWWX1FJJ5SWM82SMN7UFWPWRLJJI_1_9"}
{"score": 0.031001003459095955, "chain_id": "3PIWWX1FJJ5SWM82SMN7UFWPWRLJJI_1_10"}
{"score": 0.98785001039505, "chain_id": "3WJ1OXY92AFSBC9F7CD3CQKSQTY8A8_1_1"}
{"score": 0.9887341260910034, "chain_id": "3WJ1OXY92AFSBC9F7CD3CQKSQTY8A8_1_2"}
{"score": 0.9691790342330933, "chain_id": "3WJ1OXY92AFSBC9F7CD3CQKSQTY8A8_1_4"}
{"score": 0.8008118271827698, "chain_id": "3WJ1OXY92AFSBC9F7CD3CQKSQTY8A8_1_5"}
{"score": 0.09170914441347122, "chain_id": "3WJ1OXY92AFSBC9F7CD3CQKSQTY8A8_1_7"}
{"score": 0.9164067506790161, "chain_id": "3WJ1OXY92AFSBC9F7CD3CQKSQTY8A8_1_8"}
{"score": 0.9612295627593994, "chain_id": "3WJ1OXY92AFSBC9F7CD3CQKSQTY8A8_1_10"}
{"score": 0.9393413066864014, "chain_id": "3WJ1OXY92AFSBC9F7CD3CQKSQTY8A8_1_3"}
{"score": 0.18095500767230988, "chain_id": "3WJ1OXY92AFSBC9F7CD3CQKSQTY8A8_1_6"}
{"score": 0.05637788027524948, "chain_id": "3WJ1OXY92AFSBC9F7CD3CQKSQTY8A8_1_9"}
{"score": 0.9812619090080261, "chain_id": "3IXEICO792IAMUP0KX7MNHET8NU6T6_1_2"}
{"score": 0.32199573516845703, "chain_id": "3IXEICO792IAMUP0KX7MNHET8NU6T6_1_1"}
{"score": 0.6863413453102112, "chain_id": "3IXEICO792IAMUP0KX7MNHET8NU6T6_1_3"}
{"score": 0.02816297858953476, "chain_id": "3IXEICO792IAMUP0KX7MNHET8NU6T6_1_4"}
{"score": 0.2874455153942108, "chain_id": "3IXEICO792IAMUP0KX7MNHET8NU6T6_1_5"}
{"score": 0.4319772720336914, "chain_id": "3IXEICO792IAMUP0KX7MNHET8NU6T6_1_6"}
{"score": 0.5041524171829224, "chain_id": "3IXEICO792IAMUP0KX7MNHET8NU6T6_1_7"}
{"score": 0.6802955865859985, "chain_id": "3IXEICO792IAMUP0KX7MNHET8NU6T6_1_8"}
{"score": 0.5577141642570496, "chain_id": "3IXEICO792IAMUP0KX7MNHET8NU6T6_1_9"}
{"score": 0.0641956478357315, "chain_id": "3IXEICO792IAMUP0KX7MNHET8NU6T6_1_10"}
{"score": 0.9879844188690186, "chain_id": "37C0GNLMHF2355T3Y777IDW76IZD6M_1_1"}
{"score": 0.9815726280212402, "chain_id": "37C0GNLMHF2355T3Y777IDW76IZD6M_1_2"}
{"score": 0.9843961000442505, "chain_id": "37C0GNLMHF2355T3Y777IDW76IZD6M_1_3"}
{"score": 0.9875463843345642, "chain_id": "37C0GNLMHF2355T3Y777IDW76IZD6M_1_4"}
{"score": 0.38629624247550964, "chain_id": "37C0GNLMHF2355T3Y777IDW76IZD6M_1_6"}
{"score": 0.9577633738517761, "chain_id": "37C0GNLMHF2355T3Y777IDW76IZD6M_1_9"}
{"score": 0.928462028503418, "chain_id": "37C0GNLMHF2355T3Y777IDW76IZD6M_1_10"}
{"score": 0.359526664018631, "chain_id": "37C0GNLMHF2355T3Y777IDW76IZD6M_1_5"}
{"score": 0.28117093443870544, "chain_id": "37C0GNLMHF2355T3Y777IDW76IZD6M_1_7"}
{"score": 0.3898228108882904, "chain_id": "37C0GNLMHF2355T3Y777IDW76IZD6M_1_8"}
{"score": 0.9792943596839905, "chain_id": "34J10VATJFXDNYS95UMGFFTBWO7QIK_1_1"}
{"score": 0.9483195543289185, "chain_id": "34J10VATJFXDNYS95UMGFFTBWO7QIK_1_2"}
{"score": 0.9465410113334656, "chain_id": "34J10VATJFXDNYS95UMGFFTBWO7QIK_1_3"}
{"score": 0.9359816312789917, "chain_id": "34J10VATJFXDNYS95UMGFFTBWO7QIK_1_4"}
{"score": 0.9612663388252258, "chain_id": "34J10VATJFXDNYS95UMGFFTBWO7QIK_1_6"}
{"score": 0.902824878692627, "chain_id": "34J10VATJFXDNYS95UMGFFTBWO7QIK_1_7"}
{"score": 0.9409307837486267, "chain_id": "34J10VATJFXDNYS95UMGFFTBWO7QIK_1_10"}
{"score": 0.5878410935401917, "chain_id": "34J10VATJFXDNYS95UMGFFTBWO7QIK_1_5"}
{"score": 0.8886701464653015, "chain_id": "34J10VATJFXDNYS95UMGFFTBWO7QIK_1_8"}
{"score": 0.914993941783905, "chain_id": "34J10VATJFXDNYS95UMGFFTBWO7QIK_1_9"}
{"score": 0.978243350982666, "chain_id": "3F6HPJW4JDZEWAATS00UKO4GV4JW2X_1_1"}
{"score": 0.9274653792381287, "chain_id": "3F6HPJW4JDZEWAATS00UKO4GV4JW2X_1_5"}
{"score": 0.9888148903846741, "chain_id": "3F6HPJW4JDZEWAATS00UKO4GV4JW2X_1_6"}
{"score": 0.9684508442878723, "chain_id": "3F6HPJW4JDZEWAATS00UKO4GV4JW2X_1_2"}
{"score": 0.3023146688938141, "chain_id": "3F6HPJW4JDZEWAATS00UKO4GV4JW2X_1_3"}
{"score": 0.7436468601226807, "chain_id": "3F6HPJW4JDZEWAATS00UKO4GV4JW2X_1_4"}
{"score": 0.5572948455810547, "chain_id": "3F6HPJW4JDZEWAATS00UKO4GV4JW2X_1_7"}
{"score": 0.37136608362197876, "chain_id": "3F6HPJW4JDZEWAATS00UKO4GV4JW2X_1_8"}
{"score": 0.4773459732532501, "chain_id": "3F6HPJW4JDZEWAATS00UKO4GV4JW2X_1_9"}
{"score": 0.2437664121389389, "chain_id": "3F6HPJW4JDZEWAATS00UKO4GV4JW2X_1_10"}
{"score": 0.9506145715713501, "chain_id": "3FTYUGLFSUK7M1TPTOX2Q7I7842D5G_1_1"}
{"score": 0.7527263164520264, "chain_id": "3FTYUGLFSUK7M1TPTOX2Q7I7842D5G_1_3"}
{"score": 0.19707296788692474, "chain_id": "3FTYUGLFSUK7M1TPTOX2Q7I7842D5G_1_5"}
{"score": 0.8939962387084961, "chain_id": "3FTYUGLFSUK7M1TPTOX2Q7I7842D5G_1_2"}
{"score": 0.39363428950309753, "chain_id": "3FTYUGLFSUK7M1TPTOX2Q7I7842D5G_1_4"}
{"score": 0.05210435017943382, "chain_id": "3FTYUGLFSUK7M1TPTOX2Q7I7842D5G_1_6"}
{"score": 0.21099571883678436, "chain_id": "3FTYUGLFSUK7M1TPTOX2Q7I7842D5G_1_7"}
{"score": 0.18204006552696228, "chain_id": "3FTYUGLFSUK7M1TPTOX2Q7I7842D5G_1_8"}
{"score": 0.2887173295021057, "chain_id": "3FTYUGLFSUK7M1TPTOX2Q7I7842D5G_1_9"}
{"score": 0.45818960666656494, "chain_id": "3FTYUGLFSUK7M1TPTOX2Q7I7842D5G_1_10"}
{"score": 0.9460834860801697, "chain_id": "3RYC5T2D73S5GLUDV410T24SE7TRPP_1_1"}
{"score": 0.9467577934265137, "chain_id": "3RYC5T2D73S5GLUDV410T24SE7TRPP_1_3"}
{"score": 0.991161584854126, "chain_id": "3RYC5T2D73S5GLUDV410T24SE7TRPP_1_4"}
{"score": 0.9914063215255737, "chain_id": "3RYC5T2D73S5GLUDV410T24SE7TRPP_1_2"}
{"score": 0.45178455114364624, "chain_id": "3RYC5T2D73S5GLUDV410T24SE7TRPP_1_5"}
{"score": 0.20183368027210236, "chain_id": "3RYC5T2D73S5GLUDV410T24SE7TRPP_1_6"}
{"score": 0.11688536405563354, "chain_id": "3RYC5T2D73S5GLUDV410T24SE7TRPP_1_7"}
{"score": 0.38960838317871094, "chain_id": "3RYC5T2D73S5GLUDV410T24SE7TRPP_1_8"}
{"score": 0.349208265542984, "chain_id": "3RYC5T2D73S5GLUDV410T24SE7TRPP_1_9"}
{"score": 0.12202468514442444, "chain_id": "3RYC5T2D73S5GLUDV410T24SE7TRPP_1_10"}
{"score": 0.019492629915475845, "chain_id": "3Q5ZZ9ZEVOEV56XYCGMM4F46Y9658W_1_7"}
{"score": 0.3481845259666443, "chain_id": "3Q5ZZ9ZEVOEV56XYCGMM4F46Y9658W_1_1"}
{"score": 0.3895142376422882, "chain_id": "3Q5ZZ9ZEVOEV56XYCGMM4F46Y9658W_1_2"}
{"score": 0.19332857429981232, "chain_id": "3Q5ZZ9ZEVOEV56XYCGMM4F46Y9658W_1_3"}
{"score": 0.36408036947250366, "chain_id": "3Q5ZZ9ZEVOEV56XYCGMM4F46Y9658W_1_4"}
{"score": 0.1035015657544136, "chain_id": "3Q5ZZ9ZEVOEV56XYCGMM4F46Y9658W_1_5"}
{"score": 0.21314725279808044, "chain_id": "3Q5ZZ9ZEVOEV56XYCGMM4F46Y9658W_1_6"}
{"score": 0.31587862968444824, "chain_id": "3Q5ZZ9ZEVOEV56XYCGMM4F46Y9658W_1_8"}
{"score": 0.02716837264597416, "chain_id": "3Q5ZZ9ZEVOEV56XYCGMM4F46Y9658W_1_9"}
{"score": 0.02442147769033909, "chain_id": "3Q5ZZ9ZEVOEV56XYCGMM4F46Y9658W_1_10"}
{"score": 0.8756793737411499, "chain_id": "39DD6S19JPAALLREW7F2LT7NAGQZEK_1_2"}
{"score": 0.931439995765686, "chain_id": "39DD6S19JPAALLREW7F2LT7NAGQZEK_1_4"}
{"score": 0.6435291171073914, "chain_id": "39DD6S19JPAALLREW7F2LT7NAGQZEK_1_5"}
{"score": 0.40105655789375305, "chain_id": "39DD6S19JPAALLREW7F2LT7NAGQZEK_1_8"}
{"score": 0.6586593985557556, "chain_id": "39DD6S19JPAALLREW7F2LT7NAGQZEK_1_1"}
{"score": 0.9549263715744019, "chain_id": "39DD6S19JPAALLREW7F2LT7NAGQZEK_1_3"}
{"score": 0.9079769849777222, "chain_id": "39DD6S19JPAALLREW7F2LT7NAGQZEK_1_6"}
{"score": 0.712008535861969, "chain_id": "39DD6S19JPAALLREW7F2LT7NAGQZEK_1_7"}
{"score": 0.5104432702064514, "chain_id": "39DD6S19JPAALLREW7F2LT7NAGQZEK_1_9"}
{"score": 0.2658531069755554, "chain_id": "39DD6S19JPAALLREW7F2LT7NAGQZEK_1_10"}
{"score": 0.9099897146224976, "chain_id": "3EA3QWIZ4IUQFEK1MYGBKK4YHNATIQ_1_1"}
{"score": 0.6755364537239075, "chain_id": "3EA3QWIZ4IUQFEK1MYGBKK4YHNATIQ_1_4"}
{"score": 0.21082116663455963, "chain_id": "3EA3QWIZ4IUQFEK1MYGBKK4YHNATIQ_1_8"}
{"score": 0.310086727142334, "chain_id": "3EA3QWIZ4IUQFEK1MYGBKK4YHNATIQ_1_9"}
{"score": 0.13184833526611328, "chain_id": "3EA3QWIZ4IUQFEK1MYGBKK4YHNATIQ_1_2"}
{"score": 0.34458643198013306, "chain_id": "3EA3QWIZ4IUQFEK1MYGBKK4YHNATIQ_1_3"}
{"score": 0.11835798621177673, "chain_id": "3EA3QWIZ4IUQFEK1MYGBKK4YHNATIQ_1_5"}
{"score": 0.8147692680358887, "chain_id": "3EA3QWIZ4IUQFEK1MYGBKK4YHNATIQ_1_6"}
{"score": 0.2762663662433624, "chain_id": "3EA3QWIZ4IUQFEK1MYGBKK4YHNATIQ_1_7"}
{"score": 0.5227175951004028, "chain_id": "3EA3QWIZ4IUQFEK1MYGBKK4YHNATIQ_1_10"}
{"score": 0.9857431054115295, "chain_id": "3IOEN3P9S7I9DADRIENCHBVYHNN16F_1_2"}
{"score": 0.9793358445167542, "chain_id": "3IOEN3P9S7I9DADRIENCHBVYHNN16F_1_3"}
{"score": 0.9914166331291199, "chain_id": "3IOEN3P9S7I9DADRIENCHBVYHNN16F_1_4"}
{"score": 0.9919914603233337, "chain_id": "3IOEN3P9S7I9DADRIENCHBVYHNN16F_1_5"}
{"score": 0.9881376624107361, "chain_id": "3IOEN3P9S7I9DADRIENCHBVYHNN16F_1_6"}
{"score": 0.9331756234169006, "chain_id": "3IOEN3P9S7I9DADRIENCHBVYHNN16F_1_7"}
{"score": 0.9881154298782349, "chain_id": "3IOEN3P9S7I9DADRIENCHBVYHNN16F_1_8"}
{"score": 0.9855890870094299, "chain_id": "3IOEN3P9S7I9DADRIENCHBVYHNN16F_1_9"}
{"score": 0.8931971788406372, "chain_id": "3IOEN3P9S7I9DADRIENCHBVYHNN16F_1_10"}
{"score": 0.9865291118621826, "chain_id": "3IOEN3P9S7I9DADRIENCHBVYHNN16F_1_1"}
{"score": 0.9926812052726746, "chain_id": "3M81GAB8A0I30QE3ZKUZTSPY371QB9_1_1"}
{"score": 0.9918388724327087, "chain_id": "3M81GAB8A0I30QE3ZKUZTSPY371QB9_1_2"}
{"score": 0.9608218669891357, "chain_id": "3M81GAB8A0I30QE3ZKUZTSPY371QB9_1_6"}
{"score": 0.9913681149482727, "chain_id": "3M81GAB8A0I30QE3ZKUZTSPY371QB9_1_7"}
{"score": 0.2679632604122162, "chain_id": "3M81GAB8A0I30QE3ZKUZTSPY371QB9_1_3"}
{"score": 0.3573083281517029, "chain_id": "3M81GAB8A0I30QE3ZKUZTSPY371QB9_1_4"}
{"score": 0.3908446729183197, "chain_id": "3M81GAB8A0I30QE3ZKUZTSPY371QB9_1_5"}
{"score": 0.4188839793205261, "chain_id": "3M81GAB8A0I30QE3ZKUZTSPY371QB9_1_8"}
{"score": 0.22724978625774384, "chain_id": "3M81GAB8A0I30QE3ZKUZTSPY371QB9_1_9"}
{"score": 0.09689412266016006, "chain_id": "3M81GAB8A0I30QE3ZKUZTSPY371QB9_1_10"}
{"score": 0.5584361553192139, "chain_id": "3RYC5T2D73S5GLUDV410T24SF2PRPC_1_1"}
{"score": 0.768315315246582, "chain_id": "3RYC5T2D73S5GLUDV410T24SF2PRPC_1_2"}
{"score": 0.18255481123924255, "chain_id": "3RYC5T2D73S5GLUDV410T24SF2PRPC_1_3"}
{"score": 0.2324550300836563, "chain_id": "3RYC5T2D73S5GLUDV410T24SF2PRPC_1_4"}
{"score": 0.5022381544113159, "chain_id": "3RYC5T2D73S5GLUDV410T24SF2PRPC_1_5"}
{"score": 0.5388346910476685, "chain_id": "3RYC5T2D73S5GLUDV410T24SF2PRPC_1_6"}
{"score": 0.17454026639461517, "chain_id": "3RYC5T2D73S5GLUDV410T24SF2PRPC_1_7"}
{"score": 0.08447165787220001, "chain_id": "3RYC5T2D73S5GLUDV410T24SF2PRPC_1_8"}
{"score": 0.5781673789024353, "chain_id": "3RYC5T2D73S5GLUDV410T24SF2PRPC_1_9"}
{"score": 0.4426697790622711, "chain_id": "3RYC5T2D73S5GLUDV410T24SF2PRPC_1_10"}
{"score": 0.9929717183113098, "chain_id": "34Q075JO1XCEZZRCGP7V8AL71M310H_1_1"}
{"score": 0.7640556693077087, "chain_id": "34Q075JO1XCEZZRCGP7V8AL71M310H_1_2"}
{"score": 0.8268507719039917, "chain_id": "34Q075JO1XCEZZRCGP7V8AL71M310H_1_3"}
{"score": 0.5431873202323914, "chain_id": "34Q075JO1XCEZZRCGP7V8AL71M310H_1_4"}
{"score": 0.9206939339637756, "chain_id": "34Q075JO1XCEZZRCGP7V8AL71M310H_1_6"}
{"score": 0.7530951499938965, "chain_id": "34Q075JO1XCEZZRCGP7V8AL71M310H_1_7"}
{"score": 0.47420042753219604, "chain_id": "34Q075JO1XCEZZRCGP7V8AL71M310H_1_9"}
{"score": 0.2010834515094757, "chain_id": "34Q075JO1XCEZZRCGP7V8AL71M310H_1_5"}
{"score": 0.21346014738082886, "chain_id": "34Q075JO1XCEZZRCGP7V8AL71M310H_1_8"}
{"score": 0.5130179524421692, "chain_id": "34Q075JO1XCEZZRCGP7V8AL71M310H_1_10"}
{"score": 0.13730992376804352, "chain_id": "3LRLIPTPEQ8C6DBGG1A62VTJJVIKAV_1_2"}
{"score": 0.21339282393455505, "chain_id": "3LRLIPTPEQ8C6DBGG1A62VTJJVIKAV_1_4"}
{"score": 0.2104567587375641, "chain_id": "3LRLIPTPEQ8C6DBGG1A62VTJJVIKAV_1_6"}
{"score": 0.19484412670135498, "chain_id": "3LRLIPTPEQ8C6DBGG1A62VTJJVIKAV_1_1"}
{"score": 0.12913712859153748, "chain_id": "3LRLIPTPEQ8C6DBGG1A62VTJJVIKAV_1_3"}
{"score": 0.13425232470035553, "chain_id": "3LRLIPTPEQ8C6DBGG1A62VTJJVIKAV_1_5"}
{"score": 0.0865936279296875, "chain_id": "3LRLIPTPEQ8C6DBGG1A62VTJJVIKAV_1_7"}
{"score": 0.8242059350013733, "chain_id": "3LRLIPTPEQ8C6DBGG1A62VTJJVIKAV_1_8"}
{"score": 0.36778655648231506, "chain_id": "3LRLIPTPEQ8C6DBGG1A62VTJJVIKAV_1_9"}
{"score": 0.13002710044384003, "chain_id": "3LRLIPTPEQ8C6DBGG1A62VTJJVIKAV_1_10"}
{"score": 0.31776368618011475, "chain_id": "3FUI0JHJPXX6QU4OMG3XY1YB21T330_1_5"}
{"score": 0.6379289031028748, "chain_id": "3FUI0JHJPXX6QU4OMG3XY1YB21T330_1_1"}
{"score": 0.6816525459289551, "chain_id": "3FUI0JHJPXX6QU4OMG3XY1YB21T330_1_2"}
{"score": 0.8100684285163879, "chain_id": "3FUI0JHJPXX6QU4OMG3XY1YB21T330_1_3"}
{"score": 0.8190628290176392, "chain_id": "3FUI0JHJPXX6QU4OMG3XY1YB21T330_1_4"}
{"score": 0.4199669361114502, "chain_id": "3FUI0JHJPXX6QU4OMG3XY1YB21T330_1_6"}
{"score": 0.21993105113506317, "chain_id": "3FUI0JHJPXX6QU4OMG3XY1YB21T330_1_7"}
{"score": 0.35451439023017883, "chain_id": "3FUI0JHJPXX6QU4OMG3XY1YB21T330_1_8"}
{"score": 0.49215829372406006, "chain_id": "3FUI0JHJPXX6QU4OMG3XY1YB21T330_1_9"}
{"score": 0.3495534658432007, "chain_id": "3FUI0JHJPXX6QU4OMG3XY1YB21T330_1_10"}
{"score": 0.9067502617835999, "chain_id": "32AT8R96GL8U952MRF0ZTIWNLV0SUT_1_1"}
{"score": 0.2560647130012512, "chain_id": "32AT8R96GL8U952MRF0ZTIWNLV0SUT_1_2"}
{"score": 0.832918107509613, "chain_id": "32AT8R96GL8U952MRF0ZTIWNLV0SUT_1_4"}
{"score": 0.8542815446853638, "chain_id": "32AT8R96GL8U952MRF0ZTIWNLV0SUT_1_6"}
{"score": 0.10565754771232605, "chain_id": "32AT8R96GL8U952MRF0ZTIWNLV0SUT_1_3"}
{"score": 0.1302746683359146, "chain_id": "32AT8R96GL8U952MRF0ZTIWNLV0SUT_1_5"}
{"score": 0.02567247301340103, "chain_id": "32AT8R96GL8U952MRF0ZTIWNLV0SUT_1_7"}
{"score": 0.10437461733818054, "chain_id": "32AT8R96GL8U952MRF0ZTIWNLV0SUT_1_8"}
{"score": 0.03437602519989014, "chain_id": "32AT8R96GL8U952MRF0ZTIWNLV0SUT_1_9"}
{"score": 0.17946337163448334, "chain_id": "32AT8R96GL8U952MRF0ZTIWNLV0SUT_1_10"}
{"score": 0.08518176525831223, "chain_id": "3BF51CHDTV9P3ACQIEAG0X1EW030HB_1_7"}
{"score": 0.08331865072250366, "chain_id": "3BF51CHDTV9P3ACQIEAG0X1EW030HB_1_8"}
{"score": 0.8926287293434143, "chain_id": "3BF51CHDTV9P3ACQIEAG0X1EW030HB_1_1"}
{"score": 0.886867105960846, "chain_id": "3BF51CHDTV9P3ACQIEAG0X1EW030HB_1_2"}
{"score": 0.4635043442249298, "chain_id": "3BF51CHDTV9P3ACQIEAG0X1EW030HB_1_3"}
{"score": 0.8463897109031677, "chain_id": "3BF51CHDTV9P3ACQIEAG0X1EW030HB_1_4"}
{"score": 0.9359543919563293, "chain_id": "3BF51CHDTV9P3ACQIEAG0X1EW030HB_1_5"}
{"score": 0.5549555420875549, "chain_id": "3BF51CHDTV9P3ACQIEAG0X1EW030HB_1_6"}
{"score": 0.9430368542671204, "chain_id": "3BF51CHDTV9P3ACQIEAG0X1EW030HB_1_9"}
{"score": 0.03136492148041725, "chain_id": "3BF51CHDTV9P3ACQIEAG0X1EW030HB_1_10"}
{"score": 0.06593626737594604, "chain_id": "33F859I566CQNXF0GU75KEXXO9XBHG_1_1"}
{"score": 0.04049387946724892, "chain_id": "33F859I566CQNXF0GU75KEXXO9XBHG_1_3"}
{"score": 0.09166581928730011, "chain_id": "33F859I566CQNXF0GU75KEXXO9XBHG_1_2"}
{"score": 0.03769419342279434, "chain_id": "33F859I566CQNXF0GU75KEXXO9XBHG_1_4"}
{"score": 0.015391730703413486, "chain_id": "33F859I566CQNXF0GU75KEXXO9XBHG_1_5"}
{"score": 0.014770482666790485, "chain_id": "33F859I566CQNXF0GU75KEXXO9XBHG_1_6"}
{"score": 0.016133544966578484, "chain_id": "33F859I566CQNXF0GU75KEXXO9XBHG_1_7"}
{"score": 0.017718281596899033, "chain_id": "33F859I566CQNXF0GU75KEXXO9XBHG_1_8"}
{"score": 0.014504731632769108, "chain_id": "33F859I566CQNXF0GU75KEXXO9XBHG_1_9"}
{"score": 0.033553287386894226, "chain_id": "33F859I566CQNXF0GU75KEXXO9XBHG_1_10"}
{"score": 0.10427144914865494, "chain_id": "3F0BG9B9MPMP7G2ZDDZD1C64ENOY7K_1_3"}
{"score": 0.7592055797576904, "chain_id": "3F0BG9B9MPMP7G2ZDDZD1C64ENOY7K_1_1"}
{"score": 0.7196557521820068, "chain_id": "3F0BG9B9MPMP7G2ZDDZD1C64ENOY7K_1_2"}
{"score": 0.08467817306518555, "chain_id": "3F0BG9B9MPMP7G2ZDDZD1C64ENOY7K_1_4"}
{"score": 0.0466991551220417, "chain_id": "3F0BG9B9MPMP7G2ZDDZD1C64ENOY7K_1_5"}
{"score": 0.6945710182189941, "chain_id": "3F0BG9B9MPMP7G2ZDDZD1C64ENOY7K_1_6"}
{"score": 0.1309572160243988, "chain_id": "3F0BG9B9MPMP7G2ZDDZD1C64ENOY7K_1_7"}
{"score": 0.7549588084220886, "chain_id": "3F0BG9B9MPMP7G2ZDDZD1C64ENOY7K_1_8"}
{"score": 0.31454530358314514, "chain_id": "3F0BG9B9MPMP7G2ZDDZD1C64ENOY7K_1_9"}
{"score": 0.0828854888677597, "chain_id": "3F0BG9B9MPMP7G2ZDDZD1C64ENOY7K_1_10"}
{"score": 0.4700012803077698, "chain_id": "3RKNTXVS3MXRSBMDV9NQVE4NKY6A4L_1_1"}
{"score": 0.8564116954803467, "chain_id": "3RKNTXVS3MXRSBMDV9NQVE4NKY6A4L_1_2"}
{"score": 0.514302134513855, "chain_id": "3RKNTXVS3MXRSBMDV9NQVE4NKY6A4L_1_3"}
{"score": 0.2249452918767929, "chain_id": "3RKNTXVS3MXRSBMDV9NQVE4NKY6A4L_1_4"}
{"score": 0.25669151544570923, "chain_id": "3RKNTXVS3MXRSBMDV9NQVE4NKY6A4L_1_5"}
{"score": 0.30986979603767395, "chain_id": "3RKNTXVS3MXRSBMDV9NQVE4NKY6A4L_1_6"}
{"score": 0.5008572936058044, "chain_id": "3RKNTXVS3MXRSBMDV9NQVE4NKY6A4L_1_7"}
{"score": 0.42463064193725586, "chain_id": "3RKNTXVS3MXRSBMDV9NQVE4NKY6A4L_1_8"}
{"score": 0.11256960034370422, "chain_id": "3RKNTXVS3MXRSBMDV9NQVE4NKY6A4L_1_9"}
{"score": 0.19868767261505127, "chain_id": "3RKNTXVS3MXRSBMDV9NQVE4NKY6A4L_1_10"}
{"score": 0.9152590036392212, "chain_id": "3AZHRG4CU4JA925R3TLEW304XIP305_1_1"}
{"score": 0.8446303606033325, "chain_id": "3AZHRG4CU4JA925R3TLEW304XIP305_1_2"}
{"score": 0.3038969039916992, "chain_id": "3AZHRG4CU4JA925R3TLEW304XIP305_1_5"}
{"score": 0.17392681539058685, "chain_id": "3AZHRG4CU4JA925R3TLEW304XIP305_1_6"}
{"score": 0.19670961797237396, "chain_id": "3AZHRG4CU4JA925R3TLEW304XIP305_1_7"}
{"score": 0.8185697793960571, "chain_id": "3AZHRG4CU4JA925R3TLEW304XIP305_1_3"}
{"score": 0.43922391533851624, "chain_id": "3AZHRG4CU4JA925R3TLEW304XIP305_1_4"}
{"score": 0.2951298654079437, "chain_id": "3AZHRG4CU4JA925R3TLEW304XIP305_1_8"}
{"score": 0.19515027105808258, "chain_id": "3AZHRG4CU4JA925R3TLEW304XIP305_1_9"}
{"score": 0.03460061177611351, "chain_id": "3AZHRG4CU4JA925R3TLEW304XIP305_1_10"}
{"score": 0.9794331789016724, "chain_id": "3SB5N7Y3O33B3EHFY8SYFXPDHNS0GO_1_2"}
{"score": 0.9489426016807556, "chain_id": "3SB5N7Y3O33B3EHFY8SYFXPDHNS0GO_1_6"}
{"score": 0.7495524883270264, "chain_id": "3SB5N7Y3O33B3EHFY8SYFXPDHNS0GO_1_7"}
{"score": 0.17071986198425293, "chain_id": "3SB5N7Y3O33B3EHFY8SYFXPDHNS0GO_1_9"}
{"score": 0.16266775131225586, "chain_id": "3SB5N7Y3O33B3EHFY8SYFXPDHNS0GO_1_10"}
{"score": 0.18299685418605804, "chain_id": "3SB5N7Y3O33B3EHFY8SYFXPDHNS0GO_1_1"}
{"score": 0.5804712176322937, "chain_id": "3SB5N7Y3O33B3EHFY8SYFXPDHNS0GO_1_3"}
{"score": 0.2649182975292206, "chain_id": "3SB5N7Y3O33B3EHFY8SYFXPDHNS0GO_1_4"}
{"score": 0.9261298179626465, "chain_id": "3SB5N7Y3O33B3EHFY8SYFXPDHNS0GO_1_5"}
{"score": 0.5276601910591125, "chain_id": "3SB5N7Y3O33B3EHFY8SYFXPDHNS0GO_1_8"}
{"score": 0.15328200161457062, "chain_id": "3YDTZAI2WXFVYN9DZQUXKDBKWM814C_1_5"}
{"score": 0.2767471969127655, "chain_id": "3YDTZAI2WXFVYN9DZQUXKDBKWM814C_1_6"}
{"score": 0.46032607555389404, "chain_id": "3YDTZAI2WXFVYN9DZQUXKDBKWM814C_1_7"}
{"score": 0.9829917550086975, "chain_id": "3YDTZAI2WXFVYN9DZQUXKDBKWM814C_1_1"}
{"score": 0.973496675491333, "chain_id": "3YDTZAI2WXFVYN9DZQUXKDBKWM814C_1_2"}
{"score": 0.9786635637283325, "chain_id": "3YDTZAI2WXFVYN9DZQUXKDBKWM814C_1_3"}
{"score": 0.9001798033714294, "chain_id": "3YDTZAI2WXFVYN9DZQUXKDBKWM814C_1_4"}
{"score": 0.12219484895467758, "chain_id": "3YDTZAI2WXFVYN9DZQUXKDBKWM814C_1_8"}
{"score": 0.10341912508010864, "chain_id": "3YDTZAI2WXFVYN9DZQUXKDBKWM814C_1_9"}
{"score": 0.4265994429588318, "chain_id": "3YDTZAI2WXFVYN9DZQUXKDBKWM814C_1_10"}
{"score": 0.9473364353179932, "chain_id": "3PQMUDRV7R50604QSMH76D2PI7LII3_1_1"}
{"score": 0.9825431108474731, "chain_id": "3PQMUDRV7R50604QSMH76D2PI7LII3_1_2"}
{"score": 0.9488394260406494, "chain_id": "3PQMUDRV7R50604QSMH76D2PI7LII3_1_3"}
{"score": 0.8987551927566528, "chain_id": "3PQMUDRV7R50604QSMH76D2PI7LII3_1_4"}
{"score": 0.6086598634719849, "chain_id": "3PQMUDRV7R50604QSMH76D2PI7LII3_1_5"}
{"score": 0.5209280848503113, "chain_id": "3PQMUDRV7R50604QSMH76D2PI7LII3_1_9"}
{"score": 0.5377677083015442, "chain_id": "3PQMUDRV7R50604QSMH76D2PI7LII3_1_10"}
{"score": 0.7781319618225098, "chain_id": "3PQMUDRV7R50604QSMH76D2PI7LII3_1_6"}
{"score": 0.3194084167480469, "chain_id": "3PQMUDRV7R50604QSMH76D2PI7LII3_1_7"}
{"score": 0.3164166212081909, "chain_id": "3PQMUDRV7R50604QSMH76D2PI7LII3_1_8"}
{"score": 0.9510183930397034, "chain_id": "39U1BHVTDLQBPB2I1V9OGE29A8I3TN_1_1"}
{"score": 0.6180248260498047, "chain_id": "39U1BHVTDLQBPB2I1V9OGE29A8I3TN_1_2"}
{"score": 0.6960570216178894, "chain_id": "39U1BHVTDLQBPB2I1V9OGE29A8I3TN_1_3"}
{"score": 0.09661901742219925, "chain_id": "39U1BHVTDLQBPB2I1V9OGE29A8I3TN_1_5"}
{"score": 0.5247631669044495, "chain_id": "39U1BHVTDLQBPB2I1V9OGE29A8I3TN_1_7"}
{"score": 0.8950250744819641, "chain_id": "39U1BHVTDLQBPB2I1V9OGE29A8I3TN_1_4"}
{"score": 0.2586647868156433, "chain_id": "39U1BHVTDLQBPB2I1V9OGE29A8I3TN_1_6"}
{"score": 0.029620962217450142, "chain_id": "39U1BHVTDLQBPB2I1V9OGE29A8I3TN_1_8"}
{"score": 0.22713254392147064, "chain_id": "39U1BHVTDLQBPB2I1V9OGE29A8I3TN_1_9"}
{"score": 0.12089300155639648, "chain_id": "39U1BHVTDLQBPB2I1V9OGE29A8I3TN_1_10"}
{"score": 0.8745676279067993, "chain_id": "3HRMW88U16PBVOD19BQTS29A3I5M0R_1_3"}
{"score": 0.21055249869823456, "chain_id": "3HRMW88U16PBVOD19BQTS29A3I5M0R_1_1"}
{"score": 0.36441555619239807, "chain_id": "3HRMW88U16PBVOD19BQTS29A3I5M0R_1_2"}
{"score": 0.3743700087070465, "chain_id": "3HRMW88U16PBVOD19BQTS29A3I5M0R_1_4"}
{"score": 0.28055065870285034, "chain_id": "3HRMW88U16PBVOD19BQTS29A3I5M0R_1_5"}
{"score": 0.09153397381305695, "chain_id": "3HRMW88U16PBVOD19BQTS29A3I5M0R_1_6"}
{"score": 0.07439889013767242, "chain_id": "3HRMW88U16PBVOD19BQTS29A3I5M0R_1_7"}
{"score": 0.3401827812194824, "chain_id": "3HRMW88U16PBVOD19BQTS29A3I5M0R_1_8"}
{"score": 0.11716291308403015, "chain_id": "3HRMW88U16PBVOD19BQTS29A3I5M0R_1_9"}
{"score": 0.1313769668340683, "chain_id": "3HRMW88U16PBVOD19BQTS29A3I5M0R_1_10"}
{"score": 0.4799151122570038, "chain_id": "3TU5ZICBRD0KYSGWW8AP2QZXWICQ85_1_3"}
{"score": 0.4681215286254883, "chain_id": "3TU5ZICBRD0KYSGWW8AP2QZXWICQ85_1_5"}
{"score": 0.5688501000404358, "chain_id": "3TU5ZICBRD0KYSGWW8AP2QZXWICQ85_1_1"}
{"score": 0.43373557925224304, "chain_id": "3TU5ZICBRD0KYSGWW8AP2QZXWICQ85_1_2"}
{"score": 0.7912382483482361, "chain_id": "3TU5ZICBRD0KYSGWW8AP2QZXWICQ85_1_4"}
{"score": 0.338935524225235, "chain_id": "3TU5ZICBRD0KYSGWW8AP2QZXWICQ85_1_6"}
{"score": 0.2717103958129883, "chain_id": "3TU5ZICBRD0KYSGWW8AP2QZXWICQ85_1_7"}
{"score": 0.055165451020002365, "chain_id": "3TU5ZICBRD0KYSGWW8AP2QZXWICQ85_1_8"}
{"score": 0.5602487921714783, "chain_id": "3TU5ZICBRD0KYSGWW8AP2QZXWICQ85_1_9"}
{"score": 0.6826480627059937, "chain_id": "3TU5ZICBRD0KYSGWW8AP2QZXWICQ85_1_10"}
{"score": 0.3361039161682129, "chain_id": "3JBT3HLQF81EICG45LVDF56RQ0LZPQ_1_1"}
{"score": 0.34137141704559326, "chain_id": "3JBT3HLQF81EICG45LVDF56RQ0LZPQ_1_2"}
{"score": 0.7571152448654175, "chain_id": "3JBT3HLQF81EICG45LVDF56RQ0LZPQ_1_3"}
{"score": 0.6908171772956848, "chain_id": "3JBT3HLQF81EICG45LVDF56RQ0LZPQ_1_4"}
{"score": 0.31938686966896057, "chain_id": "3JBT3HLQF81EICG45LVDF56RQ0LZPQ_1_5"}
{"score": 0.3060420751571655, "chain_id": "3JBT3HLQF81EICG45LVDF56RQ0LZPQ_1_6"}
{"score": 0.37670186161994934, "chain_id": "3JBT3HLQF81EICG45LVDF56RQ0LZPQ_1_7"}
{"score": 0.11709829419851303, "chain_id": "3JBT3HLQF81EICG45LVDF56RQ0LZPQ_1_8"}
{"score": 0.18631573021411896, "chain_id": "3JBT3HLQF81EICG45LVDF56RQ0LZPQ_1_9"}
{"score": 0.10125508159399033, "chain_id": "3JBT3HLQF81EICG45LVDF56RQ0LZPQ_1_10"}
{"score": 0.9914063215255737, "chain_id": "39GHHAVOMFQ2T4PHPF03OD76HHU4JO_1_2"}
{"score": 0.9467577934265137, "chain_id": "39GHHAVOMFQ2T4PHPF03OD76HHU4JO_1_3"}
{"score": 0.991161584854126, "chain_id": "39GHHAVOMFQ2T4PHPF03OD76HHU4JO_1_4"}
{"score": 0.9460834860801697, "chain_id": "39GHHAVOMFQ2T4PHPF03OD76HHU4JO_1_1"}
{"score": 0.45178455114364624, "chain_id": "39GHHAVOMFQ2T4PHPF03OD76HHU4JO_1_5"}
{"score": 0.20183368027210236, "chain_id": "39GHHAVOMFQ2T4PHPF03OD76HHU4JO_1_6"}
{"score": 0.11688536405563354, "chain_id": "39GHHAVOMFQ2T4PHPF03OD76HHU4JO_1_7"}
{"score": 0.38960838317871094, "chain_id": "39GHHAVOMFQ2T4PHPF03OD76HHU4JO_1_8"}
{"score": 0.349208265542984, "chain_id": "39GHHAVOMFQ2T4PHPF03OD76HHU4JO_1_9"}
{"score": 0.12202468514442444, "chain_id": "39GHHAVOMFQ2T4PHPF03OD76HHU4JO_1_10"}
{"score": 0.24483057856559753, "chain_id": "3VNL7UK1XFI65NIBLQAQHNR64EVFT3_1_5"}
{"score": 0.5822733044624329, "chain_id": "3VNL7UK1XFI65NIBLQAQHNR64EVFT3_1_9"}
{"score": 0.23617233335971832, "chain_id": "3VNL7UK1XFI65NIBLQAQHNR64EVFT3_1_10"}
{"score": 0.7850605845451355, "chain_id": "3VNL7UK1XFI65NIBLQAQHNR64EVFT3_1_1"}
{"score": 0.20318156480789185, "chain_id": "3VNL7UK1XFI65NIBLQAQHNR64EVFT3_1_2"}
{"score": 0.6782299876213074, "chain_id": "3VNL7UK1XFI65NIBLQAQHNR64EVFT3_1_3"}
{"score": 0.1938685029745102, "chain_id": "3VNL7UK1XFI65NIBLQAQHNR64EVFT3_1_4"}
{"score": 0.3778577744960785, "chain_id": "3VNL7UK1XFI65NIBLQAQHNR64EVFT3_1_6"}
{"score": 0.3633370101451874, "chain_id": "3VNL7UK1XFI65NIBLQAQHNR64EVFT3_1_7"}
{"score": 0.2775909900665283, "chain_id": "3VNL7UK1XFI65NIBLQAQHNR64EVFT3_1_8"}
{"score": 0.9205984473228455, "chain_id": "3ZV9H2YQQD63HS6CW0EZ3Y985433WB_1_3"}
{"score": 0.22011928260326385, "chain_id": "3ZV9H2YQQD63HS6CW0EZ3Y985433WB_1_4"}
{"score": 0.537048876285553, "chain_id": "3ZV9H2YQQD63HS6CW0EZ3Y985433WB_1_5"}
{"score": 0.7935999035835266, "chain_id": "3ZV9H2YQQD63HS6CW0EZ3Y985433WB_1_6"}
{"score": 0.8600237369537354, "chain_id": "3ZV9H2YQQD63HS6CW0EZ3Y985433WB_1_8"}
{"score": 0.8510710597038269, "chain_id": "3ZV9H2YQQD63HS6CW0EZ3Y985433WB_1_1"}
{"score": 0.17280583083629608, "chain_id": "3ZV9H2YQQD63HS6CW0EZ3Y985433WB_1_2"}
{"score": 0.9154056310653687, "chain_id": "3ZV9H2YQQD63HS6CW0EZ3Y985433WB_1_7"}
{"score": 0.39983513951301575, "chain_id": "3ZV9H2YQQD63HS6CW0EZ3Y985433WB_1_9"}
{"score": 0.24318446218967438, "chain_id": "3ZV9H2YQQD63HS6CW0EZ3Y985433WB_1_10"}
{"score": 0.9780368208885193, "chain_id": "3YWRV122CSYCQLNDDHUUCRWM0N6U87_1_2"}
{"score": 0.8534596562385559, "chain_id": "3YWRV122CSYCQLNDDHUUCRWM0N6U87_1_3"}
{"score": 0.9894098043441772, "chain_id": "3YWRV122CSYCQLNDDHUUCRWM0N6U87_1_4"}
{"score": 0.9847827553749084, "chain_id": "3YWRV122CSYCQLNDDHUUCRWM0N6U87_1_6"}
{"score": 0.9639368057250977, "chain_id": "3YWRV122CSYCQLNDDHUUCRWM0N6U87_1_8"}
{"score": 0.9861365556716919, "chain_id": "3YWRV122CSYCQLNDDHUUCRWM0N6U87_1_1"}
{"score": 0.8487195372581482, "chain_id": "3YWRV122CSYCQLNDDHUUCRWM0N6U87_1_5"}
{"score": 0.9857988357543945, "chain_id": "3YWRV122CSYCQLNDDHUUCRWM0N6U87_1_7"}
{"score": 0.8437317609786987, "chain_id": "3YWRV122CSYCQLNDDHUUCRWM0N6U87_1_9"}
{"score": 0.19930197298526764, "chain_id": "3YWRV122CSYCQLNDDHUUCRWM0N6U87_1_10"}
{"score": 0.19363754987716675, "chain_id": "3ON104KXQKVOZOPGWEJID31EEAF4WK_1_1"}
{"score": 0.0716930404305458, "chain_id": "3ON104KXQKVOZOPGWEJID31EEAF4WK_1_2"}
{"score": 0.19100654125213623, "chain_id": "3ON104KXQKVOZOPGWEJID31EEAF4WK_1_3"}
{"score": 0.15230685472488403, "chain_id": "3ON104KXQKVOZOPGWEJID31EEAF4WK_1_4"}
{"score": 0.02509104646742344, "chain_id": "3ON104KXQKVOZOPGWEJID31EEAF4WK_1_5"}
{"score": 0.021338317543268204, "chain_id": "3ON104KXQKVOZOPGWEJID31EEAF4WK_1_6"}
{"score": 0.026850391179323196, "chain_id": "3ON104KXQKVOZOPGWEJID31EEAF4WK_1_7"}
{"score": 0.07678406685590744, "chain_id": "3ON104KXQKVOZOPGWEJID31EEAF4WK_1_8"}
{"score": 0.0679435133934021, "chain_id": "3ON104KXQKVOZOPGWEJID31EEAF4WK_1_9"}
{"score": 0.030237402766942978, "chain_id": "3ON104KXQKVOZOPGWEJID31EEAF4WK_1_10"}
{"score": 0.9892416596412659, "chain_id": "3SB5N7Y3O33B3EHFY8SYFXPD3KOG0G_1_1"}
{"score": 0.9191246628761292, "chain_id": "3SB5N7Y3O33B3EHFY8SYFXPD3KOG0G_1_2"}
{"score": 0.7909923791885376, "chain_id": "3SB5N7Y3O33B3EHFY8SYFXPD3KOG0G_1_3"}
{"score": 0.7708802819252014, "chain_id": "3SB5N7Y3O33B3EHFY8SYFXPD3KOG0G_1_4"}
{"score": 0.0324595645070076, "chain_id": "3SB5N7Y3O33B3EHFY8SYFXPD3KOG0G_1_5"}
{"score": 0.1202240139245987, "chain_id": "3SB5N7Y3O33B3EHFY8SYFXPD3KOG0G_1_6"}
{"score": 0.028572898358106613, "chain_id": "3SB5N7Y3O33B3EHFY8SYFXPD3KOG0G_1_7"}
{"score": 0.04547186940908432, "chain_id": "3SB5N7Y3O33B3EHFY8SYFXPD3KOG0G_1_8"}
{"score": 0.042607102543115616, "chain_id": "3SB5N7Y3O33B3EHFY8SYFXPD3KOG0G_1_9"}
{"score": 0.03859352692961693, "chain_id": "3SB5N7Y3O33B3EHFY8SYFXPD3KOG0G_1_10"}
{"score": 0.9901543259620667, "chain_id": "3YGXWBAF70GFLQJBFNJH19UBM5LC4T_1_1"}
{"score": 0.951835036277771, "chain_id": "3YGXWBAF70GFLQJBFNJH19UBM5LC4T_1_2"}
{"score": 0.8477085828781128, "chain_id": "3YGXWBAF70GFLQJBFNJH19UBM5LC4T_1_3"}
{"score": 0.8474387526512146, "chain_id": "3YGXWBAF70GFLQJBFNJH19UBM5LC4T_1_4"}
{"score": 0.042014963924884796, "chain_id": "3YGXWBAF70GFLQJBFNJH19UBM5LC4T_1_5"}
{"score": 0.9195663332939148, "chain_id": "3YGXWBAF70GFLQJBFNJH19UBM5LC4T_1_6"}
{"score": 0.7523550391197205, "chain_id": "3YGXWBAF70GFLQJBFNJH19UBM5LC4T_1_7"}
{"score": 0.08501606434583664, "chain_id": "3YGXWBAF70GFLQJBFNJH19UBM5LC4T_1_8"}
{"score": 0.6054915189743042, "chain_id": "3YGXWBAF70GFLQJBFNJH19UBM5LC4T_1_9"}
{"score": 0.5217137336730957, "chain_id": "3YGXWBAF70GFLQJBFNJH19UBM5LC4T_1_10"}
{"score": 0.26596295833587646, "chain_id": "3COPXFW7XBBJTHHI5KS3SQIEKE0KPP_1_1"}
{"score": 0.6108843088150024, "chain_id": "3COPXFW7XBBJTHHI5KS3SQIEKE0KPP_1_2"}
{"score": 0.5713686943054199, "chain_id": "3COPXFW7XBBJTHHI5KS3SQIEKE0KPP_1_3"}
{"score": 0.5261148810386658, "chain_id": "3COPXFW7XBBJTHHI5KS3SQIEKE0KPP_1_4"}
{"score": 0.19767311215400696, "chain_id": "3COPXFW7XBBJTHHI5KS3SQIEKE0KPP_1_5"}
{"score": 0.25509506464004517, "chain_id": "3COPXFW7XBBJTHHI5KS3SQIEKE0KPP_1_6"}
{"score": 0.6969821453094482, "chain_id": "3COPXFW7XBBJTHHI5KS3SQIEKE0KPP_1_7"}
{"score": 0.6414143443107605, "chain_id": "3COPXFW7XBBJTHHI5KS3SQIEKE0KPP_1_8"}
{"score": 0.11208944767713547, "chain_id": "3COPXFW7XBBJTHHI5KS3SQIEKE0KPP_1_9"}
{"score": 0.06581903994083405, "chain_id": "3COPXFW7XBBJTHHI5KS3SQIEKE0KPP_1_10"}
{"score": 0.9919036030769348, "chain_id": "3KV0LJBBH2KZVIX03O98CYAXA9ZRMP_1_1"}
{"score": 0.9805520176887512, "chain_id": "3KV0LJBBH2KZVIX03O98CYAXA9ZRMP_1_4"}
{"score": 0.9496093988418579, "chain_id": "3KV0LJBBH2KZVIX03O98CYAXA9ZRMP_1_5"}
{"score": 0.43991535902023315, "chain_id": "3KV0LJBBH2KZVIX03O98CYAXA9ZRMP_1_9"}
{"score": 0.16999293863773346, "chain_id": "3KV0LJBBH2KZVIX03O98CYAXA9ZRMP_1_2"}
{"score": 0.12596546113491058, "chain_id": "3KV0LJBBH2KZVIX03O98CYAXA9ZRMP_1_3"}
{"score": 0.9123578071594238, "chain_id": "3KV0LJBBH2KZVIX03O98CYAXA9ZRMP_1_6"}
{"score": 0.10548214614391327, "chain_id": "3KV0LJBBH2KZVIX03O98CYAXA9ZRMP_1_7"}
{"score": 0.48828157782554626, "chain_id": "3KV0LJBBH2KZVIX03O98CYAXA9ZRMP_1_8"}
{"score": 0.261010080575943, "chain_id": "3KV0LJBBH2KZVIX03O98CYAXA9ZRMP_1_10"}
{"score": 0.9877344965934753, "chain_id": "39K0FND3AHE7W1BJ1DNMH8LNBFSMAE_1_1"}
{"score": 0.9786204099655151, "chain_id": "39K0FND3AHE7W1BJ1DNMH8LNBFSMAE_1_4"}
{"score": 0.2696616053581238, "chain_id": "39K0FND3AHE7W1BJ1DNMH8LNBFSMAE_1_10"}
{"score": 0.9815942049026489, "chain_id": "39K0FND3AHE7W1BJ1DNMH8LNBFSMAE_1_2"}
{"score": 0.9803910255432129, "chain_id": "39K0FND3AHE7W1BJ1DNMH8LNBFSMAE_1_3"}
{"score": 0.723516047000885, "chain_id": "39K0FND3AHE7W1BJ1DNMH8LNBFSMAE_1_5"}
{"score": 0.6395719051361084, "chain_id": "39K0FND3AHE7W1BJ1DNMH8LNBFSMAE_1_6"}
{"score": 0.0943061038851738, "chain_id": "39K0FND3AHE7W1BJ1DNMH8LNBFSMAE_1_7"}
{"score": 0.08046289533376694, "chain_id": "39K0FND3AHE7W1BJ1DNMH8LNBFSMAE_1_8"}
{"score": 0.07582739740610123, "chain_id": "39K0FND3AHE7W1BJ1DNMH8LNBFSMAE_1_9"}
{"score": 0.9891725778579712, "chain_id": "3300DTYQT2G17TQN9BWPU0VJH6UQE4_1_1"}
{"score": 0.9890769720077515, "chain_id": "3300DTYQT2G17TQN9BWPU0VJH6UQE4_1_2"}
{"score": 0.9655908346176147, "chain_id": "3300DTYQT2G17TQN9BWPU0VJH6UQE4_1_3"}
{"score": 0.0426536463201046, "chain_id": "3300DTYQT2G17TQN9BWPU0VJH6UQE4_1_10"}
{"score": 0.9582394361495972, "chain_id": "3300DTYQT2G17TQN9BWPU0VJH6UQE4_1_4"}
{"score": 0.08740375190973282, "chain_id": "3300DTYQT2G17TQN9BWPU0VJH6UQE4_1_5"}
{"score": 0.8835698962211609, "chain_id": "3300DTYQT2G17TQN9BWPU0VJH6UQE4_1_6"}
{"score": 0.8645373582839966, "chain_id": "3300DTYQT2G17TQN9BWPU0VJH6UQE4_1_7"}
{"score": 0.6281717419624329, "chain_id": "3300DTYQT2G17TQN9BWPU0VJH6UQE4_1_8"}
{"score": 0.021371211856603622, "chain_id": "3300DTYQT2G17TQN9BWPU0VJH6UQE4_1_9"}
{"score": 0.9292318224906921, "chain_id": "3OONKJ5DKCI0FE1NK72V4NUYZ8HOBC_1_1"}
{"score": 0.7369624972343445, "chain_id": "3OONKJ5DKCI0FE1NK72V4NUYZ8HOBC_1_2"}
{"score": 0.5751636624336243, "chain_id": "3OONKJ5DKCI0FE1NK72V4NUYZ8HOBC_1_3"}
{"score": 0.8828562498092651, "chain_id": "3OONKJ5DKCI0FE1NK72V4NUYZ8HOBC_1_4"}
{"score": 0.21711814403533936, "chain_id": "3OONKJ5DKCI0FE1NK72V4NUYZ8HOBC_1_5"}
{"score": 0.1340809464454651, "chain_id": "3OONKJ5DKCI0FE1NK72V4NUYZ8HOBC_1_6"}
{"score": 0.2589471936225891, "chain_id": "3OONKJ5DKCI0FE1NK72V4NUYZ8HOBC_1_7"}
{"score": 0.1766262650489807, "chain_id": "3OONKJ5DKCI0FE1NK72V4NUYZ8HOBC_1_8"}
{"score": 0.8672113418579102, "chain_id": "3OONKJ5DKCI0FE1NK72V4NUYZ8HOBC_1_9"}
{"score": 0.5504745841026306, "chain_id": "3OONKJ5DKCI0FE1NK72V4NUYZ8HOBC_1_10"}
{"score": 0.8897126913070679, "chain_id": "31QTRG6Q2TCEDM6Z9ZTU1YXPUSCYPB_1_5"}
{"score": 0.9680717587471008, "chain_id": "31QTRG6Q2TCEDM6Z9ZTU1YXPUSCYPB_1_7"}
{"score": 0.9096059203147888, "chain_id": "31QTRG6Q2TCEDM6Z9ZTU1YXPUSCYPB_1_8"}
{"score": 0.3467910885810852, "chain_id": "31QTRG6Q2TCEDM6Z9ZTU1YXPUSCYPB_1_10"}
{"score": 0.39791497588157654, "chain_id": "31QTRG6Q2TCEDM6Z9ZTU1YXPUSCYPB_1_1"}
{"score": 0.16404500603675842, "chain_id": "31QTRG6Q2TCEDM6Z9ZTU1YXPUSCYPB_1_2"}
{"score": 0.11589626222848892, "chain_id": "31QTRG6Q2TCEDM6Z9ZTU1YXPUSCYPB_1_3"}
{"score": 0.10109297931194305, "chain_id": "31QTRG6Q2TCEDM6Z9ZTU1YXPUSCYPB_1_4"}
{"score": 0.9903122186660767, "chain_id": "31QTRG6Q2TCEDM6Z9ZTU1YXPUSCYPB_1_6"}
{"score": 0.8313992619514465, "chain_id": "31QTRG6Q2TCEDM6Z9ZTU1YXPUSCYPB_1_9"}
{"score": 0.8903772234916687, "chain_id": "3WMOAN2SRBWX67ZHO9TIQAO0C0HNVB_1_2"}
{"score": 0.9549639821052551, "chain_id": "3WMOAN2SRBWX67ZHO9TIQAO0C0HNVB_1_4"}
{"score": 0.9831985831260681, "chain_id": "3WMOAN2SRBWX67ZHO9TIQAO0C0HNVB_1_1"}
{"score": 0.03437532112002373, "chain_id": "3WMOAN2SRBWX67ZHO9TIQAO0C0HNVB_1_3"}
{"score": 0.9458882212638855, "chain_id": "3WMOAN2SRBWX67ZHO9TIQAO0C0HNVB_1_5"}
{"score": 0.0578896701335907, "chain_id": "3WMOAN2SRBWX67ZHO9TIQAO0C0HNVB_1_6"}
{"score": 0.6520726084709167, "chain_id": "3WMOAN2SRBWX67ZHO9TIQAO0C0HNVB_1_7"}
{"score": 0.5108676552772522, "chain_id": "3WMOAN2SRBWX67ZHO9TIQAO0C0HNVB_1_8"}
{"score": 0.11167789250612259, "chain_id": "3WMOAN2SRBWX67ZHO9TIQAO0C0HNVB_1_9"}
{"score": 0.1356559693813324, "chain_id": "3WMOAN2SRBWX67ZHO9TIQAO0C0HNVB_1_10"}
{"score": 0.9853531718254089, "chain_id": "30LSNF239UUWVFQO3JWFJXV8DQF2IH_1_2"}
{"score": 0.9661656022071838, "chain_id": "30LSNF239UUWVFQO3JWFJXV8DQF2IH_1_5"}
{"score": 0.9236079454421997, "chain_id": "30LSNF239UUWVFQO3JWFJXV8DQF2IH_1_1"}
{"score": 0.7471112012863159, "chain_id": "30LSNF239UUWVFQO3JWFJXV8DQF2IH_1_3"}
{"score": 0.187580406665802, "chain_id": "30LSNF239UUWVFQO3JWFJXV8DQF2IH_1_4"}
{"score": 0.7115736603736877, "chain_id": "30LSNF239UUWVFQO3JWFJXV8DQF2IH_1_6"}
{"score": 0.2947259843349457, "chain_id": "30LSNF239UUWVFQO3JWFJXV8DQF2IH_1_7"}
{"score": 0.05553396791219711, "chain_id": "30LSNF239UUWVFQO3JWFJXV8DQF2IH_1_8"}
{"score": 0.4758944809436798, "chain_id": "30LSNF239UUWVFQO3JWFJXV8DQF2IH_1_9"}
{"score": 0.5131267309188843, "chain_id": "30LSNF239UUWVFQO3JWFJXV8DQF2IH_1_10"}
{"score": 0.8538436889648438, "chain_id": "3NGMS9VZTLHWMI0AQ6510JC5M5JFF0_1_1"}
{"score": 0.9742348194122314, "chain_id": "3NGMS9VZTLHWMI0AQ6510JC5M5JFF0_1_2"}
{"score": 0.6708962321281433, "chain_id": "3NGMS9VZTLHWMI0AQ6510JC5M5JFF0_1_3"}
{"score": 0.9504051208496094, "chain_id": "3NGMS9VZTLHWMI0AQ6510JC5M5JFF0_1_4"}
{"score": 0.6718852519989014, "chain_id": "3NGMS9VZTLHWMI0AQ6510JC5M5JFF0_1_7"}
{"score": 0.6026815176010132, "chain_id": "3NGMS9VZTLHWMI0AQ6510JC5M5JFF0_1_5"}
{"score": 0.04122476652264595, "chain_id": "3NGMS9VZTLHWMI0AQ6510JC5M5JFF0_1_6"}
{"score": 0.11651060730218887, "chain_id": "3NGMS9VZTLHWMI0AQ6510JC5M5JFF0_1_8"}
{"score": 0.01856997050344944, "chain_id": "3NGMS9VZTLHWMI0AQ6510JC5M5JFF0_1_9"}
{"score": 0.01794678531587124, "chain_id": "3NGMS9VZTLHWMI0AQ6510JC5M5JFF0_1_10"}
{"score": 0.9446108341217041, "chain_id": "3RJSC4XJ10TDNHSVHC97B0YORMT051_1_1"}
{"score": 0.44081535935401917, "chain_id": "3RJSC4XJ10TDNHSVHC97B0YORMT051_1_3"}
{"score": 0.8077253103256226, "chain_id": "3RJSC4XJ10TDNHSVHC97B0YORMT051_1_4"}
{"score": 0.7831987738609314, "chain_id": "3RJSC4XJ10TDNHSVHC97B0YORMT051_1_2"}
{"score": 0.4638034403324127, "chain_id": "3RJSC4XJ10TDNHSVHC97B0YORMT051_1_5"}
{"score": 0.056602317839860916, "chain_id": "3RJSC4XJ10TDNHSVHC97B0YORMT051_1_6"}
{"score": 0.03874659165740013, "chain_id": "3RJSC4XJ10TDNHSVHC97B0YORMT051_1_7"}
{"score": 0.027208033949136734, "chain_id": "3RJSC4XJ10TDNHSVHC97B0YORMT051_1_8"}
{"score": 0.03269396722316742, "chain_id": "3RJSC4XJ10TDNHSVHC97B0YORMT051_1_9"}
{"score": 0.02685238979756832, "chain_id": "3RJSC4XJ10TDNHSVHC97B0YORMT051_1_10"}
{"score": 0.9372732639312744, "chain_id": "33C7UALJVLXWHOWFBTKA1PRPLGZ81O_1_4"}
{"score": 0.9798276424407959, "chain_id": "33C7UALJVLXWHOWFBTKA1PRPLGZ81O_1_6"}
{"score": 0.9395155310630798, "chain_id": "33C7UALJVLXWHOWFBTKA1PRPLGZ81O_1_7"}
{"score": 0.47793498635292053, "chain_id": "33C7UALJVLXWHOWFBTKA1PRPLGZ81O_1_1"}
{"score": 0.34046411514282227, "chain_id": "33C7UALJVLXWHOWFBTKA1PRPLGZ81O_1_2"}
{"score": 0.9192469120025635, "chain_id": "33C7UALJVLXWHOWFBTKA1PRPLGZ81O_1_3"}
{"score": 0.8594329357147217, "chain_id": "33C7UALJVLXWHOWFBTKA1PRPLGZ81O_1_5"}
{"score": 0.7038501501083374, "chain_id": "33C7UALJVLXWHOWFBTKA1PRPLGZ81O_1_8"}
{"score": 0.4878857135772705, "chain_id": "33C7UALJVLXWHOWFBTKA1PRPLGZ81O_1_9"}
{"score": 0.11845646053552628, "chain_id": "33C7UALJVLXWHOWFBTKA1PRPLGZ81O_1_10"}
{"score": 0.7910907864570618, "chain_id": "37Q970SNZE7E08BOPRQFIGRQXCWS12_1_1"}
{"score": 0.8191338181495667, "chain_id": "37Q970SNZE7E08BOPRQFIGRQXCWS12_1_3"}
{"score": 0.8801499605178833, "chain_id": "37Q970SNZE7E08BOPRQFIGRQXCWS12_1_7"}
{"score": 0.9819706678390503, "chain_id": "37Q970SNZE7E08BOPRQFIGRQXCWS12_1_2"}
{"score": 0.626286506652832, "chain_id": "37Q970SNZE7E08BOPRQFIGRQXCWS12_1_4"}
{"score": 0.9302442669868469, "chain_id": "37Q970SNZE7E08BOPRQFIGRQXCWS12_1_5"}
{"score": 0.4685252904891968, "chain_id": "37Q970SNZE7E08BOPRQFIGRQXCWS12_1_6"}
{"score": 0.2998470962047577, "chain_id": "37Q970SNZE7E08BOPRQFIGRQXCWS12_1_8"}
{"score": 0.5203366875648499, "chain_id": "37Q970SNZE7E08BOPRQFIGRQXCWS12_1_9"}
{"score": 0.6045598983764648, "chain_id": "37Q970SNZE7E08BOPRQFIGRQXCWS12_1_10"}
{"score": 0.27869388461112976, "chain_id": "3PZDLQMM0TK5IC4OB90T8UXD3TB2CE_1_1"}
{"score": 0.5403079390525818, "chain_id": "3PZDLQMM0TK5IC4OB90T8UXD3TB2CE_1_2"}
{"score": 0.4265631139278412, "chain_id": "3PZDLQMM0TK5IC4OB90T8UXD3TB2CE_1_3"}
{"score": 0.26772385835647583, "chain_id": "3PZDLQMM0TK5IC4OB90T8UXD3TB2CE_1_4"}
{"score": 0.73122638463974, "chain_id": "3PZDLQMM0TK5IC4OB90T8UXD3TB2CE_1_5"}
{"score": 0.9564170837402344, "chain_id": "3PZDLQMM0TK5IC4OB90T8UXD3TB2CE_1_6"}
{"score": 0.7092298269271851, "chain_id": "3PZDLQMM0TK5IC4OB90T8UXD3TB2CE_1_7"}
{"score": 0.049288008362054825, "chain_id": "3PZDLQMM0TK5IC4OB90T8UXD3TB2CE_1_8"}
{"score": 0.19196775555610657, "chain_id": "3PZDLQMM0TK5IC4OB90T8UXD3TB2CE_1_9"}
{"score": 0.45574942231178284, "chain_id": "3PZDLQMM0TK5IC4OB90T8UXD3TB2CE_1_10"}
{"score": 0.669166624546051, "chain_id": "3JNQLM5FT4LYLGYUOMTSBDN0X482LH_1_1"}
{"score": 0.8801740407943726, "chain_id": "3JNQLM5FT4LYLGYUOMTSBDN0X482LH_1_2"}
{"score": 0.46211379766464233, "chain_id": "3JNQLM5FT4LYLGYUOMTSBDN0X482LH_1_3"}
{"score": 0.24229979515075684, "chain_id": "3JNQLM5FT4LYLGYUOMTSBDN0X482LH_1_4"}
{"score": 0.8280352354049683, "chain_id": "3JNQLM5FT4LYLGYUOMTSBDN0X482LH_1_5"}
{"score": 0.32108697295188904, "chain_id": "3JNQLM5FT4LYLGYUOMTSBDN0X482LH_1_6"}
{"score": 0.8840315341949463, "chain_id": "3JNQLM5FT4LYLGYUOMTSBDN0X482LH_1_7"}
{"score": 0.3976663053035736, "chain_id": "3JNQLM5FT4LYLGYUOMTSBDN0X482LH_1_8"}
{"score": 0.1921645551919937, "chain_id": "3JNQLM5FT4LYLGYUOMTSBDN0X482LH_1_9"}
{"score": 0.05034950375556946, "chain_id": "3JNQLM5FT4LYLGYUOMTSBDN0X482LH_1_10"}
{"score": 0.40775880217552185, "chain_id": "3RYC5T2D73S5GLUDV410T24SHEMPRX_1_1"}
{"score": 0.7924609184265137, "chain_id": "3RYC5T2D73S5GLUDV410T24SHEMPRX_1_3"}
{"score": 0.3157244324684143, "chain_id": "3RYC5T2D73S5GLUDV410T24SHEMPRX_1_2"}
{"score": 0.3899422585964203, "chain_id": "3RYC5T2D73S5GLUDV410T24SHEMPRX_1_4"}
{"score": 0.09290153533220291, "chain_id": "3RYC5T2D73S5GLUDV410T24SHEMPRX_1_5"}
{"score": 0.26438984274864197, "chain_id": "3RYC5T2D73S5GLUDV410T24SHEMPRX_1_6"}
{"score": 0.06983685493469238, "chain_id": "3RYC5T2D73S5GLUDV410T24SHEMPRX_1_7"}
{"score": 0.0504918247461319, "chain_id": "3RYC5T2D73S5GLUDV410T24SHEMPRX_1_8"}
{"score": 0.3283430337905884, "chain_id": "3RYC5T2D73S5GLUDV410T24SHEMPRX_1_9"}
{"score": 0.6238681077957153, "chain_id": "3RYC5T2D73S5GLUDV410T24SHEMPRX_1_10"}
{"score": 0.3361744284629822, "chain_id": "3NG53N1RLVIZYGFHWVV02L9NRN4P8T_1_1"}
{"score": 0.293053537607193, "chain_id": "3NG53N1RLVIZYGFHWVV02L9NRN4P8T_1_4"}
{"score": 0.21550704538822174, "chain_id": "3NG53N1RLVIZYGFHWVV02L9NRN4P8T_1_5"}
{"score": 0.9694185853004456, "chain_id": "3NG53N1RLVIZYGFHWVV02L9NRN4P8T_1_6"}
{"score": 0.919111967086792, "chain_id": "3NG53N1RLVIZYGFHWVV02L9NRN4P8T_1_8"}
{"score": 0.5769457221031189, "chain_id": "3NG53N1RLVIZYGFHWVV02L9NRN4P8T_1_2"}
{"score": 0.6328338980674744, "chain_id": "3NG53N1RLVIZYGFHWVV02L9NRN4P8T_1_3"}
{"score": 0.903281569480896, "chain_id": "3NG53N1RLVIZYGFHWVV02L9NRN4P8T_1_7"}
{"score": 0.7238082885742188, "chain_id": "3NG53N1RLVIZYGFHWVV02L9NRN4P8T_1_9"}
{"score": 0.6523950695991516, "chain_id": "3NG53N1RLVIZYGFHWVV02L9NRN4P8T_1_10"}
{"score": 0.17049196362495422, "chain_id": "3FTF2T8WLRHPWUVSD9F9UBCU51B9W6_1_1"}
{"score": 0.4638596773147583, "chain_id": "3FTF2T8WLRHPWUVSD9F9UBCU51B9W6_1_4"}
{"score": 0.6763050556182861, "chain_id": "3FTF2T8WLRHPWUVSD9F9UBCU51B9W6_1_2"}
{"score": 0.2523733377456665, "chain_id": "3FTF2T8WLRHPWUVSD9F9UBCU51B9W6_1_3"}
{"score": 0.43789997696876526, "chain_id": "3FTF2T8WLRHPWUVSD9F9UBCU51B9W6_1_5"}
{"score": 0.12911562621593475, "chain_id": "3FTF2T8WLRHPWUVSD9F9UBCU51B9W6_1_6"}
{"score": 0.589759886264801, "chain_id": "3FTF2T8WLRHPWUVSD9F9UBCU51B9W6_1_7"}
{"score": 0.039864830672740936, "chain_id": "3FTF2T8WLRHPWUVSD9F9UBCU51B9W6_1_8"}
{"score": 0.2416335791349411, "chain_id": "3FTF2T8WLRHPWUVSD9F9UBCU51B9W6_1_9"}
{"score": 0.3200138509273529, "chain_id": "3FTF2T8WLRHPWUVSD9F9UBCU51B9W6_1_10"}
{"score": 0.9535447955131531, "chain_id": "3MD9PLUKKIDEFR4RP6ILBG1WNXQZNL_1_3"}
{"score": 0.9613798260688782, "chain_id": "3MD9PLUKKIDEFR4RP6ILBG1WNXQZNL_1_4"}
{"score": 0.8646423816680908, "chain_id": "3MD9PLUKKIDEFR4RP6ILBG1WNXQZNL_1_5"}
{"score": 0.8529472947120667, "chain_id": "3MD9PLUKKIDEFR4RP6ILBG1WNXQZNL_1_7"}
{"score": 0.41329917311668396, "chain_id": "3MD9PLUKKIDEFR4RP6ILBG1WNXQZNL_1_9"}
{"score": 0.9791075587272644, "chain_id": "3MD9PLUKKIDEFR4RP6ILBG1WNXQZNL_1_10"}
{"score": 0.7310245633125305, "chain_id": "3MD9PLUKKIDEFR4RP6ILBG1WNXQZNL_1_1"}
{"score": 0.782798707485199, "chain_id": "3MD9PLUKKIDEFR4RP6ILBG1WNXQZNL_1_2"}
{"score": 0.9318264126777649, "chain_id": "3MD9PLUKKIDEFR4RP6ILBG1WNXQZNL_1_6"}
{"score": 0.4634534418582916, "chain_id": "3MD9PLUKKIDEFR4RP6ILBG1WNXQZNL_1_8"}
{"score": 0.9926069974899292, "chain_id": "3TVRFO09GKEZMW1RCBEL13HWA3OXLI_1_1"}
{"score": 0.9879788160324097, "chain_id": "3TVRFO09GKEZMW1RCBEL13HWA3OXLI_1_3"}
{"score": 0.9856260418891907, "chain_id": "3TVRFO09GKEZMW1RCBEL13HWA3OXLI_1_4"}
{"score": 0.9575147032737732, "chain_id": "3TVRFO09GKEZMW1RCBEL13HWA3OXLI_1_5"}
{"score": 0.6281211376190186, "chain_id": "3TVRFO09GKEZMW1RCBEL13HWA3OXLI_1_6"}
{"score": 0.9604581594467163, "chain_id": "3TVRFO09GKEZMW1RCBEL13HWA3OXLI_1_7"}
{"score": 0.877236008644104, "chain_id": "3TVRFO09GKEZMW1RCBEL13HWA3OXLI_1_8"}
{"score": 0.9797383546829224, "chain_id": "3TVRFO09GKEZMW1RCBEL13HWA3OXLI_1_2"}
{"score": 0.6582927703857422, "chain_id": "3TVRFO09GKEZMW1RCBEL13HWA3OXLI_1_9"}
{"score": 0.5348970890045166, "chain_id": "3TVRFO09GKEZMW1RCBEL13HWA3OXLI_1_10"}
{"score": 0.9349715709686279, "chain_id": "3N2BF7Y2VQTM6OJX7JXEYU8RP3OHMD_1_1"}
{"score": 0.6215378642082214, "chain_id": "3N2BF7Y2VQTM6OJX7JXEYU8RP3OHMD_1_2"}
{"score": 0.9077014327049255, "chain_id": "3N2BF7Y2VQTM6OJX7JXEYU8RP3OHMD_1_3"}
{"score": 0.9183961153030396, "chain_id": "3N2BF7Y2VQTM6OJX7JXEYU8RP3OHMD_1_4"}
{"score": 0.7088794708251953, "chain_id": "3N2BF7Y2VQTM6OJX7JXEYU8RP3OHMD_1_8"}
{"score": 0.7140434384346008, "chain_id": "3N2BF7Y2VQTM6OJX7JXEYU8RP3OHMD_1_9"}
{"score": 0.1998482346534729, "chain_id": "3N2BF7Y2VQTM6OJX7JXEYU8RP3OHMD_1_5"}
{"score": 0.3581417500972748, "chain_id": "3N2BF7Y2VQTM6OJX7JXEYU8RP3OHMD_1_6"}
{"score": 0.6029291152954102, "chain_id": "3N2BF7Y2VQTM6OJX7JXEYU8RP3OHMD_1_7"}
{"score": 0.09454421699047089, "chain_id": "3N2BF7Y2VQTM6OJX7JXEYU8RP3OHMD_1_10"}
{"score": 0.6887797117233276, "chain_id": "3OHYZ19UGC4VW4WVET2Z9CAS8LAAOY_1_1"}
{"score": 0.04528393596410751, "chain_id": "3OHYZ19UGC4VW4WVET2Z9CAS8LAAOY_1_2"}
{"score": 0.8938608765602112, "chain_id": "3OHYZ19UGC4VW4WVET2Z9CAS8LAAOY_1_3"}
{"score": 0.6500686407089233, "chain_id": "3OHYZ19UGC4VW4WVET2Z9CAS8LAAOY_1_8"}
{"score": 0.8901426196098328, "chain_id": "3OHYZ19UGC4VW4WVET2Z9CAS8LAAOY_1_9"}
{"score": 0.3354077637195587, "chain_id": "3OHYZ19UGC4VW4WVET2Z9CAS8LAAOY_1_4"}
{"score": 0.20635926723480225, "chain_id": "3OHYZ19UGC4VW4WVET2Z9CAS8LAAOY_1_5"}
{"score": 0.06644397228956223, "chain_id": "3OHYZ19UGC4VW4WVET2Z9CAS8LAAOY_1_6"}
{"score": 0.1261928826570511, "chain_id": "3OHYZ19UGC4VW4WVET2Z9CAS8LAAOY_1_7"}
{"score": 0.07450974732637405, "chain_id": "3OHYZ19UGC4VW4WVET2Z9CAS8LAAOY_1_10"}
{"score": 0.051681917160749435, "chain_id": "33JKGHPFYCTEGK58AHSR3E5N83LNMP_1_1"}
{"score": 0.026356318965554237, "chain_id": "33JKGHPFYCTEGK58AHSR3E5N83LNMP_1_2"}
{"score": 0.01765015907585621, "chain_id": "33JKGHPFYCTEGK58AHSR3E5N83LNMP_1_3"}
{"score": 0.04059331491589546, "chain_id": "33JKGHPFYCTEGK58AHSR3E5N83LNMP_1_4"}
{"score": 0.03314093500375748, "chain_id": "33JKGHPFYCTEGK58AHSR3E5N83LNMP_1_5"}
{"score": 0.0597665011882782, "chain_id": "33JKGHPFYCTEGK58AHSR3E5N83LNMP_1_6"}
{"score": 0.024819673970341682, "chain_id": "33JKGHPFYCTEGK58AHSR3E5N83LNMP_1_7"}
{"score": 0.0313870944082737, "chain_id": "33JKGHPFYCTEGK58AHSR3E5N83LNMP_1_8"}
{"score": 0.02215954102575779, "chain_id": "33JKGHPFYCTEGK58AHSR3E5N83LNMP_1_9"}
{"score": 0.0435742549598217, "chain_id": "33JKGHPFYCTEGK58AHSR3E5N83LNMP_1_10"}
{"score": 0.968245267868042, "chain_id": "3YT88D1N08XCMSCV7MVWFNFDEHP3KA_1_1"}
{"score": 0.7031989097595215, "chain_id": "3YT88D1N08XCMSCV7MVWFNFDEHP3KA_1_2"}
{"score": 0.9587360620498657, "chain_id": "3YT88D1N08XCMSCV7MVWFNFDEHP3KA_1_3"}
{"score": 0.8802765011787415, "chain_id": "3YT88D1N08XCMSCV7MVWFNFDEHP3KA_1_4"}
{"score": 0.2685087025165558, "chain_id": "3YT88D1N08XCMSCV7MVWFNFDEHP3KA_1_7"}
{"score": 0.1544356793165207, "chain_id": "3YT88D1N08XCMSCV7MVWFNFDEHP3KA_1_5"}
{"score": 0.21844466030597687, "chain_id": "3YT88D1N08XCMSCV7MVWFNFDEHP3KA_1_6"}
{"score": 0.25294700264930725, "chain_id": "3YT88D1N08XCMSCV7MVWFNFDEHP3KA_1_8"}
{"score": 0.07562767714262009, "chain_id": "3YT88D1N08XCMSCV7MVWFNFDEHP3KA_1_9"}
{"score": 0.01823214814066887, "chain_id": "3YT88D1N08XCMSCV7MVWFNFDEHP3KA_1_10"}
{"score": 0.9352657198905945, "chain_id": "3GM6G9ZBKNWCBXAS7DE3CDBFYHWMT7_1_1"}
{"score": 0.9819837808609009, "chain_id": "3GM6G9ZBKNWCBXAS7DE3CDBFYHWMT7_1_2"}
{"score": 0.8013685345649719, "chain_id": "3GM6G9ZBKNWCBXAS7DE3CDBFYHWMT7_1_3"}
{"score": 0.9732340574264526, "chain_id": "3GM6G9ZBKNWCBXAS7DE3CDBFYHWMT7_1_4"}
{"score": 0.4021998941898346, "chain_id": "3GM6G9ZBKNWCBXAS7DE3CDBFYHWMT7_1_5"}
{"score": 0.08844436705112457, "chain_id": "3GM6G9ZBKNWCBXAS7DE3CDBFYHWMT7_1_6"}
{"score": 0.11037007719278336, "chain_id": "3GM6G9ZBKNWCBXAS7DE3CDBFYHWMT7_1_7"}
{"score": 0.3391331732273102, "chain_id": "3GM6G9ZBKNWCBXAS7DE3CDBFYHWMT7_1_8"}
{"score": 0.18138162791728973, "chain_id": "3GM6G9ZBKNWCBXAS7DE3CDBFYHWMT7_1_9"}
{"score": 0.19216454029083252, "chain_id": "3GM6G9ZBKNWCBXAS7DE3CDBFYHWMT7_1_10"}
{"score": 0.04018065333366394, "chain_id": "3WYP994K17Q63GOUU3ULVY68NUT6YM_1_1"}
{"score": 0.08356434851884842, "chain_id": "3WYP994K17Q63GOUU3ULVY68NUT6YM_1_2"}
{"score": 0.13133925199508667, "chain_id": "3WYP994K17Q63GOUU3ULVY68NUT6YM_1_3"}
{"score": 0.354520320892334, "chain_id": "3WYP994K17Q63GOUU3ULVY68NUT6YM_1_4"}
{"score": 0.304402619600296, "chain_id": "3WYP994K17Q63GOUU3ULVY68NUT6YM_1_5"}
{"score": 0.08347127586603165, "chain_id": "3WYP994K17Q63GOUU3ULVY68NUT6YM_1_6"}
{"score": 0.01644216664135456, "chain_id": "3WYP994K17Q63GOUU3ULVY68NUT6YM_1_7"}
{"score": 0.0953531265258789, "chain_id": "3WYP994K17Q63GOUU3ULVY68NUT6YM_1_8"}
{"score": 0.012851043604314327, "chain_id": "3WYP994K17Q63GOUU3ULVY68NUT6YM_1_9"}
{"score": 0.0418812595307827, "chain_id": "3WYP994K17Q63GOUU3ULVY68NUT6YM_1_10"}
{"score": 0.836455225944519, "chain_id": "3YMU66OBIN7MEENBWGZJLPOURDAHGT_1_1"}
{"score": 0.2348470240831375, "chain_id": "3YMU66OBIN7MEENBWGZJLPOURDAHGT_1_2"}
{"score": 0.07305336743593216, "chain_id": "3YMU66OBIN7MEENBWGZJLPOURDAHGT_1_3"}
{"score": 0.10315355658531189, "chain_id": "3YMU66OBIN7MEENBWGZJLPOURDAHGT_1_4"}
{"score": 0.06498444080352783, "chain_id": "3YMU66OBIN7MEENBWGZJLPOURDAHGT_1_5"}
{"score": 0.07308143377304077, "chain_id": "3YMU66OBIN7MEENBWGZJLPOURDAHGT_1_6"}
{"score": 0.4547892212867737, "chain_id": "3YMU66OBIN7MEENBWGZJLPOURDAHGT_1_7"}
{"score": 0.20878629386425018, "chain_id": "3YMU66OBIN7MEENBWGZJLPOURDAHGT_1_8"}
{"score": 0.27893370389938354, "chain_id": "3YMU66OBIN7MEENBWGZJLPOURDAHGT_1_9"}
{"score": 0.12369024008512497, "chain_id": "3YMU66OBIN7MEENBWGZJLPOURDAHGT_1_10"}
{"score": 0.885199785232544, "chain_id": "3NQL1CS15R7RI63VVB2T7QM7522YVI_1_1"}
{"score": 0.36547037959098816, "chain_id": "3NQL1CS15R7RI63VVB2T7QM7522YVI_1_2"}
{"score": 0.1416352540254593, "chain_id": "3NQL1CS15R7RI63VVB2T7QM7522YVI_1_3"}
{"score": 0.5442482233047485, "chain_id": "3NQL1CS15R7RI63VVB2T7QM7522YVI_1_4"}
{"score": 0.4208383560180664, "chain_id": "3NQL1CS15R7RI63VVB2T7QM7522YVI_1_5"}
{"score": 0.508206307888031, "chain_id": "3NQL1CS15R7RI63VVB2T7QM7522YVI_1_6"}
{"score": 0.32459262013435364, "chain_id": "3NQL1CS15R7RI63VVB2T7QM7522YVI_1_7"}
{"score": 0.04308289662003517, "chain_id": "3NQL1CS15R7RI63VVB2T7QM7522YVI_1_8"}
{"score": 0.4841058850288391, "chain_id": "3NQL1CS15R7RI63VVB2T7QM7522YVI_1_9"}
{"score": 0.3064914345741272, "chain_id": "3NQL1CS15R7RI63VVB2T7QM7522YVI_1_10"}
{"score": 0.916093647480011, "chain_id": "3EFVCAY5L383C5CJ1IQG5PNBIL98JH_1_6"}
{"score": 0.021679580211639404, "chain_id": "3EFVCAY5L383C5CJ1IQG5PNBIL98JH_1_1"}
{"score": 0.03150807321071625, "chain_id": "3EFVCAY5L383C5CJ1IQG5PNBIL98JH_1_2"}
{"score": 0.25358742475509644, "chain_id": "3EFVCAY5L383C5CJ1IQG5PNBIL98JH_1_3"}
{"score": 0.10707743465900421, "chain_id": "3EFVCAY5L383C5CJ1IQG5PNBIL98JH_1_4"}
{"score": 0.05092749372124672, "chain_id": "3EFVCAY5L383C5CJ1IQG5PNBIL98JH_1_5"}
{"score": 0.2272229939699173, "chain_id": "3EFVCAY5L383C5CJ1IQG5PNBIL98JH_1_7"}
{"score": 0.024895092472434044, "chain_id": "3EFVCAY5L383C5CJ1IQG5PNBIL98JH_1_8"}
{"score": 0.02438265271484852, "chain_id": "3EFVCAY5L383C5CJ1IQG5PNBIL98JH_1_9"}
{"score": 0.1831742376089096, "chain_id": "3EFVCAY5L383C5CJ1IQG5PNBIL98JH_1_10"}
{"score": 0.98382967710495, "chain_id": "3F0BG9B9MPMP7G2ZDDZD1C64LFE7YA_1_1"}
{"score": 0.9767733812332153, "chain_id": "3F0BG9B9MPMP7G2ZDDZD1C64LFE7YA_1_2"}
{"score": 0.6802504658699036, "chain_id": "3F0BG9B9MPMP7G2ZDDZD1C64LFE7YA_1_6"}
{"score": 0.35302770137786865, "chain_id": "3F0BG9B9MPMP7G2ZDDZD1C64LFE7YA_1_3"}
{"score": 0.2684079110622406, "chain_id": "3F0BG9B9MPMP7G2ZDDZD1C64LFE7YA_1_4"}
{"score": 0.21648705005645752, "chain_id": "3F0BG9B9MPMP7G2ZDDZD1C64LFE7YA_1_5"}
{"score": 0.35077059268951416, "chain_id": "3F0BG9B9MPMP7G2ZDDZD1C64LFE7YA_1_7"}
{"score": 0.34986117482185364, "chain_id": "3F0BG9B9MPMP7G2ZDDZD1C64LFE7YA_1_8"}
{"score": 0.43994593620300293, "chain_id": "3F0BG9B9MPMP7G2ZDDZD1C64LFE7YA_1_9"}
{"score": 0.3660951554775238, "chain_id": "3F0BG9B9MPMP7G2ZDDZD1C64LFE7YA_1_10"}
{"score": 0.3032434582710266, "chain_id": "3FTF2T8WLRHPWUVSD9F9UBCU4O1W9S_1_2"}
{"score": 0.6857616305351257, "chain_id": "3FTF2T8WLRHPWUVSD9F9UBCU4O1W9S_1_4"}
{"score": 0.3549724221229553, "chain_id": "3FTF2T8WLRHPWUVSD9F9UBCU4O1W9S_1_10"}
{"score": 0.24300453066825867, "chain_id": "3FTF2T8WLRHPWUVSD9F9UBCU4O1W9S_1_1"}
{"score": 0.473504900932312, "chain_id": "3FTF2T8WLRHPWUVSD9F9UBCU4O1W9S_1_3"}
{"score": 0.2367193102836609, "chain_id": "3FTF2T8WLRHPWUVSD9F9UBCU4O1W9S_1_5"}
{"score": 0.031804777681827545, "chain_id": "3FTF2T8WLRHPWUVSD9F9UBCU4O1W9S_1_6"}
{"score": 0.07702593505382538, "chain_id": "3FTF2T8WLRHPWUVSD9F9UBCU4O1W9S_1_7"}
{"score": 0.03772624954581261, "chain_id": "3FTF2T8WLRHPWUVSD9F9UBCU4O1W9S_1_8"}
{"score": 0.30694979429244995, "chain_id": "3FTF2T8WLRHPWUVSD9F9UBCU4O1W9S_1_9"}
{"score": 0.9768174290657043, "chain_id": "3KV0LJBBH2KZVIX03O98CYAXDFPMRP_1_1"}
{"score": 0.9704880118370056, "chain_id": "3KV0LJBBH2KZVIX03O98CYAXDFPMRP_1_2"}
{"score": 0.14565438032150269, "chain_id": "3KV0LJBBH2KZVIX03O98CYAXDFPMRP_1_5"}
{"score": 0.5581644773483276, "chain_id": "3KV0LJBBH2KZVIX03O98CYAXDFPMRP_1_6"}
{"score": 0.213920459151268, "chain_id": "3KV0LJBBH2KZVIX03O98CYAXDFPMRP_1_3"}
{"score": 0.09397260844707489, "chain_id": "3KV0LJBBH2KZVIX03O98CYAXDFPMRP_1_4"}
{"score": 0.44561147689819336, "chain_id": "3KV0LJBBH2KZVIX03O98CYAXDFPMRP_1_7"}
{"score": 0.6169430613517761, "chain_id": "3KV0LJBBH2KZVIX03O98CYAXDFPMRP_1_8"}
{"score": 0.10720892250537872, "chain_id": "3KV0LJBBH2KZVIX03O98CYAXDFPMRP_1_9"}
{"score": 0.8367414474487305, "chain_id": "3KV0LJBBH2KZVIX03O98CYAXDFPMRP_1_10"}
{"score": 0.9911121726036072, "chain_id": "3SEPORI8WNY7V8A2G2DGPAHWSUUAZR_1_1"}
{"score": 0.9898161888122559, "chain_id": "3SEPORI8WNY7V8A2G2DGPAHWSUUAZR_1_3"}
{"score": 0.9856460690498352, "chain_id": "3SEPORI8WNY7V8A2G2DGPAHWSUUAZR_1_4"}
{"score": 0.9221913814544678, "chain_id": "3SEPORI8WNY7V8A2G2DGPAHWSUUAZR_1_5"}
{"score": 0.4168737530708313, "chain_id": "3SEPORI8WNY7V8A2G2DGPAHWSUUAZR_1_8"}
{"score": 0.3582402467727661, "chain_id": "3SEPORI8WNY7V8A2G2DGPAHWSUUAZR_1_10"}
{"score": 0.9916471242904663, "chain_id": "3SEPORI8WNY7V8A2G2DGPAHWSUUAZR_1_2"}
{"score": 0.7075740098953247, "chain_id": "3SEPORI8WNY7V8A2G2DGPAHWSUUAZR_1_6"}
{"score": 0.8942641019821167, "chain_id": "3SEPORI8WNY7V8A2G2DGPAHWSUUAZR_1_7"}
{"score": 0.18791231513023376, "chain_id": "3SEPORI8WNY7V8A2G2DGPAHWSUUAZR_1_9"}
{"score": 0.019064152613282204, "chain_id": "3M1CVSFP604YHG9BT6U3YH5SJGNAQL_1_1"}
{"score": 0.01976623758673668, "chain_id": "3M1CVSFP604YHG9BT6U3YH5SJGNAQL_1_2"}
{"score": 0.0171392522752285, "chain_id": "3M1CVSFP604YHG9BT6U3YH5SJGNAQL_1_3"}
{"score": 0.135270357131958, "chain_id": "3M1CVSFP604YHG9BT6U3YH5SJGNAQL_1_4"}
{"score": 0.023574354127049446, "chain_id": "3M1CVSFP604YHG9BT6U3YH5SJGNAQL_1_5"}
{"score": 0.02458224631845951, "chain_id": "3M1CVSFP604YHG9BT6U3YH5SJGNAQL_1_6"}
{"score": 0.03330564871430397, "chain_id": "3M1CVSFP604YHG9BT6U3YH5SJGNAQL_1_7"}
{"score": 0.016738368198275566, "chain_id": "3M1CVSFP604YHG9BT6U3YH5SJGNAQL_1_8"}
{"score": 0.06352024525403976, "chain_id": "3M1CVSFP604YHG9BT6U3YH5SJGNAQL_1_9"}
{"score": 0.04668400436639786, "chain_id": "3M1CVSFP604YHG9BT6U3YH5SJGNAQL_1_10"}
{"score": 0.2625790536403656, "chain_id": "3PDJHANYK5FKHLY5K3QX9YB5WAJH6O_1_1"}
{"score": 0.36496320366859436, "chain_id": "3PDJHANYK5FKHLY5K3QX9YB5WAJH6O_1_2"}
{"score": 0.3154843747615814, "chain_id": "3PDJHANYK5FKHLY5K3QX9YB5WAJH6O_1_3"}
{"score": 0.18910035490989685, "chain_id": "3PDJHANYK5FKHLY5K3QX9YB5WAJH6O_1_4"}
{"score": 0.08130021393299103, "chain_id": "3PDJHANYK5FKHLY5K3QX9YB5WAJH6O_1_5"}
{"score": 0.20923052728176117, "chain_id": "3PDJHANYK5FKHLY5K3QX9YB5WAJH6O_1_6"}
{"score": 0.036322277039289474, "chain_id": "3PDJHANYK5FKHLY5K3QX9YB5WAJH6O_1_7"}
{"score": 0.11471515148878098, "chain_id": "3PDJHANYK5FKHLY5K3QX9YB5WAJH6O_1_8"}
{"score": 0.0864279642701149, "chain_id": "3PDJHANYK5FKHLY5K3QX9YB5WAJH6O_1_9"}
{"score": 0.055278580635786057, "chain_id": "3PDJHANYK5FKHLY5K3QX9YB5WAJH6O_1_10"}
{"score": 0.5384483933448792, "chain_id": "3XUHV3NRVKXOYHYRFKGSHSX50RG5HS_1_1"}
{"score": 0.06266392767429352, "chain_id": "3XUHV3NRVKXOYHYRFKGSHSX50RG5HS_1_2"}
{"score": 0.11413362622261047, "chain_id": "3XUHV3NRVKXOYHYRFKGSHSX50RG5HS_1_3"}
{"score": 0.16330312192440033, "chain_id": "3XUHV3NRVKXOYHYRFKGSHSX50RG5HS_1_4"}
{"score": 0.04198186844587326, "chain_id": "3XUHV3NRVKXOYHYRFKGSHSX50RG5HS_1_5"}
{"score": 0.07314088940620422, "chain_id": "3XUHV3NRVKXOYHYRFKGSHSX50RG5HS_1_6"}
{"score": 0.07441078126430511, "chain_id": "3XUHV3NRVKXOYHYRFKGSHSX50RG5HS_1_7"}
{"score": 0.01832769811153412, "chain_id": "3XUHV3NRVKXOYHYRFKGSHSX50RG5HS_1_8"}
{"score": 0.957331657409668, "chain_id": "3XUHV3NRVKXOYHYRFKGSHSX50RG5HS_1_9"}
{"score": 0.5292197465896606, "chain_id": "3XUHV3NRVKXOYHYRFKGSHSX50RG5HS_1_10"}
{"score": 0.8742250204086304, "chain_id": "3HVVDCPGTERC5EZ6QG2E68YM6SPYTP_1_5"}
{"score": 0.9586597084999084, "chain_id": "3HVVDCPGTERC5EZ6QG2E68YM6SPYTP_1_6"}
{"score": 0.9724140763282776, "chain_id": "3HVVDCPGTERC5EZ6QG2E68YM6SPYTP_1_7"}
{"score": 0.9828565716743469, "chain_id": "3HVVDCPGTERC5EZ6QG2E68YM6SPYTP_1_9"}
{"score": 0.24050478637218475, "chain_id": "3HVVDCPGTERC5EZ6QG2E68YM6SPYTP_1_1"}
{"score": 0.21326079964637756, "chain_id": "3HVVDCPGTERC5EZ6QG2E68YM6SPYTP_1_2"}
{"score": 0.2387089878320694, "chain_id": "3HVVDCPGTERC5EZ6QG2E68YM6SPYTP_1_3"}
{"score": 0.15983211994171143, "chain_id": "3HVVDCPGTERC5EZ6QG2E68YM6SPYTP_1_4"}
{"score": 0.555397093296051, "chain_id": "3HVVDCPGTERC5EZ6QG2E68YM6SPYTP_1_8"}
{"score": 0.3408890664577484, "chain_id": "3HVVDCPGTERC5EZ6QG2E68YM6SPYTP_1_10"}
{"score": 0.5205956101417542, "chain_id": "3W92K5RLWUGTGITBK9XWWTOEBLVV59_1_1"}
{"score": 0.03491652384400368, "chain_id": "3W92K5RLWUGTGITBK9XWWTOEBLVV59_1_2"}
{"score": 0.06520351022481918, "chain_id": "3W92K5RLWUGTGITBK9XWWTOEBLVV59_1_3"}
{"score": 0.6178109645843506, "chain_id": "3W92K5RLWUGTGITBK9XWWTOEBLVV59_1_4"}
{"score": 0.17612048983573914, "chain_id": "3W92K5RLWUGTGITBK9XWWTOEBLVV59_1_5"}
{"score": 0.46400755643844604, "chain_id": "3W92K5RLWUGTGITBK9XWWTOEBLVV59_1_6"}
{"score": 0.41982710361480713, "chain_id": "3W92K5RLWUGTGITBK9XWWTOEBLVV59_1_7"}
{"score": 0.7199838161468506, "chain_id": "3W92K5RLWUGTGITBK9XWWTOEBLVV59_1_8"}
{"score": 0.18457576632499695, "chain_id": "3W92K5RLWUGTGITBK9XWWTOEBLVV59_1_9"}
{"score": 0.1476249396800995, "chain_id": "3W92K5RLWUGTGITBK9XWWTOEBLVV59_1_10"}
{"score": 0.4956073462963104, "chain_id": "31LM9EDVOLROFCZN7KFZNMD684SNJH_1_1"}
{"score": 0.8805655241012573, "chain_id": "31LM9EDVOLROFCZN7KFZNMD684SNJH_1_2"}
{"score": 0.6614198684692383, "chain_id": "31LM9EDVOLROFCZN7KFZNMD684SNJH_1_3"}
{"score": 0.861523449420929, "chain_id": "31LM9EDVOLROFCZN7KFZNMD684SNJH_1_5"}
{"score": 0.7015535831451416, "chain_id": "31LM9EDVOLROFCZN7KFZNMD684SNJH_1_10"}
{"score": 0.2351173758506775, "chain_id": "31LM9EDVOLROFCZN7KFZNMD684SNJH_1_4"}
{"score": 0.2818834185600281, "chain_id": "31LM9EDVOLROFCZN7KFZNMD684SNJH_1_6"}
{"score": 0.129672572016716, "chain_id": "31LM9EDVOLROFCZN7KFZNMD684SNJH_1_7"}
{"score": 0.16477566957473755, "chain_id": "31LM9EDVOLROFCZN7KFZNMD684SNJH_1_8"}
{"score": 0.29325956106185913, "chain_id": "31LM9EDVOLROFCZN7KFZNMD684SNJH_1_9"}
{"score": 0.9812379479408264, "chain_id": "3YWRV122CSYCQLNDDHUUCRWMWGA8U7_1_5"}
{"score": 0.7009619474411011, "chain_id": "3YWRV122CSYCQLNDDHUUCRWMWGA8U7_1_1"}
{"score": 0.39008018374443054, "chain_id": "3YWRV122CSYCQLNDDHUUCRWMWGA8U7_1_2"}
{"score": 0.9685760736465454, "chain_id": "3YWRV122CSYCQLNDDHUUCRWMWGA8U7_1_3"}
{"score": 0.8974879384040833, "chain_id": "3YWRV122CSYCQLNDDHUUCRWMWGA8U7_1_4"}
{"score": 0.9388367533683777, "chain_id": "3YWRV122CSYCQLNDDHUUCRWMWGA8U7_1_6"}
{"score": 0.9664422869682312, "chain_id": "3YWRV122CSYCQLNDDHUUCRWMWGA8U7_1_7"}
{"score": 0.8183411955833435, "chain_id": "3YWRV122CSYCQLNDDHUUCRWMWGA8U7_1_8"}
{"score": 0.21936725080013275, "chain_id": "3YWRV122CSYCQLNDDHUUCRWMWGA8U7_1_9"}
{"score": 0.6167675256729126, "chain_id": "3YWRV122CSYCQLNDDHUUCRWMWGA8U7_1_10"}
{"score": 0.9930686950683594, "chain_id": "3SB4CE2TJVUIQDANFKPVSP1LKQ3XAG_1_5"}
{"score": 0.96903395652771, "chain_id": "3SB4CE2TJVUIQDANFKPVSP1LKQ3XAG_1_6"}
{"score": 0.6103258728981018, "chain_id": "3SB4CE2TJVUIQDANFKPVSP1LKQ3XAG_1_8"}
{"score": 0.03978811949491501, "chain_id": "3SB4CE2TJVUIQDANFKPVSP1LKQ3XAG_1_1"}
{"score": 0.03575177118182182, "chain_id": "3SB4CE2TJVUIQDANFKPVSP1LKQ3XAG_1_2"}
{"score": 0.01705707237124443, "chain_id": "3SB4CE2TJVUIQDANFKPVSP1LKQ3XAG_1_3"}
{"score": 0.028682956472039223, "chain_id": "3SB4CE2TJVUIQDANFKPVSP1LKQ3XAG_1_4"}
{"score": 0.7280322313308716, "chain_id": "3SB4CE2TJVUIQDANFKPVSP1LKQ3XAG_1_7"}
{"score": 0.3872619569301605, "chain_id": "3SB4CE2TJVUIQDANFKPVSP1LKQ3XAG_1_9"}
{"score": 0.20639421045780182, "chain_id": "3SB4CE2TJVUIQDANFKPVSP1LKQ3XAG_1_10"}
{"score": 0.6140138506889343, "chain_id": "33CID57104SN6YUDSM7XUNSS76U3LV_1_4"}
{"score": 0.4029552638530731, "chain_id": "33CID57104SN6YUDSM7XUNSS76U3LV_1_7"}
{"score": 0.35546451807022095, "chain_id": "33CID57104SN6YUDSM7XUNSS76U3LV_1_8"}
{"score": 0.6287064552307129, "chain_id": "33CID57104SN6YUDSM7XUNSS76U3LV_1_9"}
{"score": 0.41213279962539673, "chain_id": "33CID57104SN6YUDSM7XUNSS76U3LV_1_10"}
{"score": 0.5844631791114807, "chain_id": "33CID57104SN6YUDSM7XUNSS76U3LV_1_1"}
{"score": 0.20611882209777832, "chain_id": "33CID57104SN6YUDSM7XUNSS76U3LV_1_2"}
{"score": 0.4618491232395172, "chain_id": "33CID57104SN6YUDSM7XUNSS76U3LV_1_3"}
{"score": 0.595639169216156, "chain_id": "33CID57104SN6YUDSM7XUNSS76U3LV_1_5"}
{"score": 0.039698511362075806, "chain_id": "33CID57104SN6YUDSM7XUNSS76U3LV_1_6"}
{"score": 0.10065541416406631, "chain_id": "34PGFRQONOAE2681ZL6MJ5QX114WJV_1_1"}
{"score": 0.6875263452529907, "chain_id": "34PGFRQONOAE2681ZL6MJ5QX114WJV_1_4"}
{"score": 0.0893380343914032, "chain_id": "34PGFRQONOAE2681ZL6MJ5QX114WJV_1_2"}
{"score": 0.5431599020957947, "chain_id": "34PGFRQONOAE2681ZL6MJ5QX114WJV_1_3"}
{"score": 0.06345682591199875, "chain_id": "34PGFRQONOAE2681ZL6MJ5QX114WJV_1_5"}
{"score": 0.13411621749401093, "chain_id": "34PGFRQONOAE2681ZL6MJ5QX114WJV_1_6"}
{"score": 0.01688460260629654, "chain_id": "34PGFRQONOAE2681ZL6MJ5QX114WJV_1_7"}
{"score": 0.021733997389674187, "chain_id": "34PGFRQONOAE2681ZL6MJ5QX114WJV_1_8"}
{"score": 0.042067527770996094, "chain_id": "34PGFRQONOAE2681ZL6MJ5QX114WJV_1_9"}
{"score": 0.023898271843791008, "chain_id": "34PGFRQONOAE2681ZL6MJ5QX114WJV_1_10"}
{"score": 0.25920817255973816, "chain_id": "30LSNF239UUWVFQO3JWFJXV8KPI2IP_1_1"}
{"score": 0.3303495943546295, "chain_id": "30LSNF239UUWVFQO3JWFJXV8KPI2IP_1_2"}
{"score": 0.3823340833187103, "chain_id": "30LSNF239UUWVFQO3JWFJXV8KPI2IP_1_3"}
{"score": 0.6286837458610535, "chain_id": "30LSNF239UUWVFQO3JWFJXV8KPI2IP_1_4"}
{"score": 0.6188557744026184, "chain_id": "30LSNF239UUWVFQO3JWFJXV8KPI2IP_1_5"}
{"score": 0.787028431892395, "chain_id": "30LSNF239UUWVFQO3JWFJXV8KPI2IP_1_6"}
{"score": 0.5087283253669739, "chain_id": "30LSNF239UUWVFQO3JWFJXV8KPI2IP_1_7"}
{"score": 0.9056010842323303, "chain_id": "30LSNF239UUWVFQO3JWFJXV8KPI2IP_1_8"}
{"score": 0.13704563677310944, "chain_id": "30LSNF239UUWVFQO3JWFJXV8KPI2IP_1_9"}
{"score": 0.5627762675285339, "chain_id": "30LSNF239UUWVFQO3JWFJXV8KPI2IP_1_10"}
{"score": 0.0181911401450634, "chain_id": "3B2X28YI3WEAQ8VJKBG1NN87EBL6B9_1_1"}
{"score": 0.026328837499022484, "chain_id": "3B2X28YI3WEAQ8VJKBG1NN87EBL6B9_1_2"}
{"score": 0.02378023974597454, "chain_id": "3B2X28YI3WEAQ8VJKBG1NN87EBL6B9_1_3"}
{"score": 0.030792970210313797, "chain_id": "3B2X28YI3WEAQ8VJKBG1NN87EBL6B9_1_4"}
{"score": 0.02286960743367672, "chain_id": "3B2X28YI3WEAQ8VJKBG1NN87EBL6B9_1_5"}
{"score": 0.026801761239767075, "chain_id": "3B2X28YI3WEAQ8VJKBG1NN87EBL6B9_1_6"}
{"score": 0.02720894291996956, "chain_id": "3B2X28YI3WEAQ8VJKBG1NN87EBL6B9_1_7"}
{"score": 0.028126318007707596, "chain_id": "3B2X28YI3WEAQ8VJKBG1NN87EBL6B9_1_8"}
{"score": 0.01827990636229515, "chain_id": "3B2X28YI3WEAQ8VJKBG1NN87EBL6B9_1_9"}
{"score": 0.026835449039936066, "chain_id": "3B2X28YI3WEAQ8VJKBG1NN87EBL6B9_1_10"}
{"score": 0.028082288801670074, "chain_id": "374TNBHA8BUZDY7E9C8J13NZZMMYQM_1_7"}
{"score": 0.01696929894387722, "chain_id": "374TNBHA8BUZDY7E9C8J13NZZMMYQM_1_1"}
{"score": 0.01639879308640957, "chain_id": "374TNBHA8BUZDY7E9C8J13NZZMMYQM_1_2"}
{"score": 0.020435430109500885, "chain_id": "374TNBHA8BUZDY7E9C8J13NZZMMYQM_1_3"}
{"score": 0.05462782084941864, "chain_id": "374TNBHA8BUZDY7E9C8J13NZZMMYQM_1_4"}
{"score": 0.013786219991743565, "chain_id": "374TNBHA8BUZDY7E9C8J13NZZMMYQM_1_5"}
{"score": 0.016446277499198914, "chain_id": "374TNBHA8BUZDY7E9C8J13NZZMMYQM_1_6"}
{"score": 0.022262051701545715, "chain_id": "374TNBHA8BUZDY7E9C8J13NZZMMYQM_1_8"}
{"score": 0.023215752094984055, "chain_id": "374TNBHA8BUZDY7E9C8J13NZZMMYQM_1_9"}
{"score": 0.0713043138384819, "chain_id": "374TNBHA8BUZDY7E9C8J13NZZMMYQM_1_10"}
{"score": 0.5431599020957947, "chain_id": "34YB12FSQYN86SOMNDFWDUWQK2LMG1_1_3"}
{"score": 0.10065541416406631, "chain_id": "34YB12FSQYN86SOMNDFWDUWQK2LMG1_1_1"}
{"score": 0.0893380343914032, "chain_id": "34YB12FSQYN86SOMNDFWDUWQK2LMG1_1_2"}
{"score": 0.6875263452529907, "chain_id": "34YB12FSQYN86SOMNDFWDUWQK2LMG1_1_4"}
{"score": 0.06345682591199875, "chain_id": "34YB12FSQYN86SOMNDFWDUWQK2LMG1_1_5"}
{"score": 0.13411621749401093, "chain_id": "34YB12FSQYN86SOMNDFWDUWQK2LMG1_1_6"}
{"score": 0.01688460260629654, "chain_id": "34YB12FSQYN86SOMNDFWDUWQK2LMG1_1_7"}
{"score": 0.021733997389674187, "chain_id": "34YB12FSQYN86SOMNDFWDUWQK2LMG1_1_8"}
{"score": 0.042067527770996094, "chain_id": "34YB12FSQYN86SOMNDFWDUWQK2LMG1_1_9"}
{"score": 0.023898271843791008, "chain_id": "34YB12FSQYN86SOMNDFWDUWQK2LMG1_1_10"}
{"score": 0.02572096511721611, "chain_id": "3JJVG1YBEBWE74V5FS6WVHU7IVAB5S_1_9"}
{"score": 0.11528665572404861, "chain_id": "3JJVG1YBEBWE74V5FS6WVHU7IVAB5S_1_1"}
{"score": 0.024393899366259575, "chain_id": "3JJVG1YBEBWE74V5FS6WVHU7IVAB5S_1_2"}
{"score": 0.018056116998195648, "chain_id": "3JJVG1YBEBWE74V5FS6WVHU7IVAB5S_1_3"}
{"score": 0.04068540036678314, "chain_id": "3JJVG1YBEBWE74V5FS6WVHU7IVAB5S_1_4"}
{"score": 0.08417107909917831, "chain_id": "3JJVG1YBEBWE74V5FS6WVHU7IVAB5S_1_5"}
{"score": 0.02153446339070797, "chain_id": "3JJVG1YBEBWE74V5FS6WVHU7IVAB5S_1_6"}
{"score": 0.016376342624425888, "chain_id": "3JJVG1YBEBWE74V5FS6WVHU7IVAB5S_1_7"}
{"score": 0.01137245912104845, "chain_id": "3JJVG1YBEBWE74V5FS6WVHU7IVAB5S_1_8"}
{"score": 0.021955082193017006, "chain_id": "3JJVG1YBEBWE74V5FS6WVHU7IVAB5S_1_10"}
{"score": 0.08312342315912247, "chain_id": "3BXQMRHWKZXRBAPH7I4DH9XHTDZUME_1_1"}
{"score": 0.03982476517558098, "chain_id": "3BXQMRHWKZXRBAPH7I4DH9XHTDZUME_1_2"}
{"score": 0.032035987824201584, "chain_id": "3BXQMRHWKZXRBAPH7I4DH9XHTDZUME_1_3"}
{"score": 0.017670895904302597, "chain_id": "3BXQMRHWKZXRBAPH7I4DH9XHTDZUME_1_4"}
{"score": 0.03359051048755646, "chain_id": "3BXQMRHWKZXRBAPH7I4DH9XHTDZUME_1_5"}
{"score": 0.01328863576054573, "chain_id": "3BXQMRHWKZXRBAPH7I4DH9XHTDZUME_1_6"}
{"score": 0.18450745940208435, "chain_id": "3BXQMRHWKZXRBAPH7I4DH9XHTDZUME_1_7"}
{"score": 0.09648758172988892, "chain_id": "3BXQMRHWKZXRBAPH7I4DH9XHTDZUME_1_8"}
{"score": 0.035725317895412445, "chain_id": "3BXQMRHWKZXRBAPH7I4DH9XHTDZUME_1_9"}
{"score": 0.047486312687397, "chain_id": "3BXQMRHWKZXRBAPH7I4DH9XHTDZUME_1_10"}
{"score": 0.20296718180179596, "chain_id": "3PIWWX1FJJ5SWM82SMN7UFWP97KJJQ_1_6"}
{"score": 0.2552315890789032, "chain_id": "3PIWWX1FJJ5SWM82SMN7UFWP97KJJQ_1_1"}
{"score": 0.09363947063684464, "chain_id": "3PIWWX1FJJ5SWM82SMN7UFWP97KJJQ_1_2"}
{"score": 0.4707423448562622, "chain_id": "3PIWWX1FJJ5SWM82SMN7UFWP97KJJQ_1_3"}
{"score": 0.35288748145103455, "chain_id": "3PIWWX1FJJ5SWM82SMN7UFWP97KJJQ_1_4"}
{"score": 0.0252839308232069, "chain_id": "3PIWWX1FJJ5SWM82SMN7UFWP97KJJQ_1_5"}
{"score": 0.11963173747062683, "chain_id": "3PIWWX1FJJ5SWM82SMN7UFWP97KJJQ_1_7"}
{"score": 0.05743276700377464, "chain_id": "3PIWWX1FJJ5SWM82SMN7UFWP97KJJQ_1_8"}
{"score": 0.038073986768722534, "chain_id": "3PIWWX1FJJ5SWM82SMN7UFWP97KJJQ_1_9"}
{"score": 0.034773021936416626, "chain_id": "3PIWWX1FJJ5SWM82SMN7UFWP97KJJQ_1_10"}
{"score": 0.04617960378527641, "chain_id": "3VE8AYVF8MWN73QNISZVQRVJZ1RF84_1_1"}
{"score": 0.04344794526696205, "chain_id": "3VE8AYVF8MWN73QNISZVQRVJZ1RF84_1_2"}
{"score": 0.08727468550205231, "chain_id": "3VE8AYVF8MWN73QNISZVQRVJZ1RF84_1_3"}
{"score": 0.028689278289675713, "chain_id": "3VE8AYVF8MWN73QNISZVQRVJZ1RF84_1_4"}
{"score": 0.024959711357951164, "chain_id": "3VE8AYVF8MWN73QNISZVQRVJZ1RF84_1_5"}
{"score": 0.06251651048660278, "chain_id": "3VE8AYVF8MWN73QNISZVQRVJZ1RF84_1_6"}
{"score": 0.5907382965087891, "chain_id": "3VE8AYVF8MWN73QNISZVQRVJZ1RF84_1_7"}
{"score": 0.02518445812165737, "chain_id": "3VE8AYVF8MWN73QNISZVQRVJZ1RF84_1_8"}
{"score": 0.3215942978858948, "chain_id": "3VE8AYVF8MWN73QNISZVQRVJZ1RF84_1_9"}
{"score": 0.14419716596603394, "chain_id": "3VE8AYVF8MWN73QNISZVQRVJZ1RF84_1_10"}
{"score": 0.08253240585327148, "chain_id": "39U1BHVTDLQBPB2I1V9OGE29XBJ3TH_1_1"}
{"score": 0.6979168653488159, "chain_id": "39U1BHVTDLQBPB2I1V9OGE29XBJ3TH_1_2"}
{"score": 0.0690389946103096, "chain_id": "39U1BHVTDLQBPB2I1V9OGE29XBJ3TH_1_3"}
{"score": 0.0981466993689537, "chain_id": "39U1BHVTDLQBPB2I1V9OGE29XBJ3TH_1_4"}
{"score": 0.052319783717393875, "chain_id": "39U1BHVTDLQBPB2I1V9OGE29XBJ3TH_1_5"}
{"score": 0.3504936099052429, "chain_id": "39U1BHVTDLQBPB2I1V9OGE29XBJ3TH_1_6"}
{"score": 0.4060216546058655, "chain_id": "39U1BHVTDLQBPB2I1V9OGE29XBJ3TH_1_7"}
{"score": 0.7254627346992493, "chain_id": "39U1BHVTDLQBPB2I1V9OGE29XBJ3TH_1_8"}
{"score": 0.020339692011475563, "chain_id": "39U1BHVTDLQBPB2I1V9OGE29XBJ3TH_1_9"}
{"score": 0.34259602427482605, "chain_id": "39U1BHVTDLQBPB2I1V9OGE29XBJ3TH_1_10"}
{"score": 0.9857400059700012, "chain_id": "30JNVC0OR9JDR3HPZC4VF3SWWEOHQK_1_1"}
{"score": 0.9890182018280029, "chain_id": "30JNVC0OR9JDR3HPZC4VF3SWWEOHQK_1_2"}
{"score": 0.9188352823257446, "chain_id": "30JNVC0OR9JDR3HPZC4VF3SWWEOHQK_1_3"}
{"score": 0.9876500964164734, "chain_id": "30JNVC0OR9JDR3HPZC4VF3SWWEOHQK_1_5"}
{"score": 0.9493808746337891, "chain_id": "30JNVC0OR9JDR3HPZC4VF3SWWEOHQK_1_6"}
{"score": 0.9621159434318542, "chain_id": "30JNVC0OR9JDR3HPZC4VF3SWWEOHQK_1_7"}
{"score": 0.4176539480686188, "chain_id": "30JNVC0OR9JDR3HPZC4VF3SWWEOHQK_1_8"}
{"score": 0.05784450098872185, "chain_id": "30JNVC0OR9JDR3HPZC4VF3SWWEOHQK_1_9"}
{"score": 0.6647170186042786, "chain_id": "30JNVC0OR9JDR3HPZC4VF3SWWEOHQK_1_10"}
{"score": 0.9812535643577576, "chain_id": "30JNVC0OR9JDR3HPZC4VF3SWWEOHQK_1_4"}
{"score": 0.19539232552051544, "chain_id": "3ZDAD0O1T1CN599WLKGCNURD4FVXT4_1_1"}
{"score": 0.49177882075309753, "chain_id": "3ZDAD0O1T1CN599WLKGCNURD4FVXT4_1_2"}
{"score": 0.8903793692588806, "chain_id": "3ZDAD0O1T1CN599WLKGCNURD4FVXT4_1_3"}
{"score": 0.23676255345344543, "chain_id": "3ZDAD0O1T1CN599WLKGCNURD4FVXT4_1_4"}
{"score": 0.06866344064474106, "chain_id": "3ZDAD0O1T1CN599WLKGCNURD4FVXT4_1_5"}
{"score": 0.08141748607158661, "chain_id": "3ZDAD0O1T1CN599WLKGCNURD4FVXT4_1_6"}
{"score": 0.5805388689041138, "chain_id": "3ZDAD0O1T1CN599WLKGCNURD4FVXT4_1_7"}
{"score": 0.04356677085161209, "chain_id": "3ZDAD0O1T1CN599WLKGCNURD4FVXT4_1_8"}
{"score": 0.10324828326702118, "chain_id": "3ZDAD0O1T1CN599WLKGCNURD4FVXT4_1_9"}
{"score": 0.4123810827732086, "chain_id": "3ZDAD0O1T1CN599WLKGCNURD4FVXT4_1_10"}
{"score": 0.9794966578483582, "chain_id": "3Z7EFSHGN9D6JS7LZYLMYKR9G13XC9_1_1"}
{"score": 0.9832748770713806, "chain_id": "3Z7EFSHGN9D6JS7LZYLMYKR9G13XC9_1_2"}
{"score": 0.9904983639717102, "chain_id": "3Z7EFSHGN9D6JS7LZYLMYKR9G13XC9_1_3"}
{"score": 0.8690275549888611, "chain_id": "3Z7EFSHGN9D6JS7LZYLMYKR9G13XC9_1_4"}
{"score": 0.8391369581222534, "chain_id": "3Z7EFSHGN9D6JS7LZYLMYKR9G13XC9_1_5"}
{"score": 0.8142072558403015, "chain_id": "3Z7EFSHGN9D6JS7LZYLMYKR9G13XC9_1_6"}
{"score": 0.9235972166061401, "chain_id": "3Z7EFSHGN9D6JS7LZYLMYKR9G13XC9_1_7"}
{"score": 0.11056344956159592, "chain_id": "3Z7EFSHGN9D6JS7LZYLMYKR9G13XC9_1_8"}
{"score": 0.805792510509491, "chain_id": "3Z7EFSHGN9D6JS7LZYLMYKR9G13XC9_1_9"}
{"score": 0.043136950582265854, "chain_id": "3Z7EFSHGN9D6JS7LZYLMYKR9G13XC9_1_10"}
{"score": 0.34221911430358887, "chain_id": "3RKNTXVS3MXRSBMDV9NQVE4NN4W4AK_1_1"}
{"score": 0.33193427324295044, "chain_id": "3RKNTXVS3MXRSBMDV9NQVE4NN4W4AK_1_2"}
{"score": 0.09051722288131714, "chain_id": "3RKNTXVS3MXRSBMDV9NQVE4NN4W4AK_1_3"}
{"score": 0.1518014669418335, "chain_id": "3RKNTXVS3MXRSBMDV9NQVE4NN4W4AK_1_4"}
{"score": 0.06620888411998749, "chain_id": "3RKNTXVS3MXRSBMDV9NQVE4NN4W4AK_1_5"}
{"score": 0.5832935571670532, "chain_id": "3RKNTXVS3MXRSBMDV9NQVE4NN4W4AK_1_6"}
{"score": 0.5358290076255798, "chain_id": "3RKNTXVS3MXRSBMDV9NQVE4NN4W4AK_1_7"}
{"score": 0.3200347423553467, "chain_id": "3RKNTXVS3MXRSBMDV9NQVE4NN4W4AK_1_8"}
{"score": 0.10280928760766983, "chain_id": "3RKNTXVS3MXRSBMDV9NQVE4NN4W4AK_1_9"}
{"score": 0.060931261628866196, "chain_id": "3RKNTXVS3MXRSBMDV9NQVE4NN4W4AK_1_10"}
{"score": 0.9612741470336914, "chain_id": "36NEMU28XFC43EEM2IJEZXIE1FNWMN_1_1"}
{"score": 0.9632535576820374, "chain_id": "36NEMU28XFC43EEM2IJEZXIE1FNWMN_1_2"}
{"score": 0.8787732720375061, "chain_id": "36NEMU28XFC43EEM2IJEZXIE1FNWMN_1_5"}
{"score": 0.9474585056304932, "chain_id": "36NEMU28XFC43EEM2IJEZXIE1FNWMN_1_8"}
{"score": 0.9381726384162903, "chain_id": "36NEMU28XFC43EEM2IJEZXIE1FNWMN_1_3"}
{"score": 0.9646422266960144, "chain_id": "36NEMU28XFC43EEM2IJEZXIE1FNWMN_1_4"}
{"score": 0.3680947422981262, "chain_id": "36NEMU28XFC43EEM2IJEZXIE1FNWMN_1_6"}
{"score": 0.8981550335884094, "chain_id": "36NEMU28XFC43EEM2IJEZXIE1FNWMN_1_7"}
{"score": 0.15946444869041443, "chain_id": "36NEMU28XFC43EEM2IJEZXIE1FNWMN_1_9"}
{"score": 0.48321670293807983, "chain_id": "36NEMU28XFC43EEM2IJEZXIE1FNWMN_1_10"}
{"score": 0.9704378247261047, "chain_id": "3LEIZ60CDJYTQP0XOWZGTF6CVIP9Z9_1_1"}
{"score": 0.8269341588020325, "chain_id": "3LEIZ60CDJYTQP0XOWZGTF6CVIP9Z9_1_3"}
{"score": 0.9513445496559143, "chain_id": "3LEIZ60CDJYTQP0XOWZGTF6CVIP9Z9_1_4"}
{"score": 0.321043998003006, "chain_id": "3LEIZ60CDJYTQP0XOWZGTF6CVIP9Z9_1_5"}
{"score": 0.4419352114200592, "chain_id": "3LEIZ60CDJYTQP0XOWZGTF6CVIP9Z9_1_6"}
{"score": 0.9851937890052795, "chain_id": "3LEIZ60CDJYTQP0XOWZGTF6CVIP9Z9_1_2"}
{"score": 0.9117695689201355, "chain_id": "3LEIZ60CDJYTQP0XOWZGTF6CVIP9Z9_1_7"}
{"score": 0.8604675531387329, "chain_id": "3LEIZ60CDJYTQP0XOWZGTF6CVIP9Z9_1_8"}
{"score": 0.3326892852783203, "chain_id": "3LEIZ60CDJYTQP0XOWZGTF6CVIP9Z9_1_9"}
{"score": 0.06523316353559494, "chain_id": "3LEIZ60CDJYTQP0XOWZGTF6CVIP9Z9_1_10"}
{"score": 0.954380989074707, "chain_id": "3QY7M81QH7LUNBDI9YYMS4RTWXFK75_1_1"}
{"score": 0.9882516264915466, "chain_id": "3QY7M81QH7LUNBDI9YYMS4RTWXFK75_1_2"}
{"score": 0.6865464448928833, "chain_id": "3QY7M81QH7LUNBDI9YYMS4RTWXFK75_1_4"}
{"score": 0.6562359929084778, "chain_id": "3QY7M81QH7LUNBDI9YYMS4RTWXFK75_1_9"}
{"score": 0.9291729927062988, "chain_id": "3QY7M81QH7LUNBDI9YYMS4RTWXFK75_1_10"}
{"score": 0.8352360129356384, "chain_id": "3QY7M81QH7LUNBDI9YYMS4RTWXFK75_1_3"}
{"score": 0.8657374382019043, "chain_id": "3QY7M81QH7LUNBDI9YYMS4RTWXFK75_1_5"}
{"score": 0.8604342341423035, "chain_id": "3QY7M81QH7LUNBDI9YYMS4RTWXFK75_1_6"}
{"score": 0.33183348178863525, "chain_id": "3QY7M81QH7LUNBDI9YYMS4RTWXFK75_1_7"}
{"score": 0.5118632912635803, "chain_id": "3QY7M81QH7LUNBDI9YYMS4RTWXFK75_1_8"}
{"score": 0.7718412280082703, "chain_id": "3LOTDFNYA7YYX4M5GVF147Y5IWNWF6_1_1"}
{"score": 0.8185635209083557, "chain_id": "3LOTDFNYA7YYX4M5GVF147Y5IWNWF6_1_2"}
{"score": 0.8132449388504028, "chain_id": "3LOTDFNYA7YYX4M5GVF147Y5IWNWF6_1_3"}
{"score": 0.9599770903587341, "chain_id": "3LOTDFNYA7YYX4M5GVF147Y5IWNWF6_1_4"}
{"score": 0.6767857670783997, "chain_id": "3LOTDFNYA7YYX4M5GVF147Y5IWNWF6_1_5"}
{"score": 0.2885403335094452, "chain_id": "3LOTDFNYA7YYX4M5GVF147Y5IWNWF6_1_6"}
{"score": 0.6886000633239746, "chain_id": "3LOTDFNYA7YYX4M5GVF147Y5IWNWF6_1_7"}
{"score": 0.8391515016555786, "chain_id": "3LOTDFNYA7YYX4M5GVF147Y5IWNWF6_1_8"}
{"score": 0.9066275954246521, "chain_id": "3LOTDFNYA7YYX4M5GVF147Y5IWNWF6_1_9"}
{"score": 0.2813873291015625, "chain_id": "3LOTDFNYA7YYX4M5GVF147Y5IWNWF6_1_10"}
{"score": 0.9652560353279114, "chain_id": "3E13VNJ1NNUP6U8SKFW1EEL31L21II_1_1"}
{"score": 0.9880496859550476, "chain_id": "3E13VNJ1NNUP6U8SKFW1EEL31L21II_1_3"}
{"score": 0.984809935092926, "chain_id": "3E13VNJ1NNUP6U8SKFW1EEL31L21II_1_4"}
{"score": 0.8747043609619141, "chain_id": "3E13VNJ1NNUP6U8SKFW1EEL31L21II_1_2"}
{"score": 0.25325754284858704, "chain_id": "3E13VNJ1NNUP6U8SKFW1EEL31L21II_1_5"}
{"score": 0.042227160185575485, "chain_id": "3E13VNJ1NNUP6U8SKFW1EEL31L21II_1_6"}
{"score": 0.06464722752571106, "chain_id": "3E13VNJ1NNUP6U8SKFW1EEL31L21II_1_7"}
{"score": 0.03290707990527153, "chain_id": "3E13VNJ1NNUP6U8SKFW1EEL31L21II_1_8"}
{"score": 0.018125023692846298, "chain_id": "3E13VNJ1NNUP6U8SKFW1EEL31L21II_1_9"}
{"score": 0.18882636725902557, "chain_id": "3E13VNJ1NNUP6U8SKFW1EEL31L21II_1_10"}
{"score": 0.9888367652893066, "chain_id": "3NS0A6KXC4785ZN5225QLWSZZSYGZF_1_1"}
{"score": 0.9880908727645874, "chain_id": "3NS0A6KXC4785ZN5225QLWSZZSYGZF_1_3"}
{"score": 0.9539201855659485, "chain_id": "3NS0A6KXC4785ZN5225QLWSZZSYGZF_1_5"}
{"score": 0.9484509825706482, "chain_id": "3NS0A6KXC4785ZN5225QLWSZZSYGZF_1_6"}
{"score": 0.05127923563122749, "chain_id": "3NS0A6KXC4785ZN5225QLWSZZSYGZF_1_9"}
{"score": 0.9884719848632812, "chain_id": "3NS0A6KXC4785ZN5225QLWSZZSYGZF_1_2"}
{"score": 0.9863117337226868, "chain_id": "3NS0A6KXC4785ZN5225QLWSZZSYGZF_1_4"}
{"score": 0.40286287665367126, "chain_id": "3NS0A6KXC4785ZN5225QLWSZZSYGZF_1_7"}
{"score": 0.2731170356273651, "chain_id": "3NS0A6KXC4785ZN5225QLWSZZSYGZF_1_8"}
{"score": 0.08821253478527069, "chain_id": "3NS0A6KXC4785ZN5225QLWSZZSYGZF_1_10"}
{"score": 0.9879999756813049, "chain_id": "3XLBSAQ9Z4BPC6C49Z1WFJF60VU7ZU_1_2"}
{"score": 0.8426192402839661, "chain_id": "3XLBSAQ9Z4BPC6C49Z1WFJF60VU7ZU_1_3"}
{"score": 0.9697818160057068, "chain_id": "3XLBSAQ9Z4BPC6C49Z1WFJF60VU7ZU_1_4"}
{"score": 0.32946670055389404, "chain_id": "3XLBSAQ9Z4BPC6C49Z1WFJF60VU7ZU_1_7"}
{"score": 0.931060791015625, "chain_id": "3XLBSAQ9Z4BPC6C49Z1WFJF60VU7ZU_1_8"}
{"score": 0.9604922533035278, "chain_id": "3XLBSAQ9Z4BPC6C49Z1WFJF60VU7ZU_1_1"}
{"score": 0.8605608940124512, "chain_id": "3XLBSAQ9Z4BPC6C49Z1WFJF60VU7ZU_1_5"}
{"score": 0.9019380211830139, "chain_id": "3XLBSAQ9Z4BPC6C49Z1WFJF60VU7ZU_1_6"}
{"score": 0.22348184883594513, "chain_id": "3XLBSAQ9Z4BPC6C49Z1WFJF60VU7ZU_1_9"}
{"score": 0.339922159910202, "chain_id": "3XLBSAQ9Z4BPC6C49Z1WFJF60VU7ZU_1_10"}
{"score": 0.09259466081857681, "chain_id": "3FIJLY1B6U38DVP44916CDQ9PXHFPN_1_1"}
{"score": 0.156132310628891, "chain_id": "3FIJLY1B6U38DVP44916CDQ9PXHFPN_1_2"}
{"score": 0.06627845764160156, "chain_id": "3FIJLY1B6U38DVP44916CDQ9PXHFPN_1_3"}
{"score": 0.0444047786295414, "chain_id": "3FIJLY1B6U38DVP44916CDQ9PXHFPN_1_4"}
{"score": 0.010425342246890068, "chain_id": "3FIJLY1B6U38DVP44916CDQ9PXHFPN_1_5"}
{"score": 0.013138039037585258, "chain_id": "3FIJLY1B6U38DVP44916CDQ9PXHFPN_1_6"}
{"score": 0.014913451857864857, "chain_id": "3FIJLY1B6U38DVP44916CDQ9PXHFPN_1_7"}
{"score": 0.01697780191898346, "chain_id": "3FIJLY1B6U38DVP44916CDQ9PXHFPN_1_8"}
{"score": 0.014801586046814919, "chain_id": "3FIJLY1B6U38DVP44916CDQ9PXHFPN_1_9"}
{"score": 0.01883416250348091, "chain_id": "3FIJLY1B6U38DVP44916CDQ9PXHFPN_1_10"}
{"score": 0.9848899245262146, "chain_id": "3Q5C1WP23M0DU6DDDVD7P5HYMIB51J_1_1"}
{"score": 0.9850229620933533, "chain_id": "3Q5C1WP23M0DU6DDDVD7P5HYMIB51J_1_3"}
{"score": 0.9547351002693176, "chain_id": "3Q5C1WP23M0DU6DDDVD7P5HYMIB51J_1_7"}
{"score": 0.9632133841514587, "chain_id": "3Q5C1WP23M0DU6DDDVD7P5HYMIB51J_1_2"}
{"score": 0.13844874501228333, "chain_id": "3Q5C1WP23M0DU6DDDVD7P5HYMIB51J_1_4"}
{"score": 0.9708094596862793, "chain_id": "3Q5C1WP23M0DU6DDDVD7P5HYMIB51J_1_5"}
{"score": 0.9695760607719421, "chain_id": "3Q5C1WP23M0DU6DDDVD7P5HYMIB51J_1_6"}
{"score": 0.9185247421264648, "chain_id": "3Q5C1WP23M0DU6DDDVD7P5HYMIB51J_1_8"}
{"score": 0.23420670628547668, "chain_id": "3Q5C1WP23M0DU6DDDVD7P5HYMIB51J_1_9"}
{"score": 0.9678165316581726, "chain_id": "3Q5C1WP23M0DU6DDDVD7P5HYMIB51J_1_10"}
{"score": 0.9591273069381714, "chain_id": "3KIBXJ1WD5T18H5HQKFO3QDOCKGKO8_1_3"}
{"score": 0.8859013915061951, "chain_id": "3KIBXJ1WD5T18H5HQKFO3QDOCKGKO8_1_6"}
{"score": 0.9704996943473816, "chain_id": "3KIBXJ1WD5T18H5HQKFO3QDOCKGKO8_1_7"}
{"score": 0.9842543601989746, "chain_id": "3KIBXJ1WD5T18H5HQKFO3QDOCKGKO8_1_8"}
{"score": 0.8483834862709045, "chain_id": "3KIBXJ1WD5T18H5HQKFO3QDOCKGKO8_1_1"}
{"score": 0.938819944858551, "chain_id": "3KIBXJ1WD5T18H5HQKFO3QDOCKGKO8_1_2"}
{"score": 0.982501745223999, "chain_id": "3KIBXJ1WD5T18H5HQKFO3QDOCKGKO8_1_4"}
{"score": 0.6988877058029175, "chain_id": "3KIBXJ1WD5T18H5HQKFO3QDOCKGKO8_1_5"}
{"score": 0.26879656314849854, "chain_id": "3KIBXJ1WD5T18H5HQKFO3QDOCKGKO8_1_9"}
{"score": 0.054290976375341415, "chain_id": "3KIBXJ1WD5T18H5HQKFO3QDOCKGKO8_1_10"}
{"score": 0.990425169467926, "chain_id": "3WJ1OXY92AFSBC9F7CD3CQKSTLYA8X_1_2"}
{"score": 0.9902119040489197, "chain_id": "3WJ1OXY92AFSBC9F7CD3CQKSTLYA8X_1_3"}
{"score": 0.9896932244300842, "chain_id": "3WJ1OXY92AFSBC9F7CD3CQKSTLYA8X_1_6"}
{"score": 0.9891008734703064, "chain_id": "3WJ1OXY92AFSBC9F7CD3CQKSTLYA8X_1_7"}
{"score": 0.9814749956130981, "chain_id": "3WJ1OXY92AFSBC9F7CD3CQKSTLYA8X_1_1"}
{"score": 0.9836246967315674, "chain_id": "3WJ1OXY92AFSBC9F7CD3CQKSTLYA8X_1_4"}
{"score": 0.9776521921157837, "chain_id": "3WJ1OXY92AFSBC9F7CD3CQKSTLYA8X_1_5"}
{"score": 0.6781275868415833, "chain_id": "3WJ1OXY92AFSBC9F7CD3CQKSTLYA8X_1_8"}
{"score": 0.9838674068450928, "chain_id": "3WJ1OXY92AFSBC9F7CD3CQKSTLYA8X_1_9"}
{"score": 0.7729448080062866, "chain_id": "3WJ1OXY92AFSBC9F7CD3CQKSTLYA8X_1_10"}
{"score": 0.08867519348859787, "chain_id": "3GDTJDAPVUATDDI44F38LHFSWZTM82_1_1"}
{"score": 0.10561669617891312, "chain_id": "3GDTJDAPVUATDDI44F38LHFSWZTM82_1_2"}
{"score": 0.01447928138077259, "chain_id": "3GDTJDAPVUATDDI44F38LHFSWZTM82_1_3"}
{"score": 0.13270029425621033, "chain_id": "3GDTJDAPVUATDDI44F38LHFSWZTM82_1_4"}
{"score": 0.017475826665759087, "chain_id": "3GDTJDAPVUATDDI44F38LHFSWZTM82_1_5"}
{"score": 0.03830772265791893, "chain_id": "3GDTJDAPVUATDDI44F38LHFSWZTM82_1_6"}
{"score": 0.019111022353172302, "chain_id": "3GDTJDAPVUATDDI44F38LHFSWZTM82_1_7"}
{"score": 0.035414278507232666, "chain_id": "3GDTJDAPVUATDDI44F38LHFSWZTM82_1_8"}
{"score": 0.37289556860923767, "chain_id": "3GDTJDAPVUATDDI44F38LHFSWZTM82_1_9"}
{"score": 0.12834665179252625, "chain_id": "3GDTJDAPVUATDDI44F38LHFSWZTM82_1_10"}
{"score": 0.9921073317527771, "chain_id": "3180JW2OT4BKPNTH3KJDT5DKQGV5JF_1_1"}
{"score": 0.992448091506958, "chain_id": "3180JW2OT4BKPNTH3KJDT5DKQGV5JF_1_2"}
{"score": 0.9931440353393555, "chain_id": "3180JW2OT4BKPNTH3KJDT5DKQGV5JF_1_4"}
{"score": 0.2801794409751892, "chain_id": "3180JW2OT4BKPNTH3KJDT5DKQGV5JF_1_3"}
{"score": 0.06256947666406631, "chain_id": "3180JW2OT4BKPNTH3KJDT5DKQGV5JF_1_5"}
{"score": 0.035209059715270996, "chain_id": "3180JW2OT4BKPNTH3KJDT5DKQGV5JF_1_6"}
{"score": 0.11896969377994537, "chain_id": "3180JW2OT4BKPNTH3KJDT5DKQGV5JF_1_7"}
{"score": 0.25745946168899536, "chain_id": "3180JW2OT4BKPNTH3KJDT5DKQGV5JF_1_8"}
{"score": 0.4330350160598755, "chain_id": "3180JW2OT4BKPNTH3KJDT5DKQGV5JF_1_9"}
{"score": 0.35098469257354736, "chain_id": "3180JW2OT4BKPNTH3KJDT5DKQGV5JF_1_10"}
{"score": 0.9896432161331177, "chain_id": "3C6FJU71TQSR5REVQLSOB4KO26KUYD_1_1"}
{"score": 0.9907855987548828, "chain_id": "3C6FJU71TQSR5REVQLSOB4KO26KUYD_1_2"}
{"score": 0.9906538128852844, "chain_id": "3C6FJU71TQSR5REVQLSOB4KO26KUYD_1_4"}
{"score": 0.990968644618988, "chain_id": "3C6FJU71TQSR5REVQLSOB4KO26KUYD_1_5"}
{"score": 0.9911378026008606, "chain_id": "3C6FJU71TQSR5REVQLSOB4KO26KUYD_1_6"}
{"score": 0.7581547498703003, "chain_id": "3C6FJU71TQSR5REVQLSOB4KO26KUYD_1_7"}
{"score": 0.9918658137321472, "chain_id": "3C6FJU71TQSR5REVQLSOB4KO26KUYD_1_9"}
{"score": 0.7094718813896179, "chain_id": "3C6FJU71TQSR5REVQLSOB4KO26KUYD_1_10"}
{"score": 0.10003132373094559, "chain_id": "3C6FJU71TQSR5REVQLSOB4KO26KUYD_1_3"}
{"score": 0.17360526323318481, "chain_id": "3C6FJU71TQSR5REVQLSOB4KO26KUYD_1_8"}
{"score": 0.9412992000579834, "chain_id": "3ON104KXQKVOZOPGWEJID31ESOD4WO_1_1"}
{"score": 0.9609060287475586, "chain_id": "3ON104KXQKVOZOPGWEJID31ESOD4WO_1_2"}
{"score": 0.9753727912902832, "chain_id": "3ON104KXQKVOZOPGWEJID31ESOD4WO_1_3"}
{"score": 0.9664108753204346, "chain_id": "3ON104KXQKVOZOPGWEJID31ESOD4WO_1_4"}
{"score": 0.3396645784378052, "chain_id": "3ON104KXQKVOZOPGWEJID31ESOD4WO_1_6"}
{"score": 0.33784979581832886, "chain_id": "3ON104KXQKVOZOPGWEJID31ESOD4WO_1_8"}
{"score": 0.9256299138069153, "chain_id": "3ON104KXQKVOZOPGWEJID31ESOD4WO_1_10"}
{"score": 0.8192030191421509, "chain_id": "3ON104KXQKVOZOPGWEJID31ESOD4WO_1_5"}
{"score": 0.0906720906496048, "chain_id": "3ON104KXQKVOZOPGWEJID31ESOD4WO_1_7"}
{"score": 0.07925237715244293, "chain_id": "3ON104KXQKVOZOPGWEJID31ESOD4WO_1_9"}
{"score": 0.09144491702318192, "chain_id": "3A0EX8ZRN8NC9S5PQUBT6ES0D0NYB4_1_1"}
{"score": 0.9750354290008545, "chain_id": "3A0EX8ZRN8NC9S5PQUBT6ES0D0NYB4_1_2"}
{"score": 0.04952762648463249, "chain_id": "3A0EX8ZRN8NC9S5PQUBT6ES0D0NYB4_1_3"}
{"score": 0.4868656396865845, "chain_id": "3A0EX8ZRN8NC9S5PQUBT6ES0D0NYB4_1_4"}
{"score": 0.8052114248275757, "chain_id": "3A0EX8ZRN8NC9S5PQUBT6ES0D0NYB4_1_5"}
{"score": 0.03986121341586113, "chain_id": "3A0EX8ZRN8NC9S5PQUBT6ES0D0NYB4_1_6"}
{"score": 0.030269652605056763, "chain_id": "3A0EX8ZRN8NC9S5PQUBT6ES0D0NYB4_1_7"}
{"score": 0.040008798241615295, "chain_id": "3A0EX8ZRN8NC9S5PQUBT6ES0D0NYB4_1_8"}
{"score": 0.04509450122714043, "chain_id": "3A0EX8ZRN8NC9S5PQUBT6ES0D0NYB4_1_9"}
{"score": 0.12125636637210846, "chain_id": "3A0EX8ZRN8NC9S5PQUBT6ES0D0NYB4_1_10"}
{"score": 0.9363079071044922, "chain_id": "34X6J5FLPTX9I9CFNC7GRG8BMW5JQX_1_1"}
{"score": 0.26790112257003784, "chain_id": "34X6J5FLPTX9I9CFNC7GRG8BMW5JQX_1_3"}
{"score": 0.8296130895614624, "chain_id": "34X6J5FLPTX9I9CFNC7GRG8BMW5JQX_1_4"}
{"score": 0.46699240803718567, "chain_id": "34X6J5FLPTX9I9CFNC7GRG8BMW5JQX_1_5"}
{"score": 0.6752709150314331, "chain_id": "34X6J5FLPTX9I9CFNC7GRG8BMW5JQX_1_7"}
{"score": 0.6292642951011658, "chain_id": "34X6J5FLPTX9I9CFNC7GRG8BMW5JQX_1_9"}
{"score": 0.8357582688331604, "chain_id": "34X6J5FLPTX9I9CFNC7GRG8BMW5JQX_1_2"}
{"score": 0.3798828423023224, "chain_id": "34X6J5FLPTX9I9CFNC7GRG8BMW5JQX_1_6"}
{"score": 0.2746630609035492, "chain_id": "34X6J5FLPTX9I9CFNC7GRG8BMW5JQX_1_8"}
{"score": 0.33421748876571655, "chain_id": "34X6J5FLPTX9I9CFNC7GRG8BMW5JQX_1_10"}
{"score": 0.7310047149658203, "chain_id": "3DYGAII7PL754KFDIPC0OCUNVWDQP8_1_1"}
{"score": 0.7486575841903687, "chain_id": "3DYGAII7PL754KFDIPC0OCUNVWDQP8_1_2"}
{"score": 0.7868125438690186, "chain_id": "3DYGAII7PL754KFDIPC0OCUNVWDQP8_1_3"}
{"score": 0.40660619735717773, "chain_id": "3DYGAII7PL754KFDIPC0OCUNVWDQP8_1_4"}
{"score": 0.014202028512954712, "chain_id": "3DYGAII7PL754KFDIPC0OCUNVWDQP8_1_5"}
{"score": 0.04904995113611221, "chain_id": "3DYGAII7PL754KFDIPC0OCUNVWDQP8_1_6"}
{"score": 0.07046613097190857, "chain_id": "3DYGAII7PL754KFDIPC0OCUNVWDQP8_1_7"}
{"score": 0.07825659215450287, "chain_id": "3DYGAII7PL754KFDIPC0OCUNVWDQP8_1_8"}
{"score": 0.03001302108168602, "chain_id": "3DYGAII7PL754KFDIPC0OCUNVWDQP8_1_9"}
{"score": 0.012553771026432514, "chain_id": "3DYGAII7PL754KFDIPC0OCUNVWDQP8_1_10"}
{"score": 0.9154567718505859, "chain_id": "317HQ483I7RSK1FHP2UZBLY648IINQ_1_1"}
{"score": 0.5777605772018433, "chain_id": "317HQ483I7RSK1FHP2UZBLY648IINQ_1_3"}
{"score": 0.9831798076629639, "chain_id": "317HQ483I7RSK1FHP2UZBLY648IINQ_1_4"}
{"score": 0.984449565410614, "chain_id": "317HQ483I7RSK1FHP2UZBLY648IINQ_1_7"}
{"score": 0.8624579310417175, "chain_id": "317HQ483I7RSK1FHP2UZBLY648IINQ_1_2"}
{"score": 0.8356350660324097, "chain_id": "317HQ483I7RSK1FHP2UZBLY648IINQ_1_5"}
{"score": 0.62261962890625, "chain_id": "317HQ483I7RSK1FHP2UZBLY648IINQ_1_6"}
{"score": 0.20439565181732178, "chain_id": "317HQ483I7RSK1FHP2UZBLY648IINQ_1_8"}
{"score": 0.14658012986183167, "chain_id": "317HQ483I7RSK1FHP2UZBLY648IINQ_1_9"}
{"score": 0.9492968916893005, "chain_id": "317HQ483I7RSK1FHP2UZBLY648IINQ_1_10"}
{"score": 0.9581796526908875, "chain_id": "33JKGHPFYCTEGK58AHSR3E5NBF9NM4_1_4"}
{"score": 0.07774543017148972, "chain_id": "33JKGHPFYCTEGK58AHSR3E5NBF9NM4_1_9"}
{"score": 0.13334105908870697, "chain_id": "33JKGHPFYCTEGK58AHSR3E5NBF9NM4_1_1"}
{"score": 0.10442887991666794, "chain_id": "33JKGHPFYCTEGK58AHSR3E5NBF9NM4_1_2"}
{"score": 0.22259287536144257, "chain_id": "33JKGHPFYCTEGK58AHSR3E5NBF9NM4_1_3"}
{"score": 0.11935234069824219, "chain_id": "33JKGHPFYCTEGK58AHSR3E5NBF9NM4_1_5"}
{"score": 0.11763257533311844, "chain_id": "33JKGHPFYCTEGK58AHSR3E5NBF9NM4_1_6"}
{"score": 0.10220145434141159, "chain_id": "33JKGHPFYCTEGK58AHSR3E5NBF9NM4_1_7"}
{"score": 0.1041022539138794, "chain_id": "33JKGHPFYCTEGK58AHSR3E5NBF9NM4_1_8"}
{"score": 0.8557587265968323, "chain_id": "33JKGHPFYCTEGK58AHSR3E5NBF9NM4_1_10"}
{"score": 0.026636319234967232, "chain_id": "3R9WASFE2ZF2RZRARIZ83BSNLXDFZL_1_3"}
{"score": 0.06297542154788971, "chain_id": "3R9WASFE2ZF2RZRARIZ83BSNLXDFZL_1_1"}
{"score": 0.024925656616687775, "chain_id": "3R9WASFE2ZF2RZRARIZ83BSNLXDFZL_1_2"}
{"score": 0.029428578913211823, "chain_id": "3R9WASFE2ZF2RZRARIZ83BSNLXDFZL_1_4"}
{"score": 0.055048611015081406, "chain_id": "3R9WASFE2ZF2RZRARIZ83BSNLXDFZL_1_5"}
{"score": 0.12813352048397064, "chain_id": "3R9WASFE2ZF2RZRARIZ83BSNLXDFZL_1_6"}
{"score": 0.13603425025939941, "chain_id": "3R9WASFE2ZF2RZRARIZ83BSNLXDFZL_1_7"}
{"score": 0.03279795125126839, "chain_id": "3R9WASFE2ZF2RZRARIZ83BSNLXDFZL_1_8"}
{"score": 0.031302228569984436, "chain_id": "3R9WASFE2ZF2RZRARIZ83BSNLXDFZL_1_9"}
{"score": 0.029330523684620857, "chain_id": "3R9WASFE2ZF2RZRARIZ83BSNLXDFZL_1_10"}
{"score": 0.07985483855009079, "chain_id": "3Z7EFSHGN9D6JS7LZYLMYKR9GWHCXS_1_1"}
{"score": 0.07590900361537933, "chain_id": "3Z7EFSHGN9D6JS7LZYLMYKR9GWHCXS_1_2"}
{"score": 0.04685087502002716, "chain_id": "3Z7EFSHGN9D6JS7LZYLMYKR9GWHCXS_1_3"}
{"score": 0.09594999998807907, "chain_id": "3Z7EFSHGN9D6JS7LZYLMYKR9GWHCXS_1_4"}
{"score": 0.25676029920578003, "chain_id": "3Z7EFSHGN9D6JS7LZYLMYKR9GWHCXS_1_5"}
{"score": 0.06144321337342262, "chain_id": "3Z7EFSHGN9D6JS7LZYLMYKR9GWHCXS_1_6"}
{"score": 0.04512657970190048, "chain_id": "3Z7EFSHGN9D6JS7LZYLMYKR9GWHCXS_1_7"}
{"score": 0.04145573824644089, "chain_id": "3Z7EFSHGN9D6JS7LZYLMYKR9GWHCXS_1_8"}
{"score": 0.03368903324007988, "chain_id": "3Z7EFSHGN9D6JS7LZYLMYKR9GWHCXS_1_9"}
{"score": 0.11083556711673737, "chain_id": "3Z7EFSHGN9D6JS7LZYLMYKR9GWHCXS_1_10"}
{"score": 0.03610510379076004, "chain_id": "3URFVVM165HRAHO0M7U7PBTQWZGZUN_1_7"}
{"score": 0.42121556401252747, "chain_id": "3URFVVM165HRAHO0M7U7PBTQWZGZUN_1_1"}
{"score": 0.32749149203300476, "chain_id": "3URFVVM165HRAHO0M7U7PBTQWZGZUN_1_2"}
{"score": 0.09690182656049728, "chain_id": "3URFVVM165HRAHO0M7U7PBTQWZGZUN_1_3"}
{"score": 0.5031583309173584, "chain_id": "3URFVVM165HRAHO0M7U7PBTQWZGZUN_1_4"}
{"score": 0.18831652402877808, "chain_id": "3URFVVM165HRAHO0M7U7PBTQWZGZUN_1_5"}
{"score": 0.019123394042253494, "chain_id": "3URFVVM165HRAHO0M7U7PBTQWZGZUN_1_6"}
{"score": 0.04683178290724754, "chain_id": "3URFVVM165HRAHO0M7U7PBTQWZGZUN_1_8"}
{"score": 0.04537514969706535, "chain_id": "3URFVVM165HRAHO0M7U7PBTQWZGZUN_1_9"}
{"score": 0.041998617351055145, "chain_id": "3URFVVM165HRAHO0M7U7PBTQWZGZUN_1_10"}
{"score": 0.7412227988243103, "chain_id": "3GU1KF0O4I0I0EDOZ7FATNZOXHHPBU_1_1"}
{"score": 0.07638975232839584, "chain_id": "3GU1KF0O4I0I0EDOZ7FATNZOXHHPBU_1_3"}
{"score": 0.15743526816368103, "chain_id": "3GU1KF0O4I0I0EDOZ7FATNZOXHHPBU_1_5"}
{"score": 0.8888723254203796, "chain_id": "3GU1KF0O4I0I0EDOZ7FATNZOXHHPBU_1_2"}
{"score": 0.05812326818704605, "chain_id": "3GU1KF0O4I0I0EDOZ7FATNZOXHHPBU_1_4"}
{"score": 0.12587207555770874, "chain_id": "3GU1KF0O4I0I0EDOZ7FATNZOXHHPBU_1_6"}
{"score": 0.46611008048057556, "chain_id": "3GU1KF0O4I0I0EDOZ7FATNZOXHHPBU_1_7"}
{"score": 0.20894096791744232, "chain_id": "3GU1KF0O4I0I0EDOZ7FATNZOXHHPBU_1_8"}
{"score": 0.04740026593208313, "chain_id": "3GU1KF0O4I0I0EDOZ7FATNZOXHHPBU_1_9"}
{"score": 0.14934270083904266, "chain_id": "3GU1KF0O4I0I0EDOZ7FATNZOXHHPBU_1_10"}
{"score": 0.4835258722305298, "chain_id": "3LBXNTKX0RU4LU0INEBVWUQ1BG29X7_1_2"}
{"score": 0.42739132046699524, "chain_id": "3LBXNTKX0RU4LU0INEBVWUQ1BG29X7_1_3"}
{"score": 0.33917826414108276, "chain_id": "3LBXNTKX0RU4LU0INEBVWUQ1BG29X7_1_1"}
{"score": 0.08536523580551147, "chain_id": "3LBXNTKX0RU4LU0INEBVWUQ1BG29X7_1_4"}
{"score": 0.7020468711853027, "chain_id": "3LBXNTKX0RU4LU0INEBVWUQ1BG29X7_1_5"}
{"score": 0.6521119475364685, "chain_id": "3LBXNTKX0RU4LU0INEBVWUQ1BG29X7_1_6"}
{"score": 0.1232077106833458, "chain_id": "3LBXNTKX0RU4LU0INEBVWUQ1BG29X7_1_7"}
{"score": 0.023895233869552612, "chain_id": "3LBXNTKX0RU4LU0INEBVWUQ1BG29X7_1_8"}
{"score": 0.017075607553124428, "chain_id": "3LBXNTKX0RU4LU0INEBVWUQ1BG29X7_1_9"}
{"score": 0.02194664254784584, "chain_id": "3LBXNTKX0RU4LU0INEBVWUQ1BG29X7_1_10"}
{"score": 0.23827876150608063, "chain_id": "3MB8LZR5BFST2W2KDSZWB99UGJXKLK_1_5"}
{"score": 0.12758848071098328, "chain_id": "3MB8LZR5BFST2W2KDSZWB99UGJXKLK_1_10"}
{"score": 0.027926383540034294, "chain_id": "3MB8LZR5BFST2W2KDSZWB99UGJXKLK_1_1"}
{"score": 0.023714285343885422, "chain_id": "3MB8LZR5BFST2W2KDSZWB99UGJXKLK_1_2"}
{"score": 0.020623667165637016, "chain_id": "3MB8LZR5BFST2W2KDSZWB99UGJXKLK_1_3"}
{"score": 0.03776988759636879, "chain_id": "3MB8LZR5BFST2W2KDSZWB99UGJXKLK_1_4"}
{"score": 0.12347623705863953, "chain_id": "3MB8LZR5BFST2W2KDSZWB99UGJXKLK_1_6"}
{"score": 0.2568586468696594, "chain_id": "3MB8LZR5BFST2W2KDSZWB99UGJXKLK_1_7"}
{"score": 0.16518941521644592, "chain_id": "3MB8LZR5BFST2W2KDSZWB99UGJXKLK_1_8"}
{"score": 0.13972581923007965, "chain_id": "3MB8LZR5BFST2W2KDSZWB99UGJXKLK_1_9"}
{"score": 0.11185585707426071, "chain_id": "3HPZF4IVNMSVJXXV4U7OHYYIJ5UYCA_1_1"}
{"score": 0.05529968440532684, "chain_id": "3HPZF4IVNMSVJXXV4U7OHYYIJ5UYCA_1_2"}
{"score": 0.13319586217403412, "chain_id": "3HPZF4IVNMSVJXXV4U7OHYYIJ5UYCA_1_3"}
{"score": 0.1283125877380371, "chain_id": "3HPZF4IVNMSVJXXV4U7OHYYIJ5UYCA_1_4"}
{"score": 0.042667824774980545, "chain_id": "3HPZF4IVNMSVJXXV4U7OHYYIJ5UYCA_1_5"}
{"score": 0.03394974023103714, "chain_id": "3HPZF4IVNMSVJXXV4U7OHYYIJ5UYCA_1_6"}
{"score": 0.20345425605773926, "chain_id": "3HPZF4IVNMSVJXXV4U7OHYYIJ5UYCA_1_7"}
{"score": 0.10968180745840073, "chain_id": "3HPZF4IVNMSVJXXV4U7OHYYIJ5UYCA_1_8"}
{"score": 0.026705363765358925, "chain_id": "3HPZF4IVNMSVJXXV4U7OHYYIJ5UYCA_1_9"}
{"score": 0.022960515692830086, "chain_id": "3HPZF4IVNMSVJXXV4U7OHYYIJ5UYCA_1_10"}
{"score": 0.9857215881347656, "chain_id": "373ERPL3YO738DNKCLAKYC5P44QTRX_1_2"}
{"score": 0.9904013276100159, "chain_id": "373ERPL3YO738DNKCLAKYC5P44QTRX_1_3"}
{"score": 0.9903594255447388, "chain_id": "373ERPL3YO738DNKCLAKYC5P44QTRX_1_1"}
{"score": 0.974920392036438, "chain_id": "373ERPL3YO738DNKCLAKYC5P44QTRX_1_4"}
{"score": 0.025011621415615082, "chain_id": "373ERPL3YO738DNKCLAKYC5P44QTRX_1_5"}
{"score": 0.02197246626019478, "chain_id": "373ERPL3YO738DNKCLAKYC5P44QTRX_1_6"}
{"score": 0.166298970580101, "chain_id": "373ERPL3YO738DNKCLAKYC5P44QTRX_1_7"}
{"score": 0.023742984980344772, "chain_id": "373ERPL3YO738DNKCLAKYC5P44QTRX_1_8"}
{"score": 0.024103593081235886, "chain_id": "373ERPL3YO738DNKCLAKYC5P44QTRX_1_9"}
{"score": 0.0638149306178093, "chain_id": "373ERPL3YO738DNKCLAKYC5P44QTRX_1_10"}
{"score": 0.9794880747795105, "chain_id": "3OS4RQUCR9E691OUL4J5HTLKUXJBF9_1_1"}
{"score": 0.970670223236084, "chain_id": "3OS4RQUCR9E691OUL4J5HTLKUXJBF9_1_3"}
{"score": 0.84765625, "chain_id": "3OS4RQUCR9E691OUL4J5HTLKUXJBF9_1_5"}
{"score": 0.8508180975914001, "chain_id": "3OS4RQUCR9E691OUL4J5HTLKUXJBF9_1_7"}
{"score": 0.7403894066810608, "chain_id": "3OS4RQUCR9E691OUL4J5HTLKUXJBF9_1_9"}
{"score": 0.5528419613838196, "chain_id": "3OS4RQUCR9E691OUL4J5HTLKUXJBF9_1_10"}
{"score": 0.9023457169532776, "chain_id": "3OS4RQUCR9E691OUL4J5HTLKUXJBF9_1_2"}
{"score": 0.8994147777557373, "chain_id": "3OS4RQUCR9E691OUL4J5HTLKUXJBF9_1_4"}
{"score": 0.5999276041984558, "chain_id": "3OS4RQUCR9E691OUL4J5HTLKUXJBF9_1_6"}
{"score": 0.7803041338920593, "chain_id": "3OS4RQUCR9E691OUL4J5HTLKUXJBF9_1_8"}
{"score": 0.8087719082832336, "chain_id": "3R5F3LQFV2JWXC43QLIYQ511D69ZOC_1_2"}
{"score": 0.8440436720848083, "chain_id": "3R5F3LQFV2JWXC43QLIYQ511D69ZOC_1_4"}
{"score": 0.8017367124557495, "chain_id": "3R5F3LQFV2JWXC43QLIYQ511D69ZOC_1_7"}
{"score": 0.8196654915809631, "chain_id": "3R5F3LQFV2JWXC43QLIYQ511D69ZOC_1_10"}
{"score": 0.2048327922821045, "chain_id": "3R5F3LQFV2JWXC43QLIYQ511D69ZOC_1_1"}
{"score": 0.5364073514938354, "chain_id": "3R5F3LQFV2JWXC43QLIYQ511D69ZOC_1_3"}
{"score": 0.16587093472480774, "chain_id": "3R5F3LQFV2JWXC43QLIYQ511D69ZOC_1_5"}
{"score": 0.19284985959529877, "chain_id": "3R5F3LQFV2JWXC43QLIYQ511D69ZOC_1_6"}
{"score": 0.35652226209640503, "chain_id": "3R5F3LQFV2JWXC43QLIYQ511D69ZOC_1_8"}
{"score": 0.7451574206352234, "chain_id": "3R5F3LQFV2JWXC43QLIYQ511D69ZOC_1_9"}
{"score": 0.9702250957489014, "chain_id": "3U84XHCDICCSTJUL713PC7VWWSW4ZK_1_1"}
{"score": 0.9689123034477234, "chain_id": "3U84XHCDICCSTJUL713PC7VWWSW4ZK_1_2"}
{"score": 0.959805965423584, "chain_id": "3U84XHCDICCSTJUL713PC7VWWSW4ZK_1_4"}
{"score": 0.4105139970779419, "chain_id": "3U84XHCDICCSTJUL713PC7VWWSW4ZK_1_3"}
{"score": 0.058928728103637695, "chain_id": "3U84XHCDICCSTJUL713PC7VWWSW4ZK_1_5"}
{"score": 0.03955565765500069, "chain_id": "3U84XHCDICCSTJUL713PC7VWWSW4ZK_1_6"}
{"score": 0.03427266702055931, "chain_id": "3U84XHCDICCSTJUL713PC7VWWSW4ZK_1_7"}
{"score": 0.025522585958242416, "chain_id": "3U84XHCDICCSTJUL713PC7VWWSW4ZK_1_8"}
{"score": 0.027898387983441353, "chain_id": "3U84XHCDICCSTJUL713PC7VWWSW4ZK_1_9"}
{"score": 0.09743047505617142, "chain_id": "3U84XHCDICCSTJUL713PC7VWWSW4ZK_1_10"}
{"score": 0.9297007322311401, "chain_id": "3X4JMASXCM8FCX94IM0KEMYGPMN0B6_1_1"}
{"score": 0.4299171268939972, "chain_id": "3X4JMASXCM8FCX94IM0KEMYGPMN0B6_1_2"}
{"score": 0.8105069398880005, "chain_id": "3X4JMASXCM8FCX94IM0KEMYGPMN0B6_1_3"}
{"score": 0.9518170356750488, "chain_id": "3X4JMASXCM8FCX94IM0KEMYGPMN0B6_1_7"}
{"score": 0.09380880743265152, "chain_id": "3X4JMASXCM8FCX94IM0KEMYGPMN0B6_1_8"}
{"score": 0.2280242145061493, "chain_id": "3X4JMASXCM8FCX94IM0KEMYGPMN0B6_1_4"}
{"score": 0.9808711409568787, "chain_id": "3X4JMASXCM8FCX94IM0KEMYGPMN0B6_1_5"}
{"score": 0.11666527390480042, "chain_id": "3X4JMASXCM8FCX94IM0KEMYGPMN0B6_1_6"}
{"score": 0.10768789798021317, "chain_id": "3X4JMASXCM8FCX94IM0KEMYGPMN0B6_1_9"}
{"score": 0.24888253211975098, "chain_id": "3X4JMASXCM8FCX94IM0KEMYGPMN0B6_1_10"}
{"score": 0.9854065179824829, "chain_id": "32SVAV9L3F86AF39VVI7L9CHBKH3AD_1_1"}
{"score": 0.983958899974823, "chain_id": "32SVAV9L3F86AF39VVI7L9CHBKH3AD_1_2"}
{"score": 0.9625998139381409, "chain_id": "32SVAV9L3F86AF39VVI7L9CHBKH3AD_1_3"}
{"score": 0.9729174375534058, "chain_id": "32SVAV9L3F86AF39VVI7L9CHBKH3AD_1_5"}
{"score": 0.9713378548622131, "chain_id": "32SVAV9L3F86AF39VVI7L9CHBKH3AD_1_8"}
{"score": 0.9729985594749451, "chain_id": "32SVAV9L3F86AF39VVI7L9CHBKH3AD_1_9"}
{"score": 0.9543339610099792, "chain_id": "32SVAV9L3F86AF39VVI7L9CHBKH3AD_1_10"}
{"score": 0.9761849045753479, "chain_id": "32SVAV9L3F86AF39VVI7L9CHBKH3AD_1_4"}
{"score": 0.9527711868286133, "chain_id": "32SVAV9L3F86AF39VVI7L9CHBKH3AD_1_6"}
{"score": 0.18908827006816864, "chain_id": "32SVAV9L3F86AF39VVI7L9CHBKH3AD_1_7"}
{"score": 0.9921497106552124, "chain_id": "36WLNQG78Z9E3NOYQTZZZB0KV3ABEO_1_3"}
{"score": 0.9920905828475952, "chain_id": "36WLNQG78Z9E3NOYQTZZZB0KV3ABEO_1_4"}
{"score": 0.2978891134262085, "chain_id": "36WLNQG78Z9E3NOYQTZZZB0KV3ABEO_1_5"}
{"score": 0.992635190486908, "chain_id": "36WLNQG78Z9E3NOYQTZZZB0KV3ABEO_1_6"}
{"score": 0.992784857749939, "chain_id": "36WLNQG78Z9E3NOYQTZZZB0KV3ABEO_1_8"}
{"score": 0.8409917950630188, "chain_id": "36WLNQG78Z9E3NOYQTZZZB0KV3ABEO_1_9"}
{"score": 0.9917328357696533, "chain_id": "36WLNQG78Z9E3NOYQTZZZB0KV3ABEO_1_1"}
{"score": 0.9917978048324585, "chain_id": "36WLNQG78Z9E3NOYQTZZZB0KV3ABEO_1_2"}
{"score": 0.1796063929796219, "chain_id": "36WLNQG78Z9E3NOYQTZZZB0KV3ABEO_1_7"}
{"score": 0.7484343647956848, "chain_id": "36WLNQG78Z9E3NOYQTZZZB0KV3ABEO_1_10"}
{"score": 0.31491169333457947, "chain_id": "3WRFBPLXRANDUYXY4ZNC7FWHL6EN3K_1_3"}
{"score": 0.9448958039283752, "chain_id": "3WRFBPLXRANDUYXY4ZNC7FWHL6EN3K_1_4"}
{"score": 0.5572637915611267, "chain_id": "3WRFBPLXRANDUYXY4ZNC7FWHL6EN3K_1_10"}
{"score": 0.7850368022918701, "chain_id": "3WRFBPLXRANDUYXY4ZNC7FWHL6EN3K_1_1"}
{"score": 0.3837791383266449, "chain_id": "3WRFBPLXRANDUYXY4ZNC7FWHL6EN3K_1_2"}
{"score": 0.943192183971405, "chain_id": "3WRFBPLXRANDUYXY4ZNC7FWHL6EN3K_1_5"}
{"score": 0.9296678900718689, "chain_id": "3WRFBPLXRANDUYXY4ZNC7FWHL6EN3K_1_6"}
{"score": 0.7373611330986023, "chain_id": "3WRFBPLXRANDUYXY4ZNC7FWHL6EN3K_1_7"}
{"score": 0.9249341487884521, "chain_id": "3WRFBPLXRANDUYXY4ZNC7FWHL6EN3K_1_8"}
{"score": 0.19077908992767334, "chain_id": "3WRFBPLXRANDUYXY4ZNC7FWHL6EN3K_1_9"}
{"score": 0.9901954531669617, "chain_id": "3LOTDFNYA7YYX4M5GVF147Y5H9AFW1_1_1"}
{"score": 0.9707315564155579, "chain_id": "3LOTDFNYA7YYX4M5GVF147Y5H9AFW1_1_2"}
{"score": 0.9901623725891113, "chain_id": "3LOTDFNYA7YYX4M5GVF147Y5H9AFW1_1_3"}
{"score": 0.9495334029197693, "chain_id": "3LOTDFNYA7YYX4M5GVF147Y5H9AFW1_1_4"}
{"score": 0.9809195399284363, "chain_id": "3LOTDFNYA7YYX4M5GVF147Y5H9AFW1_1_7"}
{"score": 0.9843897223472595, "chain_id": "3LOTDFNYA7YYX4M5GVF147Y5H9AFW1_1_5"}
{"score": 0.9842574596405029, "chain_id": "3LOTDFNYA7YYX4M5GVF147Y5H9AFW1_1_6"}
{"score": 0.980637788772583, "chain_id": "3LOTDFNYA7YYX4M5GVF147Y5H9AFW1_1_8"}
{"score": 0.8338428139686584, "chain_id": "3LOTDFNYA7YYX4M5GVF147Y5H9AFW1_1_9"}
{"score": 0.9083850383758545, "chain_id": "3LOTDFNYA7YYX4M5GVF147Y5H9AFW1_1_10"}
{"score": 0.9910503625869751, "chain_id": "3LYA37P8IQMHPNG8MFA2X6DPFD8BK0_1_2"}
{"score": 0.9874534010887146, "chain_id": "3LYA37P8IQMHPNG8MFA2X6DPFD8BK0_1_3"}
{"score": 0.9916884303092957, "chain_id": "3LYA37P8IQMHPNG8MFA2X6DPFD8BK0_1_4"}
{"score": 0.9594663977622986, "chain_id": "3LYA37P8IQMHPNG8MFA2X6DPFD8BK0_1_5"}
{"score": 0.9168974161148071, "chain_id": "3LYA37P8IQMHPNG8MFA2X6DPFD8BK0_1_8"}
{"score": 0.862824022769928, "chain_id": "3LYA37P8IQMHPNG8MFA2X6DPFD8BK0_1_9"}
{"score": 0.9152292609214783, "chain_id": "3LYA37P8IQMHPNG8MFA2X6DPFD8BK0_1_10"}
{"score": 0.9885393977165222, "chain_id": "3LYA37P8IQMHPNG8MFA2X6DPFD8BK0_1_1"}
{"score": 0.9371470212936401, "chain_id": "3LYA37P8IQMHPNG8MFA2X6DPFD8BK0_1_6"}
{"score": 0.9641270041465759, "chain_id": "3LYA37P8IQMHPNG8MFA2X6DPFD8BK0_1_7"}
{"score": 0.748113214969635, "chain_id": "30ZX6P7VF8USQQAUL1HFVYA87TQ2J8_1_3"}
{"score": 0.8222833871841431, "chain_id": "30ZX6P7VF8USQQAUL1HFVYA87TQ2J8_1_7"}
{"score": 0.4304685890674591, "chain_id": "30ZX6P7VF8USQQAUL1HFVYA87TQ2J8_1_8"}
{"score": 0.5992030501365662, "chain_id": "30ZX6P7VF8USQQAUL1HFVYA87TQ2J8_1_1"}
{"score": 0.3146055042743683, "chain_id": "30ZX6P7VF8USQQAUL1HFVYA87TQ2J8_1_2"}
{"score": 0.5548149347305298, "chain_id": "30ZX6P7VF8USQQAUL1HFVYA87TQ2J8_1_4"}
{"score": 0.1073208674788475, "chain_id": "30ZX6P7VF8USQQAUL1HFVYA87TQ2J8_1_5"}
{"score": 0.28291815519332886, "chain_id": "30ZX6P7VF8USQQAUL1HFVYA87TQ2J8_1_6"}
{"score": 0.05751821771264076, "chain_id": "30ZX6P7VF8USQQAUL1HFVYA87TQ2J8_1_9"}
{"score": 0.5337343215942383, "chain_id": "30ZX6P7VF8USQQAUL1HFVYA87TQ2J8_1_10"}
{"score": 0.9854065179824829, "chain_id": "3TPZPLC3M0BDXJ9BKE04B41CM8VP39_1_1"}
{"score": 0.983958899974823, "chain_id": "3TPZPLC3M0BDXJ9BKE04B41CM8VP39_1_2"}
{"score": 0.9761849045753479, "chain_id": "3TPZPLC3M0BDXJ9BKE04B41CM8VP39_1_4"}
{"score": 0.9713378548622131, "chain_id": "3TPZPLC3M0BDXJ9BKE04B41CM8VP39_1_8"}
{"score": 0.9729985594749451, "chain_id": "3TPZPLC3M0BDXJ9BKE04B41CM8VP39_1_9"}
{"score": 0.9543339610099792, "chain_id": "3TPZPLC3M0BDXJ9BKE04B41CM8VP39_1_10"}
{"score": 0.9625998139381409, "chain_id": "3TPZPLC3M0BDXJ9BKE04B41CM8VP39_1_3"}
{"score": 0.9729174375534058, "chain_id": "3TPZPLC3M0BDXJ9BKE04B41CM8VP39_1_5"}
{"score": 0.9527711868286133, "chain_id": "3TPZPLC3M0BDXJ9BKE04B41CM8VP39_1_6"}
{"score": 0.18908827006816864, "chain_id": "3TPZPLC3M0BDXJ9BKE04B41CM8VP39_1_7"}
{"score": 0.9847446084022522, "chain_id": "3M0BCWMB8VV8KNAPBTT5LH7K3XFBWL_1_1"}
{"score": 0.9537423849105835, "chain_id": "3M0BCWMB8VV8KNAPBTT5LH7K3XFBWL_1_2"}
{"score": 0.983352780342102, "chain_id": "3M0BCWMB8VV8KNAPBTT5LH7K3XFBWL_1_3"}
{"score": 0.16601616144180298, "chain_id": "3M0BCWMB8VV8KNAPBTT5LH7K3XFBWL_1_5"}
{"score": 0.9640381932258606, "chain_id": "3M0BCWMB8VV8KNAPBTT5LH7K3XFBWL_1_6"}
{"score": 0.3311668336391449, "chain_id": "3M0BCWMB8VV8KNAPBTT5LH7K3XFBWL_1_4"}
{"score": 0.9617972373962402, "chain_id": "3M0BCWMB8VV8KNAPBTT5LH7K3XFBWL_1_7"}
{"score": 0.9444975852966309, "chain_id": "3M0BCWMB8VV8KNAPBTT5LH7K3XFBWL_1_8"}
{"score": 0.8711000084877014, "chain_id": "3M0BCWMB8VV8KNAPBTT5LH7K3XFBWL_1_9"}
{"score": 0.14634743332862854, "chain_id": "3M0BCWMB8VV8KNAPBTT5LH7K3XFBWL_1_10"}
{"score": 0.049179282039403915, "chain_id": "3TUI152ZZBM2NSWBXN1ANGCPGZNQ1A_1_1"}
{"score": 0.0700080543756485, "chain_id": "3TUI152ZZBM2NSWBXN1ANGCPGZNQ1A_1_2"}
{"score": 0.0162035021930933, "chain_id": "3TUI152ZZBM2NSWBXN1ANGCPGZNQ1A_1_3"}
{"score": 0.07406250387430191, "chain_id": "3TUI152ZZBM2NSWBXN1ANGCPGZNQ1A_1_4"}
{"score": 0.036167461425065994, "chain_id": "3TUI152ZZBM2NSWBXN1ANGCPGZNQ1A_1_5"}
{"score": 0.31386685371398926, "chain_id": "3TUI152ZZBM2NSWBXN1ANGCPGZNQ1A_1_6"}
{"score": 0.13376232981681824, "chain_id": "3TUI152ZZBM2NSWBXN1ANGCPGZNQ1A_1_7"}
{"score": 0.019713561981916428, "chain_id": "3TUI152ZZBM2NSWBXN1ANGCPGZNQ1A_1_8"}
{"score": 0.4649793803691864, "chain_id": "3TUI152ZZBM2NSWBXN1ANGCPGZNQ1A_1_9"}
{"score": 0.37456604838371277, "chain_id": "3TUI152ZZBM2NSWBXN1ANGCPGZNQ1A_1_10"}
{"score": 0.7898266911506653, "chain_id": "3FTOP5WARFNLUG7G6ED1CAHTY8KJ0X_1_1"}
{"score": 0.9486637115478516, "chain_id": "3FTOP5WARFNLUG7G6ED1CAHTY8KJ0X_1_2"}
{"score": 0.9654468894004822, "chain_id": "3FTOP5WARFNLUG7G6ED1CAHTY8KJ0X_1_3"}
{"score": 0.44778746366500854, "chain_id": "3FTOP5WARFNLUG7G6ED1CAHTY8KJ0X_1_5"}
{"score": 0.7606213092803955, "chain_id": "3FTOP5WARFNLUG7G6ED1CAHTY8KJ0X_1_8"}
{"score": 0.3871762454509735, "chain_id": "3FTOP5WARFNLUG7G6ED1CAHTY8KJ0X_1_4"}
{"score": 0.42719706892967224, "chain_id": "3FTOP5WARFNLUG7G6ED1CAHTY8KJ0X_1_6"}
{"score": 0.040193457156419754, "chain_id": "3FTOP5WARFNLUG7G6ED1CAHTY8KJ0X_1_7"}
{"score": 0.055768538266420364, "chain_id": "3FTOP5WARFNLUG7G6ED1CAHTY8KJ0X_1_9"}
{"score": 0.3413337469100952, "chain_id": "3FTOP5WARFNLUG7G6ED1CAHTY8KJ0X_1_10"}
{"score": 0.04012324661016464, "chain_id": "3IGI0VL647J2GNQKNX74VIUS1AHNOX_1_1"}
{"score": 0.192902073264122, "chain_id": "3IGI0VL647J2GNQKNX74VIUS1AHNOX_1_2"}
{"score": 0.017297476530075073, "chain_id": "3IGI0VL647J2GNQKNX74VIUS1AHNOX_1_3"}
{"score": 0.13547959923744202, "chain_id": "3IGI0VL647J2GNQKNX74VIUS1AHNOX_1_4"}
{"score": 0.043877530843019485, "chain_id": "3IGI0VL647J2GNQKNX74VIUS1AHNOX_1_5"}
{"score": 0.07609385251998901, "chain_id": "3IGI0VL647J2GNQKNX74VIUS1AHNOX_1_6"}
{"score": 0.07620837539434433, "chain_id": "3IGI0VL647J2GNQKNX74VIUS1AHNOX_1_7"}
{"score": 0.10261890292167664, "chain_id": "3IGI0VL647J2GNQKNX74VIUS1AHNOX_1_8"}
{"score": 0.031682491302490234, "chain_id": "3IGI0VL647J2GNQKNX74VIUS1AHNOX_1_9"}
{"score": 0.04053111374378204, "chain_id": "3IGI0VL647J2GNQKNX74VIUS1AHNOX_1_10"}
{"score": 0.8650367856025696, "chain_id": "3FIJLY1B6U38DVP44916CDQ99O2PFK_1_6"}
{"score": 0.045795440673828125, "chain_id": "3FIJLY1B6U38DVP44916CDQ99O2PFK_1_1"}
{"score": 0.37102803587913513, "chain_id": "3FIJLY1B6U38DVP44916CDQ99O2PFK_1_2"}
{"score": 0.2781636714935303, "chain_id": "3FIJLY1B6U38DVP44916CDQ99O2PFK_1_3"}
{"score": 0.0734143853187561, "chain_id": "3FIJLY1B6U38DVP44916CDQ99O2PFK_1_4"}
{"score": 0.7987061738967896, "chain_id": "3FIJLY1B6U38DVP44916CDQ99O2PFK_1_5"}
{"score": 0.29807227849960327, "chain_id": "3FIJLY1B6U38DVP44916CDQ99O2PFK_1_7"}
{"score": 0.7992562055587769, "chain_id": "3FIJLY1B6U38DVP44916CDQ99O2PFK_1_8"}
{"score": 0.8573560118675232, "chain_id": "3FIJLY1B6U38DVP44916CDQ99O2PFK_1_9"}
{"score": 0.06408859044313431, "chain_id": "3FIJLY1B6U38DVP44916CDQ99O2PFK_1_10"}
{"score": 0.9804608225822449, "chain_id": "39PAAFCODMZV1K41L5FUZ9USMMQTVY_1_1"}
{"score": 0.9912586808204651, "chain_id": "39PAAFCODMZV1K41L5FUZ9USMMQTVY_1_3"}
{"score": 0.7434263825416565, "chain_id": "39PAAFCODMZV1K41L5FUZ9USMMQTVY_1_5"}
{"score": 0.9264957904815674, "chain_id": "39PAAFCODMZV1K41L5FUZ9USMMQTVY_1_6"}
{"score": 0.8014420866966248, "chain_id": "39PAAFCODMZV1K41L5FUZ9USMMQTVY_1_7"}
{"score": 0.9499127268791199, "chain_id": "39PAAFCODMZV1K41L5FUZ9USMMQTVY_1_2"}
{"score": 0.9697241187095642, "chain_id": "39PAAFCODMZV1K41L5FUZ9USMMQTVY_1_4"}
{"score": 0.4826086759567261, "chain_id": "39PAAFCODMZV1K41L5FUZ9USMMQTVY_1_8"}
{"score": 0.09318441152572632, "chain_id": "39PAAFCODMZV1K41L5FUZ9USMMQTVY_1_9"}
{"score": 0.5126415491104126, "chain_id": "39PAAFCODMZV1K41L5FUZ9USMMQTVY_1_10"}
{"score": 0.03205689787864685, "chain_id": "382M9COHEHETZMX4QKGU41S86PDEUS_1_10"}
{"score": 0.021078331395983696, "chain_id": "382M9COHEHETZMX4QKGU41S86PDEUS_1_1"}
{"score": 0.03959181532263756, "chain_id": "382M9COHEHETZMX4QKGU41S86PDEUS_1_2"}
{"score": 0.014783253893256187, "chain_id": "382M9COHEHETZMX4QKGU41S86PDEUS_1_3"}
{"score": 0.02229093387722969, "chain_id": "382M9COHEHETZMX4QKGU41S86PDEUS_1_4"}
{"score": 0.017934076488018036, "chain_id": "382M9COHEHETZMX4QKGU41S86PDEUS_1_5"}
{"score": 0.018240857869386673, "chain_id": "382M9COHEHETZMX4QKGU41S86PDEUS_1_6"}
{"score": 0.03339041396975517, "chain_id": "382M9COHEHETZMX4QKGU41S86PDEUS_1_7"}
{"score": 0.019204407930374146, "chain_id": "382M9COHEHETZMX4QKGU41S86PDEUS_1_8"}
{"score": 0.018802033737301826, "chain_id": "382M9COHEHETZMX4QKGU41S86PDEUS_1_9"}
{"score": 0.9804608225822449, "chain_id": "326O153BMIX7IKMI4PQ5U1OKKS8EDE_1_1"}
{"score": 0.9499127268791199, "chain_id": "326O153BMIX7IKMI4PQ5U1OKKS8EDE_1_2"}
{"score": 0.9912586808204651, "chain_id": "326O153BMIX7IKMI4PQ5U1OKKS8EDE_1_3"}
{"score": 0.9264957904815674, "chain_id": "326O153BMIX7IKMI4PQ5U1OKKS8EDE_1_6"}
{"score": 0.5126415491104126, "chain_id": "326O153BMIX7IKMI4PQ5U1OKKS8EDE_1_10"}
{"score": 0.9697241187095642, "chain_id": "326O153BMIX7IKMI4PQ5U1OKKS8EDE_1_4"}
{"score": 0.7434263825416565, "chain_id": "326O153BMIX7IKMI4PQ5U1OKKS8EDE_1_5"}
{"score": 0.8014420866966248, "chain_id": "326O153BMIX7IKMI4PQ5U1OKKS8EDE_1_7"}
{"score": 0.4826086759567261, "chain_id": "326O153BMIX7IKMI4PQ5U1OKKS8EDE_1_8"}
{"score": 0.09318441152572632, "chain_id": "326O153BMIX7IKMI4PQ5U1OKKS8EDE_1_9"}
{"score": 0.9846953749656677, "chain_id": "36ZN444YTRXA2MFTQHUCQAYBGA6IOU_1_1"}
{"score": 0.9460883736610413, "chain_id": "36ZN444YTRXA2MFTQHUCQAYBGA6IOU_1_2"}
{"score": 0.28303956985473633, "chain_id": "36ZN444YTRXA2MFTQHUCQAYBGA6IOU_1_3"}
{"score": 0.07906454801559448, "chain_id": "36ZN444YTRXA2MFTQHUCQAYBGA6IOU_1_4"}
{"score": 0.0792069137096405, "chain_id": "36ZN444YTRXA2MFTQHUCQAYBGA6IOU_1_5"}
{"score": 0.11252561211585999, "chain_id": "36ZN444YTRXA2MFTQHUCQAYBGA6IOU_1_6"}
{"score": 0.0534319244325161, "chain_id": "36ZN444YTRXA2MFTQHUCQAYBGA6IOU_1_7"}
{"score": 0.08897761255502701, "chain_id": "36ZN444YTRXA2MFTQHUCQAYBGA6IOU_1_8"}
{"score": 0.176442950963974, "chain_id": "36ZN444YTRXA2MFTQHUCQAYBGA6IOU_1_9"}
{"score": 0.05985713377594948, "chain_id": "36ZN444YTRXA2MFTQHUCQAYBGA6IOU_1_10"}
{"score": 0.9893445372581482, "chain_id": "3B1NLC6UGZVERVLZFT7OUYQLAVVGPN_1_1"}
{"score": 0.9734662771224976, "chain_id": "3B1NLC6UGZVERVLZFT7OUYQLAVVGPN_1_2"}
{"score": 0.09479371458292007, "chain_id": "3B1NLC6UGZVERVLZFT7OUYQLAVVGPN_1_9"}
{"score": 0.1473105549812317, "chain_id": "3B1NLC6UGZVERVLZFT7OUYQLAVVGPN_1_3"}
{"score": 0.6007817387580872, "chain_id": "3B1NLC6UGZVERVLZFT7OUYQLAVVGPN_1_4"}
{"score": 0.20414845645427704, "chain_id": "3B1NLC6UGZVERVLZFT7OUYQLAVVGPN_1_5"}
{"score": 0.15992410480976105, "chain_id": "3B1NLC6UGZVERVLZFT7OUYQLAVVGPN_1_6"}
{"score": 0.06612573564052582, "chain_id": "3B1NLC6UGZVERVLZFT7OUYQLAVVGPN_1_7"}
{"score": 0.1332981288433075, "chain_id": "3B1NLC6UGZVERVLZFT7OUYQLAVVGPN_1_8"}
{"score": 0.09157378226518631, "chain_id": "3B1NLC6UGZVERVLZFT7OUYQLAVVGPN_1_10"}
{"score": 0.9703662991523743, "chain_id": "3IRIK4HM3AJT0DNPYBCWY7EPIVMC67_1_1"}
{"score": 0.8770426511764526, "chain_id": "3IRIK4HM3AJT0DNPYBCWY7EPIVMC67_1_2"}
{"score": 0.15116089582443237, "chain_id": "3IRIK4HM3AJT0DNPYBCWY7EPIVMC67_1_3"}
{"score": 0.07078065723180771, "chain_id": "3IRIK4HM3AJT0DNPYBCWY7EPIVMC67_1_4"}
{"score": 0.6032075881958008, "chain_id": "3IRIK4HM3AJT0DNPYBCWY7EPIVMC67_1_5"}
{"score": 0.7736836075782776, "chain_id": "3IRIK4HM3AJT0DNPYBCWY7EPIVMC67_1_6"}
{"score": 0.1388956606388092, "chain_id": "3IRIK4HM3AJT0DNPYBCWY7EPIVMC67_1_7"}
{"score": 0.7700244784355164, "chain_id": "3IRIK4HM3AJT0DNPYBCWY7EPIVMC67_1_8"}
{"score": 0.16970938444137573, "chain_id": "3IRIK4HM3AJT0DNPYBCWY7EPIVMC67_1_9"}
{"score": 0.5869985222816467, "chain_id": "3IRIK4HM3AJT0DNPYBCWY7EPIVMC67_1_10"}
{"score": 0.9900216460227966, "chain_id": "36NEMU28XFC43EEM2IJEZXIE1XMWMM_1_1"}
{"score": 0.9686681628227234, "chain_id": "36NEMU28XFC43EEM2IJEZXIE1XMWMM_1_2"}
{"score": 0.9894502758979797, "chain_id": "36NEMU28XFC43EEM2IJEZXIE1XMWMM_1_3"}
{"score": 0.9693134427070618, "chain_id": "36NEMU28XFC43EEM2IJEZXIE1XMWMM_1_4"}
{"score": 0.9635159969329834, "chain_id": "36NEMU28XFC43EEM2IJEZXIE1XMWMM_1_5"}
{"score": 0.10907159745693207, "chain_id": "36NEMU28XFC43EEM2IJEZXIE1XMWMM_1_10"}
{"score": 0.9750192165374756, "chain_id": "36NEMU28XFC43EEM2IJEZXIE1XMWMM_1_6"}
{"score": 0.742146909236908, "chain_id": "36NEMU28XFC43EEM2IJEZXIE1XMWMM_1_7"}
{"score": 0.632805347442627, "chain_id": "36NEMU28XFC43EEM2IJEZXIE1XMWMM_1_8"}
{"score": 0.15451693534851074, "chain_id": "36NEMU28XFC43EEM2IJEZXIE1XMWMM_1_9"}
{"score": 0.948867678642273, "chain_id": "320DUZ38G7LI5KI1KG24X2493HSJG1_1_1"}
{"score": 0.8771567344665527, "chain_id": "320DUZ38G7LI5KI1KG24X2493HSJG1_1_2"}
{"score": 0.7222528457641602, "chain_id": "320DUZ38G7LI5KI1KG24X2493HSJG1_1_3"}
{"score": 0.6976664066314697, "chain_id": "320DUZ38G7LI5KI1KG24X2493HSJG1_1_4"}
{"score": 0.04438715800642967, "chain_id": "320DUZ38G7LI5KI1KG24X2493HSJG1_1_5"}
{"score": 0.024167442694306374, "chain_id": "320DUZ38G7LI5KI1KG24X2493HSJG1_1_6"}
{"score": 0.055137716233730316, "chain_id": "320DUZ38G7LI5KI1KG24X2493HSJG1_1_7"}
{"score": 0.0627596527338028, "chain_id": "320DUZ38G7LI5KI1KG24X2493HSJG1_1_8"}
{"score": 0.41547322273254395, "chain_id": "320DUZ38G7LI5KI1KG24X2493HSJG1_1_9"}
{"score": 0.104792021214962, "chain_id": "320DUZ38G7LI5KI1KG24X2493HSJG1_1_10"}
{"score": 0.9902607798576355, "chain_id": "3B837J3LDOV2TDA5NL5UO7931S5SRD_1_1"}
{"score": 0.49671268463134766, "chain_id": "3B837J3LDOV2TDA5NL5UO7931S5SRD_1_2"}
{"score": 0.9822413325309753, "chain_id": "3B837J3LDOV2TDA5NL5UO7931S5SRD_1_3"}
{"score": 0.5903858542442322, "chain_id": "3B837J3LDOV2TDA5NL5UO7931S5SRD_1_4"}
{"score": 0.8437063097953796, "chain_id": "3B837J3LDOV2TDA5NL5UO7931S5SRD_1_5"}
{"score": 0.5747241377830505, "chain_id": "3B837J3LDOV2TDA5NL5UO7931S5SRD_1_6"}
{"score": 0.8301595449447632, "chain_id": "3B837J3LDOV2TDA5NL5UO7931S5SRD_1_7"}
{"score": 0.6660641431808472, "chain_id": "3B837J3LDOV2TDA5NL5UO7931S5SRD_1_8"}
{"score": 0.10596929490566254, "chain_id": "3B837J3LDOV2TDA5NL5UO7931S5SRD_1_9"}
{"score": 0.23313185572624207, "chain_id": "3B837J3LDOV2TDA5NL5UO7931S5SRD_1_10"}
{"score": 0.9896349310874939, "chain_id": "3GNCZX450IMDH48WTTFEYCFIGM0PAE_1_1"}
{"score": 0.9899617433547974, "chain_id": "3GNCZX450IMDH48WTTFEYCFIGM0PAE_1_3"}
{"score": 0.9639177322387695, "chain_id": "3GNCZX450IMDH48WTTFEYCFIGM0PAE_1_4"}
{"score": 0.958730161190033, "chain_id": "3GNCZX450IMDH48WTTFEYCFIGM0PAE_1_6"}
{"score": 0.9258610010147095, "chain_id": "3GNCZX450IMDH48WTTFEYCFIGM0PAE_1_2"}
{"score": 0.9612423181533813, "chain_id": "3GNCZX450IMDH48WTTFEYCFIGM0PAE_1_5"}
{"score": 0.6901140213012695, "chain_id": "3GNCZX450IMDH48WTTFEYCFIGM0PAE_1_7"}
{"score": 0.5810004472732544, "chain_id": "3GNCZX450IMDH48WTTFEYCFIGM0PAE_1_8"}
{"score": 0.8648675680160522, "chain_id": "3GNCZX450IMDH48WTTFEYCFIGM0PAE_1_9"}
{"score": 0.09896320104598999, "chain_id": "3GNCZX450IMDH48WTTFEYCFIGM0PAE_1_10"}
{"score": 0.5642130374908447, "chain_id": "3FIJLY1B6U38DVP44916CDQ9D7GFPU_1_1"}
{"score": 0.7365432977676392, "chain_id": "3FIJLY1B6U38DVP44916CDQ9D7GFPU_1_2"}
{"score": 0.5412119030952454, "chain_id": "3FIJLY1B6U38DVP44916CDQ9D7GFPU_1_3"}
{"score": 0.8616964221000671, "chain_id": "3FIJLY1B6U38DVP44916CDQ9D7GFPU_1_4"}
{"score": 0.47883790731430054, "chain_id": "3FIJLY1B6U38DVP44916CDQ9D7GFPU_1_5"}
{"score": 0.6489725112915039, "chain_id": "3FIJLY1B6U38DVP44916CDQ9D7GFPU_1_6"}
{"score": 0.4105292856693268, "chain_id": "3FIJLY1B6U38DVP44916CDQ9D7GFPU_1_7"}
{"score": 0.7372876405715942, "chain_id": "3FIJLY1B6U38DVP44916CDQ9D7GFPU_1_8"}
{"score": 0.7345288395881653, "chain_id": "3FIJLY1B6U38DVP44916CDQ9D7GFPU_1_9"}
{"score": 0.4294341504573822, "chain_id": "3FIJLY1B6U38DVP44916CDQ9D7GFPU_1_10"}
{"score": 0.034591853618621826, "chain_id": "3LEP4MGT3GZ9BHAYUYOFTTIZFO7BD7_1_1"}
{"score": 0.1516413688659668, "chain_id": "3LEP4MGT3GZ9BHAYUYOFTTIZFO7BD7_1_2"}
{"score": 0.042668417096138, "chain_id": "3LEP4MGT3GZ9BHAYUYOFTTIZFO7BD7_1_3"}
{"score": 0.016563791781663895, "chain_id": "3LEP4MGT3GZ9BHAYUYOFTTIZFO7BD7_1_4"}
{"score": 0.07429693639278412, "chain_id": "3LEP4MGT3GZ9BHAYUYOFTTIZFO7BD7_1_5"}
{"score": 0.029960468411445618, "chain_id": "3LEP4MGT3GZ9BHAYUYOFTTIZFO7BD7_1_6"}
{"score": 0.03401781618595123, "chain_id": "3LEP4MGT3GZ9BHAYUYOFTTIZFO7BD7_1_7"}
{"score": 0.2757861316204071, "chain_id": "3LEP4MGT3GZ9BHAYUYOFTTIZFO7BD7_1_8"}
{"score": 0.4480946958065033, "chain_id": "3LEP4MGT3GZ9BHAYUYOFTTIZFO7BD7_1_9"}
{"score": 0.01644914783537388, "chain_id": "3LEP4MGT3GZ9BHAYUYOFTTIZFO7BD7_1_10"}
{"score": 0.042926397174596786, "chain_id": "3EA3QWIZ4IUQFEK1MYGBKK4YF0DTIH_1_1"}
{"score": 0.07353728264570236, "chain_id": "3EA3QWIZ4IUQFEK1MYGBKK4YF0DTIH_1_2"}
{"score": 0.1382400393486023, "chain_id": "3EA3QWIZ4IUQFEK1MYGBKK4YF0DTIH_1_3"}
{"score": 0.07075571268796921, "chain_id": "3EA3QWIZ4IUQFEK1MYGBKK4YF0DTIH_1_4"}
{"score": 0.04595399647951126, "chain_id": "3EA3QWIZ4IUQFEK1MYGBKK4YF0DTIH_1_5"}
{"score": 0.026760833337903023, "chain_id": "3EA3QWIZ4IUQFEK1MYGBKK4YF0DTIH_1_6"}
{"score": 0.05350079759955406, "chain_id": "3EA3QWIZ4IUQFEK1MYGBKK4YF0DTIH_1_7"}
{"score": 0.01965015009045601, "chain_id": "3EA3QWIZ4IUQFEK1MYGBKK4YF0DTIH_1_8"}
{"score": 0.03482433035969734, "chain_id": "3EA3QWIZ4IUQFEK1MYGBKK4YF0DTIH_1_9"}
{"score": 0.06455030292272568, "chain_id": "3EA3QWIZ4IUQFEK1MYGBKK4YF0DTIH_1_10"}
{"score": 0.03866618871688843, "chain_id": "3KV0LJBBH2KZVIX03O98CYAX8WIRMG_1_1"}
{"score": 0.03917032480239868, "chain_id": "3KV0LJBBH2KZVIX03O98CYAX8WIRMG_1_2"}
{"score": 0.14355525374412537, "chain_id": "3KV0LJBBH2KZVIX03O98CYAX8WIRMG_1_3"}
{"score": 0.11112888902425766, "chain_id": "3KV0LJBBH2KZVIX03O98CYAX8WIRMG_1_4"}
{"score": 0.043691236525774, "chain_id": "3KV0LJBBH2KZVIX03O98CYAX8WIRMG_1_5"}
{"score": 0.03827283903956413, "chain_id": "3KV0LJBBH2KZVIX03O98CYAX8WIRMG_1_6"}
{"score": 0.05671147257089615, "chain_id": "3KV0LJBBH2KZVIX03O98CYAX8WIRMG_1_7"}
{"score": 0.04063829779624939, "chain_id": "3KV0LJBBH2KZVIX03O98CYAX8WIRMG_1_8"}
{"score": 0.03514615446329117, "chain_id": "3KV0LJBBH2KZVIX03O98CYAX8WIRMG_1_9"}
{"score": 0.054892223328351974, "chain_id": "3KV0LJBBH2KZVIX03O98CYAX8WIRMG_1_10"}
{"score": 0.06439996510744095, "chain_id": "3R2PKQ87NW7M2JUHD1FZY696N0HIMN_1_8"}
{"score": 0.018706727772951126, "chain_id": "3R2PKQ87NW7M2JUHD1FZY696N0HIMN_1_1"}
{"score": 0.039863087236881256, "chain_id": "3R2PKQ87NW7M2JUHD1FZY696N0HIMN_1_2"}
{"score": 0.017929228022694588, "chain_id": "3R2PKQ87NW7M2JUHD1FZY696N0HIMN_1_3"}
{"score": 0.019412657245993614, "chain_id": "3R2PKQ87NW7M2JUHD1FZY696N0HIMN_1_4"}
{"score": 0.015096792951226234, "chain_id": "3R2PKQ87NW7M2JUHD1FZY696N0HIMN_1_5"}
{"score": 0.03838164359331131, "chain_id": "3R2PKQ87NW7M2JUHD1FZY696N0HIMN_1_6"}
{"score": 0.040165577083826065, "chain_id": "3R2PKQ87NW7M2JUHD1FZY696N0HIMN_1_7"}
{"score": 0.08452736586332321, "chain_id": "3R2PKQ87NW7M2JUHD1FZY696N0HIMN_1_9"}
{"score": 0.028894364833831787, "chain_id": "3R2PKQ87NW7M2JUHD1FZY696N0HIMN_1_10"}
{"score": 0.053724490106105804, "chain_id": "3A4NIXBJ76YOSK2NY4CCQM1Y452MLY_1_1"}
{"score": 0.021363522857427597, "chain_id": "3A4NIXBJ76YOSK2NY4CCQM1Y452MLY_1_2"}
{"score": 0.018900614231824875, "chain_id": "3A4NIXBJ76YOSK2NY4CCQM1Y452MLY_1_3"}
{"score": 0.033926717936992645, "chain_id": "3A4NIXBJ76YOSK2NY4CCQM1Y452MLY_1_4"}
{"score": 0.019382843747735023, "chain_id": "3A4NIXBJ76YOSK2NY4CCQM1Y452MLY_1_5"}
{"score": 0.04735278710722923, "chain_id": "3A4NIXBJ76YOSK2NY4CCQM1Y452MLY_1_6"}
{"score": 0.01621418632566929, "chain_id": "3A4NIXBJ76YOSK2NY4CCQM1Y452MLY_1_7"}
{"score": 0.020829489454627037, "chain_id": "3A4NIXBJ76YOSK2NY4CCQM1Y452MLY_1_8"}
{"score": 0.01758667640388012, "chain_id": "3A4NIXBJ76YOSK2NY4CCQM1Y452MLY_1_9"}
{"score": 0.019374044612050056, "chain_id": "3A4NIXBJ76YOSK2NY4CCQM1Y452MLY_1_10"}
{"score": 0.016964636743068695, "chain_id": "308XBLVESI33CRT3CZJZYIZ3Y9IBR5_1_1"}
{"score": 0.01546438131481409, "chain_id": "308XBLVESI33CRT3CZJZYIZ3Y9IBR5_1_2"}
{"score": 0.02200378105044365, "chain_id": "308XBLVESI33CRT3CZJZYIZ3Y9IBR5_1_3"}
{"score": 0.16551797091960907, "chain_id": "308XBLVESI33CRT3CZJZYIZ3Y9IBR5_1_4"}
{"score": 0.012896351516246796, "chain_id": "308XBLVESI33CRT3CZJZYIZ3Y9IBR5_1_5"}
{"score": 0.08326756954193115, "chain_id": "308XBLVESI33CRT3CZJZYIZ3Y9IBR5_1_6"}
{"score": 0.021450970321893692, "chain_id": "308XBLVESI33CRT3CZJZYIZ3Y9IBR5_1_7"}
{"score": 0.02379247359931469, "chain_id": "308XBLVESI33CRT3CZJZYIZ3Y9IBR5_1_8"}
{"score": 0.09393510967493057, "chain_id": "308XBLVESI33CRT3CZJZYIZ3Y9IBR5_1_9"}
{"score": 0.022071324288845062, "chain_id": "308XBLVESI33CRT3CZJZYIZ3Y9IBR5_1_10"}
{"score": 0.06516290456056595, "chain_id": "36NEMU28XFC43EEM2IJEZXIE34WMW2_1_1"}
{"score": 0.022008279338479042, "chain_id": "36NEMU28XFC43EEM2IJEZXIE34WMW2_1_2"}
{"score": 0.14608260989189148, "chain_id": "36NEMU28XFC43EEM2IJEZXIE34WMW2_1_3"}
{"score": 0.3244105279445648, "chain_id": "36NEMU28XFC43EEM2IJEZXIE34WMW2_1_4"}
{"score": 0.03849893808364868, "chain_id": "36NEMU28XFC43EEM2IJEZXIE34WMW2_1_5"}
{"score": 0.030646465718746185, "chain_id": "36NEMU28XFC43EEM2IJEZXIE34WMW2_1_6"}
{"score": 0.04714242368936539, "chain_id": "36NEMU28XFC43EEM2IJEZXIE34WMW2_1_7"}
{"score": 0.5918416380882263, "chain_id": "36NEMU28XFC43EEM2IJEZXIE34WMW2_1_8"}
{"score": 0.016942180693149567, "chain_id": "36NEMU28XFC43EEM2IJEZXIE34WMW2_1_9"}
{"score": 0.022952407598495483, "chain_id": "36NEMU28XFC43EEM2IJEZXIE34WMW2_1_10"}
{"score": 0.07975862920284271, "chain_id": "3XCC1ODXDLAQGXVSVHGPT7U2LTGQRY_1_1"}
{"score": 0.019383052363991737, "chain_id": "3XCC1ODXDLAQGXVSVHGPT7U2LTGQRY_1_2"}
{"score": 0.020606832578778267, "chain_id": "3XCC1ODXDLAQGXVSVHGPT7U2LTGQRY_1_3"}
{"score": 0.06210717931389809, "chain_id": "3XCC1ODXDLAQGXVSVHGPT7U2LTGQRY_1_4"}
{"score": 0.04313357546925545, "chain_id": "3XCC1ODXDLAQGXVSVHGPT7U2LTGQRY_1_5"}
{"score": 0.031610213220119476, "chain_id": "3XCC1ODXDLAQGXVSVHGPT7U2LTGQRY_1_6"}
{"score": 0.017901943996548653, "chain_id": "3XCC1ODXDLAQGXVSVHGPT7U2LTGQRY_1_7"}
{"score": 0.03384534642100334, "chain_id": "3XCC1ODXDLAQGXVSVHGPT7U2LTGQRY_1_8"}
{"score": 0.026598269119858742, "chain_id": "3XCC1ODXDLAQGXVSVHGPT7U2LTGQRY_1_9"}
{"score": 0.19499824941158295, "chain_id": "3XCC1ODXDLAQGXVSVHGPT7U2LTGQRY_1_10"}
{"score": 0.05593818426132202, "chain_id": "3UJ1CZ6IZHODOQC7QESRL6476YX5S2_1_1"}
{"score": 0.02431575581431389, "chain_id": "3UJ1CZ6IZHODOQC7QESRL6476YX5S2_1_2"}
{"score": 0.024471281096339226, "chain_id": "3UJ1CZ6IZHODOQC7QESRL6476YX5S2_1_3"}
{"score": 0.03778956085443497, "chain_id": "3UJ1CZ6IZHODOQC7QESRL6476YX5S2_1_4"}
{"score": 0.02750241756439209, "chain_id": "3UJ1CZ6IZHODOQC7QESRL6476YX5S2_1_5"}
{"score": 0.01490565575659275, "chain_id": "3UJ1CZ6IZHODOQC7QESRL6476YX5S2_1_6"}
{"score": 0.015042847022414207, "chain_id": "3UJ1CZ6IZHODOQC7QESRL6476YX5S2_1_7"}
{"score": 0.022864066064357758, "chain_id": "3UJ1CZ6IZHODOQC7QESRL6476YX5S2_1_8"}
{"score": 0.0321304090321064, "chain_id": "3UJ1CZ6IZHODOQC7QESRL6476YX5S2_1_9"}
{"score": 0.021552888676524162, "chain_id": "3UJ1CZ6IZHODOQC7QESRL6476YX5S2_1_10"}
{"score": 0.24483463168144226, "chain_id": "3WOKGM4L71FZVRYDMR56K6YFU6NO0J_1_2"}
{"score": 0.9582250714302063, "chain_id": "3WOKGM4L71FZVRYDMR56K6YFU6NO0J_1_8"}
{"score": 0.6993237137794495, "chain_id": "3WOKGM4L71FZVRYDMR56K6YFU6NO0J_1_1"}
{"score": 0.08174800872802734, "chain_id": "3WOKGM4L71FZVRYDMR56K6YFU6NO0J_1_3"}
{"score": 0.31847599148750305, "chain_id": "3WOKGM4L71FZVRYDMR56K6YFU6NO0J_1_4"}
{"score": 0.03722454234957695, "chain_id": "3WOKGM4L71FZVRYDMR56K6YFU6NO0J_1_5"}
{"score": 0.03259947523474693, "chain_id": "3WOKGM4L71FZVRYDMR56K6YFU6NO0J_1_6"}
{"score": 0.06730406731367111, "chain_id": "3WOKGM4L71FZVRYDMR56K6YFU6NO0J_1_7"}
{"score": 0.09783587604761124, "chain_id": "3WOKGM4L71FZVRYDMR56K6YFU6NO0J_1_9"}
{"score": 0.03870376572012901, "chain_id": "3WOKGM4L71FZVRYDMR56K6YFU6NO0J_1_10"}
{"score": 0.09652425348758698, "chain_id": "37ZHEEHM6WLORD5BOS6NBIAR9Y473Q_1_4"}
{"score": 0.09642499685287476, "chain_id": "37ZHEEHM6WLORD5BOS6NBIAR9Y473Q_1_1"}
{"score": 0.26252537965774536, "chain_id": "37ZHEEHM6WLORD5BOS6NBIAR9Y473Q_1_2"}
{"score": 0.07095550745725632, "chain_id": "37ZHEEHM6WLORD5BOS6NBIAR9Y473Q_1_3"}
{"score": 0.0162374135106802, "chain_id": "37ZHEEHM6WLORD5BOS6NBIAR9Y473Q_1_5"}
{"score": 0.03686520457267761, "chain_id": "37ZHEEHM6WLORD5BOS6NBIAR9Y473Q_1_6"}
{"score": 0.016615228727459908, "chain_id": "37ZHEEHM6WLORD5BOS6NBIAR9Y473Q_1_7"}
{"score": 0.10593444854021072, "chain_id": "37ZHEEHM6WLORD5BOS6NBIAR9Y473Q_1_8"}
{"score": 0.24343915283679962, "chain_id": "37ZHEEHM6WLORD5BOS6NBIAR9Y473Q_1_9"}
{"score": 0.045507319271564484, "chain_id": "37ZHEEHM6WLORD5BOS6NBIAR9Y473Q_1_10"}
{"score": 0.028460267931222916, "chain_id": "336KAV9KYQRILF5T71II5LPW88EY2N_1_1"}
{"score": 0.017578158527612686, "chain_id": "336KAV9KYQRILF5T71II5LPW88EY2N_1_2"}
{"score": 0.01958337239921093, "chain_id": "336KAV9KYQRILF5T71II5LPW88EY2N_1_3"}
{"score": 0.018118586391210556, "chain_id": "336KAV9KYQRILF5T71II5LPW88EY2N_1_4"}
{"score": 0.018305703997612, "chain_id": "336KAV9KYQRILF5T71II5LPW88EY2N_1_5"}
{"score": 0.037798404693603516, "chain_id": "336KAV9KYQRILF5T71II5LPW88EY2N_1_6"}
{"score": 0.015282568521797657, "chain_id": "336KAV9KYQRILF5T71II5LPW88EY2N_1_7"}
{"score": 0.4782504439353943, "chain_id": "3M0BCWMB8VV8KNAPBTT5LH7KPFOWB1_1_1"}
{"score": 0.534812331199646, "chain_id": "3M0BCWMB8VV8KNAPBTT5LH7KPFOWB1_1_2"}
{"score": 0.09915582090616226, "chain_id": "3M0BCWMB8VV8KNAPBTT5LH7KPFOWB1_1_4"}
{"score": 0.7433521747589111, "chain_id": "3M0BCWMB8VV8KNAPBTT5LH7KPFOWB1_1_5"}
{"score": 0.04223669320344925, "chain_id": "3M0BCWMB8VV8KNAPBTT5LH7KPFOWB1_1_3"}
{"score": 0.01440327800810337, "chain_id": "3M0BCWMB8VV8KNAPBTT5LH7KPFOWB1_1_6"}
{"score": 0.023149559274315834, "chain_id": "3M0BCWMB8VV8KNAPBTT5LH7KPFOWB1_1_7"}
{"score": 0.18248715996742249, "chain_id": "3M0BCWMB8VV8KNAPBTT5LH7KPFOWB1_1_8"}
{"score": 0.0199974924325943, "chain_id": "3M0BCWMB8VV8KNAPBTT5LH7KPFOWB1_1_9"}
{"score": 0.1487436592578888, "chain_id": "3M0BCWMB8VV8KNAPBTT5LH7KPFOWB1_1_10"}
{"score": 0.4910854399204254, "chain_id": "32XVDSJFPZWIRYGFOYU7BQ2PYHK2MM_1_1"}
{"score": 0.8670825958251953, "chain_id": "32XVDSJFPZWIRYGFOYU7BQ2PYHK2MM_1_4"}
{"score": 0.0570860281586647, "chain_id": "32XVDSJFPZWIRYGFOYU7BQ2PYHK2MM_1_2"}
{"score": 0.7958604097366333, "chain_id": "32XVDSJFPZWIRYGFOYU7BQ2PYHK2MM_1_3"}
{"score": 0.33648839592933655, "chain_id": "32XVDSJFPZWIRYGFOYU7BQ2PYHK2MM_1_5"}
{"score": 0.4682263731956482, "chain_id": "32XVDSJFPZWIRYGFOYU7BQ2PYHK2MM_1_6"}
{"score": 0.2538294494152069, "chain_id": "32XVDSJFPZWIRYGFOYU7BQ2PYHK2MM_1_7"}
{"score": 0.01949060894548893, "chain_id": "32XVDSJFPZWIRYGFOYU7BQ2PYHK2MM_1_8"}
{"score": 0.04973958060145378, "chain_id": "32XVDSJFPZWIRYGFOYU7BQ2PYHK2MM_1_9"}
{"score": 0.049700696021318436, "chain_id": "32XVDSJFPZWIRYGFOYU7BQ2PYHK2MM_1_10"}
{"score": 0.845350980758667, "chain_id": "3ZGVPD4G6TGCA49BM24XKF7OPJ3ZTY_1_1"}
{"score": 0.8725919723510742, "chain_id": "3ZGVPD4G6TGCA49BM24XKF7OPJ3ZTY_1_2"}
{"score": 0.17452919483184814, "chain_id": "3ZGVPD4G6TGCA49BM24XKF7OPJ3ZTY_1_3"}
{"score": 0.7262684106826782, "chain_id": "3ZGVPD4G6TGCA49BM24XKF7OPJ3ZTY_1_8"}
{"score": 0.48763057589530945, "chain_id": "3ZGVPD4G6TGCA49BM24XKF7OPJ3ZTY_1_4"}
{"score": 0.7778458595275879, "chain_id": "3ZGVPD4G6TGCA49BM24XKF7OPJ3ZTY_1_5"}
{"score": 0.13922028243541718, "chain_id": "3ZGVPD4G6TGCA49BM24XKF7OPJ3ZTY_1_6"}
{"score": 0.7303237318992615, "chain_id": "3ZGVPD4G6TGCA49BM24XKF7OPJ3ZTY_1_7"}
{"score": 0.12026408314704895, "chain_id": "3ZGVPD4G6TGCA49BM24XKF7OPJ3ZTY_1_9"}
{"score": 0.22530825436115265, "chain_id": "3ZGVPD4G6TGCA49BM24XKF7OPJ3ZTY_1_10"}
{"score": 0.03269681707024574, "chain_id": "3RYC5T2D73S5GLUDV410T24SRFURPJ_1_1"}
{"score": 0.028080111369490623, "chain_id": "3RYC5T2D73S5GLUDV410T24SRFURPJ_1_2"}
{"score": 0.029025115072727203, "chain_id": "3RYC5T2D73S5GLUDV410T24SRFURPJ_1_3"}
{"score": 0.02114635333418846, "chain_id": "3RYC5T2D73S5GLUDV410T24SRFURPJ_1_4"}
{"score": 0.0858578011393547, "chain_id": "3RYC5T2D73S5GLUDV410T24SRFURPJ_1_5"}
{"score": 0.04627809301018715, "chain_id": "3RYC5T2D73S5GLUDV410T24SRFURPJ_1_6"}
{"score": 0.02290409244596958, "chain_id": "3RYC5T2D73S5GLUDV410T24SRFURPJ_1_7"}
{"score": 0.5041458010673523, "chain_id": "3RYC5T2D73S5GLUDV410T24SRFURPJ_1_8"}
{"score": 0.051804158836603165, "chain_id": "3RYC5T2D73S5GLUDV410T24SRFURPJ_1_9"}
{"score": 0.025920113548636436, "chain_id": "3RYC5T2D73S5GLUDV410T24SRFURPJ_1_10"}
{"score": 0.5521496534347534, "chain_id": "37C0GNLMHF2355T3Y777IDW7HDZ6DG_1_3"}
{"score": 0.49105104804039, "chain_id": "37C0GNLMHF2355T3Y777IDW7HDZ6DG_1_1"}
{"score": 0.9911487102508545, "chain_id": "37C0GNLMHF2355T3Y777IDW7HDZ6DG_1_2"}
{"score": 0.05119910463690758, "chain_id": "37C0GNLMHF2355T3Y777IDW7HDZ6DG_1_4"}
{"score": 0.328058660030365, "chain_id": "37C0GNLMHF2355T3Y777IDW7HDZ6DG_1_5"}
{"score": 0.18556195497512817, "chain_id": "37C0GNLMHF2355T3Y777IDW7HDZ6DG_1_6"}
{"score": 0.0728730782866478, "chain_id": "37C0GNLMHF2355T3Y777IDW7HDZ6DG_1_7"}
{"score": 0.5930079221725464, "chain_id": "37C0GNLMHF2355T3Y777IDW7HDZ6DG_1_8"}
{"score": 0.035104282200336456, "chain_id": "37C0GNLMHF2355T3Y777IDW7HDZ6DG_1_9"}
{"score": 0.16548457741737366, "chain_id": "37C0GNLMHF2355T3Y777IDW7HDZ6DG_1_10"}
{"score": 0.10076989978551865, "chain_id": "3RXCAC0YIROTL3MITC5D8CVVOZ4G87_1_1"}
{"score": 0.15154598653316498, "chain_id": "3RXCAC0YIROTL3MITC5D8CVVOZ4G87_1_2"}
{"score": 0.21885937452316284, "chain_id": "3RXCAC0YIROTL3MITC5D8CVVOZ4G87_1_3"}
{"score": 0.07338127493858337, "chain_id": "3RXCAC0YIROTL3MITC5D8CVVOZ4G87_1_4"}
{"score": 0.7864599227905273, "chain_id": "3RXCAC0YIROTL3MITC5D8CVVOZ4G87_1_5"}
{"score": 0.8406728506088257, "chain_id": "3RXCAC0YIROTL3MITC5D8CVVOZ4G87_1_6"}
{"score": 0.5879044532775879, "chain_id": "3RXCAC0YIROTL3MITC5D8CVVOZ4G87_1_7"}
{"score": 0.13541056215763092, "chain_id": "3RXCAC0YIROTL3MITC5D8CVVOZ4G87_1_8"}
{"score": 0.14717060327529907, "chain_id": "3RXCAC0YIROTL3MITC5D8CVVOZ4G87_1_9"}
{"score": 0.23517990112304688, "chain_id": "3RXCAC0YIROTL3MITC5D8CVVOZ4G87_1_10"}
{"score": 0.05448903515934944, "chain_id": "3NAPMVF0ZWEZ6V9SKSSIS85Z4YP72T_1_1"}
{"score": 0.03449507802724838, "chain_id": "3NAPMVF0ZWEZ6V9SKSSIS85Z4YP72T_1_2"}
{"score": 0.016353348270058632, "chain_id": "3NAPMVF0ZWEZ6V9SKSSIS85Z4YP72T_1_3"}
{"score": 0.051537055522203445, "chain_id": "3NAPMVF0ZWEZ6V9SKSSIS85Z4YP72T_1_4"}
{"score": 0.6829248666763306, "chain_id": "3NAPMVF0ZWEZ6V9SKSSIS85Z4YP72T_1_5"}
{"score": 0.15501070022583008, "chain_id": "3NAPMVF0ZWEZ6V9SKSSIS85Z4YP72T_1_6"}
{"score": 0.6154931783676147, "chain_id": "3NAPMVF0ZWEZ6V9SKSSIS85Z4YP72T_1_7"}
{"score": 0.12835341691970825, "chain_id": "3NAPMVF0ZWEZ6V9SKSSIS85Z4YP72T_1_8"}
{"score": 0.4711633026599884, "chain_id": "3NAPMVF0ZWEZ6V9SKSSIS85Z4YP72T_1_9"}
{"score": 0.03430084511637688, "chain_id": "3NAPMVF0ZWEZ6V9SKSSIS85Z4YP72T_1_10"}
{"score": 0.8259221315383911, "chain_id": "3KMS4QQVK2P724SORHWYGW4AHPSKFY_1_2"}
{"score": 0.7572835683822632, "chain_id": "3KMS4QQVK2P724SORHWYGW4AHPSKFY_1_1"}
{"score": 0.09818585962057114, "chain_id": "3KMS4QQVK2P724SORHWYGW4AHPSKFY_1_3"}
{"score": 0.6714238524436951, "chain_id": "3KMS4QQVK2P724SORHWYGW4AHPSKFY_1_4"}
{"score": 0.7610051035881042, "chain_id": "3KMS4QQVK2P724SORHWYGW4AHPSKFY_1_5"}
{"score": 0.014340803027153015, "chain_id": "3KMS4QQVK2P724SORHWYGW4AHPSKFY_1_6"}
{"score": 0.14398978650569916, "chain_id": "3KMS4QQVK2P724SORHWYGW4AHPSKFY_1_7"}
{"score": 0.047245267778635025, "chain_id": "3KMS4QQVK2P724SORHWYGW4AHPSKFY_1_8"}
{"score": 0.03589580953121185, "chain_id": "3KMS4QQVK2P724SORHWYGW4AHPSKFY_1_9"}
{"score": 0.048674486577510834, "chain_id": "3KMS4QQVK2P724SORHWYGW4AHPSKFY_1_10"}
{"score": 0.10755758732557297, "chain_id": "3HYA4D452RICLOOY2BQUG0IG0R4F29_1_2"}
{"score": 0.8397847414016724, "chain_id": "3HYA4D452RICLOOY2BQUG0IG0R4F29_1_3"}
{"score": 0.7269610166549683, "chain_id": "3HYA4D452RICLOOY2BQUG0IG0R4F29_1_1"}
{"score": 0.30280056595802307, "chain_id": "3HYA4D452RICLOOY2BQUG0IG0R4F29_1_4"}
{"score": 0.10829824954271317, "chain_id": "3HYA4D452RICLOOY2BQUG0IG0R4F29_1_5"}
{"score": 0.1060338169336319, "chain_id": "3HYA4D452RICLOOY2BQUG0IG0R4F29_1_6"}
{"score": 0.013506438583135605, "chain_id": "3HYA4D452RICLOOY2BQUG0IG0R4F29_1_7"}
{"score": 0.034584321081638336, "chain_id": "3HYA4D452RICLOOY2BQUG0IG0R4F29_1_8"}
{"score": 0.028920313343405724, "chain_id": "3HYA4D452RICLOOY2BQUG0IG0R4F29_1_9"}
{"score": 0.021009454503655434, "chain_id": "3HYA4D452RICLOOY2BQUG0IG0R4F29_1_10"}
{"score": 0.9405590295791626, "chain_id": "3SBEHTYCWN2MW0JVW43AS1WDV0MYIZ_1_1"}
{"score": 0.5800114274024963, "chain_id": "3SBEHTYCWN2MW0JVW43AS1WDV0MYIZ_1_2"}
{"score": 0.5782549381256104, "chain_id": "3SBEHTYCWN2MW0JVW43AS1WDV0MYIZ_1_3"}
{"score": 0.7110059857368469, "chain_id": "3SBEHTYCWN2MW0JVW43AS1WDV0MYIZ_1_5"}
{"score": 0.13897275924682617, "chain_id": "3SBEHTYCWN2MW0JVW43AS1WDV0MYIZ_1_4"}
{"score": 0.03291330486536026, "chain_id": "3SBEHTYCWN2MW0JVW43AS1WDV0MYIZ_1_6"}
{"score": 0.014967143535614014, "chain_id": "3SBEHTYCWN2MW0JVW43AS1WDV0MYIZ_1_7"}
{"score": 0.008760624565184116, "chain_id": "3SBEHTYCWN2MW0JVW43AS1WDV0MYIZ_1_8"}
{"score": 0.2166775017976761, "chain_id": "3SBEHTYCWN2MW0JVW43AS1WDV0MYIZ_1_9"}
{"score": 0.807864248752594, "chain_id": "3SBEHTYCWN2MW0JVW43AS1WDV0MYIZ_1_10"}
{"score": 0.9900944828987122, "chain_id": "3X1FV8S5JXQRWFIV15GN0QF3YBTGVR_1_1"}
{"score": 0.9854235053062439, "chain_id": "3X1FV8S5JXQRWFIV15GN0QF3YBTGVR_1_2"}
{"score": 0.5896332263946533, "chain_id": "3X1FV8S5JXQRWFIV15GN0QF3YBTGVR_1_3"}
{"score": 0.342433363199234, "chain_id": "3X1FV8S5JXQRWFIV15GN0QF3YBTGVR_1_4"}
{"score": 0.21741747856140137, "chain_id": "3X1FV8S5JXQRWFIV15GN0QF3YBTGVR_1_5"}
{"score": 0.9307575821876526, "chain_id": "3X1FV8S5JXQRWFIV15GN0QF3YBTGVR_1_6"}
{"score": 0.7495818734169006, "chain_id": "3X1FV8S5JXQRWFIV15GN0QF3YBTGVR_1_7"}
{"score": 0.4951752722263336, "chain_id": "3X1FV8S5JXQRWFIV15GN0QF3YBTGVR_1_8"}
{"score": 0.15810559689998627, "chain_id": "3X1FV8S5JXQRWFIV15GN0QF3YBTGVR_1_9"}
{"score": 0.6768882274627686, "chain_id": "3X1FV8S5JXQRWFIV15GN0QF3YBTGVR_1_10"}
{"score": 0.8721335530281067, "chain_id": "3WJ1OXY92AFSBC9F7CD3CQKSPQ7A8C_1_1"}
{"score": 0.14270377159118652, "chain_id": "3WJ1OXY92AFSBC9F7CD3CQKSPQ7A8C_1_9"}
{"score": 0.7000414729118347, "chain_id": "3WJ1OXY92AFSBC9F7CD3CQKSPQ7A8C_1_2"}
{"score": 0.9103202223777771, "chain_id": "3WJ1OXY92AFSBC9F7CD3CQKSPQ7A8C_1_3"}
{"score": 0.5468452572822571, "chain_id": "3WJ1OXY92AFSBC9F7CD3CQKSPQ7A8C_1_4"}
{"score": 0.26802030205726624, "chain_id": "3WJ1OXY92AFSBC9F7CD3CQKSPQ7A8C_1_5"}
{"score": 0.9412935972213745, "chain_id": "3WJ1OXY92AFSBC9F7CD3CQKSPQ7A8C_1_6"}
{"score": 0.7453970909118652, "chain_id": "3WJ1OXY92AFSBC9F7CD3CQKSPQ7A8C_1_7"}
{"score": 0.21115200221538544, "chain_id": "3WJ1OXY92AFSBC9F7CD3CQKSPQ7A8C_1_8"}
{"score": 0.45765843987464905, "chain_id": "3WJ1OXY92AFSBC9F7CD3CQKSPQ7A8C_1_10"}
{"score": 0.9900944828987122, "chain_id": "3N1FSUEFL5ZPQIPPFJESLFCTCWZ4DA_1_1"}
{"score": 0.9854235053062439, "chain_id": "3N1FSUEFL5ZPQIPPFJESLFCTCWZ4DA_1_2"}
{"score": 0.5896332263946533, "chain_id": "3N1FSUEFL5ZPQIPPFJESLFCTCWZ4DA_1_3"}
{"score": 0.342433363199234, "chain_id": "3N1FSUEFL5ZPQIPPFJESLFCTCWZ4DA_1_4"}
{"score": 0.21741747856140137, "chain_id": "3N1FSUEFL5ZPQIPPFJESLFCTCWZ4DA_1_5"}
{"score": 0.9307575821876526, "chain_id": "3N1FSUEFL5ZPQIPPFJESLFCTCWZ4DA_1_6"}
{"score": 0.7495818734169006, "chain_id": "3N1FSUEFL5ZPQIPPFJESLFCTCWZ4DA_1_7"}
{"score": 0.4951752722263336, "chain_id": "3N1FSUEFL5ZPQIPPFJESLFCTCWZ4DA_1_8"}
{"score": 0.15810559689998627, "chain_id": "3N1FSUEFL5ZPQIPPFJESLFCTCWZ4DA_1_9"}
{"score": 0.6768882274627686, "chain_id": "3N1FSUEFL5ZPQIPPFJESLFCTCWZ4DA_1_10"}
{"score": 0.9916479587554932, "chain_id": "3P1L2B7AD1OCSNNZBKRPIQQ32ZGLO2_1_2"}
{"score": 0.9358387589454651, "chain_id": "3P1L2B7AD1OCSNNZBKRPIQQ32ZGLO2_1_3"}
{"score": 0.952042818069458, "chain_id": "3P1L2B7AD1OCSNNZBKRPIQQ32ZGLO2_1_4"}
{"score": 0.08195856213569641, "chain_id": "3P1L2B7AD1OCSNNZBKRPIQQ32ZGLO2_1_7"}
{"score": 0.9904470443725586, "chain_id": "3P1L2B7AD1OCSNNZBKRPIQQ32ZGLO2_1_1"}
{"score": 0.1332027018070221, "chain_id": "3P1L2B7AD1OCSNNZBKRPIQQ32ZGLO2_1_5"}
{"score": 0.05553140863776207, "chain_id": "3P1L2B7AD1OCSNNZBKRPIQQ32ZGLO2_1_6"}
{"score": 0.019378993660211563, "chain_id": "3P1L2B7AD1OCSNNZBKRPIQQ32ZGLO2_1_8"}
{"score": 0.037638090550899506, "chain_id": "3P1L2B7AD1OCSNNZBKRPIQQ32ZGLO2_1_9"}
{"score": 0.016728099435567856, "chain_id": "3P1L2B7AD1OCSNNZBKRPIQQ32ZGLO2_1_10"}
{"score": 0.9904470443725586, "chain_id": "3Q5C1WP23M0DU6DDDVD7P5HYJ9Y15H_1_1"}
{"score": 0.9916479587554932, "chain_id": "3Q5C1WP23M0DU6DDDVD7P5HYJ9Y15H_1_2"}
{"score": 0.9358387589454651, "chain_id": "3Q5C1WP23M0DU6DDDVD7P5HYJ9Y15H_1_3"}
{"score": 0.952042818069458, "chain_id": "3Q5C1WP23M0DU6DDDVD7P5HYJ9Y15H_1_4"}
{"score": 0.1332027018070221, "chain_id": "3Q5C1WP23M0DU6DDDVD7P5HYJ9Y15H_1_5"}
{"score": 0.05553140863776207, "chain_id": "3Q5C1WP23M0DU6DDDVD7P5HYJ9Y15H_1_6"}
{"score": 0.08195856213569641, "chain_id": "3Q5C1WP23M0DU6DDDVD7P5HYJ9Y15H_1_7"}
{"score": 0.019378993660211563, "chain_id": "3Q5C1WP23M0DU6DDDVD7P5HYJ9Y15H_1_8"}
{"score": 0.037638090550899506, "chain_id": "3Q5C1WP23M0DU6DDDVD7P5HYJ9Y15H_1_9"}
{"score": 0.016728099435567856, "chain_id": "3Q5C1WP23M0DU6DDDVD7P5HYJ9Y15H_1_10"}
{"score": 0.5771113038063049, "chain_id": "3ZOTGHDK5IAZW0IPVTOQUC4YR07OSD_1_1"}
{"score": 0.9643951654434204, "chain_id": "3ZOTGHDK5IAZW0IPVTOQUC4YR07OSD_1_2"}
{"score": 0.0868578851222992, "chain_id": "3ZOTGHDK5IAZW0IPVTOQUC4YR07OSD_1_3"}
{"score": 0.045742884278297424, "chain_id": "3ZOTGHDK5IAZW0IPVTOQUC4YR07OSD_1_4"}
{"score": 0.19842690229415894, "chain_id": "3ZOTGHDK5IAZW0IPVTOQUC4YR07OSD_1_5"}
{"score": 0.7282000780105591, "chain_id": "3ZOTGHDK5IAZW0IPVTOQUC4YR07OSD_1_6"}
{"score": 0.037719421088695526, "chain_id": "3ZOTGHDK5IAZW0IPVTOQUC4YR07OSD_1_7"}
{"score": 0.17342619597911835, "chain_id": "3ZOTGHDK5IAZW0IPVTOQUC4YR07OSD_1_8"}
{"score": 0.02195528708398342, "chain_id": "3ZOTGHDK5IAZW0IPVTOQUC4YR07OSD_1_9"}
{"score": 0.024157559499144554, "chain_id": "3ZOTGHDK5IAZW0IPVTOQUC4YR07OSD_1_10"}
{"score": 0.11188257485628128, "chain_id": "37WLF8U1WPPBJBZDQOTUMQRXP7B6K2_1_2"}
{"score": 0.0746062621474266, "chain_id": "37WLF8U1WPPBJBZDQOTUMQRXP7B6K2_1_4"}
{"score": 0.614717960357666, "chain_id": "37WLF8U1WPPBJBZDQOTUMQRXP7B6K2_1_5"}
{"score": 0.0461973212659359, "chain_id": "37WLF8U1WPPBJBZDQOTUMQRXP7B6K2_1_1"}
{"score": 0.12090978026390076, "chain_id": "37WLF8U1WPPBJBZDQOTUMQRXP7B6K2_1_3"}
{"score": 0.34221339225769043, "chain_id": "37WLF8U1WPPBJBZDQOTUMQRXP7B6K2_1_6"}
{"score": 0.18748952448368073, "chain_id": "37WLF8U1WPPBJBZDQOTUMQRXP7B6K2_1_7"}
{"score": 0.3804631233215332, "chain_id": "37WLF8U1WPPBJBZDQOTUMQRXP7B6K2_1_8"}
{"score": 0.056562792509794235, "chain_id": "37WLF8U1WPPBJBZDQOTUMQRXP7B6K2_1_9"}
{"score": 0.39676761627197266, "chain_id": "37WLF8U1WPPBJBZDQOTUMQRXP7B6K2_1_10"}
{"score": 0.9879882335662842, "chain_id": "3OLF68YTN901QRJ2FQJ9MI1ED7NFAS_1_1"}
{"score": 0.9838628172874451, "chain_id": "3OLF68YTN901QRJ2FQJ9MI1ED7NFAS_1_3"}
{"score": 0.9723308086395264, "chain_id": "3OLF68YTN901QRJ2FQJ9MI1ED7NFAS_1_4"}
{"score": 0.9908084273338318, "chain_id": "3OLF68YTN901QRJ2FQJ9MI1ED7NFAS_1_6"}
{"score": 0.9318959712982178, "chain_id": "3OLF68YTN901QRJ2FQJ9MI1ED7NFAS_1_2"}
{"score": 0.985372006893158, "chain_id": "3OLF68YTN901QRJ2FQJ9MI1ED7NFAS_1_5"}
{"score": 0.8496091961860657, "chain_id": "3OLF68YTN901QRJ2FQJ9MI1ED7NFAS_1_7"}
{"score": 0.9582607746124268, "chain_id": "3OLF68YTN901QRJ2FQJ9MI1ED7NFAS_1_8"}
{"score": 0.9755561351776123, "chain_id": "3OLF68YTN901QRJ2FQJ9MI1ED7NFAS_1_9"}
{"score": 0.10324681550264359, "chain_id": "3OLF68YTN901QRJ2FQJ9MI1ED7NFAS_1_10"}
{"score": 0.15654350817203522, "chain_id": "3I02618YA05XWDMUZYW5YDRCM5LPUE_1_1"}
{"score": 0.35417863726615906, "chain_id": "3I02618YA05XWDMUZYW5YDRCM5LPUE_1_2"}
{"score": 0.043535541743040085, "chain_id": "3I02618YA05XWDMUZYW5YDRCM5LPUE_1_3"}
{"score": 0.07886110246181488, "chain_id": "3I02618YA05XWDMUZYW5YDRCM5LPUE_1_4"}
{"score": 0.1969754993915558, "chain_id": "3I02618YA05XWDMUZYW5YDRCM5LPUE_1_5"}
{"score": 0.021465009078383446, "chain_id": "3I02618YA05XWDMUZYW5YDRCM5LPUE_1_6"}
{"score": 0.03201264888048172, "chain_id": "3I02618YA05XWDMUZYW5YDRCM5LPUE_1_7"}
{"score": 0.06338763236999512, "chain_id": "3I02618YA05XWDMUZYW5YDRCM5LPUE_1_8"}
{"score": 0.03719551861286163, "chain_id": "3I02618YA05XWDMUZYW5YDRCM5LPUE_1_9"}
{"score": 0.23264937102794647, "chain_id": "3I02618YA05XWDMUZYW5YDRCM5LPUE_1_10"}
{"score": 0.0424969457089901, "chain_id": "3VW04L3ZLT5UMQIGQUH9CXCJD6VXXL_1_1"}
{"score": 0.4560645520687103, "chain_id": "3VW04L3ZLT5UMQIGQUH9CXCJD6VXXL_1_2"}
{"score": 0.06306007504463196, "chain_id": "3VW04L3ZLT5UMQIGQUH9CXCJD6VXXL_1_3"}
{"score": 0.15370520949363708, "chain_id": "3VW04L3ZLT5UMQIGQUH9CXCJD6VXXL_1_4"}
{"score": 0.18714413046836853, "chain_id": "3VW04L3ZLT5UMQIGQUH9CXCJD6VXXL_1_5"}
{"score": 0.19216515123844147, "chain_id": "3VW04L3ZLT5UMQIGQUH9CXCJD6VXXL_1_6"}
{"score": 0.03577727824449539, "chain_id": "3VW04L3ZLT5UMQIGQUH9CXCJD6VXXL_1_7"}
{"score": 0.07488906383514404, "chain_id": "3VW04L3ZLT5UMQIGQUH9CXCJD6VXXL_1_8"}
{"score": 0.18531130254268646, "chain_id": "3VW04L3ZLT5UMQIGQUH9CXCJD6VXXL_1_9"}
{"score": 0.04839499294757843, "chain_id": "3VW04L3ZLT5UMQIGQUH9CXCJD6VXXL_1_10"}
{"score": 0.09056147933006287, "chain_id": "3R5F3LQFV2JWXC43QLIYQ5119FYZOF_1_1"}
{"score": 0.09884803742170334, "chain_id": "3R5F3LQFV2JWXC43QLIYQ5119FYZOF_1_2"}
{"score": 0.06214834377169609, "chain_id": "3R5F3LQFV2JWXC43QLIYQ5119FYZOF_1_3"}
{"score": 0.07586683332920074, "chain_id": "3R5F3LQFV2JWXC43QLIYQ5119FYZOF_1_4"}
{"score": 0.037723101675510406, "chain_id": "3R5F3LQFV2JWXC43QLIYQ5119FYZOF_1_5"}
{"score": 0.2264571636915207, "chain_id": "3R5F3LQFV2JWXC43QLIYQ5119FYZOF_1_6"}
{"score": 0.18555037677288055, "chain_id": "3R5F3LQFV2JWXC43QLIYQ5119FYZOF_1_7"}
{"score": 0.017185509204864502, "chain_id": "3R5F3LQFV2JWXC43QLIYQ5119FYZOF_1_8"}
{"score": 0.038796015083789825, "chain_id": "3R5F3LQFV2JWXC43QLIYQ5119FYZOF_1_9"}
{"score": 0.020586024969816208, "chain_id": "3R5F3LQFV2JWXC43QLIYQ5119FYZOF_1_10"}
{"score": 0.9769259095191956, "chain_id": "3QECW5O0KH0E3QPMFEXHVB0TAS65TA_1_2"}
{"score": 0.3466857373714447, "chain_id": "3QECW5O0KH0E3QPMFEXHVB0TAS65TA_1_1"}
{"score": 0.09330113977193832, "chain_id": "3QECW5O0KH0E3QPMFEXHVB0TAS65TA_1_3"}
{"score": 0.06681520491838455, "chain_id": "3QECW5O0KH0E3QPMFEXHVB0TAS65TA_1_4"}
{"score": 0.041490595787763596, "chain_id": "3QECW5O0KH0E3QPMFEXHVB0TAS65TA_1_5"}
{"score": 0.057630546391010284, "chain_id": "3QECW5O0KH0E3QPMFEXHVB0TAS65TA_1_6"}
{"score": 0.03998542204499245, "chain_id": "3QECW5O0KH0E3QPMFEXHVB0TAS65TA_1_7"}
{"score": 0.03860323503613472, "chain_id": "3QECW5O0KH0E3QPMFEXHVB0TAS65TA_1_8"}
{"score": 0.032658692449331284, "chain_id": "3QECW5O0KH0E3QPMFEXHVB0TAS65TA_1_9"}
{"score": 0.04473720118403435, "chain_id": "3QECW5O0KH0E3QPMFEXHVB0TAS65TA_1_10"}
{"score": 0.04234832897782326, "chain_id": "3NAPMVF0ZWEZ6V9SKSSIS85ZY5I72U_1_1"}
{"score": 0.16642943024635315, "chain_id": "3NAPMVF0ZWEZ6V9SKSSIS85ZY5I72U_1_2"}
{"score": 0.2229107767343521, "chain_id": "3NAPMVF0ZWEZ6V9SKSSIS85ZY5I72U_1_3"}
{"score": 0.1254849135875702, "chain_id": "3NAPMVF0ZWEZ6V9SKSSIS85ZY5I72U_1_4"}
{"score": 0.2516786456108093, "chain_id": "3NAPMVF0ZWEZ6V9SKSSIS85ZY5I72U_1_5"}
{"score": 0.2771064043045044, "chain_id": "3NAPMVF0ZWEZ6V9SKSSIS85ZY5I72U_1_6"}
{"score": 0.3298325538635254, "chain_id": "3NAPMVF0ZWEZ6V9SKSSIS85ZY5I72U_1_7"}
{"score": 0.088505819439888, "chain_id": "3NAPMVF0ZWEZ6V9SKSSIS85ZY5I72U_1_8"}
{"score": 0.2951991558074951, "chain_id": "3NAPMVF0ZWEZ6V9SKSSIS85ZY5I72U_1_9"}
{"score": 0.08937430381774902, "chain_id": "3NAPMVF0ZWEZ6V9SKSSIS85ZY5I72U_1_10"}
{"score": 0.4769619107246399, "chain_id": "3Y5140Z9DXFSNMRU5H7RFA31N6YPIT_1_1"}
{"score": 0.4902859330177307, "chain_id": "3Y5140Z9DXFSNMRU5H7RFA31N6YPIT_1_2"}
{"score": 0.2022729068994522, "chain_id": "3Y5140Z9DXFSNMRU5H7RFA31N6YPIT_1_3"}
{"score": 0.33917421102523804, "chain_id": "3Y5140Z9DXFSNMRU5H7RFA31N6YPIT_1_4"}
{"score": 0.9079618453979492, "chain_id": "3Y5140Z9DXFSNMRU5H7RFA31N6YPIT_1_5"}
{"score": 0.6275643110275269, "chain_id": "3Y5140Z9DXFSNMRU5H7RFA31N6YPIT_1_6"}
{"score": 0.04182766377925873, "chain_id": "3Y5140Z9DXFSNMRU5H7RFA31N6YPIT_1_7"}
{"score": 0.4174048900604248, "chain_id": "3Y5140Z9DXFSNMRU5H7RFA31N6YPIT_1_8"}
{"score": 0.9681011438369751, "chain_id": "3Y5140Z9DXFSNMRU5H7RFA31N6YPIT_1_9"}
{"score": 0.037129007279872894, "chain_id": "3Y5140Z9DXFSNMRU5H7RFA31N6YPIT_1_10"}
{"score": 0.4683692157268524, "chain_id": "351SEKWQS0G5U8EVLNEO79TTRP7DMY_1_1"}
{"score": 0.9695390462875366, "chain_id": "351SEKWQS0G5U8EVLNEO79TTRP7DMY_1_5"}
{"score": 0.8315140604972839, "chain_id": "351SEKWQS0G5U8EVLNEO79TTRP7DMY_1_6"}
{"score": 0.24200507998466492, "chain_id": "351SEKWQS0G5U8EVLNEO79TTRP7DMY_1_2"}
{"score": 0.4216766059398651, "chain_id": "351SEKWQS0G5U8EVLNEO79TTRP7DMY_1_3"}
{"score": 0.3443838357925415, "chain_id": "351SEKWQS0G5U8EVLNEO79TTRP7DMY_1_4"}
{"score": 0.9693450927734375, "chain_id": "351SEKWQS0G5U8EVLNEO79TTRP7DMY_1_7"}
{"score": 0.8527612090110779, "chain_id": "351SEKWQS0G5U8EVLNEO79TTRP7DMY_1_8"}
{"score": 0.09464456140995026, "chain_id": "351SEKWQS0G5U8EVLNEO79TTRP7DMY_1_9"}
{"score": 0.053625233471393585, "chain_id": "351SEKWQS0G5U8EVLNEO79TTRP7DMY_1_10"}
{"score": 0.7838525176048279, "chain_id": "3C2NJ6JBKAGO9G1F0Z97O5RPZOUN21_1_1"}
{"score": 0.9595149159431458, "chain_id": "3C2NJ6JBKAGO9G1F0Z97O5RPZOUN21_1_2"}
{"score": 0.944605827331543, "chain_id": "3C2NJ6JBKAGO9G1F0Z97O5RPZOUN21_1_4"}
{"score": 0.9582228660583496, "chain_id": "3C2NJ6JBKAGO9G1F0Z97O5RPZOUN21_1_5"}
{"score": 0.5652231574058533, "chain_id": "3C2NJ6JBKAGO9G1F0Z97O5RPZOUN21_1_9"}
{"score": 0.8015221357345581, "chain_id": "3C2NJ6JBKAGO9G1F0Z97O5RPZOUN21_1_3"}
{"score": 0.8606981635093689, "chain_id": "3C2NJ6JBKAGO9G1F0Z97O5RPZOUN21_1_6"}
{"score": 0.48681265115737915, "chain_id": "3C2NJ6JBKAGO9G1F0Z97O5RPZOUN21_1_7"}
{"score": 0.06459362059831619, "chain_id": "3C2NJ6JBKAGO9G1F0Z97O5RPZOUN21_1_8"}
{"score": 0.20717769861221313, "chain_id": "3C2NJ6JBKAGO9G1F0Z97O5RPZOUN21_1_10"}
{"score": 0.579220712184906, "chain_id": "3JWH6J9I9SCIXT1BJS2IPYUTTW3NB3_1_2"}
{"score": 0.22013089060783386, "chain_id": "3JWH6J9I9SCIXT1BJS2IPYUTTW3NB3_1_1"}
{"score": 0.47467365860939026, "chain_id": "3JWH6J9I9SCIXT1BJS2IPYUTTW3NB3_1_3"}
{"score": 0.33652523159980774, "chain_id": "3JWH6J9I9SCIXT1BJS2IPYUTTW3NB3_1_4"}
{"score": 0.04990522563457489, "chain_id": "3JWH6J9I9SCIXT1BJS2IPYUTTW3NB3_1_5"}
{"score": 0.022417331114411354, "chain_id": "3JWH6J9I9SCIXT1BJS2IPYUTTW3NB3_1_6"}
{"score": 0.02241327613592148, "chain_id": "3JWH6J9I9SCIXT1BJS2IPYUTTW3NB3_1_7"}
{"score": 0.026713529601693153, "chain_id": "3JWH6J9I9SCIXT1BJS2IPYUTTW3NB3_1_8"}
{"score": 0.05957704409956932, "chain_id": "3JWH6J9I9SCIXT1BJS2IPYUTTW3NB3_1_9"}
{"score": 0.09459738433361053, "chain_id": "3JWH6J9I9SCIXT1BJS2IPYUTTW3NB3_1_10"}
{"score": 0.9803099036216736, "chain_id": "3Z7VU45IPYGB1KX2KJKNE9OTJZNZ1K_1_1"}
{"score": 0.8795400261878967, "chain_id": "3Z7VU45IPYGB1KX2KJKNE9OTJZNZ1K_1_3"}
{"score": 0.869828999042511, "chain_id": "3Z7VU45IPYGB1KX2KJKNE9OTJZNZ1K_1_9"}
{"score": 0.9441043734550476, "chain_id": "3Z7VU45IPYGB1KX2KJKNE9OTJZNZ1K_1_10"}
{"score": 0.9809478521347046, "chain_id": "3Z7VU45IPYGB1KX2KJKNE9OTJZNZ1K_1_2"}
{"score": 0.8115057349205017, "chain_id": "3Z7VU45IPYGB1KX2KJKNE9OTJZNZ1K_1_4"}
{"score": 0.789172351360321, "chain_id": "3Z7VU45IPYGB1KX2KJKNE9OTJZNZ1K_1_5"}
{"score": 0.7776191830635071, "chain_id": "3Z7VU45IPYGB1KX2KJKNE9OTJZNZ1K_1_6"}
{"score": 0.9390684366226196, "chain_id": "3Z7VU45IPYGB1KX2KJKNE9OTJZNZ1K_1_7"}
{"score": 0.8589649796485901, "chain_id": "3Z7VU45IPYGB1KX2KJKNE9OTJZNZ1K_1_8"}
{"score": 0.8304406404495239, "chain_id": "392CY0QWG1Q6YT5B7XF3CCS61AV4IF_1_3"}
{"score": 0.9306324124336243, "chain_id": "392CY0QWG1Q6YT5B7XF3CCS61AV4IF_1_1"}
{"score": 0.942096471786499, "chain_id": "392CY0QWG1Q6YT5B7XF3CCS61AV4IF_1_2"}
{"score": 0.8895958065986633, "chain_id": "392CY0QWG1Q6YT5B7XF3CCS61AV4IF_1_4"}
{"score": 0.017333583906292915, "chain_id": "392CY0QWG1Q6YT5B7XF3CCS61AV4IF_1_5"}
{"score": 0.05727212131023407, "chain_id": "392CY0QWG1Q6YT5B7XF3CCS61AV4IF_1_6"}
{"score": 0.8639029264450073, "chain_id": "392CY0QWG1Q6YT5B7XF3CCS61AV4IF_1_7"}
{"score": 0.05372878164052963, "chain_id": "392CY0QWG1Q6YT5B7XF3CCS61AV4IF_1_8"}
{"score": 0.7823760509490967, "chain_id": "392CY0QWG1Q6YT5B7XF3CCS61AV4IF_1_9"}
{"score": 0.035916902124881744, "chain_id": "392CY0QWG1Q6YT5B7XF3CCS61AV4IF_1_10"}
{"score": 0.0348096564412117, "chain_id": "3JNQLM5FT4LYLGYUOMTSBDN0Y382LG_1_1"}
{"score": 0.05158732458949089, "chain_id": "3JNQLM5FT4LYLGYUOMTSBDN0Y382LG_1_2"}
{"score": 0.16260434687137604, "chain_id": "3JNQLM5FT4LYLGYUOMTSBDN0Y382LG_1_3"}
{"score": 0.19384686648845673, "chain_id": "3JNQLM5FT4LYLGYUOMTSBDN0Y382LG_1_4"}
{"score": 0.0999046117067337, "chain_id": "3JNQLM5FT4LYLGYUOMTSBDN0Y382LG_1_5"}
{"score": 0.05581864342093468, "chain_id": "3JNQLM5FT4LYLGYUOMTSBDN0Y382LG_1_6"}
{"score": 0.027852818369865417, "chain_id": "3JNQLM5FT4LYLGYUOMTSBDN0Y382LG_1_7"}
{"score": 0.0786224827170372, "chain_id": "3JNQLM5FT4LYLGYUOMTSBDN0Y382LG_1_8"}
{"score": 0.026497244834899902, "chain_id": "3JNQLM5FT4LYLGYUOMTSBDN0Y382LG_1_9"}
{"score": 0.041070278733968735, "chain_id": "3JNQLM5FT4LYLGYUOMTSBDN0Y382LG_1_10"}
{"score": 0.9013250470161438, "chain_id": "3N1FSUEFL5ZPQIPPFJESLFCT9AZ4DZ_1_1"}
{"score": 0.4267193078994751, "chain_id": "3N1FSUEFL5ZPQIPPFJESLFCT9AZ4DZ_1_2"}
{"score": 0.8210919499397278, "chain_id": "3N1FSUEFL5ZPQIPPFJESLFCT9AZ4DZ_1_3"}
{"score": 0.6721128225326538, "chain_id": "3N1FSUEFL5ZPQIPPFJESLFCT9AZ4DZ_1_4"}
{"score": 0.14716428518295288, "chain_id": "3N1FSUEFL5ZPQIPPFJESLFCT9AZ4DZ_1_8"}
{"score": 0.09664462506771088, "chain_id": "3N1FSUEFL5ZPQIPPFJESLFCT9AZ4DZ_1_5"}
{"score": 0.6350325345993042, "chain_id": "3N1FSUEFL5ZPQIPPFJESLFCT9AZ4DZ_1_6"}
{"score": 0.2841198146343231, "chain_id": "3N1FSUEFL5ZPQIPPFJESLFCT9AZ4DZ_1_7"}
{"score": 0.026694638654589653, "chain_id": "3N1FSUEFL5ZPQIPPFJESLFCT9AZ4DZ_1_9"}
{"score": 0.054111119359731674, "chain_id": "3N1FSUEFL5ZPQIPPFJESLFCT9AZ4DZ_1_10"}
{"score": 0.9872065782546997, "chain_id": "3VELCLL3GKI5W362J7QGBH8B9ER1F2_1_2"}
{"score": 0.9396024346351624, "chain_id": "3VELCLL3GKI5W362J7QGBH8B9ER1F2_1_3"}
{"score": 0.7657251358032227, "chain_id": "3VELCLL3GKI5W362J7QGBH8B9ER1F2_1_4"}
{"score": 0.13773757219314575, "chain_id": "3VELCLL3GKI5W362J7QGBH8B9ER1F2_1_10"}
{"score": 0.6795353293418884, "chain_id": "3VELCLL3GKI5W362J7QGBH8B9ER1F2_1_1"}
{"score": 0.8422725796699524, "chain_id": "3VELCLL3GKI5W362J7QGBH8B9ER1F2_1_5"}
{"score": 0.5333855152130127, "chain_id": "3VELCLL3GKI5W362J7QGBH8B9ER1F2_1_6"}
{"score": 0.024632738903164864, "chain_id": "3VELCLL3GKI5W362J7QGBH8B9ER1F2_1_7"}
{"score": 0.3084288537502289, "chain_id": "3VELCLL3GKI5W362J7QGBH8B9ER1F2_1_8"}
{"score": 0.9477853178977966, "chain_id": "3VELCLL3GKI5W362J7QGBH8B9ER1F2_1_9"}
{"score": 0.9890235662460327, "chain_id": "31N2WW6R9RP166KH6B4ZZAN883WF3A_1_2"}
{"score": 0.9360491037368774, "chain_id": "31N2WW6R9RP166KH6B4ZZAN883WF3A_1_3"}
{"score": 0.5760735273361206, "chain_id": "31N2WW6R9RP166KH6B4ZZAN883WF3A_1_1"}
{"score": 0.7042190432548523, "chain_id": "31N2WW6R9RP166KH6B4ZZAN883WF3A_1_4"}
{"score": 0.7729032039642334, "chain_id": "31N2WW6R9RP166KH6B4ZZAN883WF3A_1_5"}
{"score": 0.8683418035507202, "chain_id": "31N2WW6R9RP166KH6B4ZZAN883WF3A_1_6"}
{"score": 0.03371656686067581, "chain_id": "31N2WW6R9RP166KH6B4ZZAN883WF3A_1_7"}
{"score": 0.10682502388954163, "chain_id": "31N2WW6R9RP166KH6B4ZZAN883WF3A_1_8"}
{"score": 0.5829291343688965, "chain_id": "31N2WW6R9RP166KH6B4ZZAN883WF3A_1_9"}
{"score": 0.0299969669431448, "chain_id": "31N2WW6R9RP166KH6B4ZZAN883WF3A_1_10"}
{"score": 0.045589011162519455, "chain_id": "3KGTPGBS6XK146LOX0LT20JJCNG2UC_1_1"}
{"score": 0.05811125785112381, "chain_id": "3KGTPGBS6XK146LOX0LT20JJCNG2UC_1_2"}
{"score": 0.06489606946706772, "chain_id": "3KGTPGBS6XK146LOX0LT20JJCNG2UC_1_3"}
{"score": 0.02042321115732193, "chain_id": "3KGTPGBS6XK146LOX0LT20JJCNG2UC_1_4"}
{"score": 0.061997417360544205, "chain_id": "3KGTPGBS6XK146LOX0LT20JJCNG2UC_1_5"}
{"score": 0.1061791330575943, "chain_id": "3KGTPGBS6XK146LOX0LT20JJCNG2UC_1_6"}
{"score": 0.01808192767202854, "chain_id": "3KGTPGBS6XK146LOX0LT20JJCNG2UC_1_7"}
{"score": 0.01776822656393051, "chain_id": "3KGTPGBS6XK146LOX0LT20JJCNG2UC_1_8"}
{"score": 0.023545492440462112, "chain_id": "3KGTPGBS6XK146LOX0LT20JJCNG2UC_1_9"}
{"score": 0.046627044677734375, "chain_id": "3KGTPGBS6XK146LOX0LT20JJCNG2UC_1_10"}
{"score": 0.6985493898391724, "chain_id": "3NL0RFNU0FMX4OVZ700FPS7JS0GK49_1_1"}
{"score": 0.25040802359580994, "chain_id": "3NL0RFNU0FMX4OVZ700FPS7JS0GK49_1_2"}
{"score": 0.9788647890090942, "chain_id": "3NL0RFNU0FMX4OVZ700FPS7JS0GK49_1_3"}
{"score": 0.3805520236492157, "chain_id": "3NL0RFNU0FMX4OVZ700FPS7JS0GK49_1_4"}
{"score": 0.18531852960586548, "chain_id": "3NL0RFNU0FMX4OVZ700FPS7JS0GK49_1_5"}
{"score": 0.1929674744606018, "chain_id": "3NL0RFNU0FMX4OVZ700FPS7JS0GK49_1_6"}
{"score": 0.13821163773536682, "chain_id": "3NL0RFNU0FMX4OVZ700FPS7JS0GK49_1_7"}
{"score": 0.10130929946899414, "chain_id": "3NL0RFNU0FMX4OVZ700FPS7JS0GK49_1_8"}
{"score": 0.2626461982727051, "chain_id": "3NL0RFNU0FMX4OVZ700FPS7JS0GK49_1_9"}
{"score": 0.6206151843070984, "chain_id": "3NL0RFNU0FMX4OVZ700FPS7JS0GK49_1_10"}
{"score": 0.9908701777458191, "chain_id": "3NG53N1RLVIZYGFHWVV02L9NOGFP8N_1_1"}
{"score": 0.9917070269584656, "chain_id": "3NG53N1RLVIZYGFHWVV02L9NOGFP8N_1_3"}
{"score": 0.9876068234443665, "chain_id": "3NG53N1RLVIZYGFHWVV02L9NOGFP8N_1_4"}
{"score": 0.9910315275192261, "chain_id": "3NG53N1RLVIZYGFHWVV02L9NOGFP8N_1_2"}
{"score": 0.9175323247909546, "chain_id": "3NG53N1RLVIZYGFHWVV02L9NOGFP8N_1_5"}
{"score": 0.9394692182540894, "chain_id": "3NG53N1RLVIZYGFHWVV02L9NOGFP8N_1_6"}
{"score": 0.8859620094299316, "chain_id": "3NG53N1RLVIZYGFHWVV02L9NOGFP8N_1_7"}
{"score": 0.25981512665748596, "chain_id": "3NG53N1RLVIZYGFHWVV02L9NOGFP8N_1_8"}
{"score": 0.5243114829063416, "chain_id": "3NG53N1RLVIZYGFHWVV02L9NOGFP8N_1_9"}
{"score": 0.8604647517204285, "chain_id": "3NG53N1RLVIZYGFHWVV02L9NOGFP8N_1_10"}
{"score": 0.9856408834457397, "chain_id": "3Z7EFSHGN9D6JS7LZYLMYKR9ILKXCW_1_1"}
{"score": 0.9865664839744568, "chain_id": "3Z7EFSHGN9D6JS7LZYLMYKR9ILKXCW_1_2"}
{"score": 0.9623767733573914, "chain_id": "3Z7EFSHGN9D6JS7LZYLMYKR9ILKXCW_1_4"}
{"score": 0.9791207313537598, "chain_id": "3Z7EFSHGN9D6JS7LZYLMYKR9ILKXCW_1_6"}
{"score": 0.12620015442371368, "chain_id": "3Z7EFSHGN9D6JS7LZYLMYKR9ILKXCW_1_7"}
{"score": 0.8758062720298767, "chain_id": "3Z7EFSHGN9D6JS7LZYLMYKR9ILKXCW_1_3"}
{"score": 0.7602607011795044, "chain_id": "3Z7EFSHGN9D6JS7LZYLMYKR9ILKXCW_1_5"}
{"score": 0.1641140729188919, "chain_id": "3Z7EFSHGN9D6JS7LZYLMYKR9ILKXCW_1_8"}
{"score": 0.7808352112770081, "chain_id": "3Z7EFSHGN9D6JS7LZYLMYKR9ILKXCW_1_9"}
{"score": 0.6100738644599915, "chain_id": "3Z7EFSHGN9D6JS7LZYLMYKR9ILKXCW_1_10"}
{"score": 0.9850179553031921, "chain_id": "3ZV9H2YQQD63HS6CW0EZ3Y983843WI_1_1"}
{"score": 0.9460277557373047, "chain_id": "3ZV9H2YQQD63HS6CW0EZ3Y983843WI_1_2"}
{"score": 0.973939836025238, "chain_id": "3ZV9H2YQQD63HS6CW0EZ3Y983843WI_1_3"}
{"score": 0.9897994995117188, "chain_id": "3ZV9H2YQQD63HS6CW0EZ3Y983843WI_1_5"}
{"score": 0.9896138310432434, "chain_id": "3ZV9H2YQQD63HS6CW0EZ3Y983843WI_1_6"}
{"score": 0.5881291031837463, "chain_id": "3ZV9H2YQQD63HS6CW0EZ3Y983843WI_1_4"}
{"score": 0.9904161691665649, "chain_id": "3KAKFY4PGU1LGXM77JAK2700OZJ3IS_1_1"}
{"score": 0.9907764792442322, "chain_id": "3KAKFY4PGU1LGXM77JAK2700OZJ3IS_1_2"}
{"score": 0.9892733693122864, "chain_id": "3KAKFY4PGU1LGXM77JAK2700OZJ3IS_1_3"}
{"score": 0.9901751279830933, "chain_id": "3KAKFY4PGU1LGXM77JAK2700OZJ3IS_1_4"}
{"score": 0.9917305111885071, "chain_id": "3KAKFY4PGU1LGXM77JAK2700OZJ3IS_1_5"}
{"score": 0.9925526976585388, "chain_id": "3KAKFY4PGU1LGXM77JAK2700OZJ3IS_1_6"}
{"score": 0.86219722032547, "chain_id": "3KAKFY4PGU1LGXM77JAK2700OZJ3IS_1_7"}
{"score": 0.8213629722595215, "chain_id": "3KAKFY4PGU1LGXM77JAK2700OZJ3IS_1_8"}
{"score": 0.25907382369041443, "chain_id": "3KAKFY4PGU1LGXM77JAK2700OZJ3IS_1_9"}
{"score": 0.5673283338546753, "chain_id": "3KAKFY4PGU1LGXM77JAK2700OZJ3IS_1_10"}
{"score": 0.7913414835929871, "chain_id": "36WLNQG78Z9E3NOYQTZZZB0KI8CBEN_1_3"}
{"score": 0.9692202210426331, "chain_id": "36WLNQG78Z9E3NOYQTZZZB0KI8CBEN_1_1"}
{"score": 0.9287152886390686, "chain_id": "36WLNQG78Z9E3NOYQTZZZB0KI8CBEN_1_2"}
{"score": 0.062270697206258774, "chain_id": "36WLNQG78Z9E3NOYQTZZZB0KI8CBEN_1_4"}
{"score": 0.09563031047582626, "chain_id": "36WLNQG78Z9E3NOYQTZZZB0KI8CBEN_1_5"}
{"score": 0.0584128275513649, "chain_id": "36WLNQG78Z9E3NOYQTZZZB0KI8CBEN_1_6"}
{"score": 0.041809435933828354, "chain_id": "36WLNQG78Z9E3NOYQTZZZB0KI8CBEN_1_7"}
{"score": 0.6751390695571899, "chain_id": "36WLNQG78Z9E3NOYQTZZZB0KI8CBEN_1_8"}
{"score": 0.0321878045797348, "chain_id": "36WLNQG78Z9E3NOYQTZZZB0KI8CBEN_1_9"}
{"score": 0.038618315011262894, "chain_id": "36WLNQG78Z9E3NOYQTZZZB0KI8CBEN_1_10"}
{"score": 0.9709309935569763, "chain_id": "3PZDLQMM0TK5IC4OB90T8UXD4SR2CT_1_2"}
{"score": 0.9430598616600037, "chain_id": "3PZDLQMM0TK5IC4OB90T8UXD4SR2CT_1_4"}
{"score": 0.21343286335468292, "chain_id": "3PZDLQMM0TK5IC4OB90T8UXD4SR2CT_1_5"}
{"score": 0.5316922068595886, "chain_id": "3PZDLQMM0TK5IC4OB90T8UXD4SR2CT_1_8"}
{"score": 0.2806686460971832, "chain_id": "3PZDLQMM0TK5IC4OB90T8UXD4SR2CT_1_9"}
{"score": 0.8390300869941711, "chain_id": "3PZDLQMM0TK5IC4OB90T8UXD4SR2CT_1_10"}
{"score": 0.6343940496444702, "chain_id": "3PZDLQMM0TK5IC4OB90T8UXD4SR2CT_1_1"}
{"score": 0.9569464325904846, "chain_id": "3PZDLQMM0TK5IC4OB90T8UXD4SR2CT_1_3"}
{"score": 0.6603418588638306, "chain_id": "3PZDLQMM0TK5IC4OB90T8UXD4SR2CT_1_6"}
{"score": 0.11117355525493622, "chain_id": "3PZDLQMM0TK5IC4OB90T8UXD4SR2CT_1_7"}
{"score": 0.5619273781776428, "chain_id": "3LOZAJ85YDCTLAFJ25WGM7IN4VJX2Z_1_1"}
{"score": 0.5052344799041748, "chain_id": "3LOZAJ85YDCTLAFJ25WGM7IN4VJX2Z_1_2"}
{"score": 0.11137237399816513, "chain_id": "3LOZAJ85YDCTLAFJ25WGM7IN4VJX2Z_1_3"}
{"score": 0.09051342308521271, "chain_id": "3LOZAJ85YDCTLAFJ25WGM7IN4VJX2Z_1_4"}
{"score": 0.1557818353176117, "chain_id": "3LOZAJ85YDCTLAFJ25WGM7IN4VJX2Z_1_5"}
{"score": 0.19775253534317017, "chain_id": "3LOZAJ85YDCTLAFJ25WGM7IN4VJX2Z_1_6"}
{"score": 0.1702478975057602, "chain_id": "3LOZAJ85YDCTLAFJ25WGM7IN4VJX2Z_1_7"}
{"score": 0.04365825653076172, "chain_id": "3LOZAJ85YDCTLAFJ25WGM7IN4VJX2Z_1_8"}
{"score": 0.7853278517723083, "chain_id": "3LOZAJ85YDCTLAFJ25WGM7IN4VJX2Z_1_9"}
{"score": 0.22485674917697906, "chain_id": "3LOZAJ85YDCTLAFJ25WGM7IN4VJX2Z_1_10"}
{"score": 0.9922741055488586, "chain_id": "308Q0PEVB8C7VZBNOSBUTK3MMRAI93_1_1"}
{"score": 0.9742644429206848, "chain_id": "308Q0PEVB8C7VZBNOSBUTK3MMRAI93_1_2"}
{"score": 0.8974676132202148, "chain_id": "308Q0PEVB8C7VZBNOSBUTK3MMRAI93_1_3"}
{"score": 0.9871233701705933, "chain_id": "308Q0PEVB8C7VZBNOSBUTK3MMRAI93_1_4"}
{"score": 0.6030104756355286, "chain_id": "308Q0PEVB8C7VZBNOSBUTK3MMRAI93_1_5"}
{"score": 0.057277362793684006, "chain_id": "308Q0PEVB8C7VZBNOSBUTK3MMRAI93_1_6"}
{"score": 0.045705296099185944, "chain_id": "308Q0PEVB8C7VZBNOSBUTK3MMRAI93_1_7"}
{"score": 0.03531079366803169, "chain_id": "308Q0PEVB8C7VZBNOSBUTK3MMRAI93_1_8"}
{"score": 0.620681643486023, "chain_id": "308Q0PEVB8C7VZBNOSBUTK3MMRAI93_1_9"}
{"score": 0.9493424296379089, "chain_id": "308Q0PEVB8C7VZBNOSBUTK3MMRAI93_1_10"}
{"score": 0.09598571807146072, "chain_id": "30X31N5D63PAUWOOLAJ8THKT2IDAS8_1_1"}
{"score": 0.03402472659945488, "chain_id": "30X31N5D63PAUWOOLAJ8THKT2IDAS8_1_2"}
{"score": 0.05359451100230217, "chain_id": "30X31N5D63PAUWOOLAJ8THKT2IDAS8_1_3"}
{"score": 0.05042361095547676, "chain_id": "30X31N5D63PAUWOOLAJ8THKT2IDAS8_1_4"}
{"score": 0.04206022992730141, "chain_id": "30X31N5D63PAUWOOLAJ8THKT2IDAS8_1_5"}
{"score": 0.8374961018562317, "chain_id": "30X31N5D63PAUWOOLAJ8THKT2IDAS8_1_6"}
{"score": 0.21441012620925903, "chain_id": "30X31N5D63PAUWOOLAJ8THKT2IDAS8_1_7"}
{"score": 0.7109217047691345, "chain_id": "30X31N5D63PAUWOOLAJ8THKT2IDAS8_1_8"}
{"score": 0.08168203383684158, "chain_id": "30X31N5D63PAUWOOLAJ8THKT2IDAS8_1_9"}
{"score": 0.5829445123672485, "chain_id": "30X31N5D63PAUWOOLAJ8THKT2IDAS8_1_10"}
{"score": 0.13713699579238892, "chain_id": "340UGXU9DY0A1XJQLA5445GU8SEVUV_1_1"}
{"score": 0.8095197677612305, "chain_id": "340UGXU9DY0A1XJQLA5445GU8SEVUV_1_2"}
{"score": 0.13541051745414734, "chain_id": "340UGXU9DY0A1XJQLA5445GU8SEVUV_1_3"}
{"score": 0.12655627727508545, "chain_id": "340UGXU9DY0A1XJQLA5445GU8SEVUV_1_4"}
{"score": 0.0542290173470974, "chain_id": "340UGXU9DY0A1XJQLA5445GU8SEVUV_1_5"}
{"score": 0.08007904887199402, "chain_id": "340UGXU9DY0A1XJQLA5445GU8SEVUV_1_6"}
{"score": 0.4714207649230957, "chain_id": "340UGXU9DY0A1XJQLA5445GU8SEVUV_1_7"}
{"score": 0.07455813139677048, "chain_id": "340UGXU9DY0A1XJQLA5445GU8SEVUV_1_8"}
{"score": 0.8952430486679077, "chain_id": "340UGXU9DY0A1XJQLA5445GU8SEVUV_1_9"}
{"score": 0.5741461515426636, "chain_id": "340UGXU9DY0A1XJQLA5445GU8SEVUV_1_10"}
{"score": 0.6567255258560181, "chain_id": "3ERET4BTVM8Y1U1BOVW660IZFA4K9G_1_1"}
{"score": 0.9825582504272461, "chain_id": "3ERET4BTVM8Y1U1BOVW660IZFA4K9G_1_2"}
{"score": 0.950247049331665, "chain_id": "3ERET4BTVM8Y1U1BOVW660IZFA4K9G_1_3"}
{"score": 0.19083940982818604, "chain_id": "3ERET4BTVM8Y1U1BOVW660IZFA4K9G_1_4"}
{"score": 0.17266713082790375, "chain_id": "3ERET4BTVM8Y1U1BOVW660IZFA4K9G_1_5"}
{"score": 0.7636598348617554, "chain_id": "3ERET4BTVM8Y1U1BOVW660IZFA4K9G_1_6"}
{"score": 0.9694271087646484, "chain_id": "3ERET4BTVM8Y1U1BOVW660IZFA4K9G_1_7"}
{"score": 0.6583028435707092, "chain_id": "3ERET4BTVM8Y1U1BOVW660IZFA4K9G_1_8"}
{"score": 0.21183578670024872, "chain_id": "3ERET4BTVM8Y1U1BOVW660IZFA4K9G_1_9"}
{"score": 0.05009789019823074, "chain_id": "3ERET4BTVM8Y1U1BOVW660IZFA4K9G_1_10"}
{"score": 0.9676408171653748, "chain_id": "324G5B4FB37SAL6E55O49KCK72L07M_1_2"}
{"score": 0.9894753694534302, "chain_id": "324G5B4FB37SAL6E55O49KCK72L07M_1_3"}
{"score": 0.11428576707839966, "chain_id": "324G5B4FB37SAL6E55O49KCK72L07M_1_4"}
{"score": 0.986116349697113, "chain_id": "324G5B4FB37SAL6E55O49KCK72L07M_1_7"}
{"score": 0.9916287660598755, "chain_id": "324G5B4FB37SAL6E55O49KCK72L07M_1_1"}
{"score": 0.09582110494375229, "chain_id": "324G5B4FB37SAL6E55O49KCK72L07M_1_5"}
{"score": 0.04346156492829323, "chain_id": "324G5B4FB37SAL6E55O49KCK72L07M_1_6"}
{"score": 0.20179209113121033, "chain_id": "324G5B4FB37SAL6E55O49KCK72L07M_1_8"}
{"score": 0.3019693195819855, "chain_id": "324G5B4FB37SAL6E55O49KCK72L07M_1_9"}
{"score": 0.12929166853427887, "chain_id": "324G5B4FB37SAL6E55O49KCK72L07M_1_10"}
{"score": 0.31416159868240356, "chain_id": "3QAPZX2QN4CLOK98ZT79DTVCWS302M_1_1"}
{"score": 0.024237701669335365, "chain_id": "3QAPZX2QN4CLOK98ZT79DTVCWS302M_1_2"}
{"score": 0.4867391288280487, "chain_id": "3QAPZX2QN4CLOK98ZT79DTVCWS302M_1_3"}
{"score": 0.9689118266105652, "chain_id": "3QAPZX2QN4CLOK98ZT79DTVCWS302M_1_4"}
{"score": 0.9169474840164185, "chain_id": "3QAPZX2QN4CLOK98ZT79DTVCWS302M_1_5"}
{"score": 0.05605905130505562, "chain_id": "3QAPZX2QN4CLOK98ZT79DTVCWS302M_1_6"}
{"score": 0.026427168399095535, "chain_id": "3QAPZX2QN4CLOK98ZT79DTVCWS302M_1_7"}
{"score": 0.26651719212532043, "chain_id": "3QAPZX2QN4CLOK98ZT79DTVCWS302M_1_8"}
{"score": 0.4415493309497833, "chain_id": "3QAPZX2QN4CLOK98ZT79DTVCWS302M_1_9"}
{"score": 0.12028943747282028, "chain_id": "3QAPZX2QN4CLOK98ZT79DTVCWS302M_1_10"}
{"score": 0.9914257526397705, "chain_id": "3XC1O3LBOSLS5FS771DOC0WQZ59TL2_1_1"}
{"score": 0.9912527203559875, "chain_id": "3XC1O3LBOSLS5FS771DOC0WQZ59TL2_1_2"}
{"score": 0.8970206379890442, "chain_id": "3XC1O3LBOSLS5FS771DOC0WQZ59TL2_1_3"}
{"score": 0.9278052449226379, "chain_id": "3XC1O3LBOSLS5FS771DOC0WQZ59TL2_1_9"}
{"score": 0.8055357933044434, "chain_id": "3XC1O3LBOSLS5FS771DOC0WQZ59TL2_1_4"}
{"score": 0.1807481199502945, "chain_id": "3XC1O3LBOSLS5FS771DOC0WQZ59TL2_1_5"}
{"score": 0.9234862327575684, "chain_id": "3XC1O3LBOSLS5FS771DOC0WQZ59TL2_1_6"}
{"score": 0.05661782622337341, "chain_id": "3XC1O3LBOSLS5FS771DOC0WQZ59TL2_1_7"}
{"score": 0.05103548988699913, "chain_id": "3XC1O3LBOSLS5FS771DOC0WQZ59TL2_1_8"}
{"score": 0.01806940883398056, "chain_id": "3XC1O3LBOSLS5FS771DOC0WQZ59TL2_1_10"}
{"score": 0.9205842018127441, "chain_id": "3R08VXYT7CULIB7ZYCHPGFLO74Q7WE_1_1"}
{"score": 0.8429412841796875, "chain_id": "3R08VXYT7CULIB7ZYCHPGFLO74Q7WE_1_2"}
{"score": 0.991336464881897, "chain_id": "3R08VXYT7CULIB7ZYCHPGFLO74Q7WE_1_5"}
{"score": 0.08391831815242767, "chain_id": "3R08VXYT7CULIB7ZYCHPGFLO74Q7WE_1_6"}
{"score": 0.7329562306404114, "chain_id": "3R08VXYT7CULIB7ZYCHPGFLO74Q7WE_1_8"}
{"score": 0.8668707609176636, "chain_id": "3R08VXYT7CULIB7ZYCHPGFLO74Q7WE_1_9"}
{"score": 0.9704952836036682, "chain_id": "3R08VXYT7CULIB7ZYCHPGFLO74Q7WE_1_10"}
{"score": 0.9215924143791199, "chain_id": "3R08VXYT7CULIB7ZYCHPGFLO74Q7WE_1_3"}
{"score": 0.9654207825660706, "chain_id": "3R08VXYT7CULIB7ZYCHPGFLO74Q7WE_1_4"}
{"score": 0.5314794778823853, "chain_id": "3R08VXYT7CULIB7ZYCHPGFLO74Q7WE_1_7"}
{"score": 0.4006476104259491, "chain_id": "3QFUFYSY9YEMO23L6P9I9FFELJIF4G_1_1"}
{"score": 0.7609026432037354, "chain_id": "3QFUFYSY9YEMO23L6P9I9FFELJIF4G_1_2"}
{"score": 0.09595321863889694, "chain_id": "3QFUFYSY9YEMO23L6P9I9FFELJIF4G_1_3"}
{"score": 0.023405231535434723, "chain_id": "3QFUFYSY9YEMO23L6P9I9FFELJIF4G_1_4"}
{"score": 0.5492115020751953, "chain_id": "3QFUFYSY9YEMO23L6P9I9FFELJIF4G_1_5"}
{"score": 0.08482035249471664, "chain_id": "3QFUFYSY9YEMO23L6P9I9FFELJIF4G_1_6"}
{"score": 0.34015464782714844, "chain_id": "3QFUFYSY9YEMO23L6P9I9FFELJIF4G_1_7"}
{"score": 0.03283281996846199, "chain_id": "3QFUFYSY9YEMO23L6P9I9FFELJIF4G_1_8"}
{"score": 0.04763704165816307, "chain_id": "3QFUFYSY9YEMO23L6P9I9FFELJIF4G_1_9"}
{"score": 0.1526232659816742, "chain_id": "3QFUFYSY9YEMO23L6P9I9FFELJIF4G_1_10"}
{"score": 0.047749485820531845, "chain_id": "317HQ483I7RSK1FHP2UZBLY6SUSINW_1_1"}
{"score": 0.06420234590768814, "chain_id": "317HQ483I7RSK1FHP2UZBLY6SUSINW_1_2"}
{"score": 0.032541222870349884, "chain_id": "317HQ483I7RSK1FHP2UZBLY6SUSINW_1_3"}
{"score": 0.025133084505796432, "chain_id": "317HQ483I7RSK1FHP2UZBLY6SUSINW_1_4"}
{"score": 0.8032740950584412, "chain_id": "317HQ483I7RSK1FHP2UZBLY6SUSINW_1_5"}
{"score": 0.5229150056838989, "chain_id": "317HQ483I7RSK1FHP2UZBLY6SUSINW_1_6"}
{"score": 0.1007659062743187, "chain_id": "317HQ483I7RSK1FHP2UZBLY6SUSINW_1_7"}
{"score": 0.11772626638412476, "chain_id": "317HQ483I7RSK1FHP2UZBLY6SUSINW_1_8"}
{"score": 0.33601292967796326, "chain_id": "317HQ483I7RSK1FHP2UZBLY6SUSINW_1_9"}
{"score": 0.32154926657676697, "chain_id": "317HQ483I7RSK1FHP2UZBLY6SUSINW_1_10"}
{"score": 0.8532102704048157, "chain_id": "31LVTDXBL79FP0FF3C8TCLV8A0RRLX_1_1"}
{"score": 0.8452767729759216, "chain_id": "31LVTDXBL79FP0FF3C8TCLV8A0RRLX_1_7"}
{"score": 0.5819367170333862, "chain_id": "31LVTDXBL79FP0FF3C8TCLV8A0RRLX_1_8"}
{"score": 0.8767338395118713, "chain_id": "31LVTDXBL79FP0FF3C8TCLV8A0RRLX_1_2"}
{"score": 0.09598319232463837, "chain_id": "31LVTDXBL79FP0FF3C8TCLV8A0RRLX_1_3"}
{"score": 0.10275845974683762, "chain_id": "31LVTDXBL79FP0FF3C8TCLV8A0RRLX_1_4"}
{"score": 0.040542520582675934, "chain_id": "31LVTDXBL79FP0FF3C8TCLV8A0RRLX_1_5"}
{"score": 0.08597251772880554, "chain_id": "31LVTDXBL79FP0FF3C8TCLV8A0RRLX_1_6"}
{"score": 0.05620031803846359, "chain_id": "31LVTDXBL79FP0FF3C8TCLV8A0RRLX_1_9"}
{"score": 0.27307528257369995, "chain_id": "31LVTDXBL79FP0FF3C8TCLV8A0RRLX_1_10"}
{"score": 0.6998258233070374, "chain_id": "3L6L49WXW0WUM5AW0DW9N3O1XAJ458_1_1"}
{"score": 0.654389500617981, "chain_id": "3L6L49WXW0WUM5AW0DW9N3O1XAJ458_1_2"}
{"score": 0.5071374177932739, "chain_id": "3L6L49WXW0WUM5AW0DW9N3O1XAJ458_1_3"}
{"score": 0.6914356350898743, "chain_id": "3L6L49WXW0WUM5AW0DW9N3O1XAJ458_1_4"}
{"score": 0.9756356477737427, "chain_id": "3L6L49WXW0WUM5AW0DW9N3O1XAJ458_1_7"}
{"score": 0.04669126495718956, "chain_id": "3L6L49WXW0WUM5AW0DW9N3O1XAJ458_1_5"}
{"score": 0.9530490636825562, "chain_id": "3L6L49WXW0WUM5AW0DW9N3O1XAJ458_1_6"}
{"score": 0.9491099715232849, "chain_id": "3L6L49WXW0WUM5AW0DW9N3O1XAJ458_1_8"}
{"score": 0.08113826811313629, "chain_id": "3L6L49WXW0WUM5AW0DW9N3O1XAJ458_1_9"}
{"score": 0.3019696772098541, "chain_id": "3L6L49WXW0WUM5AW0DW9N3O1XAJ458_1_10"}
{"score": 0.017112910747528076, "chain_id": "3WMOAN2SRBWX67ZHO9TIQAO01F6NVJ_1_1"}
{"score": 0.15722209215164185, "chain_id": "3WMOAN2SRBWX67ZHO9TIQAO01F6NVJ_1_2"}
{"score": 0.11539125442504883, "chain_id": "3WMOAN2SRBWX67ZHO9TIQAO01F6NVJ_1_3"}
{"score": 0.02164820395410061, "chain_id": "3WMOAN2SRBWX67ZHO9TIQAO01F6NVJ_1_4"}
{"score": 0.08997275680303574, "chain_id": "3WMOAN2SRBWX67ZHO9TIQAO01F6NVJ_1_5"}
{"score": 0.059623513370752335, "chain_id": "3WMOAN2SRBWX67ZHO9TIQAO01F6NVJ_1_6"}
{"score": 0.10355071723461151, "chain_id": "3WMOAN2SRBWX67ZHO9TIQAO01F6NVJ_1_7"}
{"score": 0.06380007416009903, "chain_id": "3WMOAN2SRBWX67ZHO9TIQAO01F6NVJ_1_8"}
{"score": 0.1728385090827942, "chain_id": "3WMOAN2SRBWX67ZHO9TIQAO01F6NVJ_1_9"}
{"score": 0.6336665153503418, "chain_id": "3WMOAN2SRBWX67ZHO9TIQAO01F6NVJ_1_10"}
{"score": 0.541816771030426, "chain_id": "3MTMREQS4VH31D5X5FT9Q6NE8ZPAWB_1_1"}
{"score": 0.9689080119132996, "chain_id": "3MTMREQS4VH31D5X5FT9Q6NE8ZPAWB_1_2"}
{"score": 0.43959248065948486, "chain_id": "3MTMREQS4VH31D5X5FT9Q6NE8ZPAWB_1_4"}
{"score": 0.6263224482536316, "chain_id": "3MTMREQS4VH31D5X5FT9Q6NE8ZPAWB_1_6"}
{"score": 0.7601879835128784, "chain_id": "3MTMREQS4VH31D5X5FT9Q6NE8ZPAWB_1_7"}
{"score": 0.7923812866210938, "chain_id": "3MTMREQS4VH31D5X5FT9Q6NE8ZPAWB_1_8"}
{"score": 0.06107914820313454, "chain_id": "3MTMREQS4VH31D5X5FT9Q6NE8ZPAWB_1_3"}
{"score": 0.30363819003105164, "chain_id": "3MTMREQS4VH31D5X5FT9Q6NE8ZPAWB_1_5"}
{"score": 0.043030995875597, "chain_id": "3MTMREQS4VH31D5X5FT9Q6NE8ZPAWB_1_9"}
{"score": 0.042059458792209625, "chain_id": "3MTMREQS4VH31D5X5FT9Q6NE8ZPAWB_1_10"}
{"score": 0.7737621068954468, "chain_id": "3HWRJOOET51DK9501FLUP0AKN8ISEE_1_2"}
{"score": 0.8034618496894836, "chain_id": "3HWRJOOET51DK9501FLUP0AKN8ISEE_1_3"}
{"score": 0.7661446332931519, "chain_id": "3HWRJOOET51DK9501FLUP0AKN8ISEE_1_1"}
{"score": 0.08980543911457062, "chain_id": "3HWRJOOET51DK9501FLUP0AKN8ISEE_1_4"}
{"score": 0.6003015041351318, "chain_id": "3HWRJOOET51DK9501FLUP0AKN8ISEE_1_5"}
{"score": 0.43666359782218933, "chain_id": "3HWRJOOET51DK9501FLUP0AKN8ISEE_1_6"}
{"score": 0.12328276038169861, "chain_id": "3HWRJOOET51DK9501FLUP0AKN8ISEE_1_7"}
{"score": 0.08078159391880035, "chain_id": "3HWRJOOET51DK9501FLUP0AKN8ISEE_1_8"}
{"score": 0.30691343545913696, "chain_id": "3HWRJOOET51DK9501FLUP0AKN8ISEE_1_9"}
{"score": 0.21145163476467133, "chain_id": "3HWRJOOET51DK9501FLUP0AKN8ISEE_1_10"}
{"score": 0.9668914675712585, "chain_id": "38F5OAUN5NB3LLCA3DVPFCB1TP5H7Z_1_1"}
{"score": 0.581234335899353, "chain_id": "38F5OAUN5NB3LLCA3DVPFCB1TP5H7Z_1_2"}
{"score": 0.7035232782363892, "chain_id": "38F5OAUN5NB3LLCA3DVPFCB1TP5H7Z_1_3"}
{"score": 0.4100389778614044, "chain_id": "38F5OAUN5NB3LLCA3DVPFCB1TP5H7Z_1_4"}
{"score": 0.9856487512588501, "chain_id": "38F5OAUN5NB3LLCA3DVPFCB1TP5H7Z_1_5"}
{"score": 0.44297119975090027, "chain_id": "38F5OAUN5NB3LLCA3DVPFCB1TP5H7Z_1_7"}
{"score": 0.7569271922111511, "chain_id": "38F5OAUN5NB3LLCA3DVPFCB1TP5H7Z_1_9"}
{"score": 0.3244408965110779, "chain_id": "38F5OAUN5NB3LLCA3DVPFCB1TP5H7Z_1_6"}
{"score": 0.13392551243305206, "chain_id": "38F5OAUN5NB3LLCA3DVPFCB1TP5H7Z_1_8"}
{"score": 0.531536340713501, "chain_id": "38F5OAUN5NB3LLCA3DVPFCB1TP5H7Z_1_10"}
{"score": 0.8716413378715515, "chain_id": "3JAOYWH7VI39L0JT9V87L0VE34ZL9X_1_1"}
{"score": 0.019707417115569115, "chain_id": "3JAOYWH7VI39L0JT9V87L0VE34ZL9X_1_2"}
{"score": 0.7605498433113098, "chain_id": "3JAOYWH7VI39L0JT9V87L0VE34ZL9X_1_3"}
{"score": 0.07024785131216049, "chain_id": "3JAOYWH7VI39L0JT9V87L0VE34ZL9X_1_4"}
{"score": 0.04086177796125412, "chain_id": "3JAOYWH7VI39L0JT9V87L0VE34ZL9X_1_5"}
{"score": 0.029048658907413483, "chain_id": "3JAOYWH7VI39L0JT9V87L0VE34ZL9X_1_6"}
{"score": 0.019303666427731514, "chain_id": "3JAOYWH7VI39L0JT9V87L0VE34ZL9X_1_7"}
{"score": 0.02126302942633629, "chain_id": "3JAOYWH7VI39L0JT9V87L0VE34ZL9X_1_8"}
{"score": 0.1515781432390213, "chain_id": "3JAOYWH7VI39L0JT9V87L0VE34ZL9X_1_9"}
{"score": 0.0456402450799942, "chain_id": "3JAOYWH7VI39L0JT9V87L0VE34ZL9X_1_10"}
{"score": 0.4753512144088745, "chain_id": "36V4Q8R5ZKZZJHI0Q9K8780SDE7QME_1_1"}
{"score": 0.49989068508148193, "chain_id": "36V4Q8R5ZKZZJHI0Q9K8780SDE7QME_1_2"}
{"score": 0.7764980792999268, "chain_id": "36V4Q8R5ZKZZJHI0Q9K8780SDE7QME_1_3"}
{"score": 0.17980514466762543, "chain_id": "36V4Q8R5ZKZZJHI0Q9K8780SDE7QME_1_4"}
{"score": 0.1654440015554428, "chain_id": "36V4Q8R5ZKZZJHI0Q9K8780SDE7QME_1_5"}
{"score": 0.28939691185951233, "chain_id": "36V4Q8R5ZKZZJHI0Q9K8780SDE7QME_1_6"}
{"score": 0.2663090229034424, "chain_id": "36V4Q8R5ZKZZJHI0Q9K8780SDE7QME_1_7"}
{"score": 0.12176446616649628, "chain_id": "36V4Q8R5ZKZZJHI0Q9K8780SDE7QME_1_8"}
{"score": 0.22850748896598816, "chain_id": "36V4Q8R5ZKZZJHI0Q9K8780SDE7QME_1_9"}
{"score": 0.24370498955249786, "chain_id": "36V4Q8R5ZKZZJHI0Q9K8780SDE7QME_1_10"}
{"score": 0.9605117440223694, "chain_id": "3QEMNNSB2XYM9578HHCZORW3097D74_1_1"}
{"score": 0.15107618272304535, "chain_id": "3QEMNNSB2XYM9578HHCZORW3097D74_1_2"}
{"score": 0.772172749042511, "chain_id": "3QEMNNSB2XYM9578HHCZORW3097D74_1_3"}
{"score": 0.09246978163719177, "chain_id": "3QEMNNSB2XYM9578HHCZORW3097D74_1_4"}
{"score": 0.31345799565315247, "chain_id": "3QEMNNSB2XYM9578HHCZORW3097D74_1_5"}
{"score": 0.5602109432220459, "chain_id": "3QEMNNSB2XYM9578HHCZORW3097D74_1_6"}
{"score": 0.043841127306222916, "chain_id": "3QEMNNSB2XYM9578HHCZORW3097D74_1_7"}
{"score": 0.18322189152240753, "chain_id": "3QEMNNSB2XYM9578HHCZORW3097D74_1_8"}
{"score": 0.5293465852737427, "chain_id": "3QEMNNSB2XYM9578HHCZORW3097D74_1_9"}
{"score": 0.9295799136161804, "chain_id": "3QEMNNSB2XYM9578HHCZORW3097D74_1_10"}
{"score": 0.0877634659409523, "chain_id": "3ZSY5X72NXANVLICG4OL42Z2540ROG_1_1"}
{"score": 0.03061097301542759, "chain_id": "3ZSY5X72NXANVLICG4OL42Z2540ROG_1_2"}
{"score": 0.08654139935970306, "chain_id": "3ZSY5X72NXANVLICG4OL42Z2540ROG_1_3"}
{"score": 0.18065208196640015, "chain_id": "3ZSY5X72NXANVLICG4OL42Z2540ROG_1_4"}
{"score": 0.039939526468515396, "chain_id": "3ZSY5X72NXANVLICG4OL42Z2540ROG_1_5"}
{"score": 0.07025259733200073, "chain_id": "3ZSY5X72NXANVLICG4OL42Z2540ROG_1_6"}
{"score": 0.036924172192811966, "chain_id": "3ZSY5X72NXANVLICG4OL42Z2540ROG_1_7"}
{"score": 0.39238569140434265, "chain_id": "3ZSY5X72NXANVLICG4OL42Z2540ROG_1_8"}
{"score": 0.5406104922294617, "chain_id": "3ZSY5X72NXANVLICG4OL42Z2540ROG_1_9"}
{"score": 0.6759127378463745, "chain_id": "3ZSY5X72NXANVLICG4OL42Z2540ROG_1_10"}
{"score": 0.3510458171367645, "chain_id": "3TMSXRD2X6Z77PSX9W0GF5UB4DC1WN_1_1"}
{"score": 0.4535945653915405, "chain_id": "3TMSXRD2X6Z77PSX9W0GF5UB4DC1WN_1_2"}
{"score": 0.40979331731796265, "chain_id": "3TMSXRD2X6Z77PSX9W0GF5UB4DC1WN_1_3"}
{"score": 0.43199342489242554, "chain_id": "3TMSXRD2X6Z77PSX9W0GF5UB4DC1WN_1_4"}
{"score": 0.31283053755760193, "chain_id": "3TMSXRD2X6Z77PSX9W0GF5UB4DC1WN_1_5"}
{"score": 0.25463953614234924, "chain_id": "3TMSXRD2X6Z77PSX9W0GF5UB4DC1WN_1_6"}
{"score": 0.24955739080905914, "chain_id": "3TMSXRD2X6Z77PSX9W0GF5UB4DC1WN_1_7"}
{"score": 0.21092239022254944, "chain_id": "3TMSXRD2X6Z77PSX9W0GF5UB4DC1WN_1_8"}
{"score": 0.32046228647232056, "chain_id": "3TMSXRD2X6Z77PSX9W0GF5UB4DC1WN_1_9"}
{"score": 0.6008816957473755, "chain_id": "3TMSXRD2X6Z77PSX9W0GF5UB4DC1WN_1_10"}
{"score": 0.9764379858970642, "chain_id": "3PW9OPU9PQJLV9UQVCB9RYEMZGU121_1_1"}
{"score": 0.9769855737686157, "chain_id": "3PW9OPU9PQJLV9UQVCB9RYEMZGU121_1_2"}
{"score": 0.9937658309936523, "chain_id": "3PW9OPU9PQJLV9UQVCB9RYEMZGU121_1_3"}
{"score": 0.9877305626869202, "chain_id": "3PW9OPU9PQJLV9UQVCB9RYEMZGU121_1_4"}
{"score": 0.9464561343193054, "chain_id": "3PW9OPU9PQJLV9UQVCB9RYEMZGU121_1_5"}
{"score": 0.7709922194480896, "chain_id": "3PW9OPU9PQJLV9UQVCB9RYEMZGU121_1_6"}
{"score": 0.8435567021369934, "chain_id": "3PW9OPU9PQJLV9UQVCB9RYEMZGU121_1_7"}
{"score": 0.3014128506183624, "chain_id": "3PW9OPU9PQJLV9UQVCB9RYEMZGU121_1_8"}
{"score": 0.6646180152893066, "chain_id": "3PW9OPU9PQJLV9UQVCB9RYEMZGU121_1_10"}
{"score": 0.6761057376861572, "chain_id": "3PW9OPU9PQJLV9UQVCB9RYEMZGU121_1_9"}
{"score": 0.9764379858970642, "chain_id": "3300DTYQT2G17TQN9BWPU0VJ48KQEL_1_1"}
{"score": 0.9937658309936523, "chain_id": "3300DTYQT2G17TQN9BWPU0VJ48KQEL_1_3"}
{"score": 0.7709922194480896, "chain_id": "3300DTYQT2G17TQN9BWPU0VJ48KQEL_1_6"}
{"score": 0.6646180152893066, "chain_id": "3300DTYQT2G17TQN9BWPU0VJ48KQEL_1_10"}
{"score": 0.9769855737686157, "chain_id": "3300DTYQT2G17TQN9BWPU0VJ48KQEL_1_2"}
{"score": 0.9877305626869202, "chain_id": "3300DTYQT2G17TQN9BWPU0VJ48KQEL_1_4"}
{"score": 0.9464561343193054, "chain_id": "3300DTYQT2G17TQN9BWPU0VJ48KQEL_1_5"}
{"score": 0.8435567021369934, "chain_id": "3300DTYQT2G17TQN9BWPU0VJ48KQEL_1_7"}
{"score": 0.3014128506183624, "chain_id": "3300DTYQT2G17TQN9BWPU0VJ48KQEL_1_8"}
{"score": 0.6761057376861572, "chain_id": "3300DTYQT2G17TQN9BWPU0VJ48KQEL_1_9"}
{"score": 0.9885567426681519, "chain_id": "3LRLIPTPEQ8C6DBGG1A62VTJKSOAKM_1_1"}
{"score": 0.98863285779953, "chain_id": "3LRLIPTPEQ8C6DBGG1A62VTJKSOAKM_1_2"}
{"score": 0.9932108521461487, "chain_id": "3LRLIPTPEQ8C6DBGG1A62VTJKSOAKM_1_3"}
{"score": 0.9474079608917236, "chain_id": "3LRLIPTPEQ8C6DBGG1A62VTJKSOAKM_1_5"}
{"score": 0.293503999710083, "chain_id": "3LRLIPTPEQ8C6DBGG1A62VTJKSOAKM_1_8"}
{"score": 0.9888677000999451, "chain_id": "3LRLIPTPEQ8C6DBGG1A62VTJKSOAKM_1_4"}
{"score": 0.7693239450454712, "chain_id": "3LRLIPTPEQ8C6DBGG1A62VTJKSOAKM_1_6"}
{"score": 0.84122234582901, "chain_id": "3LRLIPTPEQ8C6DBGG1A62VTJKSOAKM_1_7"}
{"score": 0.6939842700958252, "chain_id": "3LRLIPTPEQ8C6DBGG1A62VTJKSOAKM_1_9"}
{"score": 0.6533558368682861, "chain_id": "3LRLIPTPEQ8C6DBGG1A62VTJKSOAKM_1_10"}
{"score": 0.9885567426681519, "chain_id": "37UEWGM5HT72ZTBBA2QAS6MUDBDR1H_1_1"}
{"score": 0.98863285779953, "chain_id": "37UEWGM5HT72ZTBBA2QAS6MUDBDR1H_1_2"}
{"score": 0.9888677000999451, "chain_id": "37UEWGM5HT72ZTBBA2QAS6MUDBDR1H_1_4"}
{"score": 0.9474079608917236, "chain_id": "37UEWGM5HT72ZTBBA2QAS6MUDBDR1H_1_5"}
{"score": 0.7693239450454712, "chain_id": "37UEWGM5HT72ZTBBA2QAS6MUDBDR1H_1_6"}
{"score": 0.84122234582901, "chain_id": "37UEWGM5HT72ZTBBA2QAS6MUDBDR1H_1_7"}
{"score": 0.6939842700958252, "chain_id": "37UEWGM5HT72ZTBBA2QAS6MUDBDR1H_1_9"}
{"score": 0.6533558368682861, "chain_id": "37UEWGM5HT72ZTBBA2QAS6MUDBDR1H_1_10"}
{"score": 0.9932108521461487, "chain_id": "37UEWGM5HT72ZTBBA2QAS6MUDBDR1H_1_3"}
{"score": 0.293503999710083, "chain_id": "37UEWGM5HT72ZTBBA2QAS6MUDBDR1H_1_8"}
{"score": 0.982282280921936, "chain_id": "3JV9LGBJWTDW6V9Y0TU95YLV4FIGO7_1_1"}
{"score": 0.98360276222229, "chain_id": "3JV9LGBJWTDW6V9Y0TU95YLV4FIGO7_1_2"}
{"score": 0.24918565154075623, "chain_id": "3JV9LGBJWTDW6V9Y0TU95YLV4FIGO7_1_3"}
{"score": 0.2771889269351959, "chain_id": "3JV9LGBJWTDW6V9Y0TU95YLV4FIGO7_1_4"}
{"score": 0.16646510362625122, "chain_id": "3JV9LGBJWTDW6V9Y0TU95YLV4FIGO7_1_5"}
{"score": 0.33626505732536316, "chain_id": "3JV9LGBJWTDW6V9Y0TU95YLV4FIGO7_1_6"}
{"score": 0.2517017126083374, "chain_id": "3JV9LGBJWTDW6V9Y0TU95YLV4FIGO7_1_7"}
{"score": 0.14020469784736633, "chain_id": "3JV9LGBJWTDW6V9Y0TU95YLV4FIGO7_1_8"}
{"score": 0.1730770468711853, "chain_id": "3JV9LGBJWTDW6V9Y0TU95YLV4FIGO7_1_9"}
{"score": 0.02033958211541176, "chain_id": "3JV9LGBJWTDW6V9Y0TU95YLV4FIGO7_1_10"}
{"score": 0.013200522400438786, "chain_id": "3ZSANO2JCF65QN5WWQ507IVKZXRSF2_1_1"}
{"score": 0.01643790304660797, "chain_id": "3ZSANO2JCF65QN5WWQ507IVKZXRSF2_1_2"}
{"score": 0.0539064034819603, "chain_id": "3ZSANO2JCF65QN5WWQ507IVKZXRSF2_1_3"}
{"score": 0.02739282138645649, "chain_id": "3ZSANO2JCF65QN5WWQ507IVKZXRSF2_1_4"}
{"score": 0.03663435950875282, "chain_id": "3ZSANO2JCF65QN5WWQ507IVKZXRSF2_1_5"}
{"score": 0.020465752109885216, "chain_id": "3ZSANO2JCF65QN5WWQ507IVKZXRSF2_1_6"}
{"score": 0.0659228190779686, "chain_id": "3ZSANO2JCF65QN5WWQ507IVKZXRSF2_1_7"}
{"score": 0.07473894208669662, "chain_id": "3ZSANO2JCF65QN5WWQ507IVKZXRSF2_1_8"}
{"score": 0.024159234017133713, "chain_id": "3ZSANO2JCF65QN5WWQ507IVKZXRSF2_1_9"}
{"score": 0.024501003324985504, "chain_id": "3ZSANO2JCF65QN5WWQ507IVKZXRSF2_1_10"}
{"score": 0.9906196594238281, "chain_id": "3RU7GD8VPOSHH0UQAT15JC9O14NPSD_1_1"}
{"score": 0.9862123727798462, "chain_id": "3RU7GD8VPOSHH0UQAT15JC9O14NPSD_1_2"}
{"score": 0.9877040386199951, "chain_id": "3RU7GD8VPOSHH0UQAT15JC9O14NPSD_1_4"}
{"score": 0.8778210282325745, "chain_id": "3RU7GD8VPOSHH0UQAT15JC9O14NPSD_1_6"}
{"score": 0.8534337878227234, "chain_id": "3RU7GD8VPOSHH0UQAT15JC9O14NPSD_1_8"}
{"score": 0.9877955913543701, "chain_id": "3RU7GD8VPOSHH0UQAT15JC9O14NPSD_1_3"}
{"score": 0.9094221591949463, "chain_id": "3RU7GD8VPOSHH0UQAT15JC9O14NPSD_1_5"}
{"score": 0.7039523124694824, "chain_id": "3RU7GD8VPOSHH0UQAT15JC9O14NPSD_1_7"}
{"score": 0.4897139072418213, "chain_id": "3RU7GD8VPOSHH0UQAT15JC9O14NPSD_1_9"}
{"score": 0.2396358698606491, "chain_id": "3RU7GD8VPOSHH0UQAT15JC9O14NPSD_1_10"}
{"score": 0.965821385383606, "chain_id": "3HVVDCPGTERC5EZ6QG2E68YM71ATYO_1_1"}
{"score": 0.9850526452064514, "chain_id": "3HVVDCPGTERC5EZ6QG2E68YM71ATYO_1_2"}
{"score": 0.9809402823448181, "chain_id": "3HVVDCPGTERC5EZ6QG2E68YM71ATYO_1_3"}
{"score": 0.972899854183197, "chain_id": "3HVVDCPGTERC5EZ6QG2E68YM71ATYO_1_4"}
{"score": 0.5941103100776672, "chain_id": "3HVVDCPGTERC5EZ6QG2E68YM71ATYO_1_5"}
{"score": 0.3453190326690674, "chain_id": "3HVVDCPGTERC5EZ6QG2E68YM71ATYO_1_6"}
{"score": 0.05854366719722748, "chain_id": "3HVVDCPGTERC5EZ6QG2E68YM71ATYO_1_7"}
{"score": 0.12458871304988861, "chain_id": "3HVVDCPGTERC5EZ6QG2E68YM71ATYO_1_8"}
{"score": 0.3027131259441376, "chain_id": "3HVVDCPGTERC5EZ6QG2E68YM71ATYO_1_9"}
{"score": 0.07560174912214279, "chain_id": "3HVVDCPGTERC5EZ6QG2E68YM71ATYO_1_10"}
{"score": 0.939943253993988, "chain_id": "3FE7TXL1LIM9CDE7GR1OSZMU58EQ27_1_1"}
{"score": 0.9863126873970032, "chain_id": "3FE7TXL1LIM9CDE7GR1OSZMU58EQ27_1_2"}
{"score": 0.9684461355209351, "chain_id": "3FE7TXL1LIM9CDE7GR1OSZMU58EQ27_1_3"}
{"score": 0.9377120733261108, "chain_id": "3FE7TXL1LIM9CDE7GR1OSZMU58EQ27_1_7"}
{"score": 0.9883939027786255, "chain_id": "3FE7TXL1LIM9CDE7GR1OSZMU58EQ27_1_8"}
{"score": 0.9488491415977478, "chain_id": "3FE7TXL1LIM9CDE7GR1OSZMU58EQ27_1_9"}
{"score": 0.8727880716323853, "chain_id": "3FE7TXL1LIM9CDE7GR1OSZMU58EQ27_1_4"}
{"score": 0.06479858607053757, "chain_id": "3FE7TXL1LIM9CDE7GR1OSZMU58EQ27_1_5"}
{"score": 0.9408517479896545, "chain_id": "3FE7TXL1LIM9CDE7GR1OSZMU58EQ27_1_6"}
{"score": 0.03389601409435272, "chain_id": "3FE7TXL1LIM9CDE7GR1OSZMU58EQ27_1_10"}
{"score": 0.25287100672721863, "chain_id": "3UWN2HHPUY4HEFIDUEODFN4TYVSSNZ_1_1"}
{"score": 0.4369405508041382, "chain_id": "3UWN2HHPUY4HEFIDUEODFN4TYVSSNZ_1_2"}
{"score": 0.37111371755599976, "chain_id": "3UWN2HHPUY4HEFIDUEODFN4TYVSSNZ_1_3"}
{"score": 0.3428005874156952, "chain_id": "3UWN2HHPUY4HEFIDUEODFN4TYVSSNZ_1_4"}
{"score": 0.047093141824007034, "chain_id": "3UWN2HHPUY4HEFIDUEODFN4TYVSSNZ_1_5"}
{"score": 0.5798112750053406, "chain_id": "3UWN2HHPUY4HEFIDUEODFN4TYVSSNZ_1_6"}
{"score": 0.07880699634552002, "chain_id": "3UWN2HHPUY4HEFIDUEODFN4TYVSSNZ_1_7"}
{"score": 0.5824438333511353, "chain_id": "3UWN2HHPUY4HEFIDUEODFN4TYVSSNZ_1_8"}
{"score": 0.19301332533359528, "chain_id": "3UWN2HHPUY4HEFIDUEODFN4TYVSSNZ_1_9"}
{"score": 0.2574761211872101, "chain_id": "3UWN2HHPUY4HEFIDUEODFN4TYVSSNZ_1_10"}
{"score": 0.872903048992157, "chain_id": "3XM0HYN6NKYG7HP89YH0UV59XRSPEF_1_1"}
{"score": 0.4668155014514923, "chain_id": "3XM0HYN6NKYG7HP89YH0UV59XRSPEF_1_2"}
{"score": 0.8546494841575623, "chain_id": "3XM0HYN6NKYG7HP89YH0UV59XRSPEF_1_7"}
{"score": 0.11211403459310532, "chain_id": "3XM0HYN6NKYG7HP89YH0UV59XRSPEF_1_9"}
{"score": 0.17152871191501617, "chain_id": "3XM0HYN6NKYG7HP89YH0UV59XRSPEF_1_3"}
{"score": 0.6312422156333923, "chain_id": "3XM0HYN6NKYG7HP89YH0UV59XRSPEF_1_4"}
{"score": 0.22848811745643616, "chain_id": "3XM0HYN6NKYG7HP89YH0UV59XRSPEF_1_5"}
{"score": 0.36367279291152954, "chain_id": "3XM0HYN6NKYG7HP89YH0UV59XRSPEF_1_6"}
{"score": 0.16003760695457458, "chain_id": "3XM0HYN6NKYG7HP89YH0UV59XRSPEF_1_8"}
{"score": 0.6202408075332642, "chain_id": "3XM0HYN6NKYG7HP89YH0UV59XRSPEF_1_10"}
{"score": 0.6324179172515869, "chain_id": "3X4MXAO0BGNV0URE7QFVLWCO5CGWRN_1_2"}
{"score": 0.8454567193984985, "chain_id": "3X4MXAO0BGNV0URE7QFVLWCO5CGWRN_1_5"}
{"score": 0.811129093170166, "chain_id": "3X4MXAO0BGNV0URE7QFVLWCO5CGWRN_1_1"}
{"score": 0.8657873272895813, "chain_id": "3X4MXAO0BGNV0URE7QFVLWCO5CGWRN_1_3"}
{"score": 0.7561652064323425, "chain_id": "3X4MXAO0BGNV0URE7QFVLWCO5CGWRN_1_4"}
{"score": 0.02742181532084942, "chain_id": "3X4MXAO0BGNV0URE7QFVLWCO5CGWRN_1_6"}
{"score": 0.01648840680718422, "chain_id": "3X4MXAO0BGNV0URE7QFVLWCO5CGWRN_1_7"}
{"score": 0.04371153190732002, "chain_id": "3X4MXAO0BGNV0URE7QFVLWCO5CGWRN_1_8"}
{"score": 0.1079479306936264, "chain_id": "3X4MXAO0BGNV0URE7QFVLWCO5CGWRN_1_9"}
{"score": 0.04046238213777542, "chain_id": "3X4MXAO0BGNV0URE7QFVLWCO5CGWRN_1_10"}
{"score": 0.66582190990448, "chain_id": "3P4MQ7TPPXBGWKCEG2X9Y3UZE2XBB2_1_1"}
{"score": 0.1876021921634674, "chain_id": "3P4MQ7TPPXBGWKCEG2X9Y3UZE2XBB2_1_2"}
{"score": 0.40213310718536377, "chain_id": "3P4MQ7TPPXBGWKCEG2X9Y3UZE2XBB2_1_3"}
{"score": 0.2766716182231903, "chain_id": "3P4MQ7TPPXBGWKCEG2X9Y3UZE2XBB2_1_4"}
{"score": 0.13857285678386688, "chain_id": "3P4MQ7TPPXBGWKCEG2X9Y3UZE2XBB2_1_5"}
{"score": 0.76506507396698, "chain_id": "3P4MQ7TPPXBGWKCEG2X9Y3UZE2XBB2_1_6"}
{"score": 0.08867225795984268, "chain_id": "3P4MQ7TPPXBGWKCEG2X9Y3UZE2XBB2_1_7"}
{"score": 0.021609654650092125, "chain_id": "3P4MQ7TPPXBGWKCEG2X9Y3UZE2XBB2_1_8"}
{"score": 0.10122540593147278, "chain_id": "3P4MQ7TPPXBGWKCEG2X9Y3UZE2XBB2_1_9"}
{"score": 0.07725051045417786, "chain_id": "3P4MQ7TPPXBGWKCEG2X9Y3UZE2XBB2_1_10"}
{"score": 0.9797580242156982, "chain_id": "3AZHRG4CU4JA925R3TLEW304Z91301_1_4"}
{"score": 0.8384842276573181, "chain_id": "3AZHRG4CU4JA925R3TLEW304Z91301_1_7"}
{"score": 0.08914006501436234, "chain_id": "3AZHRG4CU4JA925R3TLEW304Z91301_1_1"}
{"score": 0.88889479637146, "chain_id": "3AZHRG4CU4JA925R3TLEW304Z91301_1_2"}
{"score": 0.7382000088691711, "chain_id": "3AZHRG4CU4JA925R3TLEW304Z91301_1_3"}
{"score": 0.0652581974864006, "chain_id": "3AZHRG4CU4JA925R3TLEW304Z91301_1_5"}
{"score": 0.9534401893615723, "chain_id": "3AZHRG4CU4JA925R3TLEW304Z91301_1_6"}
{"score": 0.6534826159477234, "chain_id": "3AZHRG4CU4JA925R3TLEW304Z91301_1_8"}
{"score": 0.7287110686302185, "chain_id": "3AZHRG4CU4JA925R3TLEW304Z91301_1_9"}
{"score": 0.4688939154148102, "chain_id": "3AZHRG4CU4JA925R3TLEW304Z91301_1_10"}
{"score": 0.4358920156955719, "chain_id": "3K4J6M3CXES74RFXQAPR431QHPNAGV_1_1"}
{"score": 0.8197861909866333, "chain_id": "3K4J6M3CXES74RFXQAPR431QHPNAGV_1_2"}
{"score": 0.051199864596128464, "chain_id": "3K4J6M3CXES74RFXQAPR431QHPNAGV_1_3"}
{"score": 0.3437190055847168, "chain_id": "3K4J6M3CXES74RFXQAPR431QHPNAGV_1_4"}
{"score": 0.4688287079334259, "chain_id": "3K4J6M3CXES74RFXQAPR431QHPNAGV_1_5"}
{"score": 0.1828688681125641, "chain_id": "3K4J6M3CXES74RFXQAPR431QHPNAGV_1_6"}
{"score": 0.0718025341629982, "chain_id": "3K4J6M3CXES74RFXQAPR431QHPNAGV_1_7"}
{"score": 0.10726159811019897, "chain_id": "3K4J6M3CXES74RFXQAPR431QHPNAGV_1_8"}
{"score": 0.20028866827487946, "chain_id": "3K4J6M3CXES74RFXQAPR431QHPNAGV_1_9"}
{"score": 0.08037377893924713, "chain_id": "3K4J6M3CXES74RFXQAPR431QHPNAGV_1_10"}
{"score": 0.8594335913658142, "chain_id": "3OE22WJIGINIWPN9ZBBUYIHMUF3QUX_1_1"}
{"score": 0.20725952088832855, "chain_id": "3OE22WJIGINIWPN9ZBBUYIHMUF3QUX_1_8"}
{"score": 0.8976173996925354, "chain_id": "3OE22WJIGINIWPN9ZBBUYIHMUF3QUX_1_2"}
{"score": 0.7175804376602173, "chain_id": "3OE22WJIGINIWPN9ZBBUYIHMUF3QUX_1_3"}
{"score": 0.7083702087402344, "chain_id": "3OE22WJIGINIWPN9ZBBUYIHMUF3QUX_1_4"}
{"score": 0.4502936005592346, "chain_id": "3OE22WJIGINIWPN9ZBBUYIHMUF3QUX_1_5"}
{"score": 0.395868718624115, "chain_id": "3OE22WJIGINIWPN9ZBBUYIHMUF3QUX_1_6"}
{"score": 0.04526565968990326, "chain_id": "3OE22WJIGINIWPN9ZBBUYIHMUF3QUX_1_7"}
{"score": 0.060092389583587646, "chain_id": "3OE22WJIGINIWPN9ZBBUYIHMUF3QUX_1_9"}
{"score": 0.054144736379384995, "chain_id": "3OE22WJIGINIWPN9ZBBUYIHMUF3QUX_1_10"}
{"score": 0.2101796716451645, "chain_id": "3Z7EFSHGN9D6JS7LZYLMYKR9KRHCXM_1_1"}
{"score": 0.027091704308986664, "chain_id": "3Z7EFSHGN9D6JS7LZYLMYKR9KRHCXM_1_2"}
{"score": 0.12935185432434082, "chain_id": "3Z7EFSHGN9D6JS7LZYLMYKR9KRHCXM_1_3"}
{"score": 0.8862185478210449, "chain_id": "3Z7EFSHGN9D6JS7LZYLMYKR9KRHCXM_1_4"}
{"score": 0.8606535196304321, "chain_id": "3Z7EFSHGN9D6JS7LZYLMYKR9KRHCXM_1_5"}
{"score": 0.2265566736459732, "chain_id": "3Z7EFSHGN9D6JS7LZYLMYKR9KRHCXM_1_6"}
{"score": 0.09252561628818512, "chain_id": "3Z7EFSHGN9D6JS7LZYLMYKR9KRHCXM_1_7"}
{"score": 0.025655247271060944, "chain_id": "3Z7EFSHGN9D6JS7LZYLMYKR9KRHCXM_1_8"}
{"score": 0.044873178005218506, "chain_id": "3Z7EFSHGN9D6JS7LZYLMYKR9KRHCXM_1_9"}
{"score": 0.019250735640525818, "chain_id": "3Z7EFSHGN9D6JS7LZYLMYKR9KRHCXM_1_10"}
{"score": 0.9880026578903198, "chain_id": "3TMSXRD2X6Z77PSX9W0GF5UB7VAW1J_1_1"}
{"score": 0.7215200662612915, "chain_id": "3TMSXRD2X6Z77PSX9W0GF5UB7VAW1J_1_3"}
{"score": 0.9869930744171143, "chain_id": "3TMSXRD2X6Z77PSX9W0GF5UB7VAW1J_1_5"}
{"score": 0.8937990665435791, "chain_id": "3TMSXRD2X6Z77PSX9W0GF5UB7VAW1J_1_2"}
{"score": 0.9801881313323975, "chain_id": "3TMSXRD2X6Z77PSX9W0GF5UB7VAW1J_1_4"}
{"score": 0.7066361308097839, "chain_id": "3TMSXRD2X6Z77PSX9W0GF5UB7VAW1J_1_6"}
{"score": 0.9472936391830444, "chain_id": "3TMSXRD2X6Z77PSX9W0GF5UB7VAW1J_1_7"}
{"score": 0.39627212285995483, "chain_id": "3TMSXRD2X6Z77PSX9W0GF5UB7VAW1J_1_8"}
{"score": 0.09046153724193573, "chain_id": "3TMSXRD2X6Z77PSX9W0GF5UB7VAW1J_1_9"}
{"score": 0.02762574329972267, "chain_id": "3TMSXRD2X6Z77PSX9W0GF5UB7VAW1J_1_10"}
{"score": 0.990461528301239, "chain_id": "3XC1O3LBOSLS5FS771DOC0WQ19CTLF_1_1"}
{"score": 0.990361213684082, "chain_id": "3XC1O3LBOSLS5FS771DOC0WQ19CTLF_1_2"}
{"score": 0.9884172081947327, "chain_id": "3XC1O3LBOSLS5FS771DOC0WQ19CTLF_1_3"}
{"score": 0.9856733083724976, "chain_id": "3XC1O3LBOSLS5FS771DOC0WQ19CTLF_1_4"}
{"score": 0.9889804124832153, "chain_id": "3XC1O3LBOSLS5FS771DOC0WQ19CTLF_1_5"}
{"score": 0.9751869440078735, "chain_id": "3XC1O3LBOSLS5FS771DOC0WQ19CTLF_1_7"}
{"score": 0.509863555431366, "chain_id": "3XC1O3LBOSLS5FS771DOC0WQ19CTLF_1_6"}
{"score": 0.05562162026762962, "chain_id": "3XC1O3LBOSLS5FS771DOC0WQ19CTLF_1_8"}
{"score": 0.18393836915493011, "chain_id": "3XC1O3LBOSLS5FS771DOC0WQ19CTLF_1_9"}
{"score": 0.497290700674057, "chain_id": "3XC1O3LBOSLS5FS771DOC0WQ19CTLF_1_10"}
{"score": 0.9423670172691345, "chain_id": "31IBVUNM9SYLIFM0QLA5I5FR10FVF5_1_1"}
{"score": 0.04969038441777229, "chain_id": "31IBVUNM9SYLIFM0QLA5I5FR10FVF5_1_2"}
{"score": 0.3943229019641876, "chain_id": "31IBVUNM9SYLIFM0QLA5I5FR10FVF5_1_3"}
{"score": 0.06925445795059204, "chain_id": "31IBVUNM9SYLIFM0QLA5I5FR10FVF5_1_4"}
{"score": 0.015014211647212505, "chain_id": "31IBVUNM9SYLIFM0QLA5I5FR10FVF5_1_5"}
{"score": 0.0220431387424469, "chain_id": "31IBVUNM9SYLIFM0QLA5I5FR10FVF5_1_6"}
{"score": 0.017636168748140335, "chain_id": "31IBVUNM9SYLIFM0QLA5I5FR10FVF5_1_7"}
{"score": 0.014519236981868744, "chain_id": "31IBVUNM9SYLIFM0QLA5I5FR10FVF5_1_8"}
{"score": 0.018455015495419502, "chain_id": "31IBVUNM9SYLIFM0QLA5I5FR10FVF5_1_9"}
{"score": 0.024437611922621727, "chain_id": "31IBVUNM9SYLIFM0QLA5I5FR10FVF5_1_10"}
{"score": 0.601386308670044, "chain_id": "3VAR3R6G1P0HDG3GHVILDL4XHGIO8H_1_1"}
{"score": 0.026194768026471138, "chain_id": "3VAR3R6G1P0HDG3GHVILDL4XHGIO8H_1_2"}
{"score": 0.03683824837207794, "chain_id": "3VAR3R6G1P0HDG3GHVILDL4XHGIO8H_1_3"}
{"score": 0.07407883554697037, "chain_id": "3VAR3R6G1P0HDG3GHVILDL4XHGIO8H_1_4"}
{"score": 0.06998881697654724, "chain_id": "3VAR3R6G1P0HDG3GHVILDL4XHGIO8H_1_5"}
{"score": 0.03994053229689598, "chain_id": "3VAR3R6G1P0HDG3GHVILDL4XHGIO8H_1_6"}
{"score": 0.017214607447385788, "chain_id": "3VAR3R6G1P0HDG3GHVILDL4XHGIO8H_1_7"}
{"score": 0.03554336726665497, "chain_id": "3VAR3R6G1P0HDG3GHVILDL4XHGIO8H_1_8"}
{"score": 0.02243831194937229, "chain_id": "3VAR3R6G1P0HDG3GHVILDL4XHGIO8H_1_9"}
{"score": 0.02441919408738613, "chain_id": "3VAR3R6G1P0HDG3GHVILDL4XHGIO8H_1_10"}
{"score": 0.9850252270698547, "chain_id": "3TPZPLC3M0BDXJ9BKE04B41C8PGP3E_1_1"}
{"score": 0.9810898900032043, "chain_id": "3TPZPLC3M0BDXJ9BKE04B41C8PGP3E_1_9"}
{"score": 0.993301272392273, "chain_id": "3TPZPLC3M0BDXJ9BKE04B41C8PGP3E_1_2"}
{"score": 0.8750136494636536, "chain_id": "3TPZPLC3M0BDXJ9BKE04B41C8PGP3E_1_3"}
{"score": 0.9928932785987854, "chain_id": "3TPZPLC3M0BDXJ9BKE04B41C8PGP3E_1_4"}
{"score": 0.846124529838562, "chain_id": "3TPZPLC3M0BDXJ9BKE04B41C8PGP3E_1_5"}
{"score": 0.9807553291320801, "chain_id": "3TPZPLC3M0BDXJ9BKE04B41C8PGP3E_1_6"}
{"score": 0.9606543779373169, "chain_id": "3TPZPLC3M0BDXJ9BKE04B41C8PGP3E_1_7"}
{"score": 0.14969922602176666, "chain_id": "3TPZPLC3M0BDXJ9BKE04B41C8PGP3E_1_8"}
{"score": 0.10217536985874176, "chain_id": "3TPZPLC3M0BDXJ9BKE04B41C8PGP3E_1_10"}
{"score": 0.3244844377040863, "chain_id": "3CFVK00FWLKM3HHVBO5V1Q4CE4GL6E_1_1"}
{"score": 0.0728054940700531, "chain_id": "3CFVK00FWLKM3HHVBO5V1Q4CE4GL6E_1_2"}
{"score": 0.5444037318229675, "chain_id": "3CFVK00FWLKM3HHVBO5V1Q4CE4GL6E_1_3"}
{"score": 0.15410222113132477, "chain_id": "3CFVK00FWLKM3HHVBO5V1Q4CE4GL6E_1_4"}
{"score": 0.1525358408689499, "chain_id": "3CFVK00FWLKM3HHVBO5V1Q4CE4GL6E_1_5"}
{"score": 0.5224574208259583, "chain_id": "3CFVK00FWLKM3HHVBO5V1Q4CE4GL6E_1_6"}
{"score": 0.03810688853263855, "chain_id": "3CFVK00FWLKM3HHVBO5V1Q4CE4GL6E_1_7"}
{"score": 0.21017661690711975, "chain_id": "3CFVK00FWLKM3HHVBO5V1Q4CE4GL6E_1_8"}
{"score": 0.22006186842918396, "chain_id": "3CFVK00FWLKM3HHVBO5V1Q4CE4GL6E_1_9"}
{"score": 0.17163439095020294, "chain_id": "3CFVK00FWLKM3HHVBO5V1Q4CE4GL6E_1_10"}
{"score": 0.12260197103023529, "chain_id": "31HQ4X3T3S9RQFFSI18Y2V04W90LSJ_1_4"}
{"score": 0.344350129365921, "chain_id": "31HQ4X3T3S9RQFFSI18Y2V04W90LSJ_1_1"}
{"score": 0.6921296119689941, "chain_id": "31HQ4X3T3S9RQFFSI18Y2V04W90LSJ_1_2"}
{"score": 0.114654541015625, "chain_id": "31HQ4X3T3S9RQFFSI18Y2V04W90LSJ_1_3"}
{"score": 0.05249728634953499, "chain_id": "31HQ4X3T3S9RQFFSI18Y2V04W90LSJ_1_5"}
{"score": 0.6686126589775085, "chain_id": "31HQ4X3T3S9RQFFSI18Y2V04W90LSJ_1_6"}
{"score": 0.04284367337822914, "chain_id": "31HQ4X3T3S9RQFFSI18Y2V04W90LSJ_1_7"}
{"score": 0.02903471514582634, "chain_id": "31HQ4X3T3S9RQFFSI18Y2V04W90LSJ_1_8"}
{"score": 0.02133709006011486, "chain_id": "31HQ4X3T3S9RQFFSI18Y2V04W90LSJ_1_9"}
{"score": 0.011560154147446156, "chain_id": "31HQ4X3T3S9RQFFSI18Y2V04W90LSJ_1_10"}
{"score": 0.9489273428916931, "chain_id": "39K0FND3AHE7W1BJ1DNMH8LN9F5AMD_1_5"}
{"score": 0.855181097984314, "chain_id": "39K0FND3AHE7W1BJ1DNMH8LN9F5AMD_1_8"}
{"score": 0.1722378134727478, "chain_id": "39K0FND3AHE7W1BJ1DNMH8LN9F5AMD_1_1"}
{"score": 0.0777869001030922, "chain_id": "39K0FND3AHE7W1BJ1DNMH8LN9F5AMD_1_2"}
{"score": 0.04413716867566109, "chain_id": "39K0FND3AHE7W1BJ1DNMH8LN9F5AMD_1_3"}
{"score": 0.04668590798974037, "chain_id": "39K0FND3AHE7W1BJ1DNMH8LN9F5AMD_1_4"}
{"score": 0.049660857766866684, "chain_id": "39K0FND3AHE7W1BJ1DNMH8LN9F5AMD_1_6"}
{"score": 0.663521945476532, "chain_id": "39K0FND3AHE7W1BJ1DNMH8LN9F5AMD_1_7"}
{"score": 0.17879696190357208, "chain_id": "39K0FND3AHE7W1BJ1DNMH8LN9F5AMD_1_9"}
{"score": 0.945574164390564, "chain_id": "39K0FND3AHE7W1BJ1DNMH8LN9F5AMD_1_10"}
{"score": 0.7008712887763977, "chain_id": "3A0EX8ZRN8NC9S5PQUBT6ES00J7BYQ_1_1"}
{"score": 0.7170664072036743, "chain_id": "3A0EX8ZRN8NC9S5PQUBT6ES00J7BYQ_1_2"}
{"score": 0.9149632453918457, "chain_id": "3A0EX8ZRN8NC9S5PQUBT6ES00J7BYQ_1_3"}
{"score": 0.8305090069770813, "chain_id": "3A0EX8ZRN8NC9S5PQUBT6ES00J7BYQ_1_4"}
{"score": 0.8671072721481323, "chain_id": "3A0EX8ZRN8NC9S5PQUBT6ES00J7BYQ_1_5"}
{"score": 0.8107290863990784, "chain_id": "3A0EX8ZRN8NC9S5PQUBT6ES00J7BYQ_1_6"}
{"score": 0.5252068042755127, "chain_id": "3A0EX8ZRN8NC9S5PQUBT6ES00J7BYQ_1_7"}
{"score": 0.9705479741096497, "chain_id": "3A0EX8ZRN8NC9S5PQUBT6ES00J7BYQ_1_8"}
{"score": 0.16004449129104614, "chain_id": "3A0EX8ZRN8NC9S5PQUBT6ES00J7BYQ_1_9"}
{"score": 0.6747220754623413, "chain_id": "3A0EX8ZRN8NC9S5PQUBT6ES00J7BYQ_1_10"}
{"score": 0.989371657371521, "chain_id": "3KIBXJ1WD5T18H5HQKFO3QDOA5PKOL_1_1"}
{"score": 0.9302921295166016, "chain_id": "3KIBXJ1WD5T18H5HQKFO3QDOA5PKOL_1_5"}
{"score": 0.7025399804115295, "chain_id": "3KIBXJ1WD5T18H5HQKFO3QDOA5PKOL_1_7"}
{"score": 0.8409433960914612, "chain_id": "3KIBXJ1WD5T18H5HQKFO3QDOA5PKOL_1_2"}
{"score": 0.9283468723297119, "chain_id": "3KIBXJ1WD5T18H5HQKFO3QDOA5PKOL_1_3"}
{"score": 0.9502226114273071, "chain_id": "3KIBXJ1WD5T18H5HQKFO3QDOA5PKOL_1_4"}
{"score": 0.9114928245544434, "chain_id": "3KIBXJ1WD5T18H5HQKFO3QDOA5PKOL_1_6"}
{"score": 0.8437076210975647, "chain_id": "3KIBXJ1WD5T18H5HQKFO3QDOA5PKOL_1_8"}
{"score": 0.463006854057312, "chain_id": "3KIBXJ1WD5T18H5HQKFO3QDOA5PKOL_1_9"}
{"score": 0.21909776329994202, "chain_id": "3KIBXJ1WD5T18H5HQKFO3QDOA5PKOL_1_10"}
{"score": 0.783233642578125, "chain_id": "3U0SRXB7CD45D0I0FPO8PDZXG9HRNI_1_2"}
{"score": 0.9247758984565735, "chain_id": "3U0SRXB7CD45D0I0FPO8PDZXG9HRNI_1_4"}
{"score": 0.43595418334007263, "chain_id": "3U0SRXB7CD45D0I0FPO8PDZXG9HRNI_1_5"}
{"score": 0.23306918144226074, "chain_id": "3U0SRXB7CD45D0I0FPO8PDZXG9HRNI_1_1"}
{"score": 0.05272065848112106, "chain_id": "3U0SRXB7CD45D0I0FPO8PDZXG9HRNI_1_3"}
{"score": 0.7920937538146973, "chain_id": "3U0SRXB7CD45D0I0FPO8PDZXG9HRNI_1_6"}
{"score": 0.6083246469497681, "chain_id": "3U0SRXB7CD45D0I0FPO8PDZXG9HRNI_1_7"}
{"score": 0.15575440227985382, "chain_id": "3U0SRXB7CD45D0I0FPO8PDZXG9HRNI_1_8"}
{"score": 0.6891958713531494, "chain_id": "3U0SRXB7CD45D0I0FPO8PDZXG9HRNI_1_9"}
{"score": 0.9451296925544739, "chain_id": "3U0SRXB7CD45D0I0FPO8PDZXG9HRNI_1_10"}
{"score": 0.6034153699874878, "chain_id": "3OONKJ5DKCI0FE1NK72V4NUYO6YOBE_1_1"}
{"score": 0.017047906294465065, "chain_id": "3OONKJ5DKCI0FE1NK72V4NUYO6YOBE_1_2"}
{"score": 0.12402428686618805, "chain_id": "3OONKJ5DKCI0FE1NK72V4NUYO6YOBE_1_3"}
{"score": 0.0349627286195755, "chain_id": "3OONKJ5DKCI0FE1NK72V4NUYO6YOBE_1_4"}
{"score": 0.023658785969018936, "chain_id": "3OONKJ5DKCI0FE1NK72V4NUYO6YOBE_1_5"}
{"score": 0.04957667365670204, "chain_id": "3OONKJ5DKCI0FE1NK72V4NUYO6YOBE_1_6"}
{"score": 0.018908457830548286, "chain_id": "3OONKJ5DKCI0FE1NK72V4NUYO6YOBE_1_7"}
{"score": 0.03431479260325432, "chain_id": "3OONKJ5DKCI0FE1NK72V4NUYO6YOBE_1_8"}
{"score": 0.016624845564365387, "chain_id": "3OONKJ5DKCI0FE1NK72V4NUYO6YOBE_1_9"}
{"score": 0.01661861687898636, "chain_id": "3OONKJ5DKCI0FE1NK72V4NUYO6YOBE_1_10"}
{"score": 0.032096486538648605, "chain_id": "3SB5N7Y3O33B3EHFY8SYFXPD7XSG0E_1_1"}
{"score": 0.05758669972419739, "chain_id": "3SB5N7Y3O33B3EHFY8SYFXPD7XSG0E_1_2"}
{"score": 0.08330172300338745, "chain_id": "3SB5N7Y3O33B3EHFY8SYFXPD7XSG0E_1_3"}
{"score": 0.017404858022928238, "chain_id": "3SB5N7Y3O33B3EHFY8SYFXPD7XSG0E_1_4"}
{"score": 0.01871303841471672, "chain_id": "3SB5N7Y3O33B3EHFY8SYFXPD7XSG0E_1_5"}
{"score": 0.11060068756341934, "chain_id": "3SB5N7Y3O33B3EHFY8SYFXPD7XSG0E_1_6"}
{"score": 0.05272591486573219, "chain_id": "3SB5N7Y3O33B3EHFY8SYFXPD7XSG0E_1_7"}
{"score": 0.04368715360760689, "chain_id": "3SB5N7Y3O33B3EHFY8SYFXPD7XSG0E_1_8"}
{"score": 0.05141285061836243, "chain_id": "3SB5N7Y3O33B3EHFY8SYFXPD7XSG0E_1_9"}
{"score": 0.04232250899076462, "chain_id": "3SB5N7Y3O33B3EHFY8SYFXPD7XSG0E_1_10"}
{"score": 0.9903186559677124, "chain_id": "3R2UR8A0IAF7SH4OP3UDTKLQ8QZOXZ_1_1"}
{"score": 0.9898109436035156, "chain_id": "3R2UR8A0IAF7SH4OP3UDTKLQ8QZOXZ_1_2"}
{"score": 0.9912732243537903, "chain_id": "3R2UR8A0IAF7SH4OP3UDTKLQ8QZOXZ_1_4"}
{"score": 0.9695528149604797, "chain_id": "3R2UR8A0IAF7SH4OP3UDTKLQ8QZOXZ_1_7"}
{"score": 0.9923140406608582, "chain_id": "3R2UR8A0IAF7SH4OP3UDTKLQ8QZOXZ_1_3"}
{"score": 0.5615676641464233, "chain_id": "3R2UR8A0IAF7SH4OP3UDTKLQ8QZOXZ_1_5"}
{"score": 0.8657466769218445, "chain_id": "3R2UR8A0IAF7SH4OP3UDTKLQ8QZOXZ_1_6"}
{"score": 0.7918065786361694, "chain_id": "3R2UR8A0IAF7SH4OP3UDTKLQ8QZOXZ_1_8"}
{"score": 0.766390323638916, "chain_id": "3R2UR8A0IAF7SH4OP3UDTKLQ8QZOXZ_1_9"}
{"score": 0.08535801619291306, "chain_id": "3R2UR8A0IAF7SH4OP3UDTKLQ8QZOXZ_1_10"}
{"score": 0.9440386891365051, "chain_id": "3QEMNNSB2XYM9578HHCZORW3316D7Q_1_2"}
{"score": 0.9406982660293579, "chain_id": "3QEMNNSB2XYM9578HHCZORW3316D7Q_1_3"}
{"score": 0.9443113803863525, "chain_id": "3QEMNNSB2XYM9578HHCZORW3316D7Q_1_4"}
{"score": 0.9650072455406189, "chain_id": "3QEMNNSB2XYM9578HHCZORW3316D7Q_1_5"}
{"score": 0.9027596712112427, "chain_id": "3QEMNNSB2XYM9578HHCZORW3316D7Q_1_10"}
{"score": 0.9508066773414612, "chain_id": "3QEMNNSB2XYM9578HHCZORW3316D7Q_1_1"}
{"score": 0.18454556167125702, "chain_id": "3QEMNNSB2XYM9578HHCZORW3316D7Q_1_6"}
{"score": 0.06332375854253769, "chain_id": "3QEMNNSB2XYM9578HHCZORW3316D7Q_1_7"}
{"score": 0.04069433733820915, "chain_id": "3QEMNNSB2XYM9578HHCZORW3316D7Q_1_8"}
{"score": 0.09875238686800003, "chain_id": "3QEMNNSB2XYM9578HHCZORW3316D7Q_1_9"}
{"score": 0.8981195092201233, "chain_id": "3I3WADAZ9Q3YQYKEJXBI11U6DFH5O8_1_5"}
{"score": 0.9264786243438721, "chain_id": "3I3WADAZ9Q3YQYKEJXBI11U6DFH5O8_1_6"}
{"score": 0.05172806605696678, "chain_id": "3I3WADAZ9Q3YQYKEJXBI11U6DFH5O8_1_1"}
{"score": 0.06555509567260742, "chain_id": "3I3WADAZ9Q3YQYKEJXBI11U6DFH5O8_1_2"}
{"score": 0.1725955605506897, "chain_id": "3I3WADAZ9Q3YQYKEJXBI11U6DFH5O8_1_3"}
{"score": 0.21365496516227722, "chain_id": "3I3WADAZ9Q3YQYKEJXBI11U6DFH5O8_1_4"}
{"score": 0.11031265556812286, "chain_id": "3I3WADAZ9Q3YQYKEJXBI11U6DFH5O8_1_7"}
{"score": 0.036568328738212585, "chain_id": "3I3WADAZ9Q3YQYKEJXBI11U6DFH5O8_1_8"}
{"score": 0.03086879290640354, "chain_id": "3I3WADAZ9Q3YQYKEJXBI11U6DFH5O8_1_9"}
{"score": 0.05112758278846741, "chain_id": "3I3WADAZ9Q3YQYKEJXBI11U6DFH5O8_1_10"}
{"score": 0.7348594069480896, "chain_id": "30X31N5D63PAUWOOLAJ8THKT52USAE_1_1"}
{"score": 0.5639935731887817, "chain_id": "30X31N5D63PAUWOOLAJ8THKT52USAE_1_2"}
{"score": 0.348746120929718, "chain_id": "30X31N5D63PAUWOOLAJ8THKT52USAE_1_3"}
{"score": 0.46940967440605164, "chain_id": "30X31N5D63PAUWOOLAJ8THKT52USAE_1_4"}
{"score": 0.055059198290109634, "chain_id": "30X31N5D63PAUWOOLAJ8THKT52USAE_1_5"}
{"score": 0.0875561460852623, "chain_id": "30X31N5D63PAUWOOLAJ8THKT52USAE_1_6"}
{"score": 0.4436066150665283, "chain_id": "30X31N5D63PAUWOOLAJ8THKT52USAE_1_7"}
{"score": 0.7061092257499695, "chain_id": "30X31N5D63PAUWOOLAJ8THKT52USAE_1_8"}
{"score": 0.6963911652565002, "chain_id": "30X31N5D63PAUWOOLAJ8THKT52USAE_1_9"}
{"score": 0.9022942781448364, "chain_id": "30X31N5D63PAUWOOLAJ8THKT52USAE_1_10"}
{"score": 0.8731732964515686, "chain_id": "3L4D84MILZRW5GDC4MKMI2GAIXAJH1_1_1"}
{"score": 0.91841059923172, "chain_id": "3L4D84MILZRW5GDC4MKMI2GAIXAJH1_1_2"}
{"score": 0.8771207928657532, "chain_id": "3L4D84MILZRW5GDC4MKMI2GAIXAJH1_1_4"}
{"score": 0.335954874753952, "chain_id": "3L4D84MILZRW5GDC4MKMI2GAIXAJH1_1_7"}
{"score": 0.43040400743484497, "chain_id": "3L4D84MILZRW5GDC4MKMI2GAIXAJH1_1_3"}
{"score": 0.19694429636001587, "chain_id": "3L4D84MILZRW5GDC4MKMI2GAIXAJH1_1_5"}
{"score": 0.2972787916660309, "chain_id": "3L4D84MILZRW5GDC4MKMI2GAIXAJH1_1_6"}
{"score": 0.19321990013122559, "chain_id": "3L4D84MILZRW5GDC4MKMI2GAIXAJH1_1_8"}
{"score": 0.2842824161052704, "chain_id": "3L4D84MILZRW5GDC4MKMI2GAIXAJH1_1_9"}
{"score": 0.2984071373939514, "chain_id": "3L4D84MILZRW5GDC4MKMI2GAIXAJH1_1_10"}
{"score": 0.9786624312400818, "chain_id": "32XVDSJFPZWIRYGFOYU7BQ2PX99M2E_1_1"}
{"score": 0.9836788773536682, "chain_id": "32XVDSJFPZWIRYGFOYU7BQ2PX99M2E_1_2"}
{"score": 0.08511678874492645, "chain_id": "32XVDSJFPZWIRYGFOYU7BQ2PX99M2E_1_5"}
{"score": 0.11595524102449417, "chain_id": "32XVDSJFPZWIRYGFOYU7BQ2PX99M2E_1_8"}
{"score": 0.16073669493198395, "chain_id": "32XVDSJFPZWIRYGFOYU7BQ2PX99M2E_1_3"}
{"score": 0.803458034992218, "chain_id": "32XVDSJFPZWIRYGFOYU7BQ2PX99M2E_1_4"}
{"score": 0.1011291965842247, "chain_id": "32XVDSJFPZWIRYGFOYU7BQ2PX99M2E_1_6"}
{"score": 0.05650576576590538, "chain_id": "32XVDSJFPZWIRYGFOYU7BQ2PX99M2E_1_7"}
{"score": 0.0987134575843811, "chain_id": "32XVDSJFPZWIRYGFOYU7BQ2PX99M2E_1_9"}
{"score": 0.16336743533611298, "chain_id": "32XVDSJFPZWIRYGFOYU7BQ2PX99M2E_1_10"}
{"score": 0.5767525434494019, "chain_id": "3URFVVM165HRAHO0M7U7PBTQW3EUZO_1_6"}
{"score": 0.5466331839561462, "chain_id": "3URFVVM165HRAHO0M7U7PBTQW3EUZO_1_1"}
{"score": 0.0675584003329277, "chain_id": "3URFVVM165HRAHO0M7U7PBTQW3EUZO_1_2"}
{"score": 0.037769317626953125, "chain_id": "3URFVVM165HRAHO0M7U7PBTQW3EUZO_1_3"}
{"score": 0.4840962290763855, "chain_id": "3URFVVM165HRAHO0M7U7PBTQW3EUZO_1_4"}
{"score": 0.3180839717388153, "chain_id": "3URFVVM165HRAHO0M7U7PBTQW3EUZO_1_5"}
{"score": 0.062021832913160324, "chain_id": "3URFVVM165HRAHO0M7U7PBTQW3EUZO_1_7"}
{"score": 0.06914281845092773, "chain_id": "3URFVVM165HRAHO0M7U7PBTQW3EUZO_1_8"}
{"score": 0.21044158935546875, "chain_id": "3URFVVM165HRAHO0M7U7PBTQW3EUZO_1_9"}
{"score": 0.07410092651844025, "chain_id": "3URFVVM165HRAHO0M7U7PBTQW3EUZO_1_10"}
{"score": 0.22857780754566193, "chain_id": "3EQHHY4HQSRAYL3GVEYAWSL4M2X5GA_1_1"}
{"score": 0.0808168351650238, "chain_id": "3EQHHY4HQSRAYL3GVEYAWSL4M2X5GA_1_2"}
{"score": 0.10468750447034836, "chain_id": "3EQHHY4HQSRAYL3GVEYAWSL4M2X5GA_1_3"}
{"score": 0.11362957954406738, "chain_id": "3EQHHY4HQSRAYL3GVEYAWSL4M2X5GA_1_4"}
{"score": 0.03324121981859207, "chain_id": "3EQHHY4HQSRAYL3GVEYAWSL4M2X5GA_1_5"}
{"score": 0.028527729213237762, "chain_id": "3EQHHY4HQSRAYL3GVEYAWSL4M2X5GA_1_6"}
{"score": 0.05434660613536835, "chain_id": "3EQHHY4HQSRAYL3GVEYAWSL4M2X5GA_1_7"}
{"score": 0.20932643115520477, "chain_id": "3EQHHY4HQSRAYL3GVEYAWSL4M2X5GA_1_8"}
{"score": 0.07504712790250778, "chain_id": "3EQHHY4HQSRAYL3GVEYAWSL4M2X5GA_1_9"}
{"score": 0.08742783218622208, "chain_id": "3EQHHY4HQSRAYL3GVEYAWSL4M2X5GA_1_10"}
{"score": 0.855313241481781, "chain_id": "36W0OB37HWDM5VIGM8N86W403E7ZHQ_1_1"}
{"score": 0.3793259859085083, "chain_id": "36W0OB37HWDM5VIGM8N86W403E7ZHQ_1_4"}
{"score": 0.8149463534355164, "chain_id": "36W0OB37HWDM5VIGM8N86W403E7ZHQ_1_9"}
{"score": 0.13317695260047913, "chain_id": "36W0OB37HWDM5VIGM8N86W403E7ZHQ_1_2"}
{"score": 0.4884543716907501, "chain_id": "36W0OB37HWDM5VIGM8N86W403E7ZHQ_1_3"}
{"score": 0.3518892228603363, "chain_id": "36W0OB37HWDM5VIGM8N86W403E7ZHQ_1_5"}
{"score": 0.030815741047263145, "chain_id": "36W0OB37HWDM5VIGM8N86W403E7ZHQ_1_6"}
{"score": 0.05375466123223305, "chain_id": "36W0OB37HWDM5VIGM8N86W403E7ZHQ_1_7"}
{"score": 0.1791291981935501, "chain_id": "36W0OB37HWDM5VIGM8N86W403E7ZHQ_1_8"}
{"score": 0.03719339892268181, "chain_id": "36W0OB37HWDM5VIGM8N86W403E7ZHQ_1_10"}
{"score": 0.026271257549524307, "chain_id": "3PS7W85Z8Z1X4DRYI4AY7R5VZ5TT9U_1_1"}
{"score": 0.02761143632233143, "chain_id": "3PS7W85Z8Z1X4DRYI4AY7R5VZ5TT9U_1_2"}
{"score": 0.032455071806907654, "chain_id": "3PS7W85Z8Z1X4DRYI4AY7R5VZ5TT9U_1_3"}
{"score": 0.009344419464468956, "chain_id": "3PS7W85Z8Z1X4DRYI4AY7R5VZ5TT9U_1_4"}
{"score": 0.012788851745426655, "chain_id": "3PS7W85Z8Z1X4DRYI4AY7R5VZ5TT9U_1_5"}
{"score": 0.01692446880042553, "chain_id": "3PS7W85Z8Z1X4DRYI4AY7R5VZ5TT9U_1_6"}
{"score": 0.026463503018021584, "chain_id": "3PS7W85Z8Z1X4DRYI4AY7R5VZ5TT9U_1_7"}
{"score": 0.03403882682323456, "chain_id": "3PS7W85Z8Z1X4DRYI4AY7R5VZ5TT9U_1_8"}
{"score": 0.01927460916340351, "chain_id": "3PS7W85Z8Z1X4DRYI4AY7R5VZ5TT9U_1_9"}
{"score": 0.02757415361702442, "chain_id": "3PS7W85Z8Z1X4DRYI4AY7R5VZ5TT9U_1_10"}
{"score": 0.8323594331741333, "chain_id": "3DH6GAKTYYO8RQ85W8RWSWZ3TCGZYP_1_1"}
{"score": 0.9115866422653198, "chain_id": "3DH6GAKTYYO8RQ85W8RWSWZ3TCGZYP_1_2"}
{"score": 0.9455037117004395, "chain_id": "3DH6GAKTYYO8RQ85W8RWSWZ3TCGZYP_1_3"}
{"score": 0.8768476843833923, "chain_id": "3DH6GAKTYYO8RQ85W8RWSWZ3TCGZYP_1_4"}
{"score": 0.17824304103851318, "chain_id": "3DH6GAKTYYO8RQ85W8RWSWZ3TCGZYP_1_5"}
{"score": 0.03334375470876694, "chain_id": "3DH6GAKTYYO8RQ85W8RWSWZ3TCGZYP_1_6"}
{"score": 0.08647208660840988, "chain_id": "3DH6GAKTYYO8RQ85W8RWSWZ3TCGZYP_1_7"}
{"score": 0.04796748608350754, "chain_id": "3DH6GAKTYYO8RQ85W8RWSWZ3TCGZYP_1_8"}
{"score": 0.05528947338461876, "chain_id": "3DH6GAKTYYO8RQ85W8RWSWZ3TCGZYP_1_9"}
{"score": 0.08368546515703201, "chain_id": "3DH6GAKTYYO8RQ85W8RWSWZ3TCGZYP_1_10"}
{"score": 0.9806745648384094, "chain_id": "3XIQGXAUMC707BCP8HDBIYZVFVFX7T_1_1"}
{"score": 0.9799096584320068, "chain_id": "3XIQGXAUMC707BCP8HDBIYZVFVFX7T_1_2"}
{"score": 0.7397955060005188, "chain_id": "3XIQGXAUMC707BCP8HDBIYZVFVFX7T_1_6"}
{"score": 0.3093128204345703, "chain_id": "3XIQGXAUMC707BCP8HDBIYZVFVFX7T_1_8"}
{"score": 0.5611846446990967, "chain_id": "3XIQGXAUMC707BCP8HDBIYZVFVFX7T_1_3"}
{"score": 0.717001736164093, "chain_id": "3XIQGXAUMC707BCP8HDBIYZVFVFX7T_1_4"}
{"score": 0.6815887689590454, "chain_id": "3XIQGXAUMC707BCP8HDBIYZVFVFX7T_1_5"}
{"score": 0.25685441493988037, "chain_id": "3XIQGXAUMC707BCP8HDBIYZVFVFX7T_1_7"}
{"score": 0.2170405238866806, "chain_id": "3XIQGXAUMC707BCP8HDBIYZVFVFX7T_1_9"}
{"score": 0.07534391433000565, "chain_id": "3XIQGXAUMC707BCP8HDBIYZVFVFX7T_1_10"}
{"score": 0.9908722043037415, "chain_id": "3X31TUMD7XLRWVGY5ITE6UDV7LP1L6_1_1"}
{"score": 0.9917176961898804, "chain_id": "3X31TUMD7XLRWVGY5ITE6UDV7LP1L6_1_2"}
{"score": 0.1935657411813736, "chain_id": "3X31TUMD7XLRWVGY5ITE6UDV7LP1L6_1_5"}
{"score": 0.2793411314487457, "chain_id": "3X31TUMD7XLRWVGY5ITE6UDV7LP1L6_1_3"}
{"score": 0.7521197199821472, "chain_id": "3X31TUMD7XLRWVGY5ITE6UDV7LP1L6_1_4"}
{"score": 0.3374629020690918, "chain_id": "3X31TUMD7XLRWVGY5ITE6UDV7LP1L6_1_6"}
{"score": 0.6853346228599548, "chain_id": "3X31TUMD7XLRWVGY5ITE6UDV7LP1L6_1_7"}
{"score": 0.3618714213371277, "chain_id": "3X31TUMD7XLRWVGY5ITE6UDV7LP1L6_1_8"}
{"score": 0.05121814087033272, "chain_id": "3X31TUMD7XLRWVGY5ITE6UDV7LP1L6_1_9"}
{"score": 0.10020145773887634, "chain_id": "3X31TUMD7XLRWVGY5ITE6UDV7LP1L6_1_10"}
{"score": 0.14055277407169342, "chain_id": "3G0WWMR1UVJ51Z302AZ8KNPSINJNQF_1_1"}
{"score": 0.122966468334198, "chain_id": "3G0WWMR1UVJ51Z302AZ8KNPSINJNQF_1_2"}
{"score": 0.022918615490198135, "chain_id": "3G0WWMR1UVJ51Z302AZ8KNPSINJNQF_1_3"}
{"score": 0.03134285286068916, "chain_id": "3G0WWMR1UVJ51Z302AZ8KNPSINJNQF_1_4"}
{"score": 0.01916554756462574, "chain_id": "3G0WWMR1UVJ51Z302AZ8KNPSINJNQF_1_5"}
{"score": 0.017030350863933563, "chain_id": "3G0WWMR1UVJ51Z302AZ8KNPSINJNQF_1_6"}
{"score": 0.03683020919561386, "chain_id": "3G0WWMR1UVJ51Z302AZ8KNPSINJNQF_1_7"}
{"score": 0.08465172350406647, "chain_id": "3G0WWMR1UVJ51Z302AZ8KNPSINJNQF_1_8"}
{"score": 0.04177531599998474, "chain_id": "3G0WWMR1UVJ51Z302AZ8KNPSINJNQF_1_9"}
{"score": 0.054320160299539566, "chain_id": "3G0WWMR1UVJ51Z302AZ8KNPSINJNQF_1_10"}
{"score": 0.8119080662727356, "chain_id": "3ZY8KE4ISJ2I94C941LZU4J546PQVZ_1_5"}
{"score": 0.5888422727584839, "chain_id": "3ZY8KE4ISJ2I94C941LZU4J546PQVZ_1_6"}
{"score": 0.2708995044231415, "chain_id": "3ZY8KE4ISJ2I94C941LZU4J546PQVZ_1_9"}
{"score": 0.13828805088996887, "chain_id": "3ZY8KE4ISJ2I94C941LZU4J546PQVZ_1_1"}
{"score": 0.067764513194561, "chain_id": "3ZY8KE4ISJ2I94C941LZU4J546PQVZ_1_2"}
{"score": 0.15315599739551544, "chain_id": "3ZY8KE4ISJ2I94C941LZU4J546PQVZ_1_3"}
{"score": 0.13260141015052795, "chain_id": "3ZY8KE4ISJ2I94C941LZU4J546PQVZ_1_4"}
{"score": 0.0789167657494545, "chain_id": "3ZY8KE4ISJ2I94C941LZU4J546PQVZ_1_7"}
{"score": 0.7187663316726685, "chain_id": "3ZY8KE4ISJ2I94C941LZU4J546PQVZ_1_8"}
{"score": 0.02888358384370804, "chain_id": "3ZY8KE4ISJ2I94C941LZU4J546PQVZ_1_10"}
{"score": 0.0276004821062088, "chain_id": "3ATPCQ38J897QI0XKGBXB38UJ2EYAD_1_1"}
{"score": 0.03827393800020218, "chain_id": "3ATPCQ38J897QI0XKGBXB38UJ2EYAD_1_2"}
{"score": 0.03136732801795006, "chain_id": "3ATPCQ38J897QI0XKGBXB38UJ2EYAD_1_3"}
{"score": 0.04904628172516823, "chain_id": "3ATPCQ38J897QI0XKGBXB38UJ2EYAD_1_4"}
{"score": 0.017657354474067688, "chain_id": "3ATPCQ38J897QI0XKGBXB38UJ2EYAD_1_5"}
{"score": 0.023841796442866325, "chain_id": "3ATPCQ38J897QI0XKGBXB38UJ2EYAD_1_6"}
{"score": 0.0225426834076643, "chain_id": "3ATPCQ38J897QI0XKGBXB38UJ2EYAD_1_7"}
{"score": 0.02240620367228985, "chain_id": "3ATPCQ38J897QI0XKGBXB38UJ2EYAD_1_8"}
{"score": 0.03705412894487381, "chain_id": "3ATPCQ38J897QI0XKGBXB38UJ2EYAD_1_9"}
{"score": 0.024486180394887924, "chain_id": "3ATPCQ38J897QI0XKGBXB38UJ2EYAD_1_10"}
{"score": 0.4670570194721222, "chain_id": "3QY5DC2MXRJL50X0LV00MJD8KWHUFT_1_1"}
{"score": 0.17084799706935883, "chain_id": "3QY5DC2MXRJL50X0LV00MJD8KWHUFT_1_2"}
{"score": 0.07854858785867691, "chain_id": "3QY5DC2MXRJL50X0LV00MJD8KWHUFT_1_3"}
{"score": 0.8064721822738647, "chain_id": "3QY5DC2MXRJL50X0LV00MJD8KWHUFT_1_4"}
{"score": 0.07466591149568558, "chain_id": "3QY5DC2MXRJL50X0LV00MJD8KWHUFT_1_5"}
{"score": 0.03260458633303642, "chain_id": "3QY5DC2MXRJL50X0LV00MJD8KWHUFT_1_6"}
{"score": 0.1111137717962265, "chain_id": "3QY5DC2MXRJL50X0LV00MJD8KWHUFT_1_7"}
{"score": 0.16425010561943054, "chain_id": "3QY5DC2MXRJL50X0LV00MJD8KWHUFT_1_8"}
{"score": 0.1810707300901413, "chain_id": "3QY5DC2MXRJL50X0LV00MJD8KWHUFT_1_9"}
{"score": 0.06490698456764221, "chain_id": "3QY5DC2MXRJL50X0LV00MJD8KWHUFT_1_10"}
{"score": 0.02018425241112709, "chain_id": "3D3VGR7TA0EY9WPQX64TGZ1RACX3RO_1_1"}
{"score": 0.12331627309322357, "chain_id": "3D3VGR7TA0EY9WPQX64TGZ1RACX3RO_1_2"}
{"score": 0.02151038497686386, "chain_id": "3D3VGR7TA0EY9WPQX64TGZ1RACX3RO_1_3"}
{"score": 0.08388110995292664, "chain_id": "3D3VGR7TA0EY9WPQX64TGZ1RACX3RO_1_4"}
{"score": 0.05189235508441925, "chain_id": "3D3VGR7TA0EY9WPQX64TGZ1RACX3RO_1_5"}
{"score": 0.026944240555167198, "chain_id": "3D3VGR7TA0EY9WPQX64TGZ1RACX3RO_1_6"}
{"score": 0.05916052311658859, "chain_id": "3D3VGR7TA0EY9WPQX64TGZ1RACX3RO_1_7"}
{"score": 0.049784813076257706, "chain_id": "3D3VGR7TA0EY9WPQX64TGZ1RACX3RO_1_8"}
{"score": 0.17481975257396698, "chain_id": "3D3VGR7TA0EY9WPQX64TGZ1RACX3RO_1_9"}
{"score": 0.13009588420391083, "chain_id": "3D3VGR7TA0EY9WPQX64TGZ1RACX3RO_1_10"}
{"score": 0.9424861073493958, "chain_id": "32Z9ZLUT1LJA6R49KZCRQYXWLKHOHC_1_1"}
{"score": 0.8929646611213684, "chain_id": "32Z9ZLUT1LJA6R49KZCRQYXWLKHOHC_1_2"}
{"score": 0.9099879264831543, "chain_id": "32Z9ZLUT1LJA6R49KZCRQYXWLKHOHC_1_3"}
{"score": 0.8657948970794678, "chain_id": "32Z9ZLUT1LJA6R49KZCRQYXWLKHOHC_1_4"}
{"score": 0.8469035029411316, "chain_id": "32Z9ZLUT1LJA6R49KZCRQYXWLKHOHC_1_5"}
{"score": 0.3514905273914337, "chain_id": "32Z9ZLUT1LJA6R49KZCRQYXWLKHOHC_1_6"}
{"score": 0.12792295217514038, "chain_id": "32Z9ZLUT1LJA6R49KZCRQYXWLKHOHC_1_7"}
{"score": 0.01997818984091282, "chain_id": "32Z9ZLUT1LJA6R49KZCRQYXWLKHOHC_1_8"}
{"score": 0.06060658022761345, "chain_id": "32Z9ZLUT1LJA6R49KZCRQYXWLKHOHC_1_9"}
{"score": 0.01744762621819973, "chain_id": "32Z9ZLUT1LJA6R49KZCRQYXWLKHOHC_1_10"}
{"score": 0.11463215202093124, "chain_id": "3PIWWX1FJJ5SWM82SMN7UFWPZJHJJ1_1_8"}
{"score": 0.09200224280357361, "chain_id": "3PIWWX1FJJ5SWM82SMN7UFWPZJHJJ1_1_1"}
{"score": 0.016370244324207306, "chain_id": "3PIWWX1FJJ5SWM82SMN7UFWPZJHJJ1_1_2"}
{"score": 0.020495884120464325, "chain_id": "3PIWWX1FJJ5SWM82SMN7UFWPZJHJJ1_1_3"}
{"score": 0.01743045076727867, "chain_id": "3PIWWX1FJJ5SWM82SMN7UFWPZJHJJ1_1_4"}
{"score": 0.050704225897789, "chain_id": "3PIWWX1FJJ5SWM82SMN7UFWPZJHJJ1_1_5"}
{"score": 0.38215747475624084, "chain_id": "3PIWWX1FJJ5SWM82SMN7UFWPZJHJJ1_1_6"}
{"score": 0.019577275961637497, "chain_id": "3PIWWX1FJJ5SWM82SMN7UFWPZJHJJ1_1_7"}
{"score": 0.08796490728855133, "chain_id": "3PIWWX1FJJ5SWM82SMN7UFWPZJHJJ1_1_9"}
{"score": 0.03465576842427254, "chain_id": "3PIWWX1FJJ5SWM82SMN7UFWPZJHJJ1_1_10"}
{"score": 0.9702723622322083, "chain_id": "308XBLVESI33CRT3CZJZYIZ3XLVBR5_1_2"}
{"score": 0.9858176112174988, "chain_id": "308XBLVESI33CRT3CZJZYIZ3XLVBR5_1_3"}
{"score": 0.40914058685302734, "chain_id": "308XBLVESI33CRT3CZJZYIZ3XLVBR5_1_8"}
{"score": 0.7148237228393555, "chain_id": "308XBLVESI33CRT3CZJZYIZ3XLVBR5_1_10"}
{"score": 0.9601445198059082, "chain_id": "308XBLVESI33CRT3CZJZYIZ3XLVBR5_1_1"}
{"score": 0.9373387098312378, "chain_id": "308XBLVESI33CRT3CZJZYIZ3XLVBR5_1_4"}
{"score": 0.48224154114723206, "chain_id": "308XBLVESI33CRT3CZJZYIZ3XLVBR5_1_5"}
{"score": 0.32178324460983276, "chain_id": "308XBLVESI33CRT3CZJZYIZ3XLVBR5_1_6"}
{"score": 0.5825563073158264, "chain_id": "308XBLVESI33CRT3CZJZYIZ3XLVBR5_1_7"}
{"score": 0.43750250339508057, "chain_id": "308XBLVESI33CRT3CZJZYIZ3XLVBR5_1_9"}
{"score": 0.9604309797286987, "chain_id": "36W0OB37HWDM5VIGM8N86W401V3ZHI_1_1"}
{"score": 0.21017993986606598, "chain_id": "36W0OB37HWDM5VIGM8N86W401V3ZHI_1_3"}
{"score": 0.4195299446582794, "chain_id": "36W0OB37HWDM5VIGM8N86W401V3ZHI_1_4"}
{"score": 0.8728730082511902, "chain_id": "36W0OB37HWDM5VIGM8N86W401V3ZHI_1_5"}
{"score": 0.8815140128135681, "chain_id": "36W0OB37HWDM5VIGM8N86W401V3ZHI_1_2"}
{"score": 0.07596690207719803, "chain_id": "36W0OB37HWDM5VIGM8N86W401V3ZHI_1_6"}
{"score": 0.06919725984334946, "chain_id": "36W0OB37HWDM5VIGM8N86W401V3ZHI_1_7"}
{"score": 0.026425769552588463, "chain_id": "36W0OB37HWDM5VIGM8N86W401V3ZHI_1_8"}
{"score": 0.6252869963645935, "chain_id": "36W0OB37HWDM5VIGM8N86W401V3ZHI_1_9"}
{"score": 0.04096619039773941, "chain_id": "36W0OB37HWDM5VIGM8N86W401V3ZHI_1_10"}
{"score": 0.46043887734413147, "chain_id": "3IHR8NYAM70YFFSFKS5NL9TIMUG4PS_1_7"}
{"score": 0.5922839641571045, "chain_id": "3IHR8NYAM70YFFSFKS5NL9TIMUG4PS_1_1"}
{"score": 0.6909554600715637, "chain_id": "3IHR8NYAM70YFFSFKS5NL9TIMUG4PS_1_2"}
{"score": 0.6445025205612183, "chain_id": "3IHR8NYAM70YFFSFKS5NL9TIMUG4PS_1_3"}
{"score": 0.39087989926338196, "chain_id": "3IHR8NYAM70YFFSFKS5NL9TIMUG4PS_1_4"}
{"score": 0.12088826298713684, "chain_id": "3IHR8NYAM70YFFSFKS5NL9TIMUG4PS_1_5"}
{"score": 0.09266719967126846, "chain_id": "3IHR8NYAM70YFFSFKS5NL9TIMUG4PS_1_6"}
{"score": 0.4405986964702606, "chain_id": "3IHR8NYAM70YFFSFKS5NL9TIMUG4PS_1_8"}
{"score": 0.06801671534776688, "chain_id": "3IHR8NYAM70YFFSFKS5NL9TIMUG4PS_1_9"}
{"score": 0.8684747219085693, "chain_id": "3IHR8NYAM70YFFSFKS5NL9TIMUG4PS_1_10"}
{"score": 0.6703656315803528, "chain_id": "3R5F3LQFV2JWXC43QLIYQ511HZEOZW_1_2"}
{"score": 0.9059786796569824, "chain_id": "3R5F3LQFV2JWXC43QLIYQ511HZEOZW_1_3"}
{"score": 0.7014577984809875, "chain_id": "3R5F3LQFV2JWXC43QLIYQ511HZEOZW_1_4"}
{"score": 0.09778066724538803, "chain_id": "3R5F3LQFV2JWXC43QLIYQ511HZEOZW_1_9"}
{"score": 0.9900189638137817, "chain_id": "3R5F3LQFV2JWXC43QLIYQ511HZEOZW_1_1"}
{"score": 0.9905951023101807, "chain_id": "3R5F3LQFV2JWXC43QLIYQ511HZEOZW_1_5"}
{"score": 0.4475460350513458, "chain_id": "3R5F3LQFV2JWXC43QLIYQ511HZEOZW_1_6"}
{"score": 0.35306116938591003, "chain_id": "3R5F3LQFV2JWXC43QLIYQ511HZEOZW_1_7"}
{"score": 0.6322989463806152, "chain_id": "3R5F3LQFV2JWXC43QLIYQ511HZEOZW_1_8"}
{"score": 0.15265478193759918, "chain_id": "3R5F3LQFV2JWXC43QLIYQ511HZEOZW_1_10"}
{"score": 0.8867703676223755, "chain_id": "3GU1KF0O4I0I0EDOZ7FATNZOWMYPBK_1_2"}
{"score": 0.9867240786552429, "chain_id": "3GU1KF0O4I0I0EDOZ7FATNZOWMYPBK_1_3"}
{"score": 0.9323352575302124, "chain_id": "3GU1KF0O4I0I0EDOZ7FATNZOWMYPBK_1_4"}
{"score": 0.9191121459007263, "chain_id": "3GU1KF0O4I0I0EDOZ7FATNZOWMYPBK_1_1"}
{"score": 0.3378153145313263, "chain_id": "3GU1KF0O4I0I0EDOZ7FATNZOWMYPBK_1_5"}
{"score": 0.20744037628173828, "chain_id": "3GU1KF0O4I0I0EDOZ7FATNZOWMYPBK_1_6"}
{"score": 0.39530834555625916, "chain_id": "3GU1KF0O4I0I0EDOZ7FATNZOWMYPBK_1_7"}
{"score": 0.22029514610767365, "chain_id": "3GU1KF0O4I0I0EDOZ7FATNZOWMYPBK_1_8"}
{"score": 0.05869322642683983, "chain_id": "3GU1KF0O4I0I0EDOZ7FATNZOWMYPBK_1_9"}
{"score": 0.04173249751329422, "chain_id": "3GU1KF0O4I0I0EDOZ7FATNZOWMYPBK_1_10"}
{"score": 0.990285336971283, "chain_id": "3PDJHANYK5FKHLY5K3QX9YB5ZZR6H2_1_1"}
{"score": 0.6985732316970825, "chain_id": "3PDJHANYK5FKHLY5K3QX9YB5ZZR6H2_1_2"}
{"score": 0.7042280435562134, "chain_id": "3PDJHANYK5FKHLY5K3QX9YB5ZZR6H2_1_3"}
{"score": 0.9888967275619507, "chain_id": "3PDJHANYK5FKHLY5K3QX9YB5ZZR6H2_1_4"}
{"score": 0.5922606587409973, "chain_id": "3PDJHANYK5FKHLY5K3QX9YB5ZZR6H2_1_5"}
{"score": 0.4072323143482208, "chain_id": "3PDJHANYK5FKHLY5K3QX9YB5ZZR6H2_1_6"}
{"score": 0.4321288466453552, "chain_id": "3PDJHANYK5FKHLY5K3QX9YB5ZZR6H2_1_7"}
{"score": 0.473145455121994, "chain_id": "3PDJHANYK5FKHLY5K3QX9YB5ZZR6H2_1_8"}
{"score": 0.021841155365109444, "chain_id": "3PDJHANYK5FKHLY5K3QX9YB5ZZR6H2_1_9"}
{"score": 0.03131723403930664, "chain_id": "3PDJHANYK5FKHLY5K3QX9YB5ZZR6H2_1_10"}
{"score": 0.836219072341919, "chain_id": "3JJVG1YBEBWE74V5FS6WVHU78VB5BD_1_1"}
{"score": 0.12444846332073212, "chain_id": "3JJVG1YBEBWE74V5FS6WVHU78VB5BD_1_2"}
{"score": 0.0805387794971466, "chain_id": "3JJVG1YBEBWE74V5FS6WVHU78VB5BD_1_3"}
{"score": 0.7465920448303223, "chain_id": "3JJVG1YBEBWE74V5FS6WVHU78VB5BD_1_4"}
{"score": 0.17593063414096832, "chain_id": "3JJVG1YBEBWE74V5FS6WVHU78VB5BD_1_5"}
{"score": 0.020115358754992485, "chain_id": "3JJVG1YBEBWE74V5FS6WVHU78VB5BD_1_6"}
{"score": 0.22527022659778595, "chain_id": "3JJVG1YBEBWE74V5FS6WVHU78VB5BD_1_7"}
{"score": 0.028116583824157715, "chain_id": "3JJVG1YBEBWE74V5FS6WVHU78VB5BD_1_8"}
{"score": 0.7956109046936035, "chain_id": "3JJVG1YBEBWE74V5FS6WVHU78VB5BD_1_9"}
{"score": 0.02584243193268776, "chain_id": "3JJVG1YBEBWE74V5FS6WVHU78VB5BD_1_10"}
{"score": 0.31947124004364014, "chain_id": "3L70J4KAZGL4S756OKOJYIYT46GADC_1_8"}
{"score": 0.07379372417926788, "chain_id": "3L70J4KAZGL4S756OKOJYIYT46GADC_1_9"}
{"score": 0.16751030087471008, "chain_id": "3L70J4KAZGL4S756OKOJYIYT46GADC_1_1"}
{"score": 0.30400511622428894, "chain_id": "3L70J4KAZGL4S756OKOJYIYT46GADC_1_2"}
{"score": 0.10479998588562012, "chain_id": "3L70J4KAZGL4S756OKOJYIYT46GADC_1_3"}
{"score": 0.05939459800720215, "chain_id": "3L70J4KAZGL4S756OKOJYIYT46GADC_1_4"}
{"score": 0.3947587013244629, "chain_id": "3L70J4KAZGL4S756OKOJYIYT46GADC_1_5"}
{"score": 0.1335369497537613, "chain_id": "3L70J4KAZGL4S756OKOJYIYT46GADC_1_6"}
{"score": 0.3081148862838745, "chain_id": "3L70J4KAZGL4S756OKOJYIYT46GADC_1_7"}
{"score": 0.03932870179414749, "chain_id": "3L70J4KAZGL4S756OKOJYIYT46GADC_1_10"}
{"score": 0.17562979459762573, "chain_id": "3K2755HG5S2ZOYMEZ0ABCJ9KV45DFY_1_6"}
{"score": 0.2070552408695221, "chain_id": "3K2755HG5S2ZOYMEZ0ABCJ9KV45DFY_1_1"}
{"score": 0.056136149913072586, "chain_id": "3K2755HG5S2ZOYMEZ0ABCJ9KV45DFY_1_2"}
{"score": 0.017117226496338844, "chain_id": "3K2755HG5S2ZOYMEZ0ABCJ9KV45DFY_1_3"}
{"score": 0.07486531138420105, "chain_id": "3K2755HG5S2ZOYMEZ0ABCJ9KV45DFY_1_4"}
{"score": 0.12899433076381683, "chain_id": "3K2755HG5S2ZOYMEZ0ABCJ9KV45DFY_1_5"}
{"score": 0.10170164704322815, "chain_id": "3K2755HG5S2ZOYMEZ0ABCJ9KV45DFY_1_7"}
{"score": 0.05091805011034012, "chain_id": "3K2755HG5S2ZOYMEZ0ABCJ9KV45DFY_1_8"}
{"score": 0.03360820561647415, "chain_id": "3K2755HG5S2ZOYMEZ0ABCJ9KV45DFY_1_9"}
{"score": 0.026322776451706886, "chain_id": "3K2755HG5S2ZOYMEZ0ABCJ9KV45DFY_1_10"}
{"score": 0.8264751434326172, "chain_id": "3WI0P0II61RWRORNQVA5T8N3YVHRDE_1_2"}
{"score": 0.6875982284545898, "chain_id": "3WI0P0II61RWRORNQVA5T8N3YVHRDE_1_5"}
{"score": 0.40925365686416626, "chain_id": "3WI0P0II61RWRORNQVA5T8N3YVHRDE_1_1"}
{"score": 0.08740858733654022, "chain_id": "3WI0P0II61RWRORNQVA5T8N3YVHRDE_1_3"}
{"score": 0.10063909739255905, "chain_id": "3WI0P0II61RWRORNQVA5T8N3YVHRDE_1_4"}
{"score": 0.04097456485033035, "chain_id": "3WI0P0II61RWRORNQVA5T8N3YVHRDE_1_6"}
{"score": 0.1108706146478653, "chain_id": "3WI0P0II61RWRORNQVA5T8N3YVHRDE_1_7"}
{"score": 0.024064702913165092, "chain_id": "3WI0P0II61RWRORNQVA5T8N3YVHRDE_1_8"}
{"score": 0.021372100338339806, "chain_id": "3WI0P0II61RWRORNQVA5T8N3YVHRDE_1_9"}
{"score": 0.021450860425829887, "chain_id": "3WI0P0II61RWRORNQVA5T8N3YVHRDE_1_10"}
{"score": 0.8973820209503174, "chain_id": "3K3R2QNK8B2C4Q6NI908CNRXEAMU9M_1_4"}
{"score": 0.8847006559371948, "chain_id": "3K3R2QNK8B2C4Q6NI908CNRXEAMU9M_1_1"}
{"score": 0.902370810508728, "chain_id": "3K3R2QNK8B2C4Q6NI908CNRXEAMU9M_1_2"}
{"score": 0.13522280752658844, "chain_id": "3K3R2QNK8B2C4Q6NI908CNRXEAMU9M_1_3"}
{"score": 0.8746687769889832, "chain_id": "3K3R2QNK8B2C4Q6NI908CNRXEAMU9M_1_5"}
{"score": 0.1197073757648468, "chain_id": "3K3R2QNK8B2C4Q6NI908CNRXEAMU9M_1_6"}
{"score": 0.11936161667108536, "chain_id": "3K3R2QNK8B2C4Q6NI908CNRXEAMU9M_1_7"}
{"score": 0.17434842884540558, "chain_id": "3K3R2QNK8B2C4Q6NI908CNRXEAMU9M_1_8"}
{"score": 0.08004308491945267, "chain_id": "3K3R2QNK8B2C4Q6NI908CNRXEAMU9M_1_9"}
{"score": 0.15966960787773132, "chain_id": "3K3R2QNK8B2C4Q6NI908CNRXEAMU9M_1_10"}
{"score": 0.0588693767786026, "chain_id": "3Q5C1WP23M0DU6DDDVD7P5HYIAQ51E_1_1"}
{"score": 0.027301879599690437, "chain_id": "3Q5C1WP23M0DU6DDDVD7P5HYIAQ51E_1_2"}
{"score": 0.3145056962966919, "chain_id": "3Q5C1WP23M0DU6DDDVD7P5HYIAQ51E_1_3"}
{"score": 0.054338954389095306, "chain_id": "3Q5C1WP23M0DU6DDDVD7P5HYIAQ51E_1_4"}
{"score": 0.0799989104270935, "chain_id": "3Q5C1WP23M0DU6DDDVD7P5HYIAQ51E_1_5"}
{"score": 0.04604094848036766, "chain_id": "3Q5C1WP23M0DU6DDDVD7P5HYIAQ51E_1_6"}
{"score": 0.02138127014040947, "chain_id": "3Q5C1WP23M0DU6DDDVD7P5HYIAQ51E_1_7"}
{"score": 0.05200296267867088, "chain_id": "3Q5C1WP23M0DU6DDDVD7P5HYIAQ51E_1_8"}
{"score": 0.0940156951546669, "chain_id": "3Q5C1WP23M0DU6DDDVD7P5HYIAQ51E_1_9"}
{"score": 0.03400594741106033, "chain_id": "3Q5C1WP23M0DU6DDDVD7P5HYIAQ51E_1_10"}
{"score": 0.49219098687171936, "chain_id": "3ZSY5X72NXANVLICG4OL42Z28H0RO9_1_1"}
{"score": 0.6870032548904419, "chain_id": "3ZSY5X72NXANVLICG4OL42Z28H0RO9_1_2"}
{"score": 0.9065099358558655, "chain_id": "3ZSY5X72NXANVLICG4OL42Z28H0RO9_1_3"}
{"score": 0.6951137781143188, "chain_id": "3ZSY5X72NXANVLICG4OL42Z28H0RO9_1_4"}
{"score": 0.045572999864816666, "chain_id": "3ZSY5X72NXANVLICG4OL42Z28H0RO9_1_5"}
{"score": 0.032945986837148666, "chain_id": "3ZSY5X72NXANVLICG4OL42Z28H0RO9_1_6"}
{"score": 0.027892710641026497, "chain_id": "3ZSY5X72NXANVLICG4OL42Z28H0RO9_1_7"}
{"score": 0.07452057301998138, "chain_id": "3ZSY5X72NXANVLICG4OL42Z28H0RO9_1_8"}
{"score": 0.2206573337316513, "chain_id": "3ZSY5X72NXANVLICG4OL42Z28H0RO9_1_9"}
{"score": 0.050191737711429596, "chain_id": "3ZSY5X72NXANVLICG4OL42Z28H0RO9_1_10"}
{"score": 0.9468326568603516, "chain_id": "3FPRZHYEPY6Q23676Q93HWQUYX83VC_1_1"}
{"score": 0.4601684510707855, "chain_id": "3FPRZHYEPY6Q23676Q93HWQUYX83VC_1_3"}
{"score": 0.5100127458572388, "chain_id": "3FPRZHYEPY6Q23676Q93HWQUYX83VC_1_2"}
{"score": 0.44454124569892883, "chain_id": "3FPRZHYEPY6Q23676Q93HWQUYX83VC_1_4"}
{"score": 0.07359501719474792, "chain_id": "3FPRZHYEPY6Q23676Q93HWQUYX83VC_1_5"}
{"score": 0.030590591952204704, "chain_id": "3FPRZHYEPY6Q23676Q93HWQUYX83VC_1_6"}
{"score": 0.029221966862678528, "chain_id": "3FPRZHYEPY6Q23676Q93HWQUYX83VC_1_7"}
{"score": 0.15774573385715485, "chain_id": "3FPRZHYEPY6Q23676Q93HWQUYX83VC_1_8"}
{"score": 0.023512642830610275, "chain_id": "3FPRZHYEPY6Q23676Q93HWQUYX83VC_1_9"}
{"score": 0.03229328244924545, "chain_id": "3FPRZHYEPY6Q23676Q93HWQUYX83VC_1_10"}
{"score": 0.1048070564866066, "chain_id": "3W92K5RLWUGTGITBK9XWWTOECZJ5V0_1_1"}
{"score": 0.022638307884335518, "chain_id": "3W92K5RLWUGTGITBK9XWWTOECZJ5V0_1_2"}
{"score": 0.03357860818505287, "chain_id": "3W92K5RLWUGTGITBK9XWWTOECZJ5V0_1_3"}
{"score": 0.02273036539554596, "chain_id": "3W92K5RLWUGTGITBK9XWWTOECZJ5V0_1_4"}
{"score": 0.025740431621670723, "chain_id": "3W92K5RLWUGTGITBK9XWWTOECZJ5V0_1_5"}
{"score": 0.0652720257639885, "chain_id": "3W92K5RLWUGTGITBK9XWWTOECZJ5V0_1_6"}
{"score": 0.04598253220319748, "chain_id": "3W92K5RLWUGTGITBK9XWWTOECZJ5V0_1_7"}
{"score": 0.064106784760952, "chain_id": "3W92K5RLWUGTGITBK9XWWTOECZJ5V0_1_8"}
{"score": 0.014853360131382942, "chain_id": "3W92K5RLWUGTGITBK9XWWTOECZJ5V0_1_9"}
{"score": 0.07454358041286469, "chain_id": "3W92K5RLWUGTGITBK9XWWTOECZJ5V0_1_10"}
{"score": 0.8730283975601196, "chain_id": "3A1COHJ8NJU7LZHTDINVTC7W6FOH8D_1_1"}
{"score": 0.731550395488739, "chain_id": "3A1COHJ8NJU7LZHTDINVTC7W6FOH8D_1_3"}
{"score": 0.7924871444702148, "chain_id": "3A1COHJ8NJU7LZHTDINVTC7W6FOH8D_1_2"}
{"score": 0.7282619476318359, "chain_id": "3A1COHJ8NJU7LZHTDINVTC7W6FOH8D_1_4"}
{"score": 0.08540116995573044, "chain_id": "3A1COHJ8NJU7LZHTDINVTC7W6FOH8D_1_5"}
{"score": 0.10797961801290512, "chain_id": "3A1COHJ8NJU7LZHTDINVTC7W6FOH8D_1_6"}
{"score": 0.12687593698501587, "chain_id": "3A1COHJ8NJU7LZHTDINVTC7W6FOH8D_1_7"}
{"score": 0.1469944566488266, "chain_id": "3A1COHJ8NJU7LZHTDINVTC7W6FOH8D_1_8"}
{"score": 0.12232885509729385, "chain_id": "3A1COHJ8NJU7LZHTDINVTC7W6FOH8D_1_9"}
{"score": 0.020015686750411987, "chain_id": "3A1COHJ8NJU7LZHTDINVTC7W6FOH8D_1_10"}
{"score": 0.06052149832248688, "chain_id": "3MD9PLUKKIDEFR4RP6ILBG1WR22NZZ_1_1"}
{"score": 0.023436326533555984, "chain_id": "3MD9PLUKKIDEFR4RP6ILBG1WR22NZZ_1_2"}
{"score": 0.03553759679198265, "chain_id": "3MD9PLUKKIDEFR4RP6ILBG1WR22NZZ_1_3"}
{"score": 0.02979547344148159, "chain_id": "3MD9PLUKKIDEFR4RP6ILBG1WR22NZZ_1_4"}
{"score": 0.03127181529998779, "chain_id": "3MD9PLUKKIDEFR4RP6ILBG1WR22NZZ_1_5"}
{"score": 0.0343499593436718, "chain_id": "3MD9PLUKKIDEFR4RP6ILBG1WR22NZZ_1_6"}
{"score": 0.019632643088698387, "chain_id": "3MD9PLUKKIDEFR4RP6ILBG1WR22NZZ_1_7"}
{"score": 0.04022921621799469, "chain_id": "3MD9PLUKKIDEFR4RP6ILBG1WR22NZZ_1_8"}
{"score": 0.028294658288359642, "chain_id": "3MD9PLUKKIDEFR4RP6ILBG1WR22NZZ_1_9"}
{"score": 0.025441978126764297, "chain_id": "3MD9PLUKKIDEFR4RP6ILBG1WR22NZZ_1_10"}
{"score": 0.9777031540870667, "chain_id": "337RC3OW0517I7WWCWD3DIKBUPDLVU_1_1"}
{"score": 0.2595481276512146, "chain_id": "337RC3OW0517I7WWCWD3DIKBUPDLVU_1_3"}
{"score": 0.36441999673843384, "chain_id": "337RC3OW0517I7WWCWD3DIKBUPDLVU_1_2"}
{"score": 0.4711589217185974, "chain_id": "337RC3OW0517I7WWCWD3DIKBUPDLVU_1_4"}
{"score": 0.21717479825019836, "chain_id": "337RC3OW0517I7WWCWD3DIKBUPDLVU_1_5"}
{"score": 0.9502522945404053, "chain_id": "337RC3OW0517I7WWCWD3DIKBUPDLVU_1_6"}
{"score": 0.9656388759613037, "chain_id": "337RC3OW0517I7WWCWD3DIKBUPDLVU_1_7"}
{"score": 0.885770857334137, "chain_id": "337RC3OW0517I7WWCWD3DIKBUPDLVU_1_8"}
{"score": 0.6701270341873169, "chain_id": "337RC3OW0517I7WWCWD3DIKBUPDLVU_1_9"}
{"score": 0.7887953519821167, "chain_id": "337RC3OW0517I7WWCWD3DIKBUPDLVU_1_10"}
{"score": 0.844865620136261, "chain_id": "358UUM7WRZ2GAFQDZI7JTGD7EZV7RE_1_1"}
{"score": 0.970440149307251, "chain_id": "358UUM7WRZ2GAFQDZI7JTGD7EZV7RE_1_2"}
{"score": 0.5824220776557922, "chain_id": "358UUM7WRZ2GAFQDZI7JTGD7EZV7RE_1_3"}
{"score": 0.47317951917648315, "chain_id": "358UUM7WRZ2GAFQDZI7JTGD7EZV7RE_1_4"}
{"score": 0.6235581040382385, "chain_id": "358UUM7WRZ2GAFQDZI7JTGD7EZV7RE_1_5"}
{"score": 0.4676934778690338, "chain_id": "358UUM7WRZ2GAFQDZI7JTGD7EZV7RE_1_6"}
{"score": 0.9147042036056519, "chain_id": "358UUM7WRZ2GAFQDZI7JTGD7EZV7RE_1_7"}
{"score": 0.23132628202438354, "chain_id": "358UUM7WRZ2GAFQDZI7JTGD7EZV7RE_1_8"}
{"score": 0.6378214359283447, "chain_id": "358UUM7WRZ2GAFQDZI7JTGD7EZV7RE_1_9"}
{"score": 0.13515232503414154, "chain_id": "358UUM7WRZ2GAFQDZI7JTGD7EZV7RE_1_10"}
{"score": 0.5973337888717651, "chain_id": "3UOUJI6MTDD25MOLLP6MSQDFN65UXL_1_2"}
{"score": 0.4104008078575134, "chain_id": "3UOUJI6MTDD25MOLLP6MSQDFN65UXL_1_9"}
{"score": 0.9055771827697754, "chain_id": "3UOUJI6MTDD25MOLLP6MSQDFN65UXL_1_1"}
{"score": 0.4250107705593109, "chain_id": "3UOUJI6MTDD25MOLLP6MSQDFN65UXL_1_3"}
{"score": 0.7451071739196777, "chain_id": "3UOUJI6MTDD25MOLLP6MSQDFN65UXL_1_4"}
{"score": 0.9788243770599365, "chain_id": "3UOUJI6MTDD25MOLLP6MSQDFN65UXL_1_5"}
{"score": 0.19572940468788147, "chain_id": "3UOUJI6MTDD25MOLLP6MSQDFN65UXL_1_6"}
{"score": 0.20438355207443237, "chain_id": "3UOUJI6MTDD25MOLLP6MSQDFN65UXL_1_7"}
{"score": 0.11788579821586609, "chain_id": "3UOUJI6MTDD25MOLLP6MSQDFN65UXL_1_8"}
{"score": 0.12513451278209686, "chain_id": "3UOUJI6MTDD25MOLLP6MSQDFN65UXL_1_10"}
{"score": 0.8404551148414612, "chain_id": "3SKEMFQBZ34YNPI1J3QS64NOV16K85_1_10"}
{"score": 0.4865041673183441, "chain_id": "3SKEMFQBZ34YNPI1J3QS64NOV16K85_1_1"}
{"score": 0.07004845142364502, "chain_id": "3SKEMFQBZ34YNPI1J3QS64NOV16K85_1_2"}
{"score": 0.7672804594039917, "chain_id": "3SKEMFQBZ34YNPI1J3QS64NOV16K85_1_3"}
{"score": 0.11771730333566666, "chain_id": "3SKEMFQBZ34YNPI1J3QS64NOV16K85_1_4"}
{"score": 0.9440962076187134, "chain_id": "3SKEMFQBZ34YNPI1J3QS64NOV16K85_1_5"}
{"score": 0.1510811597108841, "chain_id": "3SKEMFQBZ34YNPI1J3QS64NOV16K85_1_6"}
{"score": 0.08010011911392212, "chain_id": "3SKEMFQBZ34YNPI1J3QS64NOV16K85_1_7"}
{"score": 0.051083486527204514, "chain_id": "3SKEMFQBZ34YNPI1J3QS64NOV16K85_1_8"}
{"score": 0.5124474763870239, "chain_id": "3SKEMFQBZ34YNPI1J3QS64NOV16K85_1_9"}
{"score": 0.5651118159294128, "chain_id": "3VP0C6EFSGV69ZZGB06A13J1FYMM6C_1_4"}
{"score": 0.9698296785354614, "chain_id": "3VP0C6EFSGV69ZZGB06A13J1FYMM6C_1_5"}
{"score": 0.2012777328491211, "chain_id": "3VP0C6EFSGV69ZZGB06A13J1FYMM6C_1_7"}
{"score": 0.3513566553592682, "chain_id": "3VP0C6EFSGV69ZZGB06A13J1FYMM6C_1_9"}
{"score": 0.8246622681617737, "chain_id": "3VP0C6EFSGV69ZZGB06A13J1FYMM6C_1_1"}
{"score": 0.5276979207992554, "chain_id": "3VP0C6EFSGV69ZZGB06A13J1FYMM6C_1_2"}
{"score": 0.43060675263404846, "chain_id": "3VP0C6EFSGV69ZZGB06A13J1FYMM6C_1_3"}
{"score": 0.19931364059448242, "chain_id": "3VP0C6EFSGV69ZZGB06A13J1FYMM6C_1_6"}
{"score": 0.12572021782398224, "chain_id": "3VP0C6EFSGV69ZZGB06A13J1FYMM6C_1_8"}
{"score": 0.14657436311244965, "chain_id": "3VP0C6EFSGV69ZZGB06A13J1FYMM6C_1_10"}
{"score": 0.9273174405097961, "chain_id": "3BXQMRHWKZXRBAPH7I4DH9XHGKPUM5_1_7"}
{"score": 0.9593112468719482, "chain_id": "3BXQMRHWKZXRBAPH7I4DH9XHGKPUM5_1_1"}
{"score": 0.6341525316238403, "chain_id": "3BXQMRHWKZXRBAPH7I4DH9XHGKPUM5_1_2"}
{"score": 0.30400800704956055, "chain_id": "3BXQMRHWKZXRBAPH7I4DH9XHGKPUM5_1_3"}
{"score": 0.5659045577049255, "chain_id": "3BXQMRHWKZXRBAPH7I4DH9XHGKPUM5_1_4"}
{"score": 0.9674991965293884, "chain_id": "3BXQMRHWKZXRBAPH7I4DH9XHGKPUM5_1_5"}
{"score": 0.4480254054069519, "chain_id": "3BXQMRHWKZXRBAPH7I4DH9XHGKPUM5_1_6"}
{"score": 0.887102484703064, "chain_id": "3BXQMRHWKZXRBAPH7I4DH9XHGKPUM5_1_8"}
{"score": 0.07918500900268555, "chain_id": "3BXQMRHWKZXRBAPH7I4DH9XHGKPUM5_1_9"}
{"score": 0.3468695878982544, "chain_id": "3BXQMRHWKZXRBAPH7I4DH9XHGKPUM5_1_10"}
{"score": 0.9810896515846252, "chain_id": "3YGXWBAF70GFLQJBFNJH19UBO5TC43_1_1"}
{"score": 0.9316778779029846, "chain_id": "3YGXWBAF70GFLQJBFNJH19UBO5TC43_1_6"}
{"score": 0.5562341809272766, "chain_id": "3YGXWBAF70GFLQJBFNJH19UBO5TC43_1_2"}
{"score": 0.22414568066596985, "chain_id": "3YGXWBAF70GFLQJBFNJH19UBO5TC43_1_3"}
{"score": 0.3981945216655731, "chain_id": "3YGXWBAF70GFLQJBFNJH19UBO5TC43_1_4"}
{"score": 0.5884978175163269, "chain_id": "3YGXWBAF70GFLQJBFNJH19UBO5TC43_1_5"}
{"score": 0.14618387818336487, "chain_id": "3YGXWBAF70GFLQJBFNJH19UBO5TC43_1_7"}
{"score": 0.6529613137245178, "chain_id": "3YGXWBAF70GFLQJBFNJH19UBO5TC43_1_8"}
{"score": 0.8841698169708252, "chain_id": "3YGXWBAF70GFLQJBFNJH19UBO5TC43_1_9"}
{"score": 0.04142783209681511, "chain_id": "3YGXWBAF70GFLQJBFNJH19UBO5TC43_1_10"}
{"score": 0.9763434529304504, "chain_id": "308Q0PEVB8C7VZBNOSBUTK3MOPRI9I_1_2"}
{"score": 0.4394323527812958, "chain_id": "308Q0PEVB8C7VZBNOSBUTK3MOPRI9I_1_8"}
{"score": 0.3483298122882843, "chain_id": "308Q0PEVB8C7VZBNOSBUTK3MOPRI9I_1_1"}
{"score": 0.14030899107456207, "chain_id": "308Q0PEVB8C7VZBNOSBUTK3MOPRI9I_1_3"}
{"score": 0.04796729236841202, "chain_id": "308Q0PEVB8C7VZBNOSBUTK3MOPRI9I_1_4"}
{"score": 0.6439855098724365, "chain_id": "308Q0PEVB8C7VZBNOSBUTK3MOPRI9I_1_5"}
{"score": 0.9547001123428345, "chain_id": "308Q0PEVB8C7VZBNOSBUTK3MOPRI9I_1_6"}
{"score": 0.6456068754196167, "chain_id": "308Q0PEVB8C7VZBNOSBUTK3MOPRI9I_1_7"}
{"score": 0.45397165417671204, "chain_id": "308Q0PEVB8C7VZBNOSBUTK3MOPRI9I_1_9"}
{"score": 0.35410165786743164, "chain_id": "308Q0PEVB8C7VZBNOSBUTK3MOPRI9I_1_10"}
{"score": 0.9849306344985962, "chain_id": "3LWJHTCVCCLTD7QJ4MGVCIGJKT2QFE_1_2"}
{"score": 0.9524027109146118, "chain_id": "3LWJHTCVCCLTD7QJ4MGVCIGJKT2QFE_1_3"}
{"score": 0.9914127588272095, "chain_id": "3LWJHTCVCCLTD7QJ4MGVCIGJKT2QFE_1_4"}
{"score": 0.7104628086090088, "chain_id": "3LWJHTCVCCLTD7QJ4MGVCIGJKT2QFE_1_8"}
{"score": 0.4704766273498535, "chain_id": "3LWJHTCVCCLTD7QJ4MGVCIGJKT2QFE_1_10"}
{"score": 0.8077701926231384, "chain_id": "3LWJHTCVCCLTD7QJ4MGVCIGJKT2QFE_1_1"}
{"score": 0.7045657634735107, "chain_id": "3LWJHTCVCCLTD7QJ4MGVCIGJKT2QFE_1_5"}
{"score": 0.7894188761711121, "chain_id": "3LWJHTCVCCLTD7QJ4MGVCIGJKT2QFE_1_6"}
{"score": 0.14372040331363678, "chain_id": "3LWJHTCVCCLTD7QJ4MGVCIGJKT2QFE_1_7"}
{"score": 0.4475480318069458, "chain_id": "3LWJHTCVCCLTD7QJ4MGVCIGJKT2QFE_1_9"}
{"score": 0.9710587859153748, "chain_id": "36H9ULYP62TCRKM69WWMFH4X616JFI_1_1"}
{"score": 0.9656491875648499, "chain_id": "36H9ULYP62TCRKM69WWMFH4X616JFI_1_2"}
{"score": 0.7899582982063293, "chain_id": "36H9ULYP62TCRKM69WWMFH4X616JFI_1_3"}
{"score": 0.9149097204208374, "chain_id": "36H9ULYP62TCRKM69WWMFH4X616JFI_1_4"}
{"score": 0.821015477180481, "chain_id": "36H9ULYP62TCRKM69WWMFH4X616JFI_1_6"}
{"score": 0.18593479692935944, "chain_id": "36H9ULYP62TCRKM69WWMFH4X616JFI_1_7"}
{"score": 0.3424190282821655, "chain_id": "36H9ULYP62TCRKM69WWMFH4X616JFI_1_8"}
{"score": 0.9486210942268372, "chain_id": "36H9ULYP62TCRKM69WWMFH4X616JFI_1_5"}
{"score": 0.19093254208564758, "chain_id": "36H9ULYP62TCRKM69WWMFH4X616JFI_1_9"}
{"score": 0.05111519992351532, "chain_id": "36H9ULYP62TCRKM69WWMFH4X616JFI_1_10"}
{"score": 0.3830609917640686, "chain_id": "3TXMY6UCAENMAV69DKQU4CVGNMMQCR_1_1"}
{"score": 0.09156284481287003, "chain_id": "3TXMY6UCAENMAV69DKQU4CVGNMMQCR_1_4"}
{"score": 0.9435125589370728, "chain_id": "3TXMY6UCAENMAV69DKQU4CVGNMMQCR_1_5"}
{"score": 0.8868803977966309, "chain_id": "3TXMY6UCAENMAV69DKQU4CVGNMMQCR_1_7"}
{"score": 0.9250856041908264, "chain_id": "3TXMY6UCAENMAV69DKQU4CVGNMMQCR_1_8"}
{"score": 0.8168387413024902, "chain_id": "3TXMY6UCAENMAV69DKQU4CVGNMMQCR_1_2"}
{"score": 0.6504166126251221, "chain_id": "3TXMY6UCAENMAV69DKQU4CVGNMMQCR_1_3"}
{"score": 0.8626440763473511, "chain_id": "3TXMY6UCAENMAV69DKQU4CVGNMMQCR_1_6"}
{"score": 0.28571510314941406, "chain_id": "3TXMY6UCAENMAV69DKQU4CVGNMMQCR_1_9"}
{"score": 0.8331403732299805, "chain_id": "3TXMY6UCAENMAV69DKQU4CVGNMMQCR_1_10"}
{"score": 0.41598600149154663, "chain_id": "3ZOTGHDK5IAZW0IPVTOQUC4YUTFSOE_1_1"}
{"score": 0.7777001261711121, "chain_id": "3ZOTGHDK5IAZW0IPVTOQUC4YUTFSOE_1_2"}
{"score": 0.6332255005836487, "chain_id": "3ZOTGHDK5IAZW0IPVTOQUC4YUTFSOE_1_3"}
{"score": 0.05977901443839073, "chain_id": "3ZOTGHDK5IAZW0IPVTOQUC4YUTFSOE_1_4"}
{"score": 0.9403208494186401, "chain_id": "3ZOTGHDK5IAZW0IPVTOQUC4YUTFSOE_1_5"}
{"score": 0.8730039000511169, "chain_id": "3ZOTGHDK5IAZW0IPVTOQUC4YUTFSOE_1_6"}
{"score": 0.9764410257339478, "chain_id": "3ZOTGHDK5IAZW0IPVTOQUC4YUTFSOE_1_7"}
{"score": 0.9782424569129944, "chain_id": "3ZOTGHDK5IAZW0IPVTOQUC4YUTFSOE_1_8"}
{"score": 0.2410353571176529, "chain_id": "3ZOTGHDK5IAZW0IPVTOQUC4YUTFSOE_1_9"}
{"score": 0.10520317405462265, "chain_id": "3ZOTGHDK5IAZW0IPVTOQUC4YUTFSOE_1_10"}
{"score": 0.9589793682098389, "chain_id": "34X6J5FLPTX9I9CFNC7GRG8BMB5QJY_1_1"}
{"score": 0.9004181623458862, "chain_id": "34X6J5FLPTX9I9CFNC7GRG8BMB5QJY_1_2"}
{"score": 0.916823148727417, "chain_id": "34X6J5FLPTX9I9CFNC7GRG8BMB5QJY_1_3"}
{"score": 0.8956495523452759, "chain_id": "34X6J5FLPTX9I9CFNC7GRG8BMB5QJY_1_4"}
{"score": 0.4239080548286438, "chain_id": "34X6J5FLPTX9I9CFNC7GRG8BMB5QJY_1_5"}
{"score": 0.27353063225746155, "chain_id": "34X6J5FLPTX9I9CFNC7GRG8BMB5QJY_1_6"}
{"score": 0.6332600116729736, "chain_id": "34X6J5FLPTX9I9CFNC7GRG8BMB5QJY_1_7"}
{"score": 0.24511927366256714, "chain_id": "34X6J5FLPTX9I9CFNC7GRG8BMB5QJY_1_8"}
{"score": 0.8205695748329163, "chain_id": "34X6J5FLPTX9I9CFNC7GRG8BMB5QJY_1_9"}
{"score": 0.062185000628232956, "chain_id": "34X6J5FLPTX9I9CFNC7GRG8BMB5QJY_1_10"}
{"score": 0.25470277667045593, "chain_id": "3JMSRU9HQITTC1M4VAQZ0NURY0PEV6_1_5"}
{"score": 0.09871206432580948, "chain_id": "3JMSRU9HQITTC1M4VAQZ0NURY0PEV6_1_1"}
{"score": 0.027612784877419472, "chain_id": "3JMSRU9HQITTC1M4VAQZ0NURY0PEV6_1_2"}
{"score": 0.04706624895334244, "chain_id": "3JMSRU9HQITTC1M4VAQZ0NURY0PEV6_1_3"}
{"score": 0.047410644590854645, "chain_id": "3JMSRU9HQITTC1M4VAQZ0NURY0PEV6_1_4"}
{"score": 0.2907840311527252, "chain_id": "3JMSRU9HQITTC1M4VAQZ0NURY0PEV6_1_6"}
{"score": 0.06936328113079071, "chain_id": "3JMSRU9HQITTC1M4VAQZ0NURY0PEV6_1_7"}
{"score": 0.04421873763203621, "chain_id": "3JMSRU9HQITTC1M4VAQZ0NURY0PEV6_1_8"}
{"score": 0.8658271431922913, "chain_id": "3JMSRU9HQITTC1M4VAQZ0NURY0PEV6_1_9"}
{"score": 0.030291317030787468, "chain_id": "3JMSRU9HQITTC1M4VAQZ0NURY0PEV6_1_10"}
{"score": 0.9215553998947144, "chain_id": "3JMSRU9HQITTC1M4VAQZ0NURYE1EVA_1_1"}
{"score": 0.5975977778434753, "chain_id": "3JMSRU9HQITTC1M4VAQZ0NURYE1EVA_1_2"}
{"score": 0.5273473262786865, "chain_id": "3JMSRU9HQITTC1M4VAQZ0NURYE1EVA_1_3"}
{"score": 0.8912785053253174, "chain_id": "3JMSRU9HQITTC1M4VAQZ0NURYE1EVA_1_4"}
{"score": 0.6860911846160889, "chain_id": "3JMSRU9HQITTC1M4VAQZ0NURYE1EVA_1_5"}
{"score": 0.8886948823928833, "chain_id": "3JMSRU9HQITTC1M4VAQZ0NURYE1EVA_1_9"}
{"score": 0.9394779205322266, "chain_id": "3JMSRU9HQITTC1M4VAQZ0NURYE1EVA_1_10"}
{"score": 0.8095960021018982, "chain_id": "3JMSRU9HQITTC1M4VAQZ0NURYE1EVA_1_6"}
{"score": 0.8844414353370667, "chain_id": "3JMSRU9HQITTC1M4VAQZ0NURYE1EVA_1_7"}
{"score": 0.9116170406341553, "chain_id": "3JMSRU9HQITTC1M4VAQZ0NURYE1EVA_1_8"}
{"score": 0.9637910723686218, "chain_id": "3F6KKYWMNB0BCQZVXOTOKOITC2FDNM_1_1"}
{"score": 0.8616830110549927, "chain_id": "3F6KKYWMNB0BCQZVXOTOKOITC2FDNM_1_2"}
{"score": 0.9091048836708069, "chain_id": "3F6KKYWMNB0BCQZVXOTOKOITC2FDNM_1_3"}
{"score": 0.7152931690216064, "chain_id": "3F6KKYWMNB0BCQZVXOTOKOITC2FDNM_1_4"}
{"score": 0.7945101261138916, "chain_id": "3F6KKYWMNB0BCQZVXOTOKOITC2FDNM_1_7"}
{"score": 0.8902438282966614, "chain_id": "3F6KKYWMNB0BCQZVXOTOKOITC2FDNM_1_5"}
{"score": 0.8397817015647888, "chain_id": "3F6KKYWMNB0BCQZVXOTOKOITC2FDNM_1_6"}
{"score": 0.5766626596450806, "chain_id": "3F6KKYWMNB0BCQZVXOTOKOITC2FDNM_1_8"}
{"score": 0.8527613878250122, "chain_id": "3F6KKYWMNB0BCQZVXOTOKOITC2FDNM_1_9"}
{"score": 0.7090646028518677, "chain_id": "3F6KKYWMNB0BCQZVXOTOKOITC2FDNM_1_10"}
{"score": 0.9713601469993591, "chain_id": "37C0GNLMHF2355T3Y777IDW7IEPD6G_1_1"}
{"score": 0.7306053042411804, "chain_id": "37C0GNLMHF2355T3Y777IDW7IEPD6G_1_2"}
{"score": 0.14464861154556274, "chain_id": "37C0GNLMHF2355T3Y777IDW7IEPD6G_1_3"}
{"score": 0.11808076500892639, "chain_id": "37C0GNLMHF2355T3Y777IDW7IEPD6G_1_4"}
{"score": 0.7916578650474548, "chain_id": "37C0GNLMHF2355T3Y777IDW7IEPD6G_1_5"}
{"score": 0.09740490466356277, "chain_id": "37C0GNLMHF2355T3Y777IDW7IEPD6G_1_6"}
{"score": 0.09581145644187927, "chain_id": "37C0GNLMHF2355T3Y777IDW7IEPD6G_1_7"}
{"score": 0.2635190784931183, "chain_id": "37C0GNLMHF2355T3Y777IDW7IEPD6G_1_8"}
{"score": 0.035805996507406235, "chain_id": "37C0GNLMHF2355T3Y777IDW7IEPD6G_1_9"}
{"score": 0.07626008987426758, "chain_id": "37C0GNLMHF2355T3Y777IDW7IEPD6G_1_10"}
{"score": 0.9211799502372742, "chain_id": "3OLQQLKKNSOKL6MAELCGXZJX4Y5EJF_1_2"}
{"score": 0.5650222897529602, "chain_id": "3OLQQLKKNSOKL6MAELCGXZJX4Y5EJF_1_3"}
{"score": 0.8971446752548218, "chain_id": "3OLQQLKKNSOKL6MAELCGXZJX4Y5EJF_1_4"}
{"score": 0.5090863108634949, "chain_id": "3OLQQLKKNSOKL6MAELCGXZJX4Y5EJF_1_5"}
{"score": 0.3966321647167206, "chain_id": "3OLQQLKKNSOKL6MAELCGXZJX4Y5EJF_1_10"}
{"score": 0.9715176224708557, "chain_id": "3OLQQLKKNSOKL6MAELCGXZJX4Y5EJF_1_1"}
{"score": 0.7018678784370422, "chain_id": "3OLQQLKKNSOKL6MAELCGXZJX4Y5EJF_1_6"}
{"score": 0.9268476366996765, "chain_id": "3OLQQLKKNSOKL6MAELCGXZJX4Y5EJF_1_7"}
{"score": 0.7940973043441772, "chain_id": "3OLQQLKKNSOKL6MAELCGXZJX4Y5EJF_1_8"}
{"score": 0.49481669068336487, "chain_id": "3OLQQLKKNSOKL6MAELCGXZJX4Y5EJF_1_9"}
{"score": 0.9715176224708557, "chain_id": "3Z2R0DQ0JHDKFAO2706OYIXGSOG2E5_1_2"}
{"score": 0.9211799502372742, "chain_id": "3Z2R0DQ0JHDKFAO2706OYIXGSOG2E5_1_5"}
{"score": 0.8971446752548218, "chain_id": "3Z2R0DQ0JHDKFAO2706OYIXGSOG2E5_1_6"}
{"score": 0.9268476366996765, "chain_id": "3Z2R0DQ0JHDKFAO2706OYIXGSOG2E5_1_8"}
{"score": 0.5650222897529602, "chain_id": "3Z2R0DQ0JHDKFAO2706OYIXGSOG2E5_1_1"}
{"score": 0.5090863108634949, "chain_id": "3Z2R0DQ0JHDKFAO2706OYIXGSOG2E5_1_3"}
{"score": 0.7018678784370422, "chain_id": "3Z2R0DQ0JHDKFAO2706OYIXGSOG2E5_1_4"}
{"score": 0.7940973043441772, "chain_id": "3Z2R0DQ0JHDKFAO2706OYIXGSOG2E5_1_7"}
{"score": 0.3966321647167206, "chain_id": "3Z2R0DQ0JHDKFAO2706OYIXGSOG2E5_1_9"}
{"score": 0.16425257921218872, "chain_id": "3Z2R0DQ0JHDKFAO2706OYIXGSOG2E5_1_10"}
{"score": 0.49509748816490173, "chain_id": "3YGXWBAF70GFLQJBFNJH19UB01W4C2_1_2"}
{"score": 0.8317040801048279, "chain_id": "3YGXWBAF70GFLQJBFNJH19UB01W4C2_1_3"}
{"score": 0.8739480376243591, "chain_id": "3YGXWBAF70GFLQJBFNJH19UB01W4C2_1_5"}
{"score": 0.3796495199203491, "chain_id": "3YGXWBAF70GFLQJBFNJH19UB01W4C2_1_7"}
{"score": 0.8632679581642151, "chain_id": "3YGXWBAF70GFLQJBFNJH19UB01W4C2_1_8"}
{"score": 0.26932433247566223, "chain_id": "3YGXWBAF70GFLQJBFNJH19UB01W4C2_1_9"}
{"score": 0.7111552357673645, "chain_id": "3YGXWBAF70GFLQJBFNJH19UB01W4C2_1_1"}
{"score": 0.3861144483089447, "chain_id": "3YGXWBAF70GFLQJBFNJH19UB01W4C2_1_4"}
{"score": 0.3875049352645874, "chain_id": "3YGXWBAF70GFLQJBFNJH19UB01W4C2_1_6"}
{"score": 0.4623654782772064, "chain_id": "3YGXWBAF70GFLQJBFNJH19UB01W4C2_1_10"}
{"score": 0.905949056148529, "chain_id": "32N49TQG3GHQMO5SF5OD4440DJTAV5_1_3"}
{"score": 0.7927634119987488, "chain_id": "32N49TQG3GHQMO5SF5OD4440DJTAV5_1_4"}
{"score": 0.34929564595222473, "chain_id": "32N49TQG3GHQMO5SF5OD4440DJTAV5_1_1"}
{"score": 0.8284922242164612, "chain_id": "32N49TQG3GHQMO5SF5OD4440DJTAV5_1_2"}
{"score": 0.20809711515903473, "chain_id": "32N49TQG3GHQMO5SF5OD4440DJTAV5_1_5"}
{"score": 0.6678823232650757, "chain_id": "32N49TQG3GHQMO5SF5OD4440DJTAV5_1_6"}
{"score": 0.042185816913843155, "chain_id": "32N49TQG3GHQMO5SF5OD4440DJTAV5_1_7"}
{"score": 0.0536477230489254, "chain_id": "32N49TQG3GHQMO5SF5OD4440DJTAV5_1_8"}
{"score": 0.7145861387252808, "chain_id": "32N49TQG3GHQMO5SF5OD4440DJTAV5_1_9"}
{"score": 0.831048846244812, "chain_id": "32N49TQG3GHQMO5SF5OD4440DJTAV5_1_10"}
{"score": 0.957565188407898, "chain_id": "39LOEL67OS4SRRAUYXYTPI6MJJX832_1_1"}
{"score": 0.9839305281639099, "chain_id": "39LOEL67OS4SRRAUYXYTPI6MJJX832_1_2"}
{"score": 0.4459371864795685, "chain_id": "39LOEL67OS4SRRAUYXYTPI6MJJX832_1_3"}
{"score": 0.6171239614486694, "chain_id": "39LOEL67OS4SRRAUYXYTPI6MJJX832_1_4"}
{"score": 0.09855149686336517, "chain_id": "39LOEL67OS4SRRAUYXYTPI6MJJX832_1_5"}
{"score": 0.05214603617787361, "chain_id": "39LOEL67OS4SRRAUYXYTPI6MJJX832_1_6"}
{"score": 0.020924990996718407, "chain_id": "39LOEL67OS4SRRAUYXYTPI6MJJX832_1_7"}
{"score": 0.01633879728615284, "chain_id": "39LOEL67OS4SRRAUYXYTPI6MJJX832_1_8"}
{"score": 0.03695747256278992, "chain_id": "39LOEL67OS4SRRAUYXYTPI6MJJX832_1_9"}
{"score": 0.01823359727859497, "chain_id": "39LOEL67OS4SRRAUYXYTPI6MJJX832_1_10"}
{"score": 0.9714557528495789, "chain_id": "35BLDD71I6WRNWD0RX4CLXV99DNZVN_1_2"}
{"score": 0.9044457674026489, "chain_id": "35BLDD71I6WRNWD0RX4CLXV99DNZVN_1_8"}
{"score": 0.9576956033706665, "chain_id": "35BLDD71I6WRNWD0RX4CLXV99DNZVN_1_1"}
{"score": 0.7289562821388245, "chain_id": "35BLDD71I6WRNWD0RX4CLXV99DNZVN_1_3"}
{"score": 0.8813861012458801, "chain_id": "35BLDD71I6WRNWD0RX4CLXV99DNZVN_1_4"}
{"score": 0.0613921619951725, "chain_id": "35BLDD71I6WRNWD0RX4CLXV99DNZVN_1_5"}
{"score": 0.3258114159107208, "chain_id": "35BLDD71I6WRNWD0RX4CLXV99DNZVN_1_6"}
{"score": 0.29555267095565796, "chain_id": "35BLDD71I6WRNWD0RX4CLXV99DNZVN_1_7"}
{"score": 0.8163872957229614, "chain_id": "35BLDD71I6WRNWD0RX4CLXV99DNZVN_1_9"}
{"score": 0.7764075994491577, "chain_id": "35BLDD71I6WRNWD0RX4CLXV99DNZVN_1_10"}
{"score": 0.07001585513353348, "chain_id": "3LYA37P8IQMHPNG8MFA2X6DP3CFKB2_1_1"}
{"score": 0.015606493689119816, "chain_id": "3LYA37P8IQMHPNG8MFA2X6DP3CFKB2_1_2"}
{"score": 0.09224473685026169, "chain_id": "3LYA37P8IQMHPNG8MFA2X6DP3CFKB2_1_3"}
{"score": 0.023875663056969643, "chain_id": "3LYA37P8IQMHPNG8MFA2X6DP3CFKB2_1_4"}
{"score": 0.01168899331241846, "chain_id": "3LYA37P8IQMHPNG8MFA2X6DP3CFKB2_1_5"}
{"score": 0.12839750945568085, "chain_id": "3LYA37P8IQMHPNG8MFA2X6DP3CFKB2_1_6"}
{"score": 0.014396422542631626, "chain_id": "3LYA37P8IQMHPNG8MFA2X6DP3CFKB2_1_7"}
{"score": 0.18679462373256683, "chain_id": "3LYA37P8IQMHPNG8MFA2X6DP3CFKB2_1_8"}
{"score": 0.024920905008912086, "chain_id": "3LYA37P8IQMHPNG8MFA2X6DP3CFKB2_1_9"}
{"score": 0.038075197488069534, "chain_id": "3LYA37P8IQMHPNG8MFA2X6DP3CFKB2_1_10"}
{"score": 0.6653819680213928, "chain_id": "3ZPBJO59KP0J2UDKUQYBF4LXFWMDHU_1_2"}
{"score": 0.10370676964521408, "chain_id": "3ZPBJO59KP0J2UDKUQYBF4LXFWMDHU_1_1"}
{"score": 0.12085211277008057, "chain_id": "3ZPBJO59KP0J2UDKUQYBF4LXFWMDHU_1_3"}
{"score": 0.4934765696525574, "chain_id": "3ZPBJO59KP0J2UDKUQYBF4LXFWMDHU_1_4"}
{"score": 0.6962010860443115, "chain_id": "3ZPBJO59KP0J2UDKUQYBF4LXFWMDHU_1_5"}
{"score": 0.09864655882120132, "chain_id": "3ZPBJO59KP0J2UDKUQYBF4LXFWMDHU_1_6"}
{"score": 0.10607332736253738, "chain_id": "3ZPBJO59KP0J2UDKUQYBF4LXFWMDHU_1_7"}
{"score": 0.7211951613426208, "chain_id": "3ZPBJO59KP0J2UDKUQYBF4LXFWMDHU_1_8"}
{"score": 0.04372319206595421, "chain_id": "3ZPBJO59KP0J2UDKUQYBF4LXFWMDHU_1_9"}
{"score": 0.5119451880455017, "chain_id": "3ZPBJO59KP0J2UDKUQYBF4LXFWMDHU_1_10"}
{"score": 0.9793333411216736, "chain_id": "340UGXU9DY0A1XJQLA5445GUBHZVUX_1_3"}
{"score": 0.3415369391441345, "chain_id": "340UGXU9DY0A1XJQLA5445GUBHZVUX_1_1"}
{"score": 0.6045461297035217, "chain_id": "340UGXU9DY0A1XJQLA5445GUBHZVUX_1_2"}
{"score": 0.8917074799537659, "chain_id": "340UGXU9DY0A1XJQLA5445GUBHZVUX_1_4"}
{"score": 0.1954927146434784, "chain_id": "340UGXU9DY0A1XJQLA5445GUBHZVUX_1_5"}
{"score": 0.3432818353176117, "chain_id": "340UGXU9DY0A1XJQLA5445GUBHZVUX_1_6"}
{"score": 0.03780302405357361, "chain_id": "340UGXU9DY0A1XJQLA5445GUBHZVUX_1_7"}
{"score": 0.4017018973827362, "chain_id": "340UGXU9DY0A1XJQLA5445GUBHZVUX_1_8"}
{"score": 0.2888173758983612, "chain_id": "340UGXU9DY0A1XJQLA5445GUBHZVUX_1_9"}
{"score": 0.4286927282810211, "chain_id": "340UGXU9DY0A1XJQLA5445GUBHZVUX_1_10"}
{"score": 0.8872122764587402, "chain_id": "3B1NLC6UGZVERVLZFT7OUYQLCQCGPW_1_1"}
{"score": 0.760168731212616, "chain_id": "3B1NLC6UGZVERVLZFT7OUYQLCQCGPW_1_2"}
{"score": 0.9637000560760498, "chain_id": "3B1NLC6UGZVERVLZFT7OUYQLCQCGPW_1_3"}
{"score": 0.01961193047463894, "chain_id": "3B1NLC6UGZVERVLZFT7OUYQLCQCGPW_1_4"}
{"score": 0.3774400055408478, "chain_id": "3B1NLC6UGZVERVLZFT7OUYQLCQCGPW_1_5"}
{"score": 0.017313580960035324, "chain_id": "3B1NLC6UGZVERVLZFT7OUYQLCQCGPW_1_6"}
{"score": 0.8238534331321716, "chain_id": "3B1NLC6UGZVERVLZFT7OUYQLCQCGPW_1_7"}
{"score": 0.53572678565979, "chain_id": "3B1NLC6UGZVERVLZFT7OUYQLCQCGPW_1_8"}
{"score": 0.040667302906513214, "chain_id": "3B1NLC6UGZVERVLZFT7OUYQLCQCGPW_1_9"}
{"score": 0.01199430227279663, "chain_id": "3B1NLC6UGZVERVLZFT7OUYQLCQCGPW_1_10"}
{"score": 0.061124205589294434, "chain_id": "3LPW2N6LKT1T334BFJNR07MVUN95UW_1_1"}
{"score": 0.04793769121170044, "chain_id": "3LPW2N6LKT1T334BFJNR07MVUN95UW_1_2"}
{"score": 0.039065875113010406, "chain_id": "3LPW2N6LKT1T334BFJNR07MVUN95UW_1_3"}
{"score": 0.03614119067788124, "chain_id": "3LPW2N6LKT1T334BFJNR07MVUN95UW_1_4"}
{"score": 0.09698840230703354, "chain_id": "3LPW2N6LKT1T334BFJNR07MVUN95UW_1_5"}
{"score": 0.22170139849185944, "chain_id": "3LPW2N6LKT1T334BFJNR07MVUN95UW_1_6"}
{"score": 0.40417876839637756, "chain_id": "3LPW2N6LKT1T334BFJNR07MVUN95UW_1_7"}
{"score": 0.021652372553944588, "chain_id": "3LPW2N6LKT1T334BFJNR07MVUN95UW_1_8"}
{"score": 0.2426811307668686, "chain_id": "3LPW2N6LKT1T334BFJNR07MVUN95UW_1_9"}
{"score": 0.07619106769561768, "chain_id": "3LPW2N6LKT1T334BFJNR07MVUN95UW_1_10"}
{"score": 0.6435019969940186, "chain_id": "3E13VNJ1NNUP6U8SKFW1EEL3ODLI1P_1_4"}
{"score": 0.6063966155052185, "chain_id": "3E13VNJ1NNUP6U8SKFW1EEL3ODLI1P_1_10"}
{"score": 0.025446219369769096, "chain_id": "3E13VNJ1NNUP6U8SKFW1EEL3ODLI1P_1_1"}
{"score": 0.181207537651062, "chain_id": "3E13VNJ1NNUP6U8SKFW1EEL3ODLI1P_1_2"}
{"score": 0.4953058362007141, "chain_id": "3E13VNJ1NNUP6U8SKFW1EEL3ODLI1P_1_3"}
{"score": 0.3654398024082184, "chain_id": "3E13VNJ1NNUP6U8SKFW1EEL3ODLI1P_1_5"}
{"score": 0.0326545424759388, "chain_id": "3E13VNJ1NNUP6U8SKFW1EEL3ODLI1P_1_6"}
{"score": 0.10236519575119019, "chain_id": "3E13VNJ1NNUP6U8SKFW1EEL3ODLI1P_1_7"}
{"score": 0.02910420298576355, "chain_id": "3E13VNJ1NNUP6U8SKFW1EEL3ODLI1P_1_8"}
{"score": 0.0223124660551548, "chain_id": "3E13VNJ1NNUP6U8SKFW1EEL3ODLI1P_1_9"}
{"score": 0.4701806604862213, "chain_id": "3C6FJU71TQSR5REVQLSOB4KOP80YUO_1_2"}
{"score": 0.6698796153068542, "chain_id": "3C6FJU71TQSR5REVQLSOB4KOP80YUO_1_4"}
{"score": 0.8269070386886597, "chain_id": "3C6FJU71TQSR5REVQLSOB4KOP80YUO_1_1"}
{"score": 0.08357707411050797, "chain_id": "3C6FJU71TQSR5REVQLSOB4KOP80YUO_1_3"}
{"score": 0.3246031403541565, "chain_id": "3C6FJU71TQSR5REVQLSOB4KOP80YUO_1_5"}
{"score": 0.5297604203224182, "chain_id": "3C6FJU71TQSR5REVQLSOB4KOP80YUO_1_6"}
{"score": 0.039074067026376724, "chain_id": "3C6FJU71TQSR5REVQLSOB4KOP80YUO_1_7"}
{"score": 0.05518133193254471, "chain_id": "3C6FJU71TQSR5REVQLSOB4KOP80YUO_1_8"}
{"score": 0.598452627658844, "chain_id": "3C6FJU71TQSR5REVQLSOB4KOP80YUO_1_9"}
{"score": 0.7698994874954224, "chain_id": "3C6FJU71TQSR5REVQLSOB4KOP80YUO_1_10"}
{"score": 0.7783843278884888, "chain_id": "3EF8EXOTT1UL15SY2XH1QF032AIJ1B_1_4"}
{"score": 0.3280605673789978, "chain_id": "3EF8EXOTT1UL15SY2XH1QF032AIJ1B_1_1"}
{"score": 0.25358834862709045, "chain_id": "3EF8EXOTT1UL15SY2XH1QF032AIJ1B_1_2"}
{"score": 0.7421741485595703, "chain_id": "3EF8EXOTT1UL15SY2XH1QF032AIJ1B_1_3"}
{"score": 0.1145746260881424, "chain_id": "3EF8EXOTT1UL15SY2XH1QF032AIJ1B_1_5"}
{"score": 0.16456620395183563, "chain_id": "3EF8EXOTT1UL15SY2XH1QF032AIJ1B_1_6"}
{"score": 0.05308493971824646, "chain_id": "3EF8EXOTT1UL15SY2XH1QF032AIJ1B_1_7"}
{"score": 0.09139611572027206, "chain_id": "3EF8EXOTT1UL15SY2XH1QF032AIJ1B_1_8"}
{"score": 0.3012234568595886, "chain_id": "3EF8EXOTT1UL15SY2XH1QF032AIJ1B_1_9"}
{"score": 0.28823617100715637, "chain_id": "3EF8EXOTT1UL15SY2XH1QF032AIJ1B_1_10"}
{"score": 0.5175957083702087, "chain_id": "3PB5A5BD0V5PLPHZJ7D7UCZ0EPU7GS_1_1"}
{"score": 0.5444817543029785, "chain_id": "3PB5A5BD0V5PLPHZJ7D7UCZ0EPU7GS_1_4"}
{"score": 0.21873000264167786, "chain_id": "3PB5A5BD0V5PLPHZJ7D7UCZ0EPU7GS_1_6"}
{"score": 0.6625789999961853, "chain_id": "3PB5A5BD0V5PLPHZJ7D7UCZ0EPU7GS_1_7"}
{"score": 0.2675436735153198, "chain_id": "3PB5A5BD0V5PLPHZJ7D7UCZ0EPU7GS_1_2"}
{"score": 0.4236571788787842, "chain_id": "3PB5A5BD0V5PLPHZJ7D7UCZ0EPU7GS_1_3"}
{"score": 0.6246052980422974, "chain_id": "3PB5A5BD0V5PLPHZJ7D7UCZ0EPU7GS_1_5"}
{"score": 0.5291457772254944, "chain_id": "3PB5A5BD0V5PLPHZJ7D7UCZ0EPU7GS_1_8"}
{"score": 0.6485243439674377, "chain_id": "3PB5A5BD0V5PLPHZJ7D7UCZ0EPU7GS_1_9"}
{"score": 0.8580114245414734, "chain_id": "3PB5A5BD0V5PLPHZJ7D7UCZ0EPU7GS_1_10"}
{"score": 0.5498273372650146, "chain_id": "31EUONYN2V2FOSZTPOTV5ZO5RYKOVQ_1_4"}
{"score": 0.03328482061624527, "chain_id": "31EUONYN2V2FOSZTPOTV5ZO5RYKOVQ_1_6"}
{"score": 0.04322730004787445, "chain_id": "31EUONYN2V2FOSZTPOTV5ZO5RYKOVQ_1_1"}
{"score": 0.05052274838089943, "chain_id": "31EUONYN2V2FOSZTPOTV5ZO5RYKOVQ_1_2"}
{"score": 0.2602885961532593, "chain_id": "31EUONYN2V2FOSZTPOTV5ZO5RYKOVQ_1_3"}
{"score": 0.06038191542029381, "chain_id": "31EUONYN2V2FOSZTPOTV5ZO5RYKOVQ_1_5"}
{"score": 0.11760786175727844, "chain_id": "31EUONYN2V2FOSZTPOTV5ZO5RYKOVQ_1_7"}
{"score": 0.3507537245750427, "chain_id": "31EUONYN2V2FOSZTPOTV5ZO5RYKOVQ_1_8"}
{"score": 0.8163807988166809, "chain_id": "31EUONYN2V2FOSZTPOTV5ZO5RYKOVQ_1_9"}
{"score": 0.13145990669727325, "chain_id": "31EUONYN2V2FOSZTPOTV5ZO5RYKOVQ_1_10"}
{"score": 0.7182552814483643, "chain_id": "3BEFOD78W6SSUCV2SCDV45ZNJFX4M6_1_1"}
{"score": 0.5280621647834778, "chain_id": "3BEFOD78W6SSUCV2SCDV45ZNJFX4M6_1_4"}
{"score": 0.48577407002449036, "chain_id": "3BEFOD78W6SSUCV2SCDV45ZNJFX4M6_1_9"}
{"score": 0.6021357774734497, "chain_id": "3BEFOD78W6SSUCV2SCDV45ZNJFX4M6_1_2"}
{"score": 0.9398488998413086, "chain_id": "3BEFOD78W6SSUCV2SCDV45ZNJFX4M6_1_3"}
{"score": 0.2743663191795349, "chain_id": "3BEFOD78W6SSUCV2SCDV45ZNJFX4M6_1_5"}
{"score": 0.17467738687992096, "chain_id": "3BEFOD78W6SSUCV2SCDV45ZNJFX4M6_1_6"}
{"score": 0.6305394768714905, "chain_id": "3BEFOD78W6SSUCV2SCDV45ZNJFX4M6_1_7"}
{"score": 0.060667745769023895, "chain_id": "3BEFOD78W6SSUCV2SCDV45ZNJFX4M6_1_8"}
{"score": 0.34516963362693787, "chain_id": "3BEFOD78W6SSUCV2SCDV45ZNJFX4M6_1_10"}
{"score": 0.8817756772041321, "chain_id": "3UN61F00HWO1NBCUBPSMVWZBNRRR56_1_3"}
{"score": 0.8024911284446716, "chain_id": "3UN61F00HWO1NBCUBPSMVWZBNRRR56_1_4"}
{"score": 0.15172797441482544, "chain_id": "3UN61F00HWO1NBCUBPSMVWZBNRRR56_1_10"}
{"score": 0.15646100044250488, "chain_id": "3UN61F00HWO1NBCUBPSMVWZBNRRR56_1_1"}
{"score": 0.9736939668655396, "chain_id": "3UN61F00HWO1NBCUBPSMVWZBNRRR56_1_2"}
{"score": 0.86223965883255, "chain_id": "3UN61F00HWO1NBCUBPSMVWZBNRRR56_1_5"}
{"score": 0.25654926896095276, "chain_id": "3UN61F00HWO1NBCUBPSMVWZBNRRR56_1_6"}
{"score": 0.6030028462409973, "chain_id": "3UN61F00HWO1NBCUBPSMVWZBNRRR56_1_7"}
{"score": 0.1518746018409729, "chain_id": "3UN61F00HWO1NBCUBPSMVWZBNRRR56_1_8"}
{"score": 0.4358948767185211, "chain_id": "3UN61F00HWO1NBCUBPSMVWZBNRRR56_1_9"}
{"score": 0.5968301892280579, "chain_id": "31JLPPHS2UTVCJXA5ENPM4WMZJE3O4_1_7"}
{"score": 0.8016409873962402, "chain_id": "31JLPPHS2UTVCJXA5ENPM4WMZJE3O4_1_1"}
{"score": 0.9056462049484253, "chain_id": "31JLPPHS2UTVCJXA5ENPM4WMZJE3O4_1_2"}
{"score": 0.5652647614479065, "chain_id": "31JLPPHS2UTVCJXA5ENPM4WMZJE3O4_1_3"}
{"score": 0.8038293719291687, "chain_id": "31JLPPHS2UTVCJXA5ENPM4WMZJE3O4_1_4"}
{"score": 0.5275036096572876, "chain_id": "31JLPPHS2UTVCJXA5ENPM4WMZJE3O4_1_5"}
{"score": 0.11087175458669662, "chain_id": "31JLPPHS2UTVCJXA5ENPM4WMZJE3O4_1_6"}
{"score": 0.8395283818244934, "chain_id": "31JLPPHS2UTVCJXA5ENPM4WMZJE3O4_1_8"}
{"score": 0.34845593571662903, "chain_id": "31JLPPHS2UTVCJXA5ENPM4WMZJE3O4_1_9"}
{"score": 0.8418853282928467, "chain_id": "31JLPPHS2UTVCJXA5ENPM4WMZJE3O4_1_10"}
{"score": 0.7071312069892883, "chain_id": "3PPTZCWALQJZIOHJ5YA2FAW1TGKQZN_1_1"}
{"score": 0.2723689079284668, "chain_id": "3PPTZCWALQJZIOHJ5YA2FAW1TGKQZN_1_2"}
{"score": 0.45755624771118164, "chain_id": "3PPTZCWALQJZIOHJ5YA2FAW1TGKQZN_1_3"}
{"score": 0.1814635694026947, "chain_id": "3PPTZCWALQJZIOHJ5YA2FAW1TGKQZN_1_4"}
{"score": 0.12847857177257538, "chain_id": "3PPTZCWALQJZIOHJ5YA2FAW1TGKQZN_1_5"}
{"score": 0.24677875638008118, "chain_id": "3PPTZCWALQJZIOHJ5YA2FAW1TGKQZN_1_6"}
{"score": 0.3755013644695282, "chain_id": "3PPTZCWALQJZIOHJ5YA2FAW1TGKQZN_1_7"}
{"score": 0.2508738338947296, "chain_id": "3PPTZCWALQJZIOHJ5YA2FAW1TGKQZN_1_8"}
{"score": 0.8049203753471375, "chain_id": "3PPTZCWALQJZIOHJ5YA2FAW1TGKQZN_1_9"}
{"score": 0.16950421035289764, "chain_id": "3PPTZCWALQJZIOHJ5YA2FAW1TGKQZN_1_10"}
{"score": 0.9130871295928955, "chain_id": "39RP059MEHSCFBGB7RNICJ5TSLOBMK_1_1"}
{"score": 0.8617900013923645, "chain_id": "39RP059MEHSCFBGB7RNICJ5TSLOBMK_1_3"}
{"score": 0.9684181809425354, "chain_id": "39RP059MEHSCFBGB7RNICJ5TSLOBMK_1_2"}
{"score": 0.7756855487823486, "chain_id": "39RP059MEHSCFBGB7RNICJ5TSLOBMK_1_4"}
{"score": 0.9476110935211182, "chain_id": "39RP059MEHSCFBGB7RNICJ5TSLOBMK_1_5"}
{"score": 0.9241812825202942, "chain_id": "39RP059MEHSCFBGB7RNICJ5TSLOBMK_1_6"}
{"score": 0.6399215459823608, "chain_id": "39RP059MEHSCFBGB7RNICJ5TSLOBMK_1_7"}
{"score": 0.7351926565170288, "chain_id": "39RP059MEHSCFBGB7RNICJ5TSLOBMK_1_8"}
{"score": 0.22907501459121704, "chain_id": "39RP059MEHSCFBGB7RNICJ5TSLOBMK_1_9"}
{"score": 0.04111945629119873, "chain_id": "39RP059MEHSCFBGB7RNICJ5TSLOBMK_1_10"}
{"score": 0.9369262456893921, "chain_id": "38F5OAUN5NB3LLCA3DVPFCB1SBZH70_1_1"}
{"score": 0.9798397421836853, "chain_id": "38F5OAUN5NB3LLCA3DVPFCB1SBZH70_1_2"}
{"score": 0.8498954772949219, "chain_id": "38F5OAUN5NB3LLCA3DVPFCB1SBZH70_1_4"}
{"score": 0.05077847093343735, "chain_id": "38F5OAUN5NB3LLCA3DVPFCB1SBZH70_1_10"}
{"score": 0.8928163051605225, "chain_id": "38F5OAUN5NB3LLCA3DVPFCB1SBZH70_1_3"}
{"score": 0.8961778283119202, "chain_id": "38F5OAUN5NB3LLCA3DVPFCB1SBZH70_1_5"}
{"score": 0.826429009437561, "chain_id": "38F5OAUN5NB3LLCA3DVPFCB1SBZH70_1_6"}
{"score": 0.380656898021698, "chain_id": "38F5OAUN5NB3LLCA3DVPFCB1SBZH70_1_7"}
{"score": 0.4130828380584717, "chain_id": "38F5OAUN5NB3LLCA3DVPFCB1SBZH70_1_8"}
{"score": 0.22929365932941437, "chain_id": "38F5OAUN5NB3LLCA3DVPFCB1SBZH70_1_9"}
{"score": 0.9887157082557678, "chain_id": "3X1FV8S5JXQRWFIV15GN0QF31XIGVR_1_1"}
{"score": 0.9781658053398132, "chain_id": "3X1FV8S5JXQRWFIV15GN0QF31XIGVR_1_2"}
{"score": 0.5028893947601318, "chain_id": "3X1FV8S5JXQRWFIV15GN0QF31XIGVR_1_3"}
{"score": 0.21794280409812927, "chain_id": "3X1FV8S5JXQRWFIV15GN0QF31XIGVR_1_10"}
{"score": 0.7759404182434082, "chain_id": "3X1FV8S5JXQRWFIV15GN0QF31XIGVR_1_4"}
{"score": 0.5177671909332275, "chain_id": "3X1FV8S5JXQRWFIV15GN0QF31XIGVR_1_5"}
{"score": 0.30630582571029663, "chain_id": "3X1FV8S5JXQRWFIV15GN0QF31XIGVR_1_6"}
{"score": 0.22532351315021515, "chain_id": "3X1FV8S5JXQRWFIV15GN0QF31XIGVR_1_7"}
{"score": 0.04974348098039627, "chain_id": "3X1FV8S5JXQRWFIV15GN0QF31XIGVR_1_8"}
{"score": 0.8732377290725708, "chain_id": "3X1FV8S5JXQRWFIV15GN0QF31XIGVR_1_9"}
{"score": 0.9357259273529053, "chain_id": "345LHZDEDXRQPOH710ZYLAOBKPS3UE_1_4"}
{"score": 0.6700955033302307, "chain_id": "345LHZDEDXRQPOH710ZYLAOBKPS3UE_1_1"}
{"score": 0.5217100381851196, "chain_id": "345LHZDEDXRQPOH710ZYLAOBKPS3UE_1_2"}
{"score": 0.8826202154159546, "chain_id": "345LHZDEDXRQPOH710ZYLAOBKPS3UE_1_3"}
{"score": 0.024348817765712738, "chain_id": "345LHZDEDXRQPOH710ZYLAOBKPS3UE_1_5"}
{"score": 0.022628366947174072, "chain_id": "345LHZDEDXRQPOH710ZYLAOBKPS3UE_1_6"}
{"score": 0.0745794028043747, "chain_id": "345LHZDEDXRQPOH710ZYLAOBKPS3UE_1_7"}
{"score": 0.027407865971326828, "chain_id": "345LHZDEDXRQPOH710ZYLAOBKPS3UE_1_8"}
{"score": 0.04054728522896767, "chain_id": "345LHZDEDXRQPOH710ZYLAOBKPS3UE_1_9"}
{"score": 0.02058357745409012, "chain_id": "345LHZDEDXRQPOH710ZYLAOBKPS3UE_1_10"}
{"score": 0.9847822785377502, "chain_id": "35H6S234SAZ81SEAJ1POK18FV1B65J_1_1"}
{"score": 0.5312028527259827, "chain_id": "35H6S234SAZ81SEAJ1POK18FV1B65J_1_5"}
{"score": 0.44080469012260437, "chain_id": "35H6S234SAZ81SEAJ1POK18FV1B65J_1_6"}
{"score": 0.9679387211799622, "chain_id": "35H6S234SAZ81SEAJ1POK18FV1B65J_1_2"}
{"score": 0.4648442268371582, "chain_id": "35H6S234SAZ81SEAJ1POK18FV1B65J_1_3"}
{"score": 0.6152639389038086, "chain_id": "35H6S234SAZ81SEAJ1POK18FV1B65J_1_4"}
{"score": 0.14266721904277802, "chain_id": "35H6S234SAZ81SEAJ1POK18FV1B65J_1_7"}
{"score": 0.0439070463180542, "chain_id": "35H6S234SAZ81SEAJ1POK18FV1B65J_1_8"}
{"score": 0.8979218602180481, "chain_id": "35H6S234SAZ81SEAJ1POK18FV1B65J_1_9"}
{"score": 0.2538672685623169, "chain_id": "35H6S234SAZ81SEAJ1POK18FV1B65J_1_10"}
{"score": 0.9868894815444946, "chain_id": "3BDCF01OGXTOM1R1H70NKHO5GDBLYH_1_1"}
{"score": 0.7027667164802551, "chain_id": "3BDCF01OGXTOM1R1H70NKHO5GDBLYH_1_7"}
{"score": 0.9323704838752747, "chain_id": "3BDCF01OGXTOM1R1H70NKHO5GDBLYH_1_2"}
{"score": 0.022694451734423637, "chain_id": "3BDCF01OGXTOM1R1H70NKHO5GDBLYH_1_3"}
{"score": 0.021354733034968376, "chain_id": "3BDCF01OGXTOM1R1H70NKHO5GDBLYH_1_4"}
{"score": 0.07090794295072556, "chain_id": "3BDCF01OGXTOM1R1H70NKHO5GDBLYH_1_5"}
{"score": 0.041036780923604965, "chain_id": "3BDCF01OGXTOM1R1H70NKHO5GDBLYH_1_6"}
{"score": 0.803176760673523, "chain_id": "3BDCF01OGXTOM1R1H70NKHO5GDBLYH_1_8"}
{"score": 0.04763714596629143, "chain_id": "3BDCF01OGXTOM1R1H70NKHO5GDBLYH_1_9"}
{"score": 0.3700147569179535, "chain_id": "3BDCF01OGXTOM1R1H70NKHO5GDBLYH_1_10"}
{"score": 0.5783344507217407, "chain_id": "3GNCZX450IMDH48WTTFEYCFIFZ5PA8_1_1"}
{"score": 0.8207389116287231, "chain_id": "3GNCZX450IMDH48WTTFEYCFIFZ5PA8_1_2"}
{"score": 0.7931603193283081, "chain_id": "3GNCZX450IMDH48WTTFEYCFIFZ5PA8_1_3"}
{"score": 0.493604451417923, "chain_id": "3GNCZX450IMDH48WTTFEYCFIFZ5PA8_1_4"}
{"score": 0.05743950232863426, "chain_id": "3GNCZX450IMDH48WTTFEYCFIFZ5PA8_1_5"}
{"score": 0.8636134266853333, "chain_id": "3GNCZX450IMDH48WTTFEYCFIFZ5PA8_1_6"}
{"score": 0.8904128670692444, "chain_id": "3GNCZX450IMDH48WTTFEYCFIFZ5PA8_1_7"}
{"score": 0.8345771431922913, "chain_id": "3GNCZX450IMDH48WTTFEYCFIFZ5PA8_1_8"}
{"score": 0.3858044743537903, "chain_id": "3GNCZX450IMDH48WTTFEYCFIFZ5PA8_1_9"}
{"score": 0.7168474793434143, "chain_id": "3GNCZX450IMDH48WTTFEYCFIFZ5PA8_1_10"}
{"score": 0.8563894033432007, "chain_id": "31EUONYN2V2FOSZTPOTV5ZO5SDJVOR_1_1"}
{"score": 0.9648727774620056, "chain_id": "31EUONYN2V2FOSZTPOTV5ZO5SDJVOR_1_2"}
{"score": 0.3764864504337311, "chain_id": "31EUONYN2V2FOSZTPOTV5ZO5SDJVOR_1_3"}
{"score": 0.11496645957231522, "chain_id": "31EUONYN2V2FOSZTPOTV5ZO5SDJVOR_1_4"}
{"score": 0.4985092878341675, "chain_id": "31EUONYN2V2FOSZTPOTV5ZO5SDJVOR_1_5"}
{"score": 0.0418424978852272, "chain_id": "31EUONYN2V2FOSZTPOTV5ZO5SDJVOR_1_6"}
{"score": 0.5093938112258911, "chain_id": "31EUONYN2V2FOSZTPOTV5ZO5SDJVOR_1_7"}
{"score": 0.049195945262908936, "chain_id": "31EUONYN2V2FOSZTPOTV5ZO5SDJVOR_1_8"}
{"score": 0.07122337818145752, "chain_id": "31EUONYN2V2FOSZTPOTV5ZO5SDJVOR_1_9"}
{"score": 0.01874881610274315, "chain_id": "31EUONYN2V2FOSZTPOTV5ZO5SDJVOR_1_10"}
{"score": 0.646334707736969, "chain_id": "3KGTPGBS6XK146LOX0LT20JJEDB2UP_1_2"}
{"score": 0.6849641799926758, "chain_id": "3KGTPGBS6XK146LOX0LT20JJEDB2UP_1_7"}
{"score": 0.7690600156784058, "chain_id": "3KGTPGBS6XK146LOX0LT20JJEDB2UP_1_1"}
{"score": 0.9302939176559448, "chain_id": "3KGTPGBS6XK146LOX0LT20JJEDB2UP_1_3"}
{"score": 0.4150179624557495, "chain_id": "3KGTPGBS6XK146LOX0LT20JJEDB2UP_1_4"}
{"score": 0.20834675431251526, "chain_id": "3KGTPGBS6XK146LOX0LT20JJEDB2UP_1_5"}
{"score": 0.03852448612451553, "chain_id": "3KGTPGBS6XK146LOX0LT20JJEDB2UP_1_6"}
{"score": 0.33795803785324097, "chain_id": "3KGTPGBS6XK146LOX0LT20JJEDB2UP_1_8"}
{"score": 0.8308823108673096, "chain_id": "3KGTPGBS6XK146LOX0LT20JJEDB2UP_1_9"}
{"score": 0.2782049775123596, "chain_id": "3KGTPGBS6XK146LOX0LT20JJEDB2UP_1_10"}
{"score": 0.9809077382087708, "chain_id": "3U088ZLJVKS7007FDDWG10B1YWDW07_1_1"}
{"score": 0.9831475615501404, "chain_id": "3U088ZLJVKS7007FDDWG10B1YWDW07_1_2"}
{"score": 0.9856346249580383, "chain_id": "3U088ZLJVKS7007FDDWG10B1YWDW07_1_3"}
{"score": 0.9841258525848389, "chain_id": "3U088ZLJVKS7007FDDWG10B1YWDW07_1_4"}
{"score": 0.867189347743988, "chain_id": "3U088ZLJVKS7007FDDWG10B1YWDW07_1_5"}
{"score": 0.7221226096153259, "chain_id": "3U088ZLJVKS7007FDDWG10B1YWDW07_1_7"}
{"score": 0.8321684002876282, "chain_id": "3U088ZLJVKS7007FDDWG10B1YWDW07_1_6"}
{"score": 0.7869532704353333, "chain_id": "3U088ZLJVKS7007FDDWG10B1YWDW07_1_8"}
{"score": 0.42569613456726074, "chain_id": "3U088ZLJVKS7007FDDWG10B1YWDW07_1_9"}
{"score": 0.3786136507987976, "chain_id": "3U088ZLJVKS7007FDDWG10B1YWDW07_1_10"}
{"score": 0.8913774490356445, "chain_id": "3TPZPLC3M0BDXJ9BKE04B41C8DRP31_1_1"}
{"score": 0.9034562706947327, "chain_id": "3TPZPLC3M0BDXJ9BKE04B41C8DRP31_1_2"}
{"score": 0.9694660902023315, "chain_id": "3TPZPLC3M0BDXJ9BKE04B41C8DRP31_1_3"}
{"score": 0.9601410031318665, "chain_id": "3TPZPLC3M0BDXJ9BKE04B41C8DRP31_1_4"}
{"score": 0.7111977338790894, "chain_id": "3TPZPLC3M0BDXJ9BKE04B41C8DRP31_1_7"}
{"score": 0.7931386828422546, "chain_id": "3TPZPLC3M0BDXJ9BKE04B41C8DRP31_1_5"}
{"score": 0.7627708911895752, "chain_id": "3TPZPLC3M0BDXJ9BKE04B41C8DRP31_1_6"}
{"score": 0.5203690528869629, "chain_id": "3TPZPLC3M0BDXJ9BKE04B41C8DRP31_1_8"}
{"score": 0.025251852348446846, "chain_id": "3TPZPLC3M0BDXJ9BKE04B41C8DRP31_1_9"}
{"score": 0.025366194546222687, "chain_id": "3TPZPLC3M0BDXJ9BKE04B41C8DRP31_1_10"}
{"score": 0.8600850105285645, "chain_id": "382M9COHEHETZMX4QKGU41S87MEUE4_1_1"}
{"score": 0.8740594387054443, "chain_id": "382M9COHEHETZMX4QKGU41S87MEUE4_1_2"}
{"score": 0.8691075444221497, "chain_id": "382M9COHEHETZMX4QKGU41S87MEUE4_1_3"}
{"score": 0.29633140563964844, "chain_id": "382M9COHEHETZMX4QKGU41S87MEUE4_1_5"}
{"score": 0.20801472663879395, "chain_id": "382M9COHEHETZMX4QKGU41S87MEUE4_1_6"}
{"score": 0.8566492795944214, "chain_id": "382M9COHEHETZMX4QKGU41S87MEUE4_1_4"}
{"score": 0.22889146208763123, "chain_id": "382M9COHEHETZMX4QKGU41S87MEUE4_1_7"}
{"score": 0.258191853761673, "chain_id": "382M9COHEHETZMX4QKGU41S87MEUE4_1_8"}
{"score": 0.1503613144159317, "chain_id": "382M9COHEHETZMX4QKGU41S87MEUE4_1_9"}
{"score": 0.1228894367814064, "chain_id": "382M9COHEHETZMX4QKGU41S87MEUE4_1_10"}
{"score": 0.9888434410095215, "chain_id": "38SKSKU7R1W2W1CWDPEKYTUHMSWILJ_1_1"}
{"score": 0.9888773560523987, "chain_id": "38SKSKU7R1W2W1CWDPEKYTUHMSWILJ_1_2"}
{"score": 0.9644541144371033, "chain_id": "38SKSKU7R1W2W1CWDPEKYTUHMSWILJ_1_3"}
{"score": 0.9861708879470825, "chain_id": "38SKSKU7R1W2W1CWDPEKYTUHMSWILJ_1_5"}
{"score": 0.9330798387527466, "chain_id": "38SKSKU7R1W2W1CWDPEKYTUHMSWILJ_1_6"}
{"score": 0.9588990211486816, "chain_id": "38SKSKU7R1W2W1CWDPEKYTUHMSWILJ_1_7"}
{"score": 0.9219856262207031, "chain_id": "38SKSKU7R1W2W1CWDPEKYTUHMSWILJ_1_8"}
{"score": 0.9872822165489197, "chain_id": "38SKSKU7R1W2W1CWDPEKYTUHMSWILJ_1_4"}
{"score": 0.03967088833451271, "chain_id": "38SKSKU7R1W2W1CWDPEKYTUHMSWILJ_1_9"}
{"score": 0.06851287186145782, "chain_id": "38SKSKU7R1W2W1CWDPEKYTUHMSWILJ_1_10"}
{"score": 0.9576349258422852, "chain_id": "3E47SOBEYQV9TXIQ0CLLVA4USS0CIX_1_2"}
{"score": 0.9835363626480103, "chain_id": "3E47SOBEYQV9TXIQ0CLLVA4USS0CIX_1_3"}
{"score": 0.9811567068099976, "chain_id": "3E47SOBEYQV9TXIQ0CLLVA4USS0CIX_1_4"}
{"score": 0.726750910282135, "chain_id": "3E47SOBEYQV9TXIQ0CLLVA4USS0CIX_1_5"}
{"score": 0.6998808979988098, "chain_id": "3E47SOBEYQV9TXIQ0CLLVA4USS0CIX_1_6"}
{"score": 0.5674152374267578, "chain_id": "3E47SOBEYQV9TXIQ0CLLVA4USS0CIX_1_7"}
{"score": 0.643987774848938, "chain_id": "3E47SOBEYQV9TXIQ0CLLVA4USS0CIX_1_8"}
{"score": 0.3274417221546173, "chain_id": "3E47SOBEYQV9TXIQ0CLLVA4USS0CIX_1_9"}
{"score": 0.28861311078071594, "chain_id": "3E47SOBEYQV9TXIQ0CLLVA4USS0CIX_1_10"}
{"score": 0.9481231570243835, "chain_id": "3E47SOBEYQV9TXIQ0CLLVA4USS0CIX_1_1"}
{"score": 0.1267852932214737, "chain_id": "320DUZ38G7LI5KI1KG24X249GOWGJT_1_1"}
{"score": 0.20549975335597992, "chain_id": "320DUZ38G7LI5KI1KG24X249GOWGJT_1_2"}
{"score": 0.22313039004802704, "chain_id": "320DUZ38G7LI5KI1KG24X249GOWGJT_1_3"}
{"score": 0.1599355787038803, "chain_id": "320DUZ38G7LI5KI1KG24X249GOWGJT_1_4"}
{"score": 0.10924368351697922, "chain_id": "320DUZ38G7LI5KI1KG24X249GOWGJT_1_5"}
{"score": 0.16383856534957886, "chain_id": "320DUZ38G7LI5KI1KG24X249GOWGJT_1_6"}
{"score": 0.024262264370918274, "chain_id": "320DUZ38G7LI5KI1KG24X249GOWGJT_1_7"}
{"score": 0.04911966994404793, "chain_id": "320DUZ38G7LI5KI1KG24X249GOWGJT_1_8"}
{"score": 0.2265075147151947, "chain_id": "320DUZ38G7LI5KI1KG24X249GOWGJT_1_9"}
{"score": 0.038792550563812256, "chain_id": "320DUZ38G7LI5KI1KG24X249GOWGJT_1_10"}
{"score": 0.9860907793045044, "chain_id": "3GD6L00D3SWB2DYJ5UUT67SKI28M1L_1_1"}
{"score": 0.9892691969871521, "chain_id": "3GD6L00D3SWB2DYJ5UUT67SKI28M1L_1_2"}
{"score": 0.9839162230491638, "chain_id": "3GD6L00D3SWB2DYJ5UUT67SKI28M1L_1_3"}
{"score": 0.9890977144241333, "chain_id": "3GD6L00D3SWB2DYJ5UUT67SKI28M1L_1_4"}
{"score": 0.8990319967269897, "chain_id": "3GD6L00D3SWB2DYJ5UUT67SKI28M1L_1_5"}
{"score": 0.9850538372993469, "chain_id": "3GD6L00D3SWB2DYJ5UUT67SKI28M1L_1_6"}
{"score": 0.9665840864181519, "chain_id": "3GD6L00D3SWB2DYJ5UUT67SKI28M1L_1_7"}
{"score": 0.9054572582244873, "chain_id": "3GD6L00D3SWB2DYJ5UUT67SKI28M1L_1_9"}
{"score": 0.8867671489715576, "chain_id": "3GD6L00D3SWB2DYJ5UUT67SKI28M1L_1_8"}
{"score": 0.29162707924842834, "chain_id": "3GD6L00D3SWB2DYJ5UUT67SKI28M1L_1_10"}
{"score": 0.9838571548461914, "chain_id": "3UNH76FOCS48SJ9MHJ12KU3UP0UMYM_1_1"}
{"score": 0.983984649181366, "chain_id": "3UNH76FOCS48SJ9MHJ12KU3UP0UMYM_1_2"}
{"score": 0.9865293502807617, "chain_id": "3UNH76FOCS48SJ9MHJ12KU3UP0UMYM_1_3"}
{"score": 0.9854877591133118, "chain_id": "3UNH76FOCS48SJ9MHJ12KU3UP0UMYM_1_4"}
{"score": 0.8771004676818848, "chain_id": "3UNH76FOCS48SJ9MHJ12KU3UP0UMYM_1_6"}
{"score": 0.8227543830871582, "chain_id": "3UNH76FOCS48SJ9MHJ12KU3UP0UMYM_1_8"}
{"score": 0.8990563750267029, "chain_id": "3UNH76FOCS48SJ9MHJ12KU3UP0UMYM_1_5"}
{"score": 0.7753362655639648, "chain_id": "3UNH76FOCS48SJ9MHJ12KU3UP0UMYM_1_7"}
{"score": 0.5211533904075623, "chain_id": "3UNH76FOCS48SJ9MHJ12KU3UP0UMYM_1_9"}
{"score": 0.44908732175827026, "chain_id": "3UNH76FOCS48SJ9MHJ12KU3UP0UMYM_1_10"}
{"score": 0.15819092094898224, "chain_id": "35H6S234SAZ81SEAJ1POK18F4V0564_1_1"}
{"score": 0.038374971598386765, "chain_id": "35H6S234SAZ81SEAJ1POK18F4V0564_1_2"}
{"score": 0.22722843289375305, "chain_id": "35H6S234SAZ81SEAJ1POK18F4V0564_1_3"}
{"score": 0.7936580181121826, "chain_id": "35H6S234SAZ81SEAJ1POK18F4V0564_1_4"}
{"score": 0.10255873948335648, "chain_id": "35H6S234SAZ81SEAJ1POK18F4V0564_1_5"}
{"score": 0.024905016645789146, "chain_id": "35H6S234SAZ81SEAJ1POK18F4V0564_1_6"}
{"score": 0.020189424976706505, "chain_id": "35H6S234SAZ81SEAJ1POK18F4V0564_1_7"}
{"score": 0.12088464945554733, "chain_id": "35H6S234SAZ81SEAJ1POK18F4V0564_1_8"}
{"score": 0.15765438973903656, "chain_id": "35H6S234SAZ81SEAJ1POK18F4V0564_1_9"}
{"score": 0.04942549020051956, "chain_id": "35H6S234SAZ81SEAJ1POK18F4V0564_1_10"}
{"score": 0.9831475615501404, "chain_id": "31EUONYN2V2FOSZTPOTV5ZO52KUVOQ_1_2"}
{"score": 0.867189347743988, "chain_id": "31EUONYN2V2FOSZTPOTV5ZO52KUVOQ_1_5"}
{"score": 0.8321684002876282, "chain_id": "31EUONYN2V2FOSZTPOTV5ZO52KUVOQ_1_6"}
{"score": 0.7221226096153259, "chain_id": "31EUONYN2V2FOSZTPOTV5ZO52KUVOQ_1_7"}
{"score": 0.7869532704353333, "chain_id": "31EUONYN2V2FOSZTPOTV5ZO52KUVOQ_1_8"}
{"score": 0.42569613456726074, "chain_id": "31EUONYN2V2FOSZTPOTV5ZO52KUVOQ_1_9"}
{"score": 0.9809077382087708, "chain_id": "31EUONYN2V2FOSZTPOTV5ZO52KUVOQ_1_1"}
{"score": 0.9856346249580383, "chain_id": "31EUONYN2V2FOSZTPOTV5ZO52KUVOQ_1_3"}
{"score": 0.9841258525848389, "chain_id": "31EUONYN2V2FOSZTPOTV5ZO52KUVOQ_1_4"}
{"score": 0.3786136507987976, "chain_id": "31EUONYN2V2FOSZTPOTV5ZO52KUVOQ_1_10"}
{"score": 0.9303550720214844, "chain_id": "37XITHEISW8MMWL9QZFU925LS02CRL_1_1"}
{"score": 0.8093744516372681, "chain_id": "37XITHEISW8MMWL9QZFU925LS02CRL_1_2"}
{"score": 0.28699710965156555, "chain_id": "37XITHEISW8MMWL9QZFU925LS02CRL_1_3"}
{"score": 0.29380348324775696, "chain_id": "37XITHEISW8MMWL9QZFU925LS02CRL_1_7"}
{"score": 0.8104419112205505, "chain_id": "37XITHEISW8MMWL9QZFU925LS02CRL_1_4"}
{"score": 0.6312291622161865, "chain_id": "37XITHEISW8MMWL9QZFU925LS02CRL_1_5"}
{"score": 0.939516544342041, "chain_id": "37XITHEISW8MMWL9QZFU925LS02CRL_1_6"}
{"score": 0.4228454828262329, "chain_id": "37XITHEISW8MMWL9QZFU925LS02CRL_1_8"}
{"score": 0.9406856894493103, "chain_id": "37XITHEISW8MMWL9QZFU925LS02CRL_1_9"}
{"score": 0.8977048397064209, "chain_id": "37XITHEISW8MMWL9QZFU925LS02CRL_1_10"}
{"score": 0.7803246378898621, "chain_id": "3HYA4D452RICLOOY2BQUG0IG04N2F5_1_1"}
{"score": 0.7549359798431396, "chain_id": "3HYA4D452RICLOOY2BQUG0IG04N2F5_1_2"}
{"score": 0.6123988628387451, "chain_id": "3HYA4D452RICLOOY2BQUG0IG04N2F5_1_5"}
{"score": 0.40030357241630554, "chain_id": "3HYA4D452RICLOOY2BQUG0IG04N2F5_1_9"}
{"score": 0.7047066688537598, "chain_id": "3HYA4D452RICLOOY2BQUG0IG04N2F5_1_3"}
{"score": 0.7601833939552307, "chain_id": "3HYA4D452RICLOOY2BQUG0IG04N2F5_1_4"}
{"score": 0.20933392643928528, "chain_id": "3HYA4D452RICLOOY2BQUG0IG04N2F5_1_6"}
{"score": 0.36723756790161133, "chain_id": "3HYA4D452RICLOOY2BQUG0IG04N2F5_1_7"}
{"score": 0.03427574783563614, "chain_id": "3HYA4D452RICLOOY2BQUG0IG04N2F5_1_8"}
{"score": 0.16077370941638947, "chain_id": "3HYA4D452RICLOOY2BQUG0IG04N2F5_1_10"}
{"score": 0.7969744801521301, "chain_id": "3YDTZAI2WXFVYN9DZQUXKDBKJA714A_1_4"}
{"score": 0.1801944077014923, "chain_id": "3YDTZAI2WXFVYN9DZQUXKDBKJA714A_1_1"}
{"score": 0.04965018853545189, "chain_id": "3YDTZAI2WXFVYN9DZQUXKDBKJA714A_1_2"}
{"score": 0.19073134660720825, "chain_id": "3YDTZAI2WXFVYN9DZQUXKDBKJA714A_1_3"}
{"score": 0.7752805948257446, "chain_id": "3YDTZAI2WXFVYN9DZQUXKDBKJA714A_1_5"}
{"score": 0.15854889154434204, "chain_id": "3YDTZAI2WXFVYN9DZQUXKDBKJA714A_1_6"}
{"score": 0.1282554268836975, "chain_id": "3YDTZAI2WXFVYN9DZQUXKDBKJA714A_1_7"}
{"score": 0.03939800336956978, "chain_id": "3YDTZAI2WXFVYN9DZQUXKDBKJA714A_1_8"}
{"score": 0.2044181376695633, "chain_id": "3YDTZAI2WXFVYN9DZQUXKDBKJA714A_1_9"}
{"score": 0.06710673868656158, "chain_id": "3YDTZAI2WXFVYN9DZQUXKDBKJA714A_1_10"}
{"score": 0.46428030729293823, "chain_id": "3S3AMIZX3U4SLM248YKA4DOHDDECDA_1_3"}
{"score": 0.09474685788154602, "chain_id": "3S3AMIZX3U4SLM248YKA4DOHDDECDA_1_10"}
{"score": 0.5459898114204407, "chain_id": "3S3AMIZX3U4SLM248YKA4DOHDDECDA_1_1"}
{"score": 0.3463345766067505, "chain_id": "3S3AMIZX3U4SLM248YKA4DOHDDECDA_1_2"}
{"score": 0.42838627099990845, "chain_id": "3S3AMIZX3U4SLM248YKA4DOHDDECDA_1_4"}
{"score": 0.4047262370586395, "chain_id": "3S3AMIZX3U4SLM248YKA4DOHDDECDA_1_5"}
{"score": 0.4935542941093445, "chain_id": "3S3AMIZX3U4SLM248YKA4DOHDDECDA_1_6"}
{"score": 0.29127728939056396, "chain_id": "3S3AMIZX3U4SLM248YKA4DOHDDECDA_1_7"}
{"score": 0.23373956978321075, "chain_id": "3S3AMIZX3U4SLM248YKA4DOHDDECDA_1_8"}
{"score": 0.8105080127716064, "chain_id": "3S3AMIZX3U4SLM248YKA4DOHDDECDA_1_9"}
{"score": 0.8429766297340393, "chain_id": "3F0BG9B9MPMP7G2ZDDZD1C64HIPY7E_1_2"}
{"score": 0.8035504817962646, "chain_id": "3F0BG9B9MPMP7G2ZDDZD1C64HIPY7E_1_1"}
{"score": 0.5278051495552063, "chain_id": "3F0BG9B9MPMP7G2ZDDZD1C64HIPY7E_1_3"}
{"score": 0.36238721013069153, "chain_id": "3F0BG9B9MPMP7G2ZDDZD1C64HIPY7E_1_4"}
{"score": 0.9032170176506042, "chain_id": "3F0BG9B9MPMP7G2ZDDZD1C64HIPY7E_1_5"}
{"score": 0.48678797483444214, "chain_id": "3F0BG9B9MPMP7G2ZDDZD1C64HIPY7E_1_6"}
{"score": 0.0155730489641428, "chain_id": "3F0BG9B9MPMP7G2ZDDZD1C64HIPY7E_1_7"}
{"score": 0.01812075823545456, "chain_id": "3F0BG9B9MPMP7G2ZDDZD1C64HIPY7E_1_8"}
{"score": 0.02945009060204029, "chain_id": "3F0BG9B9MPMP7G2ZDDZD1C64HIPY7E_1_9"}
{"score": 0.1374390870332718, "chain_id": "3F0BG9B9MPMP7G2ZDDZD1C64HIPY7E_1_10"}
{"score": 0.9861765503883362, "chain_id": "3J4Q2Z4UTY2VOTCEUBQVG62JBEPQW5_1_1"}
{"score": 0.9887303709983826, "chain_id": "3J4Q2Z4UTY2VOTCEUBQVG62JBEPQW5_1_5"}
{"score": 0.7878208160400391, "chain_id": "3J4Q2Z4UTY2VOTCEUBQVG62JBEPQW5_1_2"}
{"score": 0.803482711315155, "chain_id": "3J4Q2Z4UTY2VOTCEUBQVG62JBEPQW5_1_3"}
{"score": 0.6512551307678223, "chain_id": "3J4Q2Z4UTY2VOTCEUBQVG62JBEPQW5_1_4"}
{"score": 0.46350640058517456, "chain_id": "3J4Q2Z4UTY2VOTCEUBQVG62JBEPQW5_1_6"}
{"score": 0.2556350529193878, "chain_id": "3J4Q2Z4UTY2VOTCEUBQVG62JBEPQW5_1_7"}
{"score": 0.6050838828086853, "chain_id": "3J4Q2Z4UTY2VOTCEUBQVG62JBEPQW5_1_8"}
{"score": 0.15128596127033234, "chain_id": "3J4Q2Z4UTY2VOTCEUBQVG62JBEPQW5_1_9"}
{"score": 0.43667495250701904, "chain_id": "3J4Q2Z4UTY2VOTCEUBQVG62JBEPQW5_1_10"}
{"score": 0.9821954369544983, "chain_id": "3ZY8KE4ISJ2I94C941LZU4J554TVQ5_1_1"}
{"score": 0.6848308444023132, "chain_id": "3ZY8KE4ISJ2I94C941LZU4J554TVQ5_1_2"}
{"score": 0.7629762291908264, "chain_id": "3ZY8KE4ISJ2I94C941LZU4J554TVQ5_1_3"}
{"score": 0.5977591276168823, "chain_id": "3ZY8KE4ISJ2I94C941LZU4J554TVQ5_1_4"}
{"score": 0.9843505620956421, "chain_id": "3ZY8KE4ISJ2I94C941LZU4J554TVQ5_1_5"}
{"score": 0.4469353258609772, "chain_id": "3ZY8KE4ISJ2I94C941LZU4J554TVQ5_1_6"}
{"score": 0.2585737407207489, "chain_id": "3ZY8KE4ISJ2I94C941LZU4J554TVQ5_1_7"}
{"score": 0.6016602516174316, "chain_id": "3ZY8KE4ISJ2I94C941LZU4J554TVQ5_1_8"}
{"score": 0.1756783276796341, "chain_id": "3ZY8KE4ISJ2I94C941LZU4J554TVQ5_1_9"}
{"score": 0.4410662353038788, "chain_id": "3ZY8KE4ISJ2I94C941LZU4J554TVQ5_1_10"}
{"score": 0.033428650349378586, "chain_id": "3GD6L00D3SWB2DYJ5UUT67SK61I1MW_1_2"}
{"score": 0.10282155871391296, "chain_id": "3GD6L00D3SWB2DYJ5UUT67SK61I1MW_1_8"}
{"score": 0.03832703456282616, "chain_id": "3GD6L00D3SWB2DYJ5UUT67SK61I1MW_1_1"}
{"score": 0.04217947646975517, "chain_id": "3GD6L00D3SWB2DYJ5UUT67SK61I1MW_1_3"}
{"score": 0.024712178856134415, "chain_id": "3GD6L00D3SWB2DYJ5UUT67SK61I1MW_1_4"}
{"score": 0.062097761780023575, "chain_id": "3GD6L00D3SWB2DYJ5UUT67SK61I1MW_1_5"}
{"score": 0.02265590988099575, "chain_id": "3GD6L00D3SWB2DYJ5UUT67SK61I1MW_1_6"}
{"score": 0.034493133425712585, "chain_id": "3GD6L00D3SWB2DYJ5UUT67SK61I1MW_1_7"}
{"score": 0.6234731674194336, "chain_id": "3GD6L00D3SWB2DYJ5UUT67SK61I1MW_1_9"}
{"score": 0.1087714210152626, "chain_id": "3GD6L00D3SWB2DYJ5UUT67SK61I1MW_1_10"}
{"score": 0.9916251301765442, "chain_id": "3R6P78PK7KACJNE6WAG8Z8RI2Z1TGV_1_1"}
{"score": 0.8963115811347961, "chain_id": "3R6P78PK7KACJNE6WAG8Z8RI2Z1TGV_1_3"}
{"score": 0.7270128130912781, "chain_id": "3R6P78PK7KACJNE6WAG8Z8RI2Z1TGV_1_6"}
{"score": 0.847978413105011, "chain_id": "3R6P78PK7KACJNE6WAG8Z8RI2Z1TGV_1_2"}
{"score": 0.6537432074546814, "chain_id": "3R6P78PK7KACJNE6WAG8Z8RI2Z1TGV_1_4"}
{"score": 0.988325834274292, "chain_id": "3R6P78PK7KACJNE6WAG8Z8RI2Z1TGV_1_5"}
{"score": 0.24159923195838928, "chain_id": "3R6P78PK7KACJNE6WAG8Z8RI2Z1TGV_1_7"}
{"score": 0.6352822780609131, "chain_id": "3R6P78PK7KACJNE6WAG8Z8RI2Z1TGV_1_8"}
{"score": 0.28228744864463806, "chain_id": "3R6P78PK7KACJNE6WAG8Z8RI2Z1TGV_1_9"}
{"score": 0.44099321961402893, "chain_id": "3R6P78PK7KACJNE6WAG8Z8RI2Z1TGV_1_10"}
{"score": 0.22678792476654053, "chain_id": "3COPXFW7XBBJTHHI5KS3SQIELWYPKT_1_9"}
{"score": 0.9406219124794006, "chain_id": "3COPXFW7XBBJTHHI5KS3SQIELWYPKT_1_1"}
{"score": 0.9397831559181213, "chain_id": "3COPXFW7XBBJTHHI5KS3SQIELWYPKT_1_2"}
{"score": 0.8350148797035217, "chain_id": "3COPXFW7XBBJTHHI5KS3SQIELWYPKT_1_3"}
{"score": 0.45365026593208313, "chain_id": "3COPXFW7XBBJTHHI5KS3SQIELWYPKT_1_4"}
{"score": 0.5555431246757507, "chain_id": "3COPXFW7XBBJTHHI5KS3SQIELWYPKT_1_5"}
{"score": 0.12402749806642532, "chain_id": "3COPXFW7XBBJTHHI5KS3SQIELWYPKT_1_6"}
{"score": 0.01924447901546955, "chain_id": "3COPXFW7XBBJTHHI5KS3SQIELWYPKT_1_7"}
{"score": 0.01924447901546955, "chain_id": "3COPXFW7XBBJTHHI5KS3SQIELWYPKT_1_8"}
{"score": 0.18154925107955933, "chain_id": "3COPXFW7XBBJTHHI5KS3SQIELWYPKT_1_10"}
{"score": 0.6331039667129517, "chain_id": "31JLPPHS2UTVCJXA5ENPM4WM09VO3N_1_8"}
{"score": 0.9622095227241516, "chain_id": "31JLPPHS2UTVCJXA5ENPM4WM09VO3N_1_1"}
{"score": 0.8446811437606812, "chain_id": "31JLPPHS2UTVCJXA5ENPM4WM09VO3N_1_2"}
{"score": 0.952093780040741, "chain_id": "31JLPPHS2UTVCJXA5ENPM4WM09VO3N_1_3"}
{"score": 0.1253630518913269, "chain_id": "31JLPPHS2UTVCJXA5ENPM4WM09VO3N_1_4"}
{"score": 0.19320623576641083, "chain_id": "31JLPPHS2UTVCJXA5ENPM4WM09VO3N_1_5"}
{"score": 0.0847603902220726, "chain_id": "31JLPPHS2UTVCJXA5ENPM4WM09VO3N_1_6"}
{"score": 0.23877961933612823, "chain_id": "31JLPPHS2UTVCJXA5ENPM4WM09VO3N_1_7"}
{"score": 0.8998863101005554, "chain_id": "31JLPPHS2UTVCJXA5ENPM4WM09VO3N_1_9"}
{"score": 0.5845768451690674, "chain_id": "31JLPPHS2UTVCJXA5ENPM4WM09VO3N_1_10"}
{"score": 0.037589531391859055, "chain_id": "3NGI5ARFTT4HNGVWXAMLNBMFCZ71PT_1_1"}
{"score": 0.030668344348669052, "chain_id": "3NGI5ARFTT4HNGVWXAMLNBMFCZ71PT_1_2"}
{"score": 0.030051592737436295, "chain_id": "3NGI5ARFTT4HNGVWXAMLNBMFCZ71PT_1_3"}
{"score": 0.01842307485640049, "chain_id": "3NGI5ARFTT4HNGVWXAMLNBMFCZ71PT_1_4"}
{"score": 0.13846762478351593, "chain_id": "3NGI5ARFTT4HNGVWXAMLNBMFCZ71PT_1_5"}
{"score": 0.01564691588282585, "chain_id": "3NGI5ARFTT4HNGVWXAMLNBMFCZ71PT_1_6"}
{"score": 0.03644657880067825, "chain_id": "3NGI5ARFTT4HNGVWXAMLNBMFCZ71PT_1_7"}
{"score": 0.04212148115038872, "chain_id": "3NGI5ARFTT4HNGVWXAMLNBMFCZ71PT_1_8"}
{"score": 0.05187588930130005, "chain_id": "3NGI5ARFTT4HNGVWXAMLNBMFCZ71PT_1_9"}
{"score": 0.0293233934789896, "chain_id": "3NGI5ARFTT4HNGVWXAMLNBMFCZ71PT_1_10"}
{"score": 0.9751085042953491, "chain_id": "33CUSNVVNNBESOG0AETPGZEXZMJ88M_1_1"}
{"score": 0.1687861829996109, "chain_id": "33CUSNVVNNBESOG0AETPGZEXZMJ88M_1_2"}
{"score": 0.23479120433330536, "chain_id": "33CUSNVVNNBESOG0AETPGZEXZMJ88M_1_3"}
{"score": 0.6123473644256592, "chain_id": "33CUSNVVNNBESOG0AETPGZEXZMJ88M_1_4"}
{"score": 0.2978213131427765, "chain_id": "33CUSNVVNNBESOG0AETPGZEXZMJ88M_1_5"}
{"score": 0.04256177693605423, "chain_id": "33CUSNVVNNBESOG0AETPGZEXZMJ88M_1_6"}
{"score": 0.0861377939581871, "chain_id": "33CUSNVVNNBESOG0AETPGZEXZMJ88M_1_7"}
{"score": 0.07191813737154007, "chain_id": "33CUSNVVNNBESOG0AETPGZEXZMJ88M_1_8"}
{"score": 0.12334731966257095, "chain_id": "33CUSNVVNNBESOG0AETPGZEXZMJ88M_1_9"}
{"score": 0.04271285980939865, "chain_id": "33CUSNVVNNBESOG0AETPGZEXZMJ88M_1_10"}
{"score": 0.9837162494659424, "chain_id": "3IGI0VL647J2GNQKNX74VIUS36HONS_1_1"}
{"score": 0.9629495739936829, "chain_id": "3IGI0VL647J2GNQKNX74VIUS36HONS_1_2"}
{"score": 0.6882104277610779, "chain_id": "3IGI0VL647J2GNQKNX74VIUS36HONS_1_4"}
{"score": 0.931725800037384, "chain_id": "3IGI0VL647J2GNQKNX74VIUS36HONS_1_5"}
{"score": 0.938320517539978, "chain_id": "3IGI0VL647J2GNQKNX74VIUS36HONS_1_6"}
{"score": 0.2984859347343445, "chain_id": "3IGI0VL647J2GNQKNX74VIUS36HONS_1_3"}
{"score": 0.8230963349342346, "chain_id": "3IGI0VL647J2GNQKNX74VIUS36HONS_1_7"}
{"score": 0.14215533435344696, "chain_id": "3IGI0VL647J2GNQKNX74VIUS36HONS_1_8"}
{"score": 0.4600464999675751, "chain_id": "3IGI0VL647J2GNQKNX74VIUS36HONS_1_9"}
{"score": 0.03315332159399986, "chain_id": "3IGI0VL647J2GNQKNX74VIUS36HONS_1_10"}
{"score": 0.9841168522834778, "chain_id": "3WS1NTTKEYB5PELKNOMGXCP147BF09_1_1"}
{"score": 0.5428671836853027, "chain_id": "3WS1NTTKEYB5PELKNOMGXCP147BF09_1_3"}
{"score": 0.9278533458709717, "chain_id": "3WS1NTTKEYB5PELKNOMGXCP147BF09_1_4"}
{"score": 0.18831689655780792, "chain_id": "3WS1NTTKEYB5PELKNOMGXCP147BF09_1_2"}
{"score": 0.06833024322986603, "chain_id": "3WS1NTTKEYB5PELKNOMGXCP147BF09_1_5"}
{"score": 0.0591110959649086, "chain_id": "3WS1NTTKEYB5PELKNOMGXCP147BF09_1_6"}
{"score": 0.08829513192176819, "chain_id": "3WS1NTTKEYB5PELKNOMGXCP147BF09_1_7"}
{"score": 0.2664435803890228, "chain_id": "3WS1NTTKEYB5PELKNOMGXCP147BF09_1_8"}
{"score": 0.6357784271240234, "chain_id": "3WS1NTTKEYB5PELKNOMGXCP147BF09_1_9"}
{"score": 0.38492289185523987, "chain_id": "3WS1NTTKEYB5PELKNOMGXCP147BF09_1_10"}
{"score": 0.9790512323379517, "chain_id": "34S6N1K2ZVI2061C77WZYHT2N09HLO_1_1"}
{"score": 0.9499670267105103, "chain_id": "34S6N1K2ZVI2061C77WZYHT2N09HLO_1_3"}
{"score": 0.967783510684967, "chain_id": "34S6N1K2ZVI2061C77WZYHT2N09HLO_1_4"}
{"score": 0.8109490871429443, "chain_id": "34S6N1K2ZVI2061C77WZYHT2N09HLO_1_5"}
{"score": 0.1993568241596222, "chain_id": "34S6N1K2ZVI2061C77WZYHT2N09HLO_1_2"}
{"score": 0.8488272428512573, "chain_id": "34S6N1K2ZVI2061C77WZYHT2N09HLO_1_6"}
{"score": 0.6787378787994385, "chain_id": "34S6N1K2ZVI2061C77WZYHT2N09HLO_1_7"}
{"score": 0.3766127824783325, "chain_id": "34S6N1K2ZVI2061C77WZYHT2N09HLO_1_8"}
{"score": 0.044882699847221375, "chain_id": "34S6N1K2ZVI2061C77WZYHT2N09HLO_1_9"}
{"score": 0.08080148696899414, "chain_id": "34S6N1K2ZVI2061C77WZYHT2N09HLO_1_10"}
{"score": 0.9711622595787048, "chain_id": "39GXDJN2OTDC30CDI74Z8DY5CLHV8F_1_1"}
{"score": 0.9451705813407898, "chain_id": "39GXDJN2OTDC30CDI74Z8DY5CLHV8F_1_2"}
{"score": 0.9411163926124573, "chain_id": "39GXDJN2OTDC30CDI74Z8DY5CLHV8F_1_3"}
{"score": 0.9386345744132996, "chain_id": "39GXDJN2OTDC30CDI74Z8DY5CLHV8F_1_6"}
{"score": 0.09003973007202148, "chain_id": "39GXDJN2OTDC30CDI74Z8DY5CLHV8F_1_4"}
{"score": 0.1833481341600418, "chain_id": "39GXDJN2OTDC30CDI74Z8DY5CLHV8F_1_5"}
{"score": 0.8403713703155518, "chain_id": "39GXDJN2OTDC30CDI74Z8DY5CLHV8F_1_7"}
{"score": 0.6729587912559509, "chain_id": "39GXDJN2OTDC30CDI74Z8DY5CLHV8F_1_8"}
{"score": 0.45919373631477356, "chain_id": "39GXDJN2OTDC30CDI74Z8DY5CLHV8F_1_9"}
{"score": 0.029749423265457153, "chain_id": "39GXDJN2OTDC30CDI74Z8DY5CLHV8F_1_10"}
{"score": 0.5044076442718506, "chain_id": "30OG32W0SUAG4WDVTJ48Q60ECSOENG_1_1"}
{"score": 0.7609997987747192, "chain_id": "30OG32W0SUAG4WDVTJ48Q60ECSOENG_1_2"}
{"score": 0.03562526777386665, "chain_id": "30OG32W0SUAG4WDVTJ48Q60ECSOENG_1_3"}
{"score": 0.023143112659454346, "chain_id": "30OG32W0SUAG4WDVTJ48Q60ECSOENG_1_4"}
{"score": 0.8711123466491699, "chain_id": "30OG32W0SUAG4WDVTJ48Q60ECSOENG_1_5"}
{"score": 0.39749205112457275, "chain_id": "30OG32W0SUAG4WDVTJ48Q60ECSOENG_1_6"}
{"score": 0.479006290435791, "chain_id": "30OG32W0SUAG4WDVTJ48Q60ECSOENG_1_7"}
{"score": 0.12022969126701355, "chain_id": "30OG32W0SUAG4WDVTJ48Q60ECSOENG_1_8"}
{"score": 0.0576484240591526, "chain_id": "30OG32W0SUAG4WDVTJ48Q60ECSOENG_1_9"}
{"score": 0.35483160614967346, "chain_id": "30OG32W0SUAG4WDVTJ48Q60ECSOENG_1_10"}
{"score": 0.829211413860321, "chain_id": "34QN5IT0TZQWAZBXFAGANK8FT2F80C_1_1"}
{"score": 0.9119966626167297, "chain_id": "34QN5IT0TZQWAZBXFAGANK8FT2F80C_1_3"}
{"score": 0.9486106038093567, "chain_id": "34QN5IT0TZQWAZBXFAGANK8FT2F80C_1_2"}
{"score": 0.8627251386642456, "chain_id": "34QN5IT0TZQWAZBXFAGANK8FT2F80C_1_4"}
{"score": 0.29766443371772766, "chain_id": "34QN5IT0TZQWAZBXFAGANK8FT2F80C_1_5"}
{"score": 0.07481703162193298, "chain_id": "34QN5IT0TZQWAZBXFAGANK8FT2F80C_1_6"}
{"score": 0.20970085263252258, "chain_id": "34QN5IT0TZQWAZBXFAGANK8FT2F80C_1_7"}
{"score": 0.49609023332595825, "chain_id": "34QN5IT0TZQWAZBXFAGANK8FT2F80C_1_8"}
{"score": 0.3934253454208374, "chain_id": "34QN5IT0TZQWAZBXFAGANK8FT2F80C_1_9"}
{"score": 0.12640385329723358, "chain_id": "34QN5IT0TZQWAZBXFAGANK8FT2F80C_1_10"}
{"score": 0.9875447750091553, "chain_id": "3TVSS0C0E1Z8G946BFKQLBD68L9TWK_1_1"}
{"score": 0.983995258808136, "chain_id": "3TVSS0C0E1Z8G946BFKQLBD68L9TWK_1_4"}
{"score": 0.843624472618103, "chain_id": "3TVSS0C0E1Z8G946BFKQLBD68L9TWK_1_2"}
{"score": 0.9464990496635437, "chain_id": "3TVSS0C0E1Z8G946BFKQLBD68L9TWK_1_3"}
{"score": 0.7463793158531189, "chain_id": "3TVSS0C0E1Z8G946BFKQLBD68L9TWK_1_5"}
{"score": 0.7477670907974243, "chain_id": "3TVSS0C0E1Z8G946BFKQLBD68L9TWK_1_6"}
{"score": 0.17975664138793945, "chain_id": "3TVSS0C0E1Z8G946BFKQLBD68L9TWK_1_7"}
{"score": 0.3956267237663269, "chain_id": "3TVSS0C0E1Z8G946BFKQLBD68L9TWK_1_8"}
{"score": 0.7807961702346802, "chain_id": "3TVSS0C0E1Z8G946BFKQLBD68L9TWK_1_9"}
{"score": 0.7419318556785583, "chain_id": "3TVSS0C0E1Z8G946BFKQLBD68L9TWK_1_10"}
{"score": 0.8194803595542908, "chain_id": "3L4D84MILZRW5GDC4MKMI2GAVW1HJ1_1_1"}
{"score": 0.5975792407989502, "chain_id": "3L4D84MILZRW5GDC4MKMI2GAVW1HJ1_1_2"}
{"score": 0.5441057682037354, "chain_id": "3L4D84MILZRW5GDC4MKMI2GAVW1HJ1_1_3"}
{"score": 0.5414544343948364, "chain_id": "3L4D84MILZRW5GDC4MKMI2GAVW1HJ1_1_4"}
{"score": 0.2546338737010956, "chain_id": "3L4D84MILZRW5GDC4MKMI2GAVW1HJ1_1_5"}
{"score": 0.0690857544541359, "chain_id": "3L4D84MILZRW5GDC4MKMI2GAVW1HJ1_1_6"}
{"score": 0.02273118495941162, "chain_id": "3L4D84MILZRW5GDC4MKMI2GAVW1HJ1_1_7"}
{"score": 0.1655942052602768, "chain_id": "3L4D84MILZRW5GDC4MKMI2GAVW1HJ1_1_8"}
{"score": 0.13911207020282745, "chain_id": "3L4D84MILZRW5GDC4MKMI2GAVW1HJ1_1_9"}
{"score": 0.016323689371347427, "chain_id": "3L4D84MILZRW5GDC4MKMI2GAVW1HJ1_1_10"}
{"score": 0.9278533458709717, "chain_id": "3OUYGIZWR7XHGRAE1RIL9635ITFP0B_1_4"}
{"score": 0.06833024322986603, "chain_id": "3OUYGIZWR7XHGRAE1RIL9635ITFP0B_1_5"}
{"score": 0.9841168522834778, "chain_id": "3OUYGIZWR7XHGRAE1RIL9635ITFP0B_1_1"}
{"score": 0.18831689655780792, "chain_id": "3OUYGIZWR7XHGRAE1RIL9635ITFP0B_1_2"}
{"score": 0.5428671836853027, "chain_id": "3OUYGIZWR7XHGRAE1RIL9635ITFP0B_1_3"}
{"score": 0.0591110959649086, "chain_id": "3OUYGIZWR7XHGRAE1RIL9635ITFP0B_1_6"}
{"score": 0.08829513192176819, "chain_id": "3OUYGIZWR7XHGRAE1RIL9635ITFP0B_1_7"}
{"score": 0.2664435803890228, "chain_id": "3OUYGIZWR7XHGRAE1RIL9635ITFP0B_1_8"}
{"score": 0.6357784271240234, "chain_id": "3OUYGIZWR7XHGRAE1RIL9635ITFP0B_1_9"}
{"score": 0.38492289185523987, "chain_id": "3OUYGIZWR7XHGRAE1RIL9635ITFP0B_1_10"}
{"score": 0.7819507122039795, "chain_id": "34BBWHLWHAAI7VOVH3LM74BXYL4IW2_1_1"}
{"score": 0.5927941799163818, "chain_id": "34BBWHLWHAAI7VOVH3LM74BXYL4IW2_1_3"}
{"score": 0.2835964560508728, "chain_id": "34BBWHLWHAAI7VOVH3LM74BXYL4IW2_1_4"}
{"score": 0.859375, "chain_id": "34BBWHLWHAAI7VOVH3LM74BXYL4IW2_1_5"}
{"score": 0.6676820516586304, "chain_id": "34BBWHLWHAAI7VOVH3LM74BXYL4IW2_1_9"}
{"score": 0.5732062458992004, "chain_id": "34BBWHLWHAAI7VOVH3LM74BXYL4IW2_1_2"}
{"score": 0.13067951798439026, "chain_id": "34BBWHLWHAAI7VOVH3LM74BXYL4IW2_1_6"}
{"score": 0.3066531717777252, "chain_id": "34BBWHLWHAAI7VOVH3LM74BXYL4IW2_1_7"}
{"score": 0.5214967131614685, "chain_id": "34BBWHLWHAAI7VOVH3LM74BXYL4IW2_1_8"}
{"score": 0.02816029265522957, "chain_id": "34BBWHLWHAAI7VOVH3LM74BXYL4IW2_1_10"}
{"score": 0.9634931683540344, "chain_id": "378XPAWRUCCL0ILSGYPUPFE665DAI1_1_1"}
{"score": 0.41022172570228577, "chain_id": "378XPAWRUCCL0ILSGYPUPFE665DAI1_1_10"}
{"score": 0.8491421937942505, "chain_id": "378XPAWRUCCL0ILSGYPUPFE665DAI1_1_2"}
{"score": 0.1621745377779007, "chain_id": "378XPAWRUCCL0ILSGYPUPFE665DAI1_1_3"}
{"score": 0.704455554485321, "chain_id": "378XPAWRUCCL0ILSGYPUPFE665DAI1_1_4"}
{"score": 0.07314864546060562, "chain_id": "378XPAWRUCCL0ILSGYPUPFE665DAI1_1_5"}
{"score": 0.033646468073129654, "chain_id": "378XPAWRUCCL0ILSGYPUPFE665DAI1_1_6"}
{"score": 0.12426107376813889, "chain_id": "378XPAWRUCCL0ILSGYPUPFE665DAI1_1_7"}
{"score": 0.36231452226638794, "chain_id": "378XPAWRUCCL0ILSGYPUPFE665DAI1_1_8"}
{"score": 0.029483336955308914, "chain_id": "378XPAWRUCCL0ILSGYPUPFE665DAI1_1_9"}
{"score": 0.9866265654563904, "chain_id": "31LM9EDVOLROFCZN7KFZNMD6IRNJNS_1_1"}
{"score": 0.7317015528678894, "chain_id": "31LM9EDVOLROFCZN7KFZNMD6IRNJNS_1_3"}
{"score": 0.929959774017334, "chain_id": "31LM9EDVOLROFCZN7KFZNMD6IRNJNS_1_5"}
{"score": 0.33931639790534973, "chain_id": "31LM9EDVOLROFCZN7KFZNMD6IRNJNS_1_2"}
{"score": 0.5502725839614868, "chain_id": "31LM9EDVOLROFCZN7KFZNMD6IRNJNS_1_4"}
{"score": 0.14896328747272491, "chain_id": "31LM9EDVOLROFCZN7KFZNMD6IRNJNS_1_6"}
{"score": 0.20638403296470642, "chain_id": "31LM9EDVOLROFCZN7KFZNMD6IRNJNS_1_7"}
{"score": 0.061605922877788544, "chain_id": "31LM9EDVOLROFCZN7KFZNMD6IRNJNS_1_8"}
{"score": 0.10335864126682281, "chain_id": "31LM9EDVOLROFCZN7KFZNMD6IRNJNS_1_9"}
{"score": 0.019924620166420937, "chain_id": "31LM9EDVOLROFCZN7KFZNMD6IRNJNS_1_10"}
{"score": 0.9295682311058044, "chain_id": "33M4IA01QG0APUW4HVBHNFQVGOLRXW_1_1"}
{"score": 0.6743519902229309, "chain_id": "33M4IA01QG0APUW4HVBHNFQVGOLRXW_1_2"}
{"score": 0.6874744892120361, "chain_id": "33M4IA01QG0APUW4HVBHNFQVGOLRXW_1_3"}
{"score": 0.7928991317749023, "chain_id": "33M4IA01QG0APUW4HVBHNFQVGOLRXW_1_4"}
{"score": 0.5249363780021667, "chain_id": "33M4IA01QG0APUW4HVBHNFQVGOLRXW_1_5"}
{"score": 0.09393680840730667, "chain_id": "33M4IA01QG0APUW4HVBHNFQVGOLRXW_1_6"}
{"score": 0.3095562756061554, "chain_id": "33M4IA01QG0APUW4HVBHNFQVGOLRXW_1_7"}
{"score": 0.2318214327096939, "chain_id": "33M4IA01QG0APUW4HVBHNFQVGOLRXW_1_8"}
{"score": 0.5446373820304871, "chain_id": "33M4IA01QG0APUW4HVBHNFQVGOLRXW_1_9"}
{"score": 0.5662675499916077, "chain_id": "33M4IA01QG0APUW4HVBHNFQVGOLRXW_1_10"}
{"score": 0.27169880270957947, "chain_id": "3ZAZR5XV01HVON700G97V57KOZCCZS_1_2"}
{"score": 0.6983770132064819, "chain_id": "3ZAZR5XV01HVON700G97V57KOZCCZS_1_4"}
{"score": 0.4668785631656647, "chain_id": "3ZAZR5XV01HVON700G97V57KOZCCZS_1_10"}
{"score": 0.452419251203537, "chain_id": "3ZAZR5XV01HVON700G97V57KOZCCZS_1_1"}
{"score": 0.05643746256828308, "chain_id": "3ZAZR5XV01HVON700G97V57KOZCCZS_1_3"}
{"score": 0.8042400479316711, "chain_id": "3ZAZR5XV01HVON700G97V57KOZCCZS_1_5"}
{"score": 0.03350486606359482, "chain_id": "3ZAZR5XV01HVON700G97V57KOZCCZS_1_6"}
{"score": 0.03752212971448898, "chain_id": "3ZAZR5XV01HVON700G97V57KOZCCZS_1_7"}
{"score": 0.033370837569236755, "chain_id": "3ZAZR5XV01HVON700G97V57KOZCCZS_1_8"}
{"score": 0.32611703872680664, "chain_id": "3ZAZR5XV01HVON700G97V57KOZCCZS_1_9"}
{"score": 0.8467742204666138, "chain_id": "3A1COHJ8NJU7LZHTDINVTC7W2MWH8V_1_1"}
{"score": 0.6896348595619202, "chain_id": "3A1COHJ8NJU7LZHTDINVTC7W2MWH8V_1_2"}
{"score": 0.5938934087753296, "chain_id": "3A1COHJ8NJU7LZHTDINVTC7W2MWH8V_1_3"}
{"score": 0.7242074012756348, "chain_id": "3A1COHJ8NJU7LZHTDINVTC7W2MWH8V_1_4"}
{"score": 0.9128597974777222, "chain_id": "3A1COHJ8NJU7LZHTDINVTC7W2MWH8V_1_6"}
{"score": 0.48549824953079224, "chain_id": "3A1COHJ8NJU7LZHTDINVTC7W2MWH8V_1_5"}
{"score": 0.07953467965126038, "chain_id": "3A1COHJ8NJU7LZHTDINVTC7W2MWH8V_1_7"}
{"score": 0.042750246822834015, "chain_id": "3A1COHJ8NJU7LZHTDINVTC7W2MWH8V_1_8"}
{"score": 0.7058132886886597, "chain_id": "3A1COHJ8NJU7LZHTDINVTC7W2MWH8V_1_9"}
{"score": 0.03236018493771553, "chain_id": "3A1COHJ8NJU7LZHTDINVTC7W2MWH8V_1_10"}
{"score": 0.9629665017127991, "chain_id": "369J354OFD96HP3U0X8FOYZ4ISJG62_1_2"}
{"score": 0.618838906288147, "chain_id": "369J354OFD96HP3U0X8FOYZ4ISJG62_1_3"}
{"score": 0.11247686296701431, "chain_id": "369J354OFD96HP3U0X8FOYZ4ISJG62_1_4"}
{"score": 0.18600863218307495, "chain_id": "369J354OFD96HP3U0X8FOYZ4ISJG62_1_1"}
{"score": 0.24578054249286652, "chain_id": "369J354OFD96HP3U0X8FOYZ4ISJG62_1_5"}
{"score": 0.7976447343826294, "chain_id": "369J354OFD96HP3U0X8FOYZ4ISJG62_1_6"}
{"score": 0.05585576966404915, "chain_id": "369J354OFD96HP3U0X8FOYZ4ISJG62_1_7"}
{"score": 0.8176959156990051, "chain_id": "369J354OFD96HP3U0X8FOYZ4ISJG62_1_8"}
{"score": 0.041836049407720566, "chain_id": "369J354OFD96HP3U0X8FOYZ4ISJG62_1_9"}
{"score": 0.10076751559972763, "chain_id": "369J354OFD96HP3U0X8FOYZ4ISJG62_1_10"}
{"score": 0.9195711016654968, "chain_id": "3QFUFYSY9YEMO23L6P9I9FFELSU4FZ_1_1"}
{"score": 0.7071376442909241, "chain_id": "3QFUFYSY9YEMO23L6P9I9FFELSU4FZ_1_2"}
{"score": 0.7420893907546997, "chain_id": "3QFUFYSY9YEMO23L6P9I9FFELSU4FZ_1_3"}
{"score": 0.7837642431259155, "chain_id": "3QFUFYSY9YEMO23L6P9I9FFELSU4FZ_1_4"}
{"score": 0.8640671968460083, "chain_id": "3QFUFYSY9YEMO23L6P9I9FFELSU4FZ_1_5"}
{"score": 0.08973343670368195, "chain_id": "3QFUFYSY9YEMO23L6P9I9FFELSU4FZ_1_6"}
{"score": 0.11315818130970001, "chain_id": "3QFUFYSY9YEMO23L6P9I9FFELSU4FZ_1_7"}
{"score": 0.2040138989686966, "chain_id": "3QFUFYSY9YEMO23L6P9I9FFELSU4FZ_1_8"}
{"score": 0.06659185886383057, "chain_id": "3QFUFYSY9YEMO23L6P9I9FFELSU4FZ_1_9"}
{"score": 0.11688181757926941, "chain_id": "3QFUFYSY9YEMO23L6P9I9FFELSU4FZ_1_10"}
{"score": 0.9870988726615906, "chain_id": "3MHW492WW0CROPEHC8EIDVZ0WX0VMB_1_1"}
{"score": 0.985755205154419, "chain_id": "3MHW492WW0CROPEHC8EIDVZ0WX0VMB_1_3"}
{"score": 0.9384202361106873, "chain_id": "3MHW492WW0CROPEHC8EIDVZ0WX0VMB_1_4"}
{"score": 0.7043567895889282, "chain_id": "3MHW492WW0CROPEHC8EIDVZ0WX0VMB_1_2"}
{"score": 0.04996756836771965, "chain_id": "3MHW492WW0CROPEHC8EIDVZ0WX0VMB_1_5"}
{"score": 0.2675180733203888, "chain_id": "3MHW492WW0CROPEHC8EIDVZ0WX0VMB_1_6"}
{"score": 0.14026328921318054, "chain_id": "3MHW492WW0CROPEHC8EIDVZ0WX0VMB_1_7"}
{"score": 0.24148640036582947, "chain_id": "3MHW492WW0CROPEHC8EIDVZ0WX0VMB_1_8"}
{"score": 0.4626893103122711, "chain_id": "3MHW492WW0CROPEHC8EIDVZ0WX0VMB_1_9"}
{"score": 0.1697409600019455, "chain_id": "3MHW492WW0CROPEHC8EIDVZ0WX0VMB_1_10"}
{"score": 0.9719617366790771, "chain_id": "3M81GAB8A0I30QE3ZKUZTSPY4NUQBZ_1_1"}
{"score": 0.6661039590835571, "chain_id": "3M81GAB8A0I30QE3ZKUZTSPY4NUQBZ_1_2"}
{"score": 0.7216684222221375, "chain_id": "3M81GAB8A0I30QE3ZKUZTSPY4NUQBZ_1_3"}
{"score": 0.6511561870574951, "chain_id": "3M81GAB8A0I30QE3ZKUZTSPY4NUQBZ_1_4"}
{"score": 0.9482269287109375, "chain_id": "3M81GAB8A0I30QE3ZKUZTSPY4NUQBZ_1_5"}
{"score": 0.12856629490852356, "chain_id": "3M81GAB8A0I30QE3ZKUZTSPY4NUQBZ_1_6"}
{"score": 0.5086728930473328, "chain_id": "3M81GAB8A0I30QE3ZKUZTSPY4NUQBZ_1_9"}
{"score": 0.5660778284072876, "chain_id": "3M81GAB8A0I30QE3ZKUZTSPY4NUQBZ_1_7"}
{"score": 0.18094637989997864, "chain_id": "3M81GAB8A0I30QE3ZKUZTSPY4NUQBZ_1_8"}
{"score": 0.7019422650337219, "chain_id": "3M81GAB8A0I30QE3ZKUZTSPY4NUQBZ_1_10"}
{"score": 0.596738874912262, "chain_id": "3EFE17QCRC4P4JW2RGT0A37XHPIHSV_1_1"}
{"score": 0.31325653195381165, "chain_id": "3EFE17QCRC4P4JW2RGT0A37XHPIHSV_1_2"}
{"score": 0.7797167897224426, "chain_id": "3EFE17QCRC4P4JW2RGT0A37XHPIHSV_1_4"}
{"score": 0.1026708111166954, "chain_id": "3EFE17QCRC4P4JW2RGT0A37XHPIHSV_1_5"}
{"score": 0.05521495267748833, "chain_id": "3EFE17QCRC4P4JW2RGT0A37XHPIHSV_1_10"}
{"score": 0.2693959176540375, "chain_id": "3EFE17QCRC4P4JW2RGT0A37XHPIHSV_1_3"}
{"score": 0.20922476053237915, "chain_id": "3EFE17QCRC4P4JW2RGT0A37XHPIHSV_1_6"}
{"score": 0.03866250813007355, "chain_id": "3EFE17QCRC4P4JW2RGT0A37XHPIHSV_1_7"}
{"score": 0.578667938709259, "chain_id": "3EFE17QCRC4P4JW2RGT0A37XHPIHSV_1_8"}
{"score": 0.11968325823545456, "chain_id": "3EFE17QCRC4P4JW2RGT0A37XHPIHSV_1_9"}
{"score": 0.9661473035812378, "chain_id": "3D8YOU6S9EJPM74PK2XWSD0VUFQ6UH_1_1"}
{"score": 0.9834322333335876, "chain_id": "3D8YOU6S9EJPM74PK2XWSD0VUFQ6UH_1_2"}
{"score": 0.9802258610725403, "chain_id": "3D8YOU6S9EJPM74PK2XWSD0VUFQ6UH_1_3"}
{"score": 0.9783918857574463, "chain_id": "3D8YOU6S9EJPM74PK2XWSD0VUFQ6UH_1_4"}
{"score": 0.7062279582023621, "chain_id": "3D8YOU6S9EJPM74PK2XWSD0VUFQ6UH_1_9"}
{"score": 0.6544075012207031, "chain_id": "3D8YOU6S9EJPM74PK2XWSD0VUFQ6UH_1_5"}
{"score": 0.048137228935956955, "chain_id": "3D8YOU6S9EJPM74PK2XWSD0VUFQ6UH_1_6"}
{"score": 0.09363319724798203, "chain_id": "3D8YOU6S9EJPM74PK2XWSD0VUFQ6UH_1_7"}
{"score": 0.47036662697792053, "chain_id": "3D8YOU6S9EJPM74PK2XWSD0VUFQ6UH_1_8"}
{"score": 0.30176180601119995, "chain_id": "3D8YOU6S9EJPM74PK2XWSD0VUFQ6UH_1_10"}
{"score": 0.9895752668380737, "chain_id": "3TR2532VIPTG8RTV83TILBRKD246J3_1_2"}
{"score": 0.9799516797065735, "chain_id": "3TR2532VIPTG8RTV83TILBRKD246J3_1_3"}
{"score": 0.9888218641281128, "chain_id": "3TR2532VIPTG8RTV83TILBRKD246J3_1_1"}
{"score": 0.9749974608421326, "chain_id": "3TR2532VIPTG8RTV83TILBRKD246J3_1_4"}
{"score": 0.6655460596084595, "chain_id": "3TR2532VIPTG8RTV83TILBRKD246J3_1_5"}
{"score": 0.8093721866607666, "chain_id": "3TR2532VIPTG8RTV83TILBRKD246J3_1_6"}
{"score": 0.7296346426010132, "chain_id": "3TR2532VIPTG8RTV83TILBRKD246J3_1_7"}
{"score": 0.7458953857421875, "chain_id": "3TR2532VIPTG8RTV83TILBRKD246J3_1_8"}
{"score": 0.281061053276062, "chain_id": "3TR2532VIPTG8RTV83TILBRKD246J3_1_9"}
{"score": 0.6745439767837524, "chain_id": "3TR2532VIPTG8RTV83TILBRKD246J3_1_10"}
{"score": 0.017503151670098305, "chain_id": "3NLZY2D53POFDZ0FQXJT7VL3H4UQLJ_1_1"}
{"score": 0.028219612315297127, "chain_id": "3NLZY2D53POFDZ0FQXJT7VL3H4UQLJ_1_2"}
{"score": 0.2314823418855667, "chain_id": "3NLZY2D53POFDZ0FQXJT7VL3H4UQLJ_1_3"}
{"score": 0.03995344042778015, "chain_id": "3NLZY2D53POFDZ0FQXJT7VL3H4UQLJ_1_4"}
{"score": 0.9695565104484558, "chain_id": "3NLZY2D53POFDZ0FQXJT7VL3H4UQLJ_1_5"}
{"score": 0.0738288015127182, "chain_id": "3NLZY2D53POFDZ0FQXJT7VL3H4UQLJ_1_6"}
{"score": 0.5860434770584106, "chain_id": "3NLZY2D53POFDZ0FQXJT7VL3H4UQLJ_1_7"}
{"score": 0.7839118838310242, "chain_id": "3NLZY2D53POFDZ0FQXJT7VL3H4UQLJ_1_8"}
{"score": 0.08360447734594345, "chain_id": "3NLZY2D53POFDZ0FQXJT7VL3H4UQLJ_1_9"}
{"score": 0.0779280960559845, "chain_id": "3NLZY2D53POFDZ0FQXJT7VL3H4UQLJ_1_10"}
{"score": 0.9915488958358765, "chain_id": "3DH6GAKTYYO8RQ85W8RWSWZ3RSXZY0_1_1"}
{"score": 0.9900435209274292, "chain_id": "3DH6GAKTYYO8RQ85W8RWSWZ3RSXZY0_1_2"}
{"score": 0.9878278970718384, "chain_id": "3DH6GAKTYYO8RQ85W8RWSWZ3RSXZY0_1_3"}
{"score": 0.9322120547294617, "chain_id": "3DH6GAKTYYO8RQ85W8RWSWZ3RSXZY0_1_4"}
{"score": 0.534045398235321, "chain_id": "3DH6GAKTYYO8RQ85W8RWSWZ3RSXZY0_1_5"}
{"score": 0.5423410534858704, "chain_id": "3DH6GAKTYYO8RQ85W8RWSWZ3RSXZY0_1_6"}
{"score": 0.6573696732521057, "chain_id": "3DH6GAKTYYO8RQ85W8RWSWZ3RSXZY0_1_7"}
{"score": 0.3765729069709778, "chain_id": "3DH6GAKTYYO8RQ85W8RWSWZ3RSXZY0_1_8"}
{"score": 0.5327286124229431, "chain_id": "3DH6GAKTYYO8RQ85W8RWSWZ3RSXZY0_1_9"}
{"score": 0.37978869676589966, "chain_id": "3DH6GAKTYYO8RQ85W8RWSWZ3RSXZY0_1_10"}
{"score": 0.9908934831619263, "chain_id": "3LRKMWOKB5GIQ5FY3NK1JSYYD822ZD_1_1"}
{"score": 0.9887991547584534, "chain_id": "3LRKMWOKB5GIQ5FY3NK1JSYYD822ZD_1_2"}
{"score": 0.9860347509384155, "chain_id": "3LRKMWOKB5GIQ5FY3NK1JSYYD822ZD_1_3"}
{"score": 0.9114128351211548, "chain_id": "3LRKMWOKB5GIQ5FY3NK1JSYYD822ZD_1_4"}
{"score": 0.5538778901100159, "chain_id": "3LRKMWOKB5GIQ5FY3NK1JSYYD822ZD_1_5"}
{"score": 0.8833513855934143, "chain_id": "3LRKMWOKB5GIQ5FY3NK1JSYYD822ZD_1_6"}
{"score": 0.5646101832389832, "chain_id": "3LRKMWOKB5GIQ5FY3NK1JSYYD822ZD_1_7"}
{"score": 0.6830714344978333, "chain_id": "3LRKMWOKB5GIQ5FY3NK1JSYYD822ZD_1_8"}
{"score": 0.7921743392944336, "chain_id": "3LRKMWOKB5GIQ5FY3NK1JSYYD822ZD_1_9"}
{"score": 0.09353924542665482, "chain_id": "3LRKMWOKB5GIQ5FY3NK1JSYYD822ZD_1_10"}
{"score": 0.9631717801094055, "chain_id": "3TS1AR6UQQDJ7PL48N7PCRZOD437FQ_1_1"}
{"score": 0.9853000044822693, "chain_id": "3TS1AR6UQQDJ7PL48N7PCRZOD437FQ_1_2"}
{"score": 0.8626927137374878, "chain_id": "3TS1AR6UQQDJ7PL48N7PCRZOD437FQ_1_3"}
{"score": 0.8915784955024719, "chain_id": "3TS1AR6UQQDJ7PL48N7PCRZOD437FQ_1_4"}
{"score": 0.14542604982852936, "chain_id": "3TS1AR6UQQDJ7PL48N7PCRZOD437FQ_1_5"}
{"score": 0.20615051686763763, "chain_id": "3TS1AR6UQQDJ7PL48N7PCRZOD437FQ_1_6"}
{"score": 0.4452858567237854, "chain_id": "3TS1AR6UQQDJ7PL48N7PCRZOD437FQ_1_7"}
{"score": 0.5299932360649109, "chain_id": "3TS1AR6UQQDJ7PL48N7PCRZOD437FQ_1_8"}
{"score": 0.1815817654132843, "chain_id": "3TS1AR6UQQDJ7PL48N7PCRZOD437FQ_1_9"}
{"score": 0.405828058719635, "chain_id": "3TS1AR6UQQDJ7PL48N7PCRZOD437FQ_1_10"}
{"score": 0.9132625460624695, "chain_id": "38F71OA9GTV2SSSRCT9EV9WE8XOMFE_1_1"}
{"score": 0.6782987713813782, "chain_id": "38F71OA9GTV2SSSRCT9EV9WE8XOMFE_1_3"}
{"score": 0.11074530333280563, "chain_id": "38F71OA9GTV2SSSRCT9EV9WE8XOMFE_1_4"}
{"score": 0.03895919770002365, "chain_id": "38F71OA9GTV2SSSRCT9EV9WE8XOMFE_1_9"}
{"score": 0.4848049581050873, "chain_id": "38F71OA9GTV2SSSRCT9EV9WE8XOMFE_1_2"}
{"score": 0.06839422136545181, "chain_id": "38F71OA9GTV2SSSRCT9EV9WE8XOMFE_1_5"}
{"score": 0.12588931620121002, "chain_id": "38F71OA9GTV2SSSRCT9EV9WE8XOMFE_1_6"}
{"score": 0.1267748773097992, "chain_id": "38F71OA9GTV2SSSRCT9EV9WE8XOMFE_1_7"}
{"score": 0.024982405826449394, "chain_id": "38F71OA9GTV2SSSRCT9EV9WE8XOMFE_1_8"}
{"score": 0.1564793437719345, "chain_id": "38F71OA9GTV2SSSRCT9EV9WE8XOMFE_1_10"}
{"score": 0.991077184677124, "chain_id": "3WYGZ5XF3WEG69XAX1WXNVNP6FGSKJ_1_1"}
{"score": 0.9898922443389893, "chain_id": "3WYGZ5XF3WEG69XAX1WXNVNP6FGSKJ_1_2"}
{"score": 0.9867663383483887, "chain_id": "3WYGZ5XF3WEG69XAX1WXNVNP6FGSKJ_1_3"}
{"score": 0.9241418242454529, "chain_id": "3WYGZ5XF3WEG69XAX1WXNVNP6FGSKJ_1_4"}
{"score": 0.6654770374298096, "chain_id": "3WYGZ5XF3WEG69XAX1WXNVNP6FGSKJ_1_5"}
{"score": 0.6543934345245361, "chain_id": "3WYGZ5XF3WEG69XAX1WXNVNP6FGSKJ_1_6"}
{"score": 0.741221010684967, "chain_id": "3WYGZ5XF3WEG69XAX1WXNVNP6FGSKJ_1_7"}
{"score": 0.18610671162605286, "chain_id": "3WYGZ5XF3WEG69XAX1WXNVNP6FGSKJ_1_8"}
{"score": 0.649863064289093, "chain_id": "3WYGZ5XF3WEG69XAX1WXNVNP6FGSKJ_1_9"}
{"score": 0.47712698578834534, "chain_id": "3WYGZ5XF3WEG69XAX1WXNVNP6FGSKJ_1_10"}
{"score": 0.991077184677124, "chain_id": "3UOUJI6MTDD25MOLLP6MSQDFO3SXU6_1_1"}
{"score": 0.9898922443389893, "chain_id": "3UOUJI6MTDD25MOLLP6MSQDFO3SXU6_1_2"}
{"score": 0.9867663383483887, "chain_id": "3UOUJI6MTDD25MOLLP6MSQDFO3SXU6_1_3"}
{"score": 0.9241418242454529, "chain_id": "3UOUJI6MTDD25MOLLP6MSQDFO3SXU6_1_4"}
{"score": 0.6654770374298096, "chain_id": "3UOUJI6MTDD25MOLLP6MSQDFO3SXU6_1_5"}
{"score": 0.6543934345245361, "chain_id": "3UOUJI6MTDD25MOLLP6MSQDFO3SXU6_1_6"}
{"score": 0.741221010684967, "chain_id": "3UOUJI6MTDD25MOLLP6MSQDFO3SXU6_1_7"}
{"score": 0.18610671162605286, "chain_id": "3UOUJI6MTDD25MOLLP6MSQDFO3SXU6_1_8"}
{"score": 0.649863064289093, "chain_id": "3UOUJI6MTDD25MOLLP6MSQDFO3SXU6_1_9"}
{"score": 0.47712698578834534, "chain_id": "3UOUJI6MTDD25MOLLP6MSQDFO3SXU6_1_10"}
{"score": 0.32737934589385986, "chain_id": "3IQ1VMJRYTJSPHSPC4JHCMF3BK3A9T_1_1"}
{"score": 0.8184342384338379, "chain_id": "3IQ1VMJRYTJSPHSPC4JHCMF3BK3A9T_1_9"}
{"score": 0.81525057554245, "chain_id": "3IQ1VMJRYTJSPHSPC4JHCMF3BK3A9T_1_10"}
{"score": 0.08277253806591034, "chain_id": "3IQ1VMJRYTJSPHSPC4JHCMF3BK3A9T_1_2"}
{"score": 0.13038593530654907, "chain_id": "3IQ1VMJRYTJSPHSPC4JHCMF3BK3A9T_1_3"}
{"score": 0.05798537656664848, "chain_id": "3IQ1VMJRYTJSPHSPC4JHCMF3BK3A9T_1_4"}
{"score": 0.0582110621035099, "chain_id": "3IQ1VMJRYTJSPHSPC4JHCMF3BK3A9T_1_5"}
{"score": 0.09253848344087601, "chain_id": "3IQ1VMJRYTJSPHSPC4JHCMF3BK3A9T_1_6"}
{"score": 0.10524901002645493, "chain_id": "3IQ1VMJRYTJSPHSPC4JHCMF3BK3A9T_1_7"}
{"score": 0.03713088482618332, "chain_id": "3IQ1VMJRYTJSPHSPC4JHCMF3BK3A9T_1_8"}
{"score": 0.23321394622325897, "chain_id": "3BEFOD78W6SSUCV2SCDV45ZNEA24MW_1_1"}
{"score": 0.08409051597118378, "chain_id": "3BEFOD78W6SSUCV2SCDV45ZNEA24MW_1_2"}
{"score": 0.3137383759021759, "chain_id": "3BEFOD78W6SSUCV2SCDV45ZNEA24MW_1_3"}
{"score": 0.328973650932312, "chain_id": "3BEFOD78W6SSUCV2SCDV45ZNEA24MW_1_4"}
{"score": 0.02379336953163147, "chain_id": "3BEFOD78W6SSUCV2SCDV45ZNEA24MW_1_5"}
{"score": 0.02959377132356167, "chain_id": "3BEFOD78W6SSUCV2SCDV45ZNEA24MW_1_6"}
{"score": 0.8309698104858398, "chain_id": "3BEFOD78W6SSUCV2SCDV45ZNEA24MW_1_7"}
{"score": 0.05465365946292877, "chain_id": "3BEFOD78W6SSUCV2SCDV45ZNEA24MW_1_8"}
{"score": 0.04873160272836685, "chain_id": "3BEFOD78W6SSUCV2SCDV45ZNEA24MW_1_9"}
{"score": 0.9139986038208008, "chain_id": "3BEFOD78W6SSUCV2SCDV45ZNEA24MW_1_10"}
{"score": 0.129815936088562, "chain_id": "3FPRZHYEPY6Q23676Q93HWQUR3NV3O_1_1"}
{"score": 0.08594854921102524, "chain_id": "3FPRZHYEPY6Q23676Q93HWQUR3NV3O_1_2"}
{"score": 0.04631809517741203, "chain_id": "3FPRZHYEPY6Q23676Q93HWQUR3NV3O_1_3"}
{"score": 0.2024330198764801, "chain_id": "3FPRZHYEPY6Q23676Q93HWQUR3NV3O_1_4"}
{"score": 0.042627278715372086, "chain_id": "3FPRZHYEPY6Q23676Q93HWQUR3NV3O_1_5"}
{"score": 0.10660886019468307, "chain_id": "3FPRZHYEPY6Q23676Q93HWQUR3NV3O_1_6"}
{"score": 0.4940270781517029, "chain_id": "3FPRZHYEPY6Q23676Q93HWQUR3NV3O_1_7"}
{"score": 0.034983765333890915, "chain_id": "3FPRZHYEPY6Q23676Q93HWQUR3NV3O_1_8"}
{"score": 0.9130019545555115, "chain_id": "3FPRZHYEPY6Q23676Q93HWQUR3NV3O_1_9"}
{"score": 0.19406820833683014, "chain_id": "3FPRZHYEPY6Q23676Q93HWQUR3NV3O_1_10"}
{"score": 0.28940802812576294, "chain_id": "32ZKVD547FMBTP8119I3GKWNODJ3B6_1_1"}
{"score": 0.013334264978766441, "chain_id": "32ZKVD547FMBTP8119I3GKWNODJ3B6_1_2"}
{"score": 0.021929116919636726, "chain_id": "32ZKVD547FMBTP8119I3GKWNODJ3B6_1_3"}
{"score": 0.046795666217803955, "chain_id": "32ZKVD547FMBTP8119I3GKWNODJ3B6_1_4"}
{"score": 0.6959851980209351, "chain_id": "32ZKVD547FMBTP8119I3GKWNODJ3B6_1_5"}
{"score": 0.04719274863600731, "chain_id": "32ZKVD547FMBTP8119I3GKWNODJ3B6_1_6"}
{"score": 0.6187950968742371, "chain_id": "32ZKVD547FMBTP8119I3GKWNODJ3B6_1_7"}
{"score": 0.6204848885536194, "chain_id": "32ZKVD547FMBTP8119I3GKWNODJ3B6_1_8"}
{"score": 0.07158520817756653, "chain_id": "32ZKVD547FMBTP8119I3GKWNODJ3B6_1_9"}
{"score": 0.10767626017332077, "chain_id": "32ZKVD547FMBTP8119I3GKWNODJ3B6_1_10"}
{"score": 0.9914871454238892, "chain_id": "3HFNH7HEMHDZR7MEF6MDU3GVEN0QGY_1_2"}
{"score": 0.9906987547874451, "chain_id": "3HFNH7HEMHDZR7MEF6MDU3GVEN0QGY_1_3"}
{"score": 0.9085816144943237, "chain_id": "3HFNH7HEMHDZR7MEF6MDU3GVEN0QGY_1_6"}
{"score": 0.7733434438705444, "chain_id": "3HFNH7HEMHDZR7MEF6MDU3GVEN0QGY_1_1"}
{"score": 0.9480693936347961, "chain_id": "3HFNH7HEMHDZR7MEF6MDU3GVEN0QGY_1_4"}
{"score": 0.488272488117218, "chain_id": "3HFNH7HEMHDZR7MEF6MDU3GVEN0QGY_1_5"}
{"score": 0.9591899514198303, "chain_id": "3HFNH7HEMHDZR7MEF6MDU3GVEN0QGY_1_7"}
{"score": 0.8894907236099243, "chain_id": "3HFNH7HEMHDZR7MEF6MDU3GVEN0QGY_1_8"}
{"score": 0.11296506971120834, "chain_id": "3HFNH7HEMHDZR7MEF6MDU3GVEN0QGY_1_9"}
{"score": 0.08052133023738861, "chain_id": "3HFNH7HEMHDZR7MEF6MDU3GVEN0QGY_1_10"}
{"score": 0.16783419251441956, "chain_id": "3KYQYYSHYV6TUBZ7Y3T1ZDIYMWKDOP_1_2"}
{"score": 0.07342709600925446, "chain_id": "3KYQYYSHYV6TUBZ7Y3T1ZDIYMWKDOP_1_1"}
{"score": 0.0335264578461647, "chain_id": "3KYQYYSHYV6TUBZ7Y3T1ZDIYMWKDOP_1_3"}
{"score": 0.04852360114455223, "chain_id": "3KYQYYSHYV6TUBZ7Y3T1ZDIYMWKDOP_1_4"}
{"score": 0.04252428933978081, "chain_id": "3KYQYYSHYV6TUBZ7Y3T1ZDIYMWKDOP_1_5"}
{"score": 0.02305319532752037, "chain_id": "3KYQYYSHYV6TUBZ7Y3T1ZDIYMWKDOP_1_6"}
{"score": 0.03819282352924347, "chain_id": "3KYQYYSHYV6TUBZ7Y3T1ZDIYMWKDOP_1_7"}
{"score": 0.9530441761016846, "chain_id": "3KYQYYSHYV6TUBZ7Y3T1ZDIYMWKDOP_1_8"}
{"score": 0.01658402942121029, "chain_id": "3KYQYYSHYV6TUBZ7Y3T1ZDIYMWKDOP_1_9"}
{"score": 0.349844366312027, "chain_id": "3KYQYYSHYV6TUBZ7Y3T1ZDIYMWKDOP_1_10"}
{"score": 0.957295835018158, "chain_id": "3VNXK88KKCHCH5VNNZAD89TGTUNV9V_1_1"}
{"score": 0.9792176485061646, "chain_id": "3VNXK88KKCHCH5VNNZAD89TGTUNV9V_1_2"}
{"score": 0.16662395000457764, "chain_id": "3VNXK88KKCHCH5VNNZAD89TGTUNV9V_1_3"}
{"score": 0.16666074097156525, "chain_id": "3VNXK88KKCHCH5VNNZAD89TGTUNV9V_1_4"}
{"score": 0.033045340329408646, "chain_id": "3VNXK88KKCHCH5VNNZAD89TGTUNV9V_1_5"}
{"score": 0.2294481098651886, "chain_id": "3VNXK88KKCHCH5VNNZAD89TGTUNV9V_1_6"}
{"score": 0.8392643332481384, "chain_id": "3VNXK88KKCHCH5VNNZAD89TGTUNV9V_1_7"}
{"score": 0.8645846843719482, "chain_id": "3VNXK88KKCHCH5VNNZAD89TGTUNV9V_1_8"}
{"score": 0.7611668705940247, "chain_id": "3VNXK88KKCHCH5VNNZAD89TGTUNV9V_1_9"}
{"score": 0.06115814298391342, "chain_id": "3VNXK88KKCHCH5VNNZAD89TGTUNV9V_1_10"}
{"score": 0.054505228996276855, "chain_id": "3IAEQB9FMEJ1ZK89PPKBG7VKR4AWDP_1_4"}
{"score": 0.015492036007344723, "chain_id": "3IAEQB9FMEJ1ZK89PPKBG7VKR4AWDP_1_1"}
{"score": 0.08158965408802032, "chain_id": "3IAEQB9FMEJ1ZK89PPKBG7VKR4AWDP_1_2"}
{"score": 0.2987144887447357, "chain_id": "3IAEQB9FMEJ1ZK89PPKBG7VKR4AWDP_1_3"}
{"score": 0.2742851972579956, "chain_id": "3IAEQB9FMEJ1ZK89PPKBG7VKR4AWDP_1_5"}
{"score": 0.16781818866729736, "chain_id": "3IAEQB9FMEJ1ZK89PPKBG7VKR4AWDP_1_6"}
{"score": 0.050513509660959244, "chain_id": "3IAEQB9FMEJ1ZK89PPKBG7VKR4AWDP_1_7"}
{"score": 0.056275345385074615, "chain_id": "3IAEQB9FMEJ1ZK89PPKBG7VKR4AWDP_1_8"}
{"score": 0.0898360162973404, "chain_id": "3IAEQB9FMEJ1ZK89PPKBG7VKR4AWDP_1_9"}
{"score": 0.02211121656000614, "chain_id": "3IAEQB9FMEJ1ZK89PPKBG7VKR4AWDP_1_10"}
{"score": 0.19656331837177277, "chain_id": "3QJOXOW4XJQAMESVHIP8DRBEUTSEMZ_1_1"}
{"score": 0.9521797895431519, "chain_id": "3QJOXOW4XJQAMESVHIP8DRBEUTSEMZ_1_3"}
{"score": 0.9306324124336243, "chain_id": "3QJOXOW4XJQAMESVHIP8DRBEUTSEMZ_1_4"}
{"score": 0.9416998028755188, "chain_id": "3QJOXOW4XJQAMESVHIP8DRBEUTSEMZ_1_5"}
{"score": 0.48514917492866516, "chain_id": "3QJOXOW4XJQAMESVHIP8DRBEUTSEMZ_1_2"}
{"score": 0.12950584292411804, "chain_id": "3QJOXOW4XJQAMESVHIP8DRBEUTSEMZ_1_6"}
{"score": 0.017216067761182785, "chain_id": "3QJOXOW4XJQAMESVHIP8DRBEUTSEMZ_1_7"}
{"score": 0.13007961213588715, "chain_id": "3QJOXOW4XJQAMESVHIP8DRBEUTSEMZ_1_8"}
{"score": 0.024353938177227974, "chain_id": "3QJOXOW4XJQAMESVHIP8DRBEUTSEMZ_1_9"}
{"score": 0.011885792016983032, "chain_id": "3QJOXOW4XJQAMESVHIP8DRBEUTSEMZ_1_10"}
{"score": 0.6876063346862793, "chain_id": "3I0BTBYZAXKBP52FSEE4MXWH9KE0Y7_1_1"}
{"score": 0.9078419208526611, "chain_id": "3I0BTBYZAXKBP52FSEE4MXWH9KE0Y7_1_3"}
{"score": 0.7120357155799866, "chain_id": "3I0BTBYZAXKBP52FSEE4MXWH9KE0Y7_1_10"}
{"score": 0.8756476044654846, "chain_id": "3I0BTBYZAXKBP52FSEE4MXWH9KE0Y7_1_2"}
{"score": 0.9041260480880737, "chain_id": "3I0BTBYZAXKBP52FSEE4MXWH9KE0Y7_1_4"}
{"score": 0.1542125940322876, "chain_id": "3I0BTBYZAXKBP52FSEE4MXWH9KE0Y7_1_5"}
{"score": 0.05431288108229637, "chain_id": "3I0BTBYZAXKBP52FSEE4MXWH9KE0Y7_1_6"}
{"score": 0.04701181873679161, "chain_id": "3I0BTBYZAXKBP52FSEE4MXWH9KE0Y7_1_7"}
{"score": 0.06294357776641846, "chain_id": "3I0BTBYZAXKBP52FSEE4MXWH9KE0Y7_1_8"}
{"score": 0.024727627635002136, "chain_id": "3I0BTBYZAXKBP52FSEE4MXWH9KE0Y7_1_9"}
{"score": 0.9829303622245789, "chain_id": "3WQQ9FUS6ATXUME7DQDZ714Y7CL8B6_1_1"}
{"score": 0.7380220293998718, "chain_id": "3WQQ9FUS6ATXUME7DQDZ714Y7CL8B6_1_2"}
{"score": 0.9840055704116821, "chain_id": "3WQQ9FUS6ATXUME7DQDZ714Y7CL8B6_1_4"}
{"score": 0.8942990303039551, "chain_id": "3WQQ9FUS6ATXUME7DQDZ714Y7CL8B6_1_8"}
{"score": 0.8953072428703308, "chain_id": "3WQQ9FUS6ATXUME7DQDZ714Y7CL8B6_1_3"}
{"score": 0.9102696180343628, "chain_id": "3WQQ9FUS6ATXUME7DQDZ714Y7CL8B6_1_5"}
{"score": 0.9648290872573853, "chain_id": "3WQQ9FUS6ATXUME7DQDZ714Y7CL8B6_1_6"}
{"score": 0.6490530371665955, "chain_id": "3WQQ9FUS6ATXUME7DQDZ714Y7CL8B6_1_7"}
{"score": 0.8097059726715088, "chain_id": "3WQQ9FUS6ATXUME7DQDZ714Y7CL8B6_1_9"}
{"score": 0.24369293451309204, "chain_id": "3WQQ9FUS6ATXUME7DQDZ714Y7CL8B6_1_10"}
{"score": 0.9832135438919067, "chain_id": "30H4UDGLT2HEJ5HLQW5J73AI9FVMPO_1_1"}
{"score": 0.9248539805412292, "chain_id": "30H4UDGLT2HEJ5HLQW5J73AI9FVMPO_1_2"}
{"score": 0.2984370291233063, "chain_id": "30H4UDGLT2HEJ5HLQW5J73AI9FVMPO_1_3"}
{"score": 0.9635352492332458, "chain_id": "30H4UDGLT2HEJ5HLQW5J73AI9FVMPO_1_4"}
{"score": 0.18292933702468872, "chain_id": "30H4UDGLT2HEJ5HLQW5J73AI9FVMPO_1_8"}
{"score": 0.3887980878353119, "chain_id": "30H4UDGLT2HEJ5HLQW5J73AI9FVMPO_1_9"}
{"score": 0.6219495534896851, "chain_id": "30H4UDGLT2HEJ5HLQW5J73AI9FVMPO_1_5"}
{"score": 0.20778776705265045, "chain_id": "30H4UDGLT2HEJ5HLQW5J73AI9FVMPO_1_6"}
{"score": 0.36046475172042847, "chain_id": "30H4UDGLT2HEJ5HLQW5J73AI9FVMPO_1_7"}
{"score": 0.5215984582901001, "chain_id": "30H4UDGLT2HEJ5HLQW5J73AI9FVMPO_1_10"}
{"score": 0.988740861415863, "chain_id": "33JKGHPFYCTEGK58AHSR3E5N921MN3_1_1"}
{"score": 0.8900572657585144, "chain_id": "33JKGHPFYCTEGK58AHSR3E5N921MN3_1_2"}
{"score": 0.9713899493217468, "chain_id": "33JKGHPFYCTEGK58AHSR3E5N921MN3_1_3"}
{"score": 0.9737739562988281, "chain_id": "33JKGHPFYCTEGK58AHSR3E5N921MN3_1_4"}
{"score": 0.9796929359436035, "chain_id": "33JKGHPFYCTEGK58AHSR3E5N921MN3_1_6"}
{"score": 0.980834424495697, "chain_id": "33JKGHPFYCTEGK58AHSR3E5N921MN3_1_7"}
{"score": 0.24587880074977875, "chain_id": "33JKGHPFYCTEGK58AHSR3E5N921MN3_1_5"}
{"score": 0.4312998056411743, "chain_id": "33JKGHPFYCTEGK58AHSR3E5N921MN3_1_8"}
{"score": 0.18498875200748444, "chain_id": "33JKGHPFYCTEGK58AHSR3E5N921MN3_1_9"}
{"score": 0.37465402483940125, "chain_id": "33JKGHPFYCTEGK58AHSR3E5N921MN3_1_10"}
{"score": 0.7303325533866882, "chain_id": "36AHBNMV1RB5OP394Q2Z14G04C4DY1_1_1"}
{"score": 0.8049795627593994, "chain_id": "36AHBNMV1RB5OP394Q2Z14G04C4DY1_1_2"}
{"score": 0.723331868648529, "chain_id": "36AHBNMV1RB5OP394Q2Z14G04C4DY1_1_4"}
{"score": 0.30511611700057983, "chain_id": "36AHBNMV1RB5OP394Q2Z14G04C4DY1_1_3"}
{"score": 0.7348768711090088, "chain_id": "36AHBNMV1RB5OP394Q2Z14G04C4DY1_1_5"}
{"score": 0.042826373130083084, "chain_id": "36AHBNMV1RB5OP394Q2Z14G04C4DY1_1_6"}
{"score": 0.05938122421503067, "chain_id": "36AHBNMV1RB5OP394Q2Z14G04C4DY1_1_7"}
{"score": 0.14052174985408783, "chain_id": "36AHBNMV1RB5OP394Q2Z14G04C4DY1_1_8"}
{"score": 0.9256489276885986, "chain_id": "36AHBNMV1RB5OP394Q2Z14G04C4DY1_1_9"}
{"score": 0.07133755832910538, "chain_id": "36AHBNMV1RB5OP394Q2Z14G04C4DY1_1_10"}
{"score": 0.9906548857688904, "chain_id": "3OVHNO1VE605TFDE0C4IFBP2BLCZDD_1_1"}
{"score": 0.9850651621818542, "chain_id": "3OVHNO1VE605TFDE0C4IFBP2BLCZDD_1_4"}
{"score": 0.9913500547409058, "chain_id": "3OVHNO1VE605TFDE0C4IFBP2BLCZDD_1_2"}
{"score": 0.9765058159828186, "chain_id": "3OVHNO1VE605TFDE0C4IFBP2BLCZDD_1_3"}
{"score": 0.08360092341899872, "chain_id": "3OVHNO1VE605TFDE0C4IFBP2BLCZDD_1_5"}
{"score": 0.04759003221988678, "chain_id": "3OVHNO1VE605TFDE0C4IFBP2BLCZDD_1_6"}
{"score": 0.5092600584030151, "chain_id": "3OVHNO1VE605TFDE0C4IFBP2BLCZDD_1_7"}
{"score": 0.2916525602340698, "chain_id": "3OVHNO1VE605TFDE0C4IFBP2BLCZDD_1_8"}
{"score": 0.7045870423316956, "chain_id": "3OVHNO1VE605TFDE0C4IFBP2BLCZDD_1_9"}
{"score": 0.32712292671203613, "chain_id": "3OVHNO1VE605TFDE0C4IFBP2BLCZDD_1_10"}
{"score": 0.029558580368757248, "chain_id": "3HL8HNGX450NL89XNK59QNQU31KF9C_1_1"}
{"score": 0.5591843724250793, "chain_id": "3HL8HNGX450NL89XNK59QNQU31KF9C_1_2"}
{"score": 0.08003950119018555, "chain_id": "3HL8HNGX450NL89XNK59QNQU31KF9C_1_3"}
{"score": 0.45089560747146606, "chain_id": "3HL8HNGX450NL89XNK59QNQU31KF9C_1_4"}
{"score": 0.047128643840551376, "chain_id": "3HL8HNGX450NL89XNK59QNQU31KF9C_1_5"}
{"score": 0.20352308452129364, "chain_id": "3HL8HNGX450NL89XNK59QNQU31KF9C_1_6"}
{"score": 0.03862004354596138, "chain_id": "3HL8HNGX450NL89XNK59QNQU31KF9C_1_7"}
{"score": 0.02355284057557583, "chain_id": "3HL8HNGX450NL89XNK59QNQU31KF9C_1_8"}
{"score": 0.28970015048980713, "chain_id": "3HL8HNGX450NL89XNK59QNQU31KF9C_1_9"}
{"score": 0.9216275811195374, "chain_id": "3HL8HNGX450NL89XNK59QNQU31KF9C_1_10"}
{"score": 0.05563349276781082, "chain_id": "3IJXV6UZ1XIDZZ79I9BGK53GPGIRI4_1_1"}
{"score": 0.06202266737818718, "chain_id": "3IJXV6UZ1XIDZZ79I9BGK53GPGIRI4_1_2"}
{"score": 0.016209090128540993, "chain_id": "3IJXV6UZ1XIDZZ79I9BGK53GPGIRI4_1_3"}
{"score": 0.018769646063447, "chain_id": "3IJXV6UZ1XIDZZ79I9BGK53GPGIRI4_1_4"}
{"score": 0.0161727461963892, "chain_id": "3IJXV6UZ1XIDZZ79I9BGK53GPGIRI4_1_5"}
{"score": 0.0157439224421978, "chain_id": "3IJXV6UZ1XIDZZ79I9BGK53GPGIRI4_1_6"}
{"score": 0.025548972189426422, "chain_id": "3IJXV6UZ1XIDZZ79I9BGK53GPGIRI4_1_7"}
{"score": 0.022576957941055298, "chain_id": "3IJXV6UZ1XIDZZ79I9BGK53GPGIRI4_1_8"}
{"score": 0.027435310184955597, "chain_id": "3IJXV6UZ1XIDZZ79I9BGK53GPGIRI4_1_9"}
{"score": 0.01636459492146969, "chain_id": "3IJXV6UZ1XIDZZ79I9BGK53GPGIRI4_1_10"}
{"score": 0.21434010565280914, "chain_id": "3BC8WZX3V3VQSYAS8W5PYX47CDTRR2_1_1"}
{"score": 0.7735546827316284, "chain_id": "3BC8WZX3V3VQSYAS8W5PYX47CDTRR2_1_8"}
{"score": 0.1919427216053009, "chain_id": "3BC8WZX3V3VQSYAS8W5PYX47CDTRR2_1_2"}
{"score": 0.14894846081733704, "chain_id": "3BC8WZX3V3VQSYAS8W5PYX47CDTRR2_1_3"}
{"score": 0.05757710337638855, "chain_id": "3BC8WZX3V3VQSYAS8W5PYX47CDTRR2_1_4"}
{"score": 0.11987213045358658, "chain_id": "3BC8WZX3V3VQSYAS8W5PYX47CDTRR2_1_5"}
{"score": 0.816821277141571, "chain_id": "3BC8WZX3V3VQSYAS8W5PYX47CDTRR2_1_6"}
{"score": 0.04410555586218834, "chain_id": "3BC8WZX3V3VQSYAS8W5PYX47CDTRR2_1_7"}
{"score": 0.45659661293029785, "chain_id": "3BC8WZX3V3VQSYAS8W5PYX47CDTRR2_1_9"}
{"score": 0.485823392868042, "chain_id": "3BC8WZX3V3VQSYAS8W5PYX47CDTRR2_1_10"}
{"score": 0.0501728318631649, "chain_id": "3SLE99ER0NCCEIFUMGDCKL1259QBZN_1_1"}
{"score": 0.2985445261001587, "chain_id": "3SLE99ER0NCCEIFUMGDCKL1259QBZN_1_2"}
{"score": 0.030387191101908684, "chain_id": "3SLE99ER0NCCEIFUMGDCKL1259QBZN_1_3"}
{"score": 0.21450786292552948, "chain_id": "3SLE99ER0NCCEIFUMGDCKL1259QBZN_1_4"}
{"score": 0.14085766673088074, "chain_id": "3SLE99ER0NCCEIFUMGDCKL1259QBZN_1_5"}
{"score": 0.03372250497341156, "chain_id": "3SLE99ER0NCCEIFUMGDCKL1259QBZN_1_6"}
{"score": 0.03156021237373352, "chain_id": "3SLE99ER0NCCEIFUMGDCKL1259QBZN_1_7"}
{"score": 0.08227864652872086, "chain_id": "3SLE99ER0NCCEIFUMGDCKL1259QBZN_1_8"}
{"score": 0.03202257305383682, "chain_id": "3SLE99ER0NCCEIFUMGDCKL1259QBZN_1_9"}
{"score": 0.06434215605258942, "chain_id": "3SLE99ER0NCCEIFUMGDCKL1259QBZN_1_10"}
{"score": 0.0385073684155941, "chain_id": "3QAVNHZ3EM3NQJTY11M7HV6Y8N7LA3_1_1"}
{"score": 0.05023779347538948, "chain_id": "3QAVNHZ3EM3NQJTY11M7HV6Y8N7LA3_1_2"}
{"score": 0.0365811362862587, "chain_id": "3QAVNHZ3EM3NQJTY11M7HV6Y8N7LA3_1_3"}
{"score": 0.027283240109682083, "chain_id": "3QAVNHZ3EM3NQJTY11M7HV6Y8N7LA3_1_4"}
{"score": 0.057540036737918854, "chain_id": "3QAVNHZ3EM3NQJTY11M7HV6Y8N7LA3_1_5"}
{"score": 0.06736702471971512, "chain_id": "3QAVNHZ3EM3NQJTY11M7HV6Y8N7LA3_1_6"}
{"score": 0.022848129272460938, "chain_id": "3QAVNHZ3EM3NQJTY11M7HV6Y8N7LA3_1_7"}
{"score": 0.029304852709174156, "chain_id": "3QAVNHZ3EM3NQJTY11M7HV6Y8N7LA3_1_8"}
{"score": 0.024119697511196136, "chain_id": "3QAVNHZ3EM3NQJTY11M7HV6Y8N7LA3_1_9"}
{"score": 0.03647676855325699, "chain_id": "3QAVNHZ3EM3NQJTY11M7HV6Y8N7LA3_1_10"}
{"score": 0.406666100025177, "chain_id": "3MX2NQ3YC9TLK7Y6KOYEKELZ1DK5XB_1_6"}
{"score": 0.9884920120239258, "chain_id": "3MX2NQ3YC9TLK7Y6KOYEKELZ1DK5XB_1_1"}
{"score": 0.7020227909088135, "chain_id": "3MX2NQ3YC9TLK7Y6KOYEKELZ1DK5XB_1_2"}
{"score": 0.6617934703826904, "chain_id": "3MX2NQ3YC9TLK7Y6KOYEKELZ1DK5XB_1_3"}
{"score": 0.05748935416340828, "chain_id": "3MX2NQ3YC9TLK7Y6KOYEKELZ1DK5XB_1_4"}
{"score": 0.7074166536331177, "chain_id": "3MX2NQ3YC9TLK7Y6KOYEKELZ1DK5XB_1_5"}
{"score": 0.16769801080226898, "chain_id": "3MX2NQ3YC9TLK7Y6KOYEKELZ1DK5XB_1_7"}
{"score": 0.023283885791897774, "chain_id": "3MX2NQ3YC9TLK7Y6KOYEKELZ1DK5XB_1_8"}
{"score": 0.025166450068354607, "chain_id": "3MX2NQ3YC9TLK7Y6KOYEKELZ1DK5XB_1_9"}
{"score": 0.02553408592939377, "chain_id": "3MX2NQ3YC9TLK7Y6KOYEKELZ1DK5XB_1_10"}
{"score": 0.11437473446130753, "chain_id": "3HOSI13XHZN2QE8I8UFLOJ6ZYE8DDV_1_1"}
{"score": 0.13393154740333557, "chain_id": "3HOSI13XHZN2QE8I8UFLOJ6ZYE8DDV_1_2"}
{"score": 0.02353694662451744, "chain_id": "3HOSI13XHZN2QE8I8UFLOJ6ZYE8DDV_1_3"}
{"score": 0.025148218497633934, "chain_id": "3HOSI13XHZN2QE8I8UFLOJ6ZYE8DDV_1_4"}
{"score": 0.024846414104104042, "chain_id": "3HOSI13XHZN2QE8I8UFLOJ6ZYE8DDV_1_5"}
{"score": 0.02789623662829399, "chain_id": "3HOSI13XHZN2QE8I8UFLOJ6ZYE8DDV_1_6"}
{"score": 0.017683567479252815, "chain_id": "3HOSI13XHZN2QE8I8UFLOJ6ZYE8DDV_1_7"}
{"score": 0.02934141457080841, "chain_id": "3HOSI13XHZN2QE8I8UFLOJ6ZYE8DDV_1_8"}
{"score": 0.06241413950920105, "chain_id": "3HOSI13XHZN2QE8I8UFLOJ6ZYE8DDV_1_9"}
{"score": 0.0515478141605854, "chain_id": "3HOSI13XHZN2QE8I8UFLOJ6ZYE8DDV_1_10"}
{"score": 0.7781425714492798, "chain_id": "31LVTDXBL79FP0FF3C8TCLV8MQ1RLZ_1_3"}
{"score": 0.9835926294326782, "chain_id": "31LVTDXBL79FP0FF3C8TCLV8MQ1RLZ_1_5"}
{"score": 0.040559180080890656, "chain_id": "31LVTDXBL79FP0FF3C8TCLV8MQ1RLZ_1_1"}
{"score": 0.13615524768829346, "chain_id": "31LVTDXBL79FP0FF3C8TCLV8MQ1RLZ_1_2"}
{"score": 0.6333134770393372, "chain_id": "31LVTDXBL79FP0FF3C8TCLV8MQ1RLZ_1_4"}
{"score": 0.043476179242134094, "chain_id": "31LVTDXBL79FP0FF3C8TCLV8MQ1RLZ_1_6"}
{"score": 0.016716094687581062, "chain_id": "31LVTDXBL79FP0FF3C8TCLV8MQ1RLZ_1_7"}
{"score": 0.6694657802581787, "chain_id": "31LVTDXBL79FP0FF3C8TCLV8MQ1RLZ_1_8"}
{"score": 0.02338511124253273, "chain_id": "31LVTDXBL79FP0FF3C8TCLV8MQ1RLZ_1_9"}
{"score": 0.017865188419818878, "chain_id": "31LVTDXBL79FP0FF3C8TCLV8MQ1RLZ_1_10"}
{"score": 0.037352304905653, "chain_id": "30OG32W0SUAG4WDVTJ48Q60EC67NE0_1_1"}
{"score": 0.027624910697340965, "chain_id": "30OG32W0SUAG4WDVTJ48Q60EC67NE0_1_2"}
{"score": 0.02336714044213295, "chain_id": "30OG32W0SUAG4WDVTJ48Q60EC67NE0_1_3"}
{"score": 0.03739791363477707, "chain_id": "30OG32W0SUAG4WDVTJ48Q60EC67NE0_1_4"}
{"score": 0.014191591180860996, "chain_id": "30OG32W0SUAG4WDVTJ48Q60EC67NE0_1_5"}
{"score": 0.011287244036793709, "chain_id": "30OG32W0SUAG4WDVTJ48Q60EC67NE0_1_6"}
{"score": 0.05137062072753906, "chain_id": "30OG32W0SUAG4WDVTJ48Q60EC67NE0_1_7"}
{"score": 0.08908789604902267, "chain_id": "30OG32W0SUAG4WDVTJ48Q60EC67NE0_1_8"}
{"score": 0.07870396971702576, "chain_id": "30OG32W0SUAG4WDVTJ48Q60EC67NE0_1_9"}
{"score": 0.02678837440907955, "chain_id": "30OG32W0SUAG4WDVTJ48Q60EC67NE0_1_10"}
{"score": 0.9884920120239258, "chain_id": "3OB0CAO74HOM058BQMLPSPVY892HY5_1_5"}
{"score": 0.7020227909088135, "chain_id": "3OB0CAO74HOM058BQMLPSPVY892HY5_1_6"}
{"score": 0.6617934703826904, "chain_id": "3OB0CAO74HOM058BQMLPSPVY892HY5_1_7"}
{"score": 0.4385696351528168, "chain_id": "3OB0CAO74HOM058BQMLPSPVY892HY5_1_1"}
{"score": 0.0987333357334137, "chain_id": "3OB0CAO74HOM058BQMLPSPVY892HY5_1_2"}
{"score": 0.4768114984035492, "chain_id": "3OB0CAO74HOM058BQMLPSPVY892HY5_1_3"}
{"score": 0.07012490928173065, "chain_id": "3OB0CAO74HOM058BQMLPSPVY892HY5_1_4"}
{"score": 0.7074166536331177, "chain_id": "3OB0CAO74HOM058BQMLPSPVY892HY5_1_8"}
{"score": 0.023283885791897774, "chain_id": "3OB0CAO74HOM058BQMLPSPVY892HY5_1_9"}
{"score": 0.025166450068354607, "chain_id": "3OB0CAO74HOM058BQMLPSPVY892HY5_1_10"}
{"score": 0.0289718396961689, "chain_id": "3KMS4QQVK2P724SORHWYGW4AUJEKFL_1_1"}
{"score": 0.029523245990276337, "chain_id": "3KMS4QQVK2P724SORHWYGW4AUJEKFL_1_2"}
{"score": 0.16770444810390472, "chain_id": "3KMS4QQVK2P724SORHWYGW4AUJEKFL_1_3"}
{"score": 0.9465840458869934, "chain_id": "3KMS4QQVK2P724SORHWYGW4AUJEKFL_1_4"}
{"score": 0.07988244295120239, "chain_id": "3KMS4QQVK2P724SORHWYGW4AUJEKFL_1_5"}
{"score": 0.33485400676727295, "chain_id": "3KMS4QQVK2P724SORHWYGW4AUJEKFL_1_6"}
{"score": 0.05966443568468094, "chain_id": "3KMS4QQVK2P724SORHWYGW4AUJEKFL_1_7"}
{"score": 0.14758135378360748, "chain_id": "3KMS4QQVK2P724SORHWYGW4AUJEKFL_1_8"}
{"score": 0.04024713113903999, "chain_id": "3KMS4QQVK2P724SORHWYGW4AUJEKFL_1_9"}
{"score": 0.04901808127760887, "chain_id": "3KMS4QQVK2P724SORHWYGW4AUJEKFL_1_10"}
{"score": 0.035198844969272614, "chain_id": "3P59JYT76LJM4T6ZXVVJX4XH5VF2TB_1_1"}
{"score": 0.03031822107732296, "chain_id": "3P59JYT76LJM4T6ZXVVJX4XH5VF2TB_1_2"}
{"score": 0.02831096388399601, "chain_id": "3P59JYT76LJM4T6ZXVVJX4XH5VF2TB_1_3"}
{"score": 0.03139164298772812, "chain_id": "3P59JYT76LJM4T6ZXVVJX4XH5VF2TB_1_4"}
{"score": 0.039116960018873215, "chain_id": "3P59JYT76LJM4T6ZXVVJX4XH5VF2TB_1_5"}
{"score": 0.029999682679772377, "chain_id": "3P59JYT76LJM4T6ZXVVJX4XH5VF2TB_1_6"}
{"score": 0.05880952626466751, "chain_id": "3P59JYT76LJM4T6ZXVVJX4XH5VF2TB_1_7"}
{"score": 0.034649718552827835, "chain_id": "3P59JYT76LJM4T6ZXVVJX4XH5VF2TB_1_8"}
{"score": 0.0686267763376236, "chain_id": "3P59JYT76LJM4T6ZXVVJX4XH5VF2TB_1_9"}
{"score": 0.015772484242916107, "chain_id": "3P59JYT76LJM4T6ZXVVJX4XH5VF2TB_1_10"}
{"score": 0.8798463344573975, "chain_id": "3TMFV4NEP8DPIPCI8H9VUFHJVPZW88_1_3"}
{"score": 0.5590113997459412, "chain_id": "3TMFV4NEP8DPIPCI8H9VUFHJVPZW88_1_1"}
{"score": 0.2252378761768341, "chain_id": "3TMFV4NEP8DPIPCI8H9VUFHJVPZW88_1_2"}
{"score": 0.20724312961101532, "chain_id": "3TMFV4NEP8DPIPCI8H9VUFHJVPZW88_1_4"}
{"score": 0.05051316320896149, "chain_id": "3TMFV4NEP8DPIPCI8H9VUFHJVPZW88_1_5"}
{"score": 0.029898211359977722, "chain_id": "3TMFV4NEP8DPIPCI8H9VUFHJVPZW88_1_6"}
{"score": 0.05040283873677254, "chain_id": "3TMFV4NEP8DPIPCI8H9VUFHJVPZW88_1_7"}
{"score": 0.01623237505555153, "chain_id": "3TMFV4NEP8DPIPCI8H9VUFHJVPZW88_1_8"}
{"score": 0.06796066462993622, "chain_id": "3TMFV4NEP8DPIPCI8H9VUFHJVPZW88_1_9"}
{"score": 0.2034551203250885, "chain_id": "3TMFV4NEP8DPIPCI8H9VUFHJVPZW88_1_10"}
{"score": 0.9937785267829895, "chain_id": "3WMINLGALB2UNFZSOOT8ECGBF12ACW_1_1"}
{"score": 0.9682888984680176, "chain_id": "3WMINLGALB2UNFZSOOT8ECGBF12ACW_1_4"}
{"score": 0.9363601207733154, "chain_id": "3WMINLGALB2UNFZSOOT8ECGBF12ACW_1_2"}
{"score": 0.4095642566680908, "chain_id": "3WMINLGALB2UNFZSOOT8ECGBF12ACW_1_3"}
{"score": 0.028301890939474106, "chain_id": "3WMINLGALB2UNFZSOOT8ECGBF12ACW_1_5"}
{"score": 0.028301890939474106, "chain_id": "3WMINLGALB2UNFZSOOT8ECGBF12ACW_1_6"}
{"score": 0.04112706333398819, "chain_id": "3WMINLGALB2UNFZSOOT8ECGBF12ACW_1_7"}
{"score": 0.03147093579173088, "chain_id": "3WMINLGALB2UNFZSOOT8ECGBF12ACW_1_8"}
{"score": 0.04112706333398819, "chain_id": "3WMINLGALB2UNFZSOOT8ECGBF12ACW_1_9"}
{"score": 0.03147093579173088, "chain_id": "3WMINLGALB2UNFZSOOT8ECGBF12ACW_1_10"}
{"score": 0.9906814694404602, "chain_id": "3LYA37P8IQMHPNG8MFA2X6DP212BKT_1_4"}
{"score": 0.9855459928512573, "chain_id": "3LYA37P8IQMHPNG8MFA2X6DP212BKT_1_1"}
{"score": 0.85010826587677, "chain_id": "3LYA37P8IQMHPNG8MFA2X6DP212BKT_1_2"}
{"score": 0.9527543783187866, "chain_id": "3LYA37P8IQMHPNG8MFA2X6DP212BKT_1_3"}
{"score": 0.9182422757148743, "chain_id": "3LYA37P8IQMHPNG8MFA2X6DP212BKT_1_5"}
{"score": 0.9864099621772766, "chain_id": "3LYA37P8IQMHPNG8MFA2X6DP212BKT_1_6"}
{"score": 0.9907711744308472, "chain_id": "3LYA37P8IQMHPNG8MFA2X6DP212BKT_1_7"}
{"score": 0.9838387966156006, "chain_id": "3LYA37P8IQMHPNG8MFA2X6DP212BKT_1_8"}
{"score": 0.9510803818702698, "chain_id": "3LYA37P8IQMHPNG8MFA2X6DP212BKT_1_9"}
{"score": 0.9848570823669434, "chain_id": "3LYA37P8IQMHPNG8MFA2X6DP212BKT_1_10"}
{"score": 0.9905219078063965, "chain_id": "3OONKJ5DKCI0FE1NK72V4NUYM9POB9_1_2"}
{"score": 0.9762343764305115, "chain_id": "3OONKJ5DKCI0FE1NK72V4NUYM9POB9_1_3"}
{"score": 0.8891420364379883, "chain_id": "3OONKJ5DKCI0FE1NK72V4NUYM9POB9_1_5"}
{"score": 0.9872647523880005, "chain_id": "3OONKJ5DKCI0FE1NK72V4NUYM9POB9_1_1"}
{"score": 0.9725279808044434, "chain_id": "3OONKJ5DKCI0FE1NK72V4NUYM9POB9_1_4"}
{"score": 0.985161304473877, "chain_id": "3OONKJ5DKCI0FE1NK72V4NUYM9POB9_1_6"}
{"score": 0.281639963388443, "chain_id": "3OONKJ5DKCI0FE1NK72V4NUYM9POB9_1_7"}
{"score": 0.987406313419342, "chain_id": "3OONKJ5DKCI0FE1NK72V4NUYM9POB9_1_8"}
{"score": 0.9829293489456177, "chain_id": "3OONKJ5DKCI0FE1NK72V4NUYM9POB9_1_9"}
{"score": 0.8936595320701599, "chain_id": "3OONKJ5DKCI0FE1NK72V4NUYM9POB9_1_10"}
{"score": 0.9936421513557434, "chain_id": "3GNA64GUZE31BAXUYA3MQ6P6RISQ5X_1_3"}
{"score": 0.9695712327957153, "chain_id": "3GNA64GUZE31BAXUYA3MQ6P6RISQ5X_1_6"}
{"score": 0.8795396685600281, "chain_id": "3GNA64GUZE31BAXUYA3MQ6P6RISQ5X_1_10"}
{"score": 0.062008507549762726, "chain_id": "3GNA64GUZE31BAXUYA3MQ6P6RISQ5X_1_1"}
{"score": 0.1481192260980606, "chain_id": "3GNA64GUZE31BAXUYA3MQ6P6RISQ5X_1_2"}
{"score": 0.9398713111877441, "chain_id": "3GNA64GUZE31BAXUYA3MQ6P6RISQ5X_1_4"}
{"score": 0.3801105320453644, "chain_id": "3GNA64GUZE31BAXUYA3MQ6P6RISQ5X_1_5"}
{"score": 0.8127550482749939, "chain_id": "3GNA64GUZE31BAXUYA3MQ6P6RISQ5X_1_7"}
{"score": 0.05649804323911667, "chain_id": "3GNA64GUZE31BAXUYA3MQ6P6RISQ5X_1_8"}
{"score": 0.09897222369909286, "chain_id": "3GNA64GUZE31BAXUYA3MQ6P6RISQ5X_1_9"}
{"score": 0.9934906959533691, "chain_id": "3SUWZRL0MYC8XB73U2IROVES7JD6E0_1_1"}
{"score": 0.9863821268081665, "chain_id": "3SUWZRL0MYC8XB73U2IROVES7JD6E0_1_2"}
{"score": 0.9699153900146484, "chain_id": "3SUWZRL0MYC8XB73U2IROVES7JD6E0_1_6"}
{"score": 0.8909424543380737, "chain_id": "3SUWZRL0MYC8XB73U2IROVES7JD6E0_1_8"}
{"score": 0.9393293857574463, "chain_id": "3SUWZRL0MYC8XB73U2IROVES7JD6E0_1_3"}
{"score": 0.41023752093315125, "chain_id": "3SUWZRL0MYC8XB73U2IROVES7JD6E0_1_4"}
{"score": 0.8105255365371704, "chain_id": "3SUWZRL0MYC8XB73U2IROVES7JD6E0_1_5"}
{"score": 0.3394473195075989, "chain_id": "3SUWZRL0MYC8XB73U2IROVES7JD6E0_1_7"}
{"score": 0.9564254879951477, "chain_id": "3SUWZRL0MYC8XB73U2IROVES7JD6E0_1_9"}
{"score": 0.7043722867965698, "chain_id": "3SUWZRL0MYC8XB73U2IROVES7JD6E0_1_10"}
{"score": 0.7384283542633057, "chain_id": "3A4TN5196KH9X276UU30VY3FVL1CHZ_1_1"}
{"score": 0.9199666976928711, "chain_id": "3A4TN5196KH9X276UU30VY3FVL1CHZ_1_3"}
{"score": 0.4997105896472931, "chain_id": "3A4TN5196KH9X276UU30VY3FVL1CHZ_1_4"}
{"score": 0.5481834411621094, "chain_id": "3A4TN5196KH9X276UU30VY3FVL1CHZ_1_2"}
{"score": 0.05095120146870613, "chain_id": "3A4TN5196KH9X276UU30VY3FVL1CHZ_1_5"}
{"score": 0.03806207701563835, "chain_id": "3A4TN5196KH9X276UU30VY3FVL1CHZ_1_6"}
{"score": 0.05139637365937233, "chain_id": "3A4TN5196KH9X276UU30VY3FVL1CHZ_1_7"}
{"score": 0.040275637060403824, "chain_id": "3A4TN5196KH9X276UU30VY3FVL1CHZ_1_8"}
{"score": 0.30363228917121887, "chain_id": "3A4TN5196KH9X276UU30VY3FVL1CHZ_1_9"}
{"score": 0.22643981873989105, "chain_id": "3A4TN5196KH9X276UU30VY3FVL1CHZ_1_10"}
{"score": 0.9916747212409973, "chain_id": "33IZTU6J810MQ9WHWKBMDPVR5HBSXD_1_6"}
{"score": 0.9832156300544739, "chain_id": "33IZTU6J810MQ9WHWKBMDPVR5HBSXD_1_9"}
{"score": 0.9783658981323242, "chain_id": "33IZTU6J810MQ9WHWKBMDPVR5HBSXD_1_1"}
{"score": 0.5012376308441162, "chain_id": "33IZTU6J810MQ9WHWKBMDPVR5HBSXD_1_2"}
{"score": 0.7810019254684448, "chain_id": "33IZTU6J810MQ9WHWKBMDPVR5HBSXD_1_3"}
{"score": 0.6884953379631042, "chain_id": "33IZTU6J810MQ9WHWKBMDPVR5HBSXD_1_4"}
{"score": 0.7366236448287964, "chain_id": "33IZTU6J810MQ9WHWKBMDPVR5HBSXD_1_5"}
{"score": 0.6941609978675842, "chain_id": "33IZTU6J810MQ9WHWKBMDPVR5HBSXD_1_7"}
{"score": 0.9795469641685486, "chain_id": "33IZTU6J810MQ9WHWKBMDPVR5HBSXD_1_8"}
{"score": 0.9750202298164368, "chain_id": "33IZTU6J810MQ9WHWKBMDPVR5HBSXD_1_10"}
{"score": 0.9937785267829895, "chain_id": "3YGXWBAF70GFLQJBFNJH19UBRN84CD_1_1"}
{"score": 0.9682888984680176, "chain_id": "3YGXWBAF70GFLQJBFNJH19UBRN84CD_1_4"}
{"score": 0.9363601207733154, "chain_id": "3YGXWBAF70GFLQJBFNJH19UBRN84CD_1_2"}
{"score": 0.4095642566680908, "chain_id": "3YGXWBAF70GFLQJBFNJH19UBRN84CD_1_3"}
{"score": 0.028301890939474106, "chain_id": "3YGXWBAF70GFLQJBFNJH19UBRN84CD_1_5"}
{"score": 0.028301890939474106, "chain_id": "3YGXWBAF70GFLQJBFNJH19UBRN84CD_1_6"}
{"score": 0.04112706333398819, "chain_id": "3YGXWBAF70GFLQJBFNJH19UBRN84CD_1_7"}
{"score": 0.03147093579173088, "chain_id": "3YGXWBAF70GFLQJBFNJH19UBRN84CD_1_8"}
{"score": 0.04112706333398819, "chain_id": "3YGXWBAF70GFLQJBFNJH19UBRN84CD_1_9"}
{"score": 0.03147093579173088, "chain_id": "3YGXWBAF70GFLQJBFNJH19UBRN84CD_1_10"}
{"score": 0.05384554713964462, "chain_id": "3BXQMRHWKZXRBAPH7I4DH9XHGI9UML_1_1"}
{"score": 0.016460631042718887, "chain_id": "3BXQMRHWKZXRBAPH7I4DH9XHGI9UML_1_2"}
{"score": 0.07216228544712067, "chain_id": "3BXQMRHWKZXRBAPH7I4DH9XHGI9UML_1_3"}
{"score": 0.028170043602585793, "chain_id": "3BXQMRHWKZXRBAPH7I4DH9XHGI9UML_1_4"}
{"score": 0.23144938051700592, "chain_id": "3BXQMRHWKZXRBAPH7I4DH9XHGI9UML_1_5"}
{"score": 0.05494493618607521, "chain_id": "3BXQMRHWKZXRBAPH7I4DH9XHGI9UML_1_6"}
{"score": 0.02194773405790329, "chain_id": "3BXQMRHWKZXRBAPH7I4DH9XHGI9UML_1_7"}
{"score": 0.025241244584321976, "chain_id": "3BXQMRHWKZXRBAPH7I4DH9XHGI9UML_1_8"}
{"score": 0.07948331534862518, "chain_id": "3BXQMRHWKZXRBAPH7I4DH9XHGI9UML_1_9"}
{"score": 0.09735409915447235, "chain_id": "3BXQMRHWKZXRBAPH7I4DH9XHGI9UML_1_10"}
{"score": 0.6317153573036194, "chain_id": "3G0WWMR1UVJ51Z302AZ8KNPSIR1QN8_1_1"}
{"score": 0.174299418926239, "chain_id": "3G0WWMR1UVJ51Z302AZ8KNPSIR1QN8_1_2"}
{"score": 0.38763174414634705, "chain_id": "3G0WWMR1UVJ51Z302AZ8KNPSIR1QN8_1_3"}
{"score": 0.34957846999168396, "chain_id": "3G0WWMR1UVJ51Z302AZ8KNPSIR1QN8_1_4"}
{"score": 0.06914262473583221, "chain_id": "3G0WWMR1UVJ51Z302AZ8KNPSIR1QN8_1_5"}
{"score": 0.5489354133605957, "chain_id": "3G0WWMR1UVJ51Z302AZ8KNPSIR1QN8_1_6"}
{"score": 0.026558728888630867, "chain_id": "3G0WWMR1UVJ51Z302AZ8KNPSIR1QN8_1_7"}
{"score": 0.022044412791728973, "chain_id": "3G0WWMR1UVJ51Z302AZ8KNPSIR1QN8_1_8"}
{"score": 0.041837725788354874, "chain_id": "3G0WWMR1UVJ51Z302AZ8KNPSIR1QN8_1_9"}
{"score": 0.03594062104821205, "chain_id": "3G0WWMR1UVJ51Z302AZ8KNPSIR1QN8_1_10"}
{"score": 0.2730647325515747, "chain_id": "3G5F9DBFOPW5WBD6LBY5LQR4DMEHVL_1_6"}
{"score": 0.13656078279018402, "chain_id": "3G5F9DBFOPW5WBD6LBY5LQR4DMEHVL_1_1"}
{"score": 0.12017170339822769, "chain_id": "3G5F9DBFOPW5WBD6LBY5LQR4DMEHVL_1_2"}
{"score": 0.05990457907319069, "chain_id": "3G5F9DBFOPW5WBD6LBY5LQR4DMEHVL_1_3"}
{"score": 0.3360719680786133, "chain_id": "3G5F9DBFOPW5WBD6LBY5LQR4DMEHVL_1_4"}
{"score": 0.2599654197692871, "chain_id": "3G5F9DBFOPW5WBD6LBY5LQR4DMEHVL_1_5"}
{"score": 0.055302370339632034, "chain_id": "3G5F9DBFOPW5WBD6LBY5LQR4DMEHVL_1_7"}
{"score": 0.9147430062294006, "chain_id": "3G5F9DBFOPW5WBD6LBY5LQR4DMEHVL_1_8"}
{"score": 0.031693823635578156, "chain_id": "3G5F9DBFOPW5WBD6LBY5LQR4DMEHVL_1_9"}
{"score": 0.21910583972930908, "chain_id": "3G5F9DBFOPW5WBD6LBY5LQR4DMEHVL_1_10"}
{"score": 0.5870486497879028, "chain_id": "3QL2OFSM96H17YTHXSYD0I0BFMNNC8_1_1"}
{"score": 0.8497002720832825, "chain_id": "3QL2OFSM96H17YTHXSYD0I0BFMNNC8_1_3"}
{"score": 0.9485198259353638, "chain_id": "3QL2OFSM96H17YTHXSYD0I0BFMNNC8_1_4"}
{"score": 0.41258832812309265, "chain_id": "3QL2OFSM96H17YTHXSYD0I0BFMNNC8_1_2"}
{"score": 0.06447559595108032, "chain_id": "3QL2OFSM96H17YTHXSYD0I0BFMNNC8_1_5"}
{"score": 0.05939342454075813, "chain_id": "3QL2OFSM96H17YTHXSYD0I0BFMNNC8_1_6"}
{"score": 0.05172823742032051, "chain_id": "3QL2OFSM96H17YTHXSYD0I0BFMNNC8_1_7"}
{"score": 0.03562819957733154, "chain_id": "3QL2OFSM96H17YTHXSYD0I0BFMNNC8_1_8"}
{"score": 0.02267969585955143, "chain_id": "3QL2OFSM96H17YTHXSYD0I0BFMNNC8_1_9"}
{"score": 0.06156584247946739, "chain_id": "3QL2OFSM96H17YTHXSYD0I0BFMNNC8_1_10"}
{"score": 0.9395163655281067, "chain_id": "3180JW2OT4BKPNTH3KJDT5DKF6AJ5D_1_2"}
{"score": 0.8875075578689575, "chain_id": "3180JW2OT4BKPNTH3KJDT5DKF6AJ5D_1_4"}
{"score": 0.5852980017662048, "chain_id": "3180JW2OT4BKPNTH3KJDT5DKF6AJ5D_1_7"}
{"score": 0.46336281299591064, "chain_id": "3180JW2OT4BKPNTH3KJDT5DKF6AJ5D_1_1"}
{"score": 0.421922892332077, "chain_id": "3180JW2OT4BKPNTH3KJDT5DKF6AJ5D_1_3"}
{"score": 0.47290274500846863, "chain_id": "3180JW2OT4BKPNTH3KJDT5DKF6AJ5D_1_5"}
{"score": 0.4413783848285675, "chain_id": "3180JW2OT4BKPNTH3KJDT5DKF6AJ5D_1_6"}
{"score": 0.6172711849212646, "chain_id": "3180JW2OT4BKPNTH3KJDT5DKF6AJ5D_1_8"}
{"score": 0.06355234980583191, "chain_id": "3180JW2OT4BKPNTH3KJDT5DKF6AJ5D_1_9"}
{"score": 0.12000827491283417, "chain_id": "3180JW2OT4BKPNTH3KJDT5DKF6AJ5D_1_10"}
{"score": 0.05722519010305405, "chain_id": "3SNLUL3WO4M75S7W763YHWISIG8ULG_1_1"}
{"score": 0.05554254725575447, "chain_id": "3SNLUL3WO4M75S7W763YHWISIG8ULG_1_2"}
{"score": 0.21604156494140625, "chain_id": "3SNLUL3WO4M75S7W763YHWISIG8ULG_1_3"}
{"score": 0.07301443815231323, "chain_id": "3SNLUL3WO4M75S7W763YHWISIG8ULG_1_4"}
{"score": 0.045306768268346786, "chain_id": "3SNLUL3WO4M75S7W763YHWISIG8ULG_1_5"}
{"score": 0.020571302622556686, "chain_id": "3SNLUL3WO4M75S7W763YHWISIG8ULG_1_6"}
{"score": 0.054041579365730286, "chain_id": "3SNLUL3WO4M75S7W763YHWISIG8ULG_1_7"}
{"score": 0.024894384667277336, "chain_id": "3SNLUL3WO4M75S7W763YHWISIG8ULG_1_8"}
{"score": 0.04655594378709793, "chain_id": "3SNLUL3WO4M75S7W763YHWISIG8ULG_1_9"}
{"score": 0.03293755277991295, "chain_id": "3SNLUL3WO4M75S7W763YHWISIG8ULG_1_10"}
{"score": 0.13142827153205872, "chain_id": "3KWTYT08702QKDHH65VQ9KQCII05LV_1_2"}
{"score": 0.025894954800605774, "chain_id": "3KWTYT08702QKDHH65VQ9KQCII05LV_1_5"}
{"score": 0.1665760576725006, "chain_id": "3KWTYT08702QKDHH65VQ9KQCII05LV_1_1"}
{"score": 0.7258800864219666, "chain_id": "3KWTYT08702QKDHH65VQ9KQCII05LV_1_3"}
{"score": 0.024101268500089645, "chain_id": "3KWTYT08702QKDHH65VQ9KQCII05LV_1_4"}
{"score": 0.06701686233282089, "chain_id": "3KWTYT08702QKDHH65VQ9KQCII05LV_1_6"}
{"score": 0.06118295341730118, "chain_id": "3KWTYT08702QKDHH65VQ9KQCII05LV_1_7"}
{"score": 0.3373945653438568, "chain_id": "3KWTYT08702QKDHH65VQ9KQCII05LV_1_8"}
{"score": 0.055162135511636734, "chain_id": "3KWTYT08702QKDHH65VQ9KQCII05LV_1_9"}
{"score": 0.04086664319038391, "chain_id": "3KWTYT08702QKDHH65VQ9KQCII05LV_1_10"}
{"score": 0.7726855278015137, "chain_id": "3JMSRU9HQITTC1M4VAQZ0NURPLOEV2_1_1"}
{"score": 0.5201540589332581, "chain_id": "3JMSRU9HQITTC1M4VAQZ0NURPLOEV2_1_2"}
{"score": 0.044241633266210556, "chain_id": "3JMSRU9HQITTC1M4VAQZ0NURPLOEV2_1_3"}
{"score": 0.2297079712152481, "chain_id": "3JMSRU9HQITTC1M4VAQZ0NURPLOEV2_1_4"}
{"score": 0.034936822950839996, "chain_id": "3JMSRU9HQITTC1M4VAQZ0NURPLOEV2_1_5"}
{"score": 0.1277880221605301, "chain_id": "3JMSRU9HQITTC1M4VAQZ0NURPLOEV2_1_6"}
{"score": 0.4471224844455719, "chain_id": "3JMSRU9HQITTC1M4VAQZ0NURPLOEV2_1_7"}
{"score": 0.05514273792505264, "chain_id": "3JMSRU9HQITTC1M4VAQZ0NURPLOEV2_1_8"}
{"score": 0.0970463827252388, "chain_id": "3JMSRU9HQITTC1M4VAQZ0NURPLOEV2_1_9"}
{"score": 0.031751710921525955, "chain_id": "3JMSRU9HQITTC1M4VAQZ0NURPLOEV2_1_10"}
{"score": 0.9243714213371277, "chain_id": "3X08E93BHVH4KWEOOKZTC7MFR7B66A_1_1"}
{"score": 0.7563243508338928, "chain_id": "3X08E93BHVH4KWEOOKZTC7MFR7B66A_1_2"}
{"score": 0.8402514457702637, "chain_id": "3X08E93BHVH4KWEOOKZTC7MFR7B66A_1_3"}
{"score": 0.8592942953109741, "chain_id": "3X08E93BHVH4KWEOOKZTC7MFR7B66A_1_4"}
{"score": 0.09608390182256699, "chain_id": "3X08E93BHVH4KWEOOKZTC7MFR7B66A_1_5"}
{"score": 0.2293396145105362, "chain_id": "3X08E93BHVH4KWEOOKZTC7MFR7B66A_1_6"}
{"score": 0.026695378124713898, "chain_id": "3X08E93BHVH4KWEOOKZTC7MFR7B66A_1_7"}
{"score": 0.0226295106112957, "chain_id": "3X08E93BHVH4KWEOOKZTC7MFR7B66A_1_8"}
{"score": 0.02292066626250744, "chain_id": "3X08E93BHVH4KWEOOKZTC7MFR7B66A_1_9"}
{"score": 0.026296867057681084, "chain_id": "3X08E93BHVH4KWEOOKZTC7MFR7B66A_1_10"}
{"score": 0.9580856561660767, "chain_id": "3KOPY89HM81HB86DP1VKE8F03NC3JS_1_1"}
{"score": 0.8865735530853271, "chain_id": "3KOPY89HM81HB86DP1VKE8F03NC3JS_1_2"}
{"score": 0.7744685411453247, "chain_id": "3KOPY89HM81HB86DP1VKE8F03NC3JS_1_3"}
{"score": 0.9205316305160522, "chain_id": "3KOPY89HM81HB86DP1VKE8F03NC3JS_1_4"}
{"score": 0.04317152127623558, "chain_id": "3KOPY89HM81HB86DP1VKE8F03NC3JS_1_5"}
{"score": 0.12177427858114243, "chain_id": "3KOPY89HM81HB86DP1VKE8F03NC3JS_1_6"}
{"score": 0.04884923994541168, "chain_id": "3KOPY89HM81HB86DP1VKE8F03NC3JS_1_7"}
{"score": 0.09081585705280304, "chain_id": "3KOPY89HM81HB86DP1VKE8F03NC3JS_1_8"}
{"score": 0.02322051115334034, "chain_id": "3KOPY89HM81HB86DP1VKE8F03NC3JS_1_9"}
{"score": 0.043899063020944595, "chain_id": "3KOPY89HM81HB86DP1VKE8F03NC3JS_1_10"}
{"score": 0.8089805245399475, "chain_id": "3634BBTX0OTGW920REBM3GPX2XBIFU_1_6"}
{"score": 0.9554036259651184, "chain_id": "3634BBTX0OTGW920REBM3GPX2XBIFU_1_8"}
{"score": 0.031721170991659164, "chain_id": "3634BBTX0OTGW920REBM3GPX2XBIFU_1_1"}
{"score": 0.023509232327342033, "chain_id": "3634BBTX0OTGW920REBM3GPX2XBIFU_1_2"}
{"score": 0.023509232327342033, "chain_id": "3634BBTX0OTGW920REBM3GPX2XBIFU_1_3"}
{"score": 0.06034404784440994, "chain_id": "3634BBTX0OTGW920REBM3GPX2XBIFU_1_4"}
{"score": 0.8028084635734558, "chain_id": "3634BBTX0OTGW920REBM3GPX2XBIFU_1_5"}
{"score": 0.7419470548629761, "chain_id": "3634BBTX0OTGW920REBM3GPX2XBIFU_1_7"}
{"score": 0.03782234340906143, "chain_id": "3634BBTX0OTGW920REBM3GPX2XBIFU_1_9"}
{"score": 0.024735989049077034, "chain_id": "3634BBTX0OTGW920REBM3GPX2XBIFU_1_10"}
{"score": 0.9753450155258179, "chain_id": "3X4JMASXCM8FCX94IM0KEMYG16Q0BP_1_1"}
{"score": 0.8623677492141724, "chain_id": "3X4JMASXCM8FCX94IM0KEMYG16Q0BP_1_2"}
{"score": 0.9624016880989075, "chain_id": "3X4JMASXCM8FCX94IM0KEMYG16Q0BP_1_3"}
{"score": 0.8727531433105469, "chain_id": "3X4JMASXCM8FCX94IM0KEMYG16Q0BP_1_4"}
{"score": 0.11621987819671631, "chain_id": "3X4JMASXCM8FCX94IM0KEMYG16Q0BP_1_5"}
{"score": 0.0493813082575798, "chain_id": "3X4JMASXCM8FCX94IM0KEMYG16Q0BP_1_6"}
{"score": 0.03822491317987442, "chain_id": "3X4JMASXCM8FCX94IM0KEMYG16Q0BP_1_7"}
{"score": 0.08624905347824097, "chain_id": "3X4JMASXCM8FCX94IM0KEMYG16Q0BP_1_8"}
{"score": 0.10340842604637146, "chain_id": "3X4JMASXCM8FCX94IM0KEMYG16Q0BP_1_9"}
{"score": 0.05182913318276405, "chain_id": "3X4JMASXCM8FCX94IM0KEMYG16Q0BP_1_10"}
{"score": 0.9212334156036377, "chain_id": "3NG53N1RLVIZYGFHWVV02L9N2S28PV_1_1"}
{"score": 0.8320169448852539, "chain_id": "3NG53N1RLVIZYGFHWVV02L9N2S28PV_1_2"}
{"score": 0.8401315808296204, "chain_id": "3NG53N1RLVIZYGFHWVV02L9N2S28PV_1_3"}
{"score": 0.8411901593208313, "chain_id": "3NG53N1RLVIZYGFHWVV02L9N2S28PV_1_4"}
{"score": 0.11778195202350616, "chain_id": "3NG53N1RLVIZYGFHWVV02L9N2S28PV_1_5"}
{"score": 0.3082536458969116, "chain_id": "3NG53N1RLVIZYGFHWVV02L9N2S28PV_1_6"}
{"score": 0.030502716079354286, "chain_id": "3NG53N1RLVIZYGFHWVV02L9N2S28PV_1_7"}
{"score": 0.02144453302025795, "chain_id": "3NG53N1RLVIZYGFHWVV02L9N2S28PV_1_8"}
{"score": 0.022314859554171562, "chain_id": "3NG53N1RLVIZYGFHWVV02L9N2S28PV_1_9"}
{"score": 0.02521779201924801, "chain_id": "3NG53N1RLVIZYGFHWVV02L9N2S28PV_1_10"}
{"score": 0.5983287692070007, "chain_id": "3XIQGXAUMC707BCP8HDBIYZVT99X7T_1_3"}
{"score": 0.21116459369659424, "chain_id": "3XIQGXAUMC707BCP8HDBIYZVT99X7T_1_9"}
{"score": 0.07293053716421127, "chain_id": "3XIQGXAUMC707BCP8HDBIYZVT99X7T_1_10"}
{"score": 0.580913782119751, "chain_id": "3XIQGXAUMC707BCP8HDBIYZVT99X7T_1_1"}
{"score": 0.6257460713386536, "chain_id": "3XIQGXAUMC707BCP8HDBIYZVT99X7T_1_2"}
{"score": 0.408921480178833, "chain_id": "3XIQGXAUMC707BCP8HDBIYZVT99X7T_1_4"}
{"score": 0.023026684299111366, "chain_id": "3XIQGXAUMC707BCP8HDBIYZVT99X7T_1_5"}
{"score": 0.021469974890351295, "chain_id": "3XIQGXAUMC707BCP8HDBIYZVT99X7T_1_6"}
{"score": 0.03107554465532303, "chain_id": "3XIQGXAUMC707BCP8HDBIYZVT99X7T_1_7"}
{"score": 0.021920818835496902, "chain_id": "3XIQGXAUMC707BCP8HDBIYZVT99X7T_1_8"}
{"score": 0.8621808290481567, "chain_id": "3R6BYFZZP7BDM4RVQ0BN6QCCQ9OFXY_1_1"}
{"score": 0.11044476926326752, "chain_id": "3R6BYFZZP7BDM4RVQ0BN6QCCQ9OFXY_1_2"}
{"score": 0.04725709930062294, "chain_id": "3R6BYFZZP7BDM4RVQ0BN6QCCQ9OFXY_1_3"}
{"score": 0.36490848660469055, "chain_id": "3R6BYFZZP7BDM4RVQ0BN6QCCQ9OFXY_1_4"}
{"score": 0.07420700043439865, "chain_id": "3R6BYFZZP7BDM4RVQ0BN6QCCQ9OFXY_1_5"}
{"score": 0.06021294370293617, "chain_id": "3R6BYFZZP7BDM4RVQ0BN6QCCQ9OFXY_1_6"}
{"score": 0.12379574775695801, "chain_id": "3R6BYFZZP7BDM4RVQ0BN6QCCQ9OFXY_1_7"}
{"score": 0.03491450101137161, "chain_id": "3R6BYFZZP7BDM4RVQ0BN6QCCQ9OFXY_1_8"}
{"score": 0.05130235105752945, "chain_id": "3R6BYFZZP7BDM4RVQ0BN6QCCQ9OFXY_1_9"}
{"score": 0.6374764442443848, "chain_id": "3R6BYFZZP7BDM4RVQ0BN6QCCQ9OFXY_1_10"}
{"score": 0.14610625803470612, "chain_id": "3K5TEWLKGVA5S6OQRTGQL2SJRCXVI0_1_1"}
{"score": 0.07796294242143631, "chain_id": "3K5TEWLKGVA5S6OQRTGQL2SJRCXVI0_1_2"}
{"score": 0.30580633878707886, "chain_id": "3K5TEWLKGVA5S6OQRTGQL2SJRCXVI0_1_3"}
{"score": 0.06785538047552109, "chain_id": "3K5TEWLKGVA5S6OQRTGQL2SJRCXVI0_1_4"}
{"score": 0.28424468636512756, "chain_id": "3K5TEWLKGVA5S6OQRTGQL2SJRCXVI0_1_5"}
{"score": 0.0996258407831192, "chain_id": "3K5TEWLKGVA5S6OQRTGQL2SJRCXVI0_1_6"}
{"score": 0.3783250153064728, "chain_id": "3K5TEWLKGVA5S6OQRTGQL2SJRCXVI0_1_7"}
{"score": 0.07817210257053375, "chain_id": "3K5TEWLKGVA5S6OQRTGQL2SJRCXVI0_1_8"}
{"score": 0.05015821382403374, "chain_id": "3K5TEWLKGVA5S6OQRTGQL2SJRCXVI0_1_9"}
{"score": 0.019527221098542213, "chain_id": "3K5TEWLKGVA5S6OQRTGQL2SJRCXVI0_1_10"}
{"score": 0.9063218832015991, "chain_id": "3I3WADAZ9Q3YQYKEJXBI11U6C6ZO5Q_1_1"}
{"score": 0.8231459856033325, "chain_id": "3I3WADAZ9Q3YQYKEJXBI11U6C6ZO5Q_1_2"}
{"score": 0.9031723737716675, "chain_id": "3I3WADAZ9Q3YQYKEJXBI11U6C6ZO5Q_1_3"}
{"score": 0.8591108918190002, "chain_id": "3I3WADAZ9Q3YQYKEJXBI11U6C6ZO5Q_1_4"}
{"score": 0.5589597821235657, "chain_id": "3I3WADAZ9Q3YQYKEJXBI11U6C6ZO5Q_1_5"}
{"score": 0.4787376821041107, "chain_id": "3I3WADAZ9Q3YQYKEJXBI11U6C6ZO5Q_1_6"}
{"score": 0.49215981364250183, "chain_id": "3I3WADAZ9Q3YQYKEJXBI11U6C6ZO5Q_1_7"}
{"score": 0.10466288775205612, "chain_id": "3I3WADAZ9Q3YQYKEJXBI11U6C6ZO5Q_1_8"}
{"score": 0.01972128637135029, "chain_id": "3I3WADAZ9Q3YQYKEJXBI11U6C6ZO5Q_1_9"}
{"score": 0.3344670534133911, "chain_id": "3I3WADAZ9Q3YQYKEJXBI11U6C6ZO5Q_1_10"}
{"score": 0.9226984977722168, "chain_id": "3PEIJLRY6TSFXQDQGPLNAEYC87NWXZ_1_1"}
{"score": 0.8684446215629578, "chain_id": "3PEIJLRY6TSFXQDQGPLNAEYC87NWXZ_1_2"}
{"score": 0.9887895584106445, "chain_id": "3PEIJLRY6TSFXQDQGPLNAEYC87NWXZ_1_3"}
{"score": 0.6938542723655701, "chain_id": "3PEIJLRY6TSFXQDQGPLNAEYC87NWXZ_1_5"}
{"score": 0.6892922520637512, "chain_id": "3PEIJLRY6TSFXQDQGPLNAEYC87NWXZ_1_9"}
{"score": 0.9096857309341431, "chain_id": "3PEIJLRY6TSFXQDQGPLNAEYC87NWXZ_1_10"}
{"score": 0.8330395221710205, "chain_id": "3PEIJLRY6TSFXQDQGPLNAEYC87NWXZ_1_4"}
{"score": 0.8244547843933105, "chain_id": "3PEIJLRY6TSFXQDQGPLNAEYC87NWXZ_1_6"}
{"score": 0.7261760234832764, "chain_id": "3PEIJLRY6TSFXQDQGPLNAEYC87NWXZ_1_7"}
{"score": 0.6075165271759033, "chain_id": "3PEIJLRY6TSFXQDQGPLNAEYC87NWXZ_1_8"}
{"score": 0.8476271033287048, "chain_id": "3OUYGIZWR7XHGRAE1RIL963554D0PT_1_10"}
{"score": 0.024527058005332947, "chain_id": "3OUYGIZWR7XHGRAE1RIL963554D0PT_1_1"}
{"score": 0.22360581159591675, "chain_id": "3OUYGIZWR7XHGRAE1RIL963554D0PT_1_2"}
{"score": 0.02499089017510414, "chain_id": "3OUYGIZWR7XHGRAE1RIL963554D0PT_1_3"}
{"score": 0.19317539036273956, "chain_id": "3OUYGIZWR7XHGRAE1RIL963554D0PT_1_4"}
{"score": 0.22304020822048187, "chain_id": "3OUYGIZWR7XHGRAE1RIL963554D0PT_1_5"}
{"score": 0.7825116515159607, "chain_id": "3OUYGIZWR7XHGRAE1RIL963554D0PT_1_6"}
{"score": 0.22241230309009552, "chain_id": "3OUYGIZWR7XHGRAE1RIL963554D0PT_1_7"}
{"score": 0.7001578211784363, "chain_id": "3OUYGIZWR7XHGRAE1RIL963554D0PT_1_8"}
{"score": 0.3459976613521576, "chain_id": "3OUYGIZWR7XHGRAE1RIL963554D0PT_1_9"}
{"score": 0.5228371024131775, "chain_id": "358UUM7WRZ2GAFQDZI7JTGD7E1T7RG_1_1"}
{"score": 0.19624140858650208, "chain_id": "358UUM7WRZ2GAFQDZI7JTGD7E1T7RG_1_2"}
{"score": 0.26935601234436035, "chain_id": "358UUM7WRZ2GAFQDZI7JTGD7E1T7RG_1_3"}
{"score": 0.19272950291633606, "chain_id": "358UUM7WRZ2GAFQDZI7JTGD7E1T7RG_1_4"}
{"score": 0.09480167180299759, "chain_id": "358UUM7WRZ2GAFQDZI7JTGD7E1T7RG_1_5"}
{"score": 0.01637270301580429, "chain_id": "358UUM7WRZ2GAFQDZI7JTGD7E1T7RG_1_6"}
{"score": 0.01637270301580429, "chain_id": "358UUM7WRZ2GAFQDZI7JTGD7E1T7RG_1_7"}
{"score": 0.12174279242753983, "chain_id": "358UUM7WRZ2GAFQDZI7JTGD7E1T7RG_1_8"}
{"score": 0.016830917447805405, "chain_id": "358UUM7WRZ2GAFQDZI7JTGD7E1T7RG_1_9"}
{"score": 0.49971485137939453, "chain_id": "358UUM7WRZ2GAFQDZI7JTGD7E1T7RG_1_10"}
{"score": 0.9860095977783203, "chain_id": "3I3WADAZ9Q3YQYKEJXBI11U6FT55OQ_1_1"}
{"score": 0.9827760457992554, "chain_id": "3I3WADAZ9Q3YQYKEJXBI11U6FT55OQ_1_2"}
{"score": 0.9546597003936768, "chain_id": "3I3WADAZ9Q3YQYKEJXBI11U6FT55OQ_1_3"}
{"score": 0.9727162718772888, "chain_id": "3I3WADAZ9Q3YQYKEJXBI11U6FT55OQ_1_4"}
{"score": 0.6507756114006042, "chain_id": "3I3WADAZ9Q3YQYKEJXBI11U6FT55OQ_1_5"}
{"score": 0.22990374267101288, "chain_id": "3I3WADAZ9Q3YQYKEJXBI11U6FT55OQ_1_6"}
{"score": 0.23511077463626862, "chain_id": "3I3WADAZ9Q3YQYKEJXBI11U6FT55OQ_1_7"}
{"score": 0.10176905244588852, "chain_id": "3I3WADAZ9Q3YQYKEJXBI11U6FT55OQ_1_8"}
{"score": 0.9777206182479858, "chain_id": "3I3WADAZ9Q3YQYKEJXBI11U6FT55OQ_1_9"}
{"score": 0.24217934906482697, "chain_id": "3I3WADAZ9Q3YQYKEJXBI11U6FT55OQ_1_10"}
{"score": 0.8346302509307861, "chain_id": "3KRVW3HTZNKBWXXDID9D28FTYEESMI_1_1"}
{"score": 0.9319848418235779, "chain_id": "3KRVW3HTZNKBWXXDID9D28FTYEESMI_1_2"}
{"score": 0.8066500425338745, "chain_id": "3KRVW3HTZNKBWXXDID9D28FTYEESMI_1_3"}
{"score": 0.2862943112850189, "chain_id": "3KRVW3HTZNKBWXXDID9D28FTYEESMI_1_4"}
{"score": 0.22277553379535675, "chain_id": "3KRVW3HTZNKBWXXDID9D28FTYEESMI_1_5"}
{"score": 0.13009025156497955, "chain_id": "3KRVW3HTZNKBWXXDID9D28FTYEESMI_1_6"}
{"score": 0.346798837184906, "chain_id": "3KRVW3HTZNKBWXXDID9D28FTYEESMI_1_7"}
{"score": 0.41687503457069397, "chain_id": "3KRVW3HTZNKBWXXDID9D28FTYEESMI_1_8"}
{"score": 0.7309994101524353, "chain_id": "3KRVW3HTZNKBWXXDID9D28FTYEESMI_1_9"}
{"score": 0.6615251302719116, "chain_id": "3KRVW3HTZNKBWXXDID9D28FTYEESMI_1_10"}
{"score": 0.9780628085136414, "chain_id": "3KGTPGBS6XK146LOX0LT20JJD742U5_1_1"}
{"score": 0.8757359385490417, "chain_id": "3KGTPGBS6XK146LOX0LT20JJD742U5_1_5"}
{"score": 0.790221095085144, "chain_id": "3KGTPGBS6XK146LOX0LT20JJD742U5_1_7"}
{"score": 0.8623825311660767, "chain_id": "3KGTPGBS6XK146LOX0LT20JJD742U5_1_2"}
{"score": 0.8664078116416931, "chain_id": "3KGTPGBS6XK146LOX0LT20JJD742U5_1_3"}
{"score": 0.9491837620735168, "chain_id": "3KGTPGBS6XK146LOX0LT20JJD742U5_1_4"}
{"score": 0.9376215934753418, "chain_id": "3KGTPGBS6XK146LOX0LT20JJD742U5_1_6"}
{"score": 0.08509061485528946, "chain_id": "3KGTPGBS6XK146LOX0LT20JJD742U5_1_8"}
{"score": 0.7501826286315918, "chain_id": "3KGTPGBS6XK146LOX0LT20JJD742U5_1_9"}
{"score": 0.028852757066488266, "chain_id": "3KGTPGBS6XK146LOX0LT20JJD742U5_1_10"}
{"score": 0.9867460131645203, "chain_id": "3XM0HYN6NKYG7HP89YH0UV59354EPE_1_1"}
{"score": 0.9859749674797058, "chain_id": "3XM0HYN6NKYG7HP89YH0UV59354EPE_1_2"}
{"score": 0.9482893347740173, "chain_id": "3XM0HYN6NKYG7HP89YH0UV59354EPE_1_3"}
{"score": 0.9726763367652893, "chain_id": "3XM0HYN6NKYG7HP89YH0UV59354EPE_1_4"}
{"score": 0.5502200126647949, "chain_id": "3XM0HYN6NKYG7HP89YH0UV59354EPE_1_5"}
{"score": 0.13342557847499847, "chain_id": "3XM0HYN6NKYG7HP89YH0UV59354EPE_1_6"}
{"score": 0.16093450784683228, "chain_id": "3XM0HYN6NKYG7HP89YH0UV59354EPE_1_7"}
{"score": 0.07112611830234528, "chain_id": "3XM0HYN6NKYG7HP89YH0UV59354EPE_1_8"}
{"score": 0.9767133593559265, "chain_id": "3XM0HYN6NKYG7HP89YH0UV59354EPE_1_9"}
{"score": 0.16922292113304138, "chain_id": "3XM0HYN6NKYG7HP89YH0UV59354EPE_1_10"}
{"score": 0.896294355392456, "chain_id": "38YMOXR4MUY2EBTUF2CXA1LSF98W65_1_1"}
{"score": 0.10028928518295288, "chain_id": "38YMOXR4MUY2EBTUF2CXA1LSF98W65_1_2"}
{"score": 0.12669654190540314, "chain_id": "38YMOXR4MUY2EBTUF2CXA1LSF98W65_1_10"}
{"score": 0.8382030725479126, "chain_id": "38YMOXR4MUY2EBTUF2CXA1LSF98W65_1_3"}
{"score": 0.3521723747253418, "chain_id": "38YMOXR4MUY2EBTUF2CXA1LSF98W65_1_4"}
{"score": 0.3830278515815735, "chain_id": "38YMOXR4MUY2EBTUF2CXA1LSF98W65_1_5"}
{"score": 0.2932344079017639, "chain_id": "38YMOXR4MUY2EBTUF2CXA1LSF98W65_1_6"}
{"score": 0.34835079312324524, "chain_id": "38YMOXR4MUY2EBTUF2CXA1LSF98W65_1_7"}
{"score": 0.42414650321006775, "chain_id": "38YMOXR4MUY2EBTUF2CXA1LSF98W65_1_8"}
{"score": 0.7059732675552368, "chain_id": "38YMOXR4MUY2EBTUF2CXA1LSF98W65_1_9"}
{"score": 0.9860095977783203, "chain_id": "3PPTZCWALQJZIOHJ5YA2FAW1S4FQZT_1_1"}
{"score": 0.9827760457992554, "chain_id": "3PPTZCWALQJZIOHJ5YA2FAW1S4FQZT_1_2"}
{"score": 0.9546597003936768, "chain_id": "3PPTZCWALQJZIOHJ5YA2FAW1S4FQZT_1_3"}
{"score": 0.9727162718772888, "chain_id": "3PPTZCWALQJZIOHJ5YA2FAW1S4FQZT_1_4"}
{"score": 0.9777206182479858, "chain_id": "3PPTZCWALQJZIOHJ5YA2FAW1S4FQZT_1_9"}
{"score": 0.6507756114006042, "chain_id": "3PPTZCWALQJZIOHJ5YA2FAW1S4FQZT_1_5"}
{"score": 0.22990374267101288, "chain_id": "3PPTZCWALQJZIOHJ5YA2FAW1S4FQZT_1_6"}
{"score": 0.23511077463626862, "chain_id": "3PPTZCWALQJZIOHJ5YA2FAW1S4FQZT_1_7"}
{"score": 0.10176905244588852, "chain_id": "3PPTZCWALQJZIOHJ5YA2FAW1S4FQZT_1_8"}
{"score": 0.24217934906482697, "chain_id": "3PPTZCWALQJZIOHJ5YA2FAW1S4FQZT_1_10"}
{"score": 0.9002193212509155, "chain_id": "34S6N1K2ZVI2061C77WZYHT2MM9LHZ_1_4"}
{"score": 0.9524802565574646, "chain_id": "34S6N1K2ZVI2061C77WZYHT2MM9LHZ_1_7"}
{"score": 0.4954172670841217, "chain_id": "34S6N1K2ZVI2061C77WZYHT2MM9LHZ_1_9"}
{"score": 0.35063499212265015, "chain_id": "34S6N1K2ZVI2061C77WZYHT2MM9LHZ_1_10"}
{"score": 0.9366441965103149, "chain_id": "34S6N1K2ZVI2061C77WZYHT2MM9LHZ_1_1"}
{"score": 0.922120213508606, "chain_id": "34S6N1K2ZVI2061C77WZYHT2MM9LHZ_1_2"}
{"score": 0.7347709536552429, "chain_id": "34S6N1K2ZVI2061C77WZYHT2MM9LHZ_1_3"}
{"score": 0.6957268118858337, "chain_id": "34S6N1K2ZVI2061C77WZYHT2MM9LHZ_1_5"}
{"score": 0.7563473582267761, "chain_id": "34S6N1K2ZVI2061C77WZYHT2MM9LHZ_1_6"}
{"score": 0.8856726884841919, "chain_id": "34S6N1K2ZVI2061C77WZYHT2MM9LHZ_1_8"}
{"score": 0.023804886266589165, "chain_id": "3DPNQGW4LLEQ59AA5W6EF921R4F647_1_1"}
{"score": 0.0299697145819664, "chain_id": "3DPNQGW4LLEQ59AA5W6EF921R4F647_1_2"}
{"score": 0.016257666051387787, "chain_id": "3DPNQGW4LLEQ59AA5W6EF921R4F647_1_3"}
{"score": 0.027964025735855103, "chain_id": "3DPNQGW4LLEQ59AA5W6EF921R4F647_1_4"}
{"score": 0.01809893362224102, "chain_id": "3DPNQGW4LLEQ59AA5W6EF921R4F647_1_5"}
{"score": 0.018206236883997917, "chain_id": "3DPNQGW4LLEQ59AA5W6EF921R4F647_1_6"}
{"score": 0.015062184073030949, "chain_id": "3DPNQGW4LLEQ59AA5W6EF921R4F647_1_7"}
{"score": 0.04434873163700104, "chain_id": "3DPNQGW4LLEQ59AA5W6EF921R4F647_1_8"}
{"score": 0.02278798818588257, "chain_id": "3DPNQGW4LLEQ59AA5W6EF921R4F647_1_9"}
{"score": 0.032850585877895355, "chain_id": "3DPNQGW4LLEQ59AA5W6EF921R4F647_1_10"}
{"score": 0.9499014019966125, "chain_id": "3A1COHJ8NJU7LZHTDINVTC7W2FOH89_1_1"}
{"score": 0.9270567893981934, "chain_id": "3A1COHJ8NJU7LZHTDINVTC7W2FOH89_1_2"}
{"score": 0.7670076489448547, "chain_id": "3A1COHJ8NJU7LZHTDINVTC7W2FOH89_1_3"}
{"score": 0.8805373311042786, "chain_id": "3A1COHJ8NJU7LZHTDINVTC7W2FOH89_1_4"}
{"score": 0.3004121780395508, "chain_id": "3A1COHJ8NJU7LZHTDINVTC7W2FOH89_1_5"}
{"score": 0.22883276641368866, "chain_id": "3A1COHJ8NJU7LZHTDINVTC7W2FOH89_1_6"}
{"score": 0.7336214184761047, "chain_id": "3A1COHJ8NJU7LZHTDINVTC7W2FOH89_1_7"}
{"score": 0.5775091052055359, "chain_id": "3A1COHJ8NJU7LZHTDINVTC7W2FOH89_1_8"}
{"score": 0.4110807180404663, "chain_id": "3A1COHJ8NJU7LZHTDINVTC7W2FOH89_1_9"}
{"score": 0.5984405279159546, "chain_id": "3A1COHJ8NJU7LZHTDINVTC7W2FOH89_1_10"}
{"score": 0.37075144052505493, "chain_id": "3DQQ64TANGKAOHBZUYB6G1C96NJPW3_1_2"}
{"score": 0.8532643914222717, "chain_id": "3DQQ64TANGKAOHBZUYB6G1C96NJPW3_1_4"}
{"score": 0.09351546317338943, "chain_id": "3DQQ64TANGKAOHBZUYB6G1C96NJPW3_1_1"}
{"score": 0.4047551155090332, "chain_id": "3DQQ64TANGKAOHBZUYB6G1C96NJPW3_1_3"}
{"score": 0.573059618473053, "chain_id": "3DQQ64TANGKAOHBZUYB6G1C96NJPW3_1_5"}
{"score": 0.6851472854614258, "chain_id": "3DQQ64TANGKAOHBZUYB6G1C96NJPW3_1_6"}
{"score": 0.471297025680542, "chain_id": "3DQQ64TANGKAOHBZUYB6G1C96NJPW3_1_7"}
{"score": 0.04384404793381691, "chain_id": "3DQQ64TANGKAOHBZUYB6G1C96NJPW3_1_8"}
{"score": 0.05722155421972275, "chain_id": "3DQQ64TANGKAOHBZUYB6G1C96NJPW3_1_9"}
{"score": 0.6226058006286621, "chain_id": "3DQQ64TANGKAOHBZUYB6G1C96NJPW3_1_10"}
{"score": 0.12018479406833649, "chain_id": "3TXMY6UCAENMAV69DKQU4CVGMDACQI_1_1"}
{"score": 0.07358956336975098, "chain_id": "3TXMY6UCAENMAV69DKQU4CVGMDACQI_1_2"}
{"score": 0.07444543391466141, "chain_id": "3TXMY6UCAENMAV69DKQU4CVGMDACQI_1_3"}
{"score": 0.21969127655029297, "chain_id": "3TXMY6UCAENMAV69DKQU4CVGMDACQI_1_4"}
{"score": 0.13876105844974518, "chain_id": "3TXMY6UCAENMAV69DKQU4CVGMDACQI_1_5"}
{"score": 0.04494957998394966, "chain_id": "3TXMY6UCAENMAV69DKQU4CVGMDACQI_1_6"}
{"score": 0.0481526143848896, "chain_id": "3TXMY6UCAENMAV69DKQU4CVGMDACQI_1_7"}
{"score": 0.048390455543994904, "chain_id": "3TXMY6UCAENMAV69DKQU4CVGMDACQI_1_8"}
{"score": 0.08433587104082108, "chain_id": "3TXMY6UCAENMAV69DKQU4CVGMDACQI_1_9"}
{"score": 0.10995706915855408, "chain_id": "3TXMY6UCAENMAV69DKQU4CVGMDACQI_1_10"}
{"score": 0.9917346239089966, "chain_id": "3YWRV122CSYCQLNDDHUUCRWM0O7U8A_1_1"}
{"score": 0.9876000881195068, "chain_id": "3YWRV122CSYCQLNDDHUUCRWM0O7U8A_1_2"}
{"score": 0.9763995409011841, "chain_id": "3YWRV122CSYCQLNDDHUUCRWM0O7U8A_1_7"}
{"score": 0.9543662071228027, "chain_id": "3YWRV122CSYCQLNDDHUUCRWM0O7U8A_1_8"}
{"score": 0.9758702516555786, "chain_id": "3YWRV122CSYCQLNDDHUUCRWM0O7U8A_1_9"}
{"score": 0.9808985590934753, "chain_id": "3YWRV122CSYCQLNDDHUUCRWM0O7U8A_1_3"}
{"score": 0.9639734625816345, "chain_id": "3YWRV122CSYCQLNDDHUUCRWM0O7U8A_1_4"}
{"score": 0.4902268648147583, "chain_id": "3YWRV122CSYCQLNDDHUUCRWM0O7U8A_1_5"}
{"score": 0.7731192111968994, "chain_id": "3YWRV122CSYCQLNDDHUUCRWM0O7U8A_1_6"}
{"score": 0.6811910271644592, "chain_id": "3YWRV122CSYCQLNDDHUUCRWM0O7U8A_1_10"}
{"score": 0.31360962986946106, "chain_id": "3DYGAII7PL754KFDIPC0OCUNI38PQ3_1_1"}
{"score": 0.9283091425895691, "chain_id": "3DYGAII7PL754KFDIPC0OCUNI38PQ3_1_3"}
{"score": 0.1025240495800972, "chain_id": "3DYGAII7PL754KFDIPC0OCUNI38PQ3_1_2"}
{"score": 0.16814810037612915, "chain_id": "3DYGAII7PL754KFDIPC0OCUNI38PQ3_1_4"}
{"score": 0.8818464279174805, "chain_id": "3DYGAII7PL754KFDIPC0OCUNI38PQ3_1_5"}
{"score": 0.03153638169169426, "chain_id": "3DYGAII7PL754KFDIPC0OCUNI38PQ3_1_6"}
{"score": 0.09551721066236496, "chain_id": "3DYGAII7PL754KFDIPC0OCUNI38PQ3_1_7"}
{"score": 0.027555814012885094, "chain_id": "3DYGAII7PL754KFDIPC0OCUNI38PQ3_1_8"}
{"score": 0.07692426443099976, "chain_id": "3DYGAII7PL754KFDIPC0OCUNI38PQ3_1_9"}
{"score": 0.19254674017429352, "chain_id": "3DYGAII7PL754KFDIPC0OCUNI38PQ3_1_10"}
{"score": 0.9577396512031555, "chain_id": "3CPLWGV3MOYZ90MEL8OMYSZ38SI9NS_1_1"}
{"score": 0.665280818939209, "chain_id": "3CPLWGV3MOYZ90MEL8OMYSZ38SI9NS_1_2"}
{"score": 0.9209901690483093, "chain_id": "3CPLWGV3MOYZ90MEL8OMYSZ38SI9NS_1_3"}
{"score": 0.7588183879852295, "chain_id": "3CPLWGV3MOYZ90MEL8OMYSZ38SI9NS_1_4"}
{"score": 0.07797693461179733, "chain_id": "3CPLWGV3MOYZ90MEL8OMYSZ38SI9NS_1_5"}
{"score": 0.8886011242866516, "chain_id": "3CPLWGV3MOYZ90MEL8OMYSZ38SI9NS_1_6"}
{"score": 0.19719047844409943, "chain_id": "3CPLWGV3MOYZ90MEL8OMYSZ38SI9NS_1_7"}
{"score": 0.10257453471422195, "chain_id": "3CPLWGV3MOYZ90MEL8OMYSZ38SI9NS_1_8"}
{"score": 0.08353136479854584, "chain_id": "3CPLWGV3MOYZ90MEL8OMYSZ38SI9NS_1_9"}
{"score": 0.15027911961078644, "chain_id": "3CPLWGV3MOYZ90MEL8OMYSZ38SI9NS_1_10"}
{"score": 0.24877136945724487, "chain_id": "3CPLWGV3MOYZ90MEL8OMYSZ38ZX9NL_1_5"}
{"score": 0.5674701929092407, "chain_id": "3CPLWGV3MOYZ90MEL8OMYSZ38ZX9NL_1_1"}
{"score": 0.5338695049285889, "chain_id": "3CPLWGV3MOYZ90MEL8OMYSZ38ZX9NL_1_2"}
{"score": 0.9316073656082153, "chain_id": "3CPLWGV3MOYZ90MEL8OMYSZ38ZX9NL_1_3"}
{"score": 0.24695853888988495, "chain_id": "3CPLWGV3MOYZ90MEL8OMYSZ38ZX9NL_1_4"}
{"score": 0.03892253711819649, "chain_id": "3CPLWGV3MOYZ90MEL8OMYSZ38ZX9NL_1_6"}
{"score": 0.36599770188331604, "chain_id": "3CPLWGV3MOYZ90MEL8OMYSZ38ZX9NL_1_7"}
{"score": 0.1910007894039154, "chain_id": "3CPLWGV3MOYZ90MEL8OMYSZ38ZX9NL_1_8"}
{"score": 0.032164182513952255, "chain_id": "3CPLWGV3MOYZ90MEL8OMYSZ38ZX9NL_1_9"}
{"score": 0.06206713989377022, "chain_id": "3CPLWGV3MOYZ90MEL8OMYSZ38ZX9NL_1_10"}
{"score": 0.9733274579048157, "chain_id": "3EF8EXOTT1UL15SY2XH1QF0314U1JS_1_1"}
{"score": 0.9851828217506409, "chain_id": "3EF8EXOTT1UL15SY2XH1QF0314U1JS_1_2"}
{"score": 0.7946199178695679, "chain_id": "3EF8EXOTT1UL15SY2XH1QF0314U1JS_1_3"}
{"score": 0.12681247293949127, "chain_id": "3EF8EXOTT1UL15SY2XH1QF0314U1JS_1_4"}
{"score": 0.45150530338287354, "chain_id": "3EF8EXOTT1UL15SY2XH1QF0314U1JS_1_5"}
{"score": 0.4340212941169739, "chain_id": "3EF8EXOTT1UL15SY2XH1QF0314U1JS_1_6"}
{"score": 0.4572664797306061, "chain_id": "3EF8EXOTT1UL15SY2XH1QF0314U1JS_1_7"}
{"score": 0.19881252944469452, "chain_id": "3EF8EXOTT1UL15SY2XH1QF0314U1JS_1_8"}
{"score": 0.21054424345493317, "chain_id": "3EF8EXOTT1UL15SY2XH1QF0314U1JS_1_9"}
{"score": 0.13089102506637573, "chain_id": "3EF8EXOTT1UL15SY2XH1QF0314U1JS_1_10"}
{"score": 0.9775230288505554, "chain_id": "3IJXV6UZ1XIDZZ79I9BGK53GTWXRIJ_1_1"}
{"score": 0.6237416863441467, "chain_id": "3IJXV6UZ1XIDZZ79I9BGK53GTWXRIJ_1_3"}
{"score": 0.8979552388191223, "chain_id": "3IJXV6UZ1XIDZZ79I9BGK53GTWXRIJ_1_2"}
{"score": 0.8316547870635986, "chain_id": "3IJXV6UZ1XIDZZ79I9BGK53GTWXRIJ_1_4"}
{"score": 0.47190871834754944, "chain_id": "3IJXV6UZ1XIDZZ79I9BGK53GTWXRIJ_1_5"}
{"score": 0.3419429659843445, "chain_id": "3IJXV6UZ1XIDZZ79I9BGK53GTWXRIJ_1_6"}
{"score": 0.6724469661712646, "chain_id": "3IJXV6UZ1XIDZZ79I9BGK53GTWXRIJ_1_7"}
{"score": 0.5168236494064331, "chain_id": "3IJXV6UZ1XIDZZ79I9BGK53GTWXRIJ_1_8"}
{"score": 0.24887171387672424, "chain_id": "3IJXV6UZ1XIDZZ79I9BGK53GTWXRIJ_1_9"}
{"score": 0.26503047347068787, "chain_id": "3IJXV6UZ1XIDZZ79I9BGK53GTWXRIJ_1_10"}
{"score": 0.9861202239990234, "chain_id": "3I3WADAZ9Q3YQYKEJXBI11U6DVJ5O6_1_1"}
{"score": 0.14043757319450378, "chain_id": "3I3WADAZ9Q3YQYKEJXBI11U6DVJ5O6_1_3"}
{"score": 0.02885049767792225, "chain_id": "3I3WADAZ9Q3YQYKEJXBI11U6DVJ5O6_1_6"}
{"score": 0.10330656170845032, "chain_id": "3I3WADAZ9Q3YQYKEJXBI11U6DVJ5O6_1_8"}
{"score": 0.09240715205669403, "chain_id": "3I3WADAZ9Q3YQYKEJXBI11U6DVJ5O6_1_9"}
{"score": 0.9698346257209778, "chain_id": "3I3WADAZ9Q3YQYKEJXBI11U6DVJ5O6_1_10"}
{"score": 0.06548668444156647, "chain_id": "3I3WADAZ9Q3YQYKEJXBI11U6DVJ5O6_1_2"}
{"score": 0.1809939593076706, "chain_id": "3I3WADAZ9Q3YQYKEJXBI11U6DVJ5O6_1_4"}
{"score": 0.7806289792060852, "chain_id": "3I3WADAZ9Q3YQYKEJXBI11U6DVJ5O6_1_5"}
{"score": 0.13911259174346924, "chain_id": "3I3WADAZ9Q3YQYKEJXBI11U6DVJ5O6_1_7"}
{"score": 0.3260151147842407, "chain_id": "34QN5IT0TZQWAZBXFAGANK8FIN608Q_1_2"}
{"score": 0.6053037643432617, "chain_id": "34QN5IT0TZQWAZBXFAGANK8FIN608Q_1_4"}
{"score": 0.43419525027275085, "chain_id": "34QN5IT0TZQWAZBXFAGANK8FIN608Q_1_1"}
{"score": 0.148785799741745, "chain_id": "34QN5IT0TZQWAZBXFAGANK8FIN608Q_1_3"}
{"score": 0.27875518798828125, "chain_id": "34QN5IT0TZQWAZBXFAGANK8FIN608Q_1_5"}
{"score": 0.06630444526672363, "chain_id": "34QN5IT0TZQWAZBXFAGANK8FIN608Q_1_6"}
{"score": 0.03946204483509064, "chain_id": "34QN5IT0TZQWAZBXFAGANK8FIN608Q_1_7"}
{"score": 0.4203043580055237, "chain_id": "34QN5IT0TZQWAZBXFAGANK8FIN608Q_1_8"}
{"score": 0.15700501203536987, "chain_id": "34QN5IT0TZQWAZBXFAGANK8FIN608Q_1_9"}
{"score": 0.06413762271404266, "chain_id": "34QN5IT0TZQWAZBXFAGANK8FIN608Q_1_10"}
{"score": 0.9871735572814941, "chain_id": "3X3OR7WPZZZ97V0J432TL403LR5L8K_1_1"}
{"score": 0.9677687883377075, "chain_id": "3X3OR7WPZZZ97V0J432TL403LR5L8K_1_4"}
{"score": 0.910638689994812, "chain_id": "3X3OR7WPZZZ97V0J432TL403LR5L8K_1_2"}
{"score": 0.8611313700675964, "chain_id": "3X3OR7WPZZZ97V0J432TL403LR5L8K_1_3"}
{"score": 0.2876832187175751, "chain_id": "3X3OR7WPZZZ97V0J432TL403LR5L8K_1_5"}
{"score": 0.3523065745830536, "chain_id": "3X3OR7WPZZZ97V0J432TL403LR5L8K_1_6"}
{"score": 0.15763480961322784, "chain_id": "3X3OR7WPZZZ97V0J432TL403LR5L8K_1_7"}
{"score": 0.0874781608581543, "chain_id": "3X3OR7WPZZZ97V0J432TL403LR5L8K_1_8"}
{"score": 0.3473472595214844, "chain_id": "3X3OR7WPZZZ97V0J432TL403LR5L8K_1_9"}
{"score": 0.0905049592256546, "chain_id": "3X3OR7WPZZZ97V0J432TL403LR5L8K_1_10"}
{"score": 0.04171469062566757, "chain_id": "3C5W7UE9CFPJSEJCCNF01GWLC84XMC_1_1"}
{"score": 0.05899648740887642, "chain_id": "3C5W7UE9CFPJSEJCCNF01GWLC84XMC_1_2"}
{"score": 0.10220953822135925, "chain_id": "3C5W7UE9CFPJSEJCCNF01GWLC84XMC_1_3"}
{"score": 0.13552622497081757, "chain_id": "3C5W7UE9CFPJSEJCCNF01GWLC84XMC_1_4"}
{"score": 0.8707379102706909, "chain_id": "3C5W7UE9CFPJSEJCCNF01GWLC84XMC_1_5"}
{"score": 0.2485654354095459, "chain_id": "3C5W7UE9CFPJSEJCCNF01GWLC84XMC_1_6"}
{"score": 0.1065327599644661, "chain_id": "3C5W7UE9CFPJSEJCCNF01GWLC84XMC_1_7"}
{"score": 0.048127904534339905, "chain_id": "3C5W7UE9CFPJSEJCCNF01GWLC84XMC_1_8"}
{"score": 0.036301903426647186, "chain_id": "3C5W7UE9CFPJSEJCCNF01GWLC84XMC_1_9"}
{"score": 0.05608236789703369, "chain_id": "3C5W7UE9CFPJSEJCCNF01GWLC84XMC_1_10"}
{"score": 0.9199439287185669, "chain_id": "333U7HK6I9EFT08AIQ1WRH1CQV5JDW_1_1"}
{"score": 0.9285637736320496, "chain_id": "333U7HK6I9EFT08AIQ1WRH1CQV5JDW_1_3"}
{"score": 0.6757284998893738, "chain_id": "333U7HK6I9EFT08AIQ1WRH1CQV5JDW_1_10"}
{"score": 0.8411576747894287, "chain_id": "333U7HK6I9EFT08AIQ1WRH1CQV5JDW_1_2"}
{"score": 0.22740276157855988, "chain_id": "333U7HK6I9EFT08AIQ1WRH1CQV5JDW_1_4"}
{"score": 0.5785476565361023, "chain_id": "333U7HK6I9EFT08AIQ1WRH1CQV5JDW_1_5"}
{"score": 0.7083947658538818, "chain_id": "333U7HK6I9EFT08AIQ1WRH1CQV5JDW_1_6"}
{"score": 0.749603271484375, "chain_id": "333U7HK6I9EFT08AIQ1WRH1CQV5JDW_1_7"}
{"score": 0.28459957242012024, "chain_id": "333U7HK6I9EFT08AIQ1WRH1CQV5JDW_1_8"}
{"score": 0.10015558451414108, "chain_id": "333U7HK6I9EFT08AIQ1WRH1CQV5JDW_1_9"}
{"score": 0.9725489616394043, "chain_id": "3LRKMWOKB5GIQ5FY3NK1JSYYD902ZD_1_1"}
{"score": 0.9568439722061157, "chain_id": "3LRKMWOKB5GIQ5FY3NK1JSYYD902ZD_1_2"}
{"score": 0.9702525734901428, "chain_id": "3LRKMWOKB5GIQ5FY3NK1JSYYD902ZD_1_4"}
{"score": 0.13737532496452332, "chain_id": "3LRKMWOKB5GIQ5FY3NK1JSYYD902ZD_1_7"}
{"score": 0.2102174013853073, "chain_id": "3LRKMWOKB5GIQ5FY3NK1JSYYD902ZD_1_8"}
{"score": 0.6282076835632324, "chain_id": "3LRKMWOKB5GIQ5FY3NK1JSYYD902ZD_1_3"}
{"score": 0.37347909808158875, "chain_id": "3LRKMWOKB5GIQ5FY3NK1JSYYD902ZD_1_5"}
{"score": 0.8609185814857483, "chain_id": "3LRKMWOKB5GIQ5FY3NK1JSYYD902ZD_1_6"}
{"score": 0.02804936282336712, "chain_id": "3LRKMWOKB5GIQ5FY3NK1JSYYD902ZD_1_9"}
{"score": 0.05640992894768715, "chain_id": "3LRKMWOKB5GIQ5FY3NK1JSYYD902ZD_1_10"}
{"score": 0.9893503785133362, "chain_id": "3VAR3R6G1P0HDG3GHVILDL4XFDN8OY_1_1"}
{"score": 0.9889606833457947, "chain_id": "3VAR3R6G1P0HDG3GHVILDL4XFDN8OY_1_2"}
{"score": 0.7754523754119873, "chain_id": "3VAR3R6G1P0HDG3GHVILDL4XFDN8OY_1_3"}
{"score": 0.8167638778686523, "chain_id": "3VAR3R6G1P0HDG3GHVILDL4XFDN8OY_1_4"}
{"score": 0.6521157622337341, "chain_id": "3VAR3R6G1P0HDG3GHVILDL4XFDN8OY_1_8"}
{"score": 0.11133435368537903, "chain_id": "3VAR3R6G1P0HDG3GHVILDL4XFDN8OY_1_5"}
{"score": 0.08610664308071136, "chain_id": "3VAR3R6G1P0HDG3GHVILDL4XFDN8OY_1_6"}
{"score": 0.04439215734601021, "chain_id": "3VAR3R6G1P0HDG3GHVILDL4XFDN8OY_1_7"}
{"score": 0.1658153235912323, "chain_id": "3VAR3R6G1P0HDG3GHVILDL4XFDN8OY_1_9"}
{"score": 0.027444448322057724, "chain_id": "3VAR3R6G1P0HDG3GHVILDL4XFDN8OY_1_10"}
{"score": 0.990253210067749, "chain_id": "3IXQG4FA2TXX8RXHIIJD7XZ9R3LB9M_1_1"}
{"score": 0.9855931401252747, "chain_id": "3IXQG4FA2TXX8RXHIIJD7XZ9R3LB9M_1_2"}
{"score": 0.9500805735588074, "chain_id": "3IXQG4FA2TXX8RXHIIJD7XZ9R3LB9M_1_4"}
{"score": 0.2132723331451416, "chain_id": "3IXQG4FA2TXX8RXHIIJD7XZ9R3LB9M_1_7"}
{"score": 0.8321226239204407, "chain_id": "3IXQG4FA2TXX8RXHIIJD7XZ9R3LB9M_1_8"}
{"score": 0.6634834408760071, "chain_id": "3IXQG4FA2TXX8RXHIIJD7XZ9R3LB9M_1_3"}
{"score": 0.8306535482406616, "chain_id": "3IXQG4FA2TXX8RXHIIJD7XZ9R3LB9M_1_5"}
{"score": 0.6823092103004456, "chain_id": "3IXQG4FA2TXX8RXHIIJD7XZ9R3LB9M_1_6"}
{"score": 0.9869462847709656, "chain_id": "3IXQG4FA2TXX8RXHIIJD7XZ9R3LB9M_1_9"}
{"score": 0.9706674218177795, "chain_id": "3IXQG4FA2TXX8RXHIIJD7XZ9R3LB9M_1_10"}
{"score": 0.47781410813331604, "chain_id": "31LM9EDVOLROFCZN7KFZNMD64A2JNV_1_10"}
{"score": 0.02070550248026848, "chain_id": "31LM9EDVOLROFCZN7KFZNMD64A2JNV_1_1"}
{"score": 0.023775722831487656, "chain_id": "31LM9EDVOLROFCZN7KFZNMD64A2JNV_1_2"}
{"score": 0.13220183551311493, "chain_id": "31LM9EDVOLROFCZN7KFZNMD64A2JNV_1_3"}
{"score": 0.10709592700004578, "chain_id": "31LM9EDVOLROFCZN7KFZNMD64A2JNV_1_4"}
{"score": 0.09337398409843445, "chain_id": "31LM9EDVOLROFCZN7KFZNMD64A2JNV_1_5"}
{"score": 0.08553724735975266, "chain_id": "31LM9EDVOLROFCZN7KFZNMD64A2JNV_1_6"}
{"score": 0.03728407621383667, "chain_id": "31LM9EDVOLROFCZN7KFZNMD64A2JNV_1_7"}
{"score": 0.044489409774541855, "chain_id": "31LM9EDVOLROFCZN7KFZNMD64A2JNV_1_8"}
{"score": 0.02002808265388012, "chain_id": "31LM9EDVOLROFCZN7KFZNMD64A2JNV_1_9"}
{"score": 0.9897879362106323, "chain_id": "3GU1KF0O4I0I0EDOZ7FATNZOXXZBPU_1_1"}
{"score": 0.9889049530029297, "chain_id": "3GU1KF0O4I0I0EDOZ7FATNZOXXZBPU_1_2"}
{"score": 0.9908676147460938, "chain_id": "3GU1KF0O4I0I0EDOZ7FATNZOXXZBPU_1_3"}
{"score": 0.9908612966537476, "chain_id": "3GU1KF0O4I0I0EDOZ7FATNZOXXZBPU_1_4"}
{"score": 0.5854811072349548, "chain_id": "3GU1KF0O4I0I0EDOZ7FATNZOXXZBPU_1_8"}
{"score": 0.7598505616188049, "chain_id": "3GU1KF0O4I0I0EDOZ7FATNZOXXZBPU_1_5"}
{"score": 0.4634622633457184, "chain_id": "3GU1KF0O4I0I0EDOZ7FATNZOXXZBPU_1_6"}
{"score": 0.5650063753128052, "chain_id": "3GU1KF0O4I0I0EDOZ7FATNZOXXZBPU_1_7"}
{"score": 0.45635056495666504, "chain_id": "3GU1KF0O4I0I0EDOZ7FATNZOXXZBPU_1_9"}
{"score": 0.4338456988334656, "chain_id": "3GU1KF0O4I0I0EDOZ7FATNZOXXZBPU_1_10"}
{"score": 0.9909544587135315, "chain_id": "323Q6SJS8IFG0ERGLWT134OIODRFH1_1_1"}
{"score": 0.690481960773468, "chain_id": "323Q6SJS8IFG0ERGLWT134OIODRFH1_1_2"}
{"score": 0.5944382548332214, "chain_id": "323Q6SJS8IFG0ERGLWT134OIODRFH1_1_4"}
{"score": 0.3164958357810974, "chain_id": "323Q6SJS8IFG0ERGLWT134OIODRFH1_1_3"}
{"score": 0.057061828672885895, "chain_id": "323Q6SJS8IFG0ERGLWT134OIODRFH1_1_5"}
{"score": 0.05887964740395546, "chain_id": "323Q6SJS8IFG0ERGLWT134OIODRFH1_1_6"}
{"score": 0.028464408591389656, "chain_id": "323Q6SJS8IFG0ERGLWT134OIODRFH1_1_7"}
{"score": 0.01696067862212658, "chain_id": "323Q6SJS8IFG0ERGLWT134OIODRFH1_1_8"}
{"score": 0.014493217691779137, "chain_id": "323Q6SJS8IFG0ERGLWT134OIODRFH1_1_9"}
{"score": 0.03918904811143875, "chain_id": "323Q6SJS8IFG0ERGLWT134OIODRFH1_1_10"}
{"score": 0.8290318846702576, "chain_id": "320DUZ38G7LI5KI1KG24X24923MGJZ_1_1"}
{"score": 0.25071772933006287, "chain_id": "320DUZ38G7LI5KI1KG24X24923MGJZ_1_3"}
{"score": 0.9733168482780457, "chain_id": "320DUZ38G7LI5KI1KG24X24923MGJZ_1_4"}
{"score": 0.08237729966640472, "chain_id": "320DUZ38G7LI5KI1KG24X24923MGJZ_1_2"}
{"score": 0.046678099781274796, "chain_id": "320DUZ38G7LI5KI1KG24X24923MGJZ_1_5"}
{"score": 0.17900718748569489, "chain_id": "320DUZ38G7LI5KI1KG24X24923MGJZ_1_6"}
{"score": 0.040459949523210526, "chain_id": "320DUZ38G7LI5KI1KG24X24923MGJZ_1_7"}
{"score": 0.0494520477950573, "chain_id": "320DUZ38G7LI5KI1KG24X24923MGJZ_1_8"}
{"score": 0.03315695375204086, "chain_id": "320DUZ38G7LI5KI1KG24X24923MGJZ_1_9"}
{"score": 0.050434961915016174, "chain_id": "320DUZ38G7LI5KI1KG24X24923MGJZ_1_10"}
{"score": 0.032915398478507996, "chain_id": "33OOO72IVHKZ2BY1UOKP9H634E8TCY_1_4"}
{"score": 0.0793524757027626, "chain_id": "33OOO72IVHKZ2BY1UOKP9H634E8TCY_1_1"}
{"score": 0.14169690012931824, "chain_id": "33OOO72IVHKZ2BY1UOKP9H634E8TCY_1_2"}
{"score": 0.09563834965229034, "chain_id": "33OOO72IVHKZ2BY1UOKP9H634E8TCY_1_3"}
{"score": 0.023879539221525192, "chain_id": "33OOO72IVHKZ2BY1UOKP9H634E8TCY_1_5"}
{"score": 0.01635635271668434, "chain_id": "33OOO72IVHKZ2BY1UOKP9H634E8TCY_1_6"}
{"score": 0.0801650658249855, "chain_id": "33OOO72IVHKZ2BY1UOKP9H634E8TCY_1_7"}
{"score": 0.0974026620388031, "chain_id": "33OOO72IVHKZ2BY1UOKP9H634E8TCY_1_8"}
{"score": 0.03169123828411102, "chain_id": "33OOO72IVHKZ2BY1UOKP9H634E8TCY_1_9"}
{"score": 0.5168049335479736, "chain_id": "33OOO72IVHKZ2BY1UOKP9H634E8TCY_1_10"}
{"score": 0.41471007466316223, "chain_id": "3NGI5ARFTT4HNGVWXAMLNBMFBF51PM_1_1"}
{"score": 0.5376397371292114, "chain_id": "3NGI5ARFTT4HNGVWXAMLNBMFBF51PM_1_2"}
{"score": 0.5782523155212402, "chain_id": "3NGI5ARFTT4HNGVWXAMLNBMFBF51PM_1_3"}
{"score": 0.5214086771011353, "chain_id": "3NGI5ARFTT4HNGVWXAMLNBMFBF51PM_1_4"}
{"score": 0.15681111812591553, "chain_id": "3NGI5ARFTT4HNGVWXAMLNBMFBF51PM_1_5"}
{"score": 0.058484796434640884, "chain_id": "3NGI5ARFTT4HNGVWXAMLNBMFBF51PM_1_6"}
{"score": 0.054213620722293854, "chain_id": "3NGI5ARFTT4HNGVWXAMLNBMFBF51PM_1_7"}
{"score": 0.05692166090011597, "chain_id": "3NGI5ARFTT4HNGVWXAMLNBMFBF51PM_1_8"}
{"score": 0.025510625913739204, "chain_id": "3NGI5ARFTT4HNGVWXAMLNBMFBF51PM_1_9"}
{"score": 0.03493021801114082, "chain_id": "3NGI5ARFTT4HNGVWXAMLNBMFBF51PM_1_10"}
{"score": 0.11750777810811996, "chain_id": "31HQ4X3T3S9RQFFSI18Y2V04XH3LS3_1_4"}
{"score": 0.17913474142551422, "chain_id": "31HQ4X3T3S9RQFFSI18Y2V04XH3LS3_1_1"}
{"score": 0.6298876404762268, "chain_id": "31HQ4X3T3S9RQFFSI18Y2V04XH3LS3_1_2"}
{"score": 0.6251700520515442, "chain_id": "31HQ4X3T3S9RQFFSI18Y2V04XH3LS3_1_3"}
{"score": 0.10640515387058258, "chain_id": "31HQ4X3T3S9RQFFSI18Y2V04XH3LS3_1_5"}
{"score": 0.08433746546506882, "chain_id": "31HQ4X3T3S9RQFFSI18Y2V04XH3LS3_1_6"}
{"score": 0.06434427201747894, "chain_id": "31HQ4X3T3S9RQFFSI18Y2V04XH3LS3_1_7"}
{"score": 0.318341463804245, "chain_id": "31HQ4X3T3S9RQFFSI18Y2V04XH3LS3_1_8"}
{"score": 0.03064829483628273, "chain_id": "31HQ4X3T3S9RQFFSI18Y2V04XH3LS3_1_9"}
{"score": 0.22251766920089722, "chain_id": "31HQ4X3T3S9RQFFSI18Y2V04XH3LS3_1_10"}
{"score": 0.18276262283325195, "chain_id": "39KFRKBFINUWSMUYUZGFCYSZ9D0OY9_1_1"}
{"score": 0.6668729782104492, "chain_id": "39KFRKBFINUWSMUYUZGFCYSZ9D0OY9_1_2"}
{"score": 0.4677380919456482, "chain_id": "39KFRKBFINUWSMUYUZGFCYSZ9D0OY9_1_3"}
{"score": 0.10917705297470093, "chain_id": "39KFRKBFINUWSMUYUZGFCYSZ9D0OY9_1_4"}
{"score": 0.09090514481067657, "chain_id": "39KFRKBFINUWSMUYUZGFCYSZ9D0OY9_1_5"}
{"score": 0.016162460669875145, "chain_id": "39KFRKBFINUWSMUYUZGFCYSZ9D0OY9_1_6"}
{"score": 0.07869858294725418, "chain_id": "39KFRKBFINUWSMUYUZGFCYSZ9D0OY9_1_7"}
{"score": 0.0572570264339447, "chain_id": "39KFRKBFINUWSMUYUZGFCYSZ9D0OY9_1_8"}
{"score": 0.05109001696109772, "chain_id": "39KFRKBFINUWSMUYUZGFCYSZ9D0OY9_1_9"}
{"score": 0.10248950123786926, "chain_id": "39KFRKBFINUWSMUYUZGFCYSZ9D0OY9_1_10"}
{"score": 0.04413823038339615, "chain_id": "3MTMREQS4VH31D5X5FT9Q6NE9RYAW5_1_2"}
{"score": 0.09357275068759918, "chain_id": "3MTMREQS4VH31D5X5FT9Q6NE9RYAW5_1_1"}
{"score": 0.039810661226511, "chain_id": "3MTMREQS4VH31D5X5FT9Q6NE9RYAW5_1_3"}
{"score": 0.02803533896803856, "chain_id": "3MTMREQS4VH31D5X5FT9Q6NE9RYAW5_1_4"}
{"score": 0.2630555331707001, "chain_id": "3MTMREQS4VH31D5X5FT9Q6NE9RYAW5_1_5"}
{"score": 0.015978842973709106, "chain_id": "3MTMREQS4VH31D5X5FT9Q6NE9RYAW5_1_6"}
{"score": 0.012813454493880272, "chain_id": "3MTMREQS4VH31D5X5FT9Q6NE9RYAW5_1_7"}
{"score": 0.01729281060397625, "chain_id": "3MTMREQS4VH31D5X5FT9Q6NE9RYAW5_1_8"}
{"score": 0.03130624070763588, "chain_id": "3MTMREQS4VH31D5X5FT9Q6NE9RYAW5_1_9"}
{"score": 0.031755391508340836, "chain_id": "3MTMREQS4VH31D5X5FT9Q6NE9RYAW5_1_10"}
{"score": 0.26784324645996094, "chain_id": "3G0WWMR1UVJ51Z302AZ8KNPSFZRQNB_1_1"}
{"score": 0.9891402721405029, "chain_id": "3G0WWMR1UVJ51Z302AZ8KNPSFZRQNB_1_5"}
{"score": 0.3929615616798401, "chain_id": "3G0WWMR1UVJ51Z302AZ8KNPSFZRQNB_1_6"}
{"score": 0.9609915614128113, "chain_id": "3G0WWMR1UVJ51Z302AZ8KNPSFZRQNB_1_10"}
{"score": 0.0775856301188469, "chain_id": "3G0WWMR1UVJ51Z302AZ8KNPSFZRQNB_1_2"}
{"score": 0.8234437704086304, "chain_id": "3G0WWMR1UVJ51Z302AZ8KNPSFZRQNB_1_3"}
{"score": 0.19200016558170319, "chain_id": "3G0WWMR1UVJ51Z302AZ8KNPSFZRQNB_1_4"}
{"score": 0.08575283735990524, "chain_id": "3G0WWMR1UVJ51Z302AZ8KNPSFZRQNB_1_7"}
{"score": 0.07934094220399857, "chain_id": "3G0WWMR1UVJ51Z302AZ8KNPSFZRQNB_1_8"}
{"score": 0.06219494715332985, "chain_id": "3G0WWMR1UVJ51Z302AZ8KNPSFZRQNB_1_9"}
{"score": 0.0505823940038681, "chain_id": "3QAVNHZ3EM3NQJTY11M7HV6Y9ATLA0_1_1"}
{"score": 0.9324707984924316, "chain_id": "3QAVNHZ3EM3NQJTY11M7HV6Y9ATLA0_1_2"}
{"score": 0.04492241516709328, "chain_id": "3QAVNHZ3EM3NQJTY11M7HV6Y9ATLA0_1_3"}
{"score": 0.26808837056159973, "chain_id": "3QAVNHZ3EM3NQJTY11M7HV6Y9ATLA0_1_4"}
{"score": 0.03918484225869179, "chain_id": "3QAVNHZ3EM3NQJTY11M7HV6Y9ATLA0_1_5"}
{"score": 0.11620138585567474, "chain_id": "3QAVNHZ3EM3NQJTY11M7HV6Y9ATLA0_1_6"}
{"score": 0.05093987286090851, "chain_id": "3QAVNHZ3EM3NQJTY11M7HV6Y9ATLA0_1_7"}
{"score": 0.03090607561171055, "chain_id": "3QAVNHZ3EM3NQJTY11M7HV6Y9ATLA0_1_8"}
{"score": 0.032010868191719055, "chain_id": "3QAVNHZ3EM3NQJTY11M7HV6Y9ATLA0_1_9"}
{"score": 0.06153783202171326, "chain_id": "3QAVNHZ3EM3NQJTY11M7HV6Y9ATLA0_1_10"}
{"score": 0.9920340180397034, "chain_id": "3GLB5JMZFXU52YI9AKGTU49WY3BGDC_1_1"}
{"score": 0.9889084696769714, "chain_id": "3GLB5JMZFXU52YI9AKGTU49WY3BGDC_1_3"}
{"score": 0.9929897785186768, "chain_id": "3GLB5JMZFXU52YI9AKGTU49WY3BGDC_1_2"}
{"score": 0.9778028726577759, "chain_id": "3GLB5JMZFXU52YI9AKGTU49WY3BGDC_1_4"}
{"score": 0.9167996048927307, "chain_id": "3GLB5JMZFXU52YI9AKGTU49WY3BGDC_1_5"}
{"score": 0.040348902344703674, "chain_id": "3GLB5JMZFXU52YI9AKGTU49WY3BGDC_1_6"}
{"score": 0.3250361979007721, "chain_id": "3GLB5JMZFXU52YI9AKGTU49WY3BGDC_1_7"}
{"score": 0.030523071065545082, "chain_id": "3GLB5JMZFXU52YI9AKGTU49WY3BGDC_1_8"}
{"score": 0.15418006479740143, "chain_id": "3GLB5JMZFXU52YI9AKGTU49WY3BGDC_1_9"}
{"score": 0.15313179790973663, "chain_id": "3GLB5JMZFXU52YI9AKGTU49WY3BGDC_1_10"}
{"score": 0.09302506595849991, "chain_id": "32N49TQG3GHQMO5SF5OD44401NVVAO_1_1"}
{"score": 0.9861090779304504, "chain_id": "32N49TQG3GHQMO5SF5OD44401NVVAO_1_2"}
{"score": 0.17717593908309937, "chain_id": "32N49TQG3GHQMO5SF5OD44401NVVAO_1_5"}
{"score": 0.5954389572143555, "chain_id": "32N49TQG3GHQMO5SF5OD44401NVVAO_1_6"}
{"score": 0.24494995176792145, "chain_id": "32N49TQG3GHQMO5SF5OD44401NVVAO_1_7"}
{"score": 0.21951650083065033, "chain_id": "32N49TQG3GHQMO5SF5OD44401NVVAO_1_8"}
{"score": 0.9518452286720276, "chain_id": "32N49TQG3GHQMO5SF5OD44401NVVAO_1_9"}
{"score": 0.09815254807472229, "chain_id": "32N49TQG3GHQMO5SF5OD44401NVVAO_1_3"}
{"score": 0.04857126623392105, "chain_id": "32N49TQG3GHQMO5SF5OD44401NVVAO_1_4"}
{"score": 0.02302345633506775, "chain_id": "32N49TQG3GHQMO5SF5OD44401NVVAO_1_10"}
{"score": 0.6487894058227539, "chain_id": "3X4JMASXCM8FCX94IM0KEMYGPD4B0G_1_2"}
{"score": 0.8404378890991211, "chain_id": "3X4JMASXCM8FCX94IM0KEMYGPD4B0G_1_4"}
{"score": 0.9912046790122986, "chain_id": "3X4JMASXCM8FCX94IM0KEMYGPD4B0G_1_1"}
{"score": 0.669910192489624, "chain_id": "3X4JMASXCM8FCX94IM0KEMYGPD4B0G_1_3"}
{"score": 0.044401347637176514, "chain_id": "3X4JMASXCM8FCX94IM0KEMYGPD4B0G_1_5"}
{"score": 0.06056717410683632, "chain_id": "3X4JMASXCM8FCX94IM0KEMYGPD4B0G_1_6"}
{"score": 0.19917038083076477, "chain_id": "3X4JMASXCM8FCX94IM0KEMYGPD4B0G_1_7"}
{"score": 0.48971638083457947, "chain_id": "3X4JMASXCM8FCX94IM0KEMYGPD4B0G_1_8"}
{"score": 0.04266708344221115, "chain_id": "3X4JMASXCM8FCX94IM0KEMYGPD4B0G_1_9"}
{"score": 0.027303652837872505, "chain_id": "3X4JMASXCM8FCX94IM0KEMYGPD4B0G_1_10"}
{"score": 0.21955005824565887, "chain_id": "3HMVI3QICJRBWUNXOXI402FRFQ3Y1K_1_1"}
{"score": 0.5279000401496887, "chain_id": "3HMVI3QICJRBWUNXOXI402FRFQ3Y1K_1_2"}
{"score": 0.1029423177242279, "chain_id": "3HMVI3QICJRBWUNXOXI402FRFQ3Y1K_1_3"}
{"score": 0.11914025992155075, "chain_id": "3HMVI3QICJRBWUNXOXI402FRFQ3Y1K_1_4"}
{"score": 0.9144681096076965, "chain_id": "3HMVI3QICJRBWUNXOXI402FRFQ3Y1K_1_5"}
{"score": 0.14506489038467407, "chain_id": "3HMVI3QICJRBWUNXOXI402FRFQ3Y1K_1_6"}
{"score": 0.8489692807197571, "chain_id": "3HMVI3QICJRBWUNXOXI402FRFQ3Y1K_1_7"}
{"score": 0.2875129282474518, "chain_id": "3HMVI3QICJRBWUNXOXI402FRFQ3Y1K_1_8"}
{"score": 0.9634696841239929, "chain_id": "3HMVI3QICJRBWUNXOXI402FRFQ3Y1K_1_9"}
{"score": 0.8963300585746765, "chain_id": "3HMVI3QICJRBWUNXOXI402FRFQ3Y1K_1_10"}
{"score": 0.9918265342712402, "chain_id": "3JAOYWH7VI39L0JT9V87L0VE5TWL9A_1_1"}
{"score": 0.9928964972496033, "chain_id": "3JAOYWH7VI39L0JT9V87L0VE5TWL9A_1_2"}
{"score": 0.9878752827644348, "chain_id": "3JAOYWH7VI39L0JT9V87L0VE5TWL9A_1_3"}
{"score": 0.9821210503578186, "chain_id": "3JAOYWH7VI39L0JT9V87L0VE5TWL9A_1_4"}
{"score": 0.05133301392197609, "chain_id": "3JAOYWH7VI39L0JT9V87L0VE5TWL9A_1_5"}
{"score": 0.41314107179641724, "chain_id": "3JAOYWH7VI39L0JT9V87L0VE5TWL9A_1_6"}
{"score": 0.04037247970700264, "chain_id": "3JAOYWH7VI39L0JT9V87L0VE5TWL9A_1_7"}
{"score": 0.02462221309542656, "chain_id": "3JAOYWH7VI39L0JT9V87L0VE5TWL9A_1_8"}
{"score": 0.14799456298351288, "chain_id": "3JAOYWH7VI39L0JT9V87L0VE5TWL9A_1_9"}
{"score": 0.22681234776973724, "chain_id": "3JAOYWH7VI39L0JT9V87L0VE5TWL9A_1_10"}
{"score": 0.9183309674263, "chain_id": "3K5TEWLKGVA5S6OQRTGQL2SJFL8VIH_1_2"}
{"score": 0.09591341763734818, "chain_id": "3K5TEWLKGVA5S6OQRTGQL2SJFL8VIH_1_9"}
{"score": 0.07485329359769821, "chain_id": "3K5TEWLKGVA5S6OQRTGQL2SJFL8VIH_1_1"}
{"score": 0.5270535349845886, "chain_id": "3K5TEWLKGVA5S6OQRTGQL2SJFL8VIH_1_3"}
{"score": 0.8446633219718933, "chain_id": "3K5TEWLKGVA5S6OQRTGQL2SJFL8VIH_1_4"}
{"score": 0.6069665551185608, "chain_id": "3K5TEWLKGVA5S6OQRTGQL2SJFL8VIH_1_5"}
{"score": 0.9731760025024414, "chain_id": "3K5TEWLKGVA5S6OQRTGQL2SJFL8VIH_1_6"}
{"score": 0.12873773276805878, "chain_id": "3K5TEWLKGVA5S6OQRTGQL2SJFL8VIH_1_7"}
{"score": 0.270154744386673, "chain_id": "3K5TEWLKGVA5S6OQRTGQL2SJFL8VIH_1_8"}
{"score": 0.05144959315657616, "chain_id": "3K5TEWLKGVA5S6OQRTGQL2SJFL8VIH_1_10"}
{"score": 0.978185772895813, "chain_id": "39L1G8WVWQQAGRQ9ZCPEA8JE71F31F_1_3"}
{"score": 0.8346279263496399, "chain_id": "39L1G8WVWQQAGRQ9ZCPEA8JE71F31F_1_9"}
{"score": 0.9933872818946838, "chain_id": "39L1G8WVWQQAGRQ9ZCPEA8JE71F31F_1_1"}
{"score": 0.06669996678829193, "chain_id": "39L1G8WVWQQAGRQ9ZCPEA8JE71F31F_1_2"}
{"score": 0.11417220532894135, "chain_id": "39L1G8WVWQQAGRQ9ZCPEA8JE71F31F_1_4"}
{"score": 0.22433657944202423, "chain_id": "39L1G8WVWQQAGRQ9ZCPEA8JE71F31F_1_5"}
{"score": 0.19893039762973785, "chain_id": "39L1G8WVWQQAGRQ9ZCPEA8JE71F31F_1_6"}
{"score": 0.23939955234527588, "chain_id": "39L1G8WVWQQAGRQ9ZCPEA8JE71F31F_1_7"}
{"score": 0.21735402941703796, "chain_id": "39L1G8WVWQQAGRQ9ZCPEA8JE71F31F_1_8"}
{"score": 0.04374060779809952, "chain_id": "39L1G8WVWQQAGRQ9ZCPEA8JE71F31F_1_10"}
{"score": 0.6115992665290833, "chain_id": "39N5ACM9HEMZCLYR1N1E2H4Y9U19PH_1_1"}
{"score": 0.8077930212020874, "chain_id": "39N5ACM9HEMZCLYR1N1E2H4Y9U19PH_1_2"}
{"score": 0.9902570247650146, "chain_id": "39N5ACM9HEMZCLYR1N1E2H4Y9U19PH_1_4"}
{"score": 0.6228774189949036, "chain_id": "39N5ACM9HEMZCLYR1N1E2H4Y9U19PH_1_5"}
{"score": 0.600833535194397, "chain_id": "39N5ACM9HEMZCLYR1N1E2H4Y9U19PH_1_8"}
{"score": 0.07145863771438599, "chain_id": "39N5ACM9HEMZCLYR1N1E2H4Y9U19PH_1_3"}
{"score": 0.8507488369941711, "chain_id": "39N5ACM9HEMZCLYR1N1E2H4Y9U19PH_1_6"}
{"score": 0.9399642944335938, "chain_id": "39N5ACM9HEMZCLYR1N1E2H4Y9U19PH_1_7"}
{"score": 0.3519127070903778, "chain_id": "39N5ACM9HEMZCLYR1N1E2H4Y9U19PH_1_9"}
{"score": 0.7280954122543335, "chain_id": "39N5ACM9HEMZCLYR1N1E2H4Y9U19PH_1_10"}
{"score": 0.026317913085222244, "chain_id": "3EJPLAJKEMF686YZQPW495FAQG6Z6W_1_1"}
{"score": 0.06350862234830856, "chain_id": "3EJPLAJKEMF686YZQPW495FAQG6Z6W_1_2"}
{"score": 0.05836887285113335, "chain_id": "3EJPLAJKEMF686YZQPW495FAQG6Z6W_1_3"}
{"score": 0.0122615871950984, "chain_id": "3EJPLAJKEMF686YZQPW495FAQG6Z6W_1_4"}
{"score": 0.04185209423303604, "chain_id": "3EJPLAJKEMF686YZQPW495FAQG6Z6W_1_5"}
{"score": 0.024310512468218803, "chain_id": "3EJPLAJKEMF686YZQPW495FAQG6Z6W_1_6"}
{"score": 0.038868021219968796, "chain_id": "3EJPLAJKEMF686YZQPW495FAQG6Z6W_1_7"}
{"score": 0.04813770949840546, "chain_id": "3EJPLAJKEMF686YZQPW495FAQG6Z6W_1_8"}
{"score": 0.021595342084765434, "chain_id": "3EJPLAJKEMF686YZQPW495FAQG6Z6W_1_9"}
{"score": 0.054750844836235046, "chain_id": "3EJPLAJKEMF686YZQPW495FAQG6Z6W_1_10"}
{"score": 0.7074708342552185, "chain_id": "3CTOC39K37PZCR70RDYARPRG216J73_1_2"}
{"score": 0.034456051886081696, "chain_id": "3CTOC39K37PZCR70RDYARPRG216J73_1_1"}
{"score": 0.14386408030986786, "chain_id": "3CTOC39K37PZCR70RDYARPRG216J73_1_3"}
{"score": 0.05281465873122215, "chain_id": "3CTOC39K37PZCR70RDYARPRG216J73_1_4"}
{"score": 0.8742287755012512, "chain_id": "3CTOC39K37PZCR70RDYARPRG216J73_1_5"}
{"score": 0.250139981508255, "chain_id": "3CTOC39K37PZCR70RDYARPRG216J73_1_6"}
{"score": 0.02399420738220215, "chain_id": "3CTOC39K37PZCR70RDYARPRG216J73_1_7"}
{"score": 0.011619958095252514, "chain_id": "3CTOC39K37PZCR70RDYARPRG216J73_1_8"}
{"score": 0.8500914573669434, "chain_id": "3CTOC39K37PZCR70RDYARPRG216J73_1_9"}
{"score": 0.021027622744441032, "chain_id": "3CTOC39K37PZCR70RDYARPRG216J73_1_10"}
{"score": 0.9414957761764526, "chain_id": "30MVJZJNHMC3QAVT6AWU5LIMXT9J9Y_1_1"}
{"score": 0.03881162032485008, "chain_id": "30MVJZJNHMC3QAVT6AWU5LIMXT9J9Y_1_2"}
{"score": 0.059177152812480927, "chain_id": "30MVJZJNHMC3QAVT6AWU5LIMXT9J9Y_1_3"}
{"score": 0.19965504109859467, "chain_id": "30MVJZJNHMC3QAVT6AWU5LIMXT9J9Y_1_4"}
{"score": 0.5170784592628479, "chain_id": "30MVJZJNHMC3QAVT6AWU5LIMXT9J9Y_1_5"}
{"score": 0.019796593114733696, "chain_id": "30MVJZJNHMC3QAVT6AWU5LIMXT9J9Y_1_6"}
{"score": 0.14471064507961273, "chain_id": "30MVJZJNHMC3QAVT6AWU5LIMXT9J9Y_1_7"}
{"score": 0.13068874180316925, "chain_id": "30MVJZJNHMC3QAVT6AWU5LIMXT9J9Y_1_8"}
{"score": 0.09943398833274841, "chain_id": "30MVJZJNHMC3QAVT6AWU5LIMXT9J9Y_1_9"}
{"score": 0.033432163298130035, "chain_id": "30MVJZJNHMC3QAVT6AWU5LIMXT9J9Y_1_10"}
{"score": 0.06482626497745514, "chain_id": "31LVTDXBL79FP0FF3C8TCLV8762LRB_1_1"}
{"score": 0.14726126194000244, "chain_id": "31LVTDXBL79FP0FF3C8TCLV8762LRB_1_2"}
{"score": 0.04978402331471443, "chain_id": "31LVTDXBL79FP0FF3C8TCLV8762LRB_1_3"}
{"score": 0.028193844482302666, "chain_id": "31LVTDXBL79FP0FF3C8TCLV8762LRB_1_4"}
{"score": 0.07738047093153, "chain_id": "31LVTDXBL79FP0FF3C8TCLV8762LRB_1_5"}
{"score": 0.013159298337996006, "chain_id": "31LVTDXBL79FP0FF3C8TCLV8762LRB_1_6"}
{"score": 0.026083920150995255, "chain_id": "31LVTDXBL79FP0FF3C8TCLV8762LRB_1_7"}
{"score": 0.10268975049257278, "chain_id": "31LVTDXBL79FP0FF3C8TCLV8762LRB_1_8"}
{"score": 0.07147204130887985, "chain_id": "31LVTDXBL79FP0FF3C8TCLV8762LRB_1_9"}
{"score": 0.794677734375, "chain_id": "31LVTDXBL79FP0FF3C8TCLV8762LRB_1_10"}
{"score": 0.9279277324676514, "chain_id": "3QXNC7EIPIUWO4U7K2MONG3QZYT09R_1_1"}
{"score": 0.9172941446304321, "chain_id": "3QXNC7EIPIUWO4U7K2MONG3QZYT09R_1_3"}
{"score": 0.9284862875938416, "chain_id": "3QXNC7EIPIUWO4U7K2MONG3QZYT09R_1_2"}
{"score": 0.8931032419204712, "chain_id": "3QXNC7EIPIUWO4U7K2MONG3QZYT09R_1_4"}
{"score": 0.06674002856016159, "chain_id": "3QXNC7EIPIUWO4U7K2MONG3QZYT09R_1_5"}
{"score": 0.4601893126964569, "chain_id": "3QXNC7EIPIUWO4U7K2MONG3QZYT09R_1_6"}
{"score": 0.33154067397117615, "chain_id": "3QXNC7EIPIUWO4U7K2MONG3QZYT09R_1_7"}
{"score": 0.04611789807677269, "chain_id": "3QXNC7EIPIUWO4U7K2MONG3QZYT09R_1_8"}
{"score": 0.03824414685368538, "chain_id": "3QXNC7EIPIUWO4U7K2MONG3QZYT09R_1_9"}
{"score": 0.03273862600326538, "chain_id": "3QXNC7EIPIUWO4U7K2MONG3QZYT09R_1_10"}
{"score": 0.4743693768978119, "chain_id": "3QAVNHZ3EM3NQJTY11M7HV6Y8NPLAL_1_1"}
{"score": 0.14534810185432434, "chain_id": "3QAVNHZ3EM3NQJTY11M7HV6Y8NPLAL_1_2"}
{"score": 0.5568785071372986, "chain_id": "3QAVNHZ3EM3NQJTY11M7HV6Y8NPLAL_1_3"}
{"score": 0.022440379485487938, "chain_id": "3QAVNHZ3EM3NQJTY11M7HV6Y8NPLAL_1_4"}
{"score": 0.30875304341316223, "chain_id": "3QAVNHZ3EM3NQJTY11M7HV6Y8NPLAL_1_5"}
{"score": 0.025150245055556297, "chain_id": "3QAVNHZ3EM3NQJTY11M7HV6Y8NPLAL_1_6"}
{"score": 0.27564316987991333, "chain_id": "3QAVNHZ3EM3NQJTY11M7HV6Y8NPLAL_1_7"}
{"score": 0.15778006613254547, "chain_id": "3QAVNHZ3EM3NQJTY11M7HV6Y8NPLAL_1_8"}
{"score": 0.19734936952590942, "chain_id": "3QAVNHZ3EM3NQJTY11M7HV6Y8NPLAL_1_9"}
{"score": 0.7606523633003235, "chain_id": "3QAVNHZ3EM3NQJTY11M7HV6Y8NPLAL_1_10"}
{"score": 0.8539673089981079, "chain_id": "3ZAK8W07I4DU8WIAIDHFJCQ4QNS0UT_1_2"}
{"score": 0.6607425808906555, "chain_id": "3ZAK8W07I4DU8WIAIDHFJCQ4QNS0UT_1_1"}
{"score": 0.8455579280853271, "chain_id": "3ZAK8W07I4DU8WIAIDHFJCQ4QNS0UT_1_3"}
{"score": 0.7696303725242615, "chain_id": "3ZAK8W07I4DU8WIAIDHFJCQ4QNS0UT_1_4"}
{"score": 0.21753273904323578, "chain_id": "3ZAK8W07I4DU8WIAIDHFJCQ4QNS0UT_1_5"}
{"score": 0.054389141499996185, "chain_id": "3ZAK8W07I4DU8WIAIDHFJCQ4QNS0UT_1_6"}
{"score": 0.021978911012411118, "chain_id": "3ZAK8W07I4DU8WIAIDHFJCQ4QNS0UT_1_7"}
{"score": 0.014947704039514065, "chain_id": "3ZAK8W07I4DU8WIAIDHFJCQ4QNS0UT_1_8"}
{"score": 0.24537131190299988, "chain_id": "3ZAK8W07I4DU8WIAIDHFJCQ4QNS0UT_1_9"}
{"score": 0.025012804195284843, "chain_id": "3ZAK8W07I4DU8WIAIDHFJCQ4QNS0UT_1_10"}
{"score": 0.8353644013404846, "chain_id": "3W8CV64QJ2Y7Z403IAT9T827ZP4H9H_1_1"}
{"score": 0.9328896403312683, "chain_id": "3W8CV64QJ2Y7Z403IAT9T827ZP4H9H_1_2"}
{"score": 0.918700635433197, "chain_id": "3W8CV64QJ2Y7Z403IAT9T827ZP4H9H_1_3"}
{"score": 0.9179177284240723, "chain_id": "3W8CV64QJ2Y7Z403IAT9T827ZP4H9H_1_4"}
{"score": 0.048076387494802475, "chain_id": "3W8CV64QJ2Y7Z403IAT9T827ZP4H9H_1_5"}
{"score": 0.16809207201004028, "chain_id": "3W8CV64QJ2Y7Z403IAT9T827ZP4H9H_1_6"}
{"score": 0.05705900490283966, "chain_id": "3W8CV64QJ2Y7Z403IAT9T827ZP4H9H_1_7"}
{"score": 0.030288727954030037, "chain_id": "3W8CV64QJ2Y7Z403IAT9T827ZP4H9H_1_8"}
{"score": 0.01783556304872036, "chain_id": "3W8CV64QJ2Y7Z403IAT9T827ZP4H9H_1_9"}
{"score": 0.022558940574526787, "chain_id": "3W8CV64QJ2Y7Z403IAT9T827ZP4H9H_1_10"}
{"score": 0.9899507164955139, "chain_id": "3MRNMEIQW55LOQWALBD97WE4725DL0_1_1"}
{"score": 0.9918906688690186, "chain_id": "3MRNMEIQW55LOQWALBD97WE4725DL0_1_2"}
{"score": 0.7552089691162109, "chain_id": "3MRNMEIQW55LOQWALBD97WE4725DL0_1_3"}
{"score": 0.8751322031021118, "chain_id": "3MRNMEIQW55LOQWALBD97WE4725DL0_1_4"}
{"score": 0.04846511781215668, "chain_id": "3MRNMEIQW55LOQWALBD97WE4725DL0_1_5"}
{"score": 0.026261409744620323, "chain_id": "3MRNMEIQW55LOQWALBD97WE4725DL0_1_6"}
{"score": 0.028304176405072212, "chain_id": "3MRNMEIQW55LOQWALBD97WE4725DL0_1_7"}
{"score": 0.1500927358865738, "chain_id": "3MRNMEIQW55LOQWALBD97WE4725DL0_1_8"}
{"score": 0.028186623007059097, "chain_id": "3MRNMEIQW55LOQWALBD97WE4725DL0_1_9"}
{"score": 0.028535572811961174, "chain_id": "3MRNMEIQW55LOQWALBD97WE4725DL0_1_10"}
{"score": 0.053054001182317734, "chain_id": "34Z02EIMISCF8J3LI8R5EG427UF0T2_1_1"}
{"score": 0.07131120562553406, "chain_id": "34Z02EIMISCF8J3LI8R5EG427UF0T2_1_2"}
{"score": 0.17813949286937714, "chain_id": "34Z02EIMISCF8J3LI8R5EG427UF0T2_1_3"}
{"score": 0.04399215057492256, "chain_id": "34Z02EIMISCF8J3LI8R5EG427UF0T2_1_4"}
{"score": 0.04398440942168236, "chain_id": "34Z02EIMISCF8J3LI8R5EG427UF0T2_1_5"}
{"score": 0.021588796749711037, "chain_id": "34Z02EIMISCF8J3LI8R5EG427UF0T2_1_6"}
{"score": 0.019839230924844742, "chain_id": "34Z02EIMISCF8J3LI8R5EG427UF0T2_1_7"}
{"score": 0.014677558094263077, "chain_id": "34Z02EIMISCF8J3LI8R5EG427UF0T2_1_8"}
{"score": 0.045818816870450974, "chain_id": "34Z02EIMISCF8J3LI8R5EG427UF0T2_1_9"}
{"score": 0.06745817512273788, "chain_id": "34Z02EIMISCF8J3LI8R5EG427UF0T2_1_10"}
{"score": 0.9326924681663513, "chain_id": "3BXQMRHWKZXRBAPH7I4DH9XHUEFMUP_1_1"}
{"score": 0.9247098565101624, "chain_id": "3BXQMRHWKZXRBAPH7I4DH9XHUEFMUP_1_3"}
{"score": 0.8900216221809387, "chain_id": "3BXQMRHWKZXRBAPH7I4DH9XHUEFMUP_1_5"}
{"score": 0.7850642800331116, "chain_id": "3BXQMRHWKZXRBAPH7I4DH9XHUEFMUP_1_7"}
{"score": 0.9526408910751343, "chain_id": "3BXQMRHWKZXRBAPH7I4DH9XHUEFMUP_1_8"}
{"score": 0.6506475806236267, "chain_id": "3BXQMRHWKZXRBAPH7I4DH9XHUEFMUP_1_2"}
{"score": 0.16667519509792328, "chain_id": "3BXQMRHWKZXRBAPH7I4DH9XHUEFMUP_1_4"}
{"score": 0.8593460917472839, "chain_id": "3BXQMRHWKZXRBAPH7I4DH9XHUEFMUP_1_6"}
{"score": 0.10010480135679245, "chain_id": "3BXQMRHWKZXRBAPH7I4DH9XHUEFMUP_1_9"}
{"score": 0.31660887598991394, "chain_id": "3BXQMRHWKZXRBAPH7I4DH9XHUEFMUP_1_10"}
{"score": 0.3987700045108795, "chain_id": "3QRYMNZ7FYGITFVSJET3PS0FGQITN5_1_10"}
{"score": 0.4913410246372223, "chain_id": "3QRYMNZ7FYGITFVSJET3PS0FGQITN5_1_1"}
{"score": 0.01565168984234333, "chain_id": "3QRYMNZ7FYGITFVSJET3PS0FGQITN5_1_2"}
{"score": 0.9206908941268921, "chain_id": "3QRYMNZ7FYGITFVSJET3PS0FGQITN5_1_3"}
{"score": 0.22703728079795837, "chain_id": "3QRYMNZ7FYGITFVSJET3PS0FGQITN5_1_4"}
{"score": 0.9628204703330994, "chain_id": "3QRYMNZ7FYGITFVSJET3PS0FGQITN5_1_5"}
{"score": 0.023421460762619972, "chain_id": "3QRYMNZ7FYGITFVSJET3PS0FGQITN5_1_6"}
{"score": 0.16140291094779968, "chain_id": "3QRYMNZ7FYGITFVSJET3PS0FGQITN5_1_7"}
{"score": 0.31095197796821594, "chain_id": "3QRYMNZ7FYGITFVSJET3PS0FGQITN5_1_8"}
{"score": 0.058901917189359665, "chain_id": "3QRYMNZ7FYGITFVSJET3PS0FGQITN5_1_9"}
{"score": 0.8270372152328491, "chain_id": "35L9RVQFCOH5JWO6GLO0P4PLDSJHUE_1_3"}
{"score": 0.5986965298652649, "chain_id": "35L9RVQFCOH5JWO6GLO0P4PLDSJHUE_1_4"}
{"score": 0.4301082193851471, "chain_id": "35L9RVQFCOH5JWO6GLO0P4PLDSJHUE_1_8"}
{"score": 0.6964926719665527, "chain_id": "35L9RVQFCOH5JWO6GLO0P4PLDSJHUE_1_1"}
{"score": 0.7131393551826477, "chain_id": "35L9RVQFCOH5JWO6GLO0P4PLDSJHUE_1_2"}
{"score": 0.33659523725509644, "chain_id": "35L9RVQFCOH5JWO6GLO0P4PLDSJHUE_1_5"}
{"score": 0.6626484990119934, "chain_id": "35L9RVQFCOH5JWO6GLO0P4PLDSJHUE_1_6"}
{"score": 0.4109308421611786, "chain_id": "35L9RVQFCOH5JWO6GLO0P4PLDSJHUE_1_7"}
{"score": 0.026484809815883636, "chain_id": "35L9RVQFCOH5JWO6GLO0P4PLDSJHUE_1_9"}
{"score": 0.4153852164745331, "chain_id": "35L9RVQFCOH5JWO6GLO0P4PLDSJHUE_1_10"}
{"score": 0.9765766859054565, "chain_id": "33CKWXB73JJE6OCUC8BVMF4HL6311D_1_1"}
{"score": 0.6823785901069641, "chain_id": "33CKWXB73JJE6OCUC8BVMF4HL6311D_1_2"}
{"score": 0.9745199084281921, "chain_id": "33CKWXB73JJE6OCUC8BVMF4HL6311D_1_3"}
{"score": 0.8907992839813232, "chain_id": "33CKWXB73JJE6OCUC8BVMF4HL6311D_1_4"}
{"score": 0.15953823924064636, "chain_id": "33CKWXB73JJE6OCUC8BVMF4HL6311D_1_5"}
{"score": 0.16055704653263092, "chain_id": "33CKWXB73JJE6OCUC8BVMF4HL6311D_1_6"}
{"score": 0.3020388185977936, "chain_id": "33CKWXB73JJE6OCUC8BVMF4HL6311D_1_7"}
{"score": 0.5436290502548218, "chain_id": "33CKWXB73JJE6OCUC8BVMF4HL6311D_1_8"}
{"score": 0.024015624076128006, "chain_id": "33CKWXB73JJE6OCUC8BVMF4HL6311D_1_9"}
{"score": 0.05016092211008072, "chain_id": "33CKWXB73JJE6OCUC8BVMF4HL6311D_1_10"}
{"score": 0.08022385835647583, "chain_id": "3YZ8UPK3VTLE2ODQUTAZEDS5LKRCU2_1_5"}
{"score": 0.17183300852775574, "chain_id": "3YZ8UPK3VTLE2ODQUTAZEDS5LKRCU2_1_1"}
{"score": 0.05045337975025177, "chain_id": "3YZ8UPK3VTLE2ODQUTAZEDS5LKRCU2_1_2"}
{"score": 0.05830198526382446, "chain_id": "3YZ8UPK3VTLE2ODQUTAZEDS5LKRCU2_1_3"}
{"score": 0.022869037464261055, "chain_id": "3YZ8UPK3VTLE2ODQUTAZEDS5LKRCU2_1_4"}
{"score": 0.03940817341208458, "chain_id": "3YZ8UPK3VTLE2ODQUTAZEDS5LKRCU2_1_6"}
{"score": 0.05156508833169937, "chain_id": "3YZ8UPK3VTLE2ODQUTAZEDS5LKRCU2_1_7"}
{"score": 0.04557863995432854, "chain_id": "3YZ8UPK3VTLE2ODQUTAZEDS5LKRCU2_1_8"}
{"score": 0.03576543927192688, "chain_id": "3YZ8UPK3VTLE2ODQUTAZEDS5LKRCU2_1_9"}
{"score": 0.4791167974472046, "chain_id": "3YZ8UPK3VTLE2ODQUTAZEDS5LKRCU2_1_10"}
{"score": 0.5829353928565979, "chain_id": "3RUIQRXJBBN4M2K2YSBXQ9M93WZLL9_1_1"}
{"score": 0.26933953166007996, "chain_id": "3RUIQRXJBBN4M2K2YSBXQ9M93WZLL9_1_3"}
{"score": 0.865505576133728, "chain_id": "3RUIQRXJBBN4M2K2YSBXQ9M93WZLL9_1_4"}
{"score": 0.9533402919769287, "chain_id": "3RUIQRXJBBN4M2K2YSBXQ9M93WZLL9_1_6"}
{"score": 0.9274705052375793, "chain_id": "3RUIQRXJBBN4M2K2YSBXQ9M93WZLL9_1_7"}
{"score": 0.37793898582458496, "chain_id": "3RUIQRXJBBN4M2K2YSBXQ9M93WZLL9_1_2"}
{"score": 0.833214521408081, "chain_id": "3RUIQRXJBBN4M2K2YSBXQ9M93WZLL9_1_5"}
{"score": 0.624720573425293, "chain_id": "3RUIQRXJBBN4M2K2YSBXQ9M93WZLL9_1_8"}
{"score": 0.903397798538208, "chain_id": "3RUIQRXJBBN4M2K2YSBXQ9M93WZLL9_1_9"}
{"score": 0.6900392174720764, "chain_id": "3RUIQRXJBBN4M2K2YSBXQ9M93WZLL9_1_10"}
{"score": 0.9796463847160339, "chain_id": "3WI0P0II61RWRORNQVA5T8N31Y7DRZ_1_1"}
{"score": 0.9786531925201416, "chain_id": "3WI0P0II61RWRORNQVA5T8N31Y7DRZ_1_3"}
{"score": 0.9841023087501526, "chain_id": "3WI0P0II61RWRORNQVA5T8N31Y7DRZ_1_4"}
{"score": 0.6416722536087036, "chain_id": "3WI0P0II61RWRORNQVA5T8N31Y7DRZ_1_8"}
{"score": 0.11262215673923492, "chain_id": "3WI0P0II61RWRORNQVA5T8N31Y7DRZ_1_9"}
{"score": 0.9776080250740051, "chain_id": "3WI0P0II61RWRORNQVA5T8N31Y7DRZ_1_2"}
{"score": 0.7676711082458496, "chain_id": "3WI0P0II61RWRORNQVA5T8N31Y7DRZ_1_5"}
{"score": 0.04594719782471657, "chain_id": "3WI0P0II61RWRORNQVA5T8N31Y7DRZ_1_6"}
{"score": 0.6806489825248718, "chain_id": "3WI0P0II61RWRORNQVA5T8N31Y7DRZ_1_7"}
{"score": 0.8772367238998413, "chain_id": "3WI0P0II61RWRORNQVA5T8N31Y7DRZ_1_10"}
{"score": 0.9903744459152222, "chain_id": "33F859I566CQNXF0GU75KEXXCE8BHP_1_1"}
{"score": 0.9897474050521851, "chain_id": "33F859I566CQNXF0GU75KEXXCE8BHP_1_2"}
{"score": 0.8604963421821594, "chain_id": "33F859I566CQNXF0GU75KEXXCE8BHP_1_3"}
{"score": 0.9692124128341675, "chain_id": "33F859I566CQNXF0GU75KEXXCE8BHP_1_4"}
{"score": 0.07605656236410141, "chain_id": "33F859I566CQNXF0GU75KEXXCE8BHP_1_5"}
{"score": 0.5576391220092773, "chain_id": "33F859I566CQNXF0GU75KEXXCE8BHP_1_6"}
{"score": 0.5827485918998718, "chain_id": "33F859I566CQNXF0GU75KEXXCE8BHP_1_7"}
{"score": 0.6636814475059509, "chain_id": "33F859I566CQNXF0GU75KEXXCE8BHP_1_8"}
{"score": 0.03598105534911156, "chain_id": "33F859I566CQNXF0GU75KEXXCE8BHP_1_9"}
{"score": 0.04332379996776581, "chain_id": "33F859I566CQNXF0GU75KEXXCE8BHP_1_10"}
{"score": 0.6000063419342041, "chain_id": "32Q90QCQ1SKFWQSSW6CSYEJA524KEZ_1_2"}
{"score": 0.5096256136894226, "chain_id": "32Q90QCQ1SKFWQSSW6CSYEJA524KEZ_1_4"}
{"score": 0.5099530816078186, "chain_id": "32Q90QCQ1SKFWQSSW6CSYEJA524KEZ_1_6"}
{"score": 0.7596856355667114, "chain_id": "32Q90QCQ1SKFWQSSW6CSYEJA524KEZ_1_7"}
{"score": 0.981027364730835, "chain_id": "32Q90QCQ1SKFWQSSW6CSYEJA524KEZ_1_8"}
{"score": 0.8051241636276245, "chain_id": "32Q90QCQ1SKFWQSSW6CSYEJA524KEZ_1_9"}
{"score": 0.9014667868614197, "chain_id": "32Q90QCQ1SKFWQSSW6CSYEJA524KEZ_1_1"}
{"score": 0.2608264684677124, "chain_id": "32Q90QCQ1SKFWQSSW6CSYEJA524KEZ_1_3"}
{"score": 0.29445919394493103, "chain_id": "32Q90QCQ1SKFWQSSW6CSYEJA524KEZ_1_5"}
{"score": 0.3411307632923126, "chain_id": "32Q90QCQ1SKFWQSSW6CSYEJA524KEZ_1_10"}
{"score": 0.36997294425964355, "chain_id": "3YW4XOSQKQKUFL3SEWLFXH9EIRW1U6_1_2"}
{"score": 0.04669157788157463, "chain_id": "3YW4XOSQKQKUFL3SEWLFXH9EIRW1U6_1_4"}
{"score": 0.6559773087501526, "chain_id": "3YW4XOSQKQKUFL3SEWLFXH9EIRW1U6_1_6"}
{"score": 0.2724083364009857, "chain_id": "3YW4XOSQKQKUFL3SEWLFXH9EIRW1U6_1_1"}
{"score": 0.5940114855766296, "chain_id": "3YW4XOSQKQKUFL3SEWLFXH9EIRW1U6_1_3"}
{"score": 0.02310474030673504, "chain_id": "3YW4XOSQKQKUFL3SEWLFXH9EIRW1U6_1_5"}
{"score": 0.2914750874042511, "chain_id": "3YW4XOSQKQKUFL3SEWLFXH9EIRW1U6_1_7"}
{"score": 0.44303277134895325, "chain_id": "3YW4XOSQKQKUFL3SEWLFXH9EIRW1U6_1_8"}
{"score": 0.08164437115192413, "chain_id": "3YW4XOSQKQKUFL3SEWLFXH9EIRW1U6_1_9"}
{"score": 0.03239215165376663, "chain_id": "3YW4XOSQKQKUFL3SEWLFXH9EIRW1U6_1_10"}
{"score": 0.9231166243553162, "chain_id": "3IXQG4FA2TXX8RXHIIJD7XZ9SCS9BA_1_1"}
{"score": 0.8186090588569641, "chain_id": "3IXQG4FA2TXX8RXHIIJD7XZ9SCS9BA_1_5"}
{"score": 0.7487332820892334, "chain_id": "3IXQG4FA2TXX8RXHIIJD7XZ9SCS9BA_1_6"}
{"score": 0.9293065071105957, "chain_id": "3IXQG4FA2TXX8RXHIIJD7XZ9SCS9BA_1_7"}
{"score": 0.9203543066978455, "chain_id": "3IXQG4FA2TXX8RXHIIJD7XZ9SCS9BA_1_2"}
{"score": 0.9745396375656128, "chain_id": "3IXQG4FA2TXX8RXHIIJD7XZ9SCS9BA_1_3"}
{"score": 0.750211775302887, "chain_id": "3IXQG4FA2TXX8RXHIIJD7XZ9SCS9BA_1_4"}
{"score": 0.7597652077674866, "chain_id": "3IXQG4FA2TXX8RXHIIJD7XZ9SCS9BA_1_8"}
{"score": 0.4619877338409424, "chain_id": "3IXQG4FA2TXX8RXHIIJD7XZ9SCS9BA_1_9"}
{"score": 0.06915690004825592, "chain_id": "3IXQG4FA2TXX8RXHIIJD7XZ9SCS9BA_1_10"}
{"score": 0.5032399892807007, "chain_id": "3JPSL1DZ5SYDEJWJDE2MUNANFM2ANZ_1_2"}
{"score": 0.6813413500785828, "chain_id": "3JPSL1DZ5SYDEJWJDE2MUNANFM2ANZ_1_3"}
{"score": 0.4865659773349762, "chain_id": "3JPSL1DZ5SYDEJWJDE2MUNANFM2ANZ_1_4"}
{"score": 0.8904328942298889, "chain_id": "3JPSL1DZ5SYDEJWJDE2MUNANFM2ANZ_1_6"}
{"score": 0.9602428078651428, "chain_id": "3JPSL1DZ5SYDEJWJDE2MUNANFM2ANZ_1_7"}
{"score": 0.8490818738937378, "chain_id": "3JPSL1DZ5SYDEJWJDE2MUNANFM2ANZ_1_9"}
{"score": 0.9781758189201355, "chain_id": "3JPSL1DZ5SYDEJWJDE2MUNANFM2ANZ_1_10"}
{"score": 0.6055639386177063, "chain_id": "3JPSL1DZ5SYDEJWJDE2MUNANFM2ANZ_1_1"}
{"score": 0.2836915850639343, "chain_id": "3JPSL1DZ5SYDEJWJDE2MUNANFM2ANZ_1_5"}
{"score": 0.7852371335029602, "chain_id": "3JPSL1DZ5SYDEJWJDE2MUNANFM2ANZ_1_8"}
{"score": 0.7644405364990234, "chain_id": "3S3AMIZX3U4SLM248YKA4DOH1LRCDR_1_1"}
{"score": 0.8109359741210938, "chain_id": "3S3AMIZX3U4SLM248YKA4DOH1LRCDR_1_6"}
{"score": 0.821462869644165, "chain_id": "3S3AMIZX3U4SLM248YKA4DOH1LRCDR_1_7"}
{"score": 0.6904844641685486, "chain_id": "3S3AMIZX3U4SLM248YKA4DOH1LRCDR_1_9"}
{"score": 0.8557685613632202, "chain_id": "3S3AMIZX3U4SLM248YKA4DOH1LRCDR_1_10"}
{"score": 0.6754098534584045, "chain_id": "3S3AMIZX3U4SLM248YKA4DOH1LRCDR_1_2"}
{"score": 0.8636941909790039, "chain_id": "3S3AMIZX3U4SLM248YKA4DOH1LRCDR_1_3"}
{"score": 0.9277201294898987, "chain_id": "3S3AMIZX3U4SLM248YKA4DOH1LRCDR_1_4"}
{"score": 0.9326534867286682, "chain_id": "3S3AMIZX3U4SLM248YKA4DOH1LRCDR_1_5"}
{"score": 0.5721968412399292, "chain_id": "3S3AMIZX3U4SLM248YKA4DOH1LRCDR_1_8"}
{"score": 0.9707947373390198, "chain_id": "35K3O9HUABC4G40EVVLVI1R5YDDEFW_1_3"}
{"score": 0.443715363740921, "chain_id": "35K3O9HUABC4G40EVVLVI1R5YDDEFW_1_7"}
{"score": 0.30840542912483215, "chain_id": "35K3O9HUABC4G40EVVLVI1R5YDDEFW_1_10"}
{"score": 0.41981494426727295, "chain_id": "35K3O9HUABC4G40EVVLVI1R5YDDEFW_1_1"}
{"score": 0.5064231157302856, "chain_id": "35K3O9HUABC4G40EVVLVI1R5YDDEFW_1_2"}
{"score": 0.6009093523025513, "chain_id": "35K3O9HUABC4G40EVVLVI1R5YDDEFW_1_4"}
{"score": 0.03338101506233215, "chain_id": "35K3O9HUABC4G40EVVLVI1R5YDDEFW_1_5"}
{"score": 0.19845859706401825, "chain_id": "35K3O9HUABC4G40EVVLVI1R5YDDEFW_1_6"}
{"score": 0.5253755450248718, "chain_id": "35K3O9HUABC4G40EVVLVI1R5YDDEFW_1_8"}
{"score": 0.03369082137942314, "chain_id": "35K3O9HUABC4G40EVVLVI1R5YDDEFW_1_9"}
{"score": 0.9905954599380493, "chain_id": "39JEC7537U0EF32QZJK4AZUO2H2VCV_1_1"}
{"score": 0.8956401944160461, "chain_id": "39JEC7537U0EF32QZJK4AZUO2H2VCV_1_3"}
{"score": 0.5456706285476685, "chain_id": "39JEC7537U0EF32QZJK4AZUO2H2VCV_1_4"}
{"score": 0.08677656203508377, "chain_id": "39JEC7537U0EF32QZJK4AZUO2H2VCV_1_7"}
{"score": 0.9912944436073303, "chain_id": "39JEC7537U0EF32QZJK4AZUO2H2VCV_1_2"}
{"score": 0.06068357825279236, "chain_id": "39JEC7537U0EF32QZJK4AZUO2H2VCV_1_5"}
{"score": 0.06284456700086594, "chain_id": "39JEC7537U0EF32QZJK4AZUO2H2VCV_1_6"}
{"score": 0.052717164158821106, "chain_id": "39JEC7537U0EF32QZJK4AZUO2H2VCV_1_8"}
{"score": 0.20396332442760468, "chain_id": "39JEC7537U0EF32QZJK4AZUO2H2VCV_1_9"}
{"score": 0.04567604884505272, "chain_id": "39JEC7537U0EF32QZJK4AZUO2H2VCV_1_10"}
{"score": 0.9101116061210632, "chain_id": "3JV9LGBJWTDW6V9Y0TU95YLV4HEOGF_1_9"}
{"score": 0.7876293063163757, "chain_id": "3JV9LGBJWTDW6V9Y0TU95YLV4HEOGF_1_10"}
{"score": 0.08210690319538116, "chain_id": "3JV9LGBJWTDW6V9Y0TU95YLV4HEOGF_1_1"}
{"score": 0.2728193700313568, "chain_id": "3JV9LGBJWTDW6V9Y0TU95YLV4HEOGF_1_2"}
{"score": 0.11197835952043533, "chain_id": "3JV9LGBJWTDW6V9Y0TU95YLV4HEOGF_1_3"}
{"score": 0.047953661531209946, "chain_id": "3JV9LGBJWTDW6V9Y0TU95YLV4HEOGF_1_4"}
{"score": 0.3545686900615692, "chain_id": "3JV9LGBJWTDW6V9Y0TU95YLV4HEOGF_1_5"}
{"score": 0.09548737853765488, "chain_id": "3JV9LGBJWTDW6V9Y0TU95YLV4HEOGF_1_6"}
{"score": 0.0897706151008606, "chain_id": "3JV9LGBJWTDW6V9Y0TU95YLV4HEOGF_1_7"}
{"score": 0.13889311254024506, "chain_id": "3JV9LGBJWTDW6V9Y0TU95YLV4HEOGF_1_8"}
{"score": 0.7502835988998413, "chain_id": "3PDJHANYK5FKHLY5K3QX9YB5WBQ6HM_1_1"}
{"score": 0.1668655425310135, "chain_id": "3PDJHANYK5FKHLY5K3QX9YB5WBQ6HM_1_2"}
{"score": 0.06811301410198212, "chain_id": "3PDJHANYK5FKHLY5K3QX9YB5WBQ6HM_1_3"}
{"score": 0.22606171667575836, "chain_id": "3PDJHANYK5FKHLY5K3QX9YB5WBQ6HM_1_4"}
{"score": 0.6344049572944641, "chain_id": "3PDJHANYK5FKHLY5K3QX9YB5WBQ6HM_1_5"}
{"score": 0.4839573800563812, "chain_id": "3PDJHANYK5FKHLY5K3QX9YB5WBQ6HM_1_6"}
{"score": 0.27934861183166504, "chain_id": "3PDJHANYK5FKHLY5K3QX9YB5WBQ6HM_1_7"}
{"score": 0.08068130165338516, "chain_id": "3PDJHANYK5FKHLY5K3QX9YB5WBQ6HM_1_8"}
{"score": 0.09382607787847519, "chain_id": "3PDJHANYK5FKHLY5K3QX9YB5WBQ6HM_1_9"}
{"score": 0.40262770652770996, "chain_id": "3PDJHANYK5FKHLY5K3QX9YB5WBQ6HM_1_10"}
{"score": 0.9399719834327698, "chain_id": "3GM6G9ZBKNWCBXAS7DE3CDBFXMPTMG_1_1"}
{"score": 0.8071067929267883, "chain_id": "3GM6G9ZBKNWCBXAS7DE3CDBFXMPTMG_1_2"}
{"score": 0.9531199336051941, "chain_id": "3GM6G9ZBKNWCBXAS7DE3CDBFXMPTMG_1_3"}
{"score": 0.8995262980461121, "chain_id": "3GM6G9ZBKNWCBXAS7DE3CDBFXMPTMG_1_6"}
{"score": 0.9160874485969543, "chain_id": "3GM6G9ZBKNWCBXAS7DE3CDBFXMPTMG_1_9"}
{"score": 0.7913275957107544, "chain_id": "3GM6G9ZBKNWCBXAS7DE3CDBFXMPTMG_1_4"}
{"score": 0.6696240901947021, "chain_id": "3GM6G9ZBKNWCBXAS7DE3CDBFXMPTMG_1_5"}
{"score": 0.06928971409797668, "chain_id": "3GM6G9ZBKNWCBXAS7DE3CDBFXMPTMG_1_7"}
{"score": 0.48637476563453674, "chain_id": "3GM6G9ZBKNWCBXAS7DE3CDBFXMPTMG_1_8"}
{"score": 0.11045067757368088, "chain_id": "3GM6G9ZBKNWCBXAS7DE3CDBFXMPTMG_1_10"}
{"score": 0.7604753375053406, "chain_id": "3PQ8K71NHXJ6U02U4AXH8HQNC80AAU_1_6"}
{"score": 0.33216413855552673, "chain_id": "3PQ8K71NHXJ6U02U4AXH8HQNC80AAU_1_7"}
{"score": 0.09856034815311432, "chain_id": "3PQ8K71NHXJ6U02U4AXH8HQNC80AAU_1_1"}
{"score": 0.024888530373573303, "chain_id": "3PQ8K71NHXJ6U02U4AXH8HQNC80AAU_1_2"}
{"score": 0.05233065038919449, "chain_id": "3PQ8K71NHXJ6U02U4AXH8HQNC80AAU_1_3"}
{"score": 0.05030396208167076, "chain_id": "3PQ8K71NHXJ6U02U4AXH8HQNC80AAU_1_4"}
{"score": 0.06912824511528015, "chain_id": "3PQ8K71NHXJ6U02U4AXH8HQNC80AAU_1_5"}
{"score": 0.025567196309566498, "chain_id": "3PQ8K71NHXJ6U02U4AXH8HQNC80AAU_1_8"}
{"score": 0.2611519396305084, "chain_id": "3PQ8K71NHXJ6U02U4AXH8HQNC80AAU_1_9"}
{"score": 0.028364310041069984, "chain_id": "3PQ8K71NHXJ6U02U4AXH8HQNC80AAU_1_10"}
{"score": 0.027416536584496498, "chain_id": "36PW28KO4ZV9KDJ6KFZ340GEXRVEAY_1_1"}
{"score": 0.06030963361263275, "chain_id": "36PW28KO4ZV9KDJ6KFZ340GEXRVEAY_1_2"}
{"score": 0.03329141438007355, "chain_id": "36PW28KO4ZV9KDJ6KFZ340GEXRVEAY_1_3"}
{"score": 0.07109694182872772, "chain_id": "36PW28KO4ZV9KDJ6KFZ340GEXRVEAY_1_4"}
{"score": 0.033076003193855286, "chain_id": "36PW28KO4ZV9KDJ6KFZ340GEXRVEAY_1_5"}
{"score": 0.1873142272233963, "chain_id": "36PW28KO4ZV9KDJ6KFZ340GEXRVEAY_1_6"}
{"score": 0.4893374741077423, "chain_id": "36PW28KO4ZV9KDJ6KFZ340GEXRVEAY_1_7"}
{"score": 0.03872944042086601, "chain_id": "36PW28KO4ZV9KDJ6KFZ340GEXRVEAY_1_8"}
{"score": 0.582735002040863, "chain_id": "36PW28KO4ZV9KDJ6KFZ340GEXRVEAY_1_9"}
{"score": 0.1428345888853073, "chain_id": "36PW28KO4ZV9KDJ6KFZ340GEXRVEAY_1_10"}
{"score": 0.08684368431568146, "chain_id": "39ZSFO5CA8V1A2JW4LRL1H50B11JUI_1_5"}
{"score": 0.05864590406417847, "chain_id": "39ZSFO5CA8V1A2JW4LRL1H50B11JUI_1_1"}
{"score": 0.0791584774851799, "chain_id": "39ZSFO5CA8V1A2JW4LRL1H50B11JUI_1_2"}
{"score": 0.04576427489519119, "chain_id": "39ZSFO5CA8V1A2JW4LRL1H50B11JUI_1_3"}
{"score": 0.12480663508176804, "chain_id": "39ZSFO5CA8V1A2JW4LRL1H50B11JUI_1_4"}
{"score": 0.13752520084381104, "chain_id": "39ZSFO5CA8V1A2JW4LRL1H50B11JUI_1_6"}
{"score": 0.029071614146232605, "chain_id": "39ZSFO5CA8V1A2JW4LRL1H50B11JUI_1_7"}
{"score": 0.031947050243616104, "chain_id": "39ZSFO5CA8V1A2JW4LRL1H50B11JUI_1_8"}
{"score": 0.028953734785318375, "chain_id": "39ZSFO5CA8V1A2JW4LRL1H50B11JUI_1_9"}
{"score": 0.029763372614979744, "chain_id": "39ZSFO5CA8V1A2JW4LRL1H50B11JUI_1_10"}
{"score": 0.193949893116951, "chain_id": "3IO1LGZLK9WROAXCHZWRWLI6SAR86Z_1_6"}
{"score": 0.12624573707580566, "chain_id": "3IO1LGZLK9WROAXCHZWRWLI6SAR86Z_1_1"}
{"score": 0.046626146882772446, "chain_id": "3IO1LGZLK9WROAXCHZWRWLI6SAR86Z_1_2"}
{"score": 0.017483694478869438, "chain_id": "3IO1LGZLK9WROAXCHZWRWLI6SAR86Z_1_3"}
{"score": 0.26780709624290466, "chain_id": "3IO1LGZLK9WROAXCHZWRWLI6SAR86Z_1_4"}
{"score": 0.03569310903549194, "chain_id": "3IO1LGZLK9WROAXCHZWRWLI6SAR86Z_1_5"}
{"score": 0.04552720487117767, "chain_id": "3IO1LGZLK9WROAXCHZWRWLI6SAR86Z_1_7"}
{"score": 0.056305818259716034, "chain_id": "3IO1LGZLK9WROAXCHZWRWLI6SAR86Z_1_8"}
{"score": 0.03965624049305916, "chain_id": "3IO1LGZLK9WROAXCHZWRWLI6SAR86Z_1_9"}
{"score": 0.06719513237476349, "chain_id": "3IO1LGZLK9WROAXCHZWRWLI6SAR86Z_1_10"}
{"score": 0.5292596817016602, "chain_id": "3LJ7UR74RHCYCUG24DSVHKONLFY4NE_1_3"}
{"score": 0.10071811825037003, "chain_id": "3LJ7UR74RHCYCUG24DSVHKONLFY4NE_1_4"}
{"score": 0.0474281944334507, "chain_id": "3LJ7UR74RHCYCUG24DSVHKONLFY4NE_1_8"}
{"score": 0.2707118093967438, "chain_id": "3LJ7UR74RHCYCUG24DSVHKONLFY4NE_1_1"}
{"score": 0.20060448348522186, "chain_id": "3LJ7UR74RHCYCUG24DSVHKONLFY4NE_1_2"}
{"score": 0.7174620628356934, "chain_id": "3LJ7UR74RHCYCUG24DSVHKONLFY4NE_1_5"}
{"score": 0.8289159536361694, "chain_id": "3LJ7UR74RHCYCUG24DSVHKONLFY4NE_1_6"}
{"score": 0.7162730097770691, "chain_id": "3LJ7UR74RHCYCUG24DSVHKONLFY4NE_1_7"}
{"score": 0.13409645855426788, "chain_id": "3LJ7UR74RHCYCUG24DSVHKONLFY4NE_1_9"}
{"score": 0.06984971463680267, "chain_id": "3LJ7UR74RHCYCUG24DSVHKONLFY4NE_1_10"}
{"score": 0.5655830502510071, "chain_id": "34YB12FSQYN86SOMNDFWDUWQA0ZGMV_1_1"}
{"score": 0.024647753685712814, "chain_id": "34YB12FSQYN86SOMNDFWDUWQA0ZGMV_1_2"}
{"score": 0.03424106538295746, "chain_id": "34YB12FSQYN86SOMNDFWDUWQA0ZGMV_1_3"}
{"score": 0.08801060914993286, "chain_id": "34YB12FSQYN86SOMNDFWDUWQA0ZGMV_1_4"}
{"score": 0.05478990077972412, "chain_id": "34YB12FSQYN86SOMNDFWDUWQA0ZGMV_1_5"}
{"score": 0.048693299293518066, "chain_id": "34YB12FSQYN86SOMNDFWDUWQA0ZGMV_1_6"}
{"score": 0.04759303480386734, "chain_id": "34YB12FSQYN86SOMNDFWDUWQA0ZGMV_1_7"}
{"score": 0.5271979570388794, "chain_id": "34YB12FSQYN86SOMNDFWDUWQA0ZGMV_1_8"}
{"score": 0.04822717234492302, "chain_id": "34YB12FSQYN86SOMNDFWDUWQA0ZGMV_1_9"}
{"score": 0.03433947265148163, "chain_id": "34YB12FSQYN86SOMNDFWDUWQA0ZGMV_1_10"}
{"score": 0.38162752985954285, "chain_id": "3Y9N9SS8LYA48M6LF599BAKNXI6D38_1_7"}
{"score": 0.055841751396656036, "chain_id": "3Y9N9SS8LYA48M6LF599BAKNXI6D38_1_1"}
{"score": 0.19445018470287323, "chain_id": "3Y9N9SS8LYA48M6LF599BAKNXI6D38_1_2"}
{"score": 0.4586554169654846, "chain_id": "3Y9N9SS8LYA48M6LF599BAKNXI6D38_1_3"}
{"score": 0.0844978466629982, "chain_id": "3Y9N9SS8LYA48M6LF599BAKNXI6D38_1_4"}
{"score": 0.5940119624137878, "chain_id": "3Y9N9SS8LYA48M6LF599BAKNXI6D38_1_5"}
{"score": 0.8605186939239502, "chain_id": "3Y9N9SS8LYA48M6LF599BAKNXI6D38_1_6"}
{"score": 0.24437281489372253, "chain_id": "3Y9N9SS8LYA48M6LF599BAKNXI6D38_1_8"}
{"score": 0.0513819195330143, "chain_id": "3Y9N9SS8LYA48M6LF599BAKNXI6D38_1_9"}
{"score": 0.03545652702450752, "chain_id": "3Y9N9SS8LYA48M6LF599BAKNXI6D38_1_10"}
{"score": 0.10567721724510193, "chain_id": "31LVTDXBL79FP0FF3C8TCLV8DAELR1_1_1"}
{"score": 0.03580806404352188, "chain_id": "31LVTDXBL79FP0FF3C8TCLV8DAELR1_1_2"}
{"score": 0.06136045232415199, "chain_id": "31LVTDXBL79FP0FF3C8TCLV8DAELR1_1_3"}
{"score": 0.029245445504784584, "chain_id": "31LVTDXBL79FP0FF3C8TCLV8DAELR1_1_4"}
{"score": 0.030770229175686836, "chain_id": "31LVTDXBL79FP0FF3C8TCLV8DAELR1_1_5"}
{"score": 0.06736835837364197, "chain_id": "31LVTDXBL79FP0FF3C8TCLV8DAELR1_1_6"}
{"score": 0.029455386102199554, "chain_id": "31LVTDXBL79FP0FF3C8TCLV8DAELR1_1_7"}
{"score": 0.02930060401558876, "chain_id": "31LVTDXBL79FP0FF3C8TCLV8DAELR1_1_8"}
{"score": 0.03172459825873375, "chain_id": "31LVTDXBL79FP0FF3C8TCLV8DAELR1_1_9"}
{"score": 0.0594477653503418, "chain_id": "31LVTDXBL79FP0FF3C8TCLV8DAELR1_1_10"}
{"score": 0.3807510733604431, "chain_id": "3M68NM076H6X6FC8G82RN2DBNG06RR_1_1"}
{"score": 0.03199150413274765, "chain_id": "3M68NM076H6X6FC8G82RN2DBNG06RR_1_2"}
{"score": 0.020731261000037193, "chain_id": "3M68NM076H6X6FC8G82RN2DBNG06RR_1_3"}
{"score": 0.0888424664735794, "chain_id": "3M68NM076H6X6FC8G82RN2DBNG06RR_1_4"}
{"score": 0.3651167154312134, "chain_id": "3M68NM076H6X6FC8G82RN2DBNG06RR_1_5"}
{"score": 0.10620518773794174, "chain_id": "3M68NM076H6X6FC8G82RN2DBNG06RR_1_6"}
{"score": 0.1336667537689209, "chain_id": "3M68NM076H6X6FC8G82RN2DBNG06RR_1_7"}
{"score": 0.5623939037322998, "chain_id": "3M68NM076H6X6FC8G82RN2DBNG06RR_1_8"}
{"score": 0.4704301953315735, "chain_id": "3M68NM076H6X6FC8G82RN2DBNG06RR_1_9"}
{"score": 0.04204123467206955, "chain_id": "3M68NM076H6X6FC8G82RN2DBNG06RR_1_10"}
{"score": 0.07414133101701736, "chain_id": "3J2UYBXQQLB96LS9MVJC36COE4606K_1_10"}
{"score": 0.055035654455423355, "chain_id": "3J2UYBXQQLB96LS9MVJC36COE4606K_1_1"}
{"score": 0.08866222947835922, "chain_id": "3J2UYBXQQLB96LS9MVJC36COE4606K_1_2"}
{"score": 0.06423326581716537, "chain_id": "3J2UYBXQQLB96LS9MVJC36COE4606K_1_3"}
{"score": 0.38982462882995605, "chain_id": "3J2UYBXQQLB96LS9MVJC36COE4606K_1_4"}
{"score": 0.08443935215473175, "chain_id": "3J2UYBXQQLB96LS9MVJC36COE4606K_1_5"}
{"score": 0.8172670602798462, "chain_id": "3J2UYBXQQLB96LS9MVJC36COE4606K_1_6"}
{"score": 0.4406541585922241, "chain_id": "3J2UYBXQQLB96LS9MVJC36COE4606K_1_7"}
{"score": 0.6549341678619385, "chain_id": "3J2UYBXQQLB96LS9MVJC36COE4606K_1_8"}
{"score": 0.032632406800985336, "chain_id": "3J2UYBXQQLB96LS9MVJC36COE4606K_1_9"}
{"score": 0.07097027450799942, "chain_id": "39RP059MEHSCFBGB7RNICJ5TV9JMB5_1_1"}
{"score": 0.0941348671913147, "chain_id": "39RP059MEHSCFBGB7RNICJ5TV9JMB5_1_2"}
{"score": 0.032484568655490875, "chain_id": "39RP059MEHSCFBGB7RNICJ5TV9JMB5_1_3"}
{"score": 0.05733667314052582, "chain_id": "39RP059MEHSCFBGB7RNICJ5TV9JMB5_1_4"}
{"score": 0.04979002848267555, "chain_id": "39RP059MEHSCFBGB7RNICJ5TV9JMB5_1_5"}
{"score": 0.020131327211856842, "chain_id": "39RP059MEHSCFBGB7RNICJ5TV9JMB5_1_6"}
{"score": 0.02029733918607235, "chain_id": "39RP059MEHSCFBGB7RNICJ5TV9JMB5_1_7"}
{"score": 0.08126363903284073, "chain_id": "39RP059MEHSCFBGB7RNICJ5TV9JMB5_1_8"}
{"score": 0.20158326625823975, "chain_id": "39RP059MEHSCFBGB7RNICJ5TV9JMB5_1_9"}
{"score": 0.021954413503408432, "chain_id": "39RP059MEHSCFBGB7RNICJ5TV9JMB5_1_10"}
{"score": 0.7006527185440063, "chain_id": "3CN4LGXD5XNSOTKGBF16Y0MUQFIY4B_1_1"}
{"score": 0.6207588315010071, "chain_id": "3CN4LGXD5XNSOTKGBF16Y0MUQFIY4B_1_2"}
{"score": 0.07465410977602005, "chain_id": "3CN4LGXD5XNSOTKGBF16Y0MUQFIY4B_1_3"}
{"score": 0.2454422116279602, "chain_id": "3CN4LGXD5XNSOTKGBF16Y0MUQFIY4B_1_4"}
{"score": 0.06170574203133583, "chain_id": "3CN4LGXD5XNSOTKGBF16Y0MUQFIY4B_1_5"}
{"score": 0.026933113113045692, "chain_id": "3CN4LGXD5XNSOTKGBF16Y0MUQFIY4B_1_6"}
{"score": 0.23270666599273682, "chain_id": "3CN4LGXD5XNSOTKGBF16Y0MUQFIY4B_1_7"}
{"score": 0.41866275668144226, "chain_id": "3CN4LGXD5XNSOTKGBF16Y0MUQFIY4B_1_8"}
{"score": 0.33193129301071167, "chain_id": "3CN4LGXD5XNSOTKGBF16Y0MUQFIY4B_1_9"}
{"score": 0.11718444526195526, "chain_id": "3CN4LGXD5XNSOTKGBF16Y0MUQFIY4B_1_10"}
{"score": 0.9843458533287048, "chain_id": "3H0W84IWBK11JU5NMQLPZQ5O0QERE7_1_1"}
{"score": 0.9822606444358826, "chain_id": "3H0W84IWBK11JU5NMQLPZQ5O0QERE7_1_2"}
{"score": 0.7200284600257874, "chain_id": "3H0W84IWBK11JU5NMQLPZQ5O0QERE7_1_3"}
{"score": 0.8915992379188538, "chain_id": "3H0W84IWBK11JU5NMQLPZQ5O0QERE7_1_4"}
{"score": 0.3994975984096527, "chain_id": "3H0W84IWBK11JU5NMQLPZQ5O0QERE7_1_5"}
{"score": 0.06279066205024719, "chain_id": "3H0W84IWBK11JU5NMQLPZQ5O0QERE7_1_6"}
{"score": 0.30334967374801636, "chain_id": "3H0W84IWBK11JU5NMQLPZQ5O0QERE7_1_7"}
{"score": 0.2523154616355896, "chain_id": "3H0W84IWBK11JU5NMQLPZQ5O0QERE7_1_8"}
{"score": 0.8846829533576965, "chain_id": "3H0W84IWBK11JU5NMQLPZQ5O0QERE7_1_9"}
{"score": 0.5747057795524597, "chain_id": "3H0W84IWBK11JU5NMQLPZQ5O0QERE7_1_10"}
{"score": 0.9907884001731873, "chain_id": "3PEIJLRY6TSFXQDQGPLNAEYC9FTXWN_1_1"}
{"score": 0.9927444458007812, "chain_id": "3PEIJLRY6TSFXQDQGPLNAEYC9FTXWN_1_3"}
{"score": 0.9932405948638916, "chain_id": "3PEIJLRY6TSFXQDQGPLNAEYC9FTXWN_1_4"}
{"score": 0.9883500337600708, "chain_id": "3PEIJLRY6TSFXQDQGPLNAEYC9FTXWN_1_8"}
{"score": 0.9900947213172913, "chain_id": "3PEIJLRY6TSFXQDQGPLNAEYC9FTXWN_1_9"}
{"score": 0.990828275680542, "chain_id": "3PEIJLRY6TSFXQDQGPLNAEYC9FTXWN_1_10"}
{"score": 0.9855754375457764, "chain_id": "3PEIJLRY6TSFXQDQGPLNAEYC9FTXWN_1_2"}
{"score": 0.9612569808959961, "chain_id": "3PEIJLRY6TSFXQDQGPLNAEYC9FTXWN_1_5"}
{"score": 0.8047016859054565, "chain_id": "3PEIJLRY6TSFXQDQGPLNAEYC9FTXWN_1_6"}
{"score": 0.9934025406837463, "chain_id": "3PEIJLRY6TSFXQDQGPLNAEYC9FTXWN_1_7"}
{"score": 0.7344279289245605, "chain_id": "3TVRFO09GKEZMW1RCBEL13HWC3QLXA_1_1"}
{"score": 0.42881977558135986, "chain_id": "3TVRFO09GKEZMW1RCBEL13HWC3QLXA_1_2"}
{"score": 0.9851194024085999, "chain_id": "3TVRFO09GKEZMW1RCBEL13HWC3QLXA_1_3"}
{"score": 0.14652501046657562, "chain_id": "3TVRFO09GKEZMW1RCBEL13HWC3QLXA_1_4"}
{"score": 0.6242241859436035, "chain_id": "3TVRFO09GKEZMW1RCBEL13HWC3QLXA_1_5"}
{"score": 0.1524198353290558, "chain_id": "3TVRFO09GKEZMW1RCBEL13HWC3QLXA_1_6"}
{"score": 0.09156589955091476, "chain_id": "3TVRFO09GKEZMW1RCBEL13HWC3QLXA_1_7"}
{"score": 0.34786373376846313, "chain_id": "3TVRFO09GKEZMW1RCBEL13HWC3QLXA_1_8"}
{"score": 0.056408412754535675, "chain_id": "3TVRFO09GKEZMW1RCBEL13HWC3QLXA_1_9"}
{"score": 0.033079907298088074, "chain_id": "3TVRFO09GKEZMW1RCBEL13HWC3QLXA_1_10"}
{"score": 0.7006527185440063, "chain_id": "33IZTU6J810MQ9WHWKBMDPVR4ZBXSH_1_1"}
{"score": 0.6207588315010071, "chain_id": "33IZTU6J810MQ9WHWKBMDPVR4ZBXSH_1_2"}
{"score": 0.07465410977602005, "chain_id": "33IZTU6J810MQ9WHWKBMDPVR4ZBXSH_1_3"}
{"score": 0.2454422116279602, "chain_id": "33IZTU6J810MQ9WHWKBMDPVR4ZBXSH_1_4"}
{"score": 0.06170574203133583, "chain_id": "33IZTU6J810MQ9WHWKBMDPVR4ZBXSH_1_5"}
{"score": 0.026933113113045692, "chain_id": "33IZTU6J810MQ9WHWKBMDPVR4ZBXSH_1_6"}
{"score": 0.23270666599273682, "chain_id": "33IZTU6J810MQ9WHWKBMDPVR4ZBXSH_1_7"}
{"score": 0.41866275668144226, "chain_id": "33IZTU6J810MQ9WHWKBMDPVR4ZBXSH_1_8"}
{"score": 0.33193129301071167, "chain_id": "33IZTU6J810MQ9WHWKBMDPVR4ZBXSH_1_9"}
{"score": 0.11718444526195526, "chain_id": "33IZTU6J810MQ9WHWKBMDPVR4ZBXSH_1_10"}
{"score": 0.39441636204719543, "chain_id": "3I02618YA05XWDMUZYW5YDRCLSBUPI_1_5"}
{"score": 0.15510833263397217, "chain_id": "3I02618YA05XWDMUZYW5YDRCLSBUPI_1_6"}
{"score": 0.1818496733903885, "chain_id": "3I02618YA05XWDMUZYW5YDRCLSBUPI_1_1"}
{"score": 0.5197967290878296, "chain_id": "3I02618YA05XWDMUZYW5YDRCLSBUPI_1_2"}
{"score": 0.20344193279743195, "chain_id": "3I02618YA05XWDMUZYW5YDRCLSBUPI_1_3"}
{"score": 0.22665069997310638, "chain_id": "3I02618YA05XWDMUZYW5YDRCLSBUPI_1_4"}
{"score": 0.5980919599533081, "chain_id": "3I02618YA05XWDMUZYW5YDRCLSBUPI_1_7"}
{"score": 0.7358593940734863, "chain_id": "3I02618YA05XWDMUZYW5YDRCLSBUPI_1_8"}
{"score": 0.2644544243812561, "chain_id": "3I02618YA05XWDMUZYW5YDRCLSBUPI_1_9"}
{"score": 0.8485924005508423, "chain_id": "3I02618YA05XWDMUZYW5YDRCLSBUPI_1_10"}
{"score": 0.9908158779144287, "chain_id": "3LYA37P8IQMHPNG8MFA2X6DP3BEBKQ_1_1"}
{"score": 0.9927347898483276, "chain_id": "3LYA37P8IQMHPNG8MFA2X6DP3BEBKQ_1_3"}
{"score": 0.9932489395141602, "chain_id": "3LYA37P8IQMHPNG8MFA2X6DP3BEBKQ_1_5"}
{"score": 0.9435710310935974, "chain_id": "3LYA37P8IQMHPNG8MFA2X6DP3BEBKQ_1_6"}
{"score": 0.92963707447052, "chain_id": "3LYA37P8IQMHPNG8MFA2X6DP3BEBKQ_1_7"}
{"score": 0.504949152469635, "chain_id": "3LYA37P8IQMHPNG8MFA2X6DP3BEBKQ_1_8"}
{"score": 0.9858382344245911, "chain_id": "3LYA37P8IQMHPNG8MFA2X6DP3BEBKQ_1_2"}
{"score": 0.7426580786705017, "chain_id": "3LYA37P8IQMHPNG8MFA2X6DP3BEBKQ_1_4"}
{"score": 0.8530617356300354, "chain_id": "3LYA37P8IQMHPNG8MFA2X6DP3BEBKQ_1_9"}
{"score": 0.49291688203811646, "chain_id": "3LYA37P8IQMHPNG8MFA2X6DP3BEBKQ_1_10"}
{"score": 0.19191870093345642, "chain_id": "39JEC7537U0EF32QZJK4AZUO122VC0_1_1"}
{"score": 0.2857207953929901, "chain_id": "39JEC7537U0EF32QZJK4AZUO122VC0_1_2"}
{"score": 0.15541501343250275, "chain_id": "39JEC7537U0EF32QZJK4AZUO122VC0_1_3"}
{"score": 0.33030563592910767, "chain_id": "39JEC7537U0EF32QZJK4AZUO122VC0_1_4"}
{"score": 0.2543206810951233, "chain_id": "39JEC7537U0EF32QZJK4AZUO122VC0_1_5"}
{"score": 0.32910481095314026, "chain_id": "39JEC7537U0EF32QZJK4AZUO122VC0_1_6"}
{"score": 0.5079098343849182, "chain_id": "39JEC7537U0EF32QZJK4AZUO122VC0_1_7"}
{"score": 0.031055709347128868, "chain_id": "39JEC7537U0EF32QZJK4AZUO122VC0_1_8"}
{"score": 0.14894254505634308, "chain_id": "39JEC7537U0EF32QZJK4AZUO122VC0_1_9"}
{"score": 0.2584543824195862, "chain_id": "39JEC7537U0EF32QZJK4AZUO122VC0_1_10"}
{"score": 0.9912336468696594, "chain_id": "3OLF68YTN901QRJ2FQJ9MI1EELWFAU_1_1"}
{"score": 0.9921283721923828, "chain_id": "3OLF68YTN901QRJ2FQJ9MI1EELWFAU_1_2"}
{"score": 0.9858461022377014, "chain_id": "3OLF68YTN901QRJ2FQJ9MI1EELWFAU_1_4"}
{"score": 0.9934812784194946, "chain_id": "3OLF68YTN901QRJ2FQJ9MI1EELWFAU_1_5"}
{"score": 0.9943599104881287, "chain_id": "3OLF68YTN901QRJ2FQJ9MI1EELWFAU_1_8"}
{"score": 0.9930372834205627, "chain_id": "3OLF68YTN901QRJ2FQJ9MI1EELWFAU_1_9"}
{"score": 0.993408203125, "chain_id": "3OLF68YTN901QRJ2FQJ9MI1EELWFAU_1_3"}
{"score": 0.9346349835395813, "chain_id": "3OLF68YTN901QRJ2FQJ9MI1EELWFAU_1_6"}
{"score": 0.9849306344985962, "chain_id": "3OLF68YTN901QRJ2FQJ9MI1EELWFAU_1_7"}
{"score": 0.23848745226860046, "chain_id": "3OLF68YTN901QRJ2FQJ9MI1EELWFAU_1_10"}
{"score": 0.0320216566324234, "chain_id": "3DBQWDE4Y6XG8DK2IIB5MCU1J02N5N_1_1"}
{"score": 0.031387850642204285, "chain_id": "3DBQWDE4Y6XG8DK2IIB5MCU1J02N5N_1_2"}
{"score": 0.11880000680685043, "chain_id": "3DBQWDE4Y6XG8DK2IIB5MCU1J02N5N_1_3"}
{"score": 0.11946448683738708, "chain_id": "3DBQWDE4Y6XG8DK2IIB5MCU1J02N5N_1_4"}
{"score": 0.05741419643163681, "chain_id": "3DBQWDE4Y6XG8DK2IIB5MCU1J02N5N_1_5"}
{"score": 0.03234333172440529, "chain_id": "3DBQWDE4Y6XG8DK2IIB5MCU1J02N5N_1_6"}
{"score": 0.033893853425979614, "chain_id": "3DBQWDE4Y6XG8DK2IIB5MCU1J02N5N_1_7"}
{"score": 0.035317376255989075, "chain_id": "3DBQWDE4Y6XG8DK2IIB5MCU1J02N5N_1_8"}
{"score": 0.026049189269542694, "chain_id": "3DBQWDE4Y6XG8DK2IIB5MCU1J02N5N_1_9"}
{"score": 0.05326378718018532, "chain_id": "3DBQWDE4Y6XG8DK2IIB5MCU1J02N5N_1_10"}
{"score": 0.2739390432834625, "chain_id": "37UEWGM5HT72ZTBBA2QAS6MUALVR1G_1_4"}
{"score": 0.06295442581176758, "chain_id": "37UEWGM5HT72ZTBBA2QAS6MUALVR1G_1_9"}
{"score": 0.4184707999229431, "chain_id": "37UEWGM5HT72ZTBBA2QAS6MUALVR1G_1_1"}
{"score": 0.1369866281747818, "chain_id": "37UEWGM5HT72ZTBBA2QAS6MUALVR1G_1_2"}
{"score": 0.4640263020992279, "chain_id": "37UEWGM5HT72ZTBBA2QAS6MUALVR1G_1_3"}
{"score": 0.023762525990605354, "chain_id": "37UEWGM5HT72ZTBBA2QAS6MUALVR1G_1_5"}
{"score": 0.026367323473095894, "chain_id": "37UEWGM5HT72ZTBBA2QAS6MUALVR1G_1_6"}
{"score": 0.0237861517816782, "chain_id": "37UEWGM5HT72ZTBBA2QAS6MUALVR1G_1_7"}
{"score": 0.024826809763908386, "chain_id": "37UEWGM5HT72ZTBBA2QAS6MUALVR1G_1_8"}
{"score": 0.034630246460437775, "chain_id": "37UEWGM5HT72ZTBBA2QAS6MUALVR1G_1_10"}
{"score": 0.8997067809104919, "chain_id": "3YDTZAI2WXFVYN9DZQUXKDBKIFM411_1_1"}
{"score": 0.9913232326507568, "chain_id": "3YDTZAI2WXFVYN9DZQUXKDBKIFM411_1_2"}
{"score": 0.03305363282561302, "chain_id": "3YDTZAI2WXFVYN9DZQUXKDBKIFM411_1_4"}
{"score": 0.9895508885383606, "chain_id": "3YDTZAI2WXFVYN9DZQUXKDBKIFM411_1_6"}
{"score": 0.025618722662329674, "chain_id": "3YDTZAI2WXFVYN9DZQUXKDBKIFM411_1_3"}
{"score": 0.8877043128013611, "chain_id": "3YDTZAI2WXFVYN9DZQUXKDBKIFM411_1_5"}
{"score": 0.07540274411439896, "chain_id": "3YDTZAI2WXFVYN9DZQUXKDBKIFM411_1_7"}
{"score": 0.04719667509198189, "chain_id": "3YDTZAI2WXFVYN9DZQUXKDBKIFM411_1_8"}
{"score": 0.021762201562523842, "chain_id": "3YDTZAI2WXFVYN9DZQUXKDBKIFM411_1_9"}
{"score": 0.058050476014614105, "chain_id": "3YDTZAI2WXFVYN9DZQUXKDBKIFM411_1_10"}
{"score": 0.924132227897644, "chain_id": "3WQQ9FUS6ATXUME7DQDZ714Y85XB88_1_1"}
{"score": 0.991758406162262, "chain_id": "3WQQ9FUS6ATXUME7DQDZ714Y85XB88_1_2"}
{"score": 0.025344405323266983, "chain_id": "3WQQ9FUS6ATXUME7DQDZ714Y85XB88_1_3"}
{"score": 0.030144983902573586, "chain_id": "3WQQ9FUS6ATXUME7DQDZ714Y85XB88_1_4"}
{"score": 0.9026458859443665, "chain_id": "3WQQ9FUS6ATXUME7DQDZ714Y85XB88_1_5"}
{"score": 0.990179181098938, "chain_id": "3WQQ9FUS6ATXUME7DQDZ714Y85XB88_1_6"}
{"score": 0.07156546413898468, "chain_id": "3WQQ9FUS6ATXUME7DQDZ714Y85XB88_1_7"}
{"score": 0.042846955358982086, "chain_id": "3WQQ9FUS6ATXUME7DQDZ714Y85XB88_1_8"}
{"score": 0.020176446065306664, "chain_id": "3WQQ9FUS6ATXUME7DQDZ714Y85XB88_1_9"}
{"score": 0.05385642126202583, "chain_id": "3WQQ9FUS6ATXUME7DQDZ714Y85XB88_1_10"}
{"score": 0.8997067809104919, "chain_id": "3VA45EW49NMZ2GJVIA96YBHP1YO1OW_1_1"}
{"score": 0.9913232326507568, "chain_id": "3VA45EW49NMZ2GJVIA96YBHP1YO1OW_1_2"}
{"score": 0.9895508885383606, "chain_id": "3VA45EW49NMZ2GJVIA96YBHP1YO1OW_1_6"}
{"score": 0.025618722662329674, "chain_id": "3VA45EW49NMZ2GJVIA96YBHP1YO1OW_1_3"}
{"score": 0.03305363282561302, "chain_id": "3VA45EW49NMZ2GJVIA96YBHP1YO1OW_1_4"}
{"score": 0.8877043128013611, "chain_id": "3VA45EW49NMZ2GJVIA96YBHP1YO1OW_1_5"}
{"score": 0.07540274411439896, "chain_id": "3VA45EW49NMZ2GJVIA96YBHP1YO1OW_1_7"}
{"score": 0.04719667509198189, "chain_id": "3VA45EW49NMZ2GJVIA96YBHP1YO1OW_1_8"}
{"score": 0.021762201562523842, "chain_id": "3VA45EW49NMZ2GJVIA96YBHP1YO1OW_1_9"}
{"score": 0.058050476014614105, "chain_id": "3VA45EW49NMZ2GJVIA96YBHP1YO1OW_1_10"}
{"score": 0.9206468462944031, "chain_id": "317HQ483I7RSK1FHP2UZBLY6SUANIJ_1_1"}
{"score": 0.47758254408836365, "chain_id": "317HQ483I7RSK1FHP2UZBLY6SUANIJ_1_2"}
{"score": 0.021104391664266586, "chain_id": "317HQ483I7RSK1FHP2UZBLY6SUANIJ_1_3"}
{"score": 0.7963007688522339, "chain_id": "317HQ483I7RSK1FHP2UZBLY6SUANIJ_1_4"}
{"score": 0.9855055212974548, "chain_id": "317HQ483I7RSK1FHP2UZBLY6SUANIJ_1_5"}
{"score": 0.0218205563724041, "chain_id": "317HQ483I7RSK1FHP2UZBLY6SUANIJ_1_6"}
{"score": 0.026637928560376167, "chain_id": "317HQ483I7RSK1FHP2UZBLY6SUANIJ_1_7"}
{"score": 0.011374303139746189, "chain_id": "317HQ483I7RSK1FHP2UZBLY6SUANIJ_1_8"}
{"score": 0.06141361966729164, "chain_id": "317HQ483I7RSK1FHP2UZBLY6SUANIJ_1_9"}
{"score": 0.0493791401386261, "chain_id": "317HQ483I7RSK1FHP2UZBLY6SUANIJ_1_10"}
{"score": 0.10732138901948929, "chain_id": "3IKZ72A5B4F8AADROUOE8OLEDLEFNJ_1_2"}
{"score": 0.9366734623908997, "chain_id": "3IKZ72A5B4F8AADROUOE8OLEDLEFNJ_1_3"}
{"score": 0.20296047627925873, "chain_id": "3IKZ72A5B4F8AADROUOE8OLEDLEFNJ_1_1"}
{"score": 0.016115427017211914, "chain_id": "3IKZ72A5B4F8AADROUOE8OLEDLEFNJ_1_4"}
{"score": 0.02141634374856949, "chain_id": "3IKZ72A5B4F8AADROUOE8OLEDLEFNJ_1_5"}
{"score": 0.138248473405838, "chain_id": "3IKZ72A5B4F8AADROUOE8OLEDLEFNJ_1_6"}
{"score": 0.09238224476575851, "chain_id": "3IKZ72A5B4F8AADROUOE8OLEDLEFNJ_1_7"}
{"score": 0.024311283603310585, "chain_id": "3IKZ72A5B4F8AADROUOE8OLEDLEFNJ_1_8"}
{"score": 0.04024963825941086, "chain_id": "3IKZ72A5B4F8AADROUOE8OLEDLEFNJ_1_9"}
{"score": 0.04714168235659599, "chain_id": "3IKZ72A5B4F8AADROUOE8OLEDLEFNJ_1_10"}
{"score": 0.9278419613838196, "chain_id": "31IBVUNM9SYLIFM0QLA5I5FRZO4FVO_1_1"}
{"score": 0.4196975529193878, "chain_id": "31IBVUNM9SYLIFM0QLA5I5FRZO4FVO_1_4"}
{"score": 0.9739547371864319, "chain_id": "31IBVUNM9SYLIFM0QLA5I5FRZO4FVO_1_2"}
{"score": 0.09089501202106476, "chain_id": "31IBVUNM9SYLIFM0QLA5I5FRZO4FVO_1_3"}
{"score": 0.12227396667003632, "chain_id": "31IBVUNM9SYLIFM0QLA5I5FRZO4FVO_1_5"}
{"score": 0.08763636648654938, "chain_id": "31IBVUNM9SYLIFM0QLA5I5FRZO4FVO_1_6"}
{"score": 0.0201403945684433, "chain_id": "31IBVUNM9SYLIFM0QLA5I5FRZO4FVO_1_7"}
{"score": 0.06054585054516792, "chain_id": "31IBVUNM9SYLIFM0QLA5I5FRZO4FVO_1_8"}
{"score": 0.048337846994400024, "chain_id": "31IBVUNM9SYLIFM0QLA5I5FRZO4FVO_1_9"}
{"score": 0.01590302586555481, "chain_id": "31IBVUNM9SYLIFM0QLA5I5FRZO4FVO_1_10"}
{"score": 0.06602673977613449, "chain_id": "30ZX6P7VF8USQQAUL1HFVYA8V6S2JO_1_1"}
{"score": 0.8316957950592041, "chain_id": "30ZX6P7VF8USQQAUL1HFVYA8V6S2JO_1_2"}
{"score": 0.4434759318828583, "chain_id": "30ZX6P7VF8USQQAUL1HFVYA8V6S2JO_1_3"}
{"score": 0.10365650057792664, "chain_id": "30ZX6P7VF8USQQAUL1HFVYA8V6S2JO_1_4"}
{"score": 0.05823842063546181, "chain_id": "30ZX6P7VF8USQQAUL1HFVYA8V6S2JO_1_5"}
{"score": 0.07740908861160278, "chain_id": "30ZX6P7VF8USQQAUL1HFVYA8V6S2JO_1_6"}
{"score": 0.04328101500868797, "chain_id": "30ZX6P7VF8USQQAUL1HFVYA8V6S2JO_1_7"}
{"score": 0.014526484534144402, "chain_id": "30ZX6P7VF8USQQAUL1HFVYA8V6S2JO_1_8"}
{"score": 0.02724079228937626, "chain_id": "30ZX6P7VF8USQQAUL1HFVYA8V6S2JO_1_9"}
{"score": 0.025884998962283134, "chain_id": "30ZX6P7VF8USQQAUL1HFVYA8V6S2JO_1_10"}
{"score": 0.9290180206298828, "chain_id": "3GM6G9ZBKNWCBXAS7DE3CDBFU9LTMJ_1_1"}
{"score": 0.7934303283691406, "chain_id": "3GM6G9ZBKNWCBXAS7DE3CDBFU9LTMJ_1_2"}
{"score": 0.9414936900138855, "chain_id": "3GM6G9ZBKNWCBXAS7DE3CDBFU9LTMJ_1_3"}
{"score": 0.7862796783447266, "chain_id": "3GM6G9ZBKNWCBXAS7DE3CDBFU9LTMJ_1_5"}
{"score": 0.025790229439735413, "chain_id": "3GM6G9ZBKNWCBXAS7DE3CDBFU9LTMJ_1_4"}
{"score": 0.17037615180015564, "chain_id": "3GM6G9ZBKNWCBXAS7DE3CDBFU9LTMJ_1_6"}
{"score": 0.5376143455505371, "chain_id": "3GM6G9ZBKNWCBXAS7DE3CDBFU9LTMJ_1_7"}
{"score": 0.8895858526229858, "chain_id": "3GM6G9ZBKNWCBXAS7DE3CDBFU9LTMJ_1_8"}
{"score": 0.07478835433721542, "chain_id": "3GM6G9ZBKNWCBXAS7DE3CDBFU9LTMJ_1_9"}
{"score": 0.0932215079665184, "chain_id": "3GM6G9ZBKNWCBXAS7DE3CDBFU9LTMJ_1_10"}
{"score": 0.987305223941803, "chain_id": "3V5Q80FXIXQH5C85IGPSFRTJZAR23Z_1_1"}
{"score": 0.368375688791275, "chain_id": "3V5Q80FXIXQH5C85IGPSFRTJZAR23Z_1_2"}
{"score": 0.47209104895591736, "chain_id": "3V5Q80FXIXQH5C85IGPSFRTJZAR23Z_1_3"}
{"score": 0.7132428884506226, "chain_id": "3V5Q80FXIXQH5C85IGPSFRTJZAR23Z_1_4"}
{"score": 0.42470547556877136, "chain_id": "3V5Q80FXIXQH5C85IGPSFRTJZAR23Z_1_5"}
{"score": 0.3600069284439087, "chain_id": "3V5Q80FXIXQH5C85IGPSFRTJZAR23Z_1_6"}
{"score": 0.4190509021282196, "chain_id": "3V5Q80FXIXQH5C85IGPSFRTJZAR23Z_1_7"}
{"score": 0.4454507827758789, "chain_id": "3V5Q80FXIXQH5C85IGPSFRTJZAR23Z_1_8"}
{"score": 0.9388449788093567, "chain_id": "3V5Q80FXIXQH5C85IGPSFRTJZAR23Z_1_9"}
{"score": 0.30255022644996643, "chain_id": "3V5Q80FXIXQH5C85IGPSFRTJZAR23Z_1_10"}
{"score": 0.3958849608898163, "chain_id": "3TXWC2NHNZPWPDEJT458XM99VVBS9G_1_7"}
{"score": 0.36603522300720215, "chain_id": "3TXWC2NHNZPWPDEJT458XM99VVBS9G_1_9"}
{"score": 0.8227921724319458, "chain_id": "3TXWC2NHNZPWPDEJT458XM99VVBS9G_1_10"}
{"score": 0.04568244889378548, "chain_id": "3TXWC2NHNZPWPDEJT458XM99VVBS9G_1_1"}
{"score": 0.07468406856060028, "chain_id": "3TXWC2NHNZPWPDEJT458XM99VVBS9G_1_2"}
{"score": 0.2503489851951599, "chain_id": "3TXWC2NHNZPWPDEJT458XM99VVBS9G_1_3"}
{"score": 0.06316410005092621, "chain_id": "3TXWC2NHNZPWPDEJT458XM99VVBS9G_1_4"}
{"score": 0.3258571922779083, "chain_id": "3TXWC2NHNZPWPDEJT458XM99VVBS9G_1_5"}
{"score": 0.28732994198799133, "chain_id": "3TXWC2NHNZPWPDEJT458XM99VVBS9G_1_6"}
{"score": 0.0833570584654808, "chain_id": "3TXWC2NHNZPWPDEJT458XM99VVBS9G_1_8"}
{"score": 0.10672290623188019, "chain_id": "3V0Z7YWSIYZ1HLAO2QVYYML2O2N2VD_1_1"}
{"score": 0.12987761199474335, "chain_id": "3V0Z7YWSIYZ1HLAO2QVYYML2O2N2VD_1_2"}
{"score": 0.06371332705020905, "chain_id": "3V0Z7YWSIYZ1HLAO2QVYYML2O2N2VD_1_3"}
{"score": 0.07075466215610504, "chain_id": "3V0Z7YWSIYZ1HLAO2QVYYML2O2N2VD_1_4"}
{"score": 0.3104087710380554, "chain_id": "3V0Z7YWSIYZ1HLAO2QVYYML2O2N2VD_1_5"}
{"score": 0.5281332731246948, "chain_id": "3V0Z7YWSIYZ1HLAO2QVYYML2O2N2VD_1_6"}
{"score": 0.43182703852653503, "chain_id": "3V0Z7YWSIYZ1HLAO2QVYYML2O2N2VD_1_7"}
{"score": 0.5119349360466003, "chain_id": "3V0Z7YWSIYZ1HLAO2QVYYML2O2N2VD_1_8"}
{"score": 0.10055939108133316, "chain_id": "3V0Z7YWSIYZ1HLAO2QVYYML2O2N2VD_1_9"}
{"score": 0.08452124893665314, "chain_id": "3V0Z7YWSIYZ1HLAO2QVYYML2O2N2VD_1_10"}
{"score": 0.9885385632514954, "chain_id": "3R9WASFE2ZF2RZRARIZ83BSNMG7ZF2_1_1"}
{"score": 0.9845596551895142, "chain_id": "3R9WASFE2ZF2RZRARIZ83BSNMG7ZF2_1_2"}
{"score": 0.9800254106521606, "chain_id": "3R9WASFE2ZF2RZRARIZ83BSNMG7ZF2_1_4"}
{"score": 0.97098708152771, "chain_id": "3R9WASFE2ZF2RZRARIZ83BSNMG7ZF2_1_5"}
{"score": 0.6869246959686279, "chain_id": "3R9WASFE2ZF2RZRARIZ83BSNMG7ZF2_1_7"}
{"score": 0.848770260810852, "chain_id": "3R9WASFE2ZF2RZRARIZ83BSNMG7ZF2_1_10"}
{"score": 0.965081512928009, "chain_id": "3R9WASFE2ZF2RZRARIZ83BSNMG7ZF2_1_3"}
{"score": 0.029524529352784157, "chain_id": "3R9WASFE2ZF2RZRARIZ83BSNMG7ZF2_1_6"}
{"score": 0.16819708049297333, "chain_id": "3R9WASFE2ZF2RZRARIZ83BSNMG7ZF2_1_8"}
{"score": 0.6000270843505859, "chain_id": "3R9WASFE2ZF2RZRARIZ83BSNMG7ZF2_1_9"}
{"score": 0.8465388417243958, "chain_id": "3CP1TO84PT0KJRV9WZDLUOR3CE425Q_1_5"}
{"score": 0.3535071015357971, "chain_id": "3CP1TO84PT0KJRV9WZDLUOR3CE425Q_1_8"}
{"score": 0.12718945741653442, "chain_id": "3CP1TO84PT0KJRV9WZDLUOR3CE425Q_1_1"}
{"score": 0.08334921300411224, "chain_id": "3CP1TO84PT0KJRV9WZDLUOR3CE425Q_1_2"}
{"score": 0.9627270102500916, "chain_id": "3CP1TO84PT0KJRV9WZDLUOR3CE425Q_1_3"}
{"score": 0.7160776853561401, "chain_id": "3CP1TO84PT0KJRV9WZDLUOR3CE425Q_1_4"}
{"score": 0.7899752259254456, "chain_id": "3CP1TO84PT0KJRV9WZDLUOR3CE425Q_1_6"}
{"score": 0.32939523458480835, "chain_id": "3CP1TO84PT0KJRV9WZDLUOR3CE425Q_1_7"}
{"score": 0.06963679939508438, "chain_id": "3CP1TO84PT0KJRV9WZDLUOR3CE425Q_1_9"}
{"score": 0.0615462101995945, "chain_id": "3CP1TO84PT0KJRV9WZDLUOR3CE425Q_1_10"}
{"score": 0.20551429688930511, "chain_id": "3ZAZR5XV01HVON700G97V57KPELCZW_1_3"}
{"score": 0.5000284910202026, "chain_id": "3ZAZR5XV01HVON700G97V57KPELCZW_1_4"}
{"score": 0.1805732548236847, "chain_id": "3ZAZR5XV01HVON700G97V57KPELCZW_1_5"}
{"score": 0.39626559615135193, "chain_id": "3ZAZR5XV01HVON700G97V57KPELCZW_1_1"}
{"score": 0.39241862297058105, "chain_id": "3ZAZR5XV01HVON700G97V57KPELCZW_1_2"}
{"score": 0.20801270008087158, "chain_id": "3ZAZR5XV01HVON700G97V57KPELCZW_1_6"}
{"score": 0.08857738971710205, "chain_id": "3ZAZR5XV01HVON700G97V57KPELCZW_1_7"}
{"score": 0.12519589066505432, "chain_id": "3ZAZR5XV01HVON700G97V57KPELCZW_1_8"}
{"score": 0.49884623289108276, "chain_id": "3ZAZR5XV01HVON700G97V57KPELCZW_1_9"}
{"score": 0.6955850720405579, "chain_id": "3ZAZR5XV01HVON700G97V57KPELCZW_1_10"}
{"score": 0.5058310627937317, "chain_id": "3LO69W1SU3CO0A61N1EHDHH18YYGLM_1_1"}
{"score": 0.08914405852556229, "chain_id": "3LO69W1SU3CO0A61N1EHDHH18YYGLM_1_5"}
{"score": 0.5832900404930115, "chain_id": "3LO69W1SU3CO0A61N1EHDHH18YYGLM_1_2"}
{"score": 0.3156430125236511, "chain_id": "3LO69W1SU3CO0A61N1EHDHH18YYGLM_1_3"}
{"score": 0.18954232335090637, "chain_id": "3LO69W1SU3CO0A61N1EHDHH18YYGLM_1_4"}
{"score": 0.45220690965652466, "chain_id": "3LO69W1SU3CO0A61N1EHDHH18YYGLM_1_6"}
{"score": 0.025060994550585747, "chain_id": "3LO69W1SU3CO0A61N1EHDHH18YYGLM_1_7"}
{"score": 0.35557088255882263, "chain_id": "3LO69W1SU3CO0A61N1EHDHH18YYGLM_1_8"}
{"score": 0.521947979927063, "chain_id": "3LO69W1SU3CO0A61N1EHDHH18YYGLM_1_9"}
{"score": 0.03179319575428963, "chain_id": "3LO69W1SU3CO0A61N1EHDHH18YYGLM_1_10"}
{"score": 0.3515814244747162, "chain_id": "3KYQYYSHYV6TUBZ7Y3T1ZDIYP2IODD_1_1"}
{"score": 0.06633555889129639, "chain_id": "3KYQYYSHYV6TUBZ7Y3T1ZDIYP2IODD_1_2"}
{"score": 0.3754913806915283, "chain_id": "3KYQYYSHYV6TUBZ7Y3T1ZDIYP2IODD_1_3"}
{"score": 0.1338878571987152, "chain_id": "3KYQYYSHYV6TUBZ7Y3T1ZDIYP2IODD_1_4"}
{"score": 0.1329999566078186, "chain_id": "3KYQYYSHYV6TUBZ7Y3T1ZDIYP2IODD_1_5"}
{"score": 0.15215833485126495, "chain_id": "3KYQYYSHYV6TUBZ7Y3T1ZDIYP2IODD_1_6"}
{"score": 0.04995296150445938, "chain_id": "3KYQYYSHYV6TUBZ7Y3T1ZDIYP2IODD_1_7"}
{"score": 0.10842160135507584, "chain_id": "3KYQYYSHYV6TUBZ7Y3T1ZDIYP2IODD_1_8"}
{"score": 0.06544142216444016, "chain_id": "3KYQYYSHYV6TUBZ7Y3T1ZDIYP2IODD_1_9"}
{"score": 0.02670017071068287, "chain_id": "3KYQYYSHYV6TUBZ7Y3T1ZDIYP2IODD_1_10"}
{"score": 0.14412999153137207, "chain_id": "3QXNC7EIPIUWO4U7K2MONG3Q18S90L_1_1"}
{"score": 0.18295909464359283, "chain_id": "3QXNC7EIPIUWO4U7K2MONG3Q18S90L_1_2"}
{"score": 0.6689203977584839, "chain_id": "3QXNC7EIPIUWO4U7K2MONG3Q18S90L_1_3"}
{"score": 0.30761370062828064, "chain_id": "3QXNC7EIPIUWO4U7K2MONG3Q18S90L_1_4"}
{"score": 0.15772469341754913, "chain_id": "3QXNC7EIPIUWO4U7K2MONG3Q18S90L_1_5"}
{"score": 0.6581021547317505, "chain_id": "3QXNC7EIPIUWO4U7K2MONG3Q18S90L_1_6"}
{"score": 0.3094988763332367, "chain_id": "3QXNC7EIPIUWO4U7K2MONG3Q18S90L_1_7"}
{"score": 0.06369046121835709, "chain_id": "3QXNC7EIPIUWO4U7K2MONG3Q18S90L_1_8"}
{"score": 0.33415454626083374, "chain_id": "3QXNC7EIPIUWO4U7K2MONG3Q18S90L_1_9"}
{"score": 0.16017886996269226, "chain_id": "3QXNC7EIPIUWO4U7K2MONG3Q18S90L_1_10"}
{"score": 0.9851886034011841, "chain_id": "3TK8OJTYM1KX9SBU4O6AUZTV1UHVPX_1_4"}
{"score": 0.9516305327415466, "chain_id": "3TK8OJTYM1KX9SBU4O6AUZTV1UHVPX_1_6"}
{"score": 0.5682913064956665, "chain_id": "3TK8OJTYM1KX9SBU4O6AUZTV1UHVPX_1_8"}
{"score": 0.8523452281951904, "chain_id": "3TK8OJTYM1KX9SBU4O6AUZTV1UHVPX_1_9"}
{"score": 0.4723266661167145, "chain_id": "3TK8OJTYM1KX9SBU4O6AUZTV1UHVPX_1_10"}
{"score": 0.9627131819725037, "chain_id": "3TK8OJTYM1KX9SBU4O6AUZTV1UHVPX_1_1"}
{"score": 0.5879288911819458, "chain_id": "3TK8OJTYM1KX9SBU4O6AUZTV1UHVPX_1_2"}
{"score": 0.9787657856941223, "chain_id": "3TK8OJTYM1KX9SBU4O6AUZTV1UHVPX_1_3"}
{"score": 0.9559016823768616, "chain_id": "3TK8OJTYM1KX9SBU4O6AUZTV1UHVPX_1_5"}
{"score": 0.05319760739803314, "chain_id": "3TK8OJTYM1KX9SBU4O6AUZTV1UHVPX_1_7"}
{"score": 0.9826524257659912, "chain_id": "3VJ40NV2QIM0B0V4KTTG4H0KPCBTOF_1_1"}
{"score": 0.985463559627533, "chain_id": "3VJ40NV2QIM0B0V4KTTG4H0KPCBTOF_1_2"}
{"score": 0.05735810846090317, "chain_id": "3VJ40NV2QIM0B0V4KTTG4H0KPCBTOF_1_3"}
{"score": 0.07454711943864822, "chain_id": "3VJ40NV2QIM0B0V4KTTG4H0KPCBTOF_1_4"}
{"score": 0.08458852022886276, "chain_id": "3VJ40NV2QIM0B0V4KTTG4H0KPCBTOF_1_5"}
{"score": 0.06432401388883591, "chain_id": "3VJ40NV2QIM0B0V4KTTG4H0KPCBTOF_1_6"}
{"score": 0.7555290460586548, "chain_id": "3VJ40NV2QIM0B0V4KTTG4H0KPCBTOF_1_7"}
{"score": 0.7734763622283936, "chain_id": "3VJ40NV2QIM0B0V4KTTG4H0KPCBTOF_1_8"}
{"score": 0.8871147632598877, "chain_id": "3VJ40NV2QIM0B0V4KTTG4H0KPCBTOF_1_9"}
{"score": 0.8550819754600525, "chain_id": "3VJ40NV2QIM0B0V4KTTG4H0KPCBTOF_1_10"}
{"score": 0.043707359582185745, "chain_id": "3Z4AIRP3C6CMWPXNJ1W2HO8I93FX1H_1_1"}
{"score": 0.03130752220749855, "chain_id": "3Z4AIRP3C6CMWPXNJ1W2HO8I93FX1H_1_2"}
{"score": 0.01699080318212509, "chain_id": "3Z4AIRP3C6CMWPXNJ1W2HO8I93FX1H_1_3"}
{"score": 0.05234465003013611, "chain_id": "3Z4AIRP3C6CMWPXNJ1W2HO8I93FX1H_1_4"}
{"score": 0.10521847754716873, "chain_id": "3Z4AIRP3C6CMWPXNJ1W2HO8I93FX1H_1_5"}
{"score": 0.04195462912321091, "chain_id": "3Z4AIRP3C6CMWPXNJ1W2HO8I93FX1H_1_6"}
{"score": 0.013586437329649925, "chain_id": "3Z4AIRP3C6CMWPXNJ1W2HO8I93FX1H_1_7"}
{"score": 0.01938318833708763, "chain_id": "3Z4AIRP3C6CMWPXNJ1W2HO8I93FX1H_1_8"}
{"score": 0.1093851625919342, "chain_id": "36ZN444YTRXA2MFTQHUCQAYBE3JOIX_1_9"}
{"score": 0.048042621463537216, "chain_id": "36ZN444YTRXA2MFTQHUCQAYBE3JOIX_1_1"}
{"score": 0.01609884761273861, "chain_id": "36ZN444YTRXA2MFTQHUCQAYBE3JOIX_1_2"}
{"score": 0.033259328454732895, "chain_id": "36ZN444YTRXA2MFTQHUCQAYBE3JOIX_1_3"}
{"score": 0.024381551891565323, "chain_id": "36ZN444YTRXA2MFTQHUCQAYBE3JOIX_1_4"}
{"score": 0.02839028090238571, "chain_id": "36ZN444YTRXA2MFTQHUCQAYBE3JOIX_1_5"}
{"score": 0.03330624848604202, "chain_id": "36ZN444YTRXA2MFTQHUCQAYBE3JOIX_1_6"}
{"score": 0.03928065299987793, "chain_id": "36ZN444YTRXA2MFTQHUCQAYBE3JOIX_1_7"}
{"score": 0.0187983475625515, "chain_id": "36ZN444YTRXA2MFTQHUCQAYBE3JOIX_1_8"}
{"score": 0.06148061156272888, "chain_id": "36ZN444YTRXA2MFTQHUCQAYBE3JOIX_1_10"}
{"score": 0.06650079786777496, "chain_id": "3FE2ERCCZX72J82X3CKWZ7ZN2ESOPJ_1_1"}
{"score": 0.014495001174509525, "chain_id": "3FE2ERCCZX72J82X3CKWZ7ZN2ESOPJ_1_2"}
{"score": 0.019783541560173035, "chain_id": "3FE2ERCCZX72J82X3CKWZ7ZN2ESOPJ_1_3"}
{"score": 0.015451934188604355, "chain_id": "3FE2ERCCZX72J82X3CKWZ7ZN2ESOPJ_1_4"}
{"score": 0.12255995720624924, "chain_id": "3FE2ERCCZX72J82X3CKWZ7ZN2ESOPJ_1_5"}
{"score": 0.017815707251429558, "chain_id": "3FE2ERCCZX72J82X3CKWZ7ZN2ESOPJ_1_6"}
{"score": 0.012211249209940434, "chain_id": "3FE2ERCCZX72J82X3CKWZ7ZN2ESOPJ_1_7"}
{"score": 0.014907967299222946, "chain_id": "3FE2ERCCZX72J82X3CKWZ7ZN2ESOPJ_1_8"}
{"score": 0.013155657798051834, "chain_id": "3FE2ERCCZX72J82X3CKWZ7ZN2ESOPJ_1_9"}
{"score": 0.013566805981099606, "chain_id": "3FE2ERCCZX72J82X3CKWZ7ZN2ESOPJ_1_10"}
{"score": 0.9219794869422913, "chain_id": "3300DTYQT2G17TQN9BWPU0VJ3EZQEB_1_1"}
{"score": 0.06277632713317871, "chain_id": "3300DTYQT2G17TQN9BWPU0VJ3EZQEB_1_4"}
{"score": 0.04173741862177849, "chain_id": "3300DTYQT2G17TQN9BWPU0VJ3EZQEB_1_2"}
{"score": 0.015329399146139622, "chain_id": "3300DTYQT2G17TQN9BWPU0VJ3EZQEB_1_3"}
{"score": 0.019067998975515366, "chain_id": "3300DTYQT2G17TQN9BWPU0VJ3EZQEB_1_5"}
{"score": 0.020428618416190147, "chain_id": "3300DTYQT2G17TQN9BWPU0VJ3EZQEB_1_6"}
{"score": 0.028147991746664047, "chain_id": "3300DTYQT2G17TQN9BWPU0VJ3EZQEB_1_7"}
{"score": 0.025210067629814148, "chain_id": "3300DTYQT2G17TQN9BWPU0VJ3EZQEB_1_8"}
{"score": 0.015144134871661663, "chain_id": "3300DTYQT2G17TQN9BWPU0VJ3EZQEB_1_9"}
{"score": 0.020034193992614746, "chain_id": "3300DTYQT2G17TQN9BWPU0VJ3EZQEB_1_10"}
{"score": 0.10401622951030731, "chain_id": "3VHHR074H3G57HV0UYAN74487O77LM_1_1"}
{"score": 0.30089786648750305, "chain_id": "3VHHR074H3G57HV0UYAN74487O77LM_1_2"}
{"score": 0.1442192643880844, "chain_id": "3VHHR074H3G57HV0UYAN74487O77LM_1_3"}
{"score": 0.6799950003623962, "chain_id": "3VHHR074H3G57HV0UYAN74487O77LM_1_4"}
{"score": 0.20149381458759308, "chain_id": "3VHHR074H3G57HV0UYAN74487O77LM_1_5"}
{"score": 0.1354808211326599, "chain_id": "3VHHR074H3G57HV0UYAN74487O77LM_1_6"}
{"score": 0.024992236867547035, "chain_id": "3VHHR074H3G57HV0UYAN74487O77LM_1_7"}
{"score": 0.07836493849754333, "chain_id": "3VHHR074H3G57HV0UYAN74487O77LM_1_8"}
{"score": 0.37044987082481384, "chain_id": "3VHHR074H3G57HV0UYAN74487O77LM_1_9"}
{"score": 0.024419989436864853, "chain_id": "3VHHR074H3G57HV0UYAN74487O77LM_1_10"}
{"score": 0.011173826642334461, "chain_id": "38JBBYETQO9UIO3PBEPCRXUELFAE4A_1_1"}
{"score": 0.2931155562400818, "chain_id": "38JBBYETQO9UIO3PBEPCRXUELFAE4A_1_2"}
{"score": 0.05516725033521652, "chain_id": "38JBBYETQO9UIO3PBEPCRXUELFAE4A_1_3"}
{"score": 0.11365234851837158, "chain_id": "38JBBYETQO9UIO3PBEPCRXUELFAE4A_1_4"}
{"score": 0.03716769069433212, "chain_id": "38JBBYETQO9UIO3PBEPCRXUELFAE4A_1_5"}
{"score": 0.6423879265785217, "chain_id": "38JBBYETQO9UIO3PBEPCRXUELFAE4A_1_6"}
{"score": 0.014776034280657768, "chain_id": "38JBBYETQO9UIO3PBEPCRXUELFAE4A_1_7"}
{"score": 0.7167470455169678, "chain_id": "38JBBYETQO9UIO3PBEPCRXUELFAE4A_1_8"}
{"score": 0.5008384585380554, "chain_id": "38JBBYETQO9UIO3PBEPCRXUELFAE4A_1_9"}
{"score": 0.4278374910354614, "chain_id": "38JBBYETQO9UIO3PBEPCRXUELFAE4A_1_10"}
{"score": 0.21678997576236725, "chain_id": "34Z02EIMISCF8J3LI8R5EG427HIT08_1_1"}
{"score": 0.15817666053771973, "chain_id": "34Z02EIMISCF8J3LI8R5EG427HIT08_1_2"}
{"score": 0.018164528533816338, "chain_id": "34Z02EIMISCF8J3LI8R5EG427HIT08_1_3"}
{"score": 0.1963764876127243, "chain_id": "34Z02EIMISCF8J3LI8R5EG427HIT08_1_4"}
{"score": 0.04869338497519493, "chain_id": "34Z02EIMISCF8J3LI8R5EG427HIT08_1_5"}
{"score": 0.037998225539922714, "chain_id": "34Z02EIMISCF8J3LI8R5EG427HIT08_1_6"}
{"score": 0.0654330626130104, "chain_id": "34Z02EIMISCF8J3LI8R5EG427HIT08_1_7"}
{"score": 0.05110878869891167, "chain_id": "34Z02EIMISCF8J3LI8R5EG427HIT08_1_8"}
{"score": 0.021991493180394173, "chain_id": "34Z02EIMISCF8J3LI8R5EG427HIT08_1_9"}
{"score": 0.012979069724678993, "chain_id": "34Z02EIMISCF8J3LI8R5EG427HIT08_1_10"}
{"score": 0.03339776024222374, "chain_id": "3LS2AMNW5FPNJK3C3PZLZCPXLP5OQ9_1_1"}
{"score": 0.6976304650306702, "chain_id": "3LS2AMNW5FPNJK3C3PZLZCPXLP5OQ9_1_2"}
{"score": 0.9554104804992676, "chain_id": "3LS2AMNW5FPNJK3C3PZLZCPXLP5OQ9_1_3"}
{"score": 0.9757919311523438, "chain_id": "3LS2AMNW5FPNJK3C3PZLZCPXLP5OQ9_1_4"}
{"score": 0.7946673631668091, "chain_id": "3LS2AMNW5FPNJK3C3PZLZCPXLP5OQ9_1_5"}
{"score": 0.030118871480226517, "chain_id": "3LS2AMNW5FPNJK3C3PZLZCPXLP5OQ9_1_6"}
{"score": 0.03924992308020592, "chain_id": "3LS2AMNW5FPNJK3C3PZLZCPXLP5OQ9_1_7"}
{"score": 0.019459104165434837, "chain_id": "3LS2AMNW5FPNJK3C3PZLZCPXLP5OQ9_1_8"}
{"score": 0.023676395416259766, "chain_id": "3LS2AMNW5FPNJK3C3PZLZCPXLP5OQ9_1_9"}
{"score": 0.015927424654364586, "chain_id": "3LS2AMNW5FPNJK3C3PZLZCPXLP5OQ9_1_10"}
{"score": 0.9061035513877869, "chain_id": "3FK0YFF9PZFAEC8QQ0F90RIDYC6VV5_1_3"}
{"score": 0.6475719213485718, "chain_id": "3FK0YFF9PZFAEC8QQ0F90RIDYC6VV5_1_1"}
{"score": 0.8971995115280151, "chain_id": "3FK0YFF9PZFAEC8QQ0F90RIDYC6VV5_1_2"}
{"score": 0.8158969879150391, "chain_id": "3FK0YFF9PZFAEC8QQ0F90RIDYC6VV5_1_4"}
{"score": 0.043872177600860596, "chain_id": "3FK0YFF9PZFAEC8QQ0F90RIDYC6VV5_1_5"}
{"score": 0.018540313467383385, "chain_id": "3FK0YFF9PZFAEC8QQ0F90RIDYC6VV5_1_6"}
{"score": 0.044568125158548355, "chain_id": "3FK0YFF9PZFAEC8QQ0F90RIDYC6VV5_1_7"}
{"score": 0.058590829372406006, "chain_id": "3FK0YFF9PZFAEC8QQ0F90RIDYC6VV5_1_8"}
{"score": 0.02208712510764599, "chain_id": "3FK0YFF9PZFAEC8QQ0F90RIDYC6VV5_1_9"}
{"score": 0.01715671643614769, "chain_id": "3FK0YFF9PZFAEC8QQ0F90RIDYC6VV5_1_10"}
{"score": 0.05657956376671791, "chain_id": "34T446B1C0DYM21AWMWFP64YKB60CH_1_1"}
{"score": 0.027397368103265762, "chain_id": "34T446B1C0DYM21AWMWFP64YKB60CH_1_2"}
{"score": 0.06821084767580032, "chain_id": "34T446B1C0DYM21AWMWFP64YKB60CH_1_3"}
{"score": 0.024693720042705536, "chain_id": "34T446B1C0DYM21AWMWFP64YKB60CH_1_4"}
{"score": 0.021101204678416252, "chain_id": "34T446B1C0DYM21AWMWFP64YKB60CH_1_5"}
{"score": 0.8355462551116943, "chain_id": "34T446B1C0DYM21AWMWFP64YKB60CH_1_6"}
{"score": 0.10812168568372726, "chain_id": "34T446B1C0DYM21AWMWFP64YKB60CH_1_7"}
{"score": 0.04498578608036041, "chain_id": "34T446B1C0DYM21AWMWFP64YKB60CH_1_8"}
{"score": 0.07745622843503952, "chain_id": "34T446B1C0DYM21AWMWFP64YKB60CH_1_9"}
{"score": 0.031198104843497276, "chain_id": "34T446B1C0DYM21AWMWFP64YKB60CH_1_10"}
{"score": 0.18841660022735596, "chain_id": "3KB8R4ZV1E6CN1KPWOPNZELW97NGBL_1_3"}
{"score": 0.16303598880767822, "chain_id": "3KB8R4ZV1E6CN1KPWOPNZELW97NGBL_1_1"}
{"score": 0.03828192502260208, "chain_id": "3KB8R4ZV1E6CN1KPWOPNZELW97NGBL_1_2"}
{"score": 0.08202600479125977, "chain_id": "3KB8R4ZV1E6CN1KPWOPNZELW97NGBL_1_4"}
{"score": 0.09020529687404633, "chain_id": "3KB8R4ZV1E6CN1KPWOPNZELW97NGBL_1_5"}
{"score": 0.03925536945462227, "chain_id": "3KB8R4ZV1E6CN1KPWOPNZELW97NGBL_1_6"}
{"score": 0.12944869697093964, "chain_id": "3KB8R4ZV1E6CN1KPWOPNZELW97NGBL_1_7"}
{"score": 0.07989498227834702, "chain_id": "3KB8R4ZV1E6CN1KPWOPNZELW97NGBL_1_8"}
{"score": 0.04559154435992241, "chain_id": "3KB8R4ZV1E6CN1KPWOPNZELW97NGBL_1_9"}
{"score": 0.09966009855270386, "chain_id": "3KB8R4ZV1E6CN1KPWOPNZELW97NGBL_1_10"}
{"score": 0.02252502739429474, "chain_id": "3WMINLGALB2UNFZSOOT8ECGBTZKACO_1_1"}
{"score": 0.02225193940103054, "chain_id": "3WMINLGALB2UNFZSOOT8ECGBTZKACO_1_2"}
{"score": 0.02728070318698883, "chain_id": "3WMINLGALB2UNFZSOOT8ECGBTZKACO_1_3"}
{"score": 0.020387372002005577, "chain_id": "3WMINLGALB2UNFZSOOT8ECGBTZKACO_1_4"}
{"score": 0.03686683252453804, "chain_id": "3WMINLGALB2UNFZSOOT8ECGBTZKACO_1_5"}
{"score": 0.013882538303732872, "chain_id": "3WMINLGALB2UNFZSOOT8ECGBTZKACO_1_6"}
{"score": 0.016720000654459, "chain_id": "3WMINLGALB2UNFZSOOT8ECGBTZKACO_1_7"}
{"score": 0.022230740636587143, "chain_id": "3WMINLGALB2UNFZSOOT8ECGBTZKACO_1_8"}
{"score": 0.057648662477731705, "chain_id": "3WMINLGALB2UNFZSOOT8ECGBTZKACO_1_9"}
{"score": 0.025028148666024208, "chain_id": "3WMINLGALB2UNFZSOOT8ECGBTZKACO_1_10"}
{"score": 0.0981735959649086, "chain_id": "3U8YCDAGXPF2G3BT14XA9BTFNNS0Q0_1_1"}
{"score": 0.01989079639315605, "chain_id": "3U8YCDAGXPF2G3BT14XA9BTFNNS0Q0_1_2"}
{"score": 0.01761375367641449, "chain_id": "3U8YCDAGXPF2G3BT14XA9BTFNNS0Q0_1_3"}
{"score": 0.17305050790309906, "chain_id": "3U8YCDAGXPF2G3BT14XA9BTFNNS0Q0_1_4"}
{"score": 0.8349778056144714, "chain_id": "3U8YCDAGXPF2G3BT14XA9BTFNNS0Q0_1_5"}
{"score": 0.01580335758626461, "chain_id": "3U8YCDAGXPF2G3BT14XA9BTFNNS0Q0_1_6"}
{"score": 0.02487715519964695, "chain_id": "3U8YCDAGXPF2G3BT14XA9BTFNNS0Q0_1_7"}
{"score": 0.015513977967202663, "chain_id": "3U8YCDAGXPF2G3BT14XA9BTFNNS0Q0_1_8"}
{"score": 0.010946708731353283, "chain_id": "3U8YCDAGXPF2G3BT14XA9BTFNNS0Q0_1_9"}
{"score": 0.5511491298675537, "chain_id": "3U8YCDAGXPF2G3BT14XA9BTFNNS0Q0_1_10"}
{"score": 0.9801046848297119, "chain_id": "3U5JL4WY5K83OOU66JF4FMFLLHY4XY_1_1"}
{"score": 0.902777373790741, "chain_id": "3U5JL4WY5K83OOU66JF4FMFLLHY4XY_1_2"}
{"score": 0.2279651015996933, "chain_id": "3U5JL4WY5K83OOU66JF4FMFLLHY4XY_1_3"}
{"score": 0.8159409165382385, "chain_id": "3U5JL4WY5K83OOU66JF4FMFLLHY4XY_1_4"}
{"score": 0.08228273689746857, "chain_id": "3U5JL4WY5K83OOU66JF4FMFLLHY4XY_1_5"}
{"score": 0.02088942751288414, "chain_id": "3U5JL4WY5K83OOU66JF4FMFLLHY4XY_1_6"}
{"score": 0.03823690488934517, "chain_id": "3U5JL4WY5K83OOU66JF4FMFLLHY4XY_1_7"}
{"score": 0.2666260004043579, "chain_id": "3U5JL4WY5K83OOU66JF4FMFLLHY4XY_1_8"}
{"score": 0.016091879457235336, "chain_id": "3U5JL4WY5K83OOU66JF4FMFLLHY4XY_1_9"}
{"score": 0.16175656020641327, "chain_id": "3U5JL4WY5K83OOU66JF4FMFLLHY4XY_1_10"}
{"score": 0.9811722636222839, "chain_id": "32N49TQG3GHQMO5SF5OD4440XJHAVD_1_1"}
{"score": 0.9441102147102356, "chain_id": "32N49TQG3GHQMO5SF5OD4440XJHAVD_1_2"}
{"score": 0.4301004707813263, "chain_id": "32N49TQG3GHQMO5SF5OD4440XJHAVD_1_3"}
{"score": 0.7252141237258911, "chain_id": "32N49TQG3GHQMO5SF5OD4440XJHAVD_1_4"}
{"score": 0.6944835186004639, "chain_id": "32N49TQG3GHQMO5SF5OD4440XJHAVD_1_5"}
{"score": 0.08772557973861694, "chain_id": "32N49TQG3GHQMO5SF5OD4440XJHAVD_1_6"}
{"score": 0.133445143699646, "chain_id": "32N49TQG3GHQMO5SF5OD4440XJHAVD_1_7"}
{"score": 0.041044287383556366, "chain_id": "32N49TQG3GHQMO5SF5OD4440XJHAVD_1_8"}
{"score": 0.07658170163631439, "chain_id": "32N49TQG3GHQMO5SF5OD4440XJHAVD_1_9"}
{"score": 0.24697719514369965, "chain_id": "32N49TQG3GHQMO5SF5OD4440XJHAVD_1_10"}
{"score": 0.9557174444198608, "chain_id": "3GGAI1SQEVXVPG8HLRJDN3BB6WECMU_1_2"}
{"score": 0.5436840057373047, "chain_id": "3GGAI1SQEVXVPG8HLRJDN3BB6WECMU_1_3"}
{"score": 0.7107616066932678, "chain_id": "3GGAI1SQEVXVPG8HLRJDN3BB6WECMU_1_1"}
{"score": 0.8926669359207153, "chain_id": "3GGAI1SQEVXVPG8HLRJDN3BB6WECMU_1_4"}
{"score": 0.037813540548086166, "chain_id": "3GGAI1SQEVXVPG8HLRJDN3BB6WECMU_1_5"}
{"score": 0.04161790758371353, "chain_id": "3GGAI1SQEVXVPG8HLRJDN3BB6WECMU_1_6"}
{"score": 0.3720884323120117, "chain_id": "3GGAI1SQEVXVPG8HLRJDN3BB6WECMU_1_7"}
{"score": 0.7552136778831482, "chain_id": "3GGAI1SQEVXVPG8HLRJDN3BB6WECMU_1_8"}
{"score": 0.06258253008127213, "chain_id": "3GGAI1SQEVXVPG8HLRJDN3BB6WECMU_1_9"}
{"score": 0.11803384870290756, "chain_id": "3GGAI1SQEVXVPG8HLRJDN3BB6WECMU_1_10"}
{"score": 0.37491852045059204, "chain_id": "3QFUFYSY9YEMO23L6P9I9FFEJ95F4H_1_1"}
{"score": 0.574634313583374, "chain_id": "3QFUFYSY9YEMO23L6P9I9FFEJ95F4H_1_2"}
{"score": 0.10008551925420761, "chain_id": "3QFUFYSY9YEMO23L6P9I9FFEJ95F4H_1_3"}
{"score": 0.06474816054105759, "chain_id": "3QFUFYSY9YEMO23L6P9I9FFEJ95F4H_1_4"}
{"score": 0.29439881443977356, "chain_id": "3QFUFYSY9YEMO23L6P9I9FFEJ95F4H_1_5"}
{"score": 0.7160665392875671, "chain_id": "3QFUFYSY9YEMO23L6P9I9FFEJ95F4H_1_6"}
{"score": 0.5839754939079285, "chain_id": "3QFUFYSY9YEMO23L6P9I9FFEJ95F4H_1_7"}
{"score": 0.48307719826698303, "chain_id": "3QFUFYSY9YEMO23L6P9I9FFEJ95F4H_1_8"}
{"score": 0.3504844903945923, "chain_id": "3QFUFYSY9YEMO23L6P9I9FFEJ95F4H_1_9"}
{"score": 0.8311302065849304, "chain_id": "3QFUFYSY9YEMO23L6P9I9FFEJ95F4H_1_10"}
{"score": 0.6124433279037476, "chain_id": "33PPUNGG384ZUPWJIDZ2K066NYOZRP_1_1"}
{"score": 0.15199847519397736, "chain_id": "33PPUNGG384ZUPWJIDZ2K066NYOZRP_1_2"}
{"score": 0.07977047562599182, "chain_id": "33PPUNGG384ZUPWJIDZ2K066NYOZRP_1_3"}
{"score": 0.08704067021608353, "chain_id": "33PPUNGG384ZUPWJIDZ2K066NYOZRP_1_4"}
{"score": 0.0572381317615509, "chain_id": "33PPUNGG384ZUPWJIDZ2K066NYOZRP_1_5"}
{"score": 0.2217738777399063, "chain_id": "33PPUNGG384ZUPWJIDZ2K066NYOZRP_1_6"}
{"score": 0.2122350037097931, "chain_id": "33PPUNGG384ZUPWJIDZ2K066NYOZRP_1_7"}
{"score": 0.1754588782787323, "chain_id": "33PPUNGG384ZUPWJIDZ2K066NYOZRP_1_8"}
{"score": 0.2804451286792755, "chain_id": "33PPUNGG384ZUPWJIDZ2K066NYOZRP_1_9"}
{"score": 0.3814610540866852, "chain_id": "33PPUNGG384ZUPWJIDZ2K066NYOZRP_1_10"}
{"score": 0.9442885518074036, "chain_id": "3MX2NQ3YC9TLK7Y6KOYEKELZNHG5X1_1_1"}
{"score": 0.9222685694694519, "chain_id": "3MX2NQ3YC9TLK7Y6KOYEKELZNHG5X1_1_3"}
{"score": 0.7393324375152588, "chain_id": "3MX2NQ3YC9TLK7Y6KOYEKELZNHG5X1_1_2"}
{"score": 0.8028088808059692, "chain_id": "3MX2NQ3YC9TLK7Y6KOYEKELZNHG5X1_1_4"}
{"score": 0.012067971751093864, "chain_id": "3MX2NQ3YC9TLK7Y6KOYEKELZNHG5X1_1_5"}
{"score": 0.5990684628486633, "chain_id": "3MX2NQ3YC9TLK7Y6KOYEKELZNHG5X1_1_6"}
{"score": 0.21520189940929413, "chain_id": "3MX2NQ3YC9TLK7Y6KOYEKELZNHG5X1_1_7"}
{"score": 0.0738210380077362, "chain_id": "3MX2NQ3YC9TLK7Y6KOYEKELZNHG5X1_1_8"}
{"score": 0.13869334757328033, "chain_id": "3MX2NQ3YC9TLK7Y6KOYEKELZNHG5X1_1_9"}
{"score": 0.028810057789087296, "chain_id": "3MX2NQ3YC9TLK7Y6KOYEKELZNHG5X1_1_10"}
{"score": 0.9702327251434326, "chain_id": "379J5II41OFQGWAAH6OTDEWPT2PELB_1_1"}
{"score": 0.9570897221565247, "chain_id": "379J5II41OFQGWAAH6OTDEWPT2PELB_1_2"}
{"score": 0.20730231702327728, "chain_id": "379J5II41OFQGWAAH6OTDEWPT2PELB_1_7"}
{"score": 0.2810819745063782, "chain_id": "379J5II41OFQGWAAH6OTDEWPT2PELB_1_8"}
{"score": 0.48915398120880127, "chain_id": "379J5II41OFQGWAAH6OTDEWPT2PELB_1_3"}
{"score": 0.08924774825572968, "chain_id": "379J5II41OFQGWAAH6OTDEWPT2PELB_1_4"}
{"score": 0.28567802906036377, "chain_id": "379J5II41OFQGWAAH6OTDEWPT2PELB_1_5"}
{"score": 0.13675442337989807, "chain_id": "379J5II41OFQGWAAH6OTDEWPT2PELB_1_6"}
{"score": 0.029645659029483795, "chain_id": "379J5II41OFQGWAAH6OTDEWPT2PELB_1_9"}
{"score": 0.023010892793536186, "chain_id": "379J5II41OFQGWAAH6OTDEWPT2PELB_1_10"}
{"score": 0.03652810677886009, "chain_id": "3I2PTA7R3TT4TTIX5X7SSV8OL0CQKO_1_1"}
{"score": 0.4423617720603943, "chain_id": "3I2PTA7R3TT4TTIX5X7SSV8OL0CQKO_1_2"}
{"score": 0.08256098628044128, "chain_id": "3I2PTA7R3TT4TTIX5X7SSV8OL0CQKO_1_3"}
{"score": 0.49008357524871826, "chain_id": "3I2PTA7R3TT4TTIX5X7SSV8OL0CQKO_1_4"}
{"score": 0.045736778527498245, "chain_id": "3I2PTA7R3TT4TTIX5X7SSV8OL0CQKO_1_5"}
{"score": 0.0241134874522686, "chain_id": "3I2PTA7R3TT4TTIX5X7SSV8OL0CQKO_1_6"}
{"score": 0.2481575757265091, "chain_id": "3I2PTA7R3TT4TTIX5X7SSV8OL0CQKO_1_7"}
{"score": 0.06069185212254524, "chain_id": "3I2PTA7R3TT4TTIX5X7SSV8OL0CQKO_1_8"}
{"score": 0.09304334968328476, "chain_id": "3I2PTA7R3TT4TTIX5X7SSV8OL0CQKO_1_9"}
{"score": 0.03675992414355278, "chain_id": "3I2PTA7R3TT4TTIX5X7SSV8OL0CQKO_1_10"}
{"score": 0.04992837458848953, "chain_id": "3QBD8R3Z21IGUFGE5SS8W9OS9G3O4T_1_1"}
{"score": 0.06485694646835327, "chain_id": "3QBD8R3Z21IGUFGE5SS8W9OS9G3O4T_1_2"}
{"score": 0.0568777471780777, "chain_id": "3QBD8R3Z21IGUFGE5SS8W9OS9G3O4T_1_3"}
{"score": 0.04957747086882591, "chain_id": "3QBD8R3Z21IGUFGE5SS8W9OS9G3O4T_1_4"}
{"score": 0.029648978263139725, "chain_id": "3QBD8R3Z21IGUFGE5SS8W9OS9G3O4T_1_5"}
{"score": 0.025286998599767685, "chain_id": "3QBD8R3Z21IGUFGE5SS8W9OS9G3O4T_1_6"}
{"score": 0.48162102699279785, "chain_id": "3QBD8R3Z21IGUFGE5SS8W9OS9G3O4T_1_7"}
{"score": 0.8289049863815308, "chain_id": "3QBD8R3Z21IGUFGE5SS8W9OS9G3O4T_1_8"}
{"score": 0.01369687169790268, "chain_id": "3QBD8R3Z21IGUFGE5SS8W9OS9G3O4T_1_9"}
{"score": 0.013297866098582745, "chain_id": "3QBD8R3Z21IGUFGE5SS8W9OS9G3O4T_1_10"}
{"score": 0.9638262987136841, "chain_id": "3P1L2B7AD1OCSNNZBKRPIQQ3V2KOL8_1_1"}
{"score": 0.9300445914268494, "chain_id": "3P1L2B7AD1OCSNNZBKRPIQQ3V2KOL8_1_2"}
{"score": 0.36356648802757263, "chain_id": "3P1L2B7AD1OCSNNZBKRPIQQ3V2KOL8_1_3"}
{"score": 0.9324454069137573, "chain_id": "3P1L2B7AD1OCSNNZBKRPIQQ3V2KOL8_1_4"}
{"score": 0.29491713643074036, "chain_id": "3P1L2B7AD1OCSNNZBKRPIQQ3V2KOL8_1_5"}
{"score": 0.35630589723587036, "chain_id": "3P1L2B7AD1OCSNNZBKRPIQQ3V2KOL8_1_6"}
{"score": 0.059914812445640564, "chain_id": "3P1L2B7AD1OCSNNZBKRPIQQ3V2KOL8_1_7"}
{"score": 0.15095160901546478, "chain_id": "3P1L2B7AD1OCSNNZBKRPIQQ3V2KOL8_1_8"}
{"score": 0.21249574422836304, "chain_id": "3P1L2B7AD1OCSNNZBKRPIQQ3V2KOL8_1_9"}
{"score": 0.10646486282348633, "chain_id": "3P1L2B7AD1OCSNNZBKRPIQQ3V2KOL8_1_10"}
{"score": 0.6260544061660767, "chain_id": "3HUTX6F6VUM6R11R1E9K3URUR37O21_1_1"}
{"score": 0.051288433372974396, "chain_id": "3HUTX6F6VUM6R11R1E9K3URUR37O21_1_2"}
{"score": 0.09334775060415268, "chain_id": "3HUTX6F6VUM6R11R1E9K3URUR37O21_1_3"}
{"score": 0.14934870600700378, "chain_id": "3HUTX6F6VUM6R11R1E9K3URUR37O21_1_4"}
{"score": 0.19350869953632355, "chain_id": "3HUTX6F6VUM6R11R1E9K3URUR37O21_1_5"}
{"score": 0.40014174580574036, "chain_id": "3HUTX6F6VUM6R11R1E9K3URUR37O21_1_6"}
{"score": 0.3476385772228241, "chain_id": "3HUTX6F6VUM6R11R1E9K3URUR37O21_1_7"}
{"score": 0.3940916359424591, "chain_id": "3HUTX6F6VUM6R11R1E9K3URUR37O21_1_8"}
{"score": 0.17960546910762787, "chain_id": "3HUTX6F6VUM6R11R1E9K3URUR37O21_1_9"}
{"score": 0.06682023406028748, "chain_id": "3HUTX6F6VUM6R11R1E9K3URUR37O21_1_10"}
{"score": 0.9733428359031677, "chain_id": "3TK8OJTYM1KX9SBU4O6AUZTV2IMPV9_1_4"}
{"score": 0.13051094114780426, "chain_id": "3TK8OJTYM1KX9SBU4O6AUZTV2IMPV9_1_1"}
{"score": 0.04810451716184616, "chain_id": "3TK8OJTYM1KX9SBU4O6AUZTV2IMPV9_1_2"}
{"score": 0.028842482715845108, "chain_id": "3TK8OJTYM1KX9SBU4O6AUZTV2IMPV9_1_3"}
{"score": 0.05851084366440773, "chain_id": "3TK8OJTYM1KX9SBU4O6AUZTV2IMPV9_1_5"}
{"score": 0.01876188814640045, "chain_id": "3TK8OJTYM1KX9SBU4O6AUZTV2IMPV9_1_6"}
{"score": 0.10882007330656052, "chain_id": "3TK8OJTYM1KX9SBU4O6AUZTV2IMPV9_1_7"}
{"score": 0.019618552178144455, "chain_id": "3TK8OJTYM1KX9SBU4O6AUZTV2IMPV9_1_8"}
{"score": 0.027538854628801346, "chain_id": "3TK8OJTYM1KX9SBU4O6AUZTV2IMPV9_1_9"}
{"score": 0.050183895975351334, "chain_id": "3TK8OJTYM1KX9SBU4O6AUZTV2IMPV9_1_10"}
{"score": 0.9413265585899353, "chain_id": "32N49TQG3GHQMO5SF5OD44404J5VAT_1_1"}
{"score": 0.6278968453407288, "chain_id": "32N49TQG3GHQMO5SF5OD44404J5VAT_1_2"}
{"score": 0.029164355248212814, "chain_id": "32N49TQG3GHQMO5SF5OD44404J5VAT_1_9"}
{"score": 0.0345325842499733, "chain_id": "32N49TQG3GHQMO5SF5OD44404J5VAT_1_3"}
{"score": 0.14665505290031433, "chain_id": "32N49TQG3GHQMO5SF5OD44404J5VAT_1_4"}
{"score": 0.13644711673259735, "chain_id": "32N49TQG3GHQMO5SF5OD44404J5VAT_1_5"}
{"score": 0.07580530643463135, "chain_id": "32N49TQG3GHQMO5SF5OD44404J5VAT_1_6"}
{"score": 0.8029125332832336, "chain_id": "32N49TQG3GHQMO5SF5OD44404J5VAT_1_7"}
{"score": 0.15409933030605316, "chain_id": "32N49TQG3GHQMO5SF5OD44404J5VAT_1_8"}
{"score": 0.03152432292699814, "chain_id": "32N49TQG3GHQMO5SF5OD44404J5VAT_1_10"}
{"score": 0.4914538264274597, "chain_id": "3E1QT0TDFP87HUSDJ05GTO8BZTN8IC_1_1"}
{"score": 0.09199605137109756, "chain_id": "3E1QT0TDFP87HUSDJ05GTO8BZTN8IC_1_2"}
{"score": 0.09199605137109756, "chain_id": "3E1QT0TDFP87HUSDJ05GTO8BZTN8IC_1_3"}
{"score": 0.5062057971954346, "chain_id": "3E1QT0TDFP87HUSDJ05GTO8BZTN8IC_1_4"}
{"score": 0.07050807774066925, "chain_id": "3E1QT0TDFP87HUSDJ05GTO8BZTN8IC_1_5"}
{"score": 0.0626477599143982, "chain_id": "3E1QT0TDFP87HUSDJ05GTO8BZTN8IC_1_6"}
{"score": 0.14568525552749634, "chain_id": "3E1QT0TDFP87HUSDJ05GTO8BZTN8IC_1_7"}
{"score": 0.7133998870849609, "chain_id": "3E1QT0TDFP87HUSDJ05GTO8BZTN8IC_1_8"}
{"score": 0.6326968669891357, "chain_id": "3E1QT0TDFP87HUSDJ05GTO8BZTN8IC_1_9"}
{"score": 0.16897250711917877, "chain_id": "3E1QT0TDFP87HUSDJ05GTO8BZTN8IC_1_10"}
{"score": 0.9785436987876892, "chain_id": "3HUTX6F6VUM6R11R1E9K3URUW242OF_1_1"}
{"score": 0.8690801858901978, "chain_id": "3HUTX6F6VUM6R11R1E9K3URUW242OF_1_2"}
{"score": 0.2151927798986435, "chain_id": "3HUTX6F6VUM6R11R1E9K3URUW242OF_1_3"}
{"score": 0.9726438522338867, "chain_id": "3HUTX6F6VUM6R11R1E9K3URUW242OF_1_4"}
{"score": 0.2810271382331848, "chain_id": "3HUTX6F6VUM6R11R1E9K3URUW242OF_1_5"}
{"score": 0.3284761309623718, "chain_id": "3HUTX6F6VUM6R11R1E9K3URUW242OF_1_8"}
{"score": 0.9300126433372498, "chain_id": "3HUTX6F6VUM6R11R1E9K3URUW242OF_1_6"}
{"score": 0.033453088253736496, "chain_id": "3HUTX6F6VUM6R11R1E9K3URUW242OF_1_7"}
{"score": 0.19927842915058136, "chain_id": "3HUTX6F6VUM6R11R1E9K3URUW242OF_1_9"}
{"score": 0.07250146567821503, "chain_id": "3HUTX6F6VUM6R11R1E9K3URUW242OF_1_10"}
{"score": 0.9372816681861877, "chain_id": "3E47SOBEYQV9TXIQ0CLLVA4USOVICQ_1_1"}
{"score": 0.920660674571991, "chain_id": "3E47SOBEYQV9TXIQ0CLLVA4USOVICQ_1_2"}
{"score": 0.6328613758087158, "chain_id": "3E47SOBEYQV9TXIQ0CLLVA4USOVICQ_1_3"}
{"score": 0.8925496339797974, "chain_id": "3E47SOBEYQV9TXIQ0CLLVA4USOVICQ_1_4"}
{"score": 0.8458051085472107, "chain_id": "3E47SOBEYQV9TXIQ0CLLVA4USOVICQ_1_6"}
{"score": 0.2688213288784027, "chain_id": "3E47SOBEYQV9TXIQ0CLLVA4USOVICQ_1_9"}
{"score": 0.8565037250518799, "chain_id": "3E47SOBEYQV9TXIQ0CLLVA4USOVICQ_1_10"}
{"score": 0.16903503239154816, "chain_id": "3E47SOBEYQV9TXIQ0CLLVA4USOVICQ_1_5"}
{"score": 0.052244238555431366, "chain_id": "3E47SOBEYQV9TXIQ0CLLVA4USOVICQ_1_7"}
{"score": 0.05473557114601135, "chain_id": "3E47SOBEYQV9TXIQ0CLLVA4USOVICQ_1_8"}
{"score": 0.9122827649116516, "chain_id": "3BQU611VFPJEKYIKKY5HGR4J4EM991_1_1"}
{"score": 0.9237146377563477, "chain_id": "3BQU611VFPJEKYIKKY5HGR4J4EM991_1_2"}
{"score": 0.04364433512091637, "chain_id": "3BQU611VFPJEKYIKKY5HGR4J4EM991_1_3"}
{"score": 0.038071103394031525, "chain_id": "3BQU611VFPJEKYIKKY5HGR4J4EM991_1_4"}
{"score": 0.027930209413170815, "chain_id": "3BQU611VFPJEKYIKKY5HGR4J4EM991_1_5"}
{"score": 0.029362428933382034, "chain_id": "3BQU611VFPJEKYIKKY5HGR4J4EM991_1_6"}
{"score": 0.03729819506406784, "chain_id": "3BQU611VFPJEKYIKKY5HGR4J4EM991_1_7"}
{"score": 0.0937233492732048, "chain_id": "3BQU611VFPJEKYIKKY5HGR4J4EM991_1_8"}
{"score": 0.031414128839969635, "chain_id": "3BQU611VFPJEKYIKKY5HGR4J4EM991_1_9"}
{"score": 0.04310606047511101, "chain_id": "3BQU611VFPJEKYIKKY5HGR4J4EM991_1_10"}
{"score": 0.13154636323451996, "chain_id": "3CTOC39K37PZCR70RDYARPRG690J7H_1_1"}
{"score": 0.033245909959077835, "chain_id": "3CTOC39K37PZCR70RDYARPRG690J7H_1_2"}
{"score": 0.12546423077583313, "chain_id": "3CTOC39K37PZCR70RDYARPRG690J7H_1_3"}
{"score": 0.06373145431280136, "chain_id": "3CTOC39K37PZCR70RDYARPRG690J7H_1_4"}
{"score": 0.8020957708358765, "chain_id": "3CTOC39K37PZCR70RDYARPRG690J7H_1_5"}
{"score": 0.8099273443222046, "chain_id": "3CTOC39K37PZCR70RDYARPRG690J7H_1_6"}
{"score": 0.312520295381546, "chain_id": "3CTOC39K37PZCR70RDYARPRG690J7H_1_7"}
{"score": 0.244354709982872, "chain_id": "3CTOC39K37PZCR70RDYARPRG690J7H_1_8"}
{"score": 0.08922423422336578, "chain_id": "3CTOC39K37PZCR70RDYARPRG690J7H_1_9"}
{"score": 0.04714424908161163, "chain_id": "3CTOC39K37PZCR70RDYARPRG690J7H_1_10"}
{"score": 0.9860212206840515, "chain_id": "38F5OAUN5NB3LLCA3DVPFCB14XF7HQ_1_1"}
{"score": 0.42981693148612976, "chain_id": "38F5OAUN5NB3LLCA3DVPFCB14XF7HQ_1_2"}
{"score": 0.7884041666984558, "chain_id": "38F5OAUN5NB3LLCA3DVPFCB14XF7HQ_1_3"}
{"score": 0.725070059299469, "chain_id": "38F5OAUN5NB3LLCA3DVPFCB14XF7HQ_1_4"}
{"score": 0.0514121875166893, "chain_id": "38F5OAUN5NB3LLCA3DVPFCB14XF7HQ_1_5"}
{"score": 0.3984871804714203, "chain_id": "38F5OAUN5NB3LLCA3DVPFCB14XF7HQ_1_6"}
{"score": 0.026861364021897316, "chain_id": "38F5OAUN5NB3LLCA3DVPFCB14XF7HQ_1_7"}
{"score": 0.17641489207744598, "chain_id": "38F5OAUN5NB3LLCA3DVPFCB14XF7HQ_1_8"}
{"score": 0.0528733916580677, "chain_id": "38F5OAUN5NB3LLCA3DVPFCB14XF7HQ_1_9"}
{"score": 0.03839084878563881, "chain_id": "38F5OAUN5NB3LLCA3DVPFCB14XF7HQ_1_10"}
{"score": 0.9793986082077026, "chain_id": "3IOEN3P9S7I9DADRIENCHBVYSXK61C_1_1"}
{"score": 0.6550308465957642, "chain_id": "3IOEN3P9S7I9DADRIENCHBVYSXK61C_1_2"}
{"score": 0.6256632804870605, "chain_id": "3IOEN3P9S7I9DADRIENCHBVYSXK61C_1_3"}
{"score": 0.3166775405406952, "chain_id": "3IOEN3P9S7I9DADRIENCHBVYSXK61C_1_4"}
{"score": 0.19763332605361938, "chain_id": "3IOEN3P9S7I9DADRIENCHBVYSXK61C_1_5"}
{"score": 0.0368405319750309, "chain_id": "3IOEN3P9S7I9DADRIENCHBVYSXK61C_1_6"}
{"score": 0.09972836822271347, "chain_id": "3IOEN3P9S7I9DADRIENCHBVYSXK61C_1_7"}
{"score": 0.05708976462483406, "chain_id": "3IOEN3P9S7I9DADRIENCHBVYSXK61C_1_8"}
{"score": 0.015471656806766987, "chain_id": "3IOEN3P9S7I9DADRIENCHBVYSXK61C_1_9"}
{"score": 0.29672306776046753, "chain_id": "3IOEN3P9S7I9DADRIENCHBVYSXK61C_1_10"}
{"score": 0.17820051312446594, "chain_id": "32ZKVD547FMBTP8119I3GKWN4KKB39_1_1"}
{"score": 0.02862040512263775, "chain_id": "32ZKVD547FMBTP8119I3GKWN4KKB39_1_2"}
{"score": 0.5427024364471436, "chain_id": "32ZKVD547FMBTP8119I3GKWN4KKB39_1_3"}
{"score": 0.03753683716058731, "chain_id": "32ZKVD547FMBTP8119I3GKWN4KKB39_1_4"}
{"score": 0.02266225405037403, "chain_id": "32ZKVD547FMBTP8119I3GKWN4KKB39_1_5"}
{"score": 0.39440596103668213, "chain_id": "32ZKVD547FMBTP8119I3GKWN4KKB39_1_6"}
{"score": 0.04193280264735222, "chain_id": "32ZKVD547FMBTP8119I3GKWN4KKB39_1_7"}
{"score": 0.16941598057746887, "chain_id": "32ZKVD547FMBTP8119I3GKWN4KKB39_1_8"}
{"score": 0.06460995972156525, "chain_id": "32ZKVD547FMBTP8119I3GKWN4KKB39_1_9"}
{"score": 0.13471679389476776, "chain_id": "32ZKVD547FMBTP8119I3GKWN4KKB39_1_10"}
{"score": 0.16854095458984375, "chain_id": "3EQHHY4HQSRAYL3GVEYAWSL4Y7H5GG_1_1"}
{"score": 0.01442934200167656, "chain_id": "3EQHHY4HQSRAYL3GVEYAWSL4Y7H5GG_1_2"}
{"score": 0.0427875742316246, "chain_id": "3EQHHY4HQSRAYL3GVEYAWSL4Y7H5GG_1_3"}
{"score": 0.12291258573532104, "chain_id": "3EQHHY4HQSRAYL3GVEYAWSL4Y7H5GG_1_4"}
{"score": 0.06866514682769775, "chain_id": "3EQHHY4HQSRAYL3GVEYAWSL4Y7H5GG_1_5"}
{"score": 0.06003596633672714, "chain_id": "3EQHHY4HQSRAYL3GVEYAWSL4Y7H5GG_1_6"}
{"score": 0.3087683916091919, "chain_id": "3EQHHY4HQSRAYL3GVEYAWSL4Y7H5GG_1_7"}
{"score": 0.038922738283872604, "chain_id": "3EQHHY4HQSRAYL3GVEYAWSL4Y7H5GG_1_8"}
{"score": 0.14311625063419342, "chain_id": "3EQHHY4HQSRAYL3GVEYAWSL4Y7H5GG_1_9"}
{"score": 0.0401749424636364, "chain_id": "3EQHHY4HQSRAYL3GVEYAWSL4Y7H5GG_1_10"}
{"score": 0.02005976252257824, "chain_id": "3JZQSN0I3Q920IW51QBJI4CHA92GFK_1_1"}
{"score": 0.015939267352223396, "chain_id": "3JZQSN0I3Q920IW51QBJI4CHA92GFK_1_2"}
{"score": 0.017219610512256622, "chain_id": "3JZQSN0I3Q920IW51QBJI4CHA92GFK_1_3"}
{"score": 0.01528843306005001, "chain_id": "3JZQSN0I3Q920IW51QBJI4CHA92GFK_1_4"}
{"score": 0.08931013941764832, "chain_id": "3JZQSN0I3Q920IW51QBJI4CHA92GFK_1_5"}
{"score": 0.04815658926963806, "chain_id": "3JZQSN0I3Q920IW51QBJI4CHA92GFK_1_6"}
{"score": 0.09187071770429611, "chain_id": "3JZQSN0I3Q920IW51QBJI4CHA92GFK_1_7"}
{"score": 0.11663711816072464, "chain_id": "3JZQSN0I3Q920IW51QBJI4CHA92GFK_1_8"}
{"score": 0.03829651698470116, "chain_id": "3JZQSN0I3Q920IW51QBJI4CHA92GFK_1_9"}
{"score": 0.07733125984668732, "chain_id": "3JZQSN0I3Q920IW51QBJI4CHA92GFK_1_10"}
{"score": 0.03233117237687111, "chain_id": "3DY4FPOOA1NIL5R9HGAZZUTA1QNVRY_1_1"}
{"score": 0.06539224088191986, "chain_id": "3DY4FPOOA1NIL5R9HGAZZUTA1QNVRY_1_2"}
{"score": 0.06875448673963547, "chain_id": "3DY4FPOOA1NIL5R9HGAZZUTA1QNVRY_1_3"}
{"score": 0.36761173605918884, "chain_id": "3DY4FPOOA1NIL5R9HGAZZUTA1QNVRY_1_4"}
{"score": 0.12227274477481842, "chain_id": "3DY4FPOOA1NIL5R9HGAZZUTA1QNVRY_1_5"}
{"score": 0.0510932058095932, "chain_id": "3DY4FPOOA1NIL5R9HGAZZUTA1QNVRY_1_6"}
{"score": 0.04838709533214569, "chain_id": "3DY4FPOOA1NIL5R9HGAZZUTA1QNVRY_1_7"}
{"score": 0.02692732773721218, "chain_id": "3DY4FPOOA1NIL5R9HGAZZUTA1QNVRY_1_8"}
{"score": 0.7707823514938354, "chain_id": "3DY4FPOOA1NIL5R9HGAZZUTA1QNVRY_1_9"}
{"score": 0.12030671536922455, "chain_id": "3DY4FPOOA1NIL5R9HGAZZUTA1QNVRY_1_10"}
{"score": 0.8384106755256653, "chain_id": "35K3O9HUABC4G40EVVLVI1R5AFUFEU_1_1"}
{"score": 0.8384106755256653, "chain_id": "35K3O9HUABC4G40EVVLVI1R5AFUFEU_1_2"}
{"score": 0.11092481762170792, "chain_id": "35K3O9HUABC4G40EVVLVI1R5AFUFEU_1_3"}
{"score": 0.19794291257858276, "chain_id": "35K3O9HUABC4G40EVVLVI1R5AFUFEU_1_4"}
{"score": 0.07673142105340958, "chain_id": "35K3O9HUABC4G40EVVLVI1R5AFUFEU_1_5"}
{"score": 0.03344293683767319, "chain_id": "35K3O9HUABC4G40EVVLVI1R5AFUFEU_1_6"}
{"score": 0.013386091217398643, "chain_id": "35K3O9HUABC4G40EVVLVI1R5AFUFEU_1_7"}
{"score": 0.02977406419813633, "chain_id": "35K3O9HUABC4G40EVVLVI1R5AFUFEU_1_8"}
{"score": 0.06415440887212753, "chain_id": "35K3O9HUABC4G40EVVLVI1R5AFUFEU_1_9"}
{"score": 0.02269689179956913, "chain_id": "35K3O9HUABC4G40EVVLVI1R5AFUFEU_1_10"}
{"score": 0.37326279282569885, "chain_id": "36TFCYNS449X00I1LQZN9BOPTWPXH5_1_4"}
{"score": 0.3606208264827728, "chain_id": "36TFCYNS449X00I1LQZN9BOPTWPXH5_1_9"}
{"score": 0.1413130909204483, "chain_id": "36TFCYNS449X00I1LQZN9BOPTWPXH5_1_1"}
{"score": 0.7526001930236816, "chain_id": "36TFCYNS449X00I1LQZN9BOPTWPXH5_1_2"}
{"score": 0.05759165808558464, "chain_id": "36TFCYNS449X00I1LQZN9BOPTWPXH5_1_3"}
{"score": 0.3496326804161072, "chain_id": "36TFCYNS449X00I1LQZN9BOPTWPXH5_1_5"}
{"score": 0.03452694043517113, "chain_id": "36TFCYNS449X00I1LQZN9BOPTWPXH5_1_6"}
{"score": 0.030416741967201233, "chain_id": "36TFCYNS449X00I1LQZN9BOPTWPXH5_1_7"}
{"score": 0.11173219233751297, "chain_id": "36TFCYNS449X00I1LQZN9BOPTWPXH5_1_8"}
{"score": 0.07117951661348343, "chain_id": "36TFCYNS449X00I1LQZN9BOPTWPXH5_1_10"}
{"score": 0.9825626611709595, "chain_id": "3LOTDFNYA7YYX4M5GVF147Y54GAFW2_1_1"}
{"score": 0.9564521908760071, "chain_id": "3LOTDFNYA7YYX4M5GVF147Y54GAFW2_1_3"}
{"score": 0.4116779565811157, "chain_id": "3LOTDFNYA7YYX4M5GVF147Y54GAFW2_1_6"}
{"score": 0.8590763211250305, "chain_id": "3LOTDFNYA7YYX4M5GVF147Y54GAFW2_1_10"}
{"score": 0.809370756149292, "chain_id": "3LOTDFNYA7YYX4M5GVF147Y54GAFW2_1_2"}
{"score": 0.4242537021636963, "chain_id": "3LOTDFNYA7YYX4M5GVF147Y54GAFW2_1_4"}
{"score": 0.8646987080574036, "chain_id": "3LOTDFNYA7YYX4M5GVF147Y54GAFW2_1_5"}
{"score": 0.19071261584758759, "chain_id": "3LOTDFNYA7YYX4M5GVF147Y54GAFW2_1_7"}
{"score": 0.05456475540995598, "chain_id": "3LOTDFNYA7YYX4M5GVF147Y54GAFW2_1_8"}
{"score": 0.04919658973813057, "chain_id": "3LOTDFNYA7YYX4M5GVF147Y54GAFW2_1_9"}
{"score": 0.1202094703912735, "chain_id": "3S0TNUHWKTHQ9JCRRM452RSYCBV8D5_1_10"}
{"score": 0.4218917787075043, "chain_id": "3S0TNUHWKTHQ9JCRRM452RSYCBV8D5_1_1"}
{"score": 0.9715226292610168, "chain_id": "3S0TNUHWKTHQ9JCRRM452RSYCBV8D5_1_2"}
{"score": 0.39493730664253235, "chain_id": "3S0TNUHWKTHQ9JCRRM452RSYCBV8D5_1_3"}
{"score": 0.939578115940094, "chain_id": "3S0TNUHWKTHQ9JCRRM452RSYCBV8D5_1_4"}
{"score": 0.8778170943260193, "chain_id": "3S0TNUHWKTHQ9JCRRM452RSYCBV8D5_1_5"}
{"score": 0.14851707220077515, "chain_id": "3S0TNUHWKTHQ9JCRRM452RSYCBV8D5_1_6"}
{"score": 0.5546610951423645, "chain_id": "3S0TNUHWKTHQ9JCRRM452RSYCBV8D5_1_7"}
{"score": 0.7075504064559937, "chain_id": "3S0TNUHWKTHQ9JCRRM452RSYCBV8D5_1_8"}
{"score": 0.6840738654136658, "chain_id": "3S0TNUHWKTHQ9JCRRM452RSYCBV8D5_1_9"}
{"score": 0.9796438217163086, "chain_id": "3I02618YA05XWDMUZYW5YDRCM9TPUU_1_1"}
{"score": 0.9794799089431763, "chain_id": "3I02618YA05XWDMUZYW5YDRCM9TPUU_1_2"}
{"score": 0.9673639535903931, "chain_id": "3I02618YA05XWDMUZYW5YDRCM9TPUU_1_3"}
{"score": 0.5773493051528931, "chain_id": "3I02618YA05XWDMUZYW5YDRCM9TPUU_1_7"}
{"score": 0.926590085029602, "chain_id": "3I02618YA05XWDMUZYW5YDRCM9TPUU_1_8"}
{"score": 0.943450391292572, "chain_id": "3I02618YA05XWDMUZYW5YDRCM9TPUU_1_4"}
{"score": 0.4940052628517151, "chain_id": "3I02618YA05XWDMUZYW5YDRCM9TPUU_1_5"}
{"score": 0.558013379573822, "chain_id": "3I02618YA05XWDMUZYW5YDRCM9TPUU_1_6"}
{"score": 0.21335142850875854, "chain_id": "3I02618YA05XWDMUZYW5YDRCM9TPUU_1_9"}
{"score": 0.10678723454475403, "chain_id": "3I02618YA05XWDMUZYW5YDRCM9TPUU_1_10"}
{"score": 0.9873680472373962, "chain_id": "3HWRJOOET51DK9501FLUP0AKP7IES0_1_1"}
{"score": 0.9770893454551697, "chain_id": "3HWRJOOET51DK9501FLUP0AKP7IES0_1_3"}
{"score": 0.2625569999217987, "chain_id": "3HWRJOOET51DK9501FLUP0AKP7IES0_1_6"}
{"score": 0.821988046169281, "chain_id": "3HWRJOOET51DK9501FLUP0AKP7IES0_1_10"}
{"score": 0.8947615623474121, "chain_id": "3HWRJOOET51DK9501FLUP0AKP7IES0_1_2"}
{"score": 0.5838497281074524, "chain_id": "3HWRJOOET51DK9501FLUP0AKP7IES0_1_4"}
{"score": 0.8418757319450378, "chain_id": "3HWRJOOET51DK9501FLUP0AKP7IES0_1_5"}
{"score": 0.13193458318710327, "chain_id": "3HWRJOOET51DK9501FLUP0AKP7IES0_1_7"}
{"score": 0.04096521437168121, "chain_id": "3HWRJOOET51DK9501FLUP0AKP7IES0_1_8"}
{"score": 0.04806977137923241, "chain_id": "3HWRJOOET51DK9501FLUP0AKP7IES0_1_9"}
{"score": 0.983146607875824, "chain_id": "3DQQ64TANGKAOHBZUYB6G1C9A0WWPH_1_1"}
{"score": 0.9731060266494751, "chain_id": "3DQQ64TANGKAOHBZUYB6G1C9A0WWPH_1_3"}
{"score": 0.23300118744373322, "chain_id": "3DQQ64TANGKAOHBZUYB6G1C9A0WWPH_1_6"}
{"score": 0.8628008365631104, "chain_id": "3DQQ64TANGKAOHBZUYB6G1C9A0WWPH_1_2"}
{"score": 0.6266837120056152, "chain_id": "3DQQ64TANGKAOHBZUYB6G1C9A0WWPH_1_4"}
{"score": 0.8094238638877869, "chain_id": "3DQQ64TANGKAOHBZUYB6G1C9A0WWPH_1_5"}
{"score": 0.1242581158876419, "chain_id": "3DQQ64TANGKAOHBZUYB6G1C9A0WWPH_1_7"}
{"score": 0.040466152131557465, "chain_id": "3DQQ64TANGKAOHBZUYB6G1C9A0WWPH_1_8"}
{"score": 0.047473784536123276, "chain_id": "3DQQ64TANGKAOHBZUYB6G1C9A0WWPH_1_9"}
{"score": 0.7785788178443909, "chain_id": "3DQQ64TANGKAOHBZUYB6G1C9A0WWPH_1_10"}
{"score": 0.1055155098438263, "chain_id": "3AQF3RZ558H03P7ZPD2X6DZSK436FY_1_3"}
{"score": 0.6445529460906982, "chain_id": "3AQF3RZ558H03P7ZPD2X6DZSK436FY_1_1"}
{"score": 0.06526731699705124, "chain_id": "3AQF3RZ558H03P7ZPD2X6DZSK436FY_1_2"}
{"score": 0.0868285596370697, "chain_id": "3AQF3RZ558H03P7ZPD2X6DZSK436FY_1_4"}
{"score": 0.051892973482608795, "chain_id": "3AQF3RZ558H03P7ZPD2X6DZSK436FY_1_5"}
{"score": 0.017946962267160416, "chain_id": "3AQF3RZ558H03P7ZPD2X6DZSK436FY_1_6"}
{"score": 0.016580404713749886, "chain_id": "3AQF3RZ558H03P7ZPD2X6DZSK436FY_1_7"}
{"score": 0.8000346422195435, "chain_id": "3AQF3RZ558H03P7ZPD2X6DZSK436FY_1_8"}
{"score": 0.10256002843379974, "chain_id": "3AQF3RZ558H03P7ZPD2X6DZSK436FY_1_9"}
{"score": 0.11532667279243469, "chain_id": "3AQF3RZ558H03P7ZPD2X6DZSK436FY_1_10"}
{"score": 0.20346413552761078, "chain_id": "3AWETUDC92RM1QT0SQ5T685F1RYZIT_1_1"}
{"score": 0.8731870055198669, "chain_id": "3AWETUDC92RM1QT0SQ5T685F1RYZIT_1_2"}
{"score": 0.33461451530456543, "chain_id": "3AWETUDC92RM1QT0SQ5T685F1RYZIT_1_4"}
{"score": 0.6469814777374268, "chain_id": "3AWETUDC92RM1QT0SQ5T685F1RYZIT_1_7"}
{"score": 0.2729649543762207, "chain_id": "3AWETUDC92RM1QT0SQ5T685F1RYZIT_1_8"}
{"score": 0.21492546796798706, "chain_id": "3AWETUDC92RM1QT0SQ5T685F1RYZIT_1_3"}
{"score": 0.03815823048353195, "chain_id": "3AWETUDC92RM1QT0SQ5T685F1RYZIT_1_5"}
{"score": 0.22118929028511047, "chain_id": "3AWETUDC92RM1QT0SQ5T685F1RYZIT_1_6"}
{"score": 0.2861438989639282, "chain_id": "3AWETUDC92RM1QT0SQ5T685F1RYZIT_1_9"}
{"score": 0.029086677357554436, "chain_id": "3AWETUDC92RM1QT0SQ5T685F1RYZIT_1_10"}
{"score": 0.1850355863571167, "chain_id": "3NC5L260MOLQSVD3P9ORNDLJ11PFOE_1_6"}
{"score": 0.09181347489356995, "chain_id": "3NC5L260MOLQSVD3P9ORNDLJ11PFOE_1_1"}
{"score": 0.8257176280021667, "chain_id": "3NC5L260MOLQSVD3P9ORNDLJ11PFOE_1_2"}
{"score": 0.11647084355354309, "chain_id": "3NC5L260MOLQSVD3P9ORNDLJ11PFOE_1_3"}
{"score": 0.3192174434661865, "chain_id": "3NC5L260MOLQSVD3P9ORNDLJ11PFOE_1_4"}
{"score": 0.11763497442007065, "chain_id": "3NC5L260MOLQSVD3P9ORNDLJ11PFOE_1_5"}
{"score": 0.02712339162826538, "chain_id": "3NC5L260MOLQSVD3P9ORNDLJ11PFOE_1_7"}
{"score": 0.6451728940010071, "chain_id": "3NC5L260MOLQSVD3P9ORNDLJ11PFOE_1_8"}
{"score": 0.02170667238533497, "chain_id": "3NC5L260MOLQSVD3P9ORNDLJ11PFOE_1_9"}
{"score": 0.021121691912412643, "chain_id": "3NC5L260MOLQSVD3P9ORNDLJ11PFOE_1_10"}
{"score": 0.9931634664535522, "chain_id": "3CPLWGV3MOYZ90MEL8OMYSZ385SN96_1_1"}
{"score": 0.7480411529541016, "chain_id": "3CPLWGV3MOYZ90MEL8OMYSZ385SN96_1_2"}
{"score": 0.8205676674842834, "chain_id": "3CPLWGV3MOYZ90MEL8OMYSZ385SN96_1_3"}
{"score": 0.8724973797798157, "chain_id": "3CPLWGV3MOYZ90MEL8OMYSZ385SN96_1_4"}
{"score": 0.6654886603355408, "chain_id": "3CPLWGV3MOYZ90MEL8OMYSZ385SN96_1_5"}
{"score": 0.8731579184532166, "chain_id": "3CPLWGV3MOYZ90MEL8OMYSZ385SN96_1_6"}
{"score": 0.781036376953125, "chain_id": "3CPLWGV3MOYZ90MEL8OMYSZ385SN96_1_7"}
{"score": 0.9800149202346802, "chain_id": "3CPLWGV3MOYZ90MEL8OMYSZ385SN96_1_8"}
{"score": 0.8854208588600159, "chain_id": "3CPLWGV3MOYZ90MEL8OMYSZ385SN96_1_9"}
{"score": 0.7284432053565979, "chain_id": "3CPLWGV3MOYZ90MEL8OMYSZ385SN96_1_10"}
{"score": 0.214212566614151, "chain_id": "3VFJCI1K4ZYZ381ESLBDZTQ0DFHRGU_1_1"}
{"score": 0.05136697366833687, "chain_id": "3VFJCI1K4ZYZ381ESLBDZTQ0DFHRGU_1_3"}
{"score": 0.7923832535743713, "chain_id": "3VFJCI1K4ZYZ381ESLBDZTQ0DFHRGU_1_4"}
{"score": 0.9431554079055786, "chain_id": "3VFJCI1K4ZYZ381ESLBDZTQ0DFHRGU_1_6"}
{"score": 0.06026478111743927, "chain_id": "3VFJCI1K4ZYZ381ESLBDZTQ0DFHRGU_1_7"}
{"score": 0.771210253238678, "chain_id": "3VFJCI1K4ZYZ381ESLBDZTQ0DFHRGU_1_8"}
{"score": 0.04515543580055237, "chain_id": "3VFJCI1K4ZYZ381ESLBDZTQ0DFHRGU_1_2"}
{"score": 0.045243773609399796, "chain_id": "3VFJCI1K4ZYZ381ESLBDZTQ0DFHRGU_1_5"}
{"score": 0.02506859228014946, "chain_id": "3VFJCI1K4ZYZ381ESLBDZTQ0DFHRGU_1_9"}
{"score": 0.9623396396636963, "chain_id": "3VFJCI1K4ZYZ381ESLBDZTQ0DFHRGU_1_10"}
{"score": 0.32473933696746826, "chain_id": "31T4R4OBOSFC4D1UHLHO4LELF8XC73_1_1"}
{"score": 0.6003976464271545, "chain_id": "31T4R4OBOSFC4D1UHLHO4LELF8XC73_1_2"}
{"score": 0.01861424930393696, "chain_id": "31T4R4OBOSFC4D1UHLHO4LELF8XC73_1_3"}
{"score": 0.016135141253471375, "chain_id": "31T4R4OBOSFC4D1UHLHO4LELF8XC73_1_4"}
{"score": 0.022804921492934227, "chain_id": "31T4R4OBOSFC4D1UHLHO4LELF8XC73_1_5"}
{"score": 0.6590448021888733, "chain_id": "31T4R4OBOSFC4D1UHLHO4LELF8XC73_1_6"}
{"score": 0.0779382735490799, "chain_id": "31T4R4OBOSFC4D1UHLHO4LELF8XC73_1_7"}
{"score": 0.0458078570663929, "chain_id": "31T4R4OBOSFC4D1UHLHO4LELF8XC73_1_8"}
{"score": 0.09477932006120682, "chain_id": "31T4R4OBOSFC4D1UHLHO4LELF8XC73_1_9"}
{"score": 0.053582772612571716, "chain_id": "31T4R4OBOSFC4D1UHLHO4LELF8XC73_1_10"}
{"score": 0.7880358099937439, "chain_id": "3A7Y0R2P2ONTR6DR9Q28LO447L7XJJ_1_3"}
{"score": 0.8494555354118347, "chain_id": "3A7Y0R2P2ONTR6DR9Q28LO447L7XJJ_1_4"}
{"score": 0.35392525792121887, "chain_id": "3A7Y0R2P2ONTR6DR9Q28LO447L7XJJ_1_5"}
{"score": 0.8518862128257751, "chain_id": "3A7Y0R2P2ONTR6DR9Q28LO447L7XJJ_1_6"}
{"score": 0.9504674673080444, "chain_id": "3A7Y0R2P2ONTR6DR9Q28LO447L7XJJ_1_1"}
{"score": 0.9588498473167419, "chain_id": "3A7Y0R2P2ONTR6DR9Q28LO447L7XJJ_1_2"}
{"score": 0.9537092447280884, "chain_id": "3A7Y0R2P2ONTR6DR9Q28LO447L7XJJ_1_7"}
{"score": 0.8041806817054749, "chain_id": "3A7Y0R2P2ONTR6DR9Q28LO447L7XJJ_1_8"}
{"score": 0.7247335910797119, "chain_id": "3A7Y0R2P2ONTR6DR9Q28LO447L7XJJ_1_9"}
{"score": 0.1881871223449707, "chain_id": "3A7Y0R2P2ONTR6DR9Q28LO447L7XJJ_1_10"}
{"score": 0.9632719159126282, "chain_id": "374TNBHA8BUZDY7E9C8J13NZNZ6YQK_1_1"}
{"score": 0.8587329387664795, "chain_id": "374TNBHA8BUZDY7E9C8J13NZNZ6YQK_1_2"}
{"score": 0.8589411377906799, "chain_id": "374TNBHA8BUZDY7E9C8J13NZNZ6YQK_1_7"}
{"score": 0.06821277737617493, "chain_id": "374TNBHA8BUZDY7E9C8J13NZNZ6YQK_1_9"}
{"score": 0.9239509701728821, "chain_id": "374TNBHA8BUZDY7E9C8J13NZNZ6YQK_1_3"}
{"score": 0.8866487741470337, "chain_id": "374TNBHA8BUZDY7E9C8J13NZNZ6YQK_1_4"}
{"score": 0.4113549590110779, "chain_id": "374TNBHA8BUZDY7E9C8J13NZNZ6YQK_1_5"}
{"score": 0.1444053202867508, "chain_id": "374TNBHA8BUZDY7E9C8J13NZNZ6YQK_1_6"}
{"score": 0.2686096131801605, "chain_id": "374TNBHA8BUZDY7E9C8J13NZNZ6YQK_1_8"}
{"score": 0.3635387718677521, "chain_id": "374TNBHA8BUZDY7E9C8J13NZNZ6YQK_1_10"}
{"score": 0.04311505705118179, "chain_id": "3F6HPJW4JDZEWAATS00UKO4GXUBW27_1_2"}
{"score": 0.9476605653762817, "chain_id": "3F6HPJW4JDZEWAATS00UKO4GXUBW27_1_6"}
{"score": 0.05285045877099037, "chain_id": "3F6HPJW4JDZEWAATS00UKO4GXUBW27_1_7"}
{"score": 0.7650109529495239, "chain_id": "3F6HPJW4JDZEWAATS00UKO4GXUBW27_1_8"}
{"score": 0.9607446193695068, "chain_id": "3F6HPJW4JDZEWAATS00UKO4GXUBW27_1_10"}
{"score": 0.18584595620632172, "chain_id": "3F6HPJW4JDZEWAATS00UKO4GXUBW27_1_1"}
{"score": 0.05136697366833687, "chain_id": "3F6HPJW4JDZEWAATS00UKO4GXUBW27_1_3"}
{"score": 0.7611142992973328, "chain_id": "3F6HPJW4JDZEWAATS00UKO4GXUBW27_1_4"}
{"score": 0.04108928143978119, "chain_id": "3F6HPJW4JDZEWAATS00UKO4GXUBW27_1_5"}
{"score": 0.024033280089497566, "chain_id": "3F6HPJW4JDZEWAATS00UKO4GXUBW27_1_9"}
{"score": 0.18234467506408691, "chain_id": "35K3O9HUABC4G40EVVLVI1R5ZUNFE6_1_1"}
{"score": 0.7753292322158813, "chain_id": "35K3O9HUABC4G40EVVLVI1R5ZUNFE6_1_4"}
{"score": 0.9412473440170288, "chain_id": "35K3O9HUABC4G40EVVLVI1R5ZUNFE6_1_6"}
{"score": 0.05303419753909111, "chain_id": "35K3O9HUABC4G40EVVLVI1R5ZUNFE6_1_7"}
{"score": 0.7558279037475586, "chain_id": "35K3O9HUABC4G40EVVLVI1R5ZUNFE6_1_8"}
{"score": 0.04701714217662811, "chain_id": "35K3O9HUABC4G40EVVLVI1R5ZUNFE6_1_2"}
{"score": 0.05674714595079422, "chain_id": "35K3O9HUABC4G40EVVLVI1R5ZUNFE6_1_3"}
{"score": 0.04182294011116028, "chain_id": "35K3O9HUABC4G40EVVLVI1R5ZUNFE6_1_5"}
{"score": 0.023562954738736153, "chain_id": "35K3O9HUABC4G40EVVLVI1R5ZUNFE6_1_9"}
{"score": 0.9637272953987122, "chain_id": "35K3O9HUABC4G40EVVLVI1R5ZUNFE6_1_10"}
{"score": 0.9825589656829834, "chain_id": "3OB0CAO74HOM058BQMLPSPVYXPYYH3_1_1"}
{"score": 0.9903897643089294, "chain_id": "3OB0CAO74HOM058BQMLPSPVYXPYYH3_1_2"}
{"score": 0.58482426404953, "chain_id": "3OB0CAO74HOM058BQMLPSPVYXPYYH3_1_3"}
{"score": 0.9013376235961914, "chain_id": "3OB0CAO74HOM058BQMLPSPVYXPYYH3_1_4"}
{"score": 0.8763706684112549, "chain_id": "3OB0CAO74HOM058BQMLPSPVYXPYYH3_1_7"}
{"score": 0.9501082897186279, "chain_id": "3OB0CAO74HOM058BQMLPSPVYXPYYH3_1_5"}
{"score": 0.24694277346134186, "chain_id": "3OB0CAO74HOM058BQMLPSPVYXPYYH3_1_6"}
{"score": 0.31797224283218384, "chain_id": "3OB0CAO74HOM058BQMLPSPVYXPYYH3_1_8"}
{"score": 0.09196104109287262, "chain_id": "3OB0CAO74HOM058BQMLPSPVYXPYYH3_1_9"}
{"score": 0.09662456065416336, "chain_id": "3OB0CAO74HOM058BQMLPSPVYXPYYH3_1_10"}
{"score": 0.669204831123352, "chain_id": "3ZR9AIQJUB8VRYOV37QX68SAFF0040_1_5"}
{"score": 0.19202911853790283, "chain_id": "3ZR9AIQJUB8VRYOV37QX68SAFF0040_1_6"}
{"score": 0.8611013293266296, "chain_id": "3ZR9AIQJUB8VRYOV37QX68SAFF0040_1_1"}
{"score": 0.8236163258552551, "chain_id": "3ZR9AIQJUB8VRYOV37QX68SAFF0040_1_2"}
{"score": 0.6050009727478027, "chain_id": "3ZR9AIQJUB8VRYOV37QX68SAFF0040_1_3"}
{"score": 0.5454185605049133, "chain_id": "3ZR9AIQJUB8VRYOV37QX68SAFF0040_1_4"}
{"score": 0.2547372877597809, "chain_id": "3ZR9AIQJUB8VRYOV37QX68SAFF0040_1_7"}
{"score": 0.7751531600952148, "chain_id": "3ZR9AIQJUB8VRYOV37QX68SAFF0040_1_8"}
{"score": 0.10231832414865494, "chain_id": "3ZR9AIQJUB8VRYOV37QX68SAFF0040_1_9"}
{"score": 0.5233879685401917, "chain_id": "3ZR9AIQJUB8VRYOV37QX68SAFF0040_1_10"}
{"score": 0.7291103005409241, "chain_id": "3G2UL9A02DDNOWST7U4LILMBIK876N_1_1"}
{"score": 0.16920152306556702, "chain_id": "3G2UL9A02DDNOWST7U4LILMBIK876N_1_3"}
{"score": 0.1727638691663742, "chain_id": "3G2UL9A02DDNOWST7U4LILMBIK876N_1_10"}
{"score": 0.6885530948638916, "chain_id": "3G2UL9A02DDNOWST7U4LILMBIK876N_1_2"}
{"score": 0.05573464184999466, "chain_id": "3G2UL9A02DDNOWST7U4LILMBIK876N_1_4"}
{"score": 0.12512466311454773, "chain_id": "3G2UL9A02DDNOWST7U4LILMBIK876N_1_5"}
{"score": 0.5531158447265625, "chain_id": "3G2UL9A02DDNOWST7U4LILMBIK876N_1_6"}
{"score": 0.5042633414268494, "chain_id": "3G2UL9A02DDNOWST7U4LILMBIK876N_1_7"}
{"score": 0.08953291177749634, "chain_id": "3G2UL9A02DDNOWST7U4LILMBIK876N_1_8"}
{"score": 0.5048033595085144, "chain_id": "3G2UL9A02DDNOWST7U4LILMBIK876N_1_9"}
{"score": 0.9227306246757507, "chain_id": "37TRT2X24QQME3AQ4UAQWRDCNKEJBU_1_4"}
{"score": 0.28917357325553894, "chain_id": "37TRT2X24QQME3AQ4UAQWRDCNKEJBU_1_5"}
{"score": 0.8277022242546082, "chain_id": "37TRT2X24QQME3AQ4UAQWRDCNKEJBU_1_1"}
{"score": 0.4304412007331848, "chain_id": "37TRT2X24QQME3AQ4UAQWRDCNKEJBU_1_2"}
{"score": 0.23808550834655762, "chain_id": "37TRT2X24QQME3AQ4UAQWRDCNKEJBU_1_3"}
{"score": 0.14238208532333374, "chain_id": "37TRT2X24QQME3AQ4UAQWRDCNKEJBU_1_6"}
{"score": 0.17642055451869965, "chain_id": "37TRT2X24QQME3AQ4UAQWRDCNKEJBU_1_7"}
{"score": 0.23750227689743042, "chain_id": "37TRT2X24QQME3AQ4UAQWRDCNKEJBU_1_8"}
{"score": 0.5427939295768738, "chain_id": "37TRT2X24QQME3AQ4UAQWRDCNKEJBU_1_9"}
{"score": 0.08564585447311401, "chain_id": "37TRT2X24QQME3AQ4UAQWRDCNKEJBU_1_10"}
{"score": 0.9657419919967651, "chain_id": "384PI804XS0ETJQ6T8MF4B8GV6AS0U_1_1"}
{"score": 0.8643925189971924, "chain_id": "384PI804XS0ETJQ6T8MF4B8GV6AS0U_1_4"}
{"score": 0.5450475215911865, "chain_id": "384PI804XS0ETJQ6T8MF4B8GV6AS0U_1_8"}
{"score": 0.47473496198654175, "chain_id": "384PI804XS0ETJQ6T8MF4B8GV6AS0U_1_10"}
{"score": 0.8201829195022583, "chain_id": "384PI804XS0ETJQ6T8MF4B8GV6AS0U_1_2"}
{"score": 0.5524961948394775, "chain_id": "384PI804XS0ETJQ6T8MF4B8GV6AS0U_1_3"}
{"score": 0.4939960539340973, "chain_id": "384PI804XS0ETJQ6T8MF4B8GV6AS0U_1_5"}
{"score": 0.09056214243173599, "chain_id": "384PI804XS0ETJQ6T8MF4B8GV6AS0U_1_6"}
{"score": 0.369128555059433, "chain_id": "384PI804XS0ETJQ6T8MF4B8GV6AS0U_1_7"}
{"score": 0.15732009708881378, "chain_id": "384PI804XS0ETJQ6T8MF4B8GV6AS0U_1_9"}
{"score": 0.7225006818771362, "chain_id": "3NLZY2D53POFDZ0FQXJT7VL3GKJQL3_1_2"}
{"score": 0.9404172897338867, "chain_id": "3NLZY2D53POFDZ0FQXJT7VL3GKJQL3_1_3"}
{"score": 0.7114068269729614, "chain_id": "3NLZY2D53POFDZ0FQXJT7VL3GKJQL3_1_4"}
{"score": 0.4802230894565582, "chain_id": "3NLZY2D53POFDZ0FQXJT7VL3GKJQL3_1_5"}
{"score": 0.45373958349227905, "chain_id": "3NLZY2D53POFDZ0FQXJT7VL3GKJQL3_1_6"}
{"score": 0.4918138086795807, "chain_id": "3NLZY2D53POFDZ0FQXJT7VL3GKJQL3_1_8"}
{"score": 0.8908689022064209, "chain_id": "3NLZY2D53POFDZ0FQXJT7VL3GKJQL3_1_1"}
{"score": 0.5830057859420776, "chain_id": "3NLZY2D53POFDZ0FQXJT7VL3GKJQL3_1_7"}
{"score": 0.08952134847640991, "chain_id": "3NLZY2D53POFDZ0FQXJT7VL3GKJQL3_1_9"}
{"score": 0.25437769293785095, "chain_id": "3NLZY2D53POFDZ0FQXJT7VL3GKJQL3_1_10"}
{"score": 0.507226288318634, "chain_id": "3LO69W1SU3CO0A61N1EHDHH1B9TLGB_1_1"}
{"score": 0.20223049819469452, "chain_id": "3LO69W1SU3CO0A61N1EHDHH1B9TLGB_1_2"}
{"score": 0.6222405433654785, "chain_id": "3LO69W1SU3CO0A61N1EHDHH1B9TLGB_1_3"}
{"score": 0.3197760283946991, "chain_id": "3LO69W1SU3CO0A61N1EHDHH1B9TLGB_1_4"}
{"score": 0.01921486109495163, "chain_id": "3LO69W1SU3CO0A61N1EHDHH1B9TLGB_1_5"}
{"score": 0.20007523894309998, "chain_id": "3LO69W1SU3CO0A61N1EHDHH1B9TLGB_1_6"}
{"score": 0.05101653188467026, "chain_id": "3LO69W1SU3CO0A61N1EHDHH1B9TLGB_1_7"}
{"score": 0.03340328857302666, "chain_id": "3LO69W1SU3CO0A61N1EHDHH1B9TLGB_1_8"}
{"score": 0.3033924996852875, "chain_id": "3LO69W1SU3CO0A61N1EHDHH1B9TLGB_1_9"}
{"score": 0.13844719529151917, "chain_id": "3LO69W1SU3CO0A61N1EHDHH1B9TLGB_1_10"}
{"score": 0.41251349449157715, "chain_id": "3LOZAJ85YDCTLAFJ25WGM7IN7KN2XP_1_1"}
{"score": 0.3687959313392639, "chain_id": "3LOZAJ85YDCTLAFJ25WGM7IN7KN2XP_1_2"}
{"score": 0.46565136313438416, "chain_id": "3LOZAJ85YDCTLAFJ25WGM7IN7KN2XP_1_3"}
{"score": 0.6570301651954651, "chain_id": "3LOZAJ85YDCTLAFJ25WGM7IN7KN2XP_1_4"}
{"score": 0.9444075226783752, "chain_id": "3LOZAJ85YDCTLAFJ25WGM7IN7KN2XP_1_6"}
{"score": 0.9478194713592529, "chain_id": "3LOZAJ85YDCTLAFJ25WGM7IN7KN2XP_1_7"}
{"score": 0.22971481084823608, "chain_id": "3LOZAJ85YDCTLAFJ25WGM7IN7KN2XP_1_9"}
{"score": 0.9329063296318054, "chain_id": "3LOZAJ85YDCTLAFJ25WGM7IN7KN2XP_1_5"}
{"score": 0.8140560388565063, "chain_id": "3LOZAJ85YDCTLAFJ25WGM7IN7KN2XP_1_8"}
{"score": 0.9641981720924377, "chain_id": "3LOZAJ85YDCTLAFJ25WGM7IN7KN2XP_1_10"}
{"score": 0.9137376546859741, "chain_id": "3SBEHTYCWN2MW0JVW43AS1WDZ8QYIN_1_5"}
{"score": 0.07196665555238724, "chain_id": "3SBEHTYCWN2MW0JVW43AS1WDZ8QYIN_1_9"}
{"score": 0.37285494804382324, "chain_id": "3SBEHTYCWN2MW0JVW43AS1WDZ8QYIN_1_1"}
{"score": 0.05613686516880989, "chain_id": "3SBEHTYCWN2MW0JVW43AS1WDZ8QYIN_1_2"}
{"score": 0.5567802786827087, "chain_id": "3SBEHTYCWN2MW0JVW43AS1WDZ8QYIN_1_3"}
{"score": 0.9050872921943665, "chain_id": "3SBEHTYCWN2MW0JVW43AS1WDZ8QYIN_1_4"}
{"score": 0.08890809118747711, "chain_id": "3SBEHTYCWN2MW0JVW43AS1WDZ8QYIN_1_6"}
{"score": 0.06293460726737976, "chain_id": "3SBEHTYCWN2MW0JVW43AS1WDZ8QYIN_1_7"}
{"score": 0.4937765300273895, "chain_id": "3SBEHTYCWN2MW0JVW43AS1WDZ8QYIN_1_8"}
{"score": 0.03564877063035965, "chain_id": "3SBEHTYCWN2MW0JVW43AS1WDZ8QYIN_1_10"}
{"score": 0.8898991942405701, "chain_id": "3VAR3R6G1P0HDG3GHVILDL4XATDO8V_1_1"}
{"score": 0.549602746963501, "chain_id": "3VAR3R6G1P0HDG3GHVILDL4XATDO8V_1_2"}
{"score": 0.02377162128686905, "chain_id": "3VAR3R6G1P0HDG3GHVILDL4XATDO8V_1_3"}
{"score": 0.05784239619970322, "chain_id": "3VAR3R6G1P0HDG3GHVILDL4XATDO8V_1_4"}
{"score": 0.026000412181019783, "chain_id": "3VAR3R6G1P0HDG3GHVILDL4XATDO8V_1_5"}
{"score": 0.6195244789123535, "chain_id": "3VAR3R6G1P0HDG3GHVILDL4XATDO8V_1_6"}
{"score": 0.48388436436653137, "chain_id": "3VAR3R6G1P0HDG3GHVILDL4XATDO8V_1_7"}
{"score": 0.037743695080280304, "chain_id": "3VAR3R6G1P0HDG3GHVILDL4XATDO8V_1_8"}
{"score": 0.09558628499507904, "chain_id": "3VAR3R6G1P0HDG3GHVILDL4XATDO8V_1_9"}
{"score": 0.3329949378967285, "chain_id": "3VAR3R6G1P0HDG3GHVILDL4XATDO8V_1_10"}
{"score": 0.10667096078395844, "chain_id": "31LM9EDVOLROFCZN7KFZNMD60TTJNK_1_3"}
{"score": 0.09150847047567368, "chain_id": "31LM9EDVOLROFCZN7KFZNMD60TTJNK_1_1"}
{"score": 0.08685626834630966, "chain_id": "31LM9EDVOLROFCZN7KFZNMD60TTJNK_1_2"}
{"score": 0.02077862247824669, "chain_id": "31LM9EDVOLROFCZN7KFZNMD60TTJNK_1_4"}
{"score": 0.05891876667737961, "chain_id": "31LM9EDVOLROFCZN7KFZNMD60TTJNK_1_5"}
{"score": 0.12729111313819885, "chain_id": "31LM9EDVOLROFCZN7KFZNMD60TTJNK_1_6"}
{"score": 0.5131028294563293, "chain_id": "31LM9EDVOLROFCZN7KFZNMD60TTJNK_1_7"}
{"score": 0.16251814365386963, "chain_id": "31LM9EDVOLROFCZN7KFZNMD60TTJNK_1_8"}
{"score": 0.038752224296331406, "chain_id": "31LM9EDVOLROFCZN7KFZNMD60TTJNK_1_9"}
{"score": 0.47940874099731445, "chain_id": "31LM9EDVOLROFCZN7KFZNMD60TTJNK_1_10"}
{"score": 0.05705438554286957, "chain_id": "3M1CVSFP604YHG9BT6U3YH5SIKEQAZ_1_1"}
{"score": 0.027819301933050156, "chain_id": "3M1CVSFP604YHG9BT6U3YH5SIKEQAZ_1_2"}
{"score": 0.018887275829911232, "chain_id": "3M1CVSFP604YHG9BT6U3YH5SIKEQAZ_1_3"}
{"score": 0.05530038848519325, "chain_id": "3M1CVSFP604YHG9BT6U3YH5SIKEQAZ_1_4"}
{"score": 0.031873445957899094, "chain_id": "3M1CVSFP604YHG9BT6U3YH5SIKEQAZ_1_5"}
{"score": 0.03618212044239044, "chain_id": "3M1CVSFP604YHG9BT6U3YH5SIKEQAZ_1_6"}
{"score": 0.028398364782333374, "chain_id": "3M1CVSFP604YHG9BT6U3YH5SIKEQAZ_1_7"}
{"score": 0.03241768851876259, "chain_id": "3M1CVSFP604YHG9BT6U3YH5SIKEQAZ_1_8"}
{"score": 0.08552878350019455, "chain_id": "3M1CVSFP604YHG9BT6U3YH5SIKEQAZ_1_9"}
{"score": 0.058425914496183395, "chain_id": "3M1CVSFP604YHG9BT6U3YH5SIKEQAZ_1_10"}
{"score": 0.20100541412830353, "chain_id": "3V5Q80FXIXQH5C85IGPSFRTJT4D324_1_1"}
{"score": 0.49129530787467957, "chain_id": "3V5Q80FXIXQH5C85IGPSFRTJT4D324_1_2"}
{"score": 0.12391183525323868, "chain_id": "3V5Q80FXIXQH5C85IGPSFRTJT4D324_1_3"}
{"score": 0.5171208381652832, "chain_id": "3V5Q80FXIXQH5C85IGPSFRTJT4D324_1_4"}
{"score": 0.26295819878578186, "chain_id": "3V5Q80FXIXQH5C85IGPSFRTJT4D324_1_5"}
{"score": 0.045819707214832306, "chain_id": "3V5Q80FXIXQH5C85IGPSFRTJT4D324_1_6"}
{"score": 0.19558599591255188, "chain_id": "3V5Q80FXIXQH5C85IGPSFRTJT4D324_1_7"}
{"score": 0.05173434317111969, "chain_id": "3V5Q80FXIXQH5C85IGPSFRTJT4D324_1_8"}
{"score": 0.05099257454276085, "chain_id": "3V5Q80FXIXQH5C85IGPSFRTJT4D324_1_9"}
{"score": 0.026679880917072296, "chain_id": "3V5Q80FXIXQH5C85IGPSFRTJT4D324_1_10"}
{"score": 0.2600262463092804, "chain_id": "3YMU66OBIN7MEENBWGZJLPOUMS0GH7_1_1"}
{"score": 0.15558598935604095, "chain_id": "3YMU66OBIN7MEENBWGZJLPOUMS0GH7_1_2"}
{"score": 0.04336703196167946, "chain_id": "3YMU66OBIN7MEENBWGZJLPOUMS0GH7_1_3"}
{"score": 0.09111856669187546, "chain_id": "3YMU66OBIN7MEENBWGZJLPOUMS0GH7_1_4"}
{"score": 0.19273261725902557, "chain_id": "3YMU66OBIN7MEENBWGZJLPOUMS0GH7_1_5"}
{"score": 0.04219653457403183, "chain_id": "3YMU66OBIN7MEENBWGZJLPOUMS0GH7_1_6"}
{"score": 0.21929802000522614, "chain_id": "3YMU66OBIN7MEENBWGZJLPOUMS0GH7_1_7"}
{"score": 0.05562572553753853, "chain_id": "3YMU66OBIN7MEENBWGZJLPOUMS0GH7_1_8"}
{"score": 0.043623290956020355, "chain_id": "3YMU66OBIN7MEENBWGZJLPOUMS0GH7_1_9"}
{"score": 0.07488968968391418, "chain_id": "3YMU66OBIN7MEENBWGZJLPOUMS0GH7_1_10"}
{"score": 0.07710454612970352, "chain_id": "3TS1AR6UQQDJ7PL48N7PCRZO7C27FZ_1_1"}
{"score": 0.08871620893478394, "chain_id": "3TS1AR6UQQDJ7PL48N7PCRZO7C27FZ_1_2"}
{"score": 0.06165222078561783, "chain_id": "3TS1AR6UQQDJ7PL48N7PCRZO7C27FZ_1_3"}
{"score": 0.05256262421607971, "chain_id": "3TS1AR6UQQDJ7PL48N7PCRZO7C27FZ_1_4"}
{"score": 0.05664495751261711, "chain_id": "3TS1AR6UQQDJ7PL48N7PCRZO7C27FZ_1_5"}
{"score": 0.1825229525566101, "chain_id": "3TS1AR6UQQDJ7PL48N7PCRZO7C27FZ_1_6"}
{"score": 0.07734936475753784, "chain_id": "3TS1AR6UQQDJ7PL48N7PCRZO7C27FZ_1_7"}
{"score": 0.06673472374677658, "chain_id": "3TS1AR6UQQDJ7PL48N7PCRZO7C27FZ_1_8"}
{"score": 0.046738192439079285, "chain_id": "3TS1AR6UQQDJ7PL48N7PCRZO7C27FZ_1_9"}
{"score": 0.16695688664913177, "chain_id": "3TS1AR6UQQDJ7PL48N7PCRZO7C27FZ_1_10"}
{"score": 0.9123125672340393, "chain_id": "3QEMNNSB2XYM9578HHCZORW3YWJ7DI_1_2"}
{"score": 0.9071104526519775, "chain_id": "3QEMNNSB2XYM9578HHCZORW3YWJ7DI_1_3"}
{"score": 0.8968566656112671, "chain_id": "3QEMNNSB2XYM9578HHCZORW3YWJ7DI_1_4"}
{"score": 0.7788392305374146, "chain_id": "3QEMNNSB2XYM9578HHCZORW3YWJ7DI_1_5"}
{"score": 0.9370175004005432, "chain_id": "3QEMNNSB2XYM9578HHCZORW3YWJ7DI_1_7"}
{"score": 0.1564686894416809, "chain_id": "3QEMNNSB2XYM9578HHCZORW3YWJ7DI_1_1"}
{"score": 0.970737874507904, "chain_id": "3QEMNNSB2XYM9578HHCZORW3YWJ7DI_1_6"}
{"score": 0.4204248785972595, "chain_id": "3QEMNNSB2XYM9578HHCZORW3YWJ7DI_1_8"}
{"score": 0.7092394232749939, "chain_id": "3QEMNNSB2XYM9578HHCZORW3YWJ7DI_1_9"}
{"score": 0.9214490652084351, "chain_id": "3QEMNNSB2XYM9578HHCZORW3YWJ7DI_1_10"}
{"score": 0.6595115065574646, "chain_id": "3IXEICO792IAMUP0KX7MNHET5T0T68_1_1"}
{"score": 0.9468364119529724, "chain_id": "3IXEICO792IAMUP0KX7MNHET5T0T68_1_2"}
{"score": 0.9658678770065308, "chain_id": "3IXEICO792IAMUP0KX7MNHET5T0T68_1_3"}
{"score": 0.9391937255859375, "chain_id": "3IXEICO792IAMUP0KX7MNHET5T0T68_1_4"}
{"score": 0.022379836067557335, "chain_id": "3IXEICO792IAMUP0KX7MNHET5T0T68_1_5"}
{"score": 0.024492355063557625, "chain_id": "3IXEICO792IAMUP0KX7MNHET5T0T68_1_6"}
{"score": 0.37049975991249084, "chain_id": "3IXEICO792IAMUP0KX7MNHET5T0T68_1_7"}
{"score": 0.03341865539550781, "chain_id": "3IXEICO792IAMUP0KX7MNHET5T0T68_1_8"}
{"score": 0.07298371940851212, "chain_id": "3IXEICO792IAMUP0KX7MNHET5T0T68_1_9"}
{"score": 0.015674732625484467, "chain_id": "3IXEICO792IAMUP0KX7MNHET5T0T68_1_10"}
{"score": 0.6121972799301147, "chain_id": "3G5F9DBFOPW5WBD6LBY5LQR4BS1VHW_1_1"}
{"score": 0.9611775875091553, "chain_id": "3G5F9DBFOPW5WBD6LBY5LQR4BS1VHW_1_9"}
{"score": 0.9570435285568237, "chain_id": "3G5F9DBFOPW5WBD6LBY5LQR4BS1VHW_1_2"}
{"score": 0.039136409759521484, "chain_id": "3G5F9DBFOPW5WBD6LBY5LQR4BS1VHW_1_3"}
{"score": 0.02350773476064205, "chain_id": "3G5F9DBFOPW5WBD6LBY5LQR4BS1VHW_1_4"}
{"score": 0.4332660734653473, "chain_id": "3G5F9DBFOPW5WBD6LBY5LQR4BS1VHW_1_5"}
{"score": 0.07844137400388718, "chain_id": "3G5F9DBFOPW5WBD6LBY5LQR4BS1VHW_1_6"}
{"score": 0.12226562201976776, "chain_id": "3G5F9DBFOPW5WBD6LBY5LQR4BS1VHW_1_7"}
{"score": 0.9123250246047974, "chain_id": "3G5F9DBFOPW5WBD6LBY5LQR4BS1VHW_1_8"}
{"score": 0.5749903321266174, "chain_id": "3G5F9DBFOPW5WBD6LBY5LQR4BS1VHW_1_10"}
{"score": 0.9753310680389404, "chain_id": "3NPI0JQDAO4IW075ZT6VTH5A00VPTJ_1_1"}
{"score": 0.8709502816200256, "chain_id": "3NPI0JQDAO4IW075ZT6VTH5A00VPTJ_1_3"}
{"score": 0.9778732061386108, "chain_id": "3NPI0JQDAO4IW075ZT6VTH5A00VPTJ_1_8"}
{"score": 0.9563980102539062, "chain_id": "3NPI0JQDAO4IW075ZT6VTH5A00VPTJ_1_2"}
{"score": 0.9469307661056519, "chain_id": "3NPI0JQDAO4IW075ZT6VTH5A00VPTJ_1_4"}
{"score": 0.2596448063850403, "chain_id": "3NPI0JQDAO4IW075ZT6VTH5A00VPTJ_1_5"}
{"score": 0.6506345868110657, "chain_id": "3NPI0JQDAO4IW075ZT6VTH5A00VPTJ_1_6"}
{"score": 0.4108152389526367, "chain_id": "3NPI0JQDAO4IW075ZT6VTH5A00VPTJ_1_7"}
{"score": 0.07473696768283844, "chain_id": "3NPI0JQDAO4IW075ZT6VTH5A00VPTJ_1_9"}
{"score": 0.3748806118965149, "chain_id": "3NPI0JQDAO4IW075ZT6VTH5A00VPTJ_1_10"}
{"score": 0.9868444204330444, "chain_id": "3DI28L7YXADDQP66OW6ATZNBUAI1EH_1_3"}
{"score": 0.9547449946403503, "chain_id": "3DI28L7YXADDQP66OW6ATZNBUAI1EH_1_7"}
{"score": 0.8756271600723267, "chain_id": "3DI28L7YXADDQP66OW6ATZNBUAI1EH_1_8"}
{"score": 0.9870931506156921, "chain_id": "3DI28L7YXADDQP66OW6ATZNBUAI1EH_1_1"}
{"score": 0.9669545292854309, "chain_id": "3DI28L7YXADDQP66OW6ATZNBUAI1EH_1_2"}
{"score": 0.6017292141914368, "chain_id": "3DI28L7YXADDQP66OW6ATZNBUAI1EH_1_4"}
{"score": 0.13154742121696472, "chain_id": "3DI28L7YXADDQP66OW6ATZNBUAI1EH_1_5"}
{"score": 0.9463115334510803, "chain_id": "3DI28L7YXADDQP66OW6ATZNBUAI1EH_1_6"}
{"score": 0.19280141592025757, "chain_id": "3DI28L7YXADDQP66OW6ATZNBUAI1EH_1_9"}
{"score": 0.0355888232588768, "chain_id": "3DI28L7YXADDQP66OW6ATZNBUAI1EH_1_10"}
{"score": 0.19042262434959412, "chain_id": "3BWI6RSP7G8R1BL8DCNJU9EOXWIE7R_1_1"}
{"score": 0.24923454225063324, "chain_id": "3BWI6RSP7G8R1BL8DCNJU9EOXWIE7R_1_2"}
{"score": 0.09590952843427658, "chain_id": "3BWI6RSP7G8R1BL8DCNJU9EOXWIE7R_1_3"}
{"score": 0.04243480786681175, "chain_id": "3BWI6RSP7G8R1BL8DCNJU9EOXWIE7R_1_4"}
{"score": 0.13771934807300568, "chain_id": "3BWI6RSP7G8R1BL8DCNJU9EOXWIE7R_1_5"}
{"score": 0.03412945568561554, "chain_id": "3BWI6RSP7G8R1BL8DCNJU9EOXWIE7R_1_6"}
{"score": 0.05711529776453972, "chain_id": "3BWI6RSP7G8R1BL8DCNJU9EOXWIE7R_1_7"}
{"score": 0.02454688772559166, "chain_id": "3BWI6RSP7G8R1BL8DCNJU9EOXWIE7R_1_8"}
{"score": 0.02437150478363037, "chain_id": "3BWI6RSP7G8R1BL8DCNJU9EOXWIE7R_1_9"}
{"score": 0.1026318147778511, "chain_id": "3BWI6RSP7G8R1BL8DCNJU9EOXWIE7R_1_10"}
{"score": 0.8677151799201965, "chain_id": "3SITXWYCNV8N9NFFLYPRN0LRXFTBX3_1_1"}
{"score": 0.9662438035011292, "chain_id": "3SITXWYCNV8N9NFFLYPRN0LRXFTBX3_1_3"}
{"score": 0.8924311995506287, "chain_id": "3SITXWYCNV8N9NFFLYPRN0LRXFTBX3_1_10"}
{"score": 0.7390774488449097, "chain_id": "3SITXWYCNV8N9NFFLYPRN0LRXFTBX3_1_2"}
{"score": 0.7166509032249451, "chain_id": "3SITXWYCNV8N9NFFLYPRN0LRXFTBX3_1_4"}
{"score": 0.07298184931278229, "chain_id": "3SITXWYCNV8N9NFFLYPRN0LRXFTBX3_1_5"}
{"score": 0.487682580947876, "chain_id": "3SITXWYCNV8N9NFFLYPRN0LRXFTBX3_1_6"}
{"score": 0.7349413633346558, "chain_id": "3SITXWYCNV8N9NFFLYPRN0LRXFTBX3_1_7"}
{"score": 0.02858559973537922, "chain_id": "3SITXWYCNV8N9NFFLYPRN0LRXFTBX3_1_8"}
{"score": 0.05392782762646675, "chain_id": "3SITXWYCNV8N9NFFLYPRN0LRXFTBX3_1_9"}
{"score": 0.8730975985527039, "chain_id": "3X3OR7WPZZZ97V0J432TL403I028LJ_1_2"}
{"score": 0.9899190664291382, "chain_id": "3X3OR7WPZZZ97V0J432TL403I028LJ_1_4"}
{"score": 0.9736604690551758, "chain_id": "3X3OR7WPZZZ97V0J432TL403I028LJ_1_5"}
{"score": 0.9490594267845154, "chain_id": "3X3OR7WPZZZ97V0J432TL403I028LJ_1_6"}
{"score": 0.8088275194168091, "chain_id": "3X3OR7WPZZZ97V0J432TL403I028LJ_1_1"}
{"score": 0.9772505164146423, "chain_id": "3X3OR7WPZZZ97V0J432TL403I028LJ_1_3"}
{"score": 0.5436381101608276, "chain_id": "3X3OR7WPZZZ97V0J432TL403I028LJ_1_7"}
{"score": 0.057776033878326416, "chain_id": "3X3OR7WPZZZ97V0J432TL403I028LJ_1_8"}
{"score": 0.852643609046936, "chain_id": "3X3OR7WPZZZ97V0J432TL403I028LJ_1_9"}
{"score": 0.4531160295009613, "chain_id": "3X3OR7WPZZZ97V0J432TL403I028LJ_1_10"}
{"score": 0.9930194020271301, "chain_id": "3COPXFW7XBBJTHHI5KS3SQIEIB2KPJ_1_1"}
{"score": 0.9890272617340088, "chain_id": "3COPXFW7XBBJTHHI5KS3SQIEIB2KPJ_1_2"}
{"score": 0.9927254915237427, "chain_id": "3COPXFW7XBBJTHHI5KS3SQIEIB2KPJ_1_3"}
{"score": 0.9039775729179382, "chain_id": "3COPXFW7XBBJTHHI5KS3SQIEIB2KPJ_1_4"}
{"score": 0.12487666308879852, "chain_id": "3COPXFW7XBBJTHHI5KS3SQIEIB2KPJ_1_8"}
{"score": 0.08035741746425629, "chain_id": "3COPXFW7XBBJTHHI5KS3SQIEIB2KPJ_1_5"}
{"score": 0.03511691838502884, "chain_id": "3COPXFW7XBBJTHHI5KS3SQIEIB2KPJ_1_6"}
{"score": 0.12795662879943848, "chain_id": "3COPXFW7XBBJTHHI5KS3SQIEIB2KPJ_1_7"}
{"score": 0.09927283227443695, "chain_id": "3COPXFW7XBBJTHHI5KS3SQIEIB2KPJ_1_9"}
{"score": 0.5734879374504089, "chain_id": "3COPXFW7XBBJTHHI5KS3SQIEIB2KPJ_1_10"}
{"score": 0.8388729095458984, "chain_id": "39GHHAVOMFQ2T4PHPF03OD76CTZ4JC_1_1"}
{"score": 0.655609667301178, "chain_id": "39GHHAVOMFQ2T4PHPF03OD76CTZ4JC_1_2"}
{"score": 0.8127828240394592, "chain_id": "39GHHAVOMFQ2T4PHPF03OD76CTZ4JC_1_3"}
{"score": 0.3541891276836395, "chain_id": "39GHHAVOMFQ2T4PHPF03OD76CTZ4JC_1_4"}
{"score": 0.20652128756046295, "chain_id": "39GHHAVOMFQ2T4PHPF03OD76CTZ4JC_1_5"}
{"score": 0.40452367067337036, "chain_id": "39GHHAVOMFQ2T4PHPF03OD76CTZ4JC_1_6"}
{"score": 0.17748276889324188, "chain_id": "39GHHAVOMFQ2T4PHPF03OD76CTZ4JC_1_7"}
{"score": 0.07201887667179108, "chain_id": "39GHHAVOMFQ2T4PHPF03OD76CTZ4JC_1_8"}
{"score": 0.21665357053279877, "chain_id": "39GHHAVOMFQ2T4PHPF03OD76CTZ4JC_1_9"}
{"score": 0.7836982607841492, "chain_id": "39GHHAVOMFQ2T4PHPF03OD76CTZ4JC_1_10"}
{"score": 0.991576611995697, "chain_id": "3B4YI393V9VEUSAI2A5ZEHEZMW5SSL_1_1"}
{"score": 0.9040723443031311, "chain_id": "3B4YI393V9VEUSAI2A5ZEHEZMW5SSL_1_2"}
{"score": 0.9016364216804504, "chain_id": "3B4YI393V9VEUSAI2A5ZEHEZMW5SSL_1_4"}
{"score": 0.22652201354503632, "chain_id": "3B4YI393V9VEUSAI2A5ZEHEZMW5SSL_1_5"}
{"score": 0.9237200021743774, "chain_id": "3B4YI393V9VEUSAI2A5ZEHEZMW5SSL_1_6"}
{"score": 0.980274498462677, "chain_id": "3B4YI393V9VEUSAI2A5ZEHEZMW5SSL_1_7"}
{"score": 0.9132698178291321, "chain_id": "3B4YI393V9VEUSAI2A5ZEHEZMW5SSL_1_9"}
{"score": 0.09229519218206406, "chain_id": "3B4YI393V9VEUSAI2A5ZEHEZMW5SSL_1_3"}
{"score": 0.9599925875663757, "chain_id": "3B4YI393V9VEUSAI2A5ZEHEZMW5SSL_1_8"}
{"score": 0.6633633971214294, "chain_id": "3B4YI393V9VEUSAI2A5ZEHEZMW5SSL_1_10"}
{"score": 0.5672847628593445, "chain_id": "358010RM5ES2I1DLQFGROCFY5SGXVD_1_1"}
{"score": 0.026940464973449707, "chain_id": "358010RM5ES2I1DLQFGROCFY5SGXVD_1_2"}
{"score": 0.03843623772263527, "chain_id": "358010RM5ES2I1DLQFGROCFY5SGXVD_1_3"}
{"score": 0.012274721637368202, "chain_id": "358010RM5ES2I1DLQFGROCFY5SGXVD_1_4"}
{"score": 0.6300244331359863, "chain_id": "358010RM5ES2I1DLQFGROCFY5SGXVD_1_5"}
{"score": 0.7793747186660767, "chain_id": "358010RM5ES2I1DLQFGROCFY5SGXVD_1_6"}
{"score": 0.7176294922828674, "chain_id": "358010RM5ES2I1DLQFGROCFY5SGXVD_1_7"}
{"score": 0.046665292233228683, "chain_id": "358010RM5ES2I1DLQFGROCFY5SGXVD_1_8"}
{"score": 0.15539857745170593, "chain_id": "358010RM5ES2I1DLQFGROCFY5SGXVD_1_9"}
{"score": 0.04810214042663574, "chain_id": "358010RM5ES2I1DLQFGROCFY5SGXVD_1_10"}
{"score": 0.02039898931980133, "chain_id": "3ZPBJO59KP0J2UDKUQYBF4LXH4IDH8_1_1"}
{"score": 0.029801685363054276, "chain_id": "3ZPBJO59KP0J2UDKUQYBF4LXH4IDH8_1_2"}
{"score": 0.019129447638988495, "chain_id": "3ZPBJO59KP0J2UDKUQYBF4LXH4IDH8_1_3"}
{"score": 0.03246943652629852, "chain_id": "3ZPBJO59KP0J2UDKUQYBF4LXH4IDH8_1_4"}
{"score": 0.023985380306839943, "chain_id": "3ZPBJO59KP0J2UDKUQYBF4LXH4IDH8_1_5"}
{"score": 0.09388952702283859, "chain_id": "3ZPBJO59KP0J2UDKUQYBF4LXH4IDH8_1_6"}
{"score": 0.053435083478689194, "chain_id": "3ZPBJO59KP0J2UDKUQYBF4LXH4IDH8_1_7"}
{"score": 0.04640437290072441, "chain_id": "3ZPBJO59KP0J2UDKUQYBF4LXH4IDH8_1_8"}
{"score": 0.041978247463703156, "chain_id": "3ZPBJO59KP0J2UDKUQYBF4LXH4IDH8_1_9"}
{"score": 0.055630914866924286, "chain_id": "3ZPBJO59KP0J2UDKUQYBF4LXH4IDH8_1_10"}
{"score": 0.9838666915893555, "chain_id": "3AAJC4I4FGRIW1D6A8QTI9KFRGFJZE_1_1"}
{"score": 0.6635459065437317, "chain_id": "3AAJC4I4FGRIW1D6A8QTI9KFRGFJZE_1_5"}
{"score": 0.5723658204078674, "chain_id": "3AAJC4I4FGRIW1D6A8QTI9KFRGFJZE_1_6"}
{"score": 0.5826608538627625, "chain_id": "3AAJC4I4FGRIW1D6A8QTI9KFRGFJZE_1_7"}
{"score": 0.8434186577796936, "chain_id": "3AAJC4I4FGRIW1D6A8QTI9KFRGFJZE_1_2"}
{"score": 0.8048439025878906, "chain_id": "3AAJC4I4FGRIW1D6A8QTI9KFRGFJZE_1_3"}
{"score": 0.6612354516983032, "chain_id": "3AAJC4I4FGRIW1D6A8QTI9KFRGFJZE_1_4"}
{"score": 0.04095875099301338, "chain_id": "3AAJC4I4FGRIW1D6A8QTI9KFRGFJZE_1_8"}
{"score": 0.03956053778529167, "chain_id": "3AAJC4I4FGRIW1D6A8QTI9KFRGFJZE_1_9"}
{"score": 0.23466959595680237, "chain_id": "3AAJC4I4FGRIW1D6A8QTI9KFRGFJZE_1_10"}
{"score": 0.10889358818531036, "chain_id": "3GNCZX450IMDH48WTTFEYCFIRFBAP7_1_4"}
{"score": 0.9899401068687439, "chain_id": "3GNCZX450IMDH48WTTFEYCFIRFBAP7_1_7"}
{"score": 0.23216918110847473, "chain_id": "3GNCZX450IMDH48WTTFEYCFIRFBAP7_1_10"}
{"score": 0.05620089918375015, "chain_id": "3GNCZX450IMDH48WTTFEYCFIRFBAP7_1_1"}
{"score": 0.05811790004372597, "chain_id": "3GNCZX450IMDH48WTTFEYCFIRFBAP7_1_2"}
{"score": 0.17988057434558868, "chain_id": "3GNCZX450IMDH48WTTFEYCFIRFBAP7_1_3"}
{"score": 0.051997169852256775, "chain_id": "3GNCZX450IMDH48WTTFEYCFIRFBAP7_1_5"}
{"score": 0.7987406849861145, "chain_id": "3GNCZX450IMDH48WTTFEYCFIRFBAP7_1_6"}
{"score": 0.8331166505813599, "chain_id": "3GNCZX450IMDH48WTTFEYCFIRFBAP7_1_8"}
{"score": 0.6241106986999512, "chain_id": "3GNCZX450IMDH48WTTFEYCFIRFBAP7_1_9"}
{"score": 0.7295015454292297, "chain_id": "3UJ1CZ6IZHODOQC7QESRL647NP5S5W_1_5"}
{"score": 0.8280764818191528, "chain_id": "3UJ1CZ6IZHODOQC7QESRL647NP5S5W_1_8"}
{"score": 0.5561606884002686, "chain_id": "3UJ1CZ6IZHODOQC7QESRL647NP5S5W_1_1"}
{"score": 0.6576045751571655, "chain_id": "3UJ1CZ6IZHODOQC7QESRL647NP5S5W_1_2"}
{"score": 0.7987393140792847, "chain_id": "3UJ1CZ6IZHODOQC7QESRL647NP5S5W_1_3"}
{"score": 0.7447609305381775, "chain_id": "3UJ1CZ6IZHODOQC7QESRL647NP5S5W_1_4"}
{"score": 0.030584413558244705, "chain_id": "3UJ1CZ6IZHODOQC7QESRL647NP5S5W_1_6"}
{"score": 0.013920615427196026, "chain_id": "3UJ1CZ6IZHODOQC7QESRL647NP5S5W_1_7"}
{"score": 0.23129211366176605, "chain_id": "3UJ1CZ6IZHODOQC7QESRL647NP5S5W_1_9"}
{"score": 0.08972518891096115, "chain_id": "3UJ1CZ6IZHODOQC7QESRL647NP5S5W_1_10"}
{"score": 0.9490699172019958, "chain_id": "3DQQ64TANGKAOHBZUYB6G1C9AERWP4_1_3"}
{"score": 0.96598881483078, "chain_id": "3DQQ64TANGKAOHBZUYB6G1C9AERWP4_1_4"}
{"score": 0.20170822739601135, "chain_id": "3DQQ64TANGKAOHBZUYB6G1C9AERWP4_1_9"}
{"score": 0.8705440163612366, "chain_id": "3DQQ64TANGKAOHBZUYB6G1C9AERWP4_1_1"}
{"score": 0.4566705822944641, "chain_id": "3DQQ64TANGKAOHBZUYB6G1C9AERWP4_1_2"}
{"score": 0.17224301397800446, "chain_id": "3DQQ64TANGKAOHBZUYB6G1C9AERWP4_1_5"}
{"score": 0.18962536752223969, "chain_id": "3DQQ64TANGKAOHBZUYB6G1C9AERWP4_1_6"}
{"score": 0.300541490316391, "chain_id": "3DQQ64TANGKAOHBZUYB6G1C9AERWP4_1_7"}
{"score": 0.016791582107543945, "chain_id": "3DQQ64TANGKAOHBZUYB6G1C9AERWP4_1_8"}
{"score": 0.26340505480766296, "chain_id": "3DQQ64TANGKAOHBZUYB6G1C9AERWP4_1_10"}
{"score": 0.05878133326768875, "chain_id": "3CPLWGV3MOYZ90MEL8OMYSZ3LY59N4_1_1"}
{"score": 0.06216033175587654, "chain_id": "3CPLWGV3MOYZ90MEL8OMYSZ3LY59N4_1_2"}
{"score": 0.05432562902569771, "chain_id": "3CPLWGV3MOYZ90MEL8OMYSZ3LY59N4_1_3"}
{"score": 0.06393332034349442, "chain_id": "3CPLWGV3MOYZ90MEL8OMYSZ3LY59N4_1_4"}
{"score": 0.2870362102985382, "chain_id": "3CPLWGV3MOYZ90MEL8OMYSZ3LY59N4_1_5"}
{"score": 0.07903248071670532, "chain_id": "3CPLWGV3MOYZ90MEL8OMYSZ3LY59N4_1_6"}
{"score": 0.14126332104206085, "chain_id": "3CPLWGV3MOYZ90MEL8OMYSZ3LY59N4_1_7"}
{"score": 0.030931660905480385, "chain_id": "3CPLWGV3MOYZ90MEL8OMYSZ3LY59N4_1_8"}
{"score": 0.017358362674713135, "chain_id": "3CPLWGV3MOYZ90MEL8OMYSZ3LY59N4_1_9"}
{"score": 0.03399306535720825, "chain_id": "3CPLWGV3MOYZ90MEL8OMYSZ3LY59N4_1_10"}
{"score": 0.9152611494064331, "chain_id": "31EUONYN2V2FOSZTPOTV5ZO5MXDOVC_1_2"}
{"score": 0.5675478577613831, "chain_id": "31EUONYN2V2FOSZTPOTV5ZO5MXDOVC_1_3"}
{"score": 0.31397587060928345, "chain_id": "31EUONYN2V2FOSZTPOTV5ZO5MXDOVC_1_1"}
{"score": 0.49796608090400696, "chain_id": "31EUONYN2V2FOSZTPOTV5ZO5MXDOVC_1_4"}
{"score": 0.03460421413183212, "chain_id": "31EUONYN2V2FOSZTPOTV5ZO5MXDOVC_1_5"}
{"score": 0.07857023179531097, "chain_id": "31EUONYN2V2FOSZTPOTV5ZO5MXDOVC_1_6"}
{"score": 0.05302094668149948, "chain_id": "31EUONYN2V2FOSZTPOTV5ZO5MXDOVC_1_7"}
{"score": 0.01990589313209057, "chain_id": "31EUONYN2V2FOSZTPOTV5ZO5MXDOVC_1_8"}
{"score": 0.02114671654999256, "chain_id": "31EUONYN2V2FOSZTPOTV5ZO5MXDOVC_1_9"}
{"score": 0.3170507848262787, "chain_id": "31EUONYN2V2FOSZTPOTV5ZO5MXDOVC_1_10"}
{"score": 0.7402917742729187, "chain_id": "3DIP6YHAPCRV1PQRNHFP89AJ7XO8E5_1_1"}
{"score": 0.9655640721321106, "chain_id": "3DIP6YHAPCRV1PQRNHFP89AJ7XO8E5_1_5"}
{"score": 0.7648903727531433, "chain_id": "3DIP6YHAPCRV1PQRNHFP89AJ7XO8E5_1_7"}
{"score": 0.9027394652366638, "chain_id": "3DIP6YHAPCRV1PQRNHFP89AJ7XO8E5_1_9"}
{"score": 0.0968569964170456, "chain_id": "3DIP6YHAPCRV1PQRNHFP89AJ7XO8E5_1_2"}
{"score": 0.08263739198446274, "chain_id": "3DIP6YHAPCRV1PQRNHFP89AJ7XO8E5_1_3"}
{"score": 0.06310148537158966, "chain_id": "3DIP6YHAPCRV1PQRNHFP89AJ7XO8E5_1_4"}
{"score": 0.061150066554546356, "chain_id": "3DIP6YHAPCRV1PQRNHFP89AJ7XO8E5_1_6"}
{"score": 0.10117518156766891, "chain_id": "3DIP6YHAPCRV1PQRNHFP89AJ7XO8E5_1_8"}
{"score": 0.04760640487074852, "chain_id": "3DIP6YHAPCRV1PQRNHFP89AJ7XO8E5_1_10"}
{"score": 0.08878158032894135, "chain_id": "34V1S5K3GS0R2FGMMR25WHDHA7R96N_1_3"}
{"score": 0.27395081520080566, "chain_id": "34V1S5K3GS0R2FGMMR25WHDHA7R96N_1_1"}
{"score": 0.30657273530960083, "chain_id": "34V1S5K3GS0R2FGMMR25WHDHA7R96N_1_2"}
{"score": 0.10709596425294876, "chain_id": "34V1S5K3GS0R2FGMMR25WHDHA7R96N_1_4"}
{"score": 0.3051716983318329, "chain_id": "34V1S5K3GS0R2FGMMR25WHDHA7R96N_1_5"}
{"score": 0.152327299118042, "chain_id": "34V1S5K3GS0R2FGMMR25WHDHA7R96N_1_6"}
{"score": 0.02716139145195484, "chain_id": "34V1S5K3GS0R2FGMMR25WHDHA7R96N_1_7"}
{"score": 0.16915485262870789, "chain_id": "34V1S5K3GS0R2FGMMR25WHDHA7R96N_1_8"}
{"score": 0.04399985074996948, "chain_id": "34V1S5K3GS0R2FGMMR25WHDHA7R96N_1_9"}
{"score": 0.07850237935781479, "chain_id": "34V1S5K3GS0R2FGMMR25WHDHA7R96N_1_10"}
{"score": 0.9371589422225952, "chain_id": "37UEWGM5HT72ZTBBA2QAS6MU8IS1RF_1_2"}
{"score": 0.9109808802604675, "chain_id": "37UEWGM5HT72ZTBBA2QAS6MU8IS1RF_1_3"}
{"score": 0.6317013502120972, "chain_id": "37UEWGM5HT72ZTBBA2QAS6MU8IS1RF_1_9"}
{"score": 0.4306640326976776, "chain_id": "37UEWGM5HT72ZTBBA2QAS6MU8IS1RF_1_1"}
{"score": 0.1002872958779335, "chain_id": "37UEWGM5HT72ZTBBA2QAS6MU8IS1RF_1_4"}
{"score": 0.04155878350138664, "chain_id": "37UEWGM5HT72ZTBBA2QAS6MU8IS1RF_1_5"}
{"score": 0.13443580269813538, "chain_id": "37UEWGM5HT72ZTBBA2QAS6MU8IS1RF_1_6"}
{"score": 0.2667769491672516, "chain_id": "37UEWGM5HT72ZTBBA2QAS6MU8IS1RF_1_7"}
{"score": 0.02287575975060463, "chain_id": "37UEWGM5HT72ZTBBA2QAS6MU8IS1RF_1_8"}
{"score": 0.550178050994873, "chain_id": "37UEWGM5HT72ZTBBA2QAS6MU8IS1RF_1_10"}
{"score": 0.9797806739807129, "chain_id": "3RGU30DZTA7IXUENVJ0ZA7O6WIQMJP_1_1"}
{"score": 0.944057285785675, "chain_id": "3RGU30DZTA7IXUENVJ0ZA7O6WIQMJP_1_4"}
{"score": 0.761716902256012, "chain_id": "3RGU30DZTA7IXUENVJ0ZA7O6WIQMJP_1_2"}
{"score": 0.9232766032218933, "chain_id": "3RGU30DZTA7IXUENVJ0ZA7O6WIQMJP_1_3"}
{"score": 0.059294912964105606, "chain_id": "3RGU30DZTA7IXUENVJ0ZA7O6WIQMJP_1_5"}
{"score": 0.0203122366219759, "chain_id": "3RGU30DZTA7IXUENVJ0ZA7O6WIQMJP_1_6"}
{"score": 0.018798161298036575, "chain_id": "3RGU30DZTA7IXUENVJ0ZA7O6WIQMJP_1_7"}
{"score": 0.07736842334270477, "chain_id": "3RGU30DZTA7IXUENVJ0ZA7O6WIQMJP_1_8"}
{"score": 0.021748125553131104, "chain_id": "3RGU30DZTA7IXUENVJ0ZA7O6WIQMJP_1_9"}
{"score": 0.02470923773944378, "chain_id": "3RGU30DZTA7IXUENVJ0ZA7O6WIQMJP_1_10"}
{"score": 0.5237719416618347, "chain_id": "3S0TNUHWKTHQ9JCRRM452RSY9OT8DQ_1_1"}
{"score": 0.4557165503501892, "chain_id": "3S0TNUHWKTHQ9JCRRM452RSY9OT8DQ_1_10"}
{"score": 0.4119187593460083, "chain_id": "3S0TNUHWKTHQ9JCRRM452RSY9OT8DQ_1_2"}
{"score": 0.27782589197158813, "chain_id": "3S0TNUHWKTHQ9JCRRM452RSY9OT8DQ_1_3"}
{"score": 0.027700325474143028, "chain_id": "3S0TNUHWKTHQ9JCRRM452RSY9OT8DQ_1_4"}
{"score": 0.3129349946975708, "chain_id": "3S0TNUHWKTHQ9JCRRM452RSY9OT8DQ_1_5"}
{"score": 0.7985759973526001, "chain_id": "3S0TNUHWKTHQ9JCRRM452RSY9OT8DQ_1_6"}
{"score": 0.12279020994901657, "chain_id": "3S0TNUHWKTHQ9JCRRM452RSY9OT8DQ_1_7"}
{"score": 0.615582287311554, "chain_id": "3S0TNUHWKTHQ9JCRRM452RSY9OT8DQ_1_8"}
{"score": 0.8866746425628662, "chain_id": "3S0TNUHWKTHQ9JCRRM452RSY9OT8DQ_1_9"}
{"score": 0.8501101136207581, "chain_id": "32EYX73OY08I8Q29CQ0U38RRLGQURC_1_6"}
{"score": 0.884676992893219, "chain_id": "32EYX73OY08I8Q29CQ0U38RRLGQURC_1_8"}
{"score": 0.11442866176366806, "chain_id": "32EYX73OY08I8Q29CQ0U38RRLGQURC_1_1"}
{"score": 0.649062991142273, "chain_id": "32EYX73OY08I8Q29CQ0U38RRLGQURC_1_2"}
{"score": 0.4783647954463959, "chain_id": "32EYX73OY08I8Q29CQ0U38RRLGQURC_1_3"}
{"score": 0.41176122426986694, "chain_id": "32EYX73OY08I8Q29CQ0U38RRLGQURC_1_4"}
{"score": 0.3668137788772583, "chain_id": "32EYX73OY08I8Q29CQ0U38RRLGQURC_1_5"}
{"score": 0.40353846549987793, "chain_id": "32EYX73OY08I8Q29CQ0U38RRLGQURC_1_7"}
{"score": 0.20501410961151123, "chain_id": "32EYX73OY08I8Q29CQ0U38RRLGQURC_1_9"}
{"score": 0.04619297385215759, "chain_id": "32EYX73OY08I8Q29CQ0U38RRLGQURC_1_10"}
{"score": 0.9931579232215881, "chain_id": "3P4RDNWND55W1BOWA427IEHPH73IJ0_1_1"}
{"score": 0.9931700229644775, "chain_id": "3P4RDNWND55W1BOWA427IEHPH73IJ0_1_2"}
{"score": 0.992088794708252, "chain_id": "3P4RDNWND55W1BOWA427IEHPH73IJ0_1_3"}
{"score": 0.9934792518615723, "chain_id": "3P4RDNWND55W1BOWA427IEHPH73IJ0_1_4"}
{"score": 0.797666609287262, "chain_id": "3P4RDNWND55W1BOWA427IEHPH73IJ0_1_5"}
{"score": 0.041131969541311264, "chain_id": "3P4RDNWND55W1BOWA427IEHPH73IJ0_1_6"}
{"score": 0.19092996418476105, "chain_id": "3P4RDNWND55W1BOWA427IEHPH73IJ0_1_7"}
{"score": 0.013852659612894058, "chain_id": "3P4RDNWND55W1BOWA427IEHPH73IJ0_1_8"}
{"score": 0.025386832654476166, "chain_id": "3P4RDNWND55W1BOWA427IEHPH73IJ0_1_9"}
{"score": 0.022900022566318512, "chain_id": "3P4RDNWND55W1BOWA427IEHPH73IJ0_1_10"}
{"score": 0.048492394387722015, "chain_id": "39JEC7537U0EF32QZJK4AZUODJCCV1_1_7"}
{"score": 0.1793692708015442, "chain_id": "39JEC7537U0EF32QZJK4AZUODJCCV1_1_1"}
{"score": 0.10036386549472809, "chain_id": "39JEC7537U0EF32QZJK4AZUODJCCV1_1_2"}
{"score": 0.06789931654930115, "chain_id": "39JEC7537U0EF32QZJK4AZUODJCCV1_1_3"}
{"score": 0.13974498212337494, "chain_id": "39JEC7537U0EF32QZJK4AZUODJCCV1_1_4"}
{"score": 0.09736977517604828, "chain_id": "39JEC7537U0EF32QZJK4AZUODJCCV1_1_5"}
{"score": 0.04163971170783043, "chain_id": "39JEC7537U0EF32QZJK4AZUODJCCV1_1_6"}
{"score": 0.29282885789871216, "chain_id": "39JEC7537U0EF32QZJK4AZUODJCCV1_1_8"}
{"score": 0.03585021570324898, "chain_id": "39JEC7537U0EF32QZJK4AZUODJCCV1_1_9"}
{"score": 0.053520213812589645, "chain_id": "39JEC7537U0EF32QZJK4AZUODJCCV1_1_10"}
{"score": 0.9248103499412537, "chain_id": "3IAEQB9FMEJ1ZK89PPKBG7VK5TLWDS_1_4"}
{"score": 0.9099322557449341, "chain_id": "3IAEQB9FMEJ1ZK89PPKBG7VK5TLWDS_1_1"}
{"score": 0.9655501246452332, "chain_id": "3IAEQB9FMEJ1ZK89PPKBG7VK5TLWDS_1_2"}
{"score": 0.3891332447528839, "chain_id": "3IAEQB9FMEJ1ZK89PPKBG7VK5TLWDS_1_3"}
{"score": 0.018126243725419044, "chain_id": "3IAEQB9FMEJ1ZK89PPKBG7VK5TLWDS_1_5"}
{"score": 0.013900495134294033, "chain_id": "3IAEQB9FMEJ1ZK89PPKBG7VK5TLWDS_1_6"}
{"score": 0.22470398247241974, "chain_id": "3IAEQB9FMEJ1ZK89PPKBG7VK5TLWDS_1_7"}
{"score": 0.9323638677597046, "chain_id": "3IAEQB9FMEJ1ZK89PPKBG7VK5TLWDS_1_8"}
{"score": 0.032378822565078735, "chain_id": "3IAEQB9FMEJ1ZK89PPKBG7VK5TLWDS_1_9"}
{"score": 0.012809830717742443, "chain_id": "3IAEQB9FMEJ1ZK89PPKBG7VK5TLWDS_1_10"}
{"score": 0.7368147373199463, "chain_id": "3VHHR074H3G57HV0UYAN7448LBK7LN_1_1"}
{"score": 0.9859920144081116, "chain_id": "3VHHR074H3G57HV0UYAN7448LBK7LN_1_5"}
{"score": 0.9256269335746765, "chain_id": "3VHHR074H3G57HV0UYAN7448LBK7LN_1_7"}
{"score": 0.9846070408821106, "chain_id": "3VHHR074H3G57HV0UYAN7448LBK7LN_1_2"}
{"score": 0.9927776455879211, "chain_id": "3VHHR074H3G57HV0UYAN7448LBK7LN_1_3"}
{"score": 0.9792460203170776, "chain_id": "3VHHR074H3G57HV0UYAN7448LBK7LN_1_4"}
{"score": 0.8776112198829651, "chain_id": "3VHHR074H3G57HV0UYAN7448LBK7LN_1_6"}
{"score": 0.3568742275238037, "chain_id": "3VHHR074H3G57HV0UYAN7448LBK7LN_1_8"}
{"score": 0.8068726658821106, "chain_id": "3VHHR074H3G57HV0UYAN7448LBK7LN_1_9"}
{"score": 0.056474924087524414, "chain_id": "3VHHR074H3G57HV0UYAN7448LBK7LN_1_10"}
{"score": 0.9397361278533936, "chain_id": "3TK8OJTYM1KX9SBU4O6AUZTVC81VPK_1_1"}
{"score": 0.985040545463562, "chain_id": "3TK8OJTYM1KX9SBU4O6AUZTVC81VPK_1_2"}
{"score": 0.8654618859291077, "chain_id": "3TK8OJTYM1KX9SBU4O6AUZTVC81VPK_1_3"}
{"score": 0.9077264666557312, "chain_id": "3TK8OJTYM1KX9SBU4O6AUZTVC81VPK_1_4"}
{"score": 0.05928724631667137, "chain_id": "3TK8OJTYM1KX9SBU4O6AUZTVC81VPK_1_9"}
{"score": 0.1782280057668686, "chain_id": "3TK8OJTYM1KX9SBU4O6AUZTVC81VPK_1_5"}
{"score": 0.06339334696531296, "chain_id": "3TK8OJTYM1KX9SBU4O6AUZTVC81VPK_1_6"}
{"score": 0.03679480776190758, "chain_id": "3TK8OJTYM1KX9SBU4O6AUZTVC81VPK_1_7"}
{"score": 0.05029880255460739, "chain_id": "3TK8OJTYM1KX9SBU4O6AUZTVC81VPK_1_8"}
{"score": 0.2763892114162445, "chain_id": "3TK8OJTYM1KX9SBU4O6AUZTVC81VPK_1_10"}
{"score": 0.827564537525177, "chain_id": "3XLBSAQ9Z4BPC6C49Z1WFJF603F7ZV_1_5"}
{"score": 0.8790101408958435, "chain_id": "3XLBSAQ9Z4BPC6C49Z1WFJF603F7ZV_1_6"}
{"score": 0.041306812316179276, "chain_id": "3XLBSAQ9Z4BPC6C49Z1WFJF603F7ZV_1_1"}
{"score": 0.2153109610080719, "chain_id": "3XLBSAQ9Z4BPC6C49Z1WFJF603F7ZV_1_2"}
{"score": 0.07707159966230392, "chain_id": "3XLBSAQ9Z4BPC6C49Z1WFJF603F7ZV_1_3"}
{"score": 0.029593754559755325, "chain_id": "3XLBSAQ9Z4BPC6C49Z1WFJF603F7ZV_1_4"}
{"score": 0.48833954334259033, "chain_id": "3XLBSAQ9Z4BPC6C49Z1WFJF603F7ZV_1_7"}
{"score": 0.06931349635124207, "chain_id": "3XLBSAQ9Z4BPC6C49Z1WFJF603F7ZV_1_8"}
{"score": 0.06853901594877243, "chain_id": "3XLBSAQ9Z4BPC6C49Z1WFJF603F7ZV_1_9"}
{"score": 0.1542142927646637, "chain_id": "3XLBSAQ9Z4BPC6C49Z1WFJF603F7ZV_1_10"}
{"score": 0.9370269179344177, "chain_id": "3QXNC7EIPIUWO4U7K2MONG3QDPU09O_1_2"}
{"score": 0.9041363000869751, "chain_id": "3QXNC7EIPIUWO4U7K2MONG3QDPU09O_1_3"}
{"score": 0.5081300735473633, "chain_id": "3QXNC7EIPIUWO4U7K2MONG3QDPU09O_1_5"}
{"score": 0.11312387883663177, "chain_id": "3QXNC7EIPIUWO4U7K2MONG3QDPU09O_1_8"}
{"score": 0.3954760730266571, "chain_id": "3QXNC7EIPIUWO4U7K2MONG3QDPU09O_1_1"}
{"score": 0.6651642322540283, "chain_id": "3QXNC7EIPIUWO4U7K2MONG3QDPU09O_1_4"}
{"score": 0.07977721095085144, "chain_id": "3QXNC7EIPIUWO4U7K2MONG3QDPU09O_1_6"}
{"score": 0.0334656685590744, "chain_id": "3QXNC7EIPIUWO4U7K2MONG3QDPU09O_1_7"}
{"score": 0.21959811449050903, "chain_id": "3QXNC7EIPIUWO4U7K2MONG3QDPU09O_1_9"}
{"score": 0.017824780195951462, "chain_id": "3QXNC7EIPIUWO4U7K2MONG3QDPU09O_1_10"}
{"score": 0.8149022459983826, "chain_id": "3A7Y0R2P2ONTR6DR9Q28LO44K28XJV_1_3"}
{"score": 0.44125139713287354, "chain_id": "3A7Y0R2P2ONTR6DR9Q28LO44K28XJV_1_1"}
{"score": 0.7532663941383362, "chain_id": "3A7Y0R2P2ONTR6DR9Q28LO44K28XJV_1_2"}
{"score": 0.14438781142234802, "chain_id": "3A7Y0R2P2ONTR6DR9Q28LO44K28XJV_1_4"}
{"score": 0.43170228600502014, "chain_id": "3A7Y0R2P2ONTR6DR9Q28LO44K28XJV_1_5"}
{"score": 0.08682089298963547, "chain_id": "3A7Y0R2P2ONTR6DR9Q28LO44K28XJV_1_6"}
{"score": 0.12554210424423218, "chain_id": "3A7Y0R2P2ONTR6DR9Q28LO44K28XJV_1_7"}
{"score": 0.1341239959001541, "chain_id": "3A7Y0R2P2ONTR6DR9Q28LO44K28XJV_1_8"}
{"score": 0.09858568012714386, "chain_id": "3A7Y0R2P2ONTR6DR9Q28LO44K28XJV_1_9"}
{"score": 0.3541208505630493, "chain_id": "3A7Y0R2P2ONTR6DR9Q28LO44K28XJV_1_10"}
{"score": 0.9284783601760864, "chain_id": "3YHH42UU5BERP6VG9ZPESPULEMV0LC_1_1"}
{"score": 0.17332731187343597, "chain_id": "3YHH42UU5BERP6VG9ZPESPULEMV0LC_1_2"}
{"score": 0.09466678649187088, "chain_id": "3YHH42UU5BERP6VG9ZPESPULEMV0LC_1_3"}
{"score": 0.024699067696928978, "chain_id": "3YHH42UU5BERP6VG9ZPESPULEMV0LC_1_4"}
{"score": 0.03744608536362648, "chain_id": "3YHH42UU5BERP6VG9ZPESPULEMV0LC_1_5"}
{"score": 0.020724868401885033, "chain_id": "3YHH42UU5BERP6VG9ZPESPULEMV0LC_1_6"}
{"score": 0.026563894003629684, "chain_id": "3YHH42UU5BERP6VG9ZPESPULEMV0LC_1_7"}
{"score": 0.030676621943712234, "chain_id": "3YHH42UU5BERP6VG9ZPESPULEMV0LC_1_8"}
{"score": 0.038216251879930496, "chain_id": "3YHH42UU5BERP6VG9ZPESPULEMV0LC_1_9"}
{"score": 0.024104217067360878, "chain_id": "3YHH42UU5BERP6VG9ZPESPULEMV0LC_1_10"}
{"score": 0.9911412000656128, "chain_id": "3XUHV3NRVKXOYHYRFKGSHSX50H5H59_1_1"}
{"score": 0.9915564060211182, "chain_id": "3XUHV3NRVKXOYHYRFKGSHSX50H5H59_1_2"}
{"score": 0.9879387617111206, "chain_id": "3XUHV3NRVKXOYHYRFKGSHSX50H5H59_1_3"}
{"score": 0.5454429388046265, "chain_id": "3XUHV3NRVKXOYHYRFKGSHSX50H5H59_1_4"}
{"score": 0.3519054055213928, "chain_id": "3XUHV3NRVKXOYHYRFKGSHSX50H5H59_1_5"}
{"score": 0.05270044878125191, "chain_id": "3XUHV3NRVKXOYHYRFKGSHSX50H5H59_1_6"}
{"score": 0.3398987054824829, "chain_id": "3XUHV3NRVKXOYHYRFKGSHSX50H5H59_1_7"}
{"score": 0.28911253809928894, "chain_id": "3XUHV3NRVKXOYHYRFKGSHSX50H5H59_1_8"}
{"score": 0.7393916249275208, "chain_id": "3XUHV3NRVKXOYHYRFKGSHSX50H5H59_1_9"}
{"score": 0.23339775204658508, "chain_id": "3XUHV3NRVKXOYHYRFKGSHSX50H5H59_1_10"}
{"score": 0.9701636433601379, "chain_id": "3XC1O3LBOSLS5FS771DOC0WQXFSLTV_1_3"}
{"score": 0.9537873268127441, "chain_id": "3XC1O3LBOSLS5FS771DOC0WQXFSLTV_1_5"}
{"score": 0.21650849282741547, "chain_id": "3XC1O3LBOSLS5FS771DOC0WQXFSLTV_1_7"}
{"score": 0.09075932949781418, "chain_id": "3XC1O3LBOSLS5FS771DOC0WQXFSLTV_1_1"}
{"score": 0.09849830716848373, "chain_id": "3XC1O3LBOSLS5FS771DOC0WQXFSLTV_1_2"}
{"score": 0.26058363914489746, "chain_id": "3XC1O3LBOSLS5FS771DOC0WQXFSLTV_1_4"}
{"score": 0.03404556214809418, "chain_id": "3XC1O3LBOSLS5FS771DOC0WQXFSLTV_1_6"}
{"score": 0.4648422300815582, "chain_id": "3XC1O3LBOSLS5FS771DOC0WQXFSLTV_1_8"}
{"score": 0.573468804359436, "chain_id": "3XC1O3LBOSLS5FS771DOC0WQXFSLTV_1_9"}
{"score": 0.05827384069561958, "chain_id": "3XC1O3LBOSLS5FS771DOC0WQXFSLTV_1_10"}
{"score": 0.9500613212585449, "chain_id": "33LK57MYLT4BV4WWX2Z7AAB2B3RSZY_1_1"}
{"score": 0.9513761401176453, "chain_id": "33LK57MYLT4BV4WWX2Z7AAB2B3RSZY_1_2"}
{"score": 0.9844329357147217, "chain_id": "33LK57MYLT4BV4WWX2Z7AAB2B3RSZY_1_4"}
{"score": 0.9320622682571411, "chain_id": "33LK57MYLT4BV4WWX2Z7AAB2B3RSZY_1_5"}
{"score": 0.9219931364059448, "chain_id": "33LK57MYLT4BV4WWX2Z7AAB2B3RSZY_1_3"}
{"score": 0.2440357208251953, "chain_id": "33LK57MYLT4BV4WWX2Z7AAB2B3RSZY_1_6"}
{"score": 0.22360707819461823, "chain_id": "33LK57MYLT4BV4WWX2Z7AAB2B3RSZY_1_7"}
{"score": 0.056213848292827606, "chain_id": "33LK57MYLT4BV4WWX2Z7AAB2B3RSZY_1_8"}
{"score": 0.7196550965309143, "chain_id": "33LK57MYLT4BV4WWX2Z7AAB2B3RSZY_1_9"}
{"score": 0.07539691030979156, "chain_id": "33LK57MYLT4BV4WWX2Z7AAB2B3RSZY_1_10"}
{"score": 0.9234769344329834, "chain_id": "3E13VNJ1NNUP6U8SKFW1EEL3P3R1IV_1_1"}
{"score": 0.9584911465644836, "chain_id": "3E13VNJ1NNUP6U8SKFW1EEL3P3R1IV_1_2"}
{"score": 0.9619503021240234, "chain_id": "3E13VNJ1NNUP6U8SKFW1EEL3P3R1IV_1_4"}
{"score": 0.9469819664955139, "chain_id": "3E13VNJ1NNUP6U8SKFW1EEL3P3R1IV_1_5"}
{"score": 0.9547551870346069, "chain_id": "3E13VNJ1NNUP6U8SKFW1EEL3P3R1IV_1_6"}
{"score": 0.9559493064880371, "chain_id": "3E13VNJ1NNUP6U8SKFW1EEL3P3R1IV_1_3"}
{"score": 0.7728909850120544, "chain_id": "3E13VNJ1NNUP6U8SKFW1EEL3P3R1IV_1_7"}
{"score": 0.9357352256774902, "chain_id": "3E13VNJ1NNUP6U8SKFW1EEL3P3R1IV_1_8"}
{"score": 0.03410017117857933, "chain_id": "3E13VNJ1NNUP6U8SKFW1EEL3P3R1IV_1_9"}
{"score": 0.9578048586845398, "chain_id": "3E13VNJ1NNUP6U8SKFW1EEL3P3R1IV_1_10"}
{"score": 0.989385724067688, "chain_id": "34S6N1K2ZVI2061C77WZYHT2N01HLG_1_2"}
{"score": 0.9872689247131348, "chain_id": "34S6N1K2ZVI2061C77WZYHT2N01HLG_1_3"}
{"score": 0.26495078206062317, "chain_id": "34S6N1K2ZVI2061C77WZYHT2N01HLG_1_6"}
{"score": 0.9873025417327881, "chain_id": "34S6N1K2ZVI2061C77WZYHT2N01HLG_1_1"}
{"score": 0.6788386106491089, "chain_id": "34S6N1K2ZVI2061C77WZYHT2N01HLG_1_4"}
{"score": 0.7591531872749329, "chain_id": "34S6N1K2ZVI2061C77WZYHT2N01HLG_1_5"}
{"score": 0.3953125774860382, "chain_id": "34S6N1K2ZVI2061C77WZYHT2N01HLG_1_7"}
{"score": 0.052270032465457916, "chain_id": "34S6N1K2ZVI2061C77WZYHT2N01HLG_1_8"}
{"score": 0.39466920495033264, "chain_id": "34S6N1K2ZVI2061C77WZYHT2N01HLG_1_9"}
{"score": 0.3549373149871826, "chain_id": "34S6N1K2ZVI2061C77WZYHT2N01HLG_1_10"}
{"score": 0.9924431443214417, "chain_id": "3HRMW88U16PBVOD19BQTS29A1O10MB_1_1"}
{"score": 0.9925919771194458, "chain_id": "3HRMW88U16PBVOD19BQTS29A1O10MB_1_2"}
{"score": 0.4823726713657379, "chain_id": "3HRMW88U16PBVOD19BQTS29A1O10MB_1_4"}
{"score": 0.08370941877365112, "chain_id": "3HRMW88U16PBVOD19BQTS29A1O10MB_1_9"}
{"score": 0.6949993968009949, "chain_id": "3HRMW88U16PBVOD19BQTS29A1O10MB_1_3"}
{"score": 0.26041871309280396, "chain_id": "3HRMW88U16PBVOD19BQTS29A1O10MB_1_5"}
{"score": 0.5557044148445129, "chain_id": "3HRMW88U16PBVOD19BQTS29A1O10MB_1_6"}
{"score": 0.05171849578619003, "chain_id": "3HRMW88U16PBVOD19BQTS29A1O10MB_1_7"}
{"score": 0.7311990857124329, "chain_id": "3HRMW88U16PBVOD19BQTS29A1O10MB_1_8"}
{"score": 0.09448745101690292, "chain_id": "3HRMW88U16PBVOD19BQTS29A1O10MB_1_10"}
{"score": 0.09414617717266083, "chain_id": "3CN4LGXD5XNSOTKGBF16Y0MUSDEY45_1_1"}
{"score": 0.03789866343140602, "chain_id": "3CN4LGXD5XNSOTKGBF16Y0MUSDEY45_1_2"}
{"score": 0.10343729704618454, "chain_id": "3CN4LGXD5XNSOTKGBF16Y0MUSDEY45_1_3"}
{"score": 0.18756303191184998, "chain_id": "3CN4LGXD5XNSOTKGBF16Y0MUSDEY45_1_4"}
{"score": 0.06260009855031967, "chain_id": "3CN4LGXD5XNSOTKGBF16Y0MUSDEY45_1_5"}
{"score": 0.02683061733841896, "chain_id": "3CN4LGXD5XNSOTKGBF16Y0MUSDEY45_1_6"}
{"score": 0.1320200264453888, "chain_id": "3CN4LGXD5XNSOTKGBF16Y0MUSDEY45_1_7"}
{"score": 0.1512463390827179, "chain_id": "3CN4LGXD5XNSOTKGBF16Y0MUSDEY45_1_8"}
{"score": 0.05469933897256851, "chain_id": "3CN4LGXD5XNSOTKGBF16Y0MUSDEY45_1_9"}
{"score": 0.21418710052967072, "chain_id": "3CN4LGXD5XNSOTKGBF16Y0MUSDEY45_1_10"}
{"score": 0.9911412000656128, "chain_id": "3ON104KXQKVOZOPGWEJID31EH8SW4O_1_1"}
{"score": 0.9879387617111206, "chain_id": "3ON104KXQKVOZOPGWEJID31EH8SW4O_1_3"}
{"score": 0.5454429388046265, "chain_id": "3ON104KXQKVOZOPGWEJID31EH8SW4O_1_4"}
{"score": 0.9915564060211182, "chain_id": "3ON104KXQKVOZOPGWEJID31EH8SW4O_1_2"}
{"score": 0.3519054055213928, "chain_id": "3ON104KXQKVOZOPGWEJID31EH8SW4O_1_5"}
{"score": 0.05270044878125191, "chain_id": "3ON104KXQKVOZOPGWEJID31EH8SW4O_1_6"}
{"score": 0.3398987054824829, "chain_id": "3ON104KXQKVOZOPGWEJID31EH8SW4O_1_7"}
{"score": 0.28911253809928894, "chain_id": "3ON104KXQKVOZOPGWEJID31EH8SW4O_1_8"}
{"score": 0.7393916249275208, "chain_id": "3ON104KXQKVOZOPGWEJID31EH8SW4O_1_9"}
{"score": 0.23339775204658508, "chain_id": "3ON104KXQKVOZOPGWEJID31EH8SW4O_1_10"}
{"score": 0.975583553314209, "chain_id": "3X87C8JFV6A2HCV5A6GUJHZY5MBSQG_1_1"}
{"score": 0.9818987846374512, "chain_id": "3X87C8JFV6A2HCV5A6GUJHZY5MBSQG_1_2"}
{"score": 0.07144536077976227, "chain_id": "3X87C8JFV6A2HCV5A6GUJHZY5MBSQG_1_3"}
{"score": 0.8955197334289551, "chain_id": "3X87C8JFV6A2HCV5A6GUJHZY5MBSQG_1_4"}
{"score": 0.35928311944007874, "chain_id": "3X87C8JFV6A2HCV5A6GUJHZY5MBSQG_1_5"}
{"score": 0.6139234304428101, "chain_id": "3X87C8JFV6A2HCV5A6GUJHZY5MBSQG_1_6"}
{"score": 0.08803892880678177, "chain_id": "3X87C8JFV6A2HCV5A6GUJHZY5MBSQG_1_7"}
{"score": 0.08229970186948776, "chain_id": "3X87C8JFV6A2HCV5A6GUJHZY5MBSQG_1_8"}
{"score": 0.02350766211748123, "chain_id": "3X87C8JFV6A2HCV5A6GUJHZY5MBSQG_1_9"}
{"score": 0.12838736176490784, "chain_id": "3X87C8JFV6A2HCV5A6GUJHZY5MBSQG_1_10"}
{"score": 0.9286932945251465, "chain_id": "37U1UTWH9VLKATVW9NZP7G92PRQ8R7_1_3"}
{"score": 0.9022470116615295, "chain_id": "37U1UTWH9VLKATVW9NZP7G92PRQ8R7_1_4"}
{"score": 0.5733755230903625, "chain_id": "37U1UTWH9VLKATVW9NZP7G92PRQ8R7_1_9"}
{"score": 0.9559585452079773, "chain_id": "37U1UTWH9VLKATVW9NZP7G92PRQ8R7_1_1"}
{"score": 0.977510392665863, "chain_id": "37U1UTWH9VLKATVW9NZP7G92PRQ8R7_1_2"}
{"score": 0.12998946011066437, "chain_id": "37U1UTWH9VLKATVW9NZP7G92PRQ8R7_1_5"}
{"score": 0.8422825932502747, "chain_id": "37U1UTWH9VLKATVW9NZP7G92PRQ8R7_1_6"}
{"score": 0.8573161363601685, "chain_id": "37U1UTWH9VLKATVW9NZP7G92PRQ8R7_1_7"}
{"score": 0.9107280969619751, "chain_id": "37U1UTWH9VLKATVW9NZP7G92PRQ8R7_1_8"}
{"score": 0.41937798261642456, "chain_id": "37U1UTWH9VLKATVW9NZP7G92PRQ8R7_1_10"}
{"score": 0.8941967487335205, "chain_id": "3WETL7AQWT7949RS0ZRQDYWVIVD53J_1_9"}
{"score": 0.9592616558074951, "chain_id": "3WETL7AQWT7949RS0ZRQDYWVIVD53J_1_10"}
{"score": 0.07392957806587219, "chain_id": "3WETL7AQWT7949RS0ZRQDYWVIVD53J_1_1"}
{"score": 0.03171267732977867, "chain_id": "3WETL7AQWT7949RS0ZRQDYWVIVD53J_1_2"}
{"score": 0.038761042058467865, "chain_id": "3WETL7AQWT7949RS0ZRQDYWVIVD53J_1_3"}
{"score": 0.025439172983169556, "chain_id": "3WETL7AQWT7949RS0ZRQDYWVIVD53J_1_4"}
{"score": 0.08171079307794571, "chain_id": "3WETL7AQWT7949RS0ZRQDYWVIVD53J_1_5"}
{"score": 0.09056725353002548, "chain_id": "3WETL7AQWT7949RS0ZRQDYWVIVD53J_1_6"}
{"score": 0.23437395691871643, "chain_id": "3WETL7AQWT7949RS0ZRQDYWVIVD53J_1_7"}
{"score": 0.2249758243560791, "chain_id": "3WETL7AQWT7949RS0ZRQDYWVIVD53J_1_8"}
{"score": 0.9304951429367065, "chain_id": "3CPLWGV3MOYZ90MEL8OMYSZ3BC89NP_1_1"}
{"score": 0.8457614779472351, "chain_id": "3CPLWGV3MOYZ90MEL8OMYSZ3BC89NP_1_2"}
{"score": 0.47452083230018616, "chain_id": "3CPLWGV3MOYZ90MEL8OMYSZ3BC89NP_1_3"}
{"score": 0.9155700206756592, "chain_id": "3CPLWGV3MOYZ90MEL8OMYSZ3BC89NP_1_4"}
{"score": 0.7761457562446594, "chain_id": "3CPLWGV3MOYZ90MEL8OMYSZ3BC89NP_1_5"}
{"score": 0.692997932434082, "chain_id": "3CPLWGV3MOYZ90MEL8OMYSZ3BC89NP_1_6"}
{"score": 0.032247673720121384, "chain_id": "3CPLWGV3MOYZ90MEL8OMYSZ3BC89NP_1_7"}
{"score": 0.13073614239692688, "chain_id": "3CPLWGV3MOYZ90MEL8OMYSZ3BC89NP_1_8"}
{"score": 0.4521399438381195, "chain_id": "3CPLWGV3MOYZ90MEL8OMYSZ3BC89NP_1_9"}
{"score": 0.7082409262657166, "chain_id": "3CPLWGV3MOYZ90MEL8OMYSZ3BC89NP_1_10"}
{"score": 0.06435142457485199, "chain_id": "3EKVH9QMEY3FN4A2B5V4S0FVR702DM_1_1"}
{"score": 0.02962491102516651, "chain_id": "3EKVH9QMEY3FN4A2B5V4S0FVR702DM_1_2"}
{"score": 0.02412967011332512, "chain_id": "3EKVH9QMEY3FN4A2B5V4S0FVR702DM_1_3"}
{"score": 0.02474788762629032, "chain_id": "3EKVH9QMEY3FN4A2B5V4S0FVR702DM_1_4"}
{"score": 0.10786899924278259, "chain_id": "3EKVH9QMEY3FN4A2B5V4S0FVR702DM_1_5"}
{"score": 0.07021905481815338, "chain_id": "3EKVH9QMEY3FN4A2B5V4S0FVR702DM_1_6"}
{"score": 0.3357139229774475, "chain_id": "3EKVH9QMEY3FN4A2B5V4S0FVR702DM_1_7"}
{"score": 0.04108813777565956, "chain_id": "3EKVH9QMEY3FN4A2B5V4S0FVR702DM_1_8"}
{"score": 0.10226189345121384, "chain_id": "3EKVH9QMEY3FN4A2B5V4S0FVR702DM_1_9"}
{"score": 0.028141168877482414, "chain_id": "3EKVH9QMEY3FN4A2B5V4S0FVR702DM_1_10"}
{"score": 0.2816557288169861, "chain_id": "3XLBSAQ9Z4BPC6C49Z1WFJF6Q4U7Z2_1_1"}
{"score": 0.5168759226799011, "chain_id": "3XLBSAQ9Z4BPC6C49Z1WFJF6Q4U7Z2_1_2"}
{"score": 0.25391876697540283, "chain_id": "3XLBSAQ9Z4BPC6C49Z1WFJF6Q4U7Z2_1_3"}
{"score": 0.4574108421802521, "chain_id": "3XLBSAQ9Z4BPC6C49Z1WFJF6Q4U7Z2_1_4"}
{"score": 0.3300494849681854, "chain_id": "3XLBSAQ9Z4BPC6C49Z1WFJF6Q4U7Z2_1_5"}
{"score": 0.4554673433303833, "chain_id": "3XLBSAQ9Z4BPC6C49Z1WFJF6Q4U7Z2_1_6"}
{"score": 0.1464983969926834, "chain_id": "3XLBSAQ9Z4BPC6C49Z1WFJF6Q4U7Z2_1_7"}
{"score": 0.13975656032562256, "chain_id": "3XLBSAQ9Z4BPC6C49Z1WFJF6Q4U7Z2_1_8"}
{"score": 0.19901332259178162, "chain_id": "3XLBSAQ9Z4BPC6C49Z1WFJF6Q4U7Z2_1_9"}
{"score": 0.13872723281383514, "chain_id": "3XLBSAQ9Z4BPC6C49Z1WFJF6Q4U7Z2_1_10"}
{"score": 0.985104501247406, "chain_id": "3B4YI393V9VEUSAI2A5ZEHEZNDQSS5_1_1"}
{"score": 0.9878128170967102, "chain_id": "3B4YI393V9VEUSAI2A5ZEHEZNDQSS5_1_2"}
{"score": 0.8855710625648499, "chain_id": "3B4YI393V9VEUSAI2A5ZEHEZNDQSS5_1_3"}
{"score": 0.9705913066864014, "chain_id": "3B4YI393V9VEUSAI2A5ZEHEZNDQSS5_1_7"}
{"score": 0.6297586560249329, "chain_id": "3B4YI393V9VEUSAI2A5ZEHEZNDQSS5_1_9"}
{"score": 0.6940547227859497, "chain_id": "3B4YI393V9VEUSAI2A5ZEHEZNDQSS5_1_4"}
{"score": 0.8894538283348083, "chain_id": "3B4YI393V9VEUSAI2A5ZEHEZNDQSS5_1_5"}
{"score": 0.39119935035705566, "chain_id": "3B4YI393V9VEUSAI2A5ZEHEZNDQSS5_1_6"}
{"score": 0.9771032333374023, "chain_id": "3B4YI393V9VEUSAI2A5ZEHEZNDQSS5_1_8"}
{"score": 0.7975403666496277, "chain_id": "3B4YI393V9VEUSAI2A5ZEHEZNDQSS5_1_10"}
{"score": 0.7203274369239807, "chain_id": "3Y9N9SS8LYA48M6LF599BAKNU89D3O_1_1"}
{"score": 0.3985511064529419, "chain_id": "3Y9N9SS8LYA48M6LF599BAKNU89D3O_1_2"}
{"score": 0.17878098785877228, "chain_id": "3Y9N9SS8LYA48M6LF599BAKNU89D3O_1_3"}
{"score": 0.3389628529548645, "chain_id": "3Y9N9SS8LYA48M6LF599BAKNU89D3O_1_4"}
{"score": 0.2532750964164734, "chain_id": "3Y9N9SS8LYA48M6LF599BAKNU89D3O_1_5"}
{"score": 0.3055393695831299, "chain_id": "3Y9N9SS8LYA48M6LF599BAKNU89D3O_1_6"}
{"score": 0.17266309261322021, "chain_id": "3Y9N9SS8LYA48M6LF599BAKNU89D3O_1_7"}
{"score": 0.8434653878211975, "chain_id": "3Y9N9SS8LYA48M6LF599BAKNU89D3O_1_8"}
{"score": 0.12486838549375534, "chain_id": "3Y9N9SS8LYA48M6LF599BAKNU89D3O_1_9"}
{"score": 0.25975301861763, "chain_id": "3Y9N9SS8LYA48M6LF599BAKNU89D3O_1_10"}
{"score": 0.371199369430542, "chain_id": "31HQ4X3T3S9RQFFSI18Y2V04ZDLSLM_1_1"}
{"score": 0.8755092620849609, "chain_id": "31HQ4X3T3S9RQFFSI18Y2V04ZDLSLM_1_2"}
{"score": 0.49602678418159485, "chain_id": "31HQ4X3T3S9RQFFSI18Y2V04ZDLSLM_1_3"}
{"score": 0.06325878202915192, "chain_id": "31HQ4X3T3S9RQFFSI18Y2V04ZDLSLM_1_4"}
{"score": 0.7653234601020813, "chain_id": "31HQ4X3T3S9RQFFSI18Y2V04ZDLSLM_1_5"}
{"score": 0.06852202862501144, "chain_id": "31HQ4X3T3S9RQFFSI18Y2V04ZDLSLM_1_6"}
{"score": 0.7175394892692566, "chain_id": "31HQ4X3T3S9RQFFSI18Y2V04ZDLSLM_1_7"}
{"score": 0.768619954586029, "chain_id": "31HQ4X3T3S9RQFFSI18Y2V04ZDLSLM_1_8"}
{"score": 0.9119682908058167, "chain_id": "31HQ4X3T3S9RQFFSI18Y2V04ZDLSLM_1_9"}
{"score": 0.06277018785476685, "chain_id": "31HQ4X3T3S9RQFFSI18Y2V04ZDLSLM_1_10"}
{"score": 0.7039282321929932, "chain_id": "3SEPORI8WNY7V8A2G2DGPAHWKLDZA9_1_1"}
{"score": 0.7259421944618225, "chain_id": "3SEPORI8WNY7V8A2G2DGPAHWKLDZA9_1_3"}
{"score": 0.9485774636268616, "chain_id": "3SEPORI8WNY7V8A2G2DGPAHWKLDZA9_1_2"}
{"score": 0.7526618838310242, "chain_id": "3SEPORI8WNY7V8A2G2DGPAHWKLDZA9_1_4"}
{"score": 0.480444073677063, "chain_id": "3SEPORI8WNY7V8A2G2DGPAHWKLDZA9_1_5"}
{"score": 0.038734495639801025, "chain_id": "3SEPORI8WNY7V8A2G2DGPAHWKLDZA9_1_6"}
{"score": 0.2017560452222824, "chain_id": "3SEPORI8WNY7V8A2G2DGPAHWKLDZA9_1_7"}
{"score": 0.1496296525001526, "chain_id": "3SEPORI8WNY7V8A2G2DGPAHWKLDZA9_1_8"}
{"score": 0.04215997830033302, "chain_id": "3SEPORI8WNY7V8A2G2DGPAHWKLDZA9_1_9"}
{"score": 0.27183234691619873, "chain_id": "3SEPORI8WNY7V8A2G2DGPAHWKLDZA9_1_10"}
{"score": 0.9373577833175659, "chain_id": "3JV9LGBJWTDW6V9Y0TU95YLVYBSOGB_1_1"}
{"score": 0.9307597279548645, "chain_id": "3JV9LGBJWTDW6V9Y0TU95YLVYBSOGB_1_2"}
{"score": 0.9316142797470093, "chain_id": "3JV9LGBJWTDW6V9Y0TU95YLVYBSOGB_1_3"}
{"score": 0.8835011124610901, "chain_id": "3JV9LGBJWTDW6V9Y0TU95YLVYBSOGB_1_10"}
{"score": 0.4656927287578583, "chain_id": "3JV9LGBJWTDW6V9Y0TU95YLVYBSOGB_1_4"}
{"score": 0.17284758388996124, "chain_id": "3JV9LGBJWTDW6V9Y0TU95YLVYBSOGB_1_5"}
{"score": 0.03937001898884773, "chain_id": "3JV9LGBJWTDW6V9Y0TU95YLVYBSOGB_1_6"}
{"score": 0.7646206617355347, "chain_id": "3JV9LGBJWTDW6V9Y0TU95YLVYBSOGB_1_7"}
{"score": 0.5962219834327698, "chain_id": "3JV9LGBJWTDW6V9Y0TU95YLVYBSOGB_1_8"}
{"score": 0.7792434096336365, "chain_id": "3JV9LGBJWTDW6V9Y0TU95YLVYBSOGB_1_9"}
{"score": 0.9892157316207886, "chain_id": "3PMBY0YE272GIWPNWIF8IH5R8QV9C4_1_1"}
{"score": 0.9866553544998169, "chain_id": "3PMBY0YE272GIWPNWIF8IH5R8QV9C4_1_2"}
{"score": 0.11654862016439438, "chain_id": "3PMBY0YE272GIWPNWIF8IH5R8QV9C4_1_3"}
{"score": 0.9637656211853027, "chain_id": "3PMBY0YE272GIWPNWIF8IH5R8QV9C4_1_4"}
{"score": 0.48135048151016235, "chain_id": "3PMBY0YE272GIWPNWIF8IH5R8QV9C4_1_5"}
{"score": 0.4770289361476898, "chain_id": "3PMBY0YE272GIWPNWIF8IH5R8QV9C4_1_6"}
{"score": 0.6048957109451294, "chain_id": "3PMBY0YE272GIWPNWIF8IH5R8QV9C4_1_7"}
{"score": 0.6323962807655334, "chain_id": "3PMBY0YE272GIWPNWIF8IH5R8QV9C4_1_8"}
{"score": 0.5279843807220459, "chain_id": "3PMBY0YE272GIWPNWIF8IH5R8QV9C4_1_9"}
{"score": 0.12232233583927155, "chain_id": "3PMBY0YE272GIWPNWIF8IH5R8QV9C4_1_10"}
{"score": 0.10007143020629883, "chain_id": "3OCHAWUVGOJO2QJ9RB2KM34HK81XK6_1_1"}
{"score": 0.02330140396952629, "chain_id": "3OCHAWUVGOJO2QJ9RB2KM34HK81XK6_1_2"}
{"score": 0.04050569608807564, "chain_id": "3OCHAWUVGOJO2QJ9RB2KM34HK81XK6_1_3"}
{"score": 0.04966859519481659, "chain_id": "3OCHAWUVGOJO2QJ9RB2KM34HK81XK6_1_4"}
{"score": 0.06262548267841339, "chain_id": "3OCHAWUVGOJO2QJ9RB2KM34HK81XK6_1_5"}
{"score": 0.053697939962148666, "chain_id": "3OCHAWUVGOJO2QJ9RB2KM34HK81XK6_1_6"}
{"score": 0.0327845998108387, "chain_id": "3OCHAWUVGOJO2QJ9RB2KM34HK81XK6_1_7"}
{"score": 0.07214315235614777, "chain_id": "3OCHAWUVGOJO2QJ9RB2KM34HK81XK6_1_8"}
{"score": 0.04036815091967583, "chain_id": "3OCHAWUVGOJO2QJ9RB2KM34HK81XK6_1_9"}
{"score": 0.023873278871178627, "chain_id": "3OCHAWUVGOJO2QJ9RB2KM34HK81XK6_1_10"}
{"score": 0.03623440861701965, "chain_id": "3V0Z7YWSIYZ1HLAO2QVYYML2KMO2VE_1_4"}
{"score": 0.04904032498598099, "chain_id": "3V0Z7YWSIYZ1HLAO2QVYYML2KMO2VE_1_6"}
{"score": 0.34383049607276917, "chain_id": "3V0Z7YWSIYZ1HLAO2QVYYML2KMO2VE_1_10"}
{"score": 0.09315416216850281, "chain_id": "3V0Z7YWSIYZ1HLAO2QVYYML2KMO2VE_1_1"}
{"score": 0.33410096168518066, "chain_id": "3V0Z7YWSIYZ1HLAO2QVYYML2KMO2VE_1_2"}
{"score": 0.029605641961097717, "chain_id": "3V0Z7YWSIYZ1HLAO2QVYYML2KMO2VE_1_3"}
{"score": 0.02664381079375744, "chain_id": "3V0Z7YWSIYZ1HLAO2QVYYML2KMO2VE_1_5"}
{"score": 0.10763447731733322, "chain_id": "3V0Z7YWSIYZ1HLAO2QVYYML2KMO2VE_1_7"}
{"score": 0.03201819956302643, "chain_id": "3V0Z7YWSIYZ1HLAO2QVYYML2KMO2VE_1_8"}
{"score": 0.07126771658658981, "chain_id": "3V0Z7YWSIYZ1HLAO2QVYYML2KMO2VE_1_9"}
{"score": 0.07413273304700851, "chain_id": "39ZSFO5CA8V1A2JW4LRL1H508VHJUJ_1_1"}
{"score": 0.03021029755473137, "chain_id": "39ZSFO5CA8V1A2JW4LRL1H508VHJUJ_1_2"}
{"score": 0.11357197910547256, "chain_id": "39ZSFO5CA8V1A2JW4LRL1H508VHJUJ_1_3"}
{"score": 0.0410899855196476, "chain_id": "39ZSFO5CA8V1A2JW4LRL1H508VHJUJ_1_4"}
{"score": 0.24142299592494965, "chain_id": "39ZSFO5CA8V1A2JW4LRL1H508VHJUJ_1_5"}
{"score": 0.33365097641944885, "chain_id": "39ZSFO5CA8V1A2JW4LRL1H508VHJUJ_1_6"}
{"score": 0.03207913786172867, "chain_id": "39ZSFO5CA8V1A2JW4LRL1H508VHJUJ_1_7"}
{"score": 0.1313140094280243, "chain_id": "39ZSFO5CA8V1A2JW4LRL1H508VHJUJ_1_8"}
{"score": 0.04317371919751167, "chain_id": "39ZSFO5CA8V1A2JW4LRL1H508VHJUJ_1_9"}
{"score": 0.05277268588542938, "chain_id": "39ZSFO5CA8V1A2JW4LRL1H508VHJUJ_1_10"}
{"score": 0.8665762543678284, "chain_id": "3JBT3HLQF81EICG45LVDF56RN6OPZS_1_1"}
{"score": 0.9299886226654053, "chain_id": "3JBT3HLQF81EICG45LVDF56RN6OPZS_1_4"}
{"score": 0.4190259575843811, "chain_id": "3JBT3HLQF81EICG45LVDF56RN6OPZS_1_2"}
{"score": 0.08959508687257767, "chain_id": "3JBT3HLQF81EICG45LVDF56RN6OPZS_1_3"}
{"score": 0.0748099610209465, "chain_id": "3JBT3HLQF81EICG45LVDF56RN6OPZS_1_5"}
{"score": 0.05029595270752907, "chain_id": "3JBT3HLQF81EICG45LVDF56RN6OPZS_1_6"}
{"score": 0.043651171028614044, "chain_id": "3JBT3HLQF81EICG45LVDF56RN6OPZS_1_7"}
{"score": 0.025844929739832878, "chain_id": "3JBT3HLQF81EICG45LVDF56RN6OPZS_1_8"}
{"score": 0.03649374470114708, "chain_id": "3JBT3HLQF81EICG45LVDF56RN6OPZS_1_9"}
{"score": 0.0572853684425354, "chain_id": "3JBT3HLQF81EICG45LVDF56RN6OPZS_1_10"}
{"score": 0.8200407028198242, "chain_id": "3S06PH7KSR38YJS6S1VQNH5QWA7D1G_1_1"}
{"score": 0.03357328847050667, "chain_id": "3S06PH7KSR38YJS6S1VQNH5QWA7D1G_1_2"}
{"score": 0.13045024871826172, "chain_id": "3S06PH7KSR38YJS6S1VQNH5QWA7D1G_1_3"}
{"score": 0.03768966346979141, "chain_id": "3S06PH7KSR38YJS6S1VQNH5QWA7D1G_1_4"}
{"score": 0.03238387405872345, "chain_id": "3S06PH7KSR38YJS6S1VQNH5QWA7D1G_1_5"}
{"score": 0.07609093189239502, "chain_id": "3S06PH7KSR38YJS6S1VQNH5QWA7D1G_1_6"}
{"score": 0.1822500228881836, "chain_id": "3S06PH7KSR38YJS6S1VQNH5QWA7D1G_1_7"}
{"score": 0.01868278533220291, "chain_id": "3S06PH7KSR38YJS6S1VQNH5QWA7D1G_1_8"}
{"score": 0.09913386404514313, "chain_id": "3S06PH7KSR38YJS6S1VQNH5QWA7D1G_1_9"}
{"score": 0.0640428215265274, "chain_id": "3S06PH7KSR38YJS6S1VQNH5QWA7D1G_1_10"}
{"score": 0.9861311912536621, "chain_id": "3137ONMDKG4AU4W96FRD0MRHYQLGE0_1_1"}
{"score": 0.9851925373077393, "chain_id": "3137ONMDKG4AU4W96FRD0MRHYQLGE0_1_2"}
{"score": 0.3894752264022827, "chain_id": "3137ONMDKG4AU4W96FRD0MRHYQLGE0_1_9"}
{"score": 0.3272625207901001, "chain_id": "3137ONMDKG4AU4W96FRD0MRHYQLGE0_1_3"}
{"score": 0.09538961946964264, "chain_id": "3137ONMDKG4AU4W96FRD0MRHYQLGE0_1_4"}
{"score": 0.11374291777610779, "chain_id": "3137ONMDKG4AU4W96FRD0MRHYQLGE0_1_5"}
{"score": 0.13065317273139954, "chain_id": "3137ONMDKG4AU4W96FRD0MRHYQLGE0_1_6"}
{"score": 0.8174380660057068, "chain_id": "3137ONMDKG4AU4W96FRD0MRHYQLGE0_1_7"}
{"score": 0.7123695611953735, "chain_id": "3137ONMDKG4AU4W96FRD0MRHYQLGE0_1_8"}
{"score": 0.13775019347667694, "chain_id": "3137ONMDKG4AU4W96FRD0MRHYQLGE0_1_10"}
{"score": 0.9595767259597778, "chain_id": "35L9RVQFCOH5JWO6GLO0P4PL0CIUHH_1_1"}
{"score": 0.9644328355789185, "chain_id": "35L9RVQFCOH5JWO6GLO0P4PL0CIUHH_1_2"}
{"score": 0.4511675536632538, "chain_id": "35L9RVQFCOH5JWO6GLO0P4PL0CIUHH_1_3"}
{"score": 0.0698520615696907, "chain_id": "35L9RVQFCOH5JWO6GLO0P4PL0CIUHH_1_4"}
{"score": 0.1948462724685669, "chain_id": "35L9RVQFCOH5JWO6GLO0P4PL0CIUHH_1_5"}
{"score": 0.15326550602912903, "chain_id": "35L9RVQFCOH5JWO6GLO0P4PL0CIUHH_1_6"}
{"score": 0.08292423188686371, "chain_id": "35L9RVQFCOH5JWO6GLO0P4PL0CIUHH_1_7"}
{"score": 0.047368429601192474, "chain_id": "35L9RVQFCOH5JWO6GLO0P4PL0CIUHH_1_8"}
{"score": 0.30458030104637146, "chain_id": "35L9RVQFCOH5JWO6GLO0P4PL0CIUHH_1_9"}
{"score": 0.646352231502533, "chain_id": "35L9RVQFCOH5JWO6GLO0P4PL0CIUHH_1_10"}
{"score": 0.8206232786178589, "chain_id": "3SB5N7Y3O33B3EHFY8SYFXPD5R3G0B_1_1"}
{"score": 0.919900119304657, "chain_id": "3SB5N7Y3O33B3EHFY8SYFXPD5R3G0B_1_3"}
{"score": 0.03297882899641991, "chain_id": "3SB5N7Y3O33B3EHFY8SYFXPD5R3G0B_1_2"}
{"score": 0.07507028430700302, "chain_id": "3SB5N7Y3O33B3EHFY8SYFXPD5R3G0B_1_4"}
{"score": 0.04659107327461243, "chain_id": "3SB5N7Y3O33B3EHFY8SYFXPD5R3G0B_1_5"}
{"score": 0.6771008372306824, "chain_id": "3SB5N7Y3O33B3EHFY8SYFXPD5R3G0B_1_6"}
{"score": 0.747796893119812, "chain_id": "3SB5N7Y3O33B3EHFY8SYFXPD5R3G0B_1_7"}
{"score": 0.6377186179161072, "chain_id": "3SB5N7Y3O33B3EHFY8SYFXPD5R3G0B_1_8"}
{"score": 0.03696179762482643, "chain_id": "3SB5N7Y3O33B3EHFY8SYFXPD5R3G0B_1_9"}
{"score": 0.020774411037564278, "chain_id": "3SB5N7Y3O33B3EHFY8SYFXPD5R3G0B_1_10"}
{"score": 0.6508806347846985, "chain_id": "33PPO7FECVEJYPO408GWFGMCDAOIDF_1_1"}
{"score": 0.7696019411087036, "chain_id": "33PPO7FECVEJYPO408GWFGMCDAOIDF_1_2"}
{"score": 0.16439040005207062, "chain_id": "33PPO7FECVEJYPO408GWFGMCDAOIDF_1_3"}
{"score": 0.11047597229480743, "chain_id": "33PPO7FECVEJYPO408GWFGMCDAOIDF_1_4"}
{"score": 0.37113863229751587, "chain_id": "33PPO7FECVEJYPO408GWFGMCDAOIDF_1_5"}
{"score": 0.09996318072080612, "chain_id": "33PPO7FECVEJYPO408GWFGMCDAOIDF_1_6"}
{"score": 0.1312263309955597, "chain_id": "33PPO7FECVEJYPO408GWFGMCDAOIDF_1_7"}
{"score": 0.10688218474388123, "chain_id": "33PPO7FECVEJYPO408GWFGMCDAOIDF_1_8"}
{"score": 0.3555869162082672, "chain_id": "33PPO7FECVEJYPO408GWFGMCDAOIDF_1_9"}
{"score": 0.3598127067089081, "chain_id": "33PPO7FECVEJYPO408GWFGMCDAOIDF_1_10"}
{"score": 0.9854554533958435, "chain_id": "3JZQSN0I3Q920IW51QBJI4CHBR5FGN_1_1"}
{"score": 0.9170889258384705, "chain_id": "3JZQSN0I3Q920IW51QBJI4CHBR5FGN_1_2"}
{"score": 0.955203115940094, "chain_id": "3JZQSN0I3Q920IW51QBJI4CHBR5FGN_1_3"}
{"score": 0.9288350939750671, "chain_id": "3JZQSN0I3Q920IW51QBJI4CHBR5FGN_1_4"}
{"score": 0.07584678381681442, "chain_id": "3JZQSN0I3Q920IW51QBJI4CHBR5FGN_1_5"}
{"score": 0.12949515879154205, "chain_id": "3JZQSN0I3Q920IW51QBJI4CHBR5FGN_1_6"}
{"score": 0.024214336648583412, "chain_id": "3JZQSN0I3Q920IW51QBJI4CHBR5FGN_1_7"}
{"score": 0.04442114382982254, "chain_id": "3JZQSN0I3Q920IW51QBJI4CHBR5FGN_1_8"}
{"score": 0.05987384915351868, "chain_id": "3JZQSN0I3Q920IW51QBJI4CHBR5FGN_1_9"}
{"score": 0.03869834169745445, "chain_id": "3JZQSN0I3Q920IW51QBJI4CHBR5FGN_1_10"}
{"score": 0.9153101444244385, "chain_id": "3OS4RQUCR9E691OUL4J5HTLKWUQFBG_1_1"}
{"score": 0.327162504196167, "chain_id": "3OS4RQUCR9E691OUL4J5HTLKWUQFBG_1_2"}
{"score": 0.8351789712905884, "chain_id": "3OS4RQUCR9E691OUL4J5HTLKWUQFBG_1_3"}
{"score": 0.37494418025016785, "chain_id": "3OS4RQUCR9E691OUL4J5HTLKWUQFBG_1_4"}
{"score": 0.33511707186698914, "chain_id": "3OS4RQUCR9E691OUL4J5HTLKWUQFBG_1_5"}
{"score": 0.5956321358680725, "chain_id": "3OS4RQUCR9E691OUL4J5HTLKWUQFBG_1_6"}
{"score": 0.12173295021057129, "chain_id": "3OS4RQUCR9E691OUL4J5HTLKWUQFBG_1_7"}
{"score": 0.5157716870307922, "chain_id": "3OS4RQUCR9E691OUL4J5HTLKWUQFBG_1_8"}
{"score": 0.28656500577926636, "chain_id": "3OS4RQUCR9E691OUL4J5HTLKWUQFBG_1_9"}
{"score": 0.06280107796192169, "chain_id": "3OS4RQUCR9E691OUL4J5HTLKWUQFBG_1_10"}
{"score": 0.8328372240066528, "chain_id": "3T111IHZ5EPKOYE6EF537C4DBPA9RL_1_1"}
{"score": 0.8132418394088745, "chain_id": "3T111IHZ5EPKOYE6EF537C4DBPA9RL_1_2"}
{"score": 0.31080466508865356, "chain_id": "3T111IHZ5EPKOYE6EF537C4DBPA9RL_1_6"}
{"score": 0.05086228623986244, "chain_id": "3T111IHZ5EPKOYE6EF537C4DBPA9RL_1_3"}
{"score": 0.04940200597047806, "chain_id": "3T111IHZ5EPKOYE6EF537C4DBPA9RL_1_4"}
{"score": 0.04488708823919296, "chain_id": "3T111IHZ5EPKOYE6EF537C4DBPA9RL_1_5"}
{"score": 0.12674780189990997, "chain_id": "3T111IHZ5EPKOYE6EF537C4DBPA9RL_1_7"}
{"score": 0.018627779558300972, "chain_id": "3T111IHZ5EPKOYE6EF537C4DBPA9RL_1_8"}
{"score": 0.04691970348358154, "chain_id": "3T111IHZ5EPKOYE6EF537C4DBPA9RL_1_9"}
{"score": 0.13956224918365479, "chain_id": "3T111IHZ5EPKOYE6EF537C4DBPA9RL_1_10"}
{"score": 0.11114989966154099, "chain_id": "3S06PH7KSR38YJS6S1VQNH5QUTUD13_1_10"}
{"score": 0.0534592904150486, "chain_id": "3S06PH7KSR38YJS6S1VQNH5QUTUD13_1_1"}
{"score": 0.20848946273326874, "chain_id": "3S06PH7KSR38YJS6S1VQNH5QUTUD13_1_2"}
{"score": 0.11582678556442261, "chain_id": "3S06PH7KSR38YJS6S1VQNH5QUTUD13_1_3"}
{"score": 0.7728883624076843, "chain_id": "3S06PH7KSR38YJS6S1VQNH5QUTUD13_1_4"}
{"score": 0.0352637805044651, "chain_id": "3S06PH7KSR38YJS6S1VQNH5QUTUD13_1_5"}
{"score": 0.3690553307533264, "chain_id": "3S06PH7KSR38YJS6S1VQNH5QUTUD13_1_6"}
{"score": 0.12561728060245514, "chain_id": "3S06PH7KSR38YJS6S1VQNH5QUTUD13_1_7"}
{"score": 0.1640329360961914, "chain_id": "3S06PH7KSR38YJS6S1VQNH5QUTUD13_1_8"}
{"score": 0.05541866272687912, "chain_id": "3S06PH7KSR38YJS6S1VQNH5QUTUD13_1_9"}
{"score": 0.9478998184204102, "chain_id": "3AUQQEL7U5SULB7AN3RKFYSNPA50V3_1_1"}
{"score": 0.9756757020950317, "chain_id": "3AUQQEL7U5SULB7AN3RKFYSNPA50V3_1_2"}
{"score": 0.7035666704177856, "chain_id": "3AUQQEL7U5SULB7AN3RKFYSNPA50V3_1_4"}
{"score": 0.6671239733695984, "chain_id": "3AUQQEL7U5SULB7AN3RKFYSNPA50V3_1_3"}
{"score": 0.019838441163301468, "chain_id": "3AUQQEL7U5SULB7AN3RKFYSNPA50V3_1_5"}
{"score": 0.02882516384124756, "chain_id": "3AUQQEL7U5SULB7AN3RKFYSNPA50V3_1_6"}
{"score": 0.13676314055919647, "chain_id": "3AUQQEL7U5SULB7AN3RKFYSNPA50V3_1_7"}
{"score": 0.05882963910698891, "chain_id": "3AUQQEL7U5SULB7AN3RKFYSNPA50V3_1_8"}
{"score": 0.026430286467075348, "chain_id": "3AUQQEL7U5SULB7AN3RKFYSNPA50V3_1_9"}
{"score": 0.05501439794898033, "chain_id": "3AUQQEL7U5SULB7AN3RKFYSNPA50V3_1_10"}
{"score": 0.9898603558540344, "chain_id": "3RRCEFRB7MBWBLR51NNMQPOT3VPB4A_1_1"}
{"score": 0.9400548934936523, "chain_id": "3RRCEFRB7MBWBLR51NNMQPOT3VPB4A_1_5"}
{"score": 0.24321627616882324, "chain_id": "3RRCEFRB7MBWBLR51NNMQPOT3VPB4A_1_6"}
{"score": 0.5583677887916565, "chain_id": "3RRCEFRB7MBWBLR51NNMQPOT3VPB4A_1_10"}
{"score": 0.8543221354484558, "chain_id": "3RRCEFRB7MBWBLR51NNMQPOT3VPB4A_1_2"}
{"score": 0.6206228733062744, "chain_id": "3RRCEFRB7MBWBLR51NNMQPOT3VPB4A_1_3"}
{"score": 0.9362301230430603, "chain_id": "3RRCEFRB7MBWBLR51NNMQPOT3VPB4A_1_4"}
{"score": 0.5604197382926941, "chain_id": "3RRCEFRB7MBWBLR51NNMQPOT3VPB4A_1_7"}
{"score": 0.4671285152435303, "chain_id": "3RRCEFRB7MBWBLR51NNMQPOT3VPB4A_1_8"}
{"score": 0.5804473161697388, "chain_id": "3RRCEFRB7MBWBLR51NNMQPOT3VPB4A_1_9"}
{"score": 0.038478754460811615, "chain_id": "3KKG4CDWKIXDNSC8339QZJT3FVY495_1_1"}
{"score": 0.015183629468083382, "chain_id": "3KKG4CDWKIXDNSC8339QZJT3FVY495_1_2"}
{"score": 0.04515805467963219, "chain_id": "3KKG4CDWKIXDNSC8339QZJT3FVY495_1_3"}
{"score": 0.0693303793668747, "chain_id": "3KKG4CDWKIXDNSC8339QZJT3FVY495_1_4"}
{"score": 0.057956136763095856, "chain_id": "3KKG4CDWKIXDNSC8339QZJT3FVY495_1_5"}
{"score": 0.018290700390934944, "chain_id": "3KKG4CDWKIXDNSC8339QZJT3FVY495_1_6"}
{"score": 0.05897916108369827, "chain_id": "3KKG4CDWKIXDNSC8339QZJT3FVY495_1_7"}
{"score": 0.02057814411818981, "chain_id": "3KKG4CDWKIXDNSC8339QZJT3FVY495_1_8"}
{"score": 0.43323272466659546, "chain_id": "3KKG4CDWKIXDNSC8339QZJT3FVY495_1_9"}
{"score": 0.05920135974884033, "chain_id": "3KKG4CDWKIXDNSC8339QZJT3FVY495_1_10"}
{"score": 0.017445262521505356, "chain_id": "3T111IHZ5EPKOYE6EF537C4D8OY9R4_1_1"}
{"score": 0.1103937104344368, "chain_id": "3T111IHZ5EPKOYE6EF537C4D8OY9R4_1_2"}
{"score": 0.03125349059700966, "chain_id": "3T111IHZ5EPKOYE6EF537C4D8OY9R4_1_3"}
{"score": 0.0166427381336689, "chain_id": "3T111IHZ5EPKOYE6EF537C4D8OY9R4_1_4"}
{"score": 0.027569981291890144, "chain_id": "3T111IHZ5EPKOYE6EF537C4D8OY9R4_1_5"}
{"score": 0.02527167834341526, "chain_id": "3T111IHZ5EPKOYE6EF537C4D8OY9R4_1_6"}
{"score": 0.05757890269160271, "chain_id": "3T111IHZ5EPKOYE6EF537C4D8OY9R4_1_7"}
{"score": 0.0396931953728199, "chain_id": "3T111IHZ5EPKOYE6EF537C4D8OY9R4_1_8"}
{"score": 0.05047374963760376, "chain_id": "3T111IHZ5EPKOYE6EF537C4D8OY9R4_1_9"}
{"score": 0.02872523106634617, "chain_id": "3T111IHZ5EPKOYE6EF537C4D8OY9R4_1_10"}
{"score": 0.060079626739025116, "chain_id": "3U088ZLJVKS7007FDDWG10B1X0E0WJ_1_1"}
{"score": 0.4571964740753174, "chain_id": "3U088ZLJVKS7007FDDWG10B1X0E0WJ_1_2"}
{"score": 0.0540136955678463, "chain_id": "3U088ZLJVKS7007FDDWG10B1X0E0WJ_1_3"}
{"score": 0.44360002875328064, "chain_id": "3U088ZLJVKS7007FDDWG10B1X0E0WJ_1_4"}
{"score": 0.060300618410110474, "chain_id": "3U088ZLJVKS7007FDDWG10B1X0E0WJ_1_5"}
{"score": 0.2242891490459442, "chain_id": "3U088ZLJVKS7007FDDWG10B1X0E0WJ_1_6"}
{"score": 0.0686982274055481, "chain_id": "3U088ZLJVKS7007FDDWG10B1X0E0WJ_1_7"}
{"score": 0.07322326302528381, "chain_id": "3U088ZLJVKS7007FDDWG10B1X0E0WJ_1_8"}
{"score": 0.24867519736289978, "chain_id": "3U088ZLJVKS7007FDDWG10B1X0E0WJ_1_9"}
{"score": 0.2965419292449951, "chain_id": "3U088ZLJVKS7007FDDWG10B1X0E0WJ_1_10"}
{"score": 0.09923050552606583, "chain_id": "3TXMY6UCAENMAV69DKQU4CVGIU1CQ3_1_1"}
{"score": 0.17064528167247772, "chain_id": "3TXMY6UCAENMAV69DKQU4CVGIU1CQ3_1_2"}
{"score": 0.5405291318893433, "chain_id": "3TXMY6UCAENMAV69DKQU4CVGIU1CQ3_1_3"}
{"score": 0.06580367684364319, "chain_id": "3TXMY6UCAENMAV69DKQU4CVGIU1CQ3_1_4"}
{"score": 0.03045237436890602, "chain_id": "3TXMY6UCAENMAV69DKQU4CVGIU1CQ3_1_5"}
{"score": 0.05288715660572052, "chain_id": "3TXMY6UCAENMAV69DKQU4CVGIU1CQ3_1_6"}
{"score": 0.022661028429865837, "chain_id": "3TXMY6UCAENMAV69DKQU4CVGIU1CQ3_1_7"}
{"score": 0.03221901133656502, "chain_id": "3TXMY6UCAENMAV69DKQU4CVGIU1CQ3_1_8"}
{"score": 0.0314701609313488, "chain_id": "3TXMY6UCAENMAV69DKQU4CVGIU1CQ3_1_9"}
{"score": 0.08094891905784607, "chain_id": "3TXMY6UCAENMAV69DKQU4CVGIU1CQ3_1_10"}
{"score": 0.6553331613540649, "chain_id": "39GAF6DQWRZUS0SSJMVKT3BX946V1F_1_1"}
{"score": 0.9599562883377075, "chain_id": "39GAF6DQWRZUS0SSJMVKT3BX946V1F_1_2"}
{"score": 0.9804926514625549, "chain_id": "39GAF6DQWRZUS0SSJMVKT3BX946V1F_1_6"}
{"score": 0.7804470062255859, "chain_id": "39GAF6DQWRZUS0SSJMVKT3BX946V1F_1_8"}
{"score": 0.9609110951423645, "chain_id": "39GAF6DQWRZUS0SSJMVKT3BX946V1F_1_10"}
{"score": 0.07809165120124817, "chain_id": "39GAF6DQWRZUS0SSJMVKT3BX946V1F_1_3"}
{"score": 0.08037737756967545, "chain_id": "39GAF6DQWRZUS0SSJMVKT3BX946V1F_1_4"}
{"score": 0.07146000117063522, "chain_id": "39GAF6DQWRZUS0SSJMVKT3BX946V1F_1_5"}
{"score": 0.13866065442562103, "chain_id": "39GAF6DQWRZUS0SSJMVKT3BX946V1F_1_7"}
{"score": 0.8920915722846985, "chain_id": "39GAF6DQWRZUS0SSJMVKT3BX946V1F_1_9"}
{"score": 0.8483325839042664, "chain_id": "3R0T90IZ1SBVX6CVAOLIAYREQPFGCD_1_6"}
{"score": 0.799140453338623, "chain_id": "3R0T90IZ1SBVX6CVAOLIAYREQPFGCD_1_1"}
{"score": 0.4146888852119446, "chain_id": "3R0T90IZ1SBVX6CVAOLIAYREQPFGCD_1_2"}
{"score": 0.891966700553894, "chain_id": "3R0T90IZ1SBVX6CVAOLIAYREQPFGCD_1_3"}
{"score": 0.021455813199281693, "chain_id": "3R0T90IZ1SBVX6CVAOLIAYREQPFGCD_1_4"}
{"score": 0.09312549978494644, "chain_id": "3R0T90IZ1SBVX6CVAOLIAYREQPFGCD_1_5"}
{"score": 0.2552732527256012, "chain_id": "3R0T90IZ1SBVX6CVAOLIAYREQPFGCD_1_7"}
{"score": 0.05716394633054733, "chain_id": "3R0T90IZ1SBVX6CVAOLIAYREQPFGCD_1_8"}
{"score": 0.10330892354249954, "chain_id": "3R0T90IZ1SBVX6CVAOLIAYREQPFGCD_1_9"}
{"score": 0.017235111445188522, "chain_id": "3R0T90IZ1SBVX6CVAOLIAYREQPFGCD_1_10"}
{"score": 0.9216620922088623, "chain_id": "3XUHV3NRVKXOYHYRFKGSHSX5DAD5H4_1_1"}
{"score": 0.8837831616401672, "chain_id": "3XUHV3NRVKXOYHYRFKGSHSX5DAD5H4_1_2"}
{"score": 0.30978870391845703, "chain_id": "3XUHV3NRVKXOYHYRFKGSHSX5DAD5H4_1_6"}
{"score": 0.4745659828186035, "chain_id": "3XUHV3NRVKXOYHYRFKGSHSX5DAD5H4_1_3"}
{"score": 0.597590446472168, "chain_id": "3XUHV3NRVKXOYHYRFKGSHSX5DAD5H4_1_4"}
{"score": 0.5063552856445312, "chain_id": "3XUHV3NRVKXOYHYRFKGSHSX5DAD5H4_1_5"}
{"score": 0.3326197862625122, "chain_id": "3XUHV3NRVKXOYHYRFKGSHSX5DAD5H4_1_7"}
{"score": 0.3399866223335266, "chain_id": "3XUHV3NRVKXOYHYRFKGSHSX5DAD5H4_1_8"}
{"score": 0.24566875398159027, "chain_id": "3XUHV3NRVKXOYHYRFKGSHSX5DAD5H4_1_9"}
{"score": 0.18306317925453186, "chain_id": "3XUHV3NRVKXOYHYRFKGSHSX5DAD5H4_1_10"}
{"score": 0.5903520584106445, "chain_id": "3WI0P0II61RWRORNQVA5T8N3E4KDR1_1_1"}
{"score": 0.8085780739784241, "chain_id": "3WI0P0II61RWRORNQVA5T8N3E4KDR1_1_2"}
{"score": 0.6516692638397217, "chain_id": "3WI0P0II61RWRORNQVA5T8N3E4KDR1_1_5"}
{"score": 0.5650129914283752, "chain_id": "3WI0P0II61RWRORNQVA5T8N3E4KDR1_1_3"}
{"score": 0.3152865469455719, "chain_id": "3WI0P0II61RWRORNQVA5T8N3E4KDR1_1_4"}
{"score": 0.5203465819358826, "chain_id": "3WI0P0II61RWRORNQVA5T8N3E4KDR1_1_6"}
{"score": 0.18566718697547913, "chain_id": "3WI0P0II61RWRORNQVA5T8N3E4KDR1_1_7"}
{"score": 0.0724666640162468, "chain_id": "3WI0P0II61RWRORNQVA5T8N3E4KDR1_1_8"}
{"score": 0.1731821894645691, "chain_id": "3WI0P0II61RWRORNQVA5T8N3E4KDR1_1_9"}
{"score": 0.03478375822305679, "chain_id": "3WI0P0II61RWRORNQVA5T8N3E4KDR1_1_10"}
{"score": 0.767958402633667, "chain_id": "39ASUFLU6X6LGQRZVPRHO8RCE7NXEA_1_1"}
{"score": 0.24601532518863678, "chain_id": "39ASUFLU6X6LGQRZVPRHO8RCE7NXEA_1_2"}
{"score": 0.01816265471279621, "chain_id": "39ASUFLU6X6LGQRZVPRHO8RCE7NXEA_1_3"}
{"score": 0.12739114463329315, "chain_id": "39ASUFLU6X6LGQRZVPRHO8RCE7NXEA_1_4"}
{"score": 0.13207995891571045, "chain_id": "39ASUFLU6X6LGQRZVPRHO8RCE7NXEA_1_5"}
{"score": 0.30529889464378357, "chain_id": "39ASUFLU6X6LGQRZVPRHO8RCE7NXEA_1_6"}
{"score": 0.128875270485878, "chain_id": "39ASUFLU6X6LGQRZVPRHO8RCE7NXEA_1_7"}
{"score": 0.48204678297042847, "chain_id": "39ASUFLU6X6LGQRZVPRHO8RCE7NXEA_1_8"}
{"score": 0.051689263433218, "chain_id": "39ASUFLU6X6LGQRZVPRHO8RCE7NXEA_1_9"}
{"score": 0.05322062224149704, "chain_id": "39ASUFLU6X6LGQRZVPRHO8RCE7NXEA_1_10"}
{"score": 0.950742781162262, "chain_id": "3TVSS0C0E1Z8G946BFKQLBD6SCYTWB_1_1"}
{"score": 0.39568936824798584, "chain_id": "3TVSS0C0E1Z8G946BFKQLBD6SCYTWB_1_6"}
{"score": 0.2970902919769287, "chain_id": "3TVSS0C0E1Z8G946BFKQLBD6SCYTWB_1_2"}
{"score": 0.0818534716963768, "chain_id": "3TVSS0C0E1Z8G946BFKQLBD6SCYTWB_1_3"}
{"score": 0.35836610198020935, "chain_id": "3TVSS0C0E1Z8G946BFKQLBD6SCYTWB_1_4"}
{"score": 0.4292200803756714, "chain_id": "3TVSS0C0E1Z8G946BFKQLBD6SCYTWB_1_5"}
{"score": 0.9143308401107788, "chain_id": "3TVSS0C0E1Z8G946BFKQLBD6SCYTWB_1_7"}
{"score": 0.9215296506881714, "chain_id": "3TVSS0C0E1Z8G946BFKQLBD6SCYTWB_1_8"}
{"score": 0.49937567114830017, "chain_id": "3TVSS0C0E1Z8G946BFKQLBD6SCYTWB_1_9"}
{"score": 0.2691768705844879, "chain_id": "3TVSS0C0E1Z8G946BFKQLBD6SCYTWB_1_10"}
{"score": 0.9908350706100464, "chain_id": "3M1CVSFP604YHG9BT6U3YH5SGO6QAX_1_6"}
{"score": 0.7944714426994324, "chain_id": "3M1CVSFP604YHG9BT6U3YH5SGO6QAX_1_7"}
{"score": 0.9727277755737305, "chain_id": "3M1CVSFP604YHG9BT6U3YH5SGO6QAX_1_8"}
{"score": 0.08033885806798935, "chain_id": "3M1CVSFP604YHG9BT6U3YH5SGO6QAX_1_1"}
{"score": 0.036160413175821304, "chain_id": "3M1CVSFP604YHG9BT6U3YH5SGO6QAX_1_2"}
{"score": 0.03589317575097084, "chain_id": "3M1CVSFP604YHG9BT6U3YH5SGO6QAX_1_3"}
{"score": 0.09170092642307281, "chain_id": "3M1CVSFP604YHG9BT6U3YH5SGO6QAX_1_4"}
{"score": 0.9569641351699829, "chain_id": "3M1CVSFP604YHG9BT6U3YH5SGO6QAX_1_5"}
{"score": 0.10159562528133392, "chain_id": "3M1CVSFP604YHG9BT6U3YH5SGO6QAX_1_9"}
{"score": 0.0427391454577446, "chain_id": "3M1CVSFP604YHG9BT6U3YH5SGO6QAX_1_10"}
{"score": 0.9285199642181396, "chain_id": "3KV0LJBBH2KZVIX03O98CYAX6YURMU_1_2"}
{"score": 0.04040679335594177, "chain_id": "3KV0LJBBH2KZVIX03O98CYAX6YURMU_1_1"}
{"score": 0.12514188885688782, "chain_id": "3KV0LJBBH2KZVIX03O98CYAX6YURMU_1_3"}
{"score": 0.32388603687286377, "chain_id": "3KV0LJBBH2KZVIX03O98CYAX6YURMU_1_4"}
{"score": 0.18625657260417938, "chain_id": "3KV0LJBBH2KZVIX03O98CYAX6YURMU_1_5"}
{"score": 0.16577745974063873, "chain_id": "3KV0LJBBH2KZVIX03O98CYAX6YURMU_1_6"}
{"score": 0.4923850893974304, "chain_id": "3KV0LJBBH2KZVIX03O98CYAX6YURMU_1_7"}
{"score": 0.5999857187271118, "chain_id": "3KV0LJBBH2KZVIX03O98CYAX6YURMU_1_8"}
{"score": 0.05253418907523155, "chain_id": "3KV0LJBBH2KZVIX03O98CYAX6YURMU_1_9"}
{"score": 0.0457100011408329, "chain_id": "3KV0LJBBH2KZVIX03O98CYAX6YURMU_1_10"}
{"score": 0.03973516449332237, "chain_id": "3CFVK00FWLKM3HHVBO5V1Q4CCZN6LU_1_9"}
{"score": 0.10633885115385056, "chain_id": "3CFVK00FWLKM3HHVBO5V1Q4CCZN6LU_1_1"}
{"score": 0.03895881772041321, "chain_id": "3CFVK00FWLKM3HHVBO5V1Q4CCZN6LU_1_2"}
{"score": 0.07940404117107391, "chain_id": "3CFVK00FWLKM3HHVBO5V1Q4CCZN6LU_1_3"}
{"score": 0.1855773627758026, "chain_id": "3CFVK00FWLKM3HHVBO5V1Q4CCZN6LU_1_4"}
{"score": 0.033763498067855835, "chain_id": "3CFVK00FWLKM3HHVBO5V1Q4CCZN6LU_1_5"}
{"score": 0.09419643133878708, "chain_id": "3CFVK00FWLKM3HHVBO5V1Q4CCZN6LU_1_6"}
{"score": 0.038977425545454025, "chain_id": "3CFVK00FWLKM3HHVBO5V1Q4CCZN6LU_1_7"}
{"score": 0.02317507565021515, "chain_id": "3CFVK00FWLKM3HHVBO5V1Q4CCZN6LU_1_8"}
{"score": 0.14399483799934387, "chain_id": "3CFVK00FWLKM3HHVBO5V1Q4CCZN6LU_1_10"}
{"score": 0.49770379066467285, "chain_id": "3WYP994K17Q63GOUU3ULVY68Q51Y6B_1_1"}
{"score": 0.9773462414741516, "chain_id": "3WYP994K17Q63GOUU3ULVY68Q51Y6B_1_3"}
{"score": 0.5857381820678711, "chain_id": "3WYP994K17Q63GOUU3ULVY68Q51Y6B_1_2"}
{"score": 0.9887199997901917, "chain_id": "3WYP994K17Q63GOUU3ULVY68Q51Y6B_1_4"}
{"score": 0.03662234544754028, "chain_id": "3WYP994K17Q63GOUU3ULVY68Q51Y6B_1_5"}
{"score": 0.031179914250969887, "chain_id": "3WYP994K17Q63GOUU3ULVY68Q51Y6B_1_6"}
{"score": 0.05584404990077019, "chain_id": "3WYP994K17Q63GOUU3ULVY68Q51Y6B_1_7"}
{"score": 0.03719054535031319, "chain_id": "3WYP994K17Q63GOUU3ULVY68Q51Y6B_1_8"}
{"score": 0.023491889238357544, "chain_id": "3WYP994K17Q63GOUU3ULVY68Q51Y6B_1_9"}
{"score": 0.023636404424905777, "chain_id": "3WYP994K17Q63GOUU3ULVY68Q51Y6B_1_10"}
{"score": 0.9775456786155701, "chain_id": "3OLF68YTN901QRJ2FQJ9MI1EGJ0FAW_1_3"}
{"score": 0.47255846858024597, "chain_id": "3OLF68YTN901QRJ2FQJ9MI1EGJ0FAW_1_1"}
{"score": 0.15760701894760132, "chain_id": "3OLF68YTN901QRJ2FQJ9MI1EGJ0FAW_1_2"}
{"score": 0.32161271572113037, "chain_id": "3OLF68YTN901QRJ2FQJ9MI1EGJ0FAW_1_4"}
{"score": 0.8936588764190674, "chain_id": "3OLF68YTN901QRJ2FQJ9MI1EGJ0FAW_1_5"}
{"score": 0.0433337427675724, "chain_id": "3OLF68YTN901QRJ2FQJ9MI1EGJ0FAW_1_6"}
{"score": 0.2978527843952179, "chain_id": "3OLF68YTN901QRJ2FQJ9MI1EGJ0FAW_1_7"}
{"score": 0.5638799667358398, "chain_id": "3OLF68YTN901QRJ2FQJ9MI1EGJ0FAW_1_8"}
{"score": 0.042530957609415054, "chain_id": "3OLF68YTN901QRJ2FQJ9MI1EGJ0FAW_1_9"}
{"score": 0.04987848922610283, "chain_id": "3OLF68YTN901QRJ2FQJ9MI1EGJ0FAW_1_10"}
{"score": 0.3077777922153473, "chain_id": "3ERET4BTVM8Y1U1BOVW660IZB23K9V_1_1"}
{"score": 0.09716838598251343, "chain_id": "3ERET4BTVM8Y1U1BOVW660IZB23K9V_1_2"}
{"score": 0.08763743191957474, "chain_id": "3ERET4BTVM8Y1U1BOVW660IZB23K9V_1_3"}
{"score": 0.024064822122454643, "chain_id": "3ERET4BTVM8Y1U1BOVW660IZB23K9V_1_4"}
{"score": 0.049944501370191574, "chain_id": "3ERET4BTVM8Y1U1BOVW660IZB23K9V_1_5"}
{"score": 0.05056395381689072, "chain_id": "3ERET4BTVM8Y1U1BOVW660IZB23K9V_1_6"}
{"score": 0.10408812016248703, "chain_id": "3ERET4BTVM8Y1U1BOVW660IZB23K9V_1_7"}
{"score": 0.05996761843562126, "chain_id": "3ERET4BTVM8Y1U1BOVW660IZB23K9V_1_8"}
{"score": 0.03494657576084137, "chain_id": "3ERET4BTVM8Y1U1BOVW660IZB23K9V_1_9"}
{"score": 0.38072001934051514, "chain_id": "3ERET4BTVM8Y1U1BOVW660IZB23K9V_1_10"}
{"score": 0.9269133806228638, "chain_id": "3U4J9857OEATU89O3LLTT183WSWB73_1_1"}
{"score": 0.9722198843955994, "chain_id": "3U4J9857OEATU89O3LLTT183WSWB73_1_2"}
{"score": 0.4880606532096863, "chain_id": "3U4J9857OEATU89O3LLTT183WSWB73_1_8"}
{"score": 0.3096214532852173, "chain_id": "3U4J9857OEATU89O3LLTT183WSWB73_1_3"}
{"score": 0.31274887919425964, "chain_id": "3U4J9857OEATU89O3LLTT183WSWB73_1_4"}
{"score": 0.8628719449043274, "chain_id": "3U4J9857OEATU89O3LLTT183WSWB73_1_5"}
{"score": 0.9566511511802673, "chain_id": "3U4J9857OEATU89O3LLTT183WSWB73_1_6"}
{"score": 0.04792344942688942, "chain_id": "3U4J9857OEATU89O3LLTT183WSWB73_1_7"}
{"score": 0.20388296246528625, "chain_id": "3U4J9857OEATU89O3LLTT183WSWB73_1_9"}
{"score": 0.021692924201488495, "chain_id": "3U4J9857OEATU89O3LLTT183WSWB73_1_10"}
{"score": 0.7870141863822937, "chain_id": "3ZWFC4W1UU6TP85JH15VH8QODW1RF8_1_3"}
{"score": 0.9351210594177246, "chain_id": "3ZWFC4W1UU6TP85JH15VH8QODW1RF8_1_5"}
{"score": 0.8296969532966614, "chain_id": "3ZWFC4W1UU6TP85JH15VH8QODW1RF8_1_8"}
{"score": 0.9348196387290955, "chain_id": "3ZWFC4W1UU6TP85JH15VH8QODW1RF8_1_1"}
{"score": 0.9673051834106445, "chain_id": "3ZWFC4W1UU6TP85JH15VH8QODW1RF8_1_2"}
{"score": 0.1968909502029419, "chain_id": "3ZWFC4W1UU6TP85JH15VH8QODW1RF8_1_4"}
{"score": 0.18683917820453644, "chain_id": "3ZWFC4W1UU6TP85JH15VH8QODW1RF8_1_6"}
{"score": 0.5183967351913452, "chain_id": "3ZWFC4W1UU6TP85JH15VH8QODW1RF8_1_7"}
{"score": 0.7701874375343323, "chain_id": "3ZWFC4W1UU6TP85JH15VH8QODW1RF8_1_9"}
{"score": 0.4430491030216217, "chain_id": "3ZWFC4W1UU6TP85JH15VH8QODW1RF8_1_10"}
{"score": 0.9757862687110901, "chain_id": "3K9FOBBF2HIUA2NNA5RC31QY9XONLF_1_1"}
{"score": 0.9736185669898987, "chain_id": "3K9FOBBF2HIUA2NNA5RC31QY9XONLF_1_2"}
{"score": 0.9444557428359985, "chain_id": "3K9FOBBF2HIUA2NNA5RC31QY9XONLF_1_3"}
{"score": 0.8384350538253784, "chain_id": "3K9FOBBF2HIUA2NNA5RC31QY9XONLF_1_9"}
{"score": 0.8594325184822083, "chain_id": "3K9FOBBF2HIUA2NNA5RC31QY9XONLF_1_10"}
{"score": 0.09312398731708527, "chain_id": "3K9FOBBF2HIUA2NNA5RC31QY9XONLF_1_4"}
{"score": 0.8299717307090759, "chain_id": "3K9FOBBF2HIUA2NNA5RC31QY9XONLF_1_5"}
{"score": 0.42406272888183594, "chain_id": "3K9FOBBF2HIUA2NNA5RC31QY9XONLF_1_6"}
{"score": 0.7721505165100098, "chain_id": "3K9FOBBF2HIUA2NNA5RC31QY9XONLF_1_7"}
{"score": 0.8629634380340576, "chain_id": "3K9FOBBF2HIUA2NNA5RC31QY9XONLF_1_8"}
{"score": 0.9341188669204712, "chain_id": "36U2A8VAG1YD2V9JW7OM5HBQOURYK7_1_1"}
{"score": 0.718771755695343, "chain_id": "36U2A8VAG1YD2V9JW7OM5HBQOURYK7_1_2"}
{"score": 0.1788395792245865, "chain_id": "36U2A8VAG1YD2V9JW7OM5HBQOURYK7_1_4"}
{"score": 0.8794098496437073, "chain_id": "36U2A8VAG1YD2V9JW7OM5HBQOURYK7_1_5"}
{"score": 0.14653341472148895, "chain_id": "36U2A8VAG1YD2V9JW7OM5HBQOURYK7_1_8"}
{"score": 0.2259674221277237, "chain_id": "36U2A8VAG1YD2V9JW7OM5HBQOURYK7_1_9"}
{"score": 0.1485026329755783, "chain_id": "36U2A8VAG1YD2V9JW7OM5HBQOURYK7_1_3"}
{"score": 0.0614602193236351, "chain_id": "36U2A8VAG1YD2V9JW7OM5HBQOURYK7_1_6"}
{"score": 0.10412610322237015, "chain_id": "36U2A8VAG1YD2V9JW7OM5HBQOURYK7_1_7"}
{"score": 0.052393838763237, "chain_id": "36U2A8VAG1YD2V9JW7OM5HBQOURYK7_1_10"}
{"score": 0.969953179359436, "chain_id": "3G5F9DBFOPW5WBD6LBY5LQR4F6YHVB_1_4"}
{"score": 0.5838521122932434, "chain_id": "3G5F9DBFOPW5WBD6LBY5LQR4F6YHVB_1_5"}
{"score": 0.9832095503807068, "chain_id": "3G5F9DBFOPW5WBD6LBY5LQR4F6YHVB_1_6"}
{"score": 0.703306257724762, "chain_id": "3G5F9DBFOPW5WBD6LBY5LQR4F6YHVB_1_8"}
{"score": 0.9030343890190125, "chain_id": "3G5F9DBFOPW5WBD6LBY5LQR4F6YHVB_1_1"}
{"score": 0.9878427982330322, "chain_id": "3G5F9DBFOPW5WBD6LBY5LQR4F6YHVB_1_2"}
{"score": 0.873046875, "chain_id": "3G5F9DBFOPW5WBD6LBY5LQR4F6YHVB_1_3"}
{"score": 0.987392008304596, "chain_id": "3G5F9DBFOPW5WBD6LBY5LQR4F6YHVB_1_7"}
{"score": 0.19152623414993286, "chain_id": "3G5F9DBFOPW5WBD6LBY5LQR4F6YHVB_1_9"}
{"score": 0.8672230839729309, "chain_id": "3G5F9DBFOPW5WBD6LBY5LQR4F6YHVB_1_10"}
{"score": 0.9506452679634094, "chain_id": "386CSBG1OZLXUEX83TDRIC36P5YQ6U_1_3"}
{"score": 0.9854682087898254, "chain_id": "386CSBG1OZLXUEX83TDRIC36P5YQ6U_1_4"}
{"score": 0.9114052057266235, "chain_id": "386CSBG1OZLXUEX83TDRIC36P5YQ6U_1_9"}
{"score": 0.8548688888549805, "chain_id": "386CSBG1OZLXUEX83TDRIC36P5YQ6U_1_10"}
{"score": 0.9871609210968018, "chain_id": "386CSBG1OZLXUEX83TDRIC36P5YQ6U_1_1"}
{"score": 0.6607890129089355, "chain_id": "386CSBG1OZLXUEX83TDRIC36P5YQ6U_1_2"}
{"score": 0.8456385731697083, "chain_id": "386CSBG1OZLXUEX83TDRIC36P5YQ6U_1_5"}
{"score": 0.829936683177948, "chain_id": "386CSBG1OZLXUEX83TDRIC36P5YQ6U_1_6"}
{"score": 0.409974068403244, "chain_id": "386CSBG1OZLXUEX83TDRIC36P5YQ6U_1_7"}
{"score": 0.6082670092582703, "chain_id": "386CSBG1OZLXUEX83TDRIC36P5YQ6U_1_8"}
{"score": 0.9699829816818237, "chain_id": "3KMS4QQVK2P724SORHWYGW4AJZBFKY_1_2"}
{"score": 0.7321716547012329, "chain_id": "3KMS4QQVK2P724SORHWYGW4AJZBFKY_1_4"}
{"score": 0.9488534927368164, "chain_id": "3KMS4QQVK2P724SORHWYGW4AJZBFKY_1_6"}
{"score": 0.7330936193466187, "chain_id": "3KMS4QQVK2P724SORHWYGW4AJZBFKY_1_1"}
{"score": 0.2647087574005127, "chain_id": "3KMS4QQVK2P724SORHWYGW4AJZBFKY_1_3"}
{"score": 0.45884063839912415, "chain_id": "3KMS4QQVK2P724SORHWYGW4AJZBFKY_1_5"}
{"score": 0.694968044757843, "chain_id": "3KMS4QQVK2P724SORHWYGW4AJZBFKY_1_7"}
{"score": 0.16085684299468994, "chain_id": "3KMS4QQVK2P724SORHWYGW4AJZBFKY_1_8"}
{"score": 0.957347571849823, "chain_id": "3KMS4QQVK2P724SORHWYGW4AJZBFKY_1_9"}
{"score": 0.6748356819152832, "chain_id": "3KMS4QQVK2P724SORHWYGW4AJZBFKY_1_10"}
{"score": 0.9899371862411499, "chain_id": "3LO69W1SU3CO0A61N1EHDHH17A1LGH_1_1"}
{"score": 0.499436616897583, "chain_id": "3LO69W1SU3CO0A61N1EHDHH17A1LGH_1_2"}
{"score": 0.8687851428985596, "chain_id": "3LO69W1SU3CO0A61N1EHDHH17A1LGH_1_4"}
{"score": 0.7624066472053528, "chain_id": "3LO69W1SU3CO0A61N1EHDHH17A1LGH_1_5"}
{"score": 0.03577631711959839, "chain_id": "3LO69W1SU3CO0A61N1EHDHH17A1LGH_1_8"}
{"score": 0.9814691543579102, "chain_id": "3LO69W1SU3CO0A61N1EHDHH17A1LGH_1_3"}
{"score": 0.11536940932273865, "chain_id": "3LO69W1SU3CO0A61N1EHDHH17A1LGH_1_6"}
{"score": 0.13510194420814514, "chain_id": "3LO69W1SU3CO0A61N1EHDHH17A1LGH_1_7"}
{"score": 0.07377517968416214, "chain_id": "3LO69W1SU3CO0A61N1EHDHH17A1LGH_1_9"}
{"score": 0.029939688742160797, "chain_id": "3LO69W1SU3CO0A61N1EHDHH17A1LGH_1_10"}
{"score": 0.98636394739151, "chain_id": "3I33IC7ZWF1HPX7QRV422Z7P3R9A25_1_1"}
{"score": 0.7166418433189392, "chain_id": "3I33IC7ZWF1HPX7QRV422Z7P3R9A25_1_2"}
{"score": 0.9691680669784546, "chain_id": "3I33IC7ZWF1HPX7QRV422Z7P3R9A25_1_3"}
{"score": 0.8038702011108398, "chain_id": "3I33IC7ZWF1HPX7QRV422Z7P3R9A25_1_4"}
{"score": 0.11336824297904968, "chain_id": "3I33IC7ZWF1HPX7QRV422Z7P3R9A25_1_5"}
{"score": 0.0271952822804451, "chain_id": "3I33IC7ZWF1HPX7QRV422Z7P3R9A25_1_6"}
{"score": 0.6762306690216064, "chain_id": "3I33IC7ZWF1HPX7QRV422Z7P3R9A25_1_7"}
{"score": 0.11547844111919403, "chain_id": "3I33IC7ZWF1HPX7QRV422Z7P3R9A25_1_8"}
{"score": 0.0339152067899704, "chain_id": "3I33IC7ZWF1HPX7QRV422Z7P3R9A25_1_9"}
{"score": 0.12267383933067322, "chain_id": "3I33IC7ZWF1HPX7QRV422Z7P3R9A25_1_10"}
{"score": 0.8144210577011108, "chain_id": "3RSDURM96ALAGVH90LDJ7MYL3MYYE3_1_1"}
{"score": 0.15469948947429657, "chain_id": "3RSDURM96ALAGVH90LDJ7MYL3MYYE3_1_2"}
{"score": 0.2281503677368164, "chain_id": "3RSDURM96ALAGVH90LDJ7MYL3MYYE3_1_3"}
{"score": 0.9357713460922241, "chain_id": "3RSDURM96ALAGVH90LDJ7MYL3MYYE3_1_4"}
{"score": 0.4883854389190674, "chain_id": "3RSDURM96ALAGVH90LDJ7MYL3MYYE3_1_5"}
{"score": 0.25121885538101196, "chain_id": "3RSDURM96ALAGVH90LDJ7MYL3MYYE3_1_6"}
{"score": 0.08771539479494095, "chain_id": "3RSDURM96ALAGVH90LDJ7MYL3MYYE3_1_7"}
{"score": 0.9155815839767456, "chain_id": "3RSDURM96ALAGVH90LDJ7MYL3MYYE3_1_8"}
{"score": 0.2998414933681488, "chain_id": "3RSDURM96ALAGVH90LDJ7MYL3MYYE3_1_9"}
{"score": 0.21889826655387878, "chain_id": "3RSDURM96ALAGVH90LDJ7MYL3MYYE3_1_10"}
{"score": 0.7297110557556152, "chain_id": "3Z9WI9EOZZNRG0JUM7KYJHGNYLIHK5_1_2"}
{"score": 0.1636526882648468, "chain_id": "3Z9WI9EOZZNRG0JUM7KYJHGNYLIHK5_1_3"}
{"score": 0.9820927381515503, "chain_id": "3Z9WI9EOZZNRG0JUM7KYJHGNYLIHK5_1_4"}
{"score": 0.38322529196739197, "chain_id": "3Z9WI9EOZZNRG0JUM7KYJHGNYLIHK5_1_5"}
{"score": 0.9573777318000793, "chain_id": "3Z9WI9EOZZNRG0JUM7KYJHGNYLIHK5_1_10"}
{"score": 0.9740628600120544, "chain_id": "3Z9WI9EOZZNRG0JUM7KYJHGNYLIHK5_1_1"}
{"score": 0.9146682024002075, "chain_id": "3Z9WI9EOZZNRG0JUM7KYJHGNYLIHK5_1_6"}
{"score": 0.9740043878555298, "chain_id": "3Z9WI9EOZZNRG0JUM7KYJHGNYLIHK5_1_7"}
{"score": 0.9420827031135559, "chain_id": "3Z9WI9EOZZNRG0JUM7KYJHGNYLIHK5_1_8"}
{"score": 0.2234923094511032, "chain_id": "3Z9WI9EOZZNRG0JUM7KYJHGNYLIHK5_1_9"}
{"score": 0.03836963698267937, "chain_id": "3JMSRU9HQITTC1M4VAQZ0NURNNDVEA_1_1"}
{"score": 0.07727673649787903, "chain_id": "3JMSRU9HQITTC1M4VAQZ0NURNNDVEA_1_2"}
{"score": 0.13924387097358704, "chain_id": "3JMSRU9HQITTC1M4VAQZ0NURNNDVEA_1_3"}
{"score": 0.030118046328425407, "chain_id": "3JMSRU9HQITTC1M4VAQZ0NURNNDVEA_1_4"}
{"score": 0.043911222368478775, "chain_id": "3JMSRU9HQITTC1M4VAQZ0NURNNDVEA_1_5"}
{"score": 0.07973483204841614, "chain_id": "3JMSRU9HQITTC1M4VAQZ0NURNNDVEA_1_6"}
{"score": 0.09462511539459229, "chain_id": "3JMSRU9HQITTC1M4VAQZ0NURNNDVEA_1_7"}
{"score": 0.059345487505197525, "chain_id": "3JMSRU9HQITTC1M4VAQZ0NURNNDVEA_1_8"}
{"score": 0.04513835906982422, "chain_id": "3JMSRU9HQITTC1M4VAQZ0NURNNDVEA_1_9"}
{"score": 0.1872275173664093, "chain_id": "3JMSRU9HQITTC1M4VAQZ0NURNNDVEA_1_10"}
{"score": 0.893051266670227, "chain_id": "39DD6S19JPAALLREW7F2LT7NB2NZEQ_1_4"}
{"score": 0.5037833452224731, "chain_id": "39DD6S19JPAALLREW7F2LT7NB2NZEQ_1_9"}
{"score": 0.7990497350692749, "chain_id": "39DD6S19JPAALLREW7F2LT7NB2NZEQ_1_10"}
{"score": 0.2599528729915619, "chain_id": "39DD6S19JPAALLREW7F2LT7NB2NZEQ_1_1"}
{"score": 0.2501981556415558, "chain_id": "39DD6S19JPAALLREW7F2LT7NB2NZEQ_1_2"}
{"score": 0.11996385455131531, "chain_id": "39DD6S19JPAALLREW7F2LT7NB2NZEQ_1_3"}
{"score": 0.07628747075796127, "chain_id": "39DD6S19JPAALLREW7F2LT7NB2NZEQ_1_5"}
{"score": 0.07408113777637482, "chain_id": "39DD6S19JPAALLREW7F2LT7NB2NZEQ_1_6"}
{"score": 0.06967765092849731, "chain_id": "39DD6S19JPAALLREW7F2LT7NB2NZEQ_1_7"}
{"score": 0.8004512190818787, "chain_id": "39DD6S19JPAALLREW7F2LT7NB2NZEQ_1_8"}
{"score": 0.09752330183982849, "chain_id": "39KFRKBFINUWSMUYUZGFCYSZ9E0OYB_1_3"}
{"score": 0.8878728747367859, "chain_id": "39KFRKBFINUWSMUYUZGFCYSZ9E0OYB_1_4"}
{"score": 0.41167008876800537, "chain_id": "39KFRKBFINUWSMUYUZGFCYSZ9E0OYB_1_9"}
{"score": 0.7565540075302124, "chain_id": "39KFRKBFINUWSMUYUZGFCYSZ9E0OYB_1_10"}
{"score": 0.20929217338562012, "chain_id": "39KFRKBFINUWSMUYUZGFCYSZ9E0OYB_1_1"}
{"score": 0.2032339870929718, "chain_id": "39KFRKBFINUWSMUYUZGFCYSZ9E0OYB_1_2"}
{"score": 0.0698409453034401, "chain_id": "39KFRKBFINUWSMUYUZGFCYSZ9E0OYB_1_5"}
{"score": 0.06198561564087868, "chain_id": "39KFRKBFINUWSMUYUZGFCYSZ9E0OYB_1_6"}
{"score": 0.06101188808679581, "chain_id": "39KFRKBFINUWSMUYUZGFCYSZ9E0OYB_1_7"}
{"score": 0.7448777556419373, "chain_id": "39KFRKBFINUWSMUYUZGFCYSZ9E0OYB_1_8"}
{"score": 0.9610774517059326, "chain_id": "308Q0PEVB8C7VZBNOSBUTK3MOXI9IG_1_1"}
{"score": 0.9881343841552734, "chain_id": "308Q0PEVB8C7VZBNOSBUTK3MOXI9IG_1_2"}
{"score": 0.40472912788391113, "chain_id": "308Q0PEVB8C7VZBNOSBUTK3MOXI9IG_1_3"}
{"score": 0.9231776595115662, "chain_id": "308Q0PEVB8C7VZBNOSBUTK3MOXI9IG_1_5"}
{"score": 0.9683735966682434, "chain_id": "308Q0PEVB8C7VZBNOSBUTK3MOXI9IG_1_6"}
{"score": 0.8457914590835571, "chain_id": "308Q0PEVB8C7VZBNOSBUTK3MOXI9IG_1_8"}
{"score": 0.6790665984153748, "chain_id": "308Q0PEVB8C7VZBNOSBUTK3MOXI9IG_1_9"}
{"score": 0.5501762628555298, "chain_id": "308Q0PEVB8C7VZBNOSBUTK3MOXI9IG_1_10"}
{"score": 0.4815683364868164, "chain_id": "308Q0PEVB8C7VZBNOSBUTK3MOXI9IG_1_4"}
{"score": 0.3745162785053253, "chain_id": "308Q0PEVB8C7VZBNOSBUTK3MOXI9IG_1_7"}
{"score": 0.028930015861988068, "chain_id": "37UQDCYH6XU83M7U82CTUD2A114V7L_1_2"}
{"score": 0.9068384766578674, "chain_id": "37UQDCYH6XU83M7U82CTUD2A114V7L_1_6"}
{"score": 0.7488786578178406, "chain_id": "37UQDCYH6XU83M7U82CTUD2A114V7L_1_10"}
{"score": 0.06813611090183258, "chain_id": "37UQDCYH6XU83M7U82CTUD2A114V7L_1_1"}
{"score": 0.0951489731669426, "chain_id": "37UQDCYH6XU83M7U82CTUD2A114V7L_1_3"}
{"score": 0.02244095876812935, "chain_id": "37UQDCYH6XU83M7U82CTUD2A114V7L_1_4"}
{"score": 0.059602558612823486, "chain_id": "37UQDCYH6XU83M7U82CTUD2A114V7L_1_5"}
{"score": 0.9695669412612915, "chain_id": "37UQDCYH6XU83M7U82CTUD2A114V7L_1_7"}
{"score": 0.9051046967506409, "chain_id": "37UQDCYH6XU83M7U82CTUD2A114V7L_1_8"}
{"score": 0.37679392099380493, "chain_id": "37UQDCYH6XU83M7U82CTUD2A114V7L_1_9"}
{"score": 0.22052715718746185, "chain_id": "3W92K5RLWUGTGITBK9XWWTOE6JR5V6_1_1"}
{"score": 0.4465635120868683, "chain_id": "3W92K5RLWUGTGITBK9XWWTOE6JR5V6_1_2"}
{"score": 0.7486068606376648, "chain_id": "3W92K5RLWUGTGITBK9XWWTOE6JR5V6_1_5"}
{"score": 0.32050177454948425, "chain_id": "3W92K5RLWUGTGITBK9XWWTOE6JR5V6_1_6"}
{"score": 0.12523725628852844, "chain_id": "3W92K5RLWUGTGITBK9XWWTOE6JR5V6_1_10"}
{"score": 0.9707133769989014, "chain_id": "3W92K5RLWUGTGITBK9XWWTOE6JR5V6_1_3"}
{"score": 0.3575234115123749, "chain_id": "3W92K5RLWUGTGITBK9XWWTOE6JR5V6_1_4"}
{"score": 0.02845439314842224, "chain_id": "3W92K5RLWUGTGITBK9XWWTOE6JR5V6_1_7"}
{"score": 0.014018526300787926, "chain_id": "3W92K5RLWUGTGITBK9XWWTOE6JR5V6_1_8"}
{"score": 0.03453104943037033, "chain_id": "3W92K5RLWUGTGITBK9XWWTOE6JR5V6_1_9"}
{"score": 0.019798239693045616, "chain_id": "3Y9N9SS8LYA48M6LF599BAKNTDN3D1_1_5"}
{"score": 0.41293588280677795, "chain_id": "3Y9N9SS8LYA48M6LF599BAKNTDN3D1_1_6"}
{"score": 0.9479944705963135, "chain_id": "3Y9N9SS8LYA48M6LF599BAKNTDN3D1_1_10"}
{"score": 0.014548501931130886, "chain_id": "3Y9N9SS8LYA48M6LF599BAKNTDN3D1_1_1"}
{"score": 0.015624267980456352, "chain_id": "3Y9N9SS8LYA48M6LF599BAKNTDN3D1_1_2"}
{"score": 0.019720159471035004, "chain_id": "3Y9N9SS8LYA48M6LF599BAKNTDN3D1_1_3"}
{"score": 0.012439526617527008, "chain_id": "3Y9N9SS8LYA48M6LF599BAKNTDN3D1_1_4"}
{"score": 0.014173097908496857, "chain_id": "3Y9N9SS8LYA48M6LF599BAKNTDN3D1_1_7"}
{"score": 0.019233187660574913, "chain_id": "3Y9N9SS8LYA48M6LF599BAKNTDN3D1_1_8"}
{"score": 0.01594424806535244, "chain_id": "3Y9N9SS8LYA48M6LF599BAKNTDN3D1_1_9"}
{"score": 0.28380247950553894, "chain_id": "3NPFYT4IZC3J04NQ1KH5OBCOBJBXGC_1_1"}
{"score": 0.23512743413448334, "chain_id": "3NPFYT4IZC3J04NQ1KH5OBCOBJBXGC_1_2"}
{"score": 0.3146130442619324, "chain_id": "3NPFYT4IZC3J04NQ1KH5OBCOBJBXGC_1_3"}
{"score": 0.9284481406211853, "chain_id": "3NPFYT4IZC3J04NQ1KH5OBCOBJBXGC_1_4"}
{"score": 0.2687695622444153, "chain_id": "3NPFYT4IZC3J04NQ1KH5OBCOBJBXGC_1_5"}
{"score": 0.2604295313358307, "chain_id": "3NPFYT4IZC3J04NQ1KH5OBCOBJBXGC_1_6"}
{"score": 0.03829130530357361, "chain_id": "3NPFYT4IZC3J04NQ1KH5OBCOBJBXGC_1_7"}
{"score": 0.18128599226474762, "chain_id": "3NPFYT4IZC3J04NQ1KH5OBCOBJBXGC_1_8"}
{"score": 0.08111874014139175, "chain_id": "3NPFYT4IZC3J04NQ1KH5OBCOBJBXGC_1_9"}
{"score": 0.2333095818758011, "chain_id": "3NPFYT4IZC3J04NQ1KH5OBCOBJBXGC_1_10"}
{"score": 0.10297221690416336, "chain_id": "3PM8NZGV8YFADTH44GMHIPGQH9CQXY_1_1"}
{"score": 0.05787428095936775, "chain_id": "3PM8NZGV8YFADTH44GMHIPGQH9CQXY_1_2"}
{"score": 0.04627838730812073, "chain_id": "3PM8NZGV8YFADTH44GMHIPGQH9CQXY_1_3"}
{"score": 0.047205403447151184, "chain_id": "3PM8NZGV8YFADTH44GMHIPGQH9CQXY_1_4"}
{"score": 0.04809780791401863, "chain_id": "3PM8NZGV8YFADTH44GMHIPGQH9CQXY_1_5"}
{"score": 0.23182415962219238, "chain_id": "3PM8NZGV8YFADTH44GMHIPGQH9CQXY_1_6"}
{"score": 0.07872364670038223, "chain_id": "3PM8NZGV8YFADTH44GMHIPGQH9CQXY_1_7"}
{"score": 0.03524450585246086, "chain_id": "3PM8NZGV8YFADTH44GMHIPGQH9CQXY_1_8"}
{"score": 0.018991854041814804, "chain_id": "3PM8NZGV8YFADTH44GMHIPGQH9CQXY_1_9"}
{"score": 0.08467073738574982, "chain_id": "3PM8NZGV8YFADTH44GMHIPGQH9CQXY_1_10"}
{"score": 0.15970377624034882, "chain_id": "3RXPCZQMQPABA32XURWYT28N6B91GF_1_1"}
{"score": 0.34651443362236023, "chain_id": "3RXPCZQMQPABA32XURWYT28N6B91GF_1_2"}
{"score": 0.10467925667762756, "chain_id": "3RXPCZQMQPABA32XURWYT28N6B91GF_1_3"}
{"score": 0.10675696283578873, "chain_id": "3RXPCZQMQPABA32XURWYT28N6B91GF_1_4"}
{"score": 0.10008160769939423, "chain_id": "3RXPCZQMQPABA32XURWYT28N6B91GF_1_5"}
{"score": 0.07854864001274109, "chain_id": "3RXPCZQMQPABA32XURWYT28N6B91GF_1_6"}
{"score": 0.04594540596008301, "chain_id": "3RXPCZQMQPABA32XURWYT28N6B91GF_1_7"}
{"score": 0.07275261729955673, "chain_id": "3RXPCZQMQPABA32XURWYT28N6B91GF_1_8"}
{"score": 0.08363918960094452, "chain_id": "3RXPCZQMQPABA32XURWYT28N6B91GF_1_9"}
{"score": 0.04967619851231575, "chain_id": "3RXPCZQMQPABA32XURWYT28N6B91GF_1_10"}
{"score": 0.9175705313682556, "chain_id": "3YZ8UPK3VTLE2ODQUTAZEDS5JG6UCP_1_1"}
{"score": 0.9029292464256287, "chain_id": "3YZ8UPK3VTLE2ODQUTAZEDS5JG6UCP_1_2"}
{"score": 0.06937716156244278, "chain_id": "3YZ8UPK3VTLE2ODQUTAZEDS5JG6UCP_1_3"}
{"score": 0.22018545866012573, "chain_id": "3YZ8UPK3VTLE2ODQUTAZEDS5JG6UCP_1_4"}
{"score": 0.21997250616550446, "chain_id": "3YZ8UPK3VTLE2ODQUTAZEDS5JG6UCP_1_5"}
{"score": 0.409977525472641, "chain_id": "3YZ8UPK3VTLE2ODQUTAZEDS5JG6UCP_1_6"}
{"score": 0.11726798862218857, "chain_id": "3YZ8UPK3VTLE2ODQUTAZEDS5JG6UCP_1_7"}
{"score": 0.24484334886074066, "chain_id": "3YZ8UPK3VTLE2ODQUTAZEDS5JG6UCP_1_8"}
{"score": 0.061174534261226654, "chain_id": "3YZ8UPK3VTLE2ODQUTAZEDS5JG6UCP_1_9"}
{"score": 0.1015675812959671, "chain_id": "3YZ8UPK3VTLE2ODQUTAZEDS5JG6UCP_1_10"}
{"score": 0.10912932455539703, "chain_id": "3FUI0JHJPXX6QU4OMG3XY1YB5F3335_1_1"}
{"score": 0.052789073437452316, "chain_id": "3FUI0JHJPXX6QU4OMG3XY1YB5F3335_1_2"}
{"score": 0.10376982390880585, "chain_id": "3FUI0JHJPXX6QU4OMG3XY1YB5F3335_1_3"}
{"score": 0.04108872637152672, "chain_id": "3FUI0JHJPXX6QU4OMG3XY1YB5F3335_1_4"}
{"score": 0.0896637812256813, "chain_id": "3FUI0JHJPXX6QU4OMG3XY1YB5F3335_1_5"}
{"score": 0.38412657380104065, "chain_id": "3FUI0JHJPXX6QU4OMG3XY1YB5F3335_1_6"}
{"score": 0.26327475905418396, "chain_id": "3FUI0JHJPXX6QU4OMG3XY1YB5F3335_1_7"}
{"score": 0.5199355483055115, "chain_id": "3FUI0JHJPXX6QU4OMG3XY1YB5F3335_1_8"}
{"score": 0.3202895224094391, "chain_id": "3FUI0JHJPXX6QU4OMG3XY1YB5F3335_1_9"}
{"score": 0.11618207395076752, "chain_id": "3FUI0JHJPXX6QU4OMG3XY1YB5F3335_1_10"}
{"score": 0.061766400933265686, "chain_id": "34V1S5K3GS0R2FGMMR25WHDHF3B691_1_1"}
{"score": 0.08852318674325943, "chain_id": "34V1S5K3GS0R2FGMMR25WHDHF3B691_1_2"}
{"score": 0.22525502741336823, "chain_id": "34V1S5K3GS0R2FGMMR25WHDHF3B691_1_3"}
{"score": 0.18707047402858734, "chain_id": "34V1S5K3GS0R2FGMMR25WHDHF3B691_1_4"}
{"score": 0.056523360311985016, "chain_id": "34V1S5K3GS0R2FGMMR25WHDHF3B691_1_5"}
{"score": 0.07388082891702652, "chain_id": "34V1S5K3GS0R2FGMMR25WHDHF3B691_1_6"}
{"score": 0.21577316522598267, "chain_id": "34V1S5K3GS0R2FGMMR25WHDHF3B691_1_7"}
{"score": 0.1019279733300209, "chain_id": "34V1S5K3GS0R2FGMMR25WHDHF3B691_1_8"}
{"score": 0.11619658768177032, "chain_id": "34V1S5K3GS0R2FGMMR25WHDHF3B691_1_9"}
{"score": 0.22553794085979462, "chain_id": "34V1S5K3GS0R2FGMMR25WHDHF3B691_1_10"}
{"score": 0.12899740040302277, "chain_id": "3M1CVSFP604YHG9BT6U3YH5SIUPAQE_1_1"}
{"score": 0.19953590631484985, "chain_id": "3M1CVSFP604YHG9BT6U3YH5SIUPAQE_1_2"}
{"score": 0.02025803178548813, "chain_id": "3M1CVSFP604YHG9BT6U3YH5SIUPAQE_1_3"}
{"score": 0.015339952893555164, "chain_id": "3M1CVSFP604YHG9BT6U3YH5SIUPAQE_1_4"}
{"score": 0.014899369329214096, "chain_id": "3M1CVSFP604YHG9BT6U3YH5SIUPAQE_1_5"}
{"score": 0.03337910771369934, "chain_id": "3M1CVSFP604YHG9BT6U3YH5SIUPAQE_1_6"}
{"score": 0.01935611478984356, "chain_id": "3M1CVSFP604YHG9BT6U3YH5SIUPAQE_1_7"}
{"score": 0.019140439108014107, "chain_id": "3M1CVSFP604YHG9BT6U3YH5SIUPAQE_1_8"}
{"score": 0.020114561542868614, "chain_id": "3M1CVSFP604YHG9BT6U3YH5SIUPAQE_1_9"}
{"score": 0.02730652689933777, "chain_id": "3M1CVSFP604YHG9BT6U3YH5SIUPAQE_1_10"}
{"score": 0.9927700161933899, "chain_id": "324G5B4FB37SAL6E55O49KCK2EM076_1_1"}
{"score": 0.9928163290023804, "chain_id": "324G5B4FB37SAL6E55O49KCK2EM076_1_2"}
{"score": 0.9921533465385437, "chain_id": "324G5B4FB37SAL6E55O49KCK2EM076_1_3"}
{"score": 0.9930577278137207, "chain_id": "324G5B4FB37SAL6E55O49KCK2EM076_1_4"}
{"score": 0.033463265746831894, "chain_id": "324G5B4FB37SAL6E55O49KCK2EM076_1_6"}
{"score": 0.9650779366493225, "chain_id": "324G5B4FB37SAL6E55O49KCK2EM076_1_7"}
{"score": 0.9652075171470642, "chain_id": "324G5B4FB37SAL6E55O49KCK2EM076_1_8"}
{"score": 0.9379726052284241, "chain_id": "324G5B4FB37SAL6E55O49KCK2EM076_1_10"}
{"score": 0.033910080790519714, "chain_id": "324G5B4FB37SAL6E55O49KCK2EM076_1_5"}
{"score": 0.6321719884872437, "chain_id": "324G5B4FB37SAL6E55O49KCK2EM076_1_9"}
{"score": 0.9483044147491455, "chain_id": "3W2LOLRXLBE45UXXICWSXLITJHBRKF_1_1"}
{"score": 0.9684011936187744, "chain_id": "3W2LOLRXLBE45UXXICWSXLITJHBRKF_1_2"}
{"score": 0.2578470706939697, "chain_id": "3W2LOLRXLBE45UXXICWSXLITJHBRKF_1_3"}
{"score": 0.2827787697315216, "chain_id": "3W2LOLRXLBE45UXXICWSXLITJHBRKF_1_5"}
{"score": 0.3318272531032562, "chain_id": "3W2LOLRXLBE45UXXICWSXLITJHBRKF_1_4"}
{"score": 0.6355707049369812, "chain_id": "3W2LOLRXLBE45UXXICWSXLITJHBRKF_1_6"}
{"score": 0.8280319571495056, "chain_id": "3W2LOLRXLBE45UXXICWSXLITJHBRKF_1_7"}
{"score": 0.32805657386779785, "chain_id": "3W2LOLRXLBE45UXXICWSXLITJHBRKF_1_8"}
{"score": 0.3787441849708557, "chain_id": "3W2LOLRXLBE45UXXICWSXLITJHBRKF_1_9"}
{"score": 0.14627858996391296, "chain_id": "3W2LOLRXLBE45UXXICWSXLITJHBRKF_1_10"}
{"score": 0.06878671050071716, "chain_id": "3L4PIM1GQTFZPZMEMRXJ6TX4H3QYR1_1_9"}
{"score": 0.07880356162786484, "chain_id": "3L4PIM1GQTFZPZMEMRXJ6TX4H3QYR1_1_1"}
{"score": 0.06422373652458191, "chain_id": "3L4PIM1GQTFZPZMEMRXJ6TX4H3QYR1_1_2"}
{"score": 0.18586209416389465, "chain_id": "3L4PIM1GQTFZPZMEMRXJ6TX4H3QYR1_1_3"}
{"score": 0.27273789048194885, "chain_id": "3L4PIM1GQTFZPZMEMRXJ6TX4H3QYR1_1_4"}
{"score": 0.253695547580719, "chain_id": "3L4PIM1GQTFZPZMEMRXJ6TX4H3QYR1_1_5"}
{"score": 0.03488532081246376, "chain_id": "3L4PIM1GQTFZPZMEMRXJ6TX4H3QYR1_1_6"}
{"score": 0.9534302353858948, "chain_id": "3L4PIM1GQTFZPZMEMRXJ6TX4H3QYR1_1_7"}
{"score": 0.11573760956525803, "chain_id": "3L4PIM1GQTFZPZMEMRXJ6TX4H3QYR1_1_8"}
{"score": 0.1850091964006424, "chain_id": "3L4PIM1GQTFZPZMEMRXJ6TX4H3QYR1_1_10"}
{"score": 0.9919865131378174, "chain_id": "31LVTDXBL79FP0FF3C8TCLV88RFRL1_1_1"}
{"score": 0.9920657277107239, "chain_id": "31LVTDXBL79FP0FF3C8TCLV88RFRL1_1_2"}
{"score": 0.9895045757293701, "chain_id": "31LVTDXBL79FP0FF3C8TCLV88RFRL1_1_3"}
{"score": 0.96481853723526, "chain_id": "31LVTDXBL79FP0FF3C8TCLV88RFRL1_1_5"}
{"score": 0.9650829434394836, "chain_id": "31LVTDXBL79FP0FF3C8TCLV88RFRL1_1_6"}
{"score": 0.9421709775924683, "chain_id": "31LVTDXBL79FP0FF3C8TCLV88RFRL1_1_7"}
{"score": 0.9672998189926147, "chain_id": "31LVTDXBL79FP0FF3C8TCLV88RFRL1_1_8"}
{"score": 0.9920685291290283, "chain_id": "31LVTDXBL79FP0FF3C8TCLV88RFRL1_1_4"}
{"score": 0.09388666599988937, "chain_id": "31LVTDXBL79FP0FF3C8TCLV88RFRL1_1_9"}
{"score": 0.12379266321659088, "chain_id": "31LVTDXBL79FP0FF3C8TCLV88RFRL1_1_10"}
{"score": 0.9547975659370422, "chain_id": "33FBRBDW6OYG4R6DRQ9UILAGORH8CW_1_1"}
{"score": 0.42127084732055664, "chain_id": "33FBRBDW6OYG4R6DRQ9UILAGORH8CW_1_2"}
{"score": 0.5201781392097473, "chain_id": "33FBRBDW6OYG4R6DRQ9UILAGORH8CW_1_3"}
{"score": 0.7135601043701172, "chain_id": "33FBRBDW6OYG4R6DRQ9UILAGORH8CW_1_4"}
{"score": 0.024483071640133858, "chain_id": "33FBRBDW6OYG4R6DRQ9UILAGORH8CW_1_5"}
{"score": 0.047616127878427505, "chain_id": "33FBRBDW6OYG4R6DRQ9UILAGORH8CW_1_6"}
{"score": 0.017344901338219643, "chain_id": "33FBRBDW6OYG4R6DRQ9UILAGORH8CW_1_7"}
{"score": 0.02347097359597683, "chain_id": "33FBRBDW6OYG4R6DRQ9UILAGORH8CW_1_8"}
{"score": 0.022085661068558693, "chain_id": "33FBRBDW6OYG4R6DRQ9UILAGORH8CW_1_9"}
{"score": 0.02440379373729229, "chain_id": "33FBRBDW6OYG4R6DRQ9UILAGORH8CW_1_10"}
{"score": 0.954002857208252, "chain_id": "39DD6S19JPAALLREW7F2LT7NCN8EZX_1_3"}
{"score": 0.9223288893699646, "chain_id": "39DD6S19JPAALLREW7F2LT7NCN8EZX_1_4"}
{"score": 0.047359395772218704, "chain_id": "39DD6S19JPAALLREW7F2LT7NCN8EZX_1_6"}
{"score": 0.45354214310646057, "chain_id": "39DD6S19JPAALLREW7F2LT7NCN8EZX_1_1"}
{"score": 0.08448843657970428, "chain_id": "39DD6S19JPAALLREW7F2LT7NCN8EZX_1_2"}
{"score": 0.10566884279251099, "chain_id": "39DD6S19JPAALLREW7F2LT7NCN8EZX_1_5"}
{"score": 0.25458094477653503, "chain_id": "39DD6S19JPAALLREW7F2LT7NCN8EZX_1_7"}
{"score": 0.271797239780426, "chain_id": "39DD6S19JPAALLREW7F2LT7NCN8EZX_1_8"}
{"score": 0.09160985052585602, "chain_id": "39DD6S19JPAALLREW7F2LT7NCN8EZX_1_9"}
{"score": 0.4848363995552063, "chain_id": "39DD6S19JPAALLREW7F2LT7NCN8EZX_1_10"}
{"score": 0.17530816793441772, "chain_id": "3UNH76FOCS48SJ9MHJ12KU3UFVEYMY_1_2"}
{"score": 0.9861637949943542, "chain_id": "3UNH76FOCS48SJ9MHJ12KU3UFVEYMY_1_3"}
{"score": 0.9834437370300293, "chain_id": "3UNH76FOCS48SJ9MHJ12KU3UFVEYMY_1_4"}
{"score": 0.24574480950832367, "chain_id": "3UNH76FOCS48SJ9MHJ12KU3UFVEYMY_1_6"}
{"score": 0.1648341864347458, "chain_id": "3UNH76FOCS48SJ9MHJ12KU3UFVEYMY_1_9"}
{"score": 0.47512564063072205, "chain_id": "3UNH76FOCS48SJ9MHJ12KU3UFVEYMY_1_1"}
{"score": 0.26647838950157166, "chain_id": "3UNH76FOCS48SJ9MHJ12KU3UFVEYMY_1_5"}
{"score": 0.6271229386329651, "chain_id": "3UNH76FOCS48SJ9MHJ12KU3UFVEYMY_1_7"}
{"score": 0.6972605586051941, "chain_id": "3UNH76FOCS48SJ9MHJ12KU3UFVEYMY_1_8"}
{"score": 0.4211880564689636, "chain_id": "3UNH76FOCS48SJ9MHJ12KU3UFVEYMY_1_10"}
{"score": 0.8830859065055847, "chain_id": "3KB8R4ZV1E6CN1KPWOPNZELWZMVBG8_1_2"}
{"score": 0.7290204763412476, "chain_id": "3KB8R4ZV1E6CN1KPWOPNZELWZMVBG8_1_3"}
{"score": 0.9225864410400391, "chain_id": "3KB8R4ZV1E6CN1KPWOPNZELWZMVBG8_1_1"}
{"score": 0.9768373966217041, "chain_id": "3KB8R4ZV1E6CN1KPWOPNZELWZMVBG8_1_4"}
{"score": 0.018254252150654793, "chain_id": "3KB8R4ZV1E6CN1KPWOPNZELWZMVBG8_1_5"}
{"score": 0.03069135546684265, "chain_id": "3KB8R4ZV1E6CN1KPWOPNZELWZMVBG8_1_6"}
{"score": 0.04615394398570061, "chain_id": "3KB8R4ZV1E6CN1KPWOPNZELWZMVBG8_1_7"}
{"score": 0.036515068262815475, "chain_id": "3KB8R4ZV1E6CN1KPWOPNZELWZMVBG8_1_8"}
{"score": 0.029049668461084366, "chain_id": "3KB8R4ZV1E6CN1KPWOPNZELWZMVBG8_1_9"}
{"score": 0.14821889996528625, "chain_id": "3KB8R4ZV1E6CN1KPWOPNZELWZMVBG8_1_10"}
{"score": 0.9919865131378174, "chain_id": "3VNXK88KKCHCH5VNNZAD89TGY1H9VM_1_1"}
{"score": 0.9920657277107239, "chain_id": "3VNXK88KKCHCH5VNNZAD89TGY1H9VM_1_2"}
{"score": 0.9895045757293701, "chain_id": "3VNXK88KKCHCH5VNNZAD89TGY1H9VM_1_3"}
{"score": 0.9920685291290283, "chain_id": "3VNXK88KKCHCH5VNNZAD89TGY1H9VM_1_4"}
{"score": 0.96481853723526, "chain_id": "3VNXK88KKCHCH5VNNZAD89TGY1H9VM_1_5"}
{"score": 0.9421709775924683, "chain_id": "3VNXK88KKCHCH5VNNZAD89TGY1H9VM_1_7"}
{"score": 0.9672998189926147, "chain_id": "3VNXK88KKCHCH5VNNZAD89TGY1H9VM_1_8"}
{"score": 0.9650829434394836, "chain_id": "3VNXK88KKCHCH5VNNZAD89TGY1H9VM_1_6"}
{"score": 0.09388666599988937, "chain_id": "3VNXK88KKCHCH5VNNZAD89TGY1H9VM_1_9"}
{"score": 0.12379266321659088, "chain_id": "3VNXK88KKCHCH5VNNZAD89TGY1H9VM_1_10"}
{"score": 0.14296145737171173, "chain_id": "3FDJT1UU747F07ZZL5JPUKDXRDHK5H_1_2"}
{"score": 0.9411361217498779, "chain_id": "3FDJT1UU747F07ZZL5JPUKDXRDHK5H_1_3"}
{"score": 0.17169448733329773, "chain_id": "3FDJT1UU747F07ZZL5JPUKDXRDHK5H_1_4"}
{"score": 0.6395418047904968, "chain_id": "3FDJT1UU747F07ZZL5JPUKDXRDHK5H_1_6"}
{"score": 0.22798681259155273, "chain_id": "3FDJT1UU747F07ZZL5JPUKDXRDHK5H_1_1"}
{"score": 0.5994134545326233, "chain_id": "3FDJT1UU747F07ZZL5JPUKDXRDHK5H_1_5"}
{"score": 0.9139712452888489, "chain_id": "3FDJT1UU747F07ZZL5JPUKDXRDHK5H_1_7"}
{"score": 0.44349735975265503, "chain_id": "3FDJT1UU747F07ZZL5JPUKDXRDHK5H_1_8"}
{"score": 0.0724354088306427, "chain_id": "3FDJT1UU747F07ZZL5JPUKDXRDHK5H_1_9"}
{"score": 0.07272373139858246, "chain_id": "3FDJT1UU747F07ZZL5JPUKDXRDHK5H_1_10"}
{"score": 0.9875712394714355, "chain_id": "3GM6G9ZBKNWCBXAS7DE3CDBF13STML_1_1"}
{"score": 0.9887337684631348, "chain_id": "3GM6G9ZBKNWCBXAS7DE3CDBF13STML_1_2"}
{"score": 0.6298354268074036, "chain_id": "3GM6G9ZBKNWCBXAS7DE3CDBF13STML_1_3"}
{"score": 0.8290703296661377, "chain_id": "3GM6G9ZBKNWCBXAS7DE3CDBF13STML_1_6"}
{"score": 0.6890929341316223, "chain_id": "3GM6G9ZBKNWCBXAS7DE3CDBF13STML_1_4"}
{"score": 0.6419202089309692, "chain_id": "3GM6G9ZBKNWCBXAS7DE3CDBF13STML_1_5"}
{"score": 0.24655592441558838, "chain_id": "3GM6G9ZBKNWCBXAS7DE3CDBF13STML_1_7"}
{"score": 0.11641914397478104, "chain_id": "3GM6G9ZBKNWCBXAS7DE3CDBF13STML_1_8"}
{"score": 0.025746602565050125, "chain_id": "3GM6G9ZBKNWCBXAS7DE3CDBF13STML_1_9"}
{"score": 0.0399320088326931, "chain_id": "3GM6G9ZBKNWCBXAS7DE3CDBF13STML_1_10"}
|
ContextualSP/lemon/propara_evaluator/aristo-leaderboard/eqasc/code/predictions/grc.test.predict/0
|
{
"file_path": "ContextualSP/lemon/propara_evaluator/aristo-leaderboard/eqasc/code/predictions/grc.test.predict",
"repo_id": "ContextualSP",
"token_count": 426542
}
| 252 |
from collections import OrderedDict, defaultdict
from typing import NamedTuple, Dict, List
from errors import corrupted_action_file
from process.constants import LOCATION_UNKNOWN, NO_LOCATION, NO_ACTION, CREATE, MOVE, DESTROY
from process import ProcessSummary, Process
def _accumulate_action(locations, actions, num_steps, participant, action, before_location, after_location, step_id):
existing_locations = locations.setdefault(participant, [LOCATION_UNKNOWN] * (1 + num_steps))
existing_actions = actions.setdefault(participant, [NO_ACTION] * num_steps)
if step_id == 1:
existing_locations[0] = before_location
existing_locations[step_id] = after_location
existing_actions[step_id - 1] = action
return locations, actions
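# Illustrative trace (hypothetical values): with num_steps=2, the call
#   _accumulate_action(locations, actions, 2, "water", CREATE, NO_LOCATION, "cloud", step_id=1)
# leaves locations["water"] == [NO_LOCATION, "cloud", LOCATION_UNKNOWN] and
# actions["water"] == [CREATE, NO_ACTION]; a later row with step_id=2 then
# fills locations["water"][2] and actions["water"][1].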
def _num_sentences_in_actions_file(actions_filename: str) -> Dict[int, int]:
    """Map each process_id to its number of sentences (the maximum step_id seen)."""
    num_sentences = defaultdict(int)  # type: Dict[int, int]
with open(actions_filename) as f:
line_num = 0
for line in f:
line_num += 1
try:
process_id_str, step_id_str = line.strip().split('\t', 2)[:2]
except ValueError as e:
corrupted_action_file(
filename=actions_filename,
line_num=line_num,
details=str(e)
)
process_id = int(process_id_str)
step_id = int(step_id_str)
num_sentences[process_id] = max(num_sentences[process_id], step_id)
if not num_sentences:
corrupted_action_file(actions_filename, "no lines to iterate")
return num_sentences
class ActionFile(NamedTuple):
filename: str
# key = process_id
# value = OrderedDict like this:
# key = participant string (like "water vapor ; lifted vapor ; vapor")
# value = list of location strings, length = 1 + number of sentences
locations: Dict[int, Dict[str, List[str]]]
# key = process_id
# value = OrderedDict like this:
# key = participant string (like "water vapor ; lifted vapor ; vapor")
# value = list of actions (CREATE, DESTROY, MOVE or NONE), length = number of sentences
actions: Dict[int, Dict[str, List[str]]]
# key = process_id
# value = number of sentences per process
num_sentences: Dict[int, int]
def has_process_id(self, process_id: int):
return process_id in self.locations
def summarize(self) -> Dict[int, ProcessSummary]:
summary_by_process_id = dict() # type: Dict[int, ProcessSummary]
for process_id in self.locations.keys():
locations = self.locations[process_id]
actions = self.actions[process_id]
p = Process(process_id=process_id, locations=locations, actions=actions,
num_steps=self.num_sentences[process_id])
summary_by_process_id[p.process_id] = ProcessSummary(
process_id=p.process_id,
inputs=p.inputs(),
outputs=p.outputs(),
conversions=p.conversions(),
moves=p.moves(),
)
return summary_by_process_id
def diff_participants(self, other: "ActionFile") -> List[str]:
report: List[str] = []
for process_id in self.process_ids():
self_participants = self.participants(process_id)
if not other.has_process_id(process_id):
report.append(f"Process {process_id} missing in {other.filename}")
continue
other_participants = other.participants(process_id)
process_report: List[str] = []
for p in self_participants:
if p not in other_participants:
process_report.append(f"Process {process_id} in {other.filename}: participant \"{p}\" is missing.")
for op in other_participants:
if op not in self_participants:
process_report.append(
f"Process {process_id} in {other.filename}: participant \"{op}\" is unexpected.")
report += sorted(process_report)
return report
def process_ids(self) -> List[int]:
return sorted(self.locations.keys())
    def participants(self, process_id: int) -> List[str]:
return sorted(self.locations[process_id].keys())
    # Reads an action file from disk.
@classmethod
def from_file(cls, action_filename: str) -> "ActionFile":
num_sentences = _num_sentences_in_actions_file(action_filename)
locations = defaultdict(OrderedDict) # type: Dict[int, Dict[str, List[str]]]
actions = defaultdict(OrderedDict) # type: Dict[int, Dict[str, List[str]]]
line_num = 0
with open(action_filename) as f:
for line in f:
line_num += 1
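                # Each line is tab-separated:
                #   process_id  step_id  participant  action  before_location  after_location
                # e.g. (illustrative) "42\t1\twater\tMOVE\tsoil\troot"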
try:
process_id_str, step_id_str, participant, action, before_location, after_location = \
line.strip("\n\r").split('\t', 6)[:6]
except ValueError as e:
corrupted_action_file(
filename=action_filename,
line_num=line_num,
details=str(e)
)
process_id = int(process_id_str)
step_id = int(step_id_str)
if action == NO_ACTION:
if before_location != after_location:
corrupted_action_file(
filename=action_filename,
line_num=line_num,
details=f"Unequal NONE locations: {before_location} -- {after_location}"
)
elif action == CREATE:
if before_location != '-':
corrupted_action_file(
filename=action_filename,
line_num=line_num,
details=f"Invalid CREATE before_location: {before_location}"
)
before_location = NO_LOCATION
if after_location == "" or after_location == '-':
corrupted_action_file(
filename=action_filename,
line_num=line_num,
details=f"Invalid CREATE after_location: {after_location}"
)
elif action == DESTROY:
if before_location == "" or before_location == '-':
corrupted_action_file(
filename=action_filename,
line_num=line_num,
details=f"Invalid DESTROY before_location: {before_location}"
)
if after_location != '-':
corrupted_action_file(
filename=action_filename,
line_num=line_num,
details=f"Invalid DESTROY after_location: {after_location}"
)
elif action == MOVE:
if before_location == "" or before_location == '-':
corrupted_action_file(
filename=action_filename,
line_num=line_num,
details=f"Invalid MOVE before_location: {before_location}"
)
if after_location == "" or after_location == '-':
corrupted_action_file(
filename=action_filename,
line_num=line_num,
details=f"Invalid MOVE after_location: {after_location}"
)
else:
corrupted_action_file(
filename=action_filename,
line_num=line_num,
details=f"Invalid action: {action}"
)
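                # normalize the surface encoding: "-" means "no location", "?" means "unknown"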
if before_location == "-":
before_location = NO_LOCATION
elif before_location == "?":
before_location = LOCATION_UNKNOWN
if after_location == "-":
after_location = NO_LOCATION
elif after_location == "?":
after_location = LOCATION_UNKNOWN
# update locations and actions for this process_id
locations[process_id], actions[process_id] = \
_accumulate_action(
locations[process_id],
actions[process_id],
num_sentences[process_id],
participant,
action,
before_location,
after_location,
step_id,
)
if not locations:
corrupted_action_file(action_filename, "no lines to iterate")
return cls(
filename=action_filename,
locations=locations,
actions=actions,
num_sentences=num_sentences
)
|
ContextualSP/lemon/propara_evaluator/aristo-leaderboard/propara/evaluator/process/action_file.py/0
|
{
"file_path": "ContextualSP/lemon/propara_evaluator/aristo-leaderboard/propara/evaluator/process/action_file.py",
"repo_id": "ContextualSP",
"token_count": 4746
}
| 253 |
{ "id": "P1", "gold_label": "E" }
{ "id": "P2", "gold_label": "E" }
{ "id": "P3", "gold_label": "N" }
{ "id": "P4", "gold_label": "N" }
{ "id": "P5", "gold_label": "N" }
|
ContextualSP/lemon/propara_evaluator/aristo-leaderboard/scitail/evaluator/answers.jsonl/0
|
{
"file_path": "ContextualSP/lemon/propara_evaluator/aristo-leaderboard/scitail/evaluator/answers.jsonl",
"repo_id": "ContextualSP",
"token_count": 90
}
| 254 |
import random
import re
from random import shuffle
import os
from tqdm import tqdm
def expand_numbers_in_text(text, delim=" ", ignore_chars=[","], reverse_num=False):
    # matches signed integers/decimals, optionally with comma separators,
    # scientific notation, and a trailing percent sign
    number_pattern = r"[-+]?[.]?[\d]+(,\d+)*[\.]?\d*(?:[eE][-+]?\d+)?%?"
num_char_spans = [(m.start(0), m.end(0)) for m in re.finditer(number_pattern, text)]
if len(num_char_spans) == 0: return text
out_text = ""
last_e = -1
for i, (s, e) in enumerate(num_char_spans):
out_text += text[:s] if i == 0 else text[last_e:s]
num_str = delim.join([c for c in list(text[s:e]) if c not in ignore_chars])
out_text += num_str if not reverse_num else num_str[::-1]
last_e = e
out_text += text[last_e:] # append rest
return out_text
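# Illustrative examples (default delim and ignore_chars assumed):
#   expand_numbers_in_text("pay 1,234 now")              -> "pay 1 2 3 4 now"
#   expand_numbers_in_text("x = 12.5", reverse_num=True) -> "x = 5 . 2 1"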
def random_sample_numbers(with_vars):
    """Sample a small *, / expression and return (input_line, output_line)."""
    # number of binary operators in the expression (1 or 2)
    op_num = random.randint(1, 2)
    candi_num = 30
    # candidate variable names: A-Z and a-z (ranges are end-exclusive)
    text_mapping = [chr(i) for i in list(range(65, 91)) + list(range(97, 123))]
shuffle(text_mapping)
var_numbers = []
real_numbers = []
candidate_numbers = []
for i in range(candi_num):
        # randomly sample a number: 80% integers in [1, 100],
        # otherwise a one-decimal float in [0.1, 100.0]
        is_int = random.randint(0, 9) < 8
        if is_int:
            final_num = str(random.randint(1, 100))
        else:
            final_num = str(random.randint(1, 1000) / 10)
if i <= op_num:
var_numbers.append(text_mapping[i])
real_numbers.append(final_num)
            # randomly sample an operator (* or /)
            operator = random.choice(["*", "/"])
if i != op_num:
var_numbers.append(operator)
real_numbers.append(operator)
if i >= op_num and not with_vars:
break
candidate_numbers.append(final_num)
if with_vars:
input_expression = " ".join(var_numbers)
zipped_values = list(zip(text_mapping[:candi_num], candidate_numbers))
shuffle(zipped_values)
candi_expression = " ".join(["{} = {} ;".format(var_name, var_value)
for var_name, var_value in zipped_values])
input_line = input_expression + " col : " + candi_expression
else:
input_line = " ".join(real_numbers)
    # always add 1 to the evaluated expression
    output_num = eval(" ".join(real_numbers)) + 1
    if isinstance(output_num, int):
        output_line = str(output_num)
    else:
        output_line = "{:.1f}".format(output_num)
return input_line, output_line
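# Illustrative shape of a generated pair (with_vars=True, hypothetical names/values):
#   input_line:  "D * p col : k = 3.4 ; D = 7 ; p = 25 ; ..."  -- the expression over
#                variable names, then shuffled "name = value ;" bindings after "col :"
#   output_line: str(eval("7 * 25") + 1) == "176"
# The __main__ block below additionally space-separates digits via expand_numbers_in_text.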
if __name__ == '__main__':
output_dir = "pretrain_math"
if not os.path.exists(output_dir):
os.makedirs(output_dir)
train_src_f = open(os.path.join(output_dir, "train.src"), "w", encoding="utf8")
train_tgt_f = open(os.path.join(output_dir, "train.tgt"), "w", encoding="utf8")
dev_src_f = open(os.path.join(output_dir, "dev.src"), "w", encoding="utf8")
dev_tgt_f = open(os.path.join(output_dir, "dev.tgt"), "w", encoding="utf8")
for _ in tqdm(range(4000000)):
input_line, output_line = random_sample_numbers(with_vars=True)
input_line = expand_numbers_in_text(input_line)
output_line = expand_numbers_in_text(output_line)
train_src_f.write(input_line + "\n")
train_tgt_f.write(output_line + "\n")
for _ in tqdm(range(20000)):
input_line, output_line = random_sample_numbers(with_vars=True)
input_line = expand_numbers_in_text(input_line)
output_line = expand_numbers_in_text(output_line)
dev_src_f.write(input_line + "\n")
dev_tgt_f.write(output_line + "\n")
train_src_f.close()
train_tgt_f.close()
dev_src_f.close()
dev_tgt_f.close()
|
ContextualSP/poet/synthesize_math_corpus.py/0
|
{
"file_path": "ContextualSP/poet/synthesize_math_corpus.py",
"repo_id": "ContextualSP",
"token_count": 1702
}
| 255 |
#!/usr/bin/env bash
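# Trains the sketch prediction model for one CFQ split (mcd1 here); the paths
# below assume the ../data/<split>/ layout produced by the preprocessing step.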
split=mcd1
data_path=../data/$split/
key=$split-sketch
model_path=../model/sketch_prediction-$key
output_file=train_log-$key
echo $output_file
mkdir -p $model_path
CUDA_VISIBLE_DEVICES=4 python3 main.py \
--src_path $data_path/train/train_encode.txt --trg_path $data_path/train/train_sketch.txt \
--src_vocabulary $data_path/vocab.cfq.tokens.src --trg_vocabulary $data_path/vocab.cfq.tokens.sketch \
--embedding_size 300 --batch_size 64 --validate_batch_size 64 \
--save_path $model_path/ --save_interval 500 --log_interval 500 --cuda \
--iterations 100 \
--validation_src_path $data_path/dev/dev_encode.txt --validation_trg_path $data_path/dev/dev_sketch.txt \
> $output_file
|
ContextualSP/poset_decoding/sketch_prediction/train.sh/0
|
{
"file_path": "ContextualSP/poset_decoding/sketch_prediction/train.sh",
"repo_id": "ContextualSP",
"token_count": 281
}
| 256 |
@ECHO OFF
pushd %~dp0
REM Command file for Sphinx documentation
if "%SPHINXBUILD%" == "" (
set SPHINXBUILD=sphinx-build
)
set SOURCEDIR=source
set BUILDDIR=_build
set SPHINXPROJ=MatchZoo
if "%1" == "" goto help
%SPHINXBUILD% >NUL 2>NUL
if errorlevel 9009 (
echo.
echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
echo.installed, then set the SPHINXBUILD environment variable to point
echo.to the full path of the 'sphinx-build' executable. Alternatively you
echo.may add the Sphinx directory to PATH.
echo.
echo.If you don't have Sphinx installed, grab it from
echo.http://sphinx-doc.org/
exit /b 1
)
%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
goto end
:help
%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
:end
popd
|
ContextualSP/poset_decoding/traversal_path_prediction/MatchZoo-py/docs/make.bat/0
|
{
"file_path": "ContextualSP/poset_decoding/traversal_path_prediction/MatchZoo-py/docs/make.bat",
"repo_id": "ContextualSP",
"token_count": 321
}
| 257 |
"""Matchzoo DataPack, pair-wise tuple (feature) and context as input."""
import typing
import inspect
from pathlib import Path
import functools
import dill
from tqdm import tqdm
import numpy as np
import pandas as pd
import matchzoo
tqdm.pandas()
def _convert_to_list_index(
index: typing.Union[int, slice, np.array],
length: int
):
if isinstance(index, int):
index = [index]
elif isinstance(index, slice):
index = list(range(*index.indices(length)))
return index
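# Illustrative: _convert_to_list_index(3, 10) -> [3];
# _convert_to_list_index(slice(0, 3), 10) -> [0, 1, 2];
# a numpy index array passes through unchanged.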
class DataPack(object):
"""
    MatchZoo :class:`DataPack` data structure; stores dataframes and context.
`DataPack` is a MatchZoo native data structure that most MatchZoo data
handling processes build upon. A `DataPack` consists of three parts:
    `left`, `right` and `relation`, each one of which is a `pandas.DataFrame`.
    :param relation: Store the relation between left document
        and right document using ids.
:param left: Store the content or features for id_left.
:param right: Store the content or features for
id_right.
Example:
>>> left = [
... ['qid1', 'query 1'],
... ['qid2', 'query 2']
... ]
>>> right = [
... ['did1', 'document 1'],
... ['did2', 'document 2']
... ]
>>> relation = [['qid1', 'did1', 1], ['qid2', 'did2', 1]]
>>> relation_df = pd.DataFrame(relation)
>>> left = pd.DataFrame(left)
>>> right = pd.DataFrame(right)
>>> dp = DataPack(
... relation=relation_df,
... left=left,
... right=right,
... )
>>> len(dp)
2
"""
DATA_FILENAME = 'data.dill'
def __init__(
self,
relation: pd.DataFrame,
left: pd.DataFrame,
right: pd.DataFrame
):
""":class:`DataPack` initializer."""
self._relation = relation
self._left = left
self._right = right
@property
def has_label(self) -> bool:
""":return: `True` if `label` column exists, `False` other wise."""
return 'label' in self._relation.columns
def __len__(self) -> int:
"""Get numer of rows in the class:`DataPack` object."""
return self._relation.shape[0]
@property
def frame(self) -> 'DataPack.FrameView':
"""
View the data pack as a :class:`pandas.DataFrame`.
        The returned data frame is created by merging the left data frame,
        the right data frame and the relation data frame. Use `[]` to access
an item or a slice of items.
:return: A :class:`matchzoo.DataPack.FrameView` instance.
Example:
>>> import matchzoo as mz
>>> data_pack = mz.datasets.toy.load_data()
>>> type(data_pack.frame)
<class 'matchzoo.data_pack.data_pack.DataPack.FrameView'>
>>> frame_slice = data_pack.frame[0:5]
>>> type(frame_slice)
<class 'pandas.core.frame.DataFrame'>
>>> list(frame_slice.columns)
['id_left', 'text_left', 'id_right', 'text_right', 'label']
>>> full_frame = data_pack.frame()
>>> len(full_frame) == len(data_pack)
True
"""
return DataPack.FrameView(self)
def unpack(self) -> typing.Tuple[typing.Dict[str, np.array],
typing.Optional[np.array]]:
"""
Unpack the data for training.
        The return value can be fed directly to `model.fit` or
        `model.fit_generator`.
:return: A tuple of (X, y). `y` is `None` if `self` has no label.
Example:
>>> import matchzoo as mz
>>> data_pack = mz.datasets.toy.load_data()
>>> X, y = data_pack.unpack()
>>> type(X)
<class 'dict'>
>>> sorted(X.keys())
['id_left', 'id_right', 'text_left', 'text_right']
>>> type(y)
<class 'numpy.ndarray'>
>>> X, y = data_pack.drop_label().unpack()
>>> type(y)
<class 'NoneType'>
"""
frame = self.frame()
columns = list(frame.columns)
if self.has_label:
columns.remove('label')
y = np.vstack(np.asarray(frame['label']))
else:
y = None
x = frame[columns].to_dict(orient='list')
for key, val in x.items():
x[key] = np.array(val)
return x, y
def __getitem__(self, index: typing.Union[int, slice, np.array]
) -> 'DataPack':
"""
Get specific item(s) as a new :class:`DataPack`.
The returned :class:`DataPack` will be a copy of the subset of the
original :class:`DataPack`.
:param index: Index of the item(s) to get.
:return: An instance of :class:`DataPack`.
"""
index = _convert_to_list_index(index, len(self))
relation = self._relation.loc[index].reset_index(drop=True)
left = self._left.loc[relation['id_left'].unique()]
right = self._right.loc[relation['id_right'].unique()]
return DataPack(left=left.copy(),
right=right.copy(),
relation=relation.copy())
@property
def relation(self):
"""`relation` getter."""
return self._relation
@relation.setter
def relation(self, value):
"""`relation` setter."""
self._relation = value
@property
def left(self) -> pd.DataFrame:
"""Get :meth:`left` of :class:`DataPack`."""
return self._left
@property
def right(self) -> pd.DataFrame:
"""Get :meth:`right` of :class:`DataPack`."""
return self._right
def copy(self) -> 'DataPack':
""":return: A deep copy."""
return DataPack(left=self._left.copy(),
right=self._right.copy(),
relation=self._relation.copy())
def save(self, dirpath: typing.Union[str, Path]):
"""
Save the :class:`DataPack` object.
A saved :class:`DataPack` is represented as a directory with a
:class:`DataPack` object (transformed user input as features and
        context); it is serialized with `dill`.
:param dirpath: directory path of the saved :class:`DataPack`.
"""
dirpath = Path(dirpath)
data_file_path = dirpath.joinpath(self.DATA_FILENAME)
if not dirpath.exists():
dirpath.mkdir(parents=True)
dill.dump(self, open(data_file_path, mode='wb'))
def _optional_inplace(func):
"""
        Decorator that adds an `inplace` keyword argument to a method.
        Decorate any method that modifies data inplace to make that inplace
        change optional.
"""
doc = ":param inplace: `True` to modify inplace, `False` to return " \
"a modified copy. (default: `False`)"
def _clean(s):
return s.replace(' ', '').replace('\n', '')
if _clean(doc) not in _clean(inspect.getdoc(func)):
raise NotImplementedError(
f"`inplace` parameter of {func} not documented.\n"
f"Please add the following line to its documentation:\n{doc}")
@functools.wraps(func)
def wrapper(
self, *args, inplace: bool = False, **kwargs
) -> typing.Optional['DataPack']:
if inplace:
target = self
else:
target = self.copy()
func(target, *args, **kwargs)
if not inplace:
return target
return wrapper
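    # Usage sketch for `_optional_inplace` (an illustration, not part of the
    # original module). A decorated method such as `shuffle` below can be
    # called either way:
    #     new_pack = data_pack.shuffle()    # returns a modified copy
    #     data_pack.shuffle(inplace=True)   # modifies `data_pack`, returns None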
@_optional_inplace
def drop_empty(self):
"""
Process empty data by removing corresponding rows.
:param inplace: `True` to modify inplace, `False` to return a modified
copy. (default: `False`)
"""
empty_left_id = self._left[
self._left['length_left'] == 0].index.tolist()
empty_right_id = self._right[
self._right['length_right'] == 0].index.tolist()
empty_id = self._relation[
self._relation['id_left'].isin(empty_left_id) | self._relation[
'id_right'].isin(empty_right_id)
].index.tolist()
self._left = self._left.drop(empty_left_id)
self._right = self._right.drop(empty_right_id)
self._relation = self._relation.drop(empty_id)
self._relation.reset_index(drop=True, inplace=True)
@_optional_inplace
def shuffle(self):
"""
Shuffle the data pack by shuffling the relation column.
:param inplace: `True` to modify inplace, `False` to return a modified
copy. (default: `False`)
Example:
>>> import matchzoo as mz
>>> import numpy.random
>>> numpy.random.seed(0)
>>> data_pack = mz.datasets.toy.load_data()
>>> orig_ids = data_pack.relation['id_left']
>>> shuffled = data_pack.shuffle()
>>> (shuffled.relation['id_left'] != orig_ids).any()
True
"""
self._relation = self._relation.sample(frac=1)
self._relation.reset_index(drop=True, inplace=True)
@_optional_inplace
def drop_label(self):
"""
Remove `label` column from the data pack.
:param inplace: `True` to modify inplace, `False` to return a modified
copy. (default: `False`)
Example:
>>> import matchzoo as mz
>>> data_pack = mz.datasets.toy.load_data()
>>> data_pack.has_label
True
>>> data_pack.drop_label(inplace=True)
>>> data_pack.has_label
False
"""
self._relation = self._relation.drop(columns='label')
@_optional_inplace
def append_text_length(self, verbose=1):
"""
Append `length_left` and `length_right` columns.
:param inplace: `True` to modify inplace, `False` to return a modified
copy. (default: `False`)
:param verbose: Verbosity.
Example:
>>> import matchzoo as mz
>>> data_pack = mz.datasets.toy.load_data()
>>> 'length_left' in data_pack.frame[0].columns
False
>>> new_data_pack = data_pack.append_text_length(verbose=0)
>>> 'length_left' in new_data_pack.frame[0].columns
True
>>> 'length_left' in data_pack.frame[0].columns
False
>>> data_pack.append_text_length(inplace=True, verbose=0)
>>> 'length_left' in data_pack.frame[0].columns
True
"""
self.apply_on_text(len, rename=('length_left', 'length_right'),
inplace=True, verbose=verbose)
@_optional_inplace
def apply_on_text(
self, func: typing.Callable,
mode: str = 'both',
rename: typing.Optional[str] = None,
verbose: int = 1
):
"""
Apply `func` to text columns based on `mode`.
:param func: The function to apply.
:param mode: One of "both", "left" and "right".
:param rename: If set, use new names for results instead of replacing
the original columns. To set `rename` in "both" mode, use a tuple
of `str`, e.g. ("text_left_new_name", "text_right_new_name").
:param inplace: `True` to modify inplace, `False` to return a modified
copy. (default: `False`)
:param verbose: Verbosity.
Examples::
>>> import matchzoo as mz
>>> data_pack = mz.datasets.toy.load_data()
>>> frame = data_pack.frame
To apply `len` on the left text and add the result as 'length_left':
>>> data_pack.apply_on_text(len, mode='left',
... rename='length_left',
... inplace=True,
... verbose=0)
>>> list(frame[0].columns) # noqa: E501
['id_left', 'text_left', 'length_left', 'id_right', 'text_right', 'label']
To do the same to the right text:
>>> data_pack.apply_on_text(len, mode='right',
... rename='length_right',
... inplace=True,
... verbose=0)
>>> list(frame[0].columns) # noqa: E501
['id_left', 'text_left', 'length_left', 'id_right', 'text_right', 'length_right', 'label']
To do the same to the both texts at the same time:
>>> data_pack.apply_on_text(len, mode='both',
... rename=('extra_left', 'extra_right'),
... inplace=True,
... verbose=0)
>>> list(frame[0].columns) # noqa: E501
['id_left', 'text_left', 'length_left', 'extra_left', 'id_right', 'text_right', 'length_right', 'extra_right', 'label']
To suppress outputs:
>>> data_pack.apply_on_text(len, mode='both', verbose=0,
... inplace=True)
"""
if mode == 'both':
self._apply_on_text_both(func, rename, verbose=verbose)
elif mode == 'left':
self._apply_on_text_left(func, rename, verbose=verbose)
elif mode == 'right':
self._apply_on_text_right(func, rename, verbose=verbose)
else:
raise ValueError(f"{mode} is not a valid mode type."
f"Must be one of `left` `right` `both`.")
def _apply_on_text_right(self, func, rename, verbose=1):
name = rename or 'text_right'
if verbose:
tqdm.pandas(desc="Processing " + name + " with " + func.__name__)
self._right[name] = self._right['text_right'].progress_apply(func)
else:
self._right[name] = self._right['text_right'].apply(func)
def _apply_on_text_left(self, func, rename, verbose=1):
name = rename or 'text_left'
if verbose:
tqdm.pandas(desc="Processing " + name + " with " + func.__name__)
self._left[name] = self._left['text_left'].progress_apply(func)
else:
self._left[name] = self._left['text_left'].apply(func)
def _apply_on_text_both(self, func, rename, verbose=1):
left_name, right_name = rename or ('text_left', 'text_right')
self._apply_on_text_left(func, rename=left_name, verbose=verbose)
self._apply_on_text_right(func, rename=right_name, verbose=verbose)
class FrameView(object):
"""FrameView."""
def __init__(self, data_pack: 'DataPack'):
"""
View a data pack as a frame.
            A slice of the view is generated by merging three parts of the
data pack being viewed into a big table.
:param data_pack: :class:`DataPack` to view.
Examples::
>>> import matchzoo as mz
>>> data_pack = mz.datasets.toy.load_data()
>>> frame = data_pack.frame
Use `()` to get a full copy of the frame:
>>> list(frame().columns)
['id_left', 'text_left', 'id_right', 'text_right', 'label']
>>> len(frame()) == len(data_pack)
True
            Notice that a view is bound to the original data pack, so changing
contents of the data pack will affect a view previously created:
>>> data_pack.drop_label(inplace=True)
>>> list(frame().columns)
['id_left', 'text_left', 'id_right', 'text_right']
To slice the view:
>>> frame_slice = frame[3:5]
>>> len(frame_slice)
2
"""
self._data_pack = data_pack
def __getitem__(self, index: typing.Union[int, slice, np.array]
) -> pd.DataFrame:
"""Slicer."""
dp = self._data_pack
index = _convert_to_list_index(index, len(dp))
left_df = dp.left.loc[dp.relation['id_left'][index]].reset_index()
right_df = dp.right.loc[
dp.relation['id_right'][index]].reset_index()
joined_table = left_df.join(right_df)
for column in dp.relation.columns:
if column not in ['id_left', 'id_right']:
labels = dp.relation[column][index].to_frame()
labels = labels.reset_index(drop=True)
joined_table = joined_table.join(labels)
return joined_table
def __call__(self):
""":return: A full copy. Equivalant to `frame[:]`."""
return self[:]
def load_data_pack(dirpath: typing.Union[str, Path]) -> DataPack:
"""
Load a :class:`DataPack`. The reverse function of :meth:`save`.
:param dirpath: directory path of the saved model.
:return: a :class:`DataPack` instance.
"""
dirpath = Path(dirpath)
data_file_path = dirpath.joinpath(DataPack.DATA_FILENAME)
dp = dill.load(open(data_file_path, 'rb'))
return dp
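# Minimal save/load round-trip sketch (an illustration, not part of the
# original module; `dp` is assumed to be a DataPack such as the one built
# in the class docstring above):
#
#     dp.save('/tmp/my_data_pack')
#     restored = load_data_pack('/tmp/my_data_pack')
#     assert len(restored) == len(dp)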
|
ContextualSP/poset_decoding/traversal_path_prediction/MatchZoo-py/matchzoo/data_pack/data_pack.py/0
|
{
"file_path": "ContextualSP/poset_decoding/traversal_path_prediction/MatchZoo-py/matchzoo/data_pack/data_pack.py",
"repo_id": "ContextualSP",
"token_count": 8291
}
| 258 |
from .load_data import load_data
|
ContextualSP/poset_decoding/traversal_path_prediction/MatchZoo-py/matchzoo/datasets/wiki_qa/__init__.py/0
|
{
"file_path": "ContextualSP/poset_decoding/traversal_path_prediction/MatchZoo-py/matchzoo/datasets/wiki_qa/__init__.py",
"repo_id": "ContextualSP",
"token_count": 10
}
| 259 |
from .precision import Precision
from .average_precision import AveragePrecision
from .discounted_cumulative_gain import DiscountedCumulativeGain
from .mean_reciprocal_rank import MeanReciprocalRank
from .mean_average_precision import MeanAveragePrecision
from .normalized_discounted_cumulative_gain import \
NormalizedDiscountedCumulativeGain
from .accuracy import Accuracy
from .cross_entropy import CrossEntropy
def list_available() -> list:
from matchzoo.engine.base_metric import BaseMetric
from matchzoo.utils import list_recursive_concrete_subclasses
return list_recursive_concrete_subclasses(BaseMetric)
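# Usage sketch (an illustration, not part of the original module):
# `list_available()` collects every concrete subclass of `BaseMetric`,
# so the classes imported above all show up:
#
#     metric_classes = list_available()
#     assert Precision in metric_classes and Accuracy in metric_classes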
|
ContextualSP/poset_decoding/traversal_path_prediction/MatchZoo-py/matchzoo/metrics/__init__.py/0
|
{
"file_path": "ContextualSP/poset_decoding/traversal_path_prediction/MatchZoo-py/matchzoo/metrics/__init__.py",
"repo_id": "ContextualSP",
"token_count": 186
}
| 260 |
"""An implementation of CDSSM (CLSM) model."""
import typing
import torch
from torch import nn
import torch.nn.functional as F
from matchzoo import preprocessors
from matchzoo.engine.base_model import BaseModel
from matchzoo.engine.param import Param
from matchzoo.engine.param_table import ParamTable
from matchzoo.engine.base_callback import BaseCallback
from matchzoo.dataloader import callbacks
from matchzoo.utils import TensorType, parse_activation
from matchzoo.engine.base_preprocessor import BasePreprocessor
class CDSSM(BaseModel):
"""
CDSSM Model implementation.
Learning Semantic Representations Using Convolutional Neural Networks
for Web Search. (2014a)
A Latent Semantic Model with Convolutional-Pooling Structure for
Information Retrieval. (2014b)
Examples:
>>> import matchzoo as mz
>>> model = CDSSM()
>>> model.params['task'] = mz.tasks.Ranking()
>>> model.params['vocab_size'] = 4
>>> model.params['filters'] = 32
>>> model.params['kernel_size'] = 3
>>> model.params['conv_activation_func'] = 'relu'
>>> model.build()
"""
@classmethod
def get_default_params(cls) -> ParamTable:
""":return: model default parameters."""
        # `with_multi_layer_perceptron` is set to True here so the dense
        # layer hyper-parameters are exposed as user-tunable params
params = super().get_default_params(with_multi_layer_perceptron=True)
params.add(Param(name='vocab_size', value=419,
desc="Size of vocabulary."))
params.add(Param(name='filters', value=3,
desc="Number of filters in the 1D convolution "
"layer."))
params.add(Param(name='kernel_size', value=3,
desc="Number of kernel size in the 1D "
"convolution layer."))
params.add(Param(name='conv_activation_func', value='relu',
desc="Activation function in the convolution"
" layer."))
params.add(Param(name='dropout_rate', value=0.3,
desc="The dropout rate."))
return params
@classmethod
def get_default_preprocessor(
cls,
truncated_mode: str = 'pre',
truncated_length_left: typing.Optional[int] = None,
truncated_length_right: typing.Optional[int] = None,
filter_mode: str = 'df',
filter_low_freq: float = 1,
filter_high_freq: float = float('inf'),
remove_stop_words: bool = False,
ngram_size: typing.Optional[int] = 3,
) -> BasePreprocessor:
"""
Model default preprocessor.
The preprocessor's transform should produce a correctly shaped data
pack that can be used for training.
:return: Default preprocessor.
"""
return preprocessors.BasicPreprocessor(
truncated_mode=truncated_mode,
truncated_length_left=truncated_length_left,
truncated_length_right=truncated_length_right,
filter_mode=filter_mode,
filter_low_freq=filter_low_freq,
filter_high_freq=filter_high_freq,
remove_stop_words=remove_stop_words,
ngram_size=ngram_size
)
@classmethod
def get_default_padding_callback(
cls,
fixed_length_left: int = None,
fixed_length_right: int = None,
pad_word_value: typing.Union[int, str] = 0,
pad_word_mode: str = 'pre',
with_ngram: bool = True,
fixed_ngram_length: int = None,
pad_ngram_value: typing.Union[int, str] = 0,
pad_ngram_mode: str = 'pre'
) -> BaseCallback:
"""
Model default padding callback.
        The padding callback's `on_batch_unpacked` pads a batch of data to
        fixed lengths.
:return: Default padding callback.
"""
return callbacks.BasicPadding(
fixed_length_left=fixed_length_left,
fixed_length_right=fixed_length_right,
pad_word_value=pad_word_value,
pad_word_mode=pad_word_mode,
with_ngram=with_ngram,
fixed_ngram_length=fixed_ngram_length,
pad_ngram_value=pad_ngram_value,
pad_ngram_mode=pad_ngram_mode
)
def _create_base_network(self) -> nn.Module:
"""
        Apply convolution and max-pooling operations to each letter n-gram.
        The input shape is `fixed_text_length` * `number of letter-ngrams`;
        as described in the paper, `n` is 3 and the number of letter-trigrams
        is about 30,000 according to their observation.
:return: A :class:`nn.Module` of CDSSM network, tensor in tensor out.
"""
pad = nn.ConstantPad1d((0, self._params['kernel_size'] - 1), 0)
conv = nn.Conv1d(
in_channels=self._params['vocab_size'],
out_channels=self._params['filters'],
kernel_size=self._params['kernel_size']
)
activation = parse_activation(
self._params['conv_activation_func']
)
dropout = nn.Dropout(p=self._params['dropout_rate'])
pool = nn.AdaptiveMaxPool1d(1)
squeeze = Squeeze()
mlp = self._make_multi_layer_perceptron_layer(
self._params['filters']
)
return nn.Sequential(
pad, conv, activation, dropout, pool, squeeze, mlp
)
def build(self):
"""
Build model structure.
CDSSM use Siamese architecture.
"""
self.net_left = self._create_base_network()
self.net_right = self._create_base_network()
self.out = self._make_output_layer(1)
def forward(self, inputs):
"""Forward."""
# Process left & right input.
input_left, input_right = inputs['ngram_left'], inputs['ngram_right']
input_left = input_left.transpose(1, 2)
input_right = input_right.transpose(1, 2)
input_left = self.net_left(input_left)
input_right = self.net_right(input_right)
# Dot product with cosine similarity.
x = F.cosine_similarity(input_left, input_right)
out = self.out(x.unsqueeze(dim=1))
return out
def guess_and_fill_missing_params(self, verbose: int = 1):
"""
Guess and fill missing parameters in :attr:`params`.
        Use this method to automatically fill in hyper-parameters.
        This involves some guessing, so the parameters it fills in could be
        wrong. For example, the default task is `Ranking`, and if we do not
set it to `Classification` manually for data packs prepared for
classification, then the shape of the model output and the data will
mismatch.
:param verbose: Verbosity.
"""
super().guess_and_fill_missing_params(verbose)
class Squeeze(nn.Module):
"""Squeeze."""
def forward(self, x):
"""Forward."""
return x.squeeze(dim=-1)
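# Shape sketch for the forward pass (an illustration, not part of the
# original module; shapes assume the docstring example above):
#
#     inputs['ngram_left']  : (batch, left_length,  vocab_size) letter-trigram counts
#     inputs['ngram_right'] : (batch, right_length, vocab_size)
#     model(inputs)         : (batch, 1) matching score for a ranking task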
|
ContextualSP/poset_decoding/traversal_path_prediction/MatchZoo-py/matchzoo/models/cdssm.py/0
|
{
"file_path": "ContextualSP/poset_decoding/traversal_path_prediction/MatchZoo-py/matchzoo/models/cdssm.py",
"repo_id": "ContextualSP",
"token_count": 3087
}
| 261 |
"""matchzoo/models/README.md generater."""
from pathlib import Path
import tabulate
import inspect
import pandas as pd
import matchzoo
def _generate():
full = _make_title()
for model_class in matchzoo.models.list_available():
full += _make_model_class_subtitle(model_class)
full += _make_doc_section_subsubtitle()
full += _make_model_doc(model_class)
model = model_class()
full += _make_params_section_subsubtitle()
full += _make_model_params_table(model)
_write_to_files(full)
def _make_title():
title = 'MatchZoo Model Reference'
line = '*' * len(title)
return line + '\n' + title + '\n' + line + '\n\n'
def _make_model_class_subtitle(model_class):
subtitle = model_class.__name__
line = '#' * len(subtitle)
return subtitle + '\n' + line + '\n\n'
def _make_doc_section_subsubtitle():
subsubtitle = 'Model Documentation'
line = '*' * len(subsubtitle)
return subsubtitle + '\n' + line + '\n\n'
def _make_params_section_subsubtitle():
subsubtitle = 'Model Hyper Parameters'
line = '*' * len(subsubtitle)
return subsubtitle + '\n' + line + '\n\n'
def _make_model_doc(model_class):
return inspect.getdoc(model_class) + '\n\n'
def _make_model_params_table(model):
params = model.get_default_params()
df = params.to_frame()
df = df.rename({
'Value': 'Default Value',
'Hyper-Space': 'Default Hyper-Space'
}, axis='columns')
return tabulate.tabulate(df, tablefmt='rst', headers='keys') + '\n\n'
def _write_to_files(full):
readme_file_path = Path(__file__).parent.joinpath('README.rst')
doc_file_path = Path(__file__).parent.parent.parent. \
joinpath('docs').joinpath('source').joinpath('model_reference.rst')
for file_path in readme_file_path, doc_file_path:
with open(file_path, 'w', encoding='utf-8') as out_file:
out_file.write(full)
if __name__ == '__main__':
_generate()
|
ContextualSP/poset_decoding/traversal_path_prediction/MatchZoo-py/matchzoo/models/parameter_readme_generator.py/0
|
{
"file_path": "ContextualSP/poset_decoding/traversal_path_prediction/MatchZoo-py/matchzoo/models/parameter_readme_generator.py",
"repo_id": "ContextualSP",
"token_count": 815
}
| 262 |
"""Bert Preprocessor."""
from pytorch_transformers import BertTokenizer
from . import units
from matchzoo import DataPack
from matchzoo.engine.base_preprocessor import BasePreprocessor
class BertPreprocessor(BasePreprocessor):
"""
    Basic preprocessor helper.
    :param mode: String; the supported modes are listed at
        https://huggingface.co/pytorch-transformers/pretrained_models.html.
"""
def __init__(self, mode: str = 'bert-base-uncased'):
"""Initialization."""
super().__init__()
self._tokenizer = BertTokenizer.from_pretrained(mode)
def fit(self, data_pack: DataPack, verbose: int = 1):
"""Tokenizer is all BertPreprocessor's need."""
return
def transform(self, data_pack: DataPack, verbose: int = 1) -> DataPack:
"""
Apply transformation on data.
:param data_pack: Inputs to be preprocessed.
:param verbose: Verbosity.
:return: Transformed data as :class:`DataPack` object.
"""
data_pack = data_pack.copy()
data_pack.apply_on_text(self._tokenizer.encode,
mode='both', inplace=True, verbose=verbose)
data_pack.append_text_length(inplace=True, verbose=verbose)
data_pack.drop_empty(inplace=True)
return data_pack
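# Usage sketch (an illustration, not part of the original module; assumes a
# raw DataPack such as `matchzoo.datasets.toy.load_data()` is available):
#
#     preprocessor = BertPreprocessor('bert-base-uncased')
#     processed = preprocessor.fit_transform(raw_data_pack)  # fit is a no-op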
|
ContextualSP/poset_decoding/traversal_path_prediction/MatchZoo-py/matchzoo/preprocessors/bert_preprocessor.py/0
|
{
"file_path": "ContextualSP/poset_decoding/traversal_path_prediction/MatchZoo-py/matchzoo/preprocessors/bert_preprocessor.py",
"repo_id": "ContextualSP",
"token_count": 528
}
| 263 |
import nltk
from .unit import Unit
class StopRemoval(Unit):
"""
Process unit to remove stop words.
Example:
>>> unit = StopRemoval()
>>> unit.transform(['a', 'the', 'test'])
['test']
>>> type(unit.stopwords)
<class 'list'>
"""
def __init__(self, lang: str = 'english'):
"""Initialization."""
self._lang = lang
self._stop = nltk.corpus.stopwords.words(self._lang)
def transform(self, input_: list) -> list:
"""
        Remove stopwords from a list of tokenized tokens.
        :param input_: list of tokenized tokens.
        :return: list of tokenized tokens without stopwords.
"""
return [token
for token
in input_
if token not in self._stop]
@property
def stopwords(self) -> list:
"""
        Get stopwords based on the language set at initialization.
        :return: list of stop words.
"""
return self._stop
|
ContextualSP/poset_decoding/traversal_path_prediction/MatchZoo-py/matchzoo/preprocessors/units/stop_removal.py/0
|
{
"file_path": "ContextualSP/poset_decoding/traversal_path_prediction/MatchZoo-py/matchzoo/preprocessors/units/stop_removal.py",
"repo_id": "ContextualSP",
"token_count": 478
}
| 264 |
import inspect
def list_recursive_concrete_subclasses(base):
"""List all concrete subclasses of `base` recursively."""
return _filter_concrete(_bfs(base))
def _filter_concrete(classes):
return list(filter(lambda c: not inspect.isabstract(c), classes))
def _bfs(base):
return base.__subclasses__() + sum([
_bfs(subclass)
for subclass in base.__subclasses__()
], [])
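# Illustration (not part of the original module): given `class A(Base)` and
# an abstract `class B(Base)` (i.e. one with unimplemented abstract methods),
# `list_recursive_concrete_subclasses(Base)` walks the full subclass tree
# breadth-first and keeps only the non-abstract classes, e.g. [A].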
|
ContextualSP/poset_decoding/traversal_path_prediction/MatchZoo-py/matchzoo/utils/list_recursive_subclasses.py/0
|
{
"file_path": "ContextualSP/poset_decoding/traversal_path_prediction/MatchZoo-py/matchzoo/utils/list_recursive_subclasses.py",
"repo_id": "ContextualSP",
"token_count": 152
}
| 265 |
import pytest
from matchzoo.engine.param import Param
from matchzoo.engine.param_table import ParamTable
from matchzoo.engine.hyper_spaces import quniform
@pytest.fixture
def param_table():
params = ParamTable()
params.add(Param('ham', 'Parma Ham'))
return params
def test_get(param_table):
assert param_table['ham'] == 'Parma Ham'
def test_set(param_table):
new_param = Param('egg', 'Over Easy')
param_table.set('egg', new_param)
assert 'egg' in param_table.keys()
def test_keys(param_table):
assert 'ham' in param_table.keys()
def test_hyper_space(param_table):
new_param = Param(
name='my_param',
value=1,
hyper_space=quniform(low=1, high=5)
)
param_table.add(new_param)
hyper_space = param_table.hyper_space
assert hyper_space
|
ContextualSP/poset_decoding/traversal_path_prediction/MatchZoo-py/tests/engine/test_param_table.py/0
|
{
"file_path": "ContextualSP/poset_decoding/traversal_path_prediction/MatchZoo-py/tests/engine/test_param_table.py",
"repo_id": "ContextualSP",
"token_count": 320
}
| 266 |
<jupyter_start><jupyter_code>%run init.ipynb
preprocessor = mz.models.ArcI.get_default_preprocessor(
filter_mode='df',
filter_low_freq=2,
)
train_pack_processed = preprocessor.fit_transform(train_pack_raw)
dev_pack_processed = preprocessor.transform(dev_pack_raw)
test_pack_processed = preprocessor.transform(test_pack_raw)
preprocessor.context
glove_embedding = mz.datasets.embeddings.load_glove_embedding(dimension=300)
term_index = preprocessor.context['vocab_unit'].state['term_index']
embedding_matrix = glove_embedding.build_matrix(term_index)
l2_norm = np.sqrt((embedding_matrix * embedding_matrix).sum(axis=1))
embedding_matrix = embedding_matrix / l2_norm[:, np.newaxis]
trainset = mz.dataloader.Dataset(
data_pack=train_pack_processed,
mode='pair',
num_dup=2,
num_neg=1
)
testset = mz.dataloader.Dataset(
data_pack=test_pack_processed
)
padding_callback = mz.models.ArcI.get_default_padding_callback(
fixed_length_left=10,
fixed_length_right=100,
pad_word_value=0,
pad_word_mode='pre'
)
trainloader = mz.dataloader.DataLoader(
dataset=trainset,
batch_size=20,
stage='train',
resample=True,
sort=False,
callback=padding_callback
)
testloader = mz.dataloader.DataLoader(
dataset=testset,
batch_size=20,
stage='dev',
callback=padding_callback
)
model = mz.models.ArcI()
model.params['task'] = ranking_task
model.params['embedding'] = embedding_matrix
model.params['left_length'] = 10
model.params['right_length'] = 100
model.params['left_filters'] = [128]
model.params['left_kernel_sizes'] = [3]
model.params['left_pool_sizes'] = [4]
model.params['right_filters'] = [128]
model.params['right_kernel_sizes'] = [3]
model.params['right_pool_sizes'] = [4]
model.params['conv_activation_func'] = 'relu'
model.params['mlp_num_layers'] = 1
model.params['mlp_num_units'] = 100
model.params['mlp_num_fan_out'] = 1
model.params['mlp_activation_func'] = 'relu'
model.params['dropout_rate'] = 0.9
model.build()
print(model)
print('Trainable params: ', sum(p.numel() for p in model.parameters() if p.requires_grad))
optimizer = torch.optim.Adadelta(model.parameters())
trainer = mz.trainers.Trainer(
model=model,
optimizer=optimizer,
trainloader=trainloader,
validloader=testloader,
validate_interval=None,
epochs=10
)
trainer.run()<jupyter_output><empty_output>
|
ContextualSP/poset_decoding/traversal_path_prediction/MatchZoo-py/tutorials/ranking/arci.ipynb/0
|
{
"file_path": "ContextualSP/poset_decoding/traversal_path_prediction/MatchZoo-py/tutorials/ranking/arci.ipynb",
"repo_id": "ContextualSP",
"token_count": 939
}
| 267 |
{
"aggregation_loss_weight": 1.0,
"aggregation_temperature": 1.0,
"allow_empty_column_selection": false,
"answer_loss_cutoff": null,
"answer_loss_importance": 1.0,
"architectures": [
"TapasModel"
],
"attention_probs_dropout_prob": 0.0,
"average_approximation_function": "ratio",
"average_logits_per_cell": false,
"cell_selection_preference": null,
"disable_per_token_loss": false,
"gradient_checkpointing": false,
"hidden_act": "gelu",
"hidden_dropout_prob": 0.07,
"hidden_size": 1024,
"huber_loss_delta": null,
"init_cell_selection_weights_to_zero": false,
"initializer_range": 0.02,
"intermediate_size": 4096,
"layer_norm_eps": 1e-12,
"max_num_columns": 32,
"max_num_rows": 64,
"max_position_embeddings": 1024,
"model_type": "tapas",
"num_aggregation_labels": 0,
"num_attention_heads": 16,
"num_hidden_layers": 24,
"pad_token_id": 0,
"positive_label_weight": 10.0,
"reset_position_index_per_cell": true,
"select_one_column": true,
"softmax_temperature": 1.0,
"type_vocab_size": [
3,
256,
256,
2,
256,
256,
10
],
"type_vocab_sizes": [
3,
256,
256,
2,
256,
256,
10
],
"use_answer_as_supervision": null,
"use_gumbel_for_aggregation": false,
"use_gumbel_for_cells": false,
"use_normalized_answer_loss": false,
"vocab_size": 30522
}
|
ContextualSP/robustness_of_text_to_sql/CTA/tapas-torch/tapas_retrieval/tapas_nq_hn_retriever_large_table/config.json/0
|
{
"file_path": "ContextualSP/robustness_of_text_to_sql/CTA/tapas-torch/tapas_retrieval/tapas_nq_hn_retriever_large_table/config.json",
"repo_id": "ContextualSP",
"token_count": 716
}
| 268 |
set seed=1
set config_file=train_configs/concat.none.jsonnet
set model_file=checkpoints_cosql/cosql_concat_none_model
set tables_file=dataset_cosql/tables.json
set database_path=dataset_cosql/database
set dataset_path=dataset_cosql
set train_data_path=dataset_cosql/train.json
set validation_data_path=dataset_cosql/dev.json
set pretrained_file=glove/glove.twitter.27B.100d.txt
allennlp train -s %model_file% %config_file% ^
--include-package dataset_reader.sparc_reader ^
--include-package models.sparc_parser ^
-o {"""model.serialization_dir""":"""%model_file%""","""random_seed""":"""%seed%""","""numpy_seed""":"""%seed%""","""pytorch_seed""":"""%seed%""","""dataset_reader.tables_file""":"""%tables_file%""","""dataset_reader.database_path""":"""%database_path%""","""train_data_path""":"""%train_data_path%""","""validation_data_path""":"""%validation_data_path%""","""model.text_embedder.tokens.pretrained_file""":"""%pretrained_file%""","""model.dataset_path""":"""%dataset_path%"""}
|
ContextualSP/semantic_parsing_in_context/bash_files/windows/train_cosql.bat/0
|
{
"file_path": "ContextualSP/semantic_parsing_in_context/bash_files/windows/train_cosql.bat",
"repo_id": "ContextualSP",
"token_count": 377
}
| 269 |
import json
import shutil
import sys
from allennlp.commands import main
if __name__ == '__main__':
serialization_dir = "checkpoints/debug_model"
config_file = "train_configs_bert/concat.none.mem.jsonnet"
overrides = json.dumps({
"dataset_reader.tables_file": "dataset_sparc/tables.json",
"dataset_reader.database_path": "dataset_sparc/database",
"train_data_path": "dataset_sparc/train.json",
"validation_data_path": "dataset_sparc/dev.json",
"model.dataset_path": "dataset_sparc",
"model.serialization_dir": serialization_dir,
})
# Training will fail if the serialization directory already
# has stuff in it. If you are running the same training loop
# over and over again for debugging purposes, it will.
# Hence we wipe it out in advance.
# BE VERY CAREFUL NOT TO DO THIS FOR ACTUAL TRAINING!
shutil.rmtree(serialization_dir, ignore_errors=True)
# in debug mode.
sys.argv = [
"allennlp", # command name, not used by main
"train",
config_file,
"-s", serialization_dir,
"-f",
"--include-package", "dataset_reader.sparc_reader",
"--include-package", "models.sparc_parser",
"-o", overrides
]
main()
|
ContextualSP/semantic_parsing_in_context/debug.py/0
|
{
"file_path": "ContextualSP/semantic_parsing_in_context/debug.py",
"repo_id": "ContextualSP",
"token_count": 529
}
| 270 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import os
from typing import Dict, List
import matplotlib
import torch
from allennlp.data.vocabulary import Vocabulary
from tensorboardX import SummaryWriter
matplotlib.use('agg', force=True)  # the `warn` kwarg was removed in newer matplotlib
EMOJI_CORRECT = "😋"
EMOJI_ERROR = "😡"
class Visualizer(object):
def __init__(self, summary_dir, validation_size, vocab: Vocabulary):
"""
        :param summary_dir: folder to store the tensorboardX log files
        :param validation_size: size of the validation set, used to map the
            global step onto a dev case index and dev step
"""
if not os.path.exists(summary_dir):
os.makedirs(summary_dir)
self.log_writer = SummaryWriter(summary_dir)
self.validation_size = validation_size
self.global_step = 0
self.ind_to_token = vocab.get_token_from_index
# define template
self.sql_template = '**Utterance** : {0} \n\n**GroundTruth**: {3}\n\n{1} **SQL**: {2}'
def log_sql(self, inter_utterance: Dict[str, torch.LongTensor],
judge_result: List[int],
ground_truth: List[str],
encoder_mask: torch.LongTensor,
inter_sql: List[str]):
"""
        This method logs intermediate utterances and their predicted SQL into TensorBoard.
"""
logging_strs = []
if 'tokens' in inter_utterance:
inter_tokens = inter_utterance['tokens']
name_space = 'tokens'
else:
inter_tokens = inter_utterance['bert']
name_space = 'bert'
for inter_ind, token_seq in enumerate(inter_tokens):
# fetch the actual sequence length and convert them into token str
token_len = encoder_mask[inter_ind].sum().long().data.cpu().item()
origin_tokens = [self.ind_to_token(ind, name_space) for ind in token_seq[:token_len].data.cpu().numpy()]
# original string
utterance_str = ' '.join(origin_tokens)
# segment ids logging
sql_str = ' , '.join(inter_sql[inter_ind])
emoji_str = EMOJI_CORRECT if judge_result[inter_ind] == 1 else EMOJI_ERROR
# record the actual translating length for avoiding extra logging
logging_str = self.sql_template.format(utterance_str, emoji_str, sql_str, ground_truth[inter_ind])
logging_strs.append(logging_str)
# if not anyone, log into the EMPTY
if len(logging_strs) == 0:
logging_strs.append('*EMPTY*')
# merge multiple segment
logging_str = ('\n\n' + '=' * 120 + '\n\n').join(logging_strs)
dev_case = self.global_step % self.validation_size
dev_step = self.global_step // self.validation_size
self.log_writer.add_text(f'{dev_case}-th Latent Interaction Text', logging_str, global_step=dev_step)
def update_global_step(self):
"""
Update global step for logging
:return:
"""
self.global_step += 1
|
ContextualSP/semantic_parsing_in_context/models/visualizer.py/0
|
{
"file_path": "ContextualSP/semantic_parsing_in_context/models/visualizer.py",
"repo_id": "ContextualSP",
"token_count": 1318
}
| 271 |
from easydict import EasyDict as edict
import yaml
cfg = edict()
def _edict2dict(dest_dict, src_edict):
if isinstance(dest_dict, dict) and isinstance(src_edict, dict):
for k, v in src_edict.items():
if not isinstance(v, edict):
dest_dict[k] = v
else:
dest_dict[k] = {}
_edict2dict(dest_dict[k], v)
else:
return
def gen_config(config_file):
cfg_dict = {}
_edict2dict(cfg_dict, cfg)
with open(config_file, 'w') as f:
yaml.dump(cfg_dict, f, default_flow_style=False)
def _update_config(base_cfg, exp_cfg):
if isinstance(base_cfg, edict) and isinstance(exp_cfg, edict):
for k, v in exp_cfg.items():
base_cfg[k] = v
else:
return
def update_config_from_file(filename):
exp_config = None
with open(filename) as f:
exp_config = edict(yaml.safe_load(f))
_update_config(cfg, exp_config)
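# Usage sketch (an illustration, not part of the original module): given a
# YAML file `experiment.yaml` containing, say,
#
#     SUPERNET:
#       EMBED_DIM: 640
#
# the global `cfg` can be populated and later snapshotted:
#
#     update_config_from_file('experiment.yaml')
#     print(cfg.SUPERNET.EMBED_DIM)   # -> 640
#     gen_config('snapshot.yaml')     # dump the current cfg back to disk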
|
Cream/AutoFormer/lib/config.py/0
|
{
"file_path": "Cream/AutoFormer/lib/config.py",
"repo_id": "Cream",
"token_count": 470
}
| 272 |
import argparse
import datetime
import numpy as np
import time
import torch
import torch.backends.cudnn as cudnn
import json
import yaml
from pathlib import Path
from timm.data import Mixup
from timm.models import create_model
from timm.loss import LabelSmoothingCrossEntropy, SoftTargetCrossEntropy
from timm.scheduler import create_scheduler
from timm.optim import create_optimizer
from timm.utils import NativeScaler
from lib.datasets import build_dataset
from supernet_engine import train_one_epoch, evaluate
from lib.samplers import RASampler
from lib import utils
from lib.config import cfg, update_config_from_file
from model.supernet_transformer import Vision_TransformerSuper
def get_args_parser():
parser = argparse.ArgumentParser('AutoFormer training and evaluation script', add_help=False)
parser.add_argument('--batch-size', default=64, type=int)
parser.add_argument('--epochs', default=300, type=int)
# config file
    parser.add_argument('--cfg', help='experiment config file name', required=True, type=str)
# custom parameters
    parser.add_argument('--platform', default='pai', type=str, choices=['itp', 'pai', 'aml'],
                        help='Name of the platform to run on')
parser.add_argument('--teacher_model', default='', type=str,
help='Name of teacher model to train')
parser.add_argument('--relative_position', action='store_true')
parser.add_argument('--gp', action='store_true')
parser.add_argument('--change_qkv', action='store_true')
parser.add_argument('--max_relative_position', type=int, default=14, help='max distance in relative position embedding')
# Model parameters
parser.add_argument('--model', default='', type=str, metavar='MODEL',
help='Name of model to train')
# AutoFormer config
parser.add_argument('--mode', type=str, default='super', choices=['super', 'retrain'], help='mode of AutoFormer')
parser.add_argument('--input-size', default=224, type=int)
parser.add_argument('--patch_size', default=16, type=int)
parser.add_argument('--drop', type=float, default=0.0, metavar='PCT',
help='Dropout rate (default: 0.)')
parser.add_argument('--drop-path', type=float, default=0.1, metavar='PCT',
help='Drop path rate (default: 0.1)')
parser.add_argument('--drop-block', type=float, default=None, metavar='PCT',
help='Drop block rate (default: None)')
parser.add_argument('--model-ema', action='store_true')
parser.add_argument('--no-model-ema', action='store_false', dest='model_ema')
# parser.set_defaults(model_ema=True)
parser.add_argument('--model-ema-decay', type=float, default=0.99996, help='')
parser.add_argument('--model-ema-force-cpu', action='store_true', default=False, help='')
parser.add_argument('--rpe_type', type=str, default='bias', choices=['bias', 'direct'])
parser.add_argument('--post_norm', action='store_true')
parser.add_argument('--no_abs_pos', action='store_true')
# Optimizer parameters
parser.add_argument('--opt', default='adamw', type=str, metavar='OPTIMIZER',
                        help='Optimizer (default: "adamw")')
parser.add_argument('--opt-eps', default=1e-8, type=float, metavar='EPSILON',
help='Optimizer Epsilon (default: 1e-8)')
parser.add_argument('--opt-betas', default=None, type=float, nargs='+', metavar='BETA',
help='Optimizer Betas (default: None, use opt default)')
parser.add_argument('--clip-grad', type=float, default=None, metavar='NORM',
help='Clip gradient norm (default: None, no clipping)')
parser.add_argument('--momentum', type=float, default=0.9, metavar='M',
help='SGD momentum (default: 0.9)')
parser.add_argument('--weight-decay', type=float, default=0.05,
help='weight decay (default: 0.05)')
# Learning rate schedule parameters
parser.add_argument('--sched', default='cosine', type=str, metavar='SCHEDULER',
                        help='LR scheduler (default: "cosine")')
parser.add_argument('--lr', type=float, default=5e-4, metavar='LR',
help='learning rate (default: 5e-4)')
parser.add_argument('--lr-noise', type=float, nargs='+', default=None, metavar='pct, pct',
help='learning rate noise on/off epoch percentages')
parser.add_argument('--lr-noise-pct', type=float, default=0.67, metavar='PERCENT',
help='learning rate noise limit percent (default: 0.67)')
parser.add_argument('--lr-noise-std', type=float, default=1.0, metavar='STDDEV',
help='learning rate noise std-dev (default: 1.0)')
parser.add_argument('--warmup-lr', type=float, default=1e-6, metavar='LR',
help='warmup learning rate (default: 1e-6)')
parser.add_argument('--min-lr', type=float, default=1e-5, metavar='LR',
help='lower lr bound for cyclic schedulers that hit 0 (1e-5)')
parser.add_argument('--lr-power', type=float, default=1.0,
help='power of the polynomial lr scheduler')
parser.add_argument('--decay-epochs', type=float, default=30, metavar='N',
help='epoch interval to decay LR')
parser.add_argument('--warmup-epochs', type=int, default=5, metavar='N',
help='epochs to warmup LR, if scheduler supports')
parser.add_argument('--cooldown-epochs', type=int, default=10, metavar='N',
help='epochs to cooldown LR at min_lr, after cyclic schedule ends')
parser.add_argument('--patience-epochs', type=int, default=10, metavar='N',
                        help='patience epochs for Plateau LR scheduler (default: 10)')
parser.add_argument('--decay-rate', '--dr', type=float, default=0.1, metavar='RATE',
help='LR decay rate (default: 0.1)')
# Augmentation parameters
parser.add_argument('--color-jitter', type=float, default=0.4, metavar='PCT',
help='Color jitter factor (default: 0.4)')
    parser.add_argument('--aa', type=str, default='rand-m9-mstd0.5-inc1', metavar='NAME',
                        help='Use AutoAugment policy. "v0" or "original". '
                             '(default: rand-m9-mstd0.5-inc1)')
parser.add_argument('--smoothing', type=float, default=0.1, help='Label smoothing (default: 0.1)')
parser.add_argument('--train-interpolation', type=str, default='bicubic',
help='Training interpolation (random, bilinear, bicubic default: "bicubic")')
parser.add_argument('--repeated-aug', action='store_true')
parser.add_argument('--no-repeated-aug', action='store_false', dest='repeated_aug')
parser.set_defaults(repeated_aug=True)
# * Random Erase params
parser.add_argument('--reprob', type=float, default=0.25, metavar='PCT',
help='Random erase prob (default: 0.25)')
parser.add_argument('--remode', type=str, default='pixel',
help='Random erase mode (default: "pixel")')
parser.add_argument('--recount', type=int, default=1,
help='Random erase count (default: 1)')
parser.add_argument('--resplit', action='store_true', default=False,
help='Do not random erase first (clean) augmentation split')
# * Mixup params
parser.add_argument('--mixup', type=float, default=0.8,
help='mixup alpha, mixup enabled if > 0. (default: 0.8)')
parser.add_argument('--cutmix', type=float, default=1.0,
help='cutmix alpha, cutmix enabled if > 0. (default: 1.0)')
parser.add_argument('--cutmix-minmax', type=float, nargs='+', default=None,
help='cutmix min/max ratio, overrides alpha and enables cutmix if set (default: None)')
parser.add_argument('--mixup-prob', type=float, default=1.0,
help='Probability of performing mixup or cutmix when either/both is enabled')
parser.add_argument('--mixup-switch-prob', type=float, default=0.5,
help='Probability of switching to cutmix when both mixup and cutmix enabled')
parser.add_argument('--mixup-mode', type=str, default='batch',
help='How to apply mixup/cutmix params. Per "batch", "pair", or "elem"')
# Dataset parameters
parser.add_argument('--data-path', default='./data/imagenet/', type=str,
help='dataset path')
parser.add_argument('--data-set', default='IMNET', choices=['CIFAR', 'IMNET', 'INAT', 'INAT19'],
type=str, help='Image Net dataset path')
parser.add_argument('--inat-category', default='name',
choices=['kingdom', 'phylum', 'class', 'order', 'supercategory', 'family', 'genus', 'name'],
type=str, help='semantic granularity')
parser.add_argument('--output_dir', default='./',
help='path where to save, empty for no saving')
parser.add_argument('--device', default='cuda',
help='device to use for training / testing')
parser.add_argument('--seed', default=0, type=int)
parser.add_argument('--resume', default='', help='resume from checkpoint')
parser.add_argument('--start_epoch', default=0, type=int, metavar='N',
help='start epoch')
parser.add_argument('--eval', action='store_true', help='Perform evaluation only')
parser.add_argument('--num_workers', default=10, type=int)
parser.add_argument('--dist-eval', action='store_true', default=False, help='Enabling distributed evaluation')
parser.add_argument('--pin-mem', action='store_true',
help='Pin CPU memory in DataLoader for more efficient (sometimes) transfer to GPU.')
parser.add_argument('--no-pin-mem', action='store_false', dest='pin_mem',
help='')
parser.set_defaults(pin_mem=True)
# distributed training parameters
parser.add_argument('--world_size', default=1, type=int,
help='number of distributed processes')
parser.add_argument('--dist_url', default='env://', help='url used to set up distributed training')
parser.add_argument('--amp', action='store_true')
parser.add_argument('--no-amp', action='store_false', dest='amp')
parser.set_defaults(amp=True)
return parser
def main(args):
utils.init_distributed_mode(args)
update_config_from_file(args.cfg)
print(args)
args_text = yaml.safe_dump(args.__dict__, default_flow_style=False)
device = torch.device(args.device)
# fix the seed for reproducibility
seed = args.seed + utils.get_rank()
torch.manual_seed(seed)
np.random.seed(seed)
# random.seed(seed)
cudnn.benchmark = True
dataset_train, args.nb_classes = build_dataset(is_train=True, args=args)
dataset_val, _ = build_dataset(is_train=False, args=args)
if args.distributed:
num_tasks = utils.get_world_size()
global_rank = utils.get_rank()
if args.repeated_aug:
sampler_train = RASampler(
dataset_train, num_replicas=num_tasks, rank=global_rank, shuffle=True
)
else:
sampler_train = torch.utils.data.DistributedSampler(
dataset_train, num_replicas=num_tasks, rank=global_rank, shuffle=True
)
if args.dist_eval:
if len(dataset_val) % num_tasks != 0:
print(
'Warning: Enabling distributed evaluation with an eval dataset not divisible by process number. '
'This will slightly alter validation results as extra duplicate entries are added to achieve '
'equal num of samples per-process.')
sampler_val = torch.utils.data.DistributedSampler(
dataset_val, num_replicas=num_tasks, rank=global_rank, shuffle=False)
else:
sampler_val = torch.utils.data.SequentialSampler(dataset_val)
else:
sampler_val = torch.utils.data.SequentialSampler(dataset_val)
sampler_train = torch.utils.data.RandomSampler(dataset_train)
data_loader_train = torch.utils.data.DataLoader(
dataset_train, sampler=sampler_train,
batch_size=args.batch_size,
num_workers=args.num_workers,
pin_memory=args.pin_mem,
drop_last=True,
)
data_loader_val = torch.utils.data.DataLoader(
dataset_val, batch_size=int(2 * args.batch_size),
sampler=sampler_val, num_workers=args.num_workers,
pin_memory=args.pin_mem, drop_last=False
)
mixup_fn = None
mixup_active = args.mixup > 0 or args.cutmix > 0. or args.cutmix_minmax is not None
if mixup_active:
mixup_fn = Mixup(
mixup_alpha=args.mixup, cutmix_alpha=args.cutmix, cutmix_minmax=args.cutmix_minmax,
prob=args.mixup_prob, switch_prob=args.mixup_switch_prob, mode=args.mixup_mode,
label_smoothing=args.smoothing, num_classes=args.nb_classes)
print(f"Creating SuperVisionTransformer")
print(cfg)
model = Vision_TransformerSuper(img_size=args.input_size,
patch_size=args.patch_size,
embed_dim=cfg.SUPERNET.EMBED_DIM, depth=cfg.SUPERNET.DEPTH,
num_heads=cfg.SUPERNET.NUM_HEADS,mlp_ratio=cfg.SUPERNET.MLP_RATIO,
qkv_bias=True, drop_rate=args.drop,
drop_path_rate=args.drop_path,
gp=args.gp,
num_classes=args.nb_classes,
max_relative_position=args.max_relative_position,
relative_position=args.relative_position,
change_qkv=args.change_qkv, abs_pos=not args.no_abs_pos)
choices = {'num_heads': cfg.SEARCH_SPACE.NUM_HEADS, 'mlp_ratio': cfg.SEARCH_SPACE.MLP_RATIO,
'embed_dim': cfg.SEARCH_SPACE.EMBED_DIM , 'depth': cfg.SEARCH_SPACE.DEPTH}
model.to(device)
if args.teacher_model:
teacher_model = create_model(
args.teacher_model,
pretrained=True,
num_classes=args.nb_classes,
)
teacher_model.to(device)
teacher_loss = LabelSmoothingCrossEntropy(smoothing=args.smoothing)
else:
teacher_model = None
teacher_loss = None
model_ema = None
model_without_ddp = model
if args.distributed:
model = torch.nn.parallel.DistributedDataParallel(model, device_ids=[args.gpu], find_unused_parameters=True)
model_without_ddp = model.module
n_parameters = sum(p.numel() for p in model.parameters() if p.requires_grad)
print('number of params:', n_parameters)
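    # Linear LR scaling (descriptive note, assuming the DeiT convention used
    # here): the base lr is defined for a total batch of 512, so e.g.
    # batch_size=64 on 8 GPUs (64 * 8 = 512) keeps lr at its base value,
    # while 64 on a single GPU scales 5e-4 down to 5e-4 * 64 / 512 = 6.25e-5.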
linear_scaled_lr = args.lr * args.batch_size * utils.get_world_size() / 512.0
args.lr = linear_scaled_lr
optimizer = create_optimizer(args, model_without_ddp)
loss_scaler = NativeScaler()
lr_scheduler, _ = create_scheduler(args, optimizer)
# criterion = LabelSmoothingCrossEntropy()
if args.mixup > 0.:
# smoothing is handled with mixup label transform
criterion = SoftTargetCrossEntropy()
elif args.smoothing:
criterion = LabelSmoothingCrossEntropy(smoothing=args.smoothing)
else:
criterion = torch.nn.CrossEntropyLoss()
output_dir = Path(args.output_dir)
if not output_dir.exists():
output_dir.mkdir(parents=True)
# save config for later experiments
with open(output_dir / "config.yaml", 'w') as f:
f.write(args_text)
if args.resume:
if args.resume.startswith('https'):
checkpoint = torch.hub.load_state_dict_from_url(
args.resume, map_location='cpu', check_hash=True)
else:
checkpoint = torch.load(args.resume, map_location='cpu')
model_without_ddp.load_state_dict(checkpoint['model'])
if not args.eval and 'optimizer' in checkpoint and 'lr_scheduler' in checkpoint and 'epoch' in checkpoint:
optimizer.load_state_dict(checkpoint['optimizer'])
lr_scheduler.load_state_dict(checkpoint['lr_scheduler'])
args.start_epoch = checkpoint['epoch'] + 1
if 'scaler' in checkpoint:
loss_scaler.load_state_dict(checkpoint['scaler'])
if args.model_ema:
utils._load_checkpoint_for_ema(model_ema, checkpoint['model_ema'])
retrain_config = None
if args.mode == 'retrain' and "RETRAIN" in cfg:
retrain_config = {'layer_num': cfg.RETRAIN.DEPTH, 'embed_dim': [cfg.RETRAIN.EMBED_DIM]*cfg.RETRAIN.DEPTH,
'num_heads': cfg.RETRAIN.NUM_HEADS,'mlp_ratio': cfg.RETRAIN.MLP_RATIO}
if args.eval:
test_stats = evaluate(data_loader_val, model, device, mode = args.mode, retrain_config=retrain_config)
print(f"Accuracy of the network on the {len(dataset_val)} test images: {test_stats['acc1']:.1f}%")
return
print("Start training")
start_time = time.time()
max_accuracy = 0.0
for epoch in range(args.start_epoch, args.epochs):
if args.distributed:
data_loader_train.sampler.set_epoch(epoch)
train_stats = train_one_epoch(
model, criterion, data_loader_train,
optimizer, device, epoch, loss_scaler,
args.clip_grad, model_ema, mixup_fn,
amp=args.amp, teacher_model=teacher_model,
teach_loss=teacher_loss,
choices=choices, mode = args.mode, retrain_config=retrain_config,
)
lr_scheduler.step(epoch)
if args.output_dir:
checkpoint_paths = [output_dir / 'checkpoint.pth']
for checkpoint_path in checkpoint_paths:
utils.save_on_master({
'model': model_without_ddp.state_dict(),
'optimizer': optimizer.state_dict(),
'lr_scheduler': lr_scheduler.state_dict(),
'epoch': epoch,
# 'model_ema': get_state_dict(model_ema),
'scaler': loss_scaler.state_dict(),
'args': args,
}, checkpoint_path)
test_stats = evaluate(data_loader_val, model, device, amp=args.amp, choices=choices, mode = args.mode, retrain_config=retrain_config)
print(f"Accuracy of the network on the {len(dataset_val)} test images: {test_stats['acc1']:.1f}%")
max_accuracy = max(max_accuracy, test_stats["acc1"])
print(f'Max accuracy: {max_accuracy:.2f}%')
log_stats = {**{f'train_{k}': v for k, v in train_stats.items()},
**{f'test_{k}': v for k, v in test_stats.items()},
'epoch': epoch,
'n_parameters': n_parameters}
if args.output_dir and utils.is_main_process():
with (output_dir / "log.txt").open("a") as f:
f.write(json.dumps(log_stats) + "\n")
total_time = time.time() - start_time
total_time_str = str(datetime.timedelta(seconds=int(total_time)))
print('Training time {}'.format(total_time_str))
if __name__ == '__main__':
parser = argparse.ArgumentParser('AutoFormer training and evaluation script', parents=[get_args_parser()])
args = parser.parse_args()
if args.output_dir:
Path(args.output_dir).mkdir(parents=True, exist_ok=True)
main(args)
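# Typical invocation sketch (an illustration, not part of the original
# script; the config path is a placeholder):
#
#     python -m torch.distributed.launch --nproc_per_node=8 supernet_train.py \
#         --data-path ./data/imagenet/ --gp --change_qkv --relative_position \
#         --mode super --cfg <path/to/supernet.yaml>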
|
Cream/AutoFormer/supernet_train.py/0
|
{
"file_path": "Cream/AutoFormer/supernet_train.py",
"repo_id": "Cream",
"token_count": 8777
}
| 273 |
from .alexnet import AlexNet
from .vgg import VGG, make_vgg_layer
from .resnet import ResNet, make_res_layer
from .weight_init import (constant_init, xavier_init, normal_init,
uniform_init, kaiming_init, caffe2_xavier_init)
__all__ = [
'AlexNet', 'VGG', 'make_vgg_layer', 'ResNet', 'make_res_layer',
'constant_init', 'xavier_init', 'normal_init', 'uniform_init',
'kaiming_init', 'caffe2_xavier_init'
]
|
Cream/CDARTS/CDARTS_detection/mmcv/cnn/__init__.py/0
|
{
"file_path": "Cream/CDARTS/CDARTS_detection/mmcv/cnn/__init__.py",
"repo_id": "Cream",
"token_count": 192
}
| 274 |
import cv2
import numpy as np
def iminvert(img):
"""Invert (negate) an image
Args:
img (ndarray): Image to be inverted.
Returns:
ndarray: The inverted image.
"""
return np.full_like(img, 255) - img
def bgr2gray(img, keepdim=False):
"""Convert a BGR image to grayscale image.
Args:
img (ndarray): The input image.
keepdim (bool): If False (by default), then return the grayscale image
with 2 dims, otherwise 3 dims.
Returns:
ndarray: The converted grayscale image.
"""
out_img = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
if keepdim:
out_img = out_img[..., None]
return out_img
def gray2bgr(img):
"""Convert a grayscale image to BGR image.
Args:
img (ndarray or str): The input image.
Returns:
ndarray: The converted BGR image.
"""
img = img[..., None] if img.ndim == 2 else img
out_img = cv2.cvtColor(img, cv2.COLOR_GRAY2BGR)
return out_img
def convert_color_factory(src, dst):
code = getattr(cv2, 'COLOR_{}2{}'.format(src.upper(), dst.upper()))
def convert_color(img):
out_img = cv2.cvtColor(img, code)
return out_img
convert_color.__doc__ = """Convert a {0} image to {1} image.
Args:
img (ndarray or str): The input image.
Returns:
ndarray: The converted {1} image.
""".format(src.upper(), dst.upper())
return convert_color
bgr2rgb = convert_color_factory('bgr', 'rgb')
rgb2bgr = convert_color_factory('rgb', 'bgr')
bgr2hsv = convert_color_factory('bgr', 'hsv')
hsv2bgr = convert_color_factory('hsv', 'bgr')
bgr2hls = convert_color_factory('bgr', 'hls')
hls2bgr = convert_color_factory('hls', 'bgr')
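# Usage sketch for the factory-generated converters (an illustration, not
# part of the original module):
#
#     import numpy as np
#     img_bgr = np.zeros((4, 4, 3), dtype=np.uint8)
#     img_rgb = bgr2rgb(img_bgr)   # channels reordered BGR -> RGB
#     img_hsv = bgr2hsv(img_bgr)   # uses cv2.COLOR_BGR2HSV under the hood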
|
Cream/CDARTS/CDARTS_detection/mmcv/image/transforms/colorspace.py/0
|
{
"file_path": "Cream/CDARTS/CDARTS_detection/mmcv/image/transforms/colorspace.py",
"repo_id": "Cream",
"token_count": 768
}
| 275 |
from ..utils import master_only
from .hook import Hook
class CheckpointHook(Hook):
def __init__(self,
interval=-1,
save_optimizer=True,
out_dir=None,
**kwargs):
self.interval = interval
self.save_optimizer = save_optimizer
self.out_dir = out_dir
self.args = kwargs
@master_only
def after_train_epoch(self, runner):
if not self.every_n_epochs(runner, self.interval):
return
if not self.out_dir:
self.out_dir = runner.work_dir
runner.save_checkpoint(
self.out_dir, save_optimizer=self.save_optimizer, **self.args)
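# Configuration sketch (an illustration, not part of the original module):
# in an mmcv/mmdetection config this hook is usually registered via
# `checkpoint_config`, e.g.
#
#     checkpoint_config = dict(interval=1, save_optimizer=True)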
|
Cream/CDARTS/CDARTS_detection/mmcv/runner/hooks/checkpoint.py/0
|
{
"file_path": "Cream/CDARTS/CDARTS_detection/mmcv/runner/hooks/checkpoint.py",
"repo_id": "Cream",
"token_count": 343
}
| 276 |
import logging
import os
import os.path as osp
import time
import math
import torch
import numpy as np
import mmcv
from . import hooks
from .checkpoint import load_checkpoint, save_checkpoint
from .hooks import (CheckpointHook, Hook, IterTimerHook, LrUpdaterHook,
OptimizerHook, OptimizerArchHook, lr_updater)
from .log_buffer import LogBuffer
from .priority import get_priority
from .utils import get_dist_info, get_host_info, get_time_str, obj_from_dict
class Runner(object):
"""A training helper for PyTorch.
Args:
model (:obj:`torch.nn.Module`): The model to be run.
batch_processor (callable): A callable method that process a data
batch. The interface of this method should be
`batch_processor(model, data, train_mode) -> dict`
optimizer (dict or :obj:`torch.optim.Optimizer`): If it is a dict,
runner will construct an optimizer according to it.
work_dir (str, optional): The working directory to save checkpoints
and logs.
log_level (int): Logging level.
logger (:obj:`logging.Logger`): Custom logger. If `None`, use the
default logger.
"""
def __init__(self,
model,
batch_processor,
optimizer=None,
optimizer_arch=None,
work_dir=None,
log_level=logging.INFO,
logger=None,
arch_name=None):
assert callable(batch_processor)
self.model = model
self.arch_name = arch_name
if optimizer is not None:
self.optimizer = self.init_optimizer(optimizer)
else:
self.optimizer = None
if optimizer_arch is not None:
self.optimizer_arch = self.init_optimizer(optimizer_arch)
else:
self.optimizer_arch = None
self.batch_processor = batch_processor
# create work_dir
if mmcv.is_str(work_dir):
self.work_dir = osp.abspath(work_dir)
mmcv.mkdir_or_exist(self.work_dir)
elif work_dir is None:
self.work_dir = None
else:
raise TypeError('"work_dir" must be a str or None')
# get model name from the model class
if hasattr(self.model, 'module'):
self._model_name = self.model.module.__class__.__name__
else:
self._model_name = self.model.__class__.__name__
self._rank, self._world_size = get_dist_info()
self.timestamp = get_time_str()
if logger is None:
self.logger = self.init_logger(work_dir, log_level)
else:
self.logger = logger
self.log_buffer = LogBuffer()
self.mode = None
self._hooks = []
self._epoch = 0
self._iter = 0
self._inner_iter = 0
self._max_epochs = 0
self._max_iters = 0
@property
def model_name(self):
"""str: Name of the model, usually the module class name."""
return self._model_name
@property
def rank(self):
"""int: Rank of current process. (distributed training)"""
return self._rank
@property
def world_size(self):
"""int: Number of processes participating in the job.
(distributed training)"""
return self._world_size
@property
def hooks(self):
"""list[:obj:`Hook`]: A list of registered hooks."""
return self._hooks
@property
def epoch(self):
"""int: Current epoch."""
return self._epoch
@property
def iter(self):
"""int: Current iteration."""
return self._iter
@property
def inner_iter(self):
"""int: Iteration in an epoch."""
return self._inner_iter
@property
def max_epochs(self):
"""int: Maximum training epochs."""
return self._max_epochs
@property
def max_iters(self):
"""int: Maximum training iterations."""
return self._max_iters
def init_optimizer(self, optimizer):
"""Init the optimizer.
Args:
optimizer (dict or :obj:`~torch.optim.Optimizer`): Either an
optimizer object or a dict used for constructing the optimizer.
Returns:
:obj:`~torch.optim.Optimizer`: An optimizer object.
Examples:
>>> optimizer = dict(type='SGD', lr=0.01, momentum=0.9)
>>> type(runner.init_optimizer(optimizer))
<class 'torch.optim.sgd.SGD'>
"""
if isinstance(optimizer, dict):
optimizer = obj_from_dict(optimizer, torch.optim,
dict(params=self.model.parameters()))
elif not isinstance(optimizer, torch.optim.Optimizer):
raise TypeError(
'optimizer must be either an Optimizer object or a dict, '
'but got {}'.format(type(optimizer)))
return optimizer
def _add_file_handler(self,
logger,
filename=None,
mode='w',
level=logging.INFO):
# TODO: move this method out of runner
file_handler = logging.FileHandler(filename, mode)
file_handler.setFormatter(
logging.Formatter('%(asctime)s - %(levelname)s - %(message)s'))
file_handler.setLevel(level)
logger.addHandler(file_handler)
return logger
def init_logger(self, log_dir=None, level=logging.INFO):
"""Init the logger.
Args:
log_dir(str, optional): Log file directory. If not specified, no
log file will be used.
level (int or str): See the built-in python logging module.
Returns:
:obj:`~logging.Logger`: Python logger.
"""
logging.basicConfig(
format='%(asctime)s - %(levelname)s - %(message)s', level=level)
logger = logging.getLogger(__name__)
if log_dir and self.rank == 0:
filename = '{}.log'.format(self.timestamp)
log_file = osp.join(log_dir, filename)
self._add_file_handler(logger, log_file, level=level)
return logger
def current_lr(self):
"""Get current learning rates.
Returns:
list: Current learning rate of all param groups.
"""
if self.optimizer is None:
raise RuntimeError(
'lr is not applicable because optimizer does not exist.')
return [group['lr'] for group in self.optimizer.param_groups]
def register_hook(self, hook, priority='NORMAL'):
"""Register a hook into the hook list.
Args:
hook (:obj:`Hook`): The hook to be registered.
priority (int or str or :obj:`Priority`): Hook priority.
Lower value means higher priority.
"""
assert isinstance(hook, Hook)
if hasattr(hook, 'priority'):
raise ValueError('"priority" is a reserved attribute for hooks')
priority = get_priority(priority)
hook.priority = priority
# insert the hook to a sorted list
inserted = False
for i in range(len(self._hooks) - 1, -1, -1):
if priority >= self._hooks[i].priority:
self._hooks.insert(i + 1, hook)
inserted = True
break
if not inserted:
self._hooks.insert(0, hook)
def build_hook(self, args, hook_type=None):
if isinstance(args, Hook):
return args
elif isinstance(args, dict):
assert issubclass(hook_type, Hook)
return hook_type(**args)
else:
raise TypeError('"args" must be either a Hook object'
' or dict, not {}'.format(type(args)))
def call_hook(self, fn_name):
for hook in self._hooks:
getattr(hook, fn_name)(self)
def load_checkpoint(self, filename, map_location='cpu', strict=False):
self.logger.info('load checkpoint from %s', filename)
return load_checkpoint(self.model, filename, map_location, strict,
self.logger)
def save_checkpoint(self,
out_dir,
filename_tmpl='epoch_{}.pth',
save_optimizer=True,
meta=None):
if meta is None:
meta = dict(epoch=self.epoch + 1, iter=self.iter)
else:
meta.update(epoch=self.epoch + 1, iter=self.iter)
filename = filename_tmpl.format(self.epoch + 1)
filepath = osp.join(out_dir, filename)
linkpath = osp.join(out_dir, 'latest.pth')
optimizer = self.optimizer if save_optimizer else None
save_checkpoint(self.model, filepath, optimizer=optimizer, meta=meta)
# use relative symlink
mmcv.symlink(filename, linkpath)
def train(self, data_loader, data_loader_arch, **kwargs):
self.model.train()
self.mode = 'train'
self.data_loader = data_loader
self._max_iters = self._max_epochs * len(data_loader)
self.call_hook('before_train_epoch')
for i, data_batch in enumerate(data_loader):
self._inner_iter = i
self.call_hook('before_train_iter')
outputs = self.batch_processor(
self.model, data_batch, train_mode=True, **kwargs)
if not isinstance(outputs, dict):
raise TypeError('batch_processor() must return a dict')
if 'log_vars' in outputs:
self.log_buffer.update(outputs['log_vars'],
outputs['num_samples'])
self.outputs = outputs
self.call_hook('after_train_iter')
self._iter += 1
self.call_hook('after_train_epoch')
self._epoch += 1
def val(self, data_loader, data_loader_arch, **kwargs):
self.model.eval()
self.mode = 'val'
self.data_loader = data_loader
self.call_hook('before_val_epoch')
for i, data_batch in enumerate(data_loader):
self._inner_iter = i
self.call_hook('before_val_iter')
with torch.no_grad():
outputs = self.batch_processor(
self.model, data_batch, train_mode=False, **kwargs)
if not isinstance(outputs, dict):
raise TypeError('batch_processor() must return a dict')
if 'log_vars' in outputs:
self.log_buffer.update(outputs['log_vars'],
outputs['num_samples'])
self.outputs = outputs
self.call_hook('after_val_iter')
self.call_hook('after_val_epoch')
def resume(self, checkpoint, resume_optimizer=True,
map_location='default'):
if map_location == 'default':
device_id = torch.cuda.current_device()
checkpoint = self.load_checkpoint(
checkpoint,
map_location=lambda storage, loc: storage.cuda(device_id))
else:
checkpoint = self.load_checkpoint(
checkpoint, map_location=map_location)
self._epoch = checkpoint['meta']['epoch']
self._iter = checkpoint['meta']['iter']
if 'optimizer' in checkpoint and resume_optimizer:
self.optimizer.load_state_dict(checkpoint['optimizer'])
self.logger.info('resumed epoch %d, iter %d', self.epoch, self.iter)
def run(self, data_loaders, data_loaders_arch, workflow, max_epochs, **kwargs):
"""Start running.
Args:
data_loaders (list[:obj:`DataLoader`]): Dataloaders for training
and validation.
workflow (list[tuple]): A list of (phase, epochs) to specify the
            running order and epochs. E.g., [('train', 2), ('val', 1)] means
running 2 epochs for training and 1 epoch for validation,
iteratively.
max_epochs (int): Total training epochs.
"""
assert isinstance(data_loaders, list)
assert mmcv.is_list_of(workflow, tuple)
assert len(data_loaders) == len(workflow)
self._max_epochs = max_epochs
work_dir = self.work_dir if self.work_dir is not None else 'NONE'
self.logger.info('Start running, host: %s, work_dir: %s',
get_host_info(), work_dir)
self.logger.info('workflow: %s, max: %d epochs', workflow, max_epochs)
self.call_hook('before_run')
while self.epoch < max_epochs:
for i, flow in enumerate(workflow):
mode, epochs = flow
if isinstance(mode, str): # self.train()
if not hasattr(self, mode):
raise ValueError(
'runner has no method named "{}" to run an epoch'.
format(mode))
epoch_runner = getattr(self, mode)
elif callable(mode): # custom train()
epoch_runner = mode
else:
raise TypeError('mode in workflow must be a str or '
'callable function, not {}'.format(
type(mode)))
for _ in range(epochs):
if mode == 'train' and self.epoch >= max_epochs:
return
if data_loaders_arch is not None:
epoch_runner(data_loaders[i], data_loaders_arch[i], **kwargs)
else:
epoch_runner(data_loaders[i], None, **kwargs)
time.sleep(1) # wait for some hooks like loggers to finish
self.call_hook('after_run')
def register_lr_hooks(self, lr_config):
if isinstance(lr_config, LrUpdaterHook):
self.register_hook(lr_config)
elif isinstance(lr_config, dict):
assert 'policy' in lr_config
# from .hooks import lr_updater
hook_name = lr_config['policy'].title() + 'LrUpdaterHook'
if not hasattr(lr_updater, hook_name):
raise ValueError('"{}" does not exist'.format(hook_name))
hook_cls = getattr(lr_updater, hook_name)
self.register_hook(hook_cls(**lr_config))
else:
raise TypeError('"lr_config" must be either a LrUpdaterHook object'
' or dict, not {}'.format(type(lr_config)))
def register_logger_hooks(self, log_config):
log_interval = log_config['interval']
for info in log_config['hooks']:
logger_hook = obj_from_dict(
info, hooks, default_args=dict(interval=log_interval))
self.register_hook(logger_hook, priority='VERY_LOW')
def register_training_hooks(self,
lr_config,
optimizer_config=None,
optimizer_arch_config=None,
checkpoint_config=None,
log_config=None):
"""Register default hooks for training.
Default hooks include:
- LrUpdaterHook
- OptimizerStepperHook
- CheckpointSaverHook
- IterTimerHook
- LoggerHook(s)
"""
if optimizer_config is None:
optimizer_config = {}
if checkpoint_config is None:
checkpoint_config = {}
self.register_lr_hooks(lr_config)
self.register_hook(self.build_hook(optimizer_config, OptimizerHook))
self.register_hook(self.build_hook(optimizer_arch_config, OptimizerArchHook))
self.register_hook(self.build_hook(checkpoint_config, CheckpointHook))
self.register_hook(IterTimerHook())
if log_config is not None:
self.register_logger_hooks(log_config)
|
Cream/CDARTS/CDARTS_detection/mmcv/runner/runner.py/0
|
{
"file_path": "Cream/CDARTS/CDARTS_detection/mmcv/runner/runner.py",
"repo_id": "Cream",
"token_count": 7773
}
| 277 |
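A toy end-to-end sketch of the Runner API above (the model, data handling and the commented run call are illustrative; hooks would normally be registered before running):

import torch
import torch.nn as nn

model = nn.Linear(2, 1)

def batch_processor(model, data, train_mode):
    # `data` is a (batch, 2) tensor here; real batch processors unpack dicts
    loss = model(data).pow(2).mean()
    return dict(loss=loss,
                log_vars=dict(loss=loss.item()),
                num_samples=data.size(0))

runner = Runner(model, batch_processor,
                optimizer=dict(type='SGD', lr=0.01),  # built via init_optimizer
                work_dir='./work_dir')
# runner.register_training_hooks(lr_config=dict(policy='Step', step=[1]))
# runner.run([train_loader], None, [('train', 1)], max_epochs=1)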
STUFF = "Hi"
import numpy as np
cimport numpy as np
np.import_array()
cdef extern from "flow_warp.hpp":
void FlowWarp(double* img, double* flow1, double* out, const int height, const int width, const int channels, const int filling_value, const int interpolateMode)
def flow_warp_c(np.ndarray[double, ndim=3, mode="c"] img_array not None,
np.ndarray[double, ndim=3, mode="c"] flow_array not None,
int filling_value=0,
int interpolate_mode=1):
out_array = np.zeros_like(img_array)
FlowWarp(<double*> np.PyArray_DATA(img_array),
<double*> np.PyArray_DATA(flow_array),
<double*> np.PyArray_DATA(out_array),
out_array.shape[0],
out_array.shape[1],
out_array.shape[2],
filling_value,
interpolate_mode)
return out_array
|
Cream/CDARTS/CDARTS_detection/mmcv/video/optflow_warp/flow_warp_module.pyx/0
|
{
"file_path": "Cream/CDARTS/CDARTS_detection/mmcv/video/optflow_warp/flow_warp_module.pyx",
"repo_id": "Cream",
"token_count": 412
}
| 278 |
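A usage sketch for the Cython wrapper above (the extension must be compiled first; the identity-warp expectation assumes bilinear interpolation of a zero flow field):

import numpy as np

img = np.ascontiguousarray(np.random.rand(8, 8, 3))   # H x W x C, float64
flow = np.zeros((8, 8, 2), dtype=np.float64)          # zero displacement field
out = flow_warp_c(img, flow, filling_value=0, interpolate_mode=1)
# with zero flow, `out` should reproduce `img` (up to interpolation rounding)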
from __future__ import division
import re
from collections import OrderedDict
import torch
from mmcv.parallel import MMDataParallel, MMDistributedDataParallel
from mmcv.runner import Runner, DistSamplerSeedHook, obj_from_dict
from mmdet import datasets
from mmdet.core import (DistEvalHook, DistOptimizerHook,
DistOptimizerArchHook, Fp16OptimizerHook)
from mmdet.datasets import DATASETS, build_dataloader, build_dataloader_arch
from mmdet.models import RPN
from .env import get_root_logger
def parse_losses(losses):
log_vars = OrderedDict()
for loss_name, loss_value in losses.items():
if isinstance(loss_value, torch.Tensor):
log_vars[loss_name] = loss_value.mean()
elif isinstance(loss_value, list):
log_vars[loss_name] = sum(_loss.mean() for _loss in loss_value)
else:
raise TypeError(
'{} is not a tensor or list of tensors'.format(loss_name))
loss = sum(_value for _key, _value in log_vars.items() if 'loss' in _key)
log_vars['loss'] = loss
for name in log_vars:
log_vars[name] = log_vars[name].item()
return loss, log_vars
def batch_processor(model, data, train_mode, **kwargs):
losses = model(**data)
losses_ = losses[0]
loss_latency = losses[1]
if loss_latency is not None:
losses_['loss_latency'] = loss_latency
loss, log_vars = parse_losses(losses_)
outputs = dict(
loss=loss, log_vars=log_vars, num_samples=len(data['img'].data))
return outputs
def train_detector(model,
dataset,
cfg,
distributed=False,
validate=False,
logger=None):
if logger is None:
logger = get_root_logger(cfg.log_level)
# start training
if distributed:
_dist_train(model, dataset, cfg, validate=validate)
else:
_non_dist_train(model, dataset, cfg, validate=validate)
def build_optimizer(model, optimizer_cfg, optimizer_exclude_arch):
"""Build optimizer from configs.
Args:
model (:obj:`nn.Module`): The model with parameters to be optimized.
optimizer_cfg (dict): The config dict of the optimizer.
Positional fields are:
- type: class name of the optimizer.
- lr: base learning rate.
Optional fields are:
- any arguments of the corresponding optimizer type, e.g.,
weight_decay, momentum, etc.
                - paramwise_options: a dict with 4 accepted fields
                  (bias_lr_mult, bias_decay_mult, norm_decay_mult,
                  offset_lr_mult).
                  `bias_lr_mult` and `bias_decay_mult` will be multiplied to
                  the lr and weight decay respectively for all bias parameters
                  (except for the normalization layers),
                  `norm_decay_mult` will be multiplied to the weight decay
                  for all weight and bias parameters of normalization layers,
                  and `offset_lr_mult` will be multiplied to the lr of
                  deformable-conv offset parameters.
Returns:
torch.optim.Optimizer: The initialized optimizer.
Example:
>>> model = torch.nn.modules.Conv1d(1, 1, 1)
        >>> optimizer_cfg = dict(type='SGD', lr=0.01, momentum=0.9,
        ...                      weight_decay=0.0001)
        >>> optimizer = build_optimizer(model, optimizer_cfg, False)
"""
if hasattr(model, 'module'):
model = model.module
if hasattr(model, 'module'): # For distributed model
model = model.module
optimizer_cfg = optimizer_cfg.copy()
paramwise_options = optimizer_cfg.pop('paramwise_options', None)
# if no paramwise option is specified, just use the global setting
if paramwise_options is None:
if not optimizer_exclude_arch:
params = model.parameters()
else:
params = [p for n, p in model.named_parameters() if 'alpha' not in n]
return obj_from_dict(optimizer_cfg, torch.optim, dict(params=params))
else:
assert isinstance(paramwise_options, dict)
# get base lr and weight decay
base_lr = optimizer_cfg['lr']
base_wd = optimizer_cfg.get('weight_decay', None)
# weight_decay must be explicitly specified if mult is specified
if ('bias_decay_mult' in paramwise_options
or 'norm_decay_mult' in paramwise_options):
assert base_wd is not None
# get param-wise options
bias_lr_mult = paramwise_options.get('bias_lr_mult', 1.)
bias_decay_mult = paramwise_options.get('bias_decay_mult', 1.)
norm_decay_mult = paramwise_options.get('norm_decay_mult', 1.)
        offset_lr_mult = paramwise_options.get('offset_lr_mult', 1.)  # lr multiplier for offset parameters
# set param-wise lr and weight decay
params = []
for name, param in model.named_parameters():
param_group = {'params': [param]}
if not param.requires_grad:
# FP16 training needs to copy gradient/weight between master
# weight copy and model weight, it is convenient to keep all
# parameters here to align with model.parameters()
params.append(param_group)
continue
            # scale the lr of deformable-conv offset parameters
            if 'offset' in name:
                param_group['lr'] = base_lr * offset_lr_mult
# for norm layers, overwrite the weight decay of weight and bias
# TODO: obtain the norm layer prefixes dynamically
            if re.search(r'(bn|gn)(\d+)?\.(weight|bias)', name):
if base_wd is not None:
param_group['weight_decay'] = base_wd * norm_decay_mult
# for other layers, overwrite both lr and weight decay of bias
elif name.endswith('.bias'):
param_group['lr'] = base_lr * bias_lr_mult
if base_wd is not None:
param_group['weight_decay'] = base_wd * bias_decay_mult
# otherwise use the global settings
params.append(param_group)
optimizer_cls = getattr(torch.optim, optimizer_cfg.pop('type'))
return optimizer_cls(params, **optimizer_cfg)
def _dist_train(model, dataset, cfg, validate=False):
# put model on gpus
model = MMDistributedDataParallel(model.cuda())
# build runner
optimizer = build_optimizer(model, cfg.optimizer, cfg.get('optimizer_exclude_arch'))
arch_name = None
optimizer_arch = None
if 'optimizer_arch' in cfg:
raise NotImplementedError
runner = Runner(model, batch_processor, optimizer, optimizer_arch, cfg.work_dir, cfg.log_level, arch_name=arch_name)
# fp16 setting
fp16_cfg = cfg.get('fp16', None)
if fp16_cfg is not None:
optimizer_config = Fp16OptimizerHook(**cfg.optimizer_config,
**fp16_cfg)
else:
optimizer_config = DistOptimizerHook(**cfg.optimizer_config)
optimizer_arch_config = DistOptimizerArchHook(**cfg.optimizer_config)
# register hooks
runner.register_training_hooks(cfg.lr_config, optimizer_config, optimizer_arch_config,
cfg.checkpoint_config, cfg.log_config)
runner.register_hook(DistSamplerSeedHook())
# register eval hooks
if validate:
val_dataset_cfg = cfg.data.val
eval_cfg = cfg.get('evaluation', {})
runner.register_hook(DistEvalHook(val_dataset_cfg, **eval_cfg))
if cfg.resume_from:
runner.resume(cfg.resume_from)
elif cfg.load_from:
runner.load_checkpoint(cfg.load_from)
if 'optimizer_arch' in cfg:
raise NotImplementedError
else:
data_loaders = [
build_dataloader(
dataset,
cfg.data.imgs_per_gpu,
cfg.data.workers_per_gpu,
dist=True)
]
runner.run(data_loaders, None, cfg.workflow, cfg.total_epochs)
def _non_dist_train(model, dataset, cfg, validate=False):
if validate:
raise NotImplementedError('Built-in validation is not implemented '
                                  'yet in non-distributed training. Use '
'distributed training or test.py and '
'*eval.py scripts instead.')
# put model on gpus
model = MMDataParallel(model, device_ids=range(cfg.gpus)).cuda()
# build runner
optimizer = build_optimizer(model, cfg.optimizer, cfg.get('optimizer_exclude_arch'))
arch_name = None
optimizer_arch = None
if 'optimizer_arch' in cfg:
raise NotImplementedError
runner = Runner(model, batch_processor, optimizer, optimizer_arch, cfg.work_dir, cfg.log_level, arch_name=arch_name)
# fp16 setting
fp16_cfg = cfg.get('fp16', None)
if fp16_cfg is not None:
optimizer_config = Fp16OptimizerHook(
**cfg.optimizer_config, **fp16_cfg, distributed=False)
else:
optimizer_config = cfg.optimizer_config
optimizer_arch_config = cfg.optimizer_config
runner.register_training_hooks(cfg.lr_config, optimizer_config, optimizer_arch_config,
cfg.checkpoint_config, cfg.log_config)
if cfg.resume_from:
runner.resume(cfg.resume_from)
elif cfg.load_from:
runner.load_checkpoint(cfg.load_from)
if 'optimizer_arch' in cfg:
raise NotImplementedError
else:
data_loaders = [
build_dataloader(
dataset,
cfg.data.imgs_per_gpu,
cfg.data.workers_per_gpu,
cfg.gpus,
dist=False)
]
runner.run(data_loaders, None, cfg.workflow, cfg.total_epochs)
|
Cream/CDARTS/CDARTS_detection/mmdet/apis/train.py/0
|
{
"file_path": "Cream/CDARTS/CDARTS_detection/mmdet/apis/train.py",
"repo_id": "Cream",
"token_count": 4481
}
| 279 |
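A sketch of a param-wise optimizer config consumed by build_optimizer above (values are illustrative): biases get doubled lr and no weight decay, normalization layers get no weight decay, and deformable-conv offset layers get a scaled lr.

optimizer_cfg = dict(
    type='SGD', lr=0.02, momentum=0.9, weight_decay=0.0001,
    paramwise_options=dict(
        bias_lr_mult=2.,       # lr * 2 for biases outside norm layers
        bias_decay_mult=0.,    # no weight decay on those biases
        norm_decay_mult=0.,    # no weight decay on bn/gn weights and biases
        offset_lr_mult=0.1))   # scaled lr for deformable-conv offsets
# optimizer = build_optimizer(model, optimizer_cfg, optimizer_exclude_arch=False)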
from abc import ABCMeta, abstractmethod
import torch
from .sampling_result import SamplingResult
class BaseSampler(metaclass=ABCMeta):
def __init__(self,
num,
pos_fraction,
neg_pos_ub=-1,
add_gt_as_proposals=True,
**kwargs):
self.num = num
self.pos_fraction = pos_fraction
self.neg_pos_ub = neg_pos_ub
self.add_gt_as_proposals = add_gt_as_proposals
self.pos_sampler = self
self.neg_sampler = self
@abstractmethod
def _sample_pos(self, assign_result, num_expected, **kwargs):
pass
@abstractmethod
def _sample_neg(self, assign_result, num_expected, **kwargs):
pass
def sample(self,
assign_result,
bboxes,
gt_bboxes,
gt_labels=None,
**kwargs):
"""Sample positive and negative bboxes.
This is a simple implementation of bbox sampling given candidates,
        assignment results and ground truth bboxes.
Args:
assign_result (:obj:`AssignResult`): Bbox assigning results.
bboxes (Tensor): Boxes to be sampled from.
gt_bboxes (Tensor): Ground truth bboxes.
gt_labels (Tensor, optional): Class labels of ground truth bboxes.
Returns:
:obj:`SamplingResult`: Sampling result.
"""
bboxes = bboxes[:, :4]
gt_flags = bboxes.new_zeros((bboxes.shape[0], ), dtype=torch.uint8)
if self.add_gt_as_proposals:
bboxes = torch.cat([gt_bboxes, bboxes], dim=0)
assign_result.add_gt_(gt_labels)
gt_ones = bboxes.new_ones(gt_bboxes.shape[0], dtype=torch.uint8)
gt_flags = torch.cat([gt_ones, gt_flags])
num_expected_pos = int(self.num * self.pos_fraction)
pos_inds = self.pos_sampler._sample_pos(
assign_result, num_expected_pos, bboxes=bboxes, **kwargs)
# We found that sampled indices have duplicated items occasionally.
# (may be a bug of PyTorch)
pos_inds = pos_inds.unique()
num_sampled_pos = pos_inds.numel()
num_expected_neg = self.num - num_sampled_pos
if self.neg_pos_ub >= 0:
_pos = max(1, num_sampled_pos)
neg_upper_bound = int(self.neg_pos_ub * _pos)
if num_expected_neg > neg_upper_bound:
num_expected_neg = neg_upper_bound
neg_inds = self.neg_sampler._sample_neg(
assign_result, num_expected_neg, bboxes=bboxes, **kwargs)
neg_inds = neg_inds.unique()
return SamplingResult(pos_inds, neg_inds, bboxes, gt_bboxes,
assign_result, gt_flags)
|
Cream/CDARTS/CDARTS_detection/mmdet/core/bbox/samplers/base_sampler.py/0
|
{
"file_path": "Cream/CDARTS/CDARTS_detection/mmdet/core/bbox/samplers/base_sampler.py",
"repo_id": "Cream",
"token_count": 1360
}
| 280 |
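A minimal concrete sampler sketched against the abstract interface above (random selection; this is illustrative, not the original mmdet RandomSampler, though it follows the same AssignResult convention that gt_inds > 0 marks positives and gt_inds == 0 marks negatives):

import torch

class SimpleRandomSampler(BaseSampler):

    def _sample_pos(self, assign_result, num_expected, **kwargs):
        pos_inds = torch.nonzero(assign_result.gt_inds > 0).squeeze(-1)
        if pos_inds.numel() <= num_expected:
            return pos_inds
        return pos_inds[torch.randperm(pos_inds.numel())[:num_expected]]

    def _sample_neg(self, assign_result, num_expected, **kwargs):
        neg_inds = torch.nonzero(assign_result.gt_inds == 0).squeeze(-1)
        if neg_inds.numel() <= num_expected:
            return neg_inds
        return neg_inds[torch.randperm(neg_inds.numel())[:num_expected]]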
from .decorators import auto_fp16, force_fp32
from .hooks import Fp16OptimizerHook, wrap_fp16_model
__all__ = ['auto_fp16', 'force_fp32', 'Fp16OptimizerHook', 'wrap_fp16_model']
|
Cream/CDARTS/CDARTS_detection/mmdet/core/fp16/__init__.py/0
|
{
"file_path": "Cream/CDARTS/CDARTS_detection/mmdet/core/fp16/__init__.py",
"repo_id": "Cream",
"token_count": 73
}
| 281 |
import logging
import os.path as osp
import tempfile
import mmcv
import numpy as np
from pycocotools.coco import COCO
from pycocotools.cocoeval import COCOeval
from mmdet.core import eval_recalls
from mmdet.utils import print_log
from .custom import CustomDataset
from .registry import DATASETS
@DATASETS.register_module
class CocoDataset(CustomDataset):
CLASSES = ('person', 'bicycle', 'car', 'motorcycle', 'airplane', 'bus',
'train', 'truck', 'boat', 'traffic_light', 'fire_hydrant',
'stop_sign', 'parking_meter', 'bench', 'bird', 'cat', 'dog',
'horse', 'sheep', 'cow', 'elephant', 'bear', 'zebra', 'giraffe',
'backpack', 'umbrella', 'handbag', 'tie', 'suitcase', 'frisbee',
'skis', 'snowboard', 'sports_ball', 'kite', 'baseball_bat',
'baseball_glove', 'skateboard', 'surfboard', 'tennis_racket',
'bottle', 'wine_glass', 'cup', 'fork', 'knife', 'spoon', 'bowl',
'banana', 'apple', 'sandwich', 'orange', 'broccoli', 'carrot',
'hot_dog', 'pizza', 'donut', 'cake', 'chair', 'couch',
'potted_plant', 'bed', 'dining_table', 'toilet', 'tv', 'laptop',
'mouse', 'remote', 'keyboard', 'cell_phone', 'microwave',
'oven', 'toaster', 'sink', 'refrigerator', 'book', 'clock',
'vase', 'scissors', 'teddy_bear', 'hair_drier', 'toothbrush')
def load_annotations(self, ann_file):
self.coco = COCO(ann_file)
self.cat_ids = self.coco.getCatIds()
self.cat2label = {
cat_id: i + 1
for i, cat_id in enumerate(self.cat_ids)
}
self.img_ids = self.coco.getImgIds()
img_infos = []
for i in self.img_ids:
info = self.coco.loadImgs([i])[0]
info['filename'] = info['file_name']
img_infos.append(info)
return img_infos
def get_ann_info(self, idx):
img_id = self.img_infos[idx]['id']
ann_ids = self.coco.getAnnIds(imgIds=[img_id])
ann_info = self.coco.loadAnns(ann_ids)
return self._parse_ann_info(self.img_infos[idx], ann_info)
def _filter_imgs(self, min_size=32):
"""Filter images too small or without ground truths."""
valid_inds = []
ids_with_ann = set(_['image_id'] for _ in self.coco.anns.values())
for i, img_info in enumerate(self.img_infos):
if self.filter_empty_gt and self.img_ids[i] not in ids_with_ann:
continue
if min(img_info['width'], img_info['height']) >= min_size:
valid_inds.append(i)
return valid_inds
def _parse_ann_info(self, img_info, ann_info):
"""Parse bbox and mask annotation.
Args:
            img_info (dict): Info dict of the image (filename, width,
                height, ...).
            ann_info (list[dict]): Annotation info of an image.
Returns:
dict: A dict containing the following keys: bboxes, bboxes_ignore,
labels, masks, seg_map. "masks" are raw annotations and not
decoded into binary masks.
"""
gt_bboxes = []
gt_labels = []
gt_bboxes_ignore = []
gt_masks_ann = []
for i, ann in enumerate(ann_info):
if ann.get('ignore', False):
continue
x1, y1, w, h = ann['bbox']
if ann['area'] <= 0 or w < 1 or h < 1:
continue
bbox = [x1, y1, x1 + w - 1, y1 + h - 1]
if ann.get('iscrowd', False):
gt_bboxes_ignore.append(bbox)
else:
gt_bboxes.append(bbox)
gt_labels.append(self.cat2label[ann['category_id']])
gt_masks_ann.append(ann['segmentation'])
if gt_bboxes:
gt_bboxes = np.array(gt_bboxes, dtype=np.float32)
gt_labels = np.array(gt_labels, dtype=np.int64)
else:
gt_bboxes = np.zeros((0, 4), dtype=np.float32)
gt_labels = np.array([], dtype=np.int64)
if gt_bboxes_ignore:
gt_bboxes_ignore = np.array(gt_bboxes_ignore, dtype=np.float32)
else:
gt_bboxes_ignore = np.zeros((0, 4), dtype=np.float32)
seg_map = img_info['filename'].replace('jpg', 'png')
ann = dict(
bboxes=gt_bboxes,
labels=gt_labels,
bboxes_ignore=gt_bboxes_ignore,
masks=gt_masks_ann,
seg_map=seg_map)
return ann
def xyxy2xywh(self, bbox):
_bbox = bbox.tolist()
return [
_bbox[0],
_bbox[1],
_bbox[2] - _bbox[0] + 1,
_bbox[3] - _bbox[1] + 1,
]
def _proposal2json(self, results):
json_results = []
for idx in range(len(self)):
img_id = self.img_ids[idx]
bboxes = results[idx]
for i in range(bboxes.shape[0]):
data = dict()
data['image_id'] = img_id
data['bbox'] = self.xyxy2xywh(bboxes[i])
data['score'] = float(bboxes[i][4])
data['category_id'] = 1
json_results.append(data)
return json_results
def _det2json(self, results):
json_results = []
for idx in range(len(self)):
img_id = self.img_ids[idx]
result = results[idx]
for label in range(len(result)):
bboxes = result[label]
for i in range(bboxes.shape[0]):
data = dict()
data['image_id'] = img_id
data['bbox'] = self.xyxy2xywh(bboxes[i])
data['score'] = float(bboxes[i][4])
data['category_id'] = self.cat_ids[label]
json_results.append(data)
return json_results
def _segm2json(self, results):
bbox_json_results = []
segm_json_results = []
for idx in range(len(self)):
img_id = self.img_ids[idx]
det, seg = results[idx]
for label in range(len(det)):
# bbox results
bboxes = det[label]
for i in range(bboxes.shape[0]):
data = dict()
data['image_id'] = img_id
data['bbox'] = self.xyxy2xywh(bboxes[i])
data['score'] = float(bboxes[i][4])
data['category_id'] = self.cat_ids[label]
bbox_json_results.append(data)
# segm results
# some detectors use different scores for bbox and mask
if isinstance(seg, tuple):
segms = seg[0][label]
mask_score = seg[1][label]
else:
segms = seg[label]
mask_score = [bbox[4] for bbox in bboxes]
for i in range(bboxes.shape[0]):
data = dict()
data['image_id'] = img_id
data['bbox'] = self.xyxy2xywh(bboxes[i])
data['score'] = float(mask_score[i])
data['category_id'] = self.cat_ids[label]
if isinstance(segms[i]['counts'], bytes):
segms[i]['counts'] = segms[i]['counts'].decode()
data['segmentation'] = segms[i]
segm_json_results.append(data)
return bbox_json_results, segm_json_results
def results2json(self, results, outfile_prefix):
"""Dump the detection results to a json file.
There are 3 types of results: proposals, bbox predictions, mask
predictions, and they have different data types. This method will
automatically recognize the type, and dump them to json files.
Args:
results (list[list | tuple | ndarray]): Testing results of the
dataset.
outfile_prefix (str): The filename prefix of the json files. If the
prefix is "somepath/xxx", the json files will be named
"somepath/xxx.bbox.json", "somepath/xxx.segm.json",
"somepath/xxx.proposal.json".
Returns:
dict[str: str]: Possible keys are "bbox", "segm", "proposal", and
values are corresponding filenames.
"""
result_files = dict()
if isinstance(results[0], list):
json_results = self._det2json(results)
result_files['bbox'] = '{}.{}.json'.format(outfile_prefix, 'bbox')
result_files['proposal'] = '{}.{}.json'.format(
outfile_prefix, 'bbox')
mmcv.dump(json_results, result_files['bbox'])
elif isinstance(results[0], tuple):
json_results = self._segm2json(results)
result_files['bbox'] = '{}.{}.json'.format(outfile_prefix, 'bbox')
result_files['proposal'] = '{}.{}.json'.format(
outfile_prefix, 'bbox')
result_files['segm'] = '{}.{}.json'.format(outfile_prefix, 'segm')
mmcv.dump(json_results[0], result_files['bbox'])
mmcv.dump(json_results[1], result_files['segm'])
elif isinstance(results[0], np.ndarray):
json_results = self._proposal2json(results)
result_files['proposal'] = '{}.{}.json'.format(
outfile_prefix, 'proposal')
mmcv.dump(json_results, result_files['proposal'])
else:
raise TypeError('invalid type of results')
return result_files
def fast_eval_recall(self, results, proposal_nums, iou_thrs, logger=None):
gt_bboxes = []
for i in range(len(self.img_ids)):
ann_ids = self.coco.getAnnIds(imgIds=self.img_ids[i])
ann_info = self.coco.loadAnns(ann_ids)
if len(ann_info) == 0:
gt_bboxes.append(np.zeros((0, 4)))
continue
bboxes = []
for ann in ann_info:
if ann.get('ignore', False) or ann['iscrowd']:
continue
x1, y1, w, h = ann['bbox']
bboxes.append([x1, y1, x1 + w - 1, y1 + h - 1])
bboxes = np.array(bboxes, dtype=np.float32)
if bboxes.shape[0] == 0:
bboxes = np.zeros((0, 4))
gt_bboxes.append(bboxes)
recalls = eval_recalls(
gt_bboxes, results, proposal_nums, iou_thrs, logger=logger)
ar = recalls.mean(axis=1)
return ar
def evaluate(self,
results,
metric='bbox',
logger=None,
jsonfile_prefix=None,
classwise=False,
proposal_nums=(100, 300, 1000),
iou_thrs=np.arange(0.5, 0.96, 0.05)):
"""Evaluation in COCO protocol.
Args:
results (list): Testing results of the dataset.
metric (str | list[str]): Metrics to be evaluated.
logger (logging.Logger | str | None): Logger used for printing
related information during evaluation. Default: None.
            jsonfile_prefix (str | None): The prefix of output json files,
                including the file path, e.g. "a/b/prefix". If not specified,
                a temp file will be created. Default: None.
classwise (bool): Whether to evaluating the AP for each class.
proposal_nums (Sequence[int]): Proposal number used for evaluating
recalls, such as recall@100, recall@1000.
Default: (100, 300, 1000).
iou_thrs (Sequence[float]): IoU threshold used for evaluating
recalls. If set to a list, the average recall of all IoUs will
                also be computed. Default: np.arange(0.5, 0.96, 0.05).
Returns:
dict[str: float]
"""
assert isinstance(results, list), 'results must be a list'
assert len(results) == len(self), (
'The length of results is not equal to the dataset len: {} != {}'.
format(len(results), len(self)))
metrics = metric if isinstance(metric, list) else [metric]
allowed_metrics = ['bbox', 'segm', 'proposal', 'proposal_fast']
for metric in metrics:
if metric not in allowed_metrics:
raise KeyError('metric {} is not supported'.format(metric))
if jsonfile_prefix is None:
tmp_dir = tempfile.TemporaryDirectory()
jsonfile_prefix = osp.join(tmp_dir.name, 'results')
else:
tmp_dir = None
result_files = self.results2json(results, jsonfile_prefix)
eval_results = {}
cocoGt = self.coco
for metric in metrics:
msg = 'Evaluating {}...'.format(metric)
if logger is None:
msg = '\n' + msg
print_log(msg, logger=logger)
if metric == 'proposal_fast':
ar = self.fast_eval_recall(
results, proposal_nums, iou_thrs, logger='silent')
log_msg = []
for i, num in enumerate(proposal_nums):
eval_results['AR@{}'.format(num)] = ar[i]
log_msg.append('\nAR@{}\t{:.4f}'.format(num, ar[i]))
log_msg = ''.join(log_msg)
print_log(log_msg, logger=logger)
continue
if metric not in result_files:
raise KeyError('{} is not in results'.format(metric))
try:
cocoDt = cocoGt.loadRes(result_files[metric])
except IndexError:
print_log(
'The testing results of the whole dataset is empty.',
logger=logger,
level=logging.ERROR)
break
iou_type = 'bbox' if metric == 'proposal' else metric
cocoEval = COCOeval(cocoGt, cocoDt, iou_type)
cocoEval.params.imgIds = self.img_ids
if metric == 'proposal':
cocoEval.params.useCats = 0
cocoEval.params.maxDets = list(proposal_nums)
cocoEval.evaluate()
cocoEval.accumulate()
cocoEval.summarize()
metric_items = [
'AR@100', 'AR@300', 'AR@1000', 'AR_s@1000', 'AR_m@1000',
'AR_l@1000'
]
for i, item in enumerate(metric_items):
val = float('{:.3f}'.format(cocoEval.stats[i + 6]))
eval_results[item] = val
else:
cocoEval.evaluate()
cocoEval.accumulate()
cocoEval.summarize()
if classwise: # Compute per-category AP
pass # TODO
metric_items = [
'mAP', 'mAP_50', 'mAP_75', 'mAP_s', 'mAP_m', 'mAP_l'
]
for i in range(len(metric_items)):
key = '{}_{}'.format(metric, metric_items[i])
val = float('{:.3f}'.format(cocoEval.stats[i]))
eval_results[key] = val
eval_results['{}_mAP_copypaste'.format(metric)] = (
'{ap[0]:.3f} {ap[1]:.3f} {ap[2]:.3f} {ap[3]:.3f} '
'{ap[4]:.3f} {ap[5]:.3f}').format(ap=cocoEval.stats[:6])
if tmp_dir is not None:
tmp_dir.cleanup()
return eval_results
|
Cream/CDARTS/CDARTS_detection/mmdet/datasets/coco.py/0
|
{
"file_path": "Cream/CDARTS/CDARTS_detection/mmdet/datasets/coco.py",
"repo_id": "Cream",
"token_count": 8194
}
| 282 |
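A quick numeric check of the inclusive-pixel convention used by xyxy2xywh above (values are hypothetical):

import numpy as np

bbox = np.array([10., 20., 19., 39., 0.9])  # x1, y1, x2, y2, score
# CocoDataset.xyxy2xywh(bbox) -> [10.0, 20.0, 10.0, 20.0]
# since w = x2 - x1 + 1 = 10 and h = y2 - y1 + 1 = 20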
import os.path as osp
import xml.etree.ElementTree as ET
import mmcv
from .registry import DATASETS
from .xml_style import XMLDataset
@DATASETS.register_module
class WIDERFaceDataset(XMLDataset):
"""
Reader for the WIDER Face dataset in PASCAL VOC format.
Conversion scripts can be found in
https://github.com/sovrasov/wider-face-pascal-voc-annotations
"""
CLASSES = ('face', )
def __init__(self, **kwargs):
super(WIDERFaceDataset, self).__init__(**kwargs)
def load_annotations(self, ann_file):
img_infos = []
img_ids = mmcv.list_from_file(ann_file)
for img_id in img_ids:
filename = '{}.jpg'.format(img_id)
xml_path = osp.join(self.img_prefix, 'Annotations',
'{}.xml'.format(img_id))
tree = ET.parse(xml_path)
root = tree.getroot()
size = root.find('size')
width = int(size.find('width').text)
height = int(size.find('height').text)
folder = root.find('folder').text
img_infos.append(
dict(
id=img_id,
filename=osp.join(folder, filename),
width=width,
height=height))
return img_infos
|
Cream/CDARTS/CDARTS_detection/mmdet/datasets/wider_face.py/0
|
{
"file_path": "Cream/CDARTS/CDARTS_detection/mmdet/datasets/wider_face.py",
"repo_id": "Cream",
"token_count": 645
}
| 283 |
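load_annotations above reads only the folder, width and height fields from each annotation file, so a minimal per-image XML (illustrative) looks like:

xml_example = """
<annotation>
  <folder>0--Parade</folder>
  <size>
    <width>1024</width>
    <height>768</height>
  </size>
</annotation>
"""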
""" PyTorch EfficientNet Family
An implementation of EfficientNet that covers a variety of related models with efficient architectures:
* EfficientNet (B0-B8, L2 + Tensorflow pretrained AutoAug/RandAug/AdvProp/NoisyStudent weight ports)
- EfficientNet: Rethinking Model Scaling for CNNs - https://arxiv.org/abs/1905.11946
- CondConv: Conditionally Parameterized Convolutions for Efficient Inference - https://arxiv.org/abs/1904.04971
- Adversarial Examples Improve Image Recognition - https://arxiv.org/abs/1911.09665
- Self-training with Noisy Student improves ImageNet classification - https://arxiv.org/abs/1911.04252
* MixNet (Small, Medium, and Large)
- MixConv: Mixed Depthwise Convolutional Kernels - https://arxiv.org/abs/1907.09595
* MNasNet B1, A1 (SE), Small
- MnasNet: Platform-Aware Neural Architecture Search for Mobile - https://arxiv.org/abs/1807.11626
* FBNet-C
- FBNet: Hardware-Aware Efficient ConvNet Design via Differentiable NAS - https://arxiv.org/abs/1812.03443
* Single-Path NAS Pixel1
- Single-Path NAS: Designing Hardware-Efficient ConvNets - https://arxiv.org/abs/1904.02877
* And likely more...
Hacked together by Ross Wightman
"""
import torch
import torch.nn as nn
from torch.nn import functional as F
import torch.utils.model_zoo as model_zoo
from .efficientnet_builder import *
from .feature_hooks import FeatureHooks
from ..registry import BACKBONES
IMAGENET_DEFAULT_MEAN = (0.485, 0.456, 0.406)
IMAGENET_DEFAULT_STD = (0.229, 0.224, 0.225)
IMAGENET_INCEPTION_MEAN = (0.5, 0.5, 0.5)
IMAGENET_INCEPTION_STD = (0.5, 0.5, 0.5)
def hard_sigmoid(x, inplace: bool = False):
if inplace:
return x.add_(3.).clamp_(0., 6.).div_(6.)
else:
return F.relu6(x + 3.) / 6.
class HardSigmoid(nn.Module):
def __init__(self, inplace: bool = False):
super(HardSigmoid, self).__init__()
self.inplace = inplace
def forward(self, x):
return hard_sigmoid(x, self.inplace)
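# Sanity sketch (not part of the original file): both hard_sigmoid branches
# compute the same piecewise-linear map clamp((x + 3) / 6, 0, 1); the inplace
# path only avoids an extra allocation. E.g.:
#   x = torch.linspace(-4., 4., steps=9)
#   assert torch.allclose(hard_sigmoid(x), hard_sigmoid(x.clone(), inplace=True))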
def adaptive_pool_feat_mult(pool_type='avg'):
if pool_type == 'catavgmax':
return 2
else:
return 1
def adaptive_avgmax_pool2d(x, output_size=1):
x_avg = F.adaptive_avg_pool2d(x, output_size)
x_max = F.adaptive_max_pool2d(x, output_size)
return 0.5 * (x_avg + x_max)
def adaptive_catavgmax_pool2d(x, output_size=1):
x_avg = F.adaptive_avg_pool2d(x, output_size)
x_max = F.adaptive_max_pool2d(x, output_size)
return torch.cat((x_avg, x_max), 1)
def select_adaptive_pool2d(x, pool_type='avg', output_size=1):
"""Selectable global pooling function with dynamic input kernel size
"""
if pool_type == 'avg':
x = F.adaptive_avg_pool2d(x, output_size)
elif pool_type == 'avgmax':
x = adaptive_avgmax_pool2d(x, output_size)
elif pool_type == 'catavgmax':
x = adaptive_catavgmax_pool2d(x, output_size)
elif pool_type == 'max':
x = F.adaptive_max_pool2d(x, output_size)
else:
assert False, 'Invalid pool type: %s' % pool_type
return x
class AdaptiveAvgMaxPool2d(nn.Module):
def __init__(self, output_size=1):
super(AdaptiveAvgMaxPool2d, self).__init__()
self.output_size = output_size
def forward(self, x):
return adaptive_avgmax_pool2d(x, self.output_size)
class AdaptiveCatAvgMaxPool2d(nn.Module):
def __init__(self, output_size=1):
super(AdaptiveCatAvgMaxPool2d, self).__init__()
self.output_size = output_size
def forward(self, x):
return adaptive_catavgmax_pool2d(x, self.output_size)
class SelectAdaptivePool2d(nn.Module):
"""Selectable global pooling layer with dynamic input kernel size
"""
def __init__(self, output_size=1, pool_type='avg', flatten=False):
super(SelectAdaptivePool2d, self).__init__()
self.output_size = output_size
self.pool_type = pool_type
self.flatten = flatten
if pool_type == 'avgmax':
self.pool = AdaptiveAvgMaxPool2d(output_size)
elif pool_type == 'catavgmax':
self.pool = AdaptiveCatAvgMaxPool2d(output_size)
elif pool_type == 'max':
self.pool = nn.AdaptiveMaxPool2d(output_size)
else:
if pool_type != 'avg':
assert False, 'Invalid pool type: %s' % pool_type
self.pool = nn.AdaptiveAvgPool2d(output_size)
def forward(self, x):
x = self.pool(x)
if self.flatten:
x = x.flatten(1)
return x
def feat_mult(self):
return adaptive_pool_feat_mult(self.pool_type)
def __repr__(self):
return self.__class__.__name__ + ' (' \
+ 'output_size=' + str(self.output_size) \
+ ', pool_type=' + self.pool_type + ')'
def create_conv2d(in_chs, out_chs, kernel_size, **kwargs):
""" Select a 2d convolution implementation based on arguments
Creates and returns one of torch.nn.Conv2d, Conv2dSame, MixedConv2d, or CondConv2d.
Used extensively by EfficientNet, MobileNetv3 and related networks.
"""
assert 'groups' not in kwargs # only use 'depthwise' bool arg
if isinstance(kernel_size, list):
assert 'num_experts' not in kwargs # MixNet + CondConv combo not supported currently
        # Only lists are used to define MixedConv2d kernel groups; ints,
        # tuples and other iterables pass through to a normal conv and
        # specify (h, w).
m = MixedConv2d(in_chs, out_chs, kernel_size, **kwargs)
else:
depthwise = kwargs.pop('depthwise', False)
groups = out_chs if depthwise else 1
if 'num_experts' in kwargs and kwargs['num_experts'] > 0:
m = CondConv2d(in_chs, out_chs, kernel_size, groups=groups, **kwargs)
else:
m = create_conv2d_pad(in_chs, out_chs, kernel_size, groups=groups, **kwargs)
return m
def conv_bn(inp, oup, stride, groups=1, act_fn=nn.ReLU):
return nn.Sequential(
nn.Conv2d(inp, oup, 3, stride, 1, bias=False, groups=groups),
nn.BatchNorm2d(oup),
act_fn(inplace=True)
)
def conv_1x1_bn(inp, oup, groups=1, act_fn=nn.ReLU):
return nn.Sequential(
nn.Conv2d(inp, oup, 1, 1, 0, bias=False, groups=groups),
nn.BatchNorm2d(oup),
act_fn(inplace=True)
)
__all__ = ['EfficientNet']
def _cfg(url='', **kwargs):
return {
'url': url, 'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': (7, 7),
'crop_pct': 0.875, 'interpolation': 'bicubic',
'mean': IMAGENET_DEFAULT_MEAN, 'std': IMAGENET_DEFAULT_STD,
'first_conv': 'conv_stem', 'classifier': 'classifier',
**kwargs
}
default_cfgs = {
'mnasnet_050': _cfg(url=''),
'mnasnet_075': _cfg(url=''),
'mnasnet_100': _cfg(
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/mnasnet_b1-74cb7081.pth'),
'mnasnet_140': _cfg(url=''),
'semnasnet_050': _cfg(url=''),
'semnasnet_075': _cfg(url=''),
'semnasnet_100': _cfg(
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/mnasnet_a1-d9418771.pth'),
'semnasnet_140': _cfg(url=''),
'mnasnet_small': _cfg(url=''),
'mobilenetv2_100': _cfg(
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/mobilenetv2_100_ra-b33bc2c4.pth'),
'mobilenetv2_110d': _cfg(
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/mobilenetv2_110d_ra-77090ade.pth'),
'mobilenetv2_120d': _cfg(
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/mobilenetv2_120d_ra-5987e2ed.pth'),
'mobilenetv2_140': _cfg(
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/mobilenetv2_140_ra-21a4e913.pth'),
'fbnetc_100': _cfg(
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/fbnetc_100-c345b898.pth',
interpolation='bilinear'),
'spnasnet_100': _cfg(
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/spnasnet_100-048bc3f4.pth',
interpolation='bilinear'),
'efficientnet_b0': _cfg(
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/efficientnet_b0_ra-3dd342df.pth'),
'efficientnet_b1': _cfg(
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/efficientnet_b1-533bc792.pth',
input_size=(3, 240, 240), pool_size=(8, 8)),
'efficientnet_b2': _cfg(
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/efficientnet_b2_ra-bcdf34b7.pth',
input_size=(3, 260, 260), pool_size=(9, 9)),
'efficientnet_b2a': _cfg(
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/efficientnet_b2_ra-bcdf34b7.pth',
input_size=(3, 288, 288), pool_size=(9, 9), crop_pct=1.0),
'efficientnet_b3': _cfg(
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/efficientnet_b3_ra-a5e2fbc7.pth',
input_size=(3, 300, 300), pool_size=(10, 10), crop_pct=0.904),
'efficientnet_b3a': _cfg(
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/efficientnet_b3_ra-a5e2fbc7.pth',
input_size=(3, 320, 320), pool_size=(10, 10), crop_pct=1.0),
'efficientnet_b4': _cfg(
url='', input_size=(3, 380, 380), pool_size=(12, 12), crop_pct=0.922),
'efficientnet_b5': _cfg(
url='', input_size=(3, 456, 456), pool_size=(15, 15), crop_pct=0.934),
'efficientnet_b6': _cfg(
url='', input_size=(3, 528, 528), pool_size=(17, 17), crop_pct=0.942),
'efficientnet_b7': _cfg(
url='', input_size=(3, 600, 600), pool_size=(19, 19), crop_pct=0.949),
'efficientnet_b8': _cfg(
url='', input_size=(3, 672, 672), pool_size=(21, 21), crop_pct=0.954),
'efficientnet_l2': _cfg(
url='', input_size=(3, 800, 800), pool_size=(25, 25), crop_pct=0.961),
'efficientnet_es': _cfg(
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/efficientnet_es_ra-f111e99c.pth'),
'efficientnet_em': _cfg(
url='', input_size=(3, 240, 240), pool_size=(8, 8), crop_pct=0.882),
'efficientnet_el': _cfg(
url='', input_size=(3, 300, 300), pool_size=(10, 10), crop_pct=0.904),
'efficientnet_cc_b0_4e': _cfg(url=''),
'efficientnet_cc_b0_8e': _cfg(url=''),
'efficientnet_cc_b1_8e': _cfg(url='', input_size=(3, 240, 240), pool_size=(8, 8), crop_pct=0.882),
'efficientnet_lite0': _cfg(
url=''),
'efficientnet_lite1': _cfg(
url='',
input_size=(3, 240, 240), pool_size=(8, 8), crop_pct=0.882),
'efficientnet_lite2': _cfg(
url='',
input_size=(3, 260, 260), pool_size=(9, 9), crop_pct=0.890),
'efficientnet_lite3': _cfg(
url='',
input_size=(3, 300, 300), pool_size=(10, 10), crop_pct=0.904),
'efficientnet_lite4': _cfg(
url='', input_size=(3, 380, 380), pool_size=(12, 12), crop_pct=0.922),
'efficientnet_b1_pruned': _cfg(
url='https://imvl-automl-sh.oss-cn-shanghai.aliyuncs.com/darts/hyperml/hyperml/job_45403/outputs/effnetb1_pruned_9ebb3fe6.pth',
input_size=(3, 240, 240), pool_size=(8, 8), crop_pct=0.882, mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD),
'efficientnet_b2_pruned': _cfg(
url='https://imvl-automl-sh.oss-cn-shanghai.aliyuncs.com/darts/hyperml/hyperml/job_45403/outputs/effnetb2_pruned_203f55bc.pth',
input_size=(3, 260, 260), pool_size=(9, 9), crop_pct=0.890, mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD),
'efficientnet_b3_pruned': _cfg(
url='https://imvl-automl-sh.oss-cn-shanghai.aliyuncs.com/darts/hyperml/hyperml/job_45403/outputs/effnetb3_pruned_5abcc29f.pth',
input_size=(3, 300, 300), pool_size=(10, 10), crop_pct=0.904, mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD),
'tf_efficientnet_b0': _cfg(
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b0_aa-827b6e33.pth',
input_size=(3, 224, 224)),
'tf_efficientnet_b1': _cfg(
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b1_aa-ea7a6ee0.pth',
input_size=(3, 240, 240), pool_size=(8, 8), crop_pct=0.882),
'tf_efficientnet_b2': _cfg(
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b2_aa-60c94f97.pth',
input_size=(3, 260, 260), pool_size=(9, 9), crop_pct=0.890),
'tf_efficientnet_b3': _cfg(
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b3_aa-84b4657e.pth',
input_size=(3, 300, 300), pool_size=(10, 10), crop_pct=0.904),
'tf_efficientnet_b4': _cfg(
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b4_aa-818f208c.pth',
input_size=(3, 380, 380), pool_size=(12, 12), crop_pct=0.922),
'tf_efficientnet_b5': _cfg(
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b5_ra-9a3e5369.pth',
input_size=(3, 456, 456), pool_size=(15, 15), crop_pct=0.934),
'tf_efficientnet_b6': _cfg(
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b6_aa-80ba17e4.pth',
input_size=(3, 528, 528), pool_size=(17, 17), crop_pct=0.942),
'tf_efficientnet_b7': _cfg(
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b7_ra-6c08e654.pth',
input_size=(3, 600, 600), pool_size=(19, 19), crop_pct=0.949),
'tf_efficientnet_b8': _cfg(
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b8_ra-572d5dd9.pth',
input_size=(3, 672, 672), pool_size=(21, 21), crop_pct=0.954),
'tf_efficientnet_b0_ap': _cfg(
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b0_ap-f262efe1.pth',
mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD, input_size=(3, 224, 224)),
'tf_efficientnet_b1_ap': _cfg(
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b1_ap-44ef0a3d.pth',
mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD,
input_size=(3, 240, 240), pool_size=(8, 8), crop_pct=0.882),
'tf_efficientnet_b2_ap': _cfg(
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b2_ap-2f8e7636.pth',
mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD,
input_size=(3, 260, 260), pool_size=(9, 9), crop_pct=0.890),
'tf_efficientnet_b3_ap': _cfg(
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b3_ap-aad25bdd.pth',
mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD,
input_size=(3, 300, 300), pool_size=(10, 10), crop_pct=0.904),
'tf_efficientnet_b4_ap': _cfg(
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b4_ap-dedb23e6.pth',
mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD,
input_size=(3, 380, 380), pool_size=(12, 12), crop_pct=0.922),
'tf_efficientnet_b5_ap': _cfg(
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b5_ap-9e82fae8.pth',
mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD,
input_size=(3, 456, 456), pool_size=(15, 15), crop_pct=0.934),
'tf_efficientnet_b6_ap': _cfg(
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b6_ap-4ffb161f.pth',
mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD,
input_size=(3, 528, 528), pool_size=(17, 17), crop_pct=0.942),
'tf_efficientnet_b7_ap': _cfg(
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b7_ap-ddb28fec.pth',
mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD,
input_size=(3, 600, 600), pool_size=(19, 19), crop_pct=0.949),
'tf_efficientnet_b8_ap': _cfg(
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b8_ap-00e169fa.pth',
mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD,
input_size=(3, 672, 672), pool_size=(21, 21), crop_pct=0.954),
'tf_efficientnet_b0_ns': _cfg(
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b0_ns-c0e6a31c.pth',
input_size=(3, 224, 224)),
'tf_efficientnet_b1_ns': _cfg(
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b1_ns-99dd0c41.pth',
input_size=(3, 240, 240), pool_size=(8, 8), crop_pct=0.882),
'tf_efficientnet_b2_ns': _cfg(
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b2_ns-00306e48.pth',
input_size=(3, 260, 260), pool_size=(9, 9), crop_pct=0.890),
'tf_efficientnet_b3_ns': _cfg(
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b3_ns-9d44bf68.pth',
input_size=(3, 300, 300), pool_size=(10, 10), crop_pct=0.904),
'tf_efficientnet_b4_ns': _cfg(
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b4_ns-d6313a46.pth',
input_size=(3, 380, 380), pool_size=(12, 12), crop_pct=0.922),
'tf_efficientnet_b5_ns': _cfg(
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b5_ns-6f26d0cf.pth',
input_size=(3, 456, 456), pool_size=(15, 15), crop_pct=0.934),
'tf_efficientnet_b6_ns': _cfg(
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b6_ns-51548356.pth',
input_size=(3, 528, 528), pool_size=(17, 17), crop_pct=0.942),
'tf_efficientnet_b7_ns': _cfg(
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_b7_ns-1dbc32de.pth',
input_size=(3, 600, 600), pool_size=(19, 19), crop_pct=0.949),
'tf_efficientnet_l2_ns_475': _cfg(
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_l2_ns_475-bebbd00a.pth',
input_size=(3, 475, 475), pool_size=(15, 15), crop_pct=0.936),
'tf_efficientnet_l2_ns': _cfg(
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_l2_ns-df73bb44.pth',
input_size=(3, 800, 800), pool_size=(25, 25), crop_pct=0.96),
'tf_efficientnet_es': _cfg(
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_es-ca1afbfe.pth',
mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5),
input_size=(3, 224, 224), ),
'tf_efficientnet_em': _cfg(
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_em-e78cfe58.pth',
mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5),
input_size=(3, 240, 240), pool_size=(8, 8), crop_pct=0.882),
'tf_efficientnet_el': _cfg(
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_el-5143854e.pth',
mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5),
input_size=(3, 300, 300), pool_size=(10, 10), crop_pct=0.904),
'tf_efficientnet_cc_b0_4e': _cfg(
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_cc_b0_4e-4362b6b2.pth',
mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD),
'tf_efficientnet_cc_b0_8e': _cfg(
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_cc_b0_8e-66184a25.pth',
mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD),
'tf_efficientnet_cc_b1_8e': _cfg(
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_cc_b1_8e-f7c79ae1.pth',
mean=IMAGENET_INCEPTION_MEAN, std=IMAGENET_INCEPTION_STD,
input_size=(3, 240, 240), pool_size=(8, 8), crop_pct=0.882),
'tf_efficientnet_lite0': _cfg(
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_lite0-0aa007d2.pth',
mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5),
interpolation='bicubic', # should be bilinear but bicubic better match for TF bilinear at low res
),
'tf_efficientnet_lite1': _cfg(
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_lite1-bde8b488.pth',
mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5),
input_size=(3, 240, 240), pool_size=(8, 8), crop_pct=0.882,
interpolation='bicubic', # should be bilinear but bicubic better match for TF bilinear at low res
),
'tf_efficientnet_lite2': _cfg(
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_lite2-dcccb7df.pth',
mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5),
input_size=(3, 260, 260), pool_size=(9, 9), crop_pct=0.890,
interpolation='bicubic', # should be bilinear but bicubic better match for TF bilinear at low res
),
'tf_efficientnet_lite3': _cfg(
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_lite3-b733e338.pth',
mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5),
input_size=(3, 300, 300), pool_size=(10, 10), crop_pct=0.904, interpolation='bilinear'),
'tf_efficientnet_lite4': _cfg(
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_lite4-741542c3.pth',
mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5),
input_size=(3, 380, 380), pool_size=(12, 12), crop_pct=0.920, interpolation='bilinear'),
'mixnet_s': _cfg(
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/mixnet_s-a907afbc.pth'),
'mixnet_m': _cfg(
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/mixnet_m-4647fc68.pth'),
'mixnet_l': _cfg(
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/mixnet_l-5a9a2ed8.pth'),
'mixnet_xl': _cfg(
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/mixnet_xl_ra-aac3c00c.pth'),
'mixnet_xxl': _cfg(),
'tf_mixnet_s': _cfg(
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_mixnet_s-89d3354b.pth'),
'tf_mixnet_m': _cfg(
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_mixnet_m-0f4d8805.pth'),
'tf_mixnet_l': _cfg(
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_mixnet_l-6c92e0c8.pth'),
}
_DEBUG = False
class EfficientNet(nn.Module):
""" (Generic) EfficientNet
A flexible and performant PyTorch implementation of efficient network architectures, including:
* EfficientNet B0-B8, L2
* EfficientNet-EdgeTPU
* EfficientNet-CondConv
* MixNet S, M, L, XL
* MnasNet A1, B1, and small
* FBNet C
* Single-Path NAS Pixel1
"""
def __init__(self, block_args, num_classes=1000, num_features=1280, in_chans=3, stem_size=32,
channel_multiplier=1.0, channel_divisor=8, channel_min=None,
output_stride=32, pad_type='', fix_stem=False, act_layer=nn.ReLU, drop_rate=0., drop_path_rate=0.,
se_kwargs=None, norm_layer=nn.BatchNorm2d, norm_kwargs=None, global_pool='avg'):
super(EfficientNet, self).__init__()
norm_kwargs = norm_kwargs or {}
self.num_classes = num_classes
self.num_features = num_features
self.drop_rate = drop_rate
self._in_chs = in_chans
# Stem
if not fix_stem:
stem_size = round_channels(stem_size, channel_multiplier, channel_divisor, channel_min)
self.conv_stem = create_conv2d(self._in_chs, stem_size, 3, stride=2, padding=pad_type)
self.bn1 = norm_layer(stem_size, **norm_kwargs)
self.act1 = act_layer(inplace=True)
self._in_chs = stem_size
# Middle stages (IR/ER/DS Blocks)
builder = EfficientNetBuilder(
channel_multiplier, channel_divisor, channel_min, output_stride, pad_type, act_layer, se_kwargs,
norm_layer, norm_kwargs, drop_path_rate, verbose=_DEBUG)
self.blocks = nn.Sequential(*builder(self._in_chs, block_args))
self.feature_info = builder.features
self._in_chs = builder.in_chs
# Head + Pooling
self.conv_head = create_conv2d(self._in_chs, self.num_features, 1, padding=pad_type)
self.bn2 = norm_layer(self.num_features, **norm_kwargs)
self.act2 = act_layer(inplace=True)
self.global_pool = SelectAdaptivePool2d(pool_type=global_pool)
# Classifier
self.classifier = nn.Linear(self.num_features * self.global_pool.feat_mult(), self.num_classes)
efficientnet_init_weights(self)
def as_sequential(self):
layers = [self.conv_stem, self.bn1, self.act1]
layers.extend(self.blocks)
layers.extend([self.conv_head, self.bn2, self.act2, self.global_pool])
layers.extend([nn.Flatten(), nn.Dropout(self.drop_rate), self.classifier])
return nn.Sequential(*layers)
def get_classifier(self):
return self.classifier
def reset_classifier(self, num_classes, global_pool='avg'):
self.num_classes = num_classes
self.global_pool = SelectAdaptivePool2d(pool_type=global_pool)
self.classifier = nn.Linear(
self.num_features * self.global_pool.feat_mult(), num_classes) if num_classes else None
def forward_features(self, x):
x = self.conv_stem(x)
x = self.bn1(x)
x = self.act1(x)
x = self.blocks(x)
x = self.conv_head(x)
x = self.bn2(x)
x = self.act2(x)
return x
def forward(self, x):
x = self.forward_features(x)
x = self.global_pool(x)
x = x.flatten(1)
if self.drop_rate > 0.:
x = F.dropout(x, p=self.drop_rate, training=self.training)
return self.classifier(x)
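# Usage sketch (illustrative, not part of the original file): the classifier
# variant maps (N, 3, H, W) images to (N, num_classes) logits. This relies on
# the efficientnet_b0() factory defined further down in this module.
def _demo_classifier_forward():
    import torch
    model = efficientnet_b0(pretrained=False)
    model.eval()
    x = torch.randn(1, 3, 224, 224)
    with torch.no_grad():
        logits = model(x)
    assert logits.shape == (1, 1000)  # default num_classes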
class EfficientNetFeatures(nn.Module):
""" EfficientNet Feature Extractor
A work-in-progress feature extraction module for EfficientNet, to use as a backbone for segmentation
and object detection models.
"""
def __init__(self, block_args, out_indices=(0, 1, 2, 3, 4), feature_location='bottleneck',
in_chans=3, stem_size=32, channel_multiplier=1.0, channel_divisor=8, channel_min=None,
output_stride=32, pad_type='', fix_stem=False, act_layer=nn.ReLU, drop_rate=0., drop_path_rate=0.,
se_kwargs=None, norm_layer=nn.BatchNorm2d, norm_kwargs=None):
super(EfficientNetFeatures, self).__init__()
norm_kwargs = norm_kwargs or {}
# TODO only create stages needed, currently all stages are created regardless of out_indices
num_stages = max(out_indices) + 1
self.out_indices = out_indices
self.feature_location = feature_location
self.drop_rate = drop_rate
self._in_chs = in_chans
# Stem
if not fix_stem:
stem_size = round_channels(stem_size, channel_multiplier, channel_divisor, channel_min)
self.conv_stem = create_conv2d(self._in_chs, stem_size, 3, stride=2, padding=pad_type)
self.bn1 = norm_layer(stem_size, **norm_kwargs)
self.act1 = act_layer(inplace=True)
self._in_chs = stem_size
# Middle stages (IR/ER/DS Blocks)
builder = EfficientNetBuilder(
channel_multiplier, channel_divisor, channel_min, output_stride, pad_type, act_layer, se_kwargs,
norm_layer, norm_kwargs, drop_path_rate, feature_location=feature_location, verbose=_DEBUG)
self.blocks = nn.Sequential(*builder(self._in_chs, block_args))
self._feature_info = builder.features # builder provides info about feature channels for each block
self._stage_to_feature_idx = {
v['stage_idx']: fi for fi, v in self._feature_info.items() if fi in self.out_indices}
self._in_chs = builder.in_chs
efficientnet_init_weights(self)
if _DEBUG:
for k, v in self._feature_info.items():
print('Feature idx: {}: Name: {}, Channels: {}'.format(k, v['name'], v['num_chs']))
# Register feature extraction hooks with FeatureHooks helper
self.feature_hooks = None
if feature_location != 'bottleneck':
hooks = [dict(
name=self._feature_info[idx]['module'],
type=self._feature_info[idx]['hook_type']) for idx in out_indices]
self.feature_hooks = FeatureHooks(hooks, self.named_modules())
def feature_channels(self, idx=None):
""" Feature Channel Shortcut
Returns feature channel count for each output index if idx == None. If idx is an integer, will
return feature channel count for that feature block index (independent of out_indices setting).
"""
if isinstance(idx, int):
return self._feature_info[idx]['num_chs']
return [self._feature_info[i]['num_chs'] for i in self.out_indices]
def feature_info(self, idx=None):
""" Feature Channel Shortcut
Returns feature channel count for each output index if idx == None. If idx is an integer, will
return feature channel count for that feature block index (independent of out_indices setting).
"""
if isinstance(idx, int):
return self._feature_info[idx]
return [self._feature_info[i] for i in self.out_indices]
def forward(self, x):
x = self.conv_stem(x)
x = self.bn1(x)
x = self.act1(x)
if self.feature_hooks is None:
features = []
for i, b in enumerate(self.blocks):
x = b(x)
if i in self._stage_to_feature_idx:
features.append(x)
return features
else:
self.blocks(x)
return self.feature_hooks.get_output(x.device)
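# Usage sketch (illustrative): passing features_only=True through a factory is
# routed by _create_model below to this EfficientNetFeatures class, yielding
# one feature map per out_index instead of classification logits.
def _demo_feature_extraction():
    import torch
    model = efficientnet_b0(features_only=True)
    feats = model(torch.randn(1, 3, 224, 224))
    print([tuple(f.shape) for f in feats])  # five (N, C, H, W) maps by default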
def load_pretrained(model, cfg=None, num_classes=1000, in_chans=3, filter_fn=None, strict=True):
if cfg is None:
cfg = getattr(model, 'default_cfg')
if cfg is None or 'url' not in cfg or not cfg['url']:
logging.warning("Pretrained model URL is invalid, using random initialization.")
return
state_dict = model_zoo.load_url(cfg['url'], progress=False, map_location='cpu')
if in_chans == 1:
conv1_name = cfg['first_conv']
logging.info('Converting first conv (%s) from 3 to 1 channel' % conv1_name)
conv1_weight = state_dict[conv1_name + '.weight']
state_dict[conv1_name + '.weight'] = conv1_weight.sum(dim=1, keepdim=True)
elif in_chans != 3:
assert False, "Invalid in_chans for pretrained weights"
classifier_name = cfg['classifier']
if num_classes == 1000 and cfg['num_classes'] == 1001:
# special case for imagenet trained models with extra background class in pretrained weights
classifier_weight = state_dict[classifier_name + '.weight']
state_dict[classifier_name + '.weight'] = classifier_weight[1:]
classifier_bias = state_dict[classifier_name + '.bias']
state_dict[classifier_name + '.bias'] = classifier_bias[1:]
elif num_classes != cfg['num_classes']:
# completely discard fully connected for all other differences between pretrained and created model
del state_dict[classifier_name + '.weight']
del state_dict[classifier_name + '.bias']
strict = False
if filter_fn is not None:
state_dict = filter_fn(state_dict)
model.load_state_dict(state_dict, strict=strict)
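# Illustrative filter_fn sketch: load_pretrained hands the state dict to
# filter_fn before loading, so callers can rewrite keys. The helper name and
# the 'module.' prefix are hypothetical examples, not part of this codebase.
def _strip_module_prefix(state_dict):
    return {k[7:] if k.startswith('module.') else k: v
            for k, v in state_dict.items()}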
def _create_model(model_kwargs, default_cfg, pretrained=False):
if model_kwargs.pop('features_only', False):
load_strict = False
model_kwargs.pop('num_classes', 0)
model_kwargs.pop('num_features', 0)
model_kwargs.pop('head_conv', None)
model_class = EfficientNetFeatures
else:
load_strict = True
model_class = EfficientNet
variant = model_kwargs.pop('variant', '')
model = model_class(**model_kwargs)
model.default_cfg = default_cfg
if pretrained:
load_pretrained(
model,
default_cfg,
num_classes=model_kwargs.get('num_classes', 0),
in_chans=model_kwargs.get('in_chans', 3),
strict=load_strict)
return model
def _gen_mnasnet_a1(variant, channel_multiplier=1.0, pretrained=False, **kwargs):
"""Creates a mnasnet-a1 model.
Ref impl: https://github.com/tensorflow/tpu/tree/master/models/official/mnasnet
Paper: https://arxiv.org/pdf/1807.11626.pdf.
Args:
channel_multiplier: multiplier to number of channels per layer.
"""
arch_def = [
# stage 0, 112x112 in
['ds_r1_k3_s1_e1_c16_noskip'],
# stage 1, 112x112 in
['ir_r2_k3_s2_e6_c24'],
# stage 2, 56x56 in
['ir_r3_k5_s2_e3_c40_se0.25'],
# stage 3, 28x28 in
['ir_r4_k3_s2_e6_c80'],
# stage 4, 14x14in
['ir_r2_k3_s1_e6_c112_se0.25'],
# stage 5, 14x14in
['ir_r3_k5_s2_e6_c160_se0.25'],
# stage 6, 7x7 in
['ir_r1_k3_s1_e6_c320'],
]
model_kwargs = dict(
block_args=decode_arch_def(arch_def),
stem_size=32,
channel_multiplier=channel_multiplier,
norm_kwargs=resolve_bn_args(kwargs),
**kwargs
)
model = _create_model(model_kwargs, default_cfgs[variant], pretrained)
return model
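# Block-string cheat sheet (informal, inferred from how the _gen_* builders
# use decode_arch_def): 'ir_r2_k3_s2_e6_c24_se0.25' reads as an inverted
# residual ('ir') repeated r=2 times, kernel k=3, stride s=2 on the first
# repeat, expansion ratio e=6, c=24 output channels, squeeze-excite 0.25.
def _demo_decode_arch_def():
    stages = decode_arch_def([['ir_r2_k3_s2_e6_c24_se0.25']])
    print(len(stages), len(stages[0]))  # 1 stage with 2 block definitions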
def _gen_mnasnet_b1(variant, channel_multiplier=1.0, pretrained=False, **kwargs):
"""Creates a mnasnet-b1 model.
Ref impl: https://github.com/tensorflow/tpu/tree/master/models/official/mnasnet
Paper: https://arxiv.org/pdf/1807.11626.pdf.
Args:
channel_multiplier: multiplier to number of channels per layer.
"""
arch_def = [
# stage 0, 112x112 in
['ds_r1_k3_s1_c16_noskip'],
# stage 1, 112x112 in
['ir_r3_k3_s2_e3_c24'],
# stage 2, 56x56 in
['ir_r3_k5_s2_e3_c40'],
# stage 3, 28x28 in
['ir_r3_k5_s2_e6_c80'],
# stage 4, 14x14in
['ir_r2_k3_s1_e6_c96'],
# stage 5, 14x14in
['ir_r4_k5_s2_e6_c192'],
# stage 6, 7x7 in
['ir_r1_k3_s1_e6_c320_noskip']
]
model_kwargs = dict(
block_args=decode_arch_def(arch_def),
stem_size=32,
channel_multiplier=channel_multiplier,
norm_kwargs=resolve_bn_args(kwargs),
**kwargs
)
model = _create_model(model_kwargs, default_cfgs[variant], pretrained)
return model
def _gen_mnasnet_small(variant, channel_multiplier=1.0, pretrained=False, **kwargs):
"""Creates a mnasnet-b1 model.
Ref impl: https://github.com/tensorflow/tpu/tree/master/models/official/mnasnet
Paper: https://arxiv.org/pdf/1807.11626.pdf.
Args:
channel_multiplier: multiplier to number of channels per layer.
"""
arch_def = [
['ds_r1_k3_s1_c8'],
['ir_r1_k3_s2_e3_c16'],
['ir_r2_k3_s2_e6_c16'],
['ir_r4_k5_s2_e6_c32_se0.25'],
['ir_r3_k3_s1_e6_c32_se0.25'],
['ir_r3_k5_s2_e6_c88_se0.25'],
['ir_r1_k3_s1_e6_c144']
]
model_kwargs = dict(
block_args=decode_arch_def(arch_def),
stem_size=8,
channel_multiplier=channel_multiplier,
norm_kwargs=resolve_bn_args(kwargs),
**kwargs
)
model = _create_model(model_kwargs, default_cfgs[variant], pretrained)
return model
def _gen_mobilenet_v2(
variant, channel_multiplier=1.0, depth_multiplier=1.0, fix_stem_head=False, pretrained=False, **kwargs):
""" Generate MobileNet-V2 network
Ref impl: https://github.com/tensorflow/models/blob/master/research/slim/nets/mobilenet/mobilenet_v2.py
Paper: https://arxiv.org/abs/1801.04381
"""
arch_def = [
['ds_r1_k3_s1_c16'],
['ir_r2_k3_s2_e6_c24'],
['ir_r3_k3_s2_e6_c32'],
['ir_r4_k3_s2_e6_c64'],
['ir_r3_k3_s1_e6_c96'],
['ir_r3_k3_s2_e6_c160'],
['ir_r1_k3_s1_e6_c320'],
]
model_kwargs = dict(
block_args=decode_arch_def(arch_def, depth_multiplier=depth_multiplier, fix_first_last=fix_stem_head),
num_features=1280 if fix_stem_head else round_channels(1280, channel_multiplier, 8, None),
stem_size=32,
fix_stem=fix_stem_head,
channel_multiplier=channel_multiplier,
norm_kwargs=resolve_bn_args(kwargs),
act_layer=nn.ReLU6,
**kwargs
)
model = _create_model(model_kwargs, default_cfgs[variant], pretrained)
return model
def _gen_fbnetc(variant, channel_multiplier=1.0, pretrained=False, **kwargs):
""" FBNet-C
Paper: https://arxiv.org/abs/1812.03443
Ref Impl: https://github.com/facebookresearch/maskrcnn-benchmark/blob/master/maskrcnn_benchmark/modeling/backbone/fbnet_modeldef.py
    NOTE: the ref impl above does not correspond to the 'C' variant here; that was derived from the paper,
    and the impl was only used to confirm some building-block details
"""
arch_def = [
['ir_r1_k3_s1_e1_c16'],
['ir_r1_k3_s2_e6_c24', 'ir_r2_k3_s1_e1_c24'],
['ir_r1_k5_s2_e6_c32', 'ir_r1_k5_s1_e3_c32', 'ir_r1_k5_s1_e6_c32', 'ir_r1_k3_s1_e6_c32'],
['ir_r1_k5_s2_e6_c64', 'ir_r1_k5_s1_e3_c64', 'ir_r2_k5_s1_e6_c64'],
['ir_r3_k5_s1_e6_c112', 'ir_r1_k5_s1_e3_c112'],
['ir_r4_k5_s2_e6_c184'],
['ir_r1_k3_s1_e6_c352'],
]
model_kwargs = dict(
block_args=decode_arch_def(arch_def),
stem_size=16,
num_features=1984, # paper suggests this, but is not 100% clear
channel_multiplier=channel_multiplier,
norm_kwargs=resolve_bn_args(kwargs),
**kwargs
)
model = _create_model(model_kwargs, default_cfgs[variant], pretrained)
return model
def _gen_spnasnet(variant, channel_multiplier=1.0, pretrained=False, **kwargs):
"""Creates the Single-Path NAS model from search targeted for Pixel1 phone.
Paper: https://arxiv.org/abs/1904.02877
Args:
channel_multiplier: multiplier to number of channels per layer.
"""
arch_def = [
# stage 0, 112x112 in
['ds_r1_k3_s1_c16_noskip'],
# stage 1, 112x112 in
['ir_r3_k3_s2_e3_c24'],
# stage 2, 56x56 in
['ir_r1_k5_s2_e6_c40', 'ir_r3_k3_s1_e3_c40'],
# stage 3, 28x28 in
['ir_r1_k5_s2_e6_c80', 'ir_r3_k3_s1_e3_c80'],
# stage 4, 14x14in
['ir_r1_k5_s1_e6_c96', 'ir_r3_k5_s1_e3_c96'],
# stage 5, 14x14in
['ir_r4_k5_s2_e6_c192'],
# stage 6, 7x7 in
['ir_r1_k3_s1_e6_c320_noskip']
]
model_kwargs = dict(
block_args=decode_arch_def(arch_def),
stem_size=32,
channel_multiplier=channel_multiplier,
norm_kwargs=resolve_bn_args(kwargs),
**kwargs
)
model = _create_model(model_kwargs, default_cfgs[variant], pretrained)
return model
def _gen_efficientnet(variant, channel_multiplier=1.0, depth_multiplier=1.0, pretrained=False, **kwargs):
"""Creates an EfficientNet model.
Ref impl: https://github.com/tensorflow/tpu/blob/master/models/official/efficientnet/efficientnet_model.py
Paper: https://arxiv.org/abs/1905.11946
EfficientNet params
name: (channel_multiplier, depth_multiplier, resolution, dropout_rate)
'efficientnet-b0': (1.0, 1.0, 224, 0.2),
'efficientnet-b1': (1.0, 1.1, 240, 0.2),
'efficientnet-b2': (1.1, 1.2, 260, 0.3),
'efficientnet-b3': (1.2, 1.4, 300, 0.3),
'efficientnet-b4': (1.4, 1.8, 380, 0.4),
'efficientnet-b5': (1.6, 2.2, 456, 0.4),
'efficientnet-b6': (1.8, 2.6, 528, 0.5),
'efficientnet-b7': (2.0, 3.1, 600, 0.5),
'efficientnet-b8': (2.2, 3.6, 672, 0.5),
'efficientnet-l2': (4.3, 5.3, 800, 0.5),
Args:
channel_multiplier: multiplier to number of channels per layer
depth_multiplier: multiplier to number of repeats per stage
"""
arch_def = [
['ds_r1_k3_s1_e1_c16_se0.25'],
['ir_r2_k3_s2_e6_c24_se0.25'],
['ir_r2_k5_s2_e6_c40_se0.25'],
['ir_r3_k3_s2_e6_c80_se0.25'],
['ir_r3_k5_s1_e6_c112_se0.25'],
['ir_r4_k5_s2_e6_c192_se0.25'],
['ir_r1_k3_s1_e6_c320_se0.25'],
]
model_kwargs = dict(
block_args=decode_arch_def(arch_def, depth_multiplier),
num_features=round_channels(1280, channel_multiplier, 8, None),
stem_size=32,
channel_multiplier=channel_multiplier,
act_layer=Swish,
norm_kwargs=resolve_bn_args(kwargs),
variant=variant,
**kwargs,
)
model = _create_model(model_kwargs, default_cfgs[variant], pretrained)
return model
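# Scaling sketch (illustrative): the channel multipliers from the table above
# are applied through round_channels with divisor 8, e.g. for B3 (x1.2):
def _demo_round_channels():
    print(round_channels(32, 1.2, 8, None))    # stem: 32 -> 40
    print(round_channels(1280, 1.2, 8, None))  # head features: 1280 -> 1536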
def _gen_efficientnet_edge(variant, channel_multiplier=1.0, depth_multiplier=1.0, pretrained=False, **kwargs):
""" Creates an EfficientNet-EdgeTPU model
Ref impl: https://github.com/tensorflow/tpu/tree/master/models/official/efficientnet/edgetpu
"""
arch_def = [
        # NOTE `fc` is present to override a mismatch between stem channels and in_chs that is not
        # present in other models
['er_r1_k3_s1_e4_c24_fc24_noskip'],
['er_r2_k3_s2_e8_c32'],
['er_r4_k3_s2_e8_c48'],
['ir_r5_k5_s2_e8_c96'],
['ir_r4_k5_s1_e8_c144'],
['ir_r2_k5_s2_e8_c192'],
]
model_kwargs = dict(
block_args=decode_arch_def(arch_def, depth_multiplier),
num_features=round_channels(1280, channel_multiplier, 8, None),
stem_size=32,
channel_multiplier=channel_multiplier,
norm_kwargs=resolve_bn_args(kwargs),
act_layer=nn.ReLU,
**kwargs,
)
model = _create_model(model_kwargs, default_cfgs[variant], pretrained)
return model
def _gen_efficientnet_condconv(
variant, channel_multiplier=1.0, depth_multiplier=1.0, experts_multiplier=1, pretrained=False, **kwargs):
"""Creates an EfficientNet-CondConv model.
Ref impl: https://github.com/tensorflow/tpu/tree/master/models/official/efficientnet/condconv
"""
arch_def = [
['ds_r1_k3_s1_e1_c16_se0.25'],
['ir_r2_k3_s2_e6_c24_se0.25'],
['ir_r2_k5_s2_e6_c40_se0.25'],
['ir_r3_k3_s2_e6_c80_se0.25'],
['ir_r3_k5_s1_e6_c112_se0.25_cc4'],
['ir_r4_k5_s2_e6_c192_se0.25_cc4'],
['ir_r1_k3_s1_e6_c320_se0.25_cc4'],
]
# NOTE unlike official impl, this one uses `cc<x>` option where x is the base number of experts for each stage and
    # the experts_multiplier increases that on a per-model basis as with depth/channel multipliers
model_kwargs = dict(
block_args=decode_arch_def(arch_def, depth_multiplier, experts_multiplier=experts_multiplier),
num_features=round_channels(1280, channel_multiplier, 8, None),
stem_size=32,
channel_multiplier=channel_multiplier,
norm_kwargs=resolve_bn_args(kwargs),
act_layer=Swish,
**kwargs,
)
model = _create_model(model_kwargs, default_cfgs[variant], pretrained)
return model
def _gen_efficientnet_lite(variant, channel_multiplier=1.0, depth_multiplier=1.0, pretrained=False, **kwargs):
"""Creates an EfficientNet-Lite model.
Ref impl: https://github.com/tensorflow/tpu/tree/master/models/official/efficientnet/lite
Paper: https://arxiv.org/abs/1905.11946
EfficientNet params
name: (channel_multiplier, depth_multiplier, resolution, dropout_rate)
'efficientnet-lite0': (1.0, 1.0, 224, 0.2),
'efficientnet-lite1': (1.0, 1.1, 240, 0.2),
'efficientnet-lite2': (1.1, 1.2, 260, 0.3),
'efficientnet-lite3': (1.2, 1.4, 280, 0.3),
'efficientnet-lite4': (1.4, 1.8, 300, 0.3),
Args:
channel_multiplier: multiplier to number of channels per layer
depth_multiplier: multiplier to number of repeats per stage
"""
arch_def = [
['ds_r1_k3_s1_e1_c16'],
['ir_r2_k3_s2_e6_c24'],
['ir_r2_k5_s2_e6_c40'],
['ir_r3_k3_s2_e6_c80'],
['ir_r3_k5_s1_e6_c112'],
['ir_r4_k5_s2_e6_c192'],
['ir_r1_k3_s1_e6_c320'],
]
model_kwargs = dict(
block_args=decode_arch_def(arch_def, depth_multiplier, fix_first_last=True),
num_features=1280,
stem_size=32,
fix_stem=True,
channel_multiplier=channel_multiplier,
act_layer=nn.ReLU6,
norm_kwargs=resolve_bn_args(kwargs),
**kwargs,
)
model = _create_model(model_kwargs, default_cfgs[variant], pretrained)
return model
def _gen_mixnet_s(variant, channel_multiplier=1.0, pretrained=False, **kwargs):
"""Creates a MixNet Small model.
Ref impl: https://github.com/tensorflow/tpu/tree/master/models/official/mnasnet/mixnet
Paper: https://arxiv.org/abs/1907.09595
"""
arch_def = [
# stage 0, 112x112 in
['ds_r1_k3_s1_e1_c16'], # relu
# stage 1, 112x112 in
['ir_r1_k3_a1.1_p1.1_s2_e6_c24', 'ir_r1_k3_a1.1_p1.1_s1_e3_c24'], # relu
# stage 2, 56x56 in
['ir_r1_k3.5.7_s2_e6_c40_se0.5_nsw', 'ir_r3_k3.5_a1.1_p1.1_s1_e6_c40_se0.5_nsw'], # swish
# stage 3, 28x28 in
['ir_r1_k3.5.7_p1.1_s2_e6_c80_se0.25_nsw', 'ir_r2_k3.5_p1.1_s1_e6_c80_se0.25_nsw'], # swish
# stage 4, 14x14in
['ir_r1_k3.5.7_a1.1_p1.1_s1_e6_c120_se0.5_nsw', 'ir_r2_k3.5.7.9_a1.1_p1.1_s1_e3_c120_se0.5_nsw'], # swish
# stage 5, 14x14in
['ir_r1_k3.5.7.9.11_s2_e6_c200_se0.5_nsw', 'ir_r2_k3.5.7.9_p1.1_s1_e6_c200_se0.5_nsw'], # swish
# 7x7
]
model_kwargs = dict(
block_args=decode_arch_def(arch_def),
num_features=1536,
stem_size=16,
channel_multiplier=channel_multiplier,
norm_kwargs=resolve_bn_args(kwargs),
**kwargs
)
model = _create_model(model_kwargs, default_cfgs[variant], pretrained)
return model
def _gen_mixnet_m(variant, channel_multiplier=1.0, depth_multiplier=1.0, pretrained=False, **kwargs):
"""Creates a MixNet Medium-Large model.
Ref impl: https://github.com/tensorflow/tpu/tree/master/models/official/mnasnet/mixnet
Paper: https://arxiv.org/abs/1907.09595
"""
arch_def = [
# stage 0, 112x112 in
['ds_r1_k3_s1_e1_c24'], # relu
# stage 1, 112x112 in
['ir_r1_k3.5.7_a1.1_p1.1_s2_e6_c32', 'ir_r1_k3_a1.1_p1.1_s1_e3_c32'], # relu
# stage 2, 56x56 in
['ir_r1_k3.5.7.9_s2_e6_c40_se0.5_nsw', 'ir_r3_k3.5_a1.1_p1.1_s1_e6_c40_se0.5_nsw'], # swish
# stage 3, 28x28 in
['ir_r1_k3.5.7_s2_e6_c80_se0.25_nsw', 'ir_r3_k3.5.7.9_a1.1_p1.1_s1_e6_c80_se0.25_nsw'], # swish
# stage 4, 14x14in
['ir_r1_k3_s1_e6_c120_se0.5_nsw', 'ir_r3_k3.5.7.9_a1.1_p1.1_s1_e3_c120_se0.5_nsw'], # swish
# stage 5, 14x14in
['ir_r1_k3.5.7.9_s2_e6_c200_se0.5_nsw', 'ir_r3_k3.5.7.9_p1.1_s1_e6_c200_se0.5_nsw'], # swish
# 7x7
]
model_kwargs = dict(
block_args=decode_arch_def(arch_def, depth_multiplier, depth_trunc='round'),
num_features=1536,
stem_size=24,
channel_multiplier=channel_multiplier,
norm_kwargs=resolve_bn_args(kwargs),
**kwargs
)
model = _create_model(model_kwargs, default_cfgs[variant], pretrained)
return model
def mnasnet_050(pretrained=False, **kwargs):
""" MNASNet B1, depth multiplier of 0.5. """
model = _gen_mnasnet_b1('mnasnet_050', 0.5, pretrained=pretrained, **kwargs)
return model
def mnasnet_075(pretrained=False, **kwargs):
""" MNASNet B1, depth multiplier of 0.75. """
model = _gen_mnasnet_b1('mnasnet_075', 0.75, pretrained=pretrained, **kwargs)
return model
def mnasnet_100(pretrained=False, **kwargs):
""" MNASNet B1, depth multiplier of 1.0. """
model = _gen_mnasnet_b1('mnasnet_100', 1.0, pretrained=pretrained, **kwargs)
return model
def mnasnet_b1(pretrained=False, **kwargs):
""" MNASNet B1, depth multiplier of 1.0. """
return mnasnet_100(pretrained, **kwargs)
def mnasnet_140(pretrained=False, **kwargs):
""" MNASNet B1, depth multiplier of 1.4 """
model = _gen_mnasnet_b1('mnasnet_140', 1.4, pretrained=pretrained, **kwargs)
return model
def semnasnet_050(pretrained=False, **kwargs):
""" MNASNet A1 (w/ SE), depth multiplier of 0.5 """
model = _gen_mnasnet_a1('semnasnet_050', 0.5, pretrained=pretrained, **kwargs)
return model
def semnasnet_075(pretrained=False, **kwargs):
""" MNASNet A1 (w/ SE), depth multiplier of 0.75. """
model = _gen_mnasnet_a1('semnasnet_075', 0.75, pretrained=pretrained, **kwargs)
return model
def semnasnet_100(pretrained=False, **kwargs):
""" MNASNet A1 (w/ SE), depth multiplier of 1.0. """
model = _gen_mnasnet_a1('semnasnet_100', 1.0, pretrained=pretrained, **kwargs)
return model
def mnasnet_a1(pretrained=False, **kwargs):
""" MNASNet A1 (w/ SE), depth multiplier of 1.0. """
return semnasnet_100(pretrained, **kwargs)
def semnasnet_140(pretrained=False, **kwargs):
""" MNASNet A1 (w/ SE), depth multiplier of 1.4. """
model = _gen_mnasnet_a1('semnasnet_140', 1.4, pretrained=pretrained, **kwargs)
return model
def mnasnet_small(pretrained=False, **kwargs):
""" MNASNet Small, depth multiplier of 1.0. """
model = _gen_mnasnet_small('mnasnet_small', 1.0, pretrained=pretrained, **kwargs)
return model
def mobilenetv2_100(pretrained=False, **kwargs):
""" MobileNet V2 w/ 1.0 channel multiplier """
model = _gen_mobilenet_v2('mobilenetv2_100', 1.0, pretrained=pretrained, **kwargs)
return model
def mobilenetv2_140(pretrained=False, **kwargs):
""" MobileNet V2 w/ 1.4 channel multiplier """
model = _gen_mobilenet_v2('mobilenetv2_140', 1.4, pretrained=pretrained, **kwargs)
return model
def mobilenetv2_110d(pretrained=False, **kwargs):
""" MobileNet V2 w/ 1.1 channel, 1.2 depth multipliers"""
model = _gen_mobilenet_v2(
'mobilenetv2_110d', 1.1, depth_multiplier=1.2, fix_stem_head=True, pretrained=pretrained, **kwargs)
return model
def mobilenetv2_120d(pretrained=False, **kwargs):
""" MobileNet V2 w/ 1.2 channel, 1.4 depth multipliers """
model = _gen_mobilenet_v2(
'mobilenetv2_120d', 1.2, depth_multiplier=1.4, fix_stem_head=True, pretrained=pretrained, **kwargs)
return model
def fbnetc_100(pretrained=False, **kwargs):
""" FBNet-C """
if pretrained:
# pretrained model trained with non-default BN epsilon
kwargs['bn_eps'] = BN_EPS_TF_DEFAULT
model = _gen_fbnetc('fbnetc_100', 1.0, pretrained=pretrained, **kwargs)
return model
def spnasnet_100(pretrained=False, **kwargs):
""" Single-Path NAS Pixel1"""
model = _gen_spnasnet('spnasnet_100', 1.0, pretrained=pretrained, **kwargs)
return model
def efficientnet_b0(pretrained=False, **kwargs):
""" EfficientNet-B0 """
# NOTE for train, drop_rate should be 0.2, drop_path_rate should be 0.2
model = _gen_efficientnet(
'efficientnet_b0', channel_multiplier=1.0, depth_multiplier=1.0, pretrained=pretrained, **kwargs)
return model
def efficientnet_b1(pretrained=False, **kwargs):
""" EfficientNet-B1 """
# NOTE for train, drop_rate should be 0.2, drop_path_rate should be 0.2
model = _gen_efficientnet(
'efficientnet_b1', channel_multiplier=1.0, depth_multiplier=1.1, pretrained=pretrained, **kwargs)
return model
def efficientnet_b2(pretrained=False, **kwargs):
""" EfficientNet-B2 """
# NOTE for train, drop_rate should be 0.3, drop_path_rate should be 0.2
model = _gen_efficientnet(
'efficientnet_b2', channel_multiplier=1.1, depth_multiplier=1.2, pretrained=pretrained, **kwargs)
return model
def efficientnet_b2a(pretrained=False, **kwargs):
""" EfficientNet-B2 @ 288x288 w/ 1.0 test crop"""
# NOTE for train, drop_rate should be 0.3, drop_path_rate should be 0.2
model = _gen_efficientnet(
'efficientnet_b2a', channel_multiplier=1.1, depth_multiplier=1.2, pretrained=pretrained, **kwargs)
return model
def efficientnet_b3(pretrained=False, **kwargs):
""" EfficientNet-B3 """
# NOTE for train, drop_rate should be 0.3, drop_path_rate should be 0.2
model = _gen_efficientnet(
'efficientnet_b3', channel_multiplier=1.2, depth_multiplier=1.4, pretrained=pretrained, **kwargs)
return model
def efficientnet_b3a(pretrained=False, **kwargs):
""" EfficientNet-B3 @ 320x320 w/ 1.0 test crop-pct """
# NOTE for train, drop_rate should be 0.3, drop_path_rate should be 0.2
model = _gen_efficientnet(
'efficientnet_b3a', channel_multiplier=1.2, depth_multiplier=1.4, pretrained=pretrained, **kwargs)
return model
def efficientnet_b4(pretrained=False, **kwargs):
""" EfficientNet-B4 """
# NOTE for train, drop_rate should be 0.4, drop_path_rate should be 0.2
model = _gen_efficientnet(
'efficientnet_b4', channel_multiplier=1.4, depth_multiplier=1.8, pretrained=pretrained, **kwargs)
return model
def efficientnet_b5(pretrained=False, **kwargs):
""" EfficientNet-B5 """
# NOTE for train, drop_rate should be 0.4, drop_path_rate should be 0.2
model = _gen_efficientnet(
'efficientnet_b5', channel_multiplier=1.6, depth_multiplier=2.2, pretrained=pretrained, **kwargs)
return model
def efficientnet_b6(pretrained=False, **kwargs):
""" EfficientNet-B6 """
# NOTE for train, drop_rate should be 0.5, drop_path_rate should be 0.2
model = _gen_efficientnet(
'efficientnet_b6', channel_multiplier=1.8, depth_multiplier=2.6, pretrained=pretrained, **kwargs)
return model
def efficientnet_b7(pretrained=False, **kwargs):
""" EfficientNet-B7 """
# NOTE for train, drop_rate should be 0.5, drop_path_rate should be 0.2
model = _gen_efficientnet(
'efficientnet_b7', channel_multiplier=2.0, depth_multiplier=3.1, pretrained=pretrained, **kwargs)
return model
def efficientnet_b8(pretrained=False, **kwargs):
""" EfficientNet-B8 """
# NOTE for train, drop_rate should be 0.5, drop_path_rate should be 0.2
model = _gen_efficientnet(
'efficientnet_b8', channel_multiplier=2.2, depth_multiplier=3.6, pretrained=pretrained, **kwargs)
return model
def efficientnet_l2(pretrained=False, **kwargs):
""" EfficientNet-L2."""
# NOTE for train, drop_rate should be 0.5, drop_path_rate should be 0.2
model = _gen_efficientnet(
'efficientnet_l2', channel_multiplier=4.3, depth_multiplier=5.3, pretrained=pretrained, **kwargs)
return model
def efficientnet_es(pretrained=False, **kwargs):
""" EfficientNet-Edge Small. """
model = _gen_efficientnet_edge(
'efficientnet_es', channel_multiplier=1.0, depth_multiplier=1.0, pretrained=pretrained, **kwargs)
return model
def efficientnet_em(pretrained=False, **kwargs):
""" EfficientNet-Edge-Medium. """
model = _gen_efficientnet_edge(
'efficientnet_em', channel_multiplier=1.0, depth_multiplier=1.1, pretrained=pretrained, **kwargs)
return model
def efficientnet_el(pretrained=False, **kwargs):
""" EfficientNet-Edge-Large. """
model = _gen_efficientnet_edge(
'efficientnet_el', channel_multiplier=1.2, depth_multiplier=1.4, pretrained=pretrained, **kwargs)
return model
def efficientnet_cc_b0_4e(pretrained=False, **kwargs):
""" EfficientNet-CondConv-B0 w/ 8 Experts """
# NOTE for train, drop_rate should be 0.2, drop_path_rate should be 0.2
model = _gen_efficientnet_condconv(
'efficientnet_cc_b0_4e', channel_multiplier=1.0, depth_multiplier=1.0, pretrained=pretrained, **kwargs)
return model
def efficientnet_cc_b0_8e(pretrained=False, **kwargs):
""" EfficientNet-CondConv-B0 w/ 8 Experts """
# NOTE for train, drop_rate should be 0.2, drop_path_rate should be 0.2
model = _gen_efficientnet_condconv(
'efficientnet_cc_b0_8e', channel_multiplier=1.0, depth_multiplier=1.0, experts_multiplier=2,
pretrained=pretrained, **kwargs)
return model
def efficientnet_cc_b1_8e(pretrained=False, **kwargs):
""" EfficientNet-CondConv-B1 w/ 8 Experts """
# NOTE for train, drop_rate should be 0.2, drop_path_rate should be 0.2
model = _gen_efficientnet_condconv(
'efficientnet_cc_b1_8e', channel_multiplier=1.0, depth_multiplier=1.1, experts_multiplier=2,
pretrained=pretrained, **kwargs)
return model
def efficientnet_lite0(pretrained=False, **kwargs):
""" EfficientNet-Lite0 """
# NOTE for train, drop_rate should be 0.2, drop_path_rate should be 0.2
model = _gen_efficientnet_lite(
'efficientnet_lite0', channel_multiplier=1.0, depth_multiplier=1.0, pretrained=pretrained, **kwargs)
return model
def efficientnet_lite1(pretrained=False, **kwargs):
""" EfficientNet-Lite1 """
# NOTE for train, drop_rate should be 0.2, drop_path_rate should be 0.2
model = _gen_efficientnet_lite(
'efficientnet_lite1', channel_multiplier=1.0, depth_multiplier=1.1, pretrained=pretrained, **kwargs)
return model
def efficientnet_lite2(pretrained=False, **kwargs):
""" EfficientNet-Lite2 """
# NOTE for train, drop_rate should be 0.3, drop_path_rate should be 0.2
model = _gen_efficientnet_lite(
'efficientnet_lite2', channel_multiplier=1.1, depth_multiplier=1.2, pretrained=pretrained, **kwargs)
return model
def efficientnet_lite3(pretrained=False, **kwargs):
""" EfficientNet-Lite3 """
# NOTE for train, drop_rate should be 0.3, drop_path_rate should be 0.2
model = _gen_efficientnet_lite(
'efficientnet_lite3', channel_multiplier=1.2, depth_multiplier=1.4, pretrained=pretrained, **kwargs)
return model
def efficientnet_lite4(pretrained=False, **kwargs):
""" EfficientNet-Lite4 """
# NOTE for train, drop_rate should be 0.4, drop_path_rate should be 0.2
model = _gen_efficientnet_lite(
'efficientnet_lite4', channel_multiplier=1.4, depth_multiplier=1.8, pretrained=pretrained, **kwargs)
return model
def efficientnet_b1_pruned(pretrained=False, **kwargs):
""" EfficientNet-B1 Pruned. The pruning has been obtained using https://arxiv.org/pdf/2002.08258.pdf """
kwargs['bn_eps'] = BN_EPS_TF_DEFAULT
kwargs['pad_type'] = 'same'
variant = 'efficientnet_b1_pruned'
model = _gen_efficientnet(
variant, channel_multiplier=1.0, depth_multiplier=1.1, pretrained=pretrained, **kwargs)
return model
def efficientnet_b2_pruned(pretrained=False, **kwargs):
""" EfficientNet-B2 Pruned. The pruning has been obtained using https://arxiv.org/pdf/2002.08258.pdf """
kwargs['bn_eps'] = BN_EPS_TF_DEFAULT
kwargs['pad_type'] = 'same'
model = _gen_efficientnet(
'efficientnet_b2_pruned', channel_multiplier=1.1, depth_multiplier=1.2, pretrained=pretrained, **kwargs)
return model
def efficientnet_b3_pruned(pretrained=False, **kwargs):
""" EfficientNet-B3 Pruned. The pruning has been obtained using https://arxiv.org/pdf/2002.08258.pdf """
kwargs['bn_eps'] = BN_EPS_TF_DEFAULT
kwargs['pad_type'] = 'same'
model = _gen_efficientnet(
'efficientnet_b3_pruned', channel_multiplier=1.2, depth_multiplier=1.4, pretrained=pretrained, **kwargs)
return model
def tf_efficientnet_b0(pretrained=False, **kwargs):
""" EfficientNet-B0. Tensorflow compatible variant """
kwargs['bn_eps'] = BN_EPS_TF_DEFAULT
kwargs['pad_type'] = 'same'
model = _gen_efficientnet(
'tf_efficientnet_b0', channel_multiplier=1.0, depth_multiplier=1.0, pretrained=pretrained, **kwargs)
return model
def tf_efficientnet_b1(pretrained=False, **kwargs):
""" EfficientNet-B1. Tensorflow compatible variant """
kwargs['bn_eps'] = BN_EPS_TF_DEFAULT
kwargs['pad_type'] = 'same'
model = _gen_efficientnet(
'tf_efficientnet_b1', channel_multiplier=1.0, depth_multiplier=1.1, pretrained=pretrained, **kwargs)
return model
def tf_efficientnet_b2(pretrained=False, **kwargs):
""" EfficientNet-B2. Tensorflow compatible variant """
kwargs['bn_eps'] = BN_EPS_TF_DEFAULT
kwargs['pad_type'] = 'same'
model = _gen_efficientnet(
'tf_efficientnet_b2', channel_multiplier=1.1, depth_multiplier=1.2, pretrained=pretrained, **kwargs)
return model
def tf_efficientnet_b3(pretrained=False, **kwargs):
""" EfficientNet-B3. Tensorflow compatible variant """
kwargs['bn_eps'] = BN_EPS_TF_DEFAULT
kwargs['pad_type'] = 'same'
model = _gen_efficientnet(
'tf_efficientnet_b3', channel_multiplier=1.2, depth_multiplier=1.4, pretrained=pretrained, **kwargs)
return model
def tf_efficientnet_b4(pretrained=False, **kwargs):
""" EfficientNet-B4. Tensorflow compatible variant """
kwargs['bn_eps'] = BN_EPS_TF_DEFAULT
kwargs['pad_type'] = 'same'
model = _gen_efficientnet(
'tf_efficientnet_b4', channel_multiplier=1.4, depth_multiplier=1.8, pretrained=pretrained, **kwargs)
return model
def tf_efficientnet_b5(pretrained=False, **kwargs):
""" EfficientNet-B5. Tensorflow compatible variant """
kwargs['bn_eps'] = BN_EPS_TF_DEFAULT
kwargs['pad_type'] = 'same'
model = _gen_efficientnet(
'tf_efficientnet_b5', channel_multiplier=1.6, depth_multiplier=2.2, pretrained=pretrained, **kwargs)
return model
def tf_efficientnet_b6(pretrained=False, **kwargs):
""" EfficientNet-B6. Tensorflow compatible variant """
# NOTE for train, drop_rate should be 0.5
kwargs['bn_eps'] = BN_EPS_TF_DEFAULT
kwargs['pad_type'] = 'same'
model = _gen_efficientnet(
'tf_efficientnet_b6', channel_multiplier=1.8, depth_multiplier=2.6, pretrained=pretrained, **kwargs)
return model
def tf_efficientnet_b7(pretrained=False, **kwargs):
""" EfficientNet-B7. Tensorflow compatible variant """
# NOTE for train, drop_rate should be 0.5
kwargs['bn_eps'] = BN_EPS_TF_DEFAULT
kwargs['pad_type'] = 'same'
model = _gen_efficientnet(
'tf_efficientnet_b7', channel_multiplier=2.0, depth_multiplier=3.1, pretrained=pretrained, **kwargs)
return model
def tf_efficientnet_b8(pretrained=False, **kwargs):
""" EfficientNet-B8. Tensorflow compatible variant """
# NOTE for train, drop_rate should be 0.5
kwargs['bn_eps'] = BN_EPS_TF_DEFAULT
kwargs['pad_type'] = 'same'
model = _gen_efficientnet(
'tf_efficientnet_b8', channel_multiplier=2.2, depth_multiplier=3.6, pretrained=pretrained, **kwargs)
return model
def tf_efficientnet_b0_ap(pretrained=False, **kwargs):
""" EfficientNet-B0 AdvProp. Tensorflow compatible variant """
kwargs['bn_eps'] = BN_EPS_TF_DEFAULT
kwargs['pad_type'] = 'same'
model = _gen_efficientnet(
'tf_efficientnet_b0_ap', channel_multiplier=1.0, depth_multiplier=1.0, pretrained=pretrained, **kwargs)
return model
def tf_efficientnet_b1_ap(pretrained=False, **kwargs):
""" EfficientNet-B1 AdvProp. Tensorflow compatible variant """
kwargs['bn_eps'] = BN_EPS_TF_DEFAULT
kwargs['pad_type'] = 'same'
model = _gen_efficientnet(
'tf_efficientnet_b1_ap', channel_multiplier=1.0, depth_multiplier=1.1, pretrained=pretrained, **kwargs)
return model
def tf_efficientnet_b2_ap(pretrained=False, **kwargs):
""" EfficientNet-B2 AdvProp. Tensorflow compatible variant """
kwargs['bn_eps'] = BN_EPS_TF_DEFAULT
kwargs['pad_type'] = 'same'
model = _gen_efficientnet(
'tf_efficientnet_b2_ap', channel_multiplier=1.1, depth_multiplier=1.2, pretrained=pretrained, **kwargs)
return model
def tf_efficientnet_b3_ap(pretrained=False, **kwargs):
""" EfficientNet-B3 AdvProp. Tensorflow compatible variant """
kwargs['bn_eps'] = BN_EPS_TF_DEFAULT
kwargs['pad_type'] = 'same'
model = _gen_efficientnet(
'tf_efficientnet_b3_ap', channel_multiplier=1.2, depth_multiplier=1.4, pretrained=pretrained, **kwargs)
return model
def tf_efficientnet_b4_ap(pretrained=False, **kwargs):
""" EfficientNet-B4 AdvProp. Tensorflow compatible variant """
kwargs['bn_eps'] = BN_EPS_TF_DEFAULT
kwargs['pad_type'] = 'same'
model = _gen_efficientnet(
'tf_efficientnet_b4_ap', channel_multiplier=1.4, depth_multiplier=1.8, pretrained=pretrained, **kwargs)
return model
def tf_efficientnet_b5_ap(pretrained=False, **kwargs):
""" EfficientNet-B5 AdvProp. Tensorflow compatible variant """
kwargs['bn_eps'] = BN_EPS_TF_DEFAULT
kwargs['pad_type'] = 'same'
model = _gen_efficientnet(
'tf_efficientnet_b5_ap', channel_multiplier=1.6, depth_multiplier=2.2, pretrained=pretrained, **kwargs)
return model
def tf_efficientnet_b6_ap(pretrained=False, **kwargs):
""" EfficientNet-B6 AdvProp. Tensorflow compatible variant """
# NOTE for train, drop_rate should be 0.5
kwargs['bn_eps'] = BN_EPS_TF_DEFAULT
kwargs['pad_type'] = 'same'
model = _gen_efficientnet(
'tf_efficientnet_b6_ap', channel_multiplier=1.8, depth_multiplier=2.6, pretrained=pretrained, **kwargs)
return model
def tf_efficientnet_b7_ap(pretrained=False, **kwargs):
""" EfficientNet-B7 AdvProp. Tensorflow compatible variant """
# NOTE for train, drop_rate should be 0.5
kwargs['bn_eps'] = BN_EPS_TF_DEFAULT
kwargs['pad_type'] = 'same'
model = _gen_efficientnet(
'tf_efficientnet_b7_ap', channel_multiplier=2.0, depth_multiplier=3.1, pretrained=pretrained, **kwargs)
return model
def tf_efficientnet_b8_ap(pretrained=False, **kwargs):
""" EfficientNet-B8 AdvProp. Tensorflow compatible variant """
# NOTE for train, drop_rate should be 0.5
kwargs['bn_eps'] = BN_EPS_TF_DEFAULT
kwargs['pad_type'] = 'same'
model = _gen_efficientnet(
'tf_efficientnet_b8_ap', channel_multiplier=2.2, depth_multiplier=3.6, pretrained=pretrained, **kwargs)
return model
def tf_efficientnet_b0_ns(pretrained=False, **kwargs):
""" EfficientNet-B0 NoisyStudent. Tensorflow compatible variant """
kwargs['bn_eps'] = BN_EPS_TF_DEFAULT
kwargs['pad_type'] = 'same'
model = _gen_efficientnet(
'tf_efficientnet_b0_ns', channel_multiplier=1.0, depth_multiplier=1.0, pretrained=pretrained, **kwargs)
return model
def tf_efficientnet_b1_ns(pretrained=False, **kwargs):
""" EfficientNet-B1 NoisyStudent. Tensorflow compatible variant """
kwargs['bn_eps'] = BN_EPS_TF_DEFAULT
kwargs['pad_type'] = 'same'
model = _gen_efficientnet(
'tf_efficientnet_b1_ns', channel_multiplier=1.0, depth_multiplier=1.1, pretrained=pretrained, **kwargs)
return model
def tf_efficientnet_b2_ns(pretrained=False, **kwargs):
""" EfficientNet-B2 NoisyStudent. Tensorflow compatible variant """
kwargs['bn_eps'] = BN_EPS_TF_DEFAULT
kwargs['pad_type'] = 'same'
model = _gen_efficientnet(
'tf_efficientnet_b2_ns', channel_multiplier=1.1, depth_multiplier=1.2, pretrained=pretrained, **kwargs)
return model
def tf_efficientnet_b3_ns(pretrained=False, **kwargs):
""" EfficientNet-B3 NoisyStudent. Tensorflow compatible variant """
kwargs['bn_eps'] = BN_EPS_TF_DEFAULT
kwargs['pad_type'] = 'same'
model = _gen_efficientnet(
'tf_efficientnet_b3_ns', channel_multiplier=1.2, depth_multiplier=1.4, pretrained=pretrained, **kwargs)
return model
def tf_efficientnet_b4_ns(pretrained=False, **kwargs):
""" EfficientNet-B4 NoisyStudent. Tensorflow compatible variant """
kwargs['bn_eps'] = BN_EPS_TF_DEFAULT
kwargs['pad_type'] = 'same'
model = _gen_efficientnet(
'tf_efficientnet_b4_ns', channel_multiplier=1.4, depth_multiplier=1.8, pretrained=pretrained, **kwargs)
return model
def tf_efficientnet_b5_ns(pretrained=False, **kwargs):
""" EfficientNet-B5 NoisyStudent. Tensorflow compatible variant """
kwargs['bn_eps'] = BN_EPS_TF_DEFAULT
kwargs['pad_type'] = 'same'
model = _gen_efficientnet(
'tf_efficientnet_b5_ns', channel_multiplier=1.6, depth_multiplier=2.2, pretrained=pretrained, **kwargs)
return model
def tf_efficientnet_b6_ns(pretrained=False, **kwargs):
""" EfficientNet-B6 NoisyStudent. Tensorflow compatible variant """
# NOTE for train, drop_rate should be 0.5
kwargs['bn_eps'] = BN_EPS_TF_DEFAULT
kwargs['pad_type'] = 'same'
model = _gen_efficientnet(
'tf_efficientnet_b6_ns', channel_multiplier=1.8, depth_multiplier=2.6, pretrained=pretrained, **kwargs)
return model
def tf_efficientnet_b7_ns(pretrained=False, **kwargs):
""" EfficientNet-B7 NoisyStudent. Tensorflow compatible variant """
# NOTE for train, drop_rate should be 0.5
kwargs['bn_eps'] = BN_EPS_TF_DEFAULT
kwargs['pad_type'] = 'same'
model = _gen_efficientnet(
'tf_efficientnet_b7_ns', channel_multiplier=2.0, depth_multiplier=3.1, pretrained=pretrained, **kwargs)
return model
def tf_efficientnet_l2_ns_475(pretrained=False, **kwargs):
""" EfficientNet-L2 NoisyStudent @ 475x475. Tensorflow compatible variant """
# NOTE for train, drop_rate should be 0.5
kwargs['bn_eps'] = BN_EPS_TF_DEFAULT
kwargs['pad_type'] = 'same'
model = _gen_efficientnet(
'tf_efficientnet_l2_ns_475', channel_multiplier=4.3, depth_multiplier=5.3, pretrained=pretrained, **kwargs)
return model
def tf_efficientnet_l2_ns(pretrained=False, **kwargs):
""" EfficientNet-L2 NoisyStudent. Tensorflow compatible variant """
# NOTE for train, drop_rate should be 0.5
kwargs['bn_eps'] = BN_EPS_TF_DEFAULT
kwargs['pad_type'] = 'same'
model = _gen_efficientnet(
'tf_efficientnet_l2_ns', channel_multiplier=4.3, depth_multiplier=5.3, pretrained=pretrained, **kwargs)
return model
def tf_efficientnet_es(pretrained=False, **kwargs):
""" EfficientNet-Edge Small. Tensorflow compatible variant """
kwargs['bn_eps'] = BN_EPS_TF_DEFAULT
kwargs['pad_type'] = 'same'
model = _gen_efficientnet_edge(
'tf_efficientnet_es', channel_multiplier=1.0, depth_multiplier=1.0, pretrained=pretrained, **kwargs)
return model
def tf_efficientnet_em(pretrained=False, **kwargs):
""" EfficientNet-Edge-Medium. Tensorflow compatible variant """
kwargs['bn_eps'] = BN_EPS_TF_DEFAULT
kwargs['pad_type'] = 'same'
model = _gen_efficientnet_edge(
'tf_efficientnet_em', channel_multiplier=1.0, depth_multiplier=1.1, pretrained=pretrained, **kwargs)
return model
def tf_efficientnet_el(pretrained=False, **kwargs):
""" EfficientNet-Edge-Large. Tensorflow compatible variant """
kwargs['bn_eps'] = BN_EPS_TF_DEFAULT
kwargs['pad_type'] = 'same'
model = _gen_efficientnet_edge(
'tf_efficientnet_el', channel_multiplier=1.2, depth_multiplier=1.4, pretrained=pretrained, **kwargs)
return model
def tf_efficientnet_cc_b0_4e(pretrained=False, **kwargs):
""" EfficientNet-CondConv-B0 w/ 4 Experts. Tensorflow compatible variant """
# NOTE for train, drop_rate should be 0.2, drop_path_rate should be 0.2
kwargs['bn_eps'] = BN_EPS_TF_DEFAULT
kwargs['pad_type'] = 'same'
model = _gen_efficientnet_condconv(
'tf_efficientnet_cc_b0_4e', channel_multiplier=1.0, depth_multiplier=1.0, pretrained=pretrained, **kwargs)
return model
def tf_efficientnet_cc_b0_8e(pretrained=False, **kwargs):
""" EfficientNet-CondConv-B0 w/ 8 Experts. Tensorflow compatible variant """
# NOTE for train, drop_rate should be 0.2, drop_path_rate should be 0.2
kwargs['bn_eps'] = BN_EPS_TF_DEFAULT
kwargs['pad_type'] = 'same'
model = _gen_efficientnet_condconv(
'tf_efficientnet_cc_b0_8e', channel_multiplier=1.0, depth_multiplier=1.0, experts_multiplier=2,
pretrained=pretrained, **kwargs)
return model
def tf_efficientnet_cc_b1_8e(pretrained=False, **kwargs):
""" EfficientNet-CondConv-B1 w/ 8 Experts. Tensorflow compatible variant """
# NOTE for train, drop_rate should be 0.2, drop_path_rate should be 0.2
kwargs['bn_eps'] = BN_EPS_TF_DEFAULT
kwargs['pad_type'] = 'same'
model = _gen_efficientnet_condconv(
'tf_efficientnet_cc_b1_8e', channel_multiplier=1.0, depth_multiplier=1.1, experts_multiplier=2,
pretrained=pretrained, **kwargs)
return model
def tf_efficientnet_lite0(pretrained=False, **kwargs):
""" EfficientNet-Lite0 """
# NOTE for train, drop_rate should be 0.2, drop_path_rate should be 0.2
kwargs['bn_eps'] = BN_EPS_TF_DEFAULT
kwargs['pad_type'] = 'same'
model = _gen_efficientnet_lite(
'tf_efficientnet_lite0', channel_multiplier=1.0, depth_multiplier=1.0, pretrained=pretrained, **kwargs)
return model
def tf_efficientnet_lite1(pretrained=False, **kwargs):
""" EfficientNet-Lite1 """
# NOTE for train, drop_rate should be 0.2, drop_path_rate should be 0.2
kwargs['bn_eps'] = BN_EPS_TF_DEFAULT
kwargs['pad_type'] = 'same'
model = _gen_efficientnet_lite(
'tf_efficientnet_lite1', channel_multiplier=1.0, depth_multiplier=1.1, pretrained=pretrained, **kwargs)
return model
def tf_efficientnet_lite2(pretrained=False, **kwargs):
""" EfficientNet-Lite2 """
# NOTE for train, drop_rate should be 0.3, drop_path_rate should be 0.2
kwargs['bn_eps'] = BN_EPS_TF_DEFAULT
kwargs['pad_type'] = 'same'
model = _gen_efficientnet_lite(
'tf_efficientnet_lite2', channel_multiplier=1.1, depth_multiplier=1.2, pretrained=pretrained, **kwargs)
return model
def tf_efficientnet_lite3(pretrained=False, **kwargs):
""" EfficientNet-Lite3 """
# NOTE for train, drop_rate should be 0.3, drop_path_rate should be 0.2
kwargs['bn_eps'] = BN_EPS_TF_DEFAULT
kwargs['pad_type'] = 'same'
model = _gen_efficientnet_lite(
'tf_efficientnet_lite3', channel_multiplier=1.2, depth_multiplier=1.4, pretrained=pretrained, **kwargs)
return model
def tf_efficientnet_lite4(pretrained=False, **kwargs):
""" EfficientNet-Lite4 """
# NOTE for train, drop_rate should be 0.4, drop_path_rate should be 0.2
kwargs['bn_eps'] = BN_EPS_TF_DEFAULT
kwargs['pad_type'] = 'same'
model = _gen_efficientnet_lite(
'tf_efficientnet_lite4', channel_multiplier=1.4, depth_multiplier=1.8, pretrained=pretrained, **kwargs)
return model
def mixnet_s(pretrained=False, **kwargs):
"""Creates a MixNet Small model.
"""
model = _gen_mixnet_s(
'mixnet_s', channel_multiplier=1.0, pretrained=pretrained, **kwargs)
return model
def mixnet_m(pretrained=False, **kwargs):
"""Creates a MixNet Medium model.
"""
model = _gen_mixnet_m(
'mixnet_m', channel_multiplier=1.0, pretrained=pretrained, **kwargs)
return model
def mixnet_l(pretrained=False, **kwargs):
"""Creates a MixNet Large model.
"""
model = _gen_mixnet_m(
'mixnet_l', channel_multiplier=1.3, pretrained=pretrained, **kwargs)
return model
def mixnet_xl(pretrained=False, **kwargs):
"""Creates a MixNet Extra-Large model.
Not a paper spec, experimental def by RW w/ depth scaling.
"""
model = _gen_mixnet_m(
'mixnet_xl', channel_multiplier=1.6, depth_multiplier=1.2, pretrained=pretrained, **kwargs)
return model
def mixnet_xxl(pretrained=False, **kwargs):
"""Creates a MixNet Double Extra Large model.
Not a paper spec, experimental def by RW w/ depth scaling.
"""
model = _gen_mixnet_m(
'mixnet_xxl', channel_multiplier=2.4, depth_multiplier=1.3, pretrained=pretrained, **kwargs)
return model
def tf_mixnet_s(pretrained=False, **kwargs):
"""Creates a MixNet Small model. Tensorflow compatible variant
"""
kwargs['bn_eps'] = BN_EPS_TF_DEFAULT
kwargs['pad_type'] = 'same'
model = _gen_mixnet_s(
'tf_mixnet_s', channel_multiplier=1.0, pretrained=pretrained, **kwargs)
return model
def tf_mixnet_m(pretrained=False, **kwargs):
"""Creates a MixNet Medium model. Tensorflow compatible variant
"""
kwargs['bn_eps'] = BN_EPS_TF_DEFAULT
kwargs['pad_type'] = 'same'
model = _gen_mixnet_m(
'tf_mixnet_m', channel_multiplier=1.0, pretrained=pretrained, **kwargs)
return model
def tf_mixnet_l(pretrained=False, **kwargs):
"""Creates a MixNet Large model. Tensorflow compatible variant
"""
kwargs['bn_eps'] = BN_EPS_TF_DEFAULT
kwargs['pad_type'] = 'same'
model = _gen_mixnet_m(
'tf_mixnet_l', channel_multiplier=1.3, pretrained=pretrained, **kwargs)
return model
@BACKBONES.register_module
class SSDEFFB0(nn.Module):
def __init__(self, input_size, width_mult=1.0,
activation_type='relu',
single_scale=False):
super(SSDEFFB0, self).__init__()
self.input_size = input_size
self.single_scale = single_scale
self.width_mult = width_mult
self.backbone = _gen_efficientnet('efficientnet_b0', channel_multiplier=1.0, depth_multiplier=1.0, pretrained=True, features_only=True)
# del self.backbone.blocks[3][2]
for m in self.backbone.modules():
if isinstance(m, nn.BatchNorm2d):
m.eval()
m.weight.requires_grad = False
m.bias.requires_grad = False
        # output width of the backbone's last block; consumed by the extra conv stack below
        self.last_channel = self.backbone.blocks[-1][-1].conv.out_channels
        # building last several layers
self.extra_convs = []
if not self.single_scale:
self.extra_convs.append(conv_1x1_bn(self.last_channel, 1280,
act_fn=Swish))
self.extra_convs.append(conv_1x1_bn(1280, 256,
act_fn=Swish))
self.extra_convs.append(conv_bn(256, 256, 2, groups=256,
act_fn=Swish))
self.extra_convs.append(conv_1x1_bn(256, 512, groups=1,
act_fn=Swish))
self.extra_convs.append(conv_1x1_bn(512, 128,
act_fn=Swish))
self.extra_convs.append(conv_bn(128, 128, 2, groups=128,
act_fn=Swish))
self.extra_convs.append(conv_1x1_bn(128, 256,
act_fn=Swish))
self.extra_convs.append(conv_1x1_bn(256, 128,
act_fn=Swish))
self.extra_convs.append(conv_bn(128, 128, 2, groups=128,
act_fn=Swish))
self.extra_convs.append(conv_1x1_bn(128, 256,
act_fn=Swish))
self.extra_convs.append(conv_1x1_bn(256, 64,
act_fn=Swish))
self.extra_convs.append(conv_bn(64, 64, 2, groups=64,
act_fn=Swish))
self.extra_convs.append(conv_1x1_bn(64, 128,
act_fn=Swish))
self.extra_convs = nn.Sequential(*self.extra_convs)
def init_weights(self, pretrained=None):
if pretrained:
state_dict = torch.load(pretrained)
state_dict = state_dict['state_dict']
self.backbone.load_state_dict(state_dict, strict=True)
else:
print("No pretrained model!")
return
def forward(self, x):
outputs = self.backbone(x)
x = outputs[-1]
outs = []
for i, conv in enumerate(self.extra_convs):
x = conv(x)
if i % 3 == 0:
outs.append(x)
if self.single_scale:
# outs.append(x)
return outputs[1:]
return tuple(outs)
# ---- Source: Cream/CDARTS/CDARTS_detection/mmdet/models/backbones/efficientnet.py (repo: Cream) ----
# --------------------------------------------------------
# Copyright (c) 2019 Jianyuan Guo ([email protected])
# --------------------------------------------------------
# from .darts_head_search import DartsHead
from .mbblock_head_search import MbblockHead
def build_search_head(cfg):
"""Build head model from config dict.
"""
if cfg is not None:
cfg_ = cfg.copy()
head_type = cfg_.pop('type')
if head_type == 'DARTS':
raise NotImplementedError
elif head_type == 'MBBlock':
return MbblockHead(**cfg_)
else:
            raise KeyError('Invalid head type {}'.format(head_type))
else:
return None
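# Usage sketch (illustrative): cfg follows mmdet's {'type': ..., **kwargs}
# convention; only behaviour visible in this file is exercised below.
def _demo_build_search_head():
    assert build_search_head(None) is None
    try:
        build_search_head(dict(type='DARTS'))
    except NotImplementedError:
        pass  # DARTS head search is stubbed out above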
# ---- Source: Cream/CDARTS/CDARTS_detection/mmdet/models/bbox_heads/auto_head/build_head.py (repo: Cream) ----
from .two_stage import TwoStageDetector
from ..registry import DETECTORS
@DETECTORS.register_module
class MaskRCNN(TwoStageDetector):
def __init__(self,
backbone,
rpn_head,
bbox_roi_extractor,
bbox_head,
mask_roi_extractor,
mask_head,
train_cfg,
test_cfg,
neck=None,
shared_head=None,
pretrained=None):
super(MaskRCNN, self).__init__(
backbone=backbone,
neck=neck,
shared_head=shared_head,
rpn_head=rpn_head,
bbox_roi_extractor=bbox_roi_extractor,
bbox_head=bbox_head,
mask_roi_extractor=mask_roi_extractor,
mask_head=mask_head,
train_cfg=train_cfg,
test_cfg=test_cfg,
pretrained=pretrained)
# ---- Source: Cream/CDARTS/CDARTS_detection/mmdet/models/detectors/mask_rcnn.py (repo: Cream) ----
import functools
import torch.nn.functional as F
def reduce_loss(loss, reduction):
"""Reduce loss as specified.
Args:
loss (Tensor): Elementwise loss tensor.
reduction (str): Options are "none", "mean" and "sum".
    Returns:
Tensor: Reduced loss tensor.
"""
reduction_enum = F._Reduction.get_enum(reduction)
# none: 0, elementwise_mean:1, sum: 2
if reduction_enum == 0:
return loss
elif reduction_enum == 1:
return loss.mean()
elif reduction_enum == 2:
return loss.sum()
def weight_reduce_loss(loss, weight=None, reduction='mean', avg_factor=None):
"""Apply element-wise weight and reduce loss.
Args:
loss (Tensor): Element-wise loss.
weight (Tensor): Element-wise weights.
reduction (str): Same as built-in losses of PyTorch.
        avg_factor (float): Average factor when computing the mean of losses.
Returns:
Tensor: Processed loss values.
"""
# if weight is specified, apply element-wise weight
if weight is not None:
loss = loss * weight
# if avg_factor is not specified, just reduce the loss
if avg_factor is None:
loss = reduce_loss(loss, reduction)
else:
# if reduction is mean, then average the loss by avg_factor
if reduction == 'mean':
loss = loss.sum() / avg_factor
# if reduction is 'none', then do nothing, otherwise raise an error
elif reduction != 'none':
raise ValueError('avg_factor can not be used with reduction="sum"')
return loss
def weighted_loss(loss_func):
"""Create a weighted version of a given loss function.
To use this decorator, the loss function must have the signature like
`loss_func(pred, target, **kwargs)`. The function only needs to compute
element-wise loss without any reduction. This decorator will add weight
and reduction arguments to the function. The decorated function will have
the signature like `loss_func(pred, target, weight=None, reduction='mean',
avg_factor=None, **kwargs)`.
:Example:
>>> @weighted_loss
>>> def l1_loss(pred, target):
>>> return (pred - target).abs()
>>> pred = torch.Tensor([0, 2, 3])
>>> target = torch.Tensor([1, 1, 1])
>>> weight = torch.Tensor([1, 0, 1])
>>> l1_loss(pred, target)
tensor(1.3333)
>>> l1_loss(pred, target, weight)
tensor(1.)
>>> l1_loss(pred, target, reduction='none')
tensor([1., 1., 2.])
>>> l1_loss(pred, target, weight, avg_factor=2)
tensor(1.5000)
"""
@functools.wraps(loss_func)
def wrapper(pred,
target,
weight=None,
reduction='mean',
avg_factor=None,
**kwargs):
# get element-wise loss
loss = loss_func(pred, target, **kwargs)
loss = weight_reduce_loss(loss, weight, reduction, avg_factor)
return loss
return wrapper
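# Minimal sketch: decorating an element-wise loss. _demo_l2_loss is a
# hypothetical example, not an mmdet loss; the decorator adds the
# weight / reduction / avg_factor arguments shown in the docstring above.
@weighted_loss
def _demo_l2_loss(pred, target):
    return (pred - target) ** 2
# _demo_l2_loss(torch.zeros(3), torch.ones(3))               -> tensor(1.)
# _demo_l2_loss(torch.zeros(3), torch.ones(3), avg_factor=2) -> tensor(1.5000)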
# ---- Source: Cream/CDARTS/CDARTS_detection/mmdet/models/losses/utils.py (repo: Cream) ----
import torch
import torch.nn as nn
import torch.nn.functional as F
from mmcv.cnn import xavier_init
from mmcv.cnn import caffe2_xavier_init
from mmdet.core import auto_fp16
from ..registry import NECKS
from ..utils import ConvModule
norm_cfg_ = {
'BN': nn.BatchNorm2d,
'SyncBN': nn.SyncBatchNorm,
'GN': nn.GroupNorm,
}
class MergingCell(nn.Module):
def __init__(self, channels=256, with_conv=True, norm_type='BN'):
super(MergingCell, self).__init__()
self.with_conv = with_conv
norm_layer = norm_cfg_[norm_type]
if self.with_conv:
self.conv_out = nn.Sequential(
nn.ReLU(inplace=True),
nn.Conv2d(channels, channels, 3, 1, 1),
norm_layer(channels)
)
def _binary_op(self, x1, x2):
raise NotImplementedError
def _resize(self, x, size):
if x.shape[-2:] == size:
return x
elif x.shape[-2:] < size:
return F.interpolate(x, size=size, mode='nearest')
else:
assert x.shape[-2] % size[-2] == 0 and x.shape[-1] % size[-1] == 0
kernel_size = x.shape[-1] // size[-1]
x = F.max_pool2d(x, kernel_size=kernel_size, stride=kernel_size)
# x = F.interpolate(x, size=size, mode='nearest')
return x
def forward(self, x1, x2, out_size):
assert x1.shape[:2] == x2.shape[:2]
assert len(out_size) == 2
x1 = self._resize(x1, out_size)
x2 = self._resize(x2, out_size)
x = self._binary_op(x1, x2)
if self.with_conv:
x = self.conv_out(x)
return x
class SumCell(MergingCell):
def _binary_op(self, x1, x2):
return x1 + x2
class GPCell(MergingCell):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.global_pool = nn.AdaptiveAvgPool2d((1, 1))
def _binary_op(self, x1, x2):
x2_att = self.global_pool(x2).sigmoid()
return x2 + x2_att * x1
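# Merging-cell sketch (illustrative): both inputs are resized to out_size
# (nearest-neighbour upsampling or stride-matched max pooling) before the
# binary op, so mixed-resolution pyramid levels can be combined directly.
def _demo_sum_cell():
    import torch
    cell = SumCell(channels=8, norm_type='BN')
    a = torch.randn(2, 8, 16, 16)
    b = torch.randn(2, 8, 32, 32)
    out = cell(a, b, out_size=(32, 32))
    assert out.shape == (2, 8, 32, 32)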
@NECKS.register_module
class NASFPN(nn.Module):
def __init__(self,
in_channels,
out_channels,
num_outs,
start_level=0,
end_level=-1,
add_extra_convs=False,
stack_times=7,
lateral_kernel=1,
norm_type='SyncBN'):
super(NASFPN, self).__init__()
assert isinstance(in_channels, list)
self.in_channels = in_channels
self.out_channels = out_channels
self.num_ins = len(in_channels)
self.num_outs = num_outs
self.stack_times = stack_times
self.norm_type = norm_type
if end_level == -1:
self.backbone_end_level = self.num_ins
assert num_outs >= self.num_ins - start_level
else:
# if end_level < inputs, no extra level is allowed
self.backbone_end_level = end_level
assert end_level <= len(in_channels)
assert num_outs == end_level - start_level
self.start_level = start_level
self.end_level = end_level
self.add_extra_convs = add_extra_convs
self.lateral_convs = nn.ModuleList()
# for i in range(self.start_level, self.backbone_end_level): # RetinaNet (1,4)
for i in range(self.start_level, self.start_level + num_outs):
in_channel = in_channels[i] if i < self.backbone_end_level else in_channels[-1]
padding = (lateral_kernel - 1) // 2
l_conv = nn.Conv2d(in_channel, out_channels, kernel_size=lateral_kernel, padding=padding)
self.lateral_convs.append(l_conv)
# add extra downsample layers (stride-2 pooling or conv)
extra_levels = num_outs - self.backbone_end_level + self.start_level
self.extra_downsamples = nn.ModuleList()
for i in range(extra_levels):
if self.add_extra_convs:
extra_conv = nn.Conv2d(in_channels[-1], in_channels[-1], 3, stride=2, padding=1)
self.extra_downsamples.append(extra_conv)
else:
self.extra_downsamples.append(nn.MaxPool2d(2, stride=2))
# add NAS FPN connections
self.fpn_stages = nn.ModuleList()
for _ in range(self.stack_times):
stage = nn.ModuleDict()
# gp(p6, p4) -> p4_1
stage['gp_64_4'] = GPCell(out_channels, norm_type=norm_type)
# sum(p4_1, p4) -> p4_2
stage['sum_44_4'] = SumCell(out_channels, norm_type=norm_type)
# sum(p4_2, p3) -> p3_out
stage['sum_43_3'] = SumCell(out_channels, norm_type=norm_type)
# sum(p3_out, p4_2) -> p4_out
stage['sum_34_4'] = SumCell(out_channels, norm_type=norm_type)
# sum(p5, gp(p4_out, p3_out)) -> p5_out
stage['gp_43_5'] = GPCell(with_conv=False)
stage['sum_55_5'] = SumCell(out_channels, norm_type=norm_type)
# sum(p7, gp(p5_out, p4_2)) -> p7_out
stage['gp_54_7'] = GPCell(with_conv=False)
stage['sum_77_7'] = SumCell(out_channels, norm_type=norm_type)
# gp(p7_out, p5_out) -> p6_out
stage['gp_75_6'] = GPCell(out_channels, norm_type=norm_type)
self.fpn_stages.append(stage)
for m in self.modules():
if isinstance(m, nn.SyncBatchNorm):
m._specify_ddp_gpu_num(1)
def init_weights(self):
for m in self.modules():
if isinstance(m, nn.Conv2d):
caffe2_xavier_init(m)
@auto_fp16()
def forward(self, inputs):
assert len(inputs) == len(self.in_channels)
# build P6-P7 on top of P5
inputs = list(inputs)
for downsample in self.extra_downsamples:
inputs.append(downsample(inputs[-1]))
# 1x1 on P3-P7
feats = [
lateral_conv(inputs[i + self.start_level])
for i, lateral_conv in enumerate(self.lateral_convs)
]
p3, p4, p5, p6, p7 = feats
for stage in self.fpn_stages:
# gp(p6, p4) -> p4_1
p4_1 = stage['gp_64_4'](p6, p4, out_size=p4.shape[-2:])
# sum(p4_1, p4) -> p4_2
p4_2 = stage['sum_44_4'](p4_1, p4, out_size=p4.shape[-2:])
# sum(p4_2, p3) -> p3_out
p3 = stage['sum_43_3'](p4_2, p3, out_size=p3.shape[-2:])
# sum(p3_out, p4_2) -> p4_out
p4 = stage['sum_34_4'](p3, p4_2, out_size=p4.shape[-2:])
# sum(p5, gp(p4_out, p3_out)) -> p5_out
p5_tmp = stage['gp_43_5'](p4, p3, out_size=p5.shape[-2:])
p5 = stage['sum_55_5'](p5, p5_tmp, out_size=p5.shape[-2:])
# sum(p7, gp(p5_out, p4_2)) -> p7_out
p7_tmp = stage['gp_54_7'](p5, p4_2, out_size=p7.shape[-2:])
p7 = stage['sum_77_7'](p7, p7_tmp, out_size=p7.shape[-2:])
# gp(p7_out, p5_out) -> p6_out
p6 = stage['gp_75_6'](p7, p5, out_size=p6.shape[-2:])
return tuple([p3, p4, p5, p6, p7])
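# --- Hedged usage sketch (not part of the original file) ---------------------
# Builds a 5-level NAS-FPN over dummy C3-C5 features; the channel counts,
# feature sizes and stack_times are illustrative (ResNet-50-like), not values
# prescribed by this file. Runnable inside the mmdet package context, e.g.:
#
#   neck = NASFPN(in_channels=[512, 1024, 2048], out_channels=256,
#                 num_outs=5, stack_times=3, norm_type='BN')
#   neck.init_weights()
#   feats = [torch.randn(1, c, s, s) for c, s in
#            [(512, 64), (1024, 32), (2048, 16)]]
#   outs = neck(feats)  # P3-P7: 64, 32, 16, 8 and 4 pixels square, 256 channels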
|
Cream/CDARTS/CDARTS_detection/mmdet/models/necks/nas_fpn.py/0
|
{
"file_path": "Cream/CDARTS/CDARTS_detection/mmdet/models/necks/nas_fpn.py",
"repo_id": "Cream",
"token_count": 3831
}
| 288 |
import numpy as np
import torch.nn as nn
def xavier_init(module, gain=1, bias=0, distribution='normal'):
assert distribution in ['uniform', 'normal']
if distribution == 'uniform':
nn.init.xavier_uniform_(module.weight, gain=gain)
else:
nn.init.xavier_normal_(module.weight, gain=gain)
if hasattr(module, 'bias'):
nn.init.constant_(module.bias, bias)
def normal_init(module, mean=0, std=1, bias=0):
nn.init.normal_(module.weight, mean, std)
if hasattr(module, 'bias'):
nn.init.constant_(module.bias, bias)
def uniform_init(module, a=0, b=1, bias=0):
nn.init.uniform_(module.weight, a, b)
if hasattr(module, 'bias'):
nn.init.constant_(module.bias, bias)
def kaiming_init(module,
mode='fan_out',
nonlinearity='relu',
bias=0,
distribution='normal'):
assert distribution in ['uniform', 'normal']
if distribution == 'uniform':
nn.init.kaiming_uniform_(
module.weight, mode=mode, nonlinearity=nonlinearity)
else:
nn.init.kaiming_normal_(
module.weight, mode=mode, nonlinearity=nonlinearity)
if hasattr(module, 'bias'):
nn.init.constant_(module.bias, bias)
def bias_init_with_prob(prior_prob):
""" initialize conv/fc bias value according to giving probablity"""
bias_init = float(-np.log((1 - prior_prob) / prior_prob))
return bias_init
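# --- Hedged usage sketch (not part of the original file) ---------------------
# bias_init_with_prob solves sigmoid(b) = p for b, i.e. b = -log((1 - p) / p).
# With the focal-loss prior p = 0.01 used by RetinaNet-style heads, b is about
# -4.595, so an untrained classifier predicts roughly 1% foreground.
if __name__ == '__main__':
    b = bias_init_with_prob(0.01)
    print(b)                           # ~ -4.595
    print(1.0 / (1.0 + np.exp(-b)))    # ~ 0.01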
|
Cream/CDARTS/CDARTS_detection/mmdet/models/utils/weight_init.py/0
|
{
"file_path": "Cream/CDARTS/CDARTS_detection/mmdet/models/utils/weight_init.py",
"repo_id": "Cream",
"token_count": 652
}
| 289 |
from .functions.masked_conv import masked_conv2d
from .modules.masked_conv import MaskedConv2d
__all__ = ['masked_conv2d', 'MaskedConv2d']
|
Cream/CDARTS/CDARTS_detection/mmdet/ops/masked_conv/__init__.py/0
|
{
"file_path": "Cream/CDARTS/CDARTS_detection/mmdet/ops/masked_conv/__init__.py",
"repo_id": "Cream",
"token_count": 54
}
| 290 |
from .roi_align import RoIAlign, roi_align
__all__ = ['roi_align', 'RoIAlign']
|
Cream/CDARTS/CDARTS_detection/mmdet/ops/roi_align/__init__.py/0
|
{
"file_path": "Cream/CDARTS/CDARTS_detection/mmdet/ops/roi_align/__init__.py",
"repo_id": "Cream",
"token_count": 35
}
| 291 |
from setuptools import setup
from torch.utils.cpp_extension import BuildExtension, CUDAExtension
setup(
name='roi_pool',
ext_modules=[
CUDAExtension('roi_pool_cuda', [
'src/roi_pool_cuda.cpp',
'src/roi_pool_kernel.cu',
])
],
cmdclass={'build_ext': BuildExtension})
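# Hedged note (not part of the original file): with CUDA and a matching PyTorch
# toolchain available, the extension is typically compiled in place via
#   python setup.py build_ext --inplace
# which produces the `roi_pool_cuda` module that the Python wrappers import.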
|
Cream/CDARTS/CDARTS_detection/mmdet/ops/roi_pool/setup.py/0
|
{
"file_path": "Cream/CDARTS/CDARTS_detection/mmdet/ops/roi_pool/setup.py",
"repo_id": "Cream",
"token_count": 150
}
| 292 |
import contextlib
import sys
import time
import torch
if sys.version_info >= (3, 7):
@contextlib.contextmanager
def profile_time(trace_name,
name,
enabled=True,
stream=None,
end_stream=None):
"""Print time spent by CPU and GPU.
Useful as a temporary context manager to find sweet spots of
code suitable for async implementation.
"""
if (not enabled) or not torch.cuda.is_available():
yield
return
stream = stream if stream else torch.cuda.current_stream()
end_stream = end_stream if end_stream else stream
start = torch.cuda.Event(enable_timing=True)
end = torch.cuda.Event(enable_timing=True)
stream.record_event(start)
try:
cpu_start = time.monotonic()
yield
finally:
cpu_end = time.monotonic()
end_stream.record_event(end)
end.synchronize()
cpu_time = (cpu_end - cpu_start) * 1000
gpu_time = start.elapsed_time(end)
msg = "{} {} cpu_time {:.2f} ms ".format(trace_name, name,
cpu_time)
msg += "gpu_time {:.2f} ms stream {}".format(gpu_time, stream)
print(msg, end_stream)
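    # --- Hedged usage sketch (not part of the original file) -----------------
    # profile_time wraps a region and prints its CPU and GPU time; requires a
    # CUDA device. The matmul below is purely illustrative.
    if __name__ == '__main__' and torch.cuda.is_available():
        x = torch.randn(1024, 1024, device='cuda')
        with profile_time('demo_trace', 'matmul'):
            y = x @ x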
|
Cream/CDARTS/CDARTS_detection/mmdet/utils/profiling.py/0
|
{
"file_path": "Cream/CDARTS/CDARTS_detection/mmdet/utils/profiling.py",
"repo_id": "Cream",
"token_count": 685
}
| 293 |
# ------------------------------------------------------------------------------
# Loads Cityscapes semantic dataset.
# Written by Bowen Cheng ([email protected])
# ------------------------------------------------------------------------------
import glob
import os
import numpy as np
from .base_dataset import BaseDataset
from .utils import DatasetDescriptor
from ..transforms import build_transforms
_CITYSCAPES_INFORMATION = DatasetDescriptor(
splits_to_sizes={'train': 2975,
'trainval': 3475,
'val': 500,
'test': 1525},
num_classes=19,
ignore_label=255,
)
_CITYSCAPES_TRAIN_ID_TO_EVAL_ID = [7, 8, 11, 12, 13, 17, 19, 20, 21, 22,
23, 24, 25, 26, 27, 28, 31, 32, 33]
# A map from data type to folder name that saves the data.
_FOLDERS_MAP = {
'image': 'leftImg8bit',
'label': 'gtFine',
}
# A map from data type to filename postfix.
_POSTFIX_MAP = {
'image': '_leftImg8bit',
'label': '_gtFine_labelTrainIds',
}
# A map from data type to data format.
_DATA_FORMAT_MAP = {
'image': 'png',
'label': 'png',
}
class Cityscapes(BaseDataset):
"""
Cityscapes semantic segmentation dataset.
Arguments:
root: Str, root directory.
split: Str, data split, e.g. train/val/test.
is_train: Bool, for training or testing.
crop_size: Tuple, crop size.
mirror: Bool, whether to apply random horizontal flip.
min_scale: Float, min scale in scale augmentation.
max_scale: Float, max scale in scale augmentation.
scale_step_size: Float, step size to select random scale.
mean: Tuple, image mean.
std: Tuple, image std.
"""
def __init__(self,
root,
split,
is_train=True,
crop_size=(513, 1025),
mirror=True,
min_scale=0.5,
max_scale=2.,
scale_step_size=0.25,
mean=(0.485, 0.456, 0.406),
std=(0.229, 0.224, 0.225),
**kwargs):
super(Cityscapes, self).__init__(root, split, is_train, crop_size, mirror, min_scale, max_scale,
scale_step_size, mean, std)
self.num_classes = _CITYSCAPES_INFORMATION.num_classes
self.ignore_label = _CITYSCAPES_INFORMATION.ignore_label
self.label_pad_value = (self.ignore_label, )
# Get image and annotation list.
self.img_list = self._get_files('image', self.split)
self.ann_list = self._get_files('label', self.split)
assert len(self) == _CITYSCAPES_INFORMATION.splits_to_sizes[self.split]
self.transform = build_transforms(self, is_train)
def _get_files(self, data, dataset_split):
"""Gets files for the specified data type and dataset split.
Args:
data: String, desired data ('image' or 'label').
dataset_split: String, dataset split ('train', 'val', 'test')
Returns:
A list of sorted file names or None when getting label for test set.
"""
if data == 'label' and dataset_split == 'test':
return None
pattern = '*%s.%s' % (_POSTFIX_MAP[data], _DATA_FORMAT_MAP[data])
search_files = os.path.join(
self.root, _FOLDERS_MAP[data], dataset_split, '*', pattern)
filenames = glob.glob(search_files)
return sorted(filenames)
@staticmethod
def train_id_to_eval_id():
return _CITYSCAPES_TRAIN_ID_TO_EVAL_ID
def _convert_train_id_to_eval_id(self, prediction):
"""Converts the predicted label for evaluation.
There are cases where the training labels are not equal to the evaluation
labels. This function is used to perform the conversion so that we could
evaluate the results on the evaluation server.
Args:
prediction: Semantic segmentation prediction.
Returns:
Semantic segmentation prediction whose labels have been changed.
"""
converted_prediction = prediction.copy()
for train_id, eval_id in enumerate(self.train_id_to_eval_id()):
converted_prediction[prediction == train_id] = eval_id
return converted_prediction
@staticmethod
def create_label_colormap():
"""Creates a label colormap used in CITYSCAPES segmentation benchmark.
Returns:
A colormap for visualizing segmentation results.
"""
colormap = np.zeros((256, 3), dtype=np.uint8)
colormap[0] = [128, 64, 128]
colormap[1] = [244, 35, 232]
colormap[2] = [70, 70, 70]
colormap[3] = [102, 102, 156]
colormap[4] = [190, 153, 153]
colormap[5] = [153, 153, 153]
colormap[6] = [250, 170, 30]
colormap[7] = [220, 220, 0]
colormap[8] = [107, 142, 35]
colormap[9] = [152, 251, 152]
colormap[10] = [70, 130, 180]
colormap[11] = [220, 20, 60]
colormap[12] = [255, 0, 0]
colormap[13] = [0, 0, 142]
colormap[14] = [0, 0, 70]
colormap[15] = [0, 60, 100]
colormap[16] = [0, 80, 100]
colormap[17] = [0, 0, 230]
colormap[18] = [119, 11, 32]
return colormap
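# --- Hedged usage sketch (not part of the original file) ---------------------
# Train-id -> eval-id remapping on a toy prediction (no dataset needed):
# train id 0 ('road') maps to eval id 7, 13 ('car') to 26, 18 ('bicycle') to 33.
#
#   pred = np.array([[0, 1], [13, 18]])
#   mapping = Cityscapes.train_id_to_eval_id()
#   converted = pred.copy()
#   for train_id, eval_id in enumerate(mapping):
#       converted[pred == train_id] = eval_id
#   # converted == [[ 7  8]
#   #               [26 33]]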
|
Cream/CDARTS/CDARTS_segmentation/dataloaders/segdatasets/cityscapes.py/0
|
{
"file_path": "Cream/CDARTS/CDARTS_segmentation/dataloaders/segdatasets/cityscapes.py",
"repo_id": "Cream",
"token_count": 2451
}
| 294 |
# ------------------------------------------------------------------------------
# Reference: https://github.com/facebookresearch/detectron2/blob/master/detectron2/evaluation/panoptic_evaluation.py
# Modified by Bowen Cheng ([email protected])
# ------------------------------------------------------------------------------
import contextlib
import io
import logging
from collections import OrderedDict
import os
import json
import numpy as np
from fvcore.common.file_io import PathManager
from segmentation.utils import save_annotation
class CityscapesPanopticEvaluator:
"""
Evaluate panoptic segmentation
"""
def __init__(self, output_dir=None, train_id_to_eval_id=None, label_divisor=1000, void_label=255000,
gt_dir='./datasets/cityscapes', split='val', num_classes=19):
"""
Args:
            output_dir (str): an output directory to dump results.
            train_id_to_eval_id (list): maps training id to evaluation id.
            label_divisor (int): panoptic label = semantic label * label_divisor + instance id.
            void_label (int): panoptic label for void regions; the corresponding pixels should be ignored.
gt_dir (str): path to ground truth annotations.
split (str): evaluation split.
num_classes (int): number of classes.
"""
if output_dir is None:
            raise ValueError('Must provide an output directory.')
self._output_dir = output_dir
if self._output_dir:
PathManager.mkdirs(self._output_dir)
self._panoptic_dir = os.path.join(self._output_dir, 'predictions')
if self._panoptic_dir:
PathManager.mkdirs(self._panoptic_dir)
self._predictions = []
self._predictions_json = os.path.join(output_dir, 'predictions.json')
self._train_id_to_eval_id = train_id_to_eval_id
self._label_divisor = label_divisor
self._void_label = void_label
self._num_classes = num_classes
self._logger = logging.getLogger(__name__)
self._gt_json_file = os.path.join(gt_dir, 'gtFine', 'cityscapes_panoptic_{}.json'.format(split))
self._gt_folder = os.path.join(gt_dir, 'gtFine', 'cityscapes_panoptic_{}'.format(split))
self._pred_json_file = os.path.join(output_dir, 'predictions.json')
self._pred_folder = self._panoptic_dir
self._resultsFile = os.path.join(output_dir, 'resultPanopticSemanticLabeling.json')
@staticmethod
def id2rgb(id_map):
if isinstance(id_map, np.ndarray):
id_map_copy = id_map.copy()
rgb_shape = tuple(list(id_map.shape) + [3])
rgb_map = np.zeros(rgb_shape, dtype=np.uint8)
for i in range(3):
rgb_map[..., i] = id_map_copy % 256
id_map_copy //= 256
return rgb_map
color = []
for _ in range(3):
color.append(id_map % 256)
id_map //= 256
return color
def update(self, panoptic, image_filename=None, image_id=None):
if image_filename is None:
raise ValueError('Need to provide image_filename.')
if image_id is None:
raise ValueError('Need to provide image_id.')
# Change void region.
panoptic[panoptic == self._void_label] = 0
segments_info = []
for pan_lab in np.unique(panoptic):
pred_class = pan_lab // self._label_divisor
if self._train_id_to_eval_id is not None:
pred_class = self._train_id_to_eval_id[pred_class]
segments_info.append(
{
'id': int(pan_lab),
'category_id': int(pred_class),
}
)
save_annotation(self.id2rgb(panoptic), self._panoptic_dir, image_filename, add_colormap=False)
self._predictions.append(
{
'image_id': image_id,
'file_name': image_filename + '.png',
'segments_info': segments_info,
}
)
def evaluate(self):
import cityscapesscripts.evaluation.evalPanopticSemanticLabeling as cityscapes_eval
gt_json_file = self._gt_json_file
gt_folder = self._gt_folder
pred_json_file = self._pred_json_file
pred_folder = self._pred_folder
resultsFile = self._resultsFile
with open(gt_json_file, "r") as f:
json_data = json.load(f)
json_data["annotations"] = self._predictions
with PathManager.open(self._predictions_json, "w") as f:
f.write(json.dumps(json_data))
with contextlib.redirect_stdout(io.StringIO()):
results = cityscapes_eval.evaluatePanoptic(gt_json_file, gt_folder, pred_json_file, pred_folder, resultsFile)
self._logger.info(results)
return results
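# --- Hedged usage sketch (not part of the original file) ---------------------
# id2rgb stores a panoptic id in base 256 across the three channels
# (R = id % 256, G = id // 256 % 256, B = id // 256**2 % 256), so the id is
# recoverable as R + 256 * G + 256**2 * B:
#
#   ids = np.array([[26001]])                      # e.g. class 26, instance 1
#   rgb = CityscapesPanopticEvaluator.id2rgb(ids)  # [[[145, 101, 0]]]
#   ids == rgb[..., 0] + 256 * rgb[..., 1] + 256 ** 2 * rgb[..., 2]  # True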
|
Cream/CDARTS/CDARTS_segmentation/segmentation/evaluation/panoptic.py/0
|
{
"file_path": "Cream/CDARTS/CDARTS_segmentation/segmentation/evaluation/panoptic.py",
"repo_id": "Cream",
"token_count": 2162
}
| 295 |
from torch import nn
from .criterion import RegularCE, OhemCE, DeepLabCE
L1Loss = nn.L1Loss
MSELoss = nn.MSELoss
CrossEntropyLoss = nn.CrossEntropyLoss
|
Cream/CDARTS/CDARTS_segmentation/segmentation/model/loss/__init__.py/0
|
{
"file_path": "Cream/CDARTS/CDARTS_segmentation/segmentation/model/loss/__init__.py",
"repo_id": "Cream",
"token_count": 63
}
| 296 |
# ------------------------------------------------------------------------------
# This file contains primitives for multi-gpu communication.
# This is useful when doing distributed training.
# Reference: https://github.com/facebookresearch/detectron2/blob/master/detectron2/utils/comm.py
# Modified by Bowen Cheng ([email protected])
# ------------------------------------------------------------------------------
import functools
import logging
import numpy as np
import pickle
import torch
import torch.distributed as dist
_LOCAL_PROCESS_GROUP = None
"""
A torch process group which only includes processes that are on the same machine as the current process.
This variable is set when processes are spawned by `launch()` in "engine/launch.py".
"""
def get_world_size() -> int:
if not dist.is_available():
return 1
if not dist.is_initialized():
return 1
return dist.get_world_size()
def get_rank() -> int:
if not dist.is_available():
return 0
if not dist.is_initialized():
return 0
return dist.get_rank()
def get_local_rank() -> int:
"""
Returns:
The rank of the current process within the local (per-machine) process group.
"""
if not dist.is_available():
return 0
if not dist.is_initialized():
return 0
assert _LOCAL_PROCESS_GROUP is not None
return dist.get_rank(group=_LOCAL_PROCESS_GROUP)
def get_local_size() -> int:
"""
Returns:
The size of the per-machine process group,
i.e. the number of processes per machine.
"""
if not dist.is_available():
return 1
if not dist.is_initialized():
return 1
return dist.get_world_size(group=_LOCAL_PROCESS_GROUP)
def is_main_process() -> bool:
return get_rank() == 0
def synchronize():
"""
Helper function to synchronize (barrier) among all processes when
using distributed training
"""
if not dist.is_available():
return
if not dist.is_initialized():
return
world_size = dist.get_world_size()
if world_size == 1:
return
dist.barrier()
@functools.lru_cache()
def _get_global_gloo_group():
"""
    Return a process group based on the gloo backend, containing all the ranks.
    The result is cached.
"""
if dist.get_backend() == "nccl":
return dist.new_group(backend="gloo")
else:
return dist.group.WORLD
def _serialize_to_tensor(data, group):
backend = dist.get_backend(group)
assert backend in ["gloo", "nccl"]
device = torch.device("cpu" if backend == "gloo" else "cuda")
buffer = pickle.dumps(data)
if len(buffer) > 1024 ** 3:
logger = logging.getLogger(__name__)
logger.warning(
"Rank {} trying to all-gather {:.2f} GB of data on device {}".format(
get_rank(), len(buffer) / (1024 ** 3), device
)
)
storage = torch.ByteStorage.from_buffer(buffer)
tensor = torch.ByteTensor(storage).to(device=device)
return tensor
def _pad_to_largest_tensor(tensor, group):
"""
Returns:
list[int]: size of the tensor, on each rank
Tensor: padded tensor that has the max size
"""
world_size = dist.get_world_size(group=group)
assert (
world_size >= 1
), "comm.gather/all_gather must be called from ranks within the given group!"
local_size = torch.tensor([tensor.numel()], dtype=torch.int64, device=tensor.device)
size_list = [
torch.zeros([1], dtype=torch.int64, device=tensor.device) for _ in range(world_size)
]
dist.all_gather(size_list, local_size, group=group)
size_list = [int(size.item()) for size in size_list]
max_size = max(size_list)
# we pad the tensor because torch all_gather does not support
# gathering tensors of different shapes
if local_size != max_size:
padding = torch.zeros((max_size - local_size,), dtype=torch.uint8, device=tensor.device)
tensor = torch.cat((tensor, padding), dim=0)
return size_list, tensor
def all_gather(data, group=None):
"""
Run all_gather on arbitrary picklable data (not necessarily tensors).
Args:
data: any picklable object
group: a torch process group. By default, will use a group which
contains all ranks on gloo backend.
Returns:
list[data]: list of data gathered from each rank
"""
if get_world_size() == 1:
return [data]
if group is None:
group = _get_global_gloo_group()
if dist.get_world_size(group) == 1:
return [data]
tensor = _serialize_to_tensor(data, group)
size_list, tensor = _pad_to_largest_tensor(tensor, group)
max_size = max(size_list)
# receiving Tensor from all ranks
tensor_list = [
torch.empty((max_size,), dtype=torch.uint8, device=tensor.device) for _ in size_list
]
dist.all_gather(tensor_list, tensor, group=group)
data_list = []
for size, tensor in zip(size_list, tensor_list):
buffer = tensor.cpu().numpy().tobytes()[:size]
data_list.append(pickle.loads(buffer))
return data_list
def gather(data, dst=0, group=None):
"""
Run gather on arbitrary picklable data (not necessarily tensors).
Args:
data: any picklable object
dst (int): destination rank
group: a torch process group. By default, will use a group which
contains all ranks on gloo backend.
Returns:
list[data]: on dst, a list of data gathered from each rank. Otherwise,
an empty list.
"""
if get_world_size() == 1:
return [data]
if group is None:
group = _get_global_gloo_group()
if dist.get_world_size(group=group) == 1:
return [data]
rank = dist.get_rank(group=group)
tensor = _serialize_to_tensor(data, group)
size_list, tensor = _pad_to_largest_tensor(tensor, group)
# receiving Tensor from all ranks
if rank == dst:
max_size = max(size_list)
tensor_list = [
torch.empty((max_size,), dtype=torch.uint8, device=tensor.device) for _ in size_list
]
dist.gather(tensor, tensor_list, dst=dst, group=group)
data_list = []
for size, tensor in zip(size_list, tensor_list):
buffer = tensor.cpu().numpy().tobytes()[:size]
data_list.append(pickle.loads(buffer))
return data_list
else:
dist.gather(tensor, [], dst=dst, group=group)
return []
def shared_random_seed():
"""
Returns:
int: a random number that is the same across all workers.
If workers need a shared RNG, they can use this shared seed to
create one.
All workers must call this function, otherwise it will deadlock.
"""
ints = np.random.randint(2 ** 31)
all_ints = all_gather(ints)
return all_ints[0]
def reduce_dict(input_dict, average=True):
"""
Reduce the values in the dictionary from all processes so that process with rank
0 has the reduced results.
Args:
input_dict (dict): inputs to be reduced. All the values must be scalar CUDA Tensor.
average (bool): whether to do average or sum
Returns:
a dict with the same keys as input_dict, after reduction.
"""
world_size = get_world_size()
if world_size < 2:
return input_dict
with torch.no_grad():
names = []
values = []
# sort the keys so that they are consistent across processes
for k in sorted(input_dict.keys()):
names.append(k)
values.append(input_dict[k])
values = torch.stack(values, dim=0)
dist.reduce(values, dst=0)
if dist.get_rank() == 0 and average:
# only main process gets accumulated, so only divide by
# world_size in this case
values /= world_size
reduced_dict = {k: v for k, v in zip(names, values)}
return reduced_dict
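# --- Hedged usage sketch (not part of the original file) ---------------------
# Without an initialized process group, get_world_size() is 1 and the
# collectives short-circuit, so this runs single-process; under
# torch.distributed the same calls gather/reduce across ranks.
if __name__ == '__main__':
    print(all_gather({'rank': get_rank()}))  # [{'rank': 0}]
    losses = {'loss_cls': torch.tensor(0.5), 'loss_reg': torch.tensor(1.0)}
    print(reduce_dict(losses))               # unchanged when world_size == 1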
|
Cream/CDARTS/CDARTS_segmentation/segmentation/utils/comm.py/0
|
{
"file_path": "Cream/CDARTS/CDARTS_segmentation/segmentation/utils/comm.py",
"repo_id": "Cream",
"token_count": 3158
}
| 297 |
import numpy as np
from datasets.BaseDataset import BaseDataset
class Cityscapes(BaseDataset):
trans_labels = [7, 8, 11, 12, 13, 17, 19, 20, 21, 22, 23, 24, 25, 26, 27,
28, 31, 32, 33]
@classmethod
def get_class_colors(*args):
return [[128, 64, 128], [244, 35, 232], [70, 70, 70],
[102, 102, 156], [190, 153, 153], [153, 153, 153],
[250, 170, 30], [220, 220, 0], [107, 142, 35],
[152, 251, 152], [70, 130, 180], [220, 20, 60], [255, 0, 0],
[0, 0, 142], [0, 0, 70], [0, 60, 100], [0, 80, 100],
[0, 0, 230], [119, 11, 32]]
@classmethod
def get_class_names(*args):
# class counting(gtFine)
# 2953 2811 2934 970 1296 2949 1658 2808 2891 1654 2686 2343 1023 2832
# 359 274 142 513 1646
return ['road', 'sidewalk', 'building', 'wall', 'fence', 'pole',
'traffic light', 'traffic sign',
'vegetation', 'terrain', 'sky', 'person', 'rider', 'car',
'truck', 'bus', 'train', 'motorcycle', 'bicycle']
@classmethod
def transform_label(cls, pred, name):
label = np.zeros(pred.shape)
ids = np.unique(pred)
for id in ids:
label[np.where(pred == id)] = cls.trans_labels[id]
new_name = (name.split('.')[0]).split('_')[:-1]
new_name = '_'.join(new_name) + '.png'
print('Trans', name, 'to', new_name, ' ',
np.unique(np.array(pred, np.uint8)), ' ---------> ',
np.unique(np.array(label, np.uint8)))
return label, new_name
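# --- Hedged usage sketch (not part of the original file) ---------------------
# transform_label remaps train ids to official Cityscapes label ids and strips
# the trailing filename suffix; the inputs below are illustrative:
#
#   pred = np.zeros((2, 2), dtype=np.uint8)
#   pred[1, 1] = 13  # 'car' in train-id space
#   label, new_name = Cityscapes.transform_label(pred, 'frankfurt_000000_000294_pred.png')
#   # new_name == 'frankfurt_000000_000294.png'; np.unique(label) == [7., 26.]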
|
Cream/CDARTS/CDARTS_segmentation/tools/datasets/cityscapes/cityscapes.py/0
|
{
"file_path": "Cream/CDARTS/CDARTS_segmentation/tools/datasets/cityscapes/cityscapes.py",
"repo_id": "Cream",
"token_count": 823
}
| 298 |
""" Common distribution utilities
Hacked by Hongyuan Yu
"""
from copy import deepcopy
import torch
from torch import distributed as dist
import logging
from collections import OrderedDict
_logger = logging.getLogger(__name__)
def reduce_tensor(tensor, n):
rt = tensor.clone()
dist.all_reduce(rt, op=dist.ReduceOp.SUM)
rt /= n
return rt
class ModelEma:
""" Model Exponential Moving Average
Keep a moving average of everything in the model state_dict (parameters and buffers).
This is intended to allow functionality like
https://www.tensorflow.org/api_docs/python/tf/train/ExponentialMovingAverage
A smoothed version of the weights is necessary for some training schemes to perform well.
E.g. Google's hyper-params for training MNASNet, MobileNet-V3, EfficientNet, etc that use
RMSprop with a short 2.4-3 epoch decay period and slow LR decay rate of .96-.99 requires EMA
smoothing of weights to match results. Pay attention to the decay constant you are using
relative to your update count per epoch.
To keep EMA from using GPU resources, set device='cpu'. This will save a bit of memory but
disable validation of the EMA weights. Validation will have to be done manually in a separate
process, or after the training stops converging.
This class is sensitive where it is initialized in the sequence of model init,
GPU assignment and distributed training wrappers.
I've tested with the sequence in my own train.py for torch.DataParallel, apex.DDP, and single-GPU.
"""
def __init__(self, model, decay=0.9999, device='', resume=''):
# make a copy of the model for accumulating moving average of weights
self.ema = deepcopy(model)
self.ema.eval()
self.decay = decay
self.device = device # perform ema on different device from model if set
if device:
self.ema.to(device=device)
self.ema_has_module = hasattr(self.ema, 'module')
if resume:
self._load_checkpoint(resume)
for p in self.ema.parameters():
p.requires_grad_(False)
def _load_checkpoint(self, checkpoint_path):
checkpoint = torch.load(checkpoint_path, map_location='cpu')
assert isinstance(checkpoint, dict)
if 'state_dict_ema' in checkpoint:
new_state_dict = OrderedDict()
for k, v in checkpoint['state_dict_ema'].items():
# ema model may have been wrapped by DataParallel, and need module prefix
if self.ema_has_module:
name = 'module.' + k if not k.startswith('module') else k
else:
name = k
new_state_dict[name] = v
self.ema.load_state_dict(new_state_dict)
_logger.info("Loaded state_dict_ema")
else:
_logger.warning("Failed to find state_dict_ema, starting from loaded model weights")
def update(self, model):
# correct a mismatch in state dict keys
needs_module = hasattr(model, 'module') and not self.ema_has_module
with torch.no_grad():
msd = model.state_dict()
for k, ema_v in self.ema.state_dict().items():
if needs_module:
k = 'module.' + k
model_v = msd[k].detach()
if self.device:
model_v = model_v.to(device=self.device)
ema_v.copy_(ema_v * self.decay + (1. - self.decay) * model_v)
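# --- Hedged usage sketch (not part of the original file) ---------------------
# Typical EMA bookkeeping around a training step; the model and the fake
# "optimizer step" below are illustrative. Each update moves the shadow
# weights by (1 - decay) toward the live weights.
if __name__ == '__main__':
    model = torch.nn.Linear(4, 2)
    ema = ModelEma(model, decay=0.99)
    w0 = model.weight.detach().clone()
    with torch.no_grad():
        model.weight.add_(1.0)  # stand-in for an optimizer update
    ema.update(model)
    assert torch.allclose(ema.ema.weight, w0 + 0.01)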
|
Cream/CDARTS/CDARTS_segmentation/tools/utils/dist_utils.py/0
|
{
"file_path": "Cream/CDARTS/CDARTS_segmentation/tools/utils/dist_utils.py",
"repo_id": "Cream",
"token_count": 1391
}
| 299 |
# encoding: utf-8
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os.path as osp
import sys
import numpy as np
from easydict import EasyDict as edict
C = edict()
config = C
cfg = C
C.seed = 12345
"""please config ROOT_dir and user when u first using"""
#C.repo_name = 'FasterSeg'
#C.abs_dir = osp.realpath(".")
#C.this_dir = C.abs_dir.split(osp.sep)[-1]
#C.root_dir = C.abs_dir[:C.abs_dir.index(C.repo_name) + len(C.repo_name)]
C.abs_dir = osp.realpath(".")
C.root_dir = osp.realpath("..")
C.this_dir = C.abs_dir.split(osp.sep)[-1]
C.log_dir = osp.abspath(osp.join(C.root_dir, 'log', C.this_dir))
"""Data Dir"""
C.dataset_path = "../DATASET/cityscapes/"
C.img_root_folder = C.dataset_path
C.gt_root_folder = C.dataset_path
C.train_source = osp.join(C.dataset_path, "cityscapes_train_fine.txt")
C.train_eval_source = osp.join(C.dataset_path, "cityscapes_train_val_fine.txt")
C.eval_source = osp.join(C.dataset_path, "cityscapes_val_fine.txt")
C.test_source = osp.join(C.dataset_path, "cityscapes_test.txt")
"""Path Config"""
def add_path(path):
if path not in sys.path:
sys.path.insert(0, path)
add_path(osp.join(C.root_dir, 'tools'))
add_path(C.root_dir)
"""Image Config"""
C.num_classes = 19
C.background = -1
C.image_mean = np.array([0.485, 0.456, 0.406])
C.image_std = np.array([0.229, 0.224, 0.225])
C.target_size = 1024
C.down_sampling = 1  # first down-sample, then crop
C.gt_down_sampling = 1
C.num_train_imgs = 2975
C.num_eval_imgs = 500
""" Settings for network, this would be different for each kind of model"""
C.bn_eps = 1e-5
C.bn_momentum = 0.1
"""Train Config"""
C.lr = 0.01
C.momentum = 0.9
C.weight_decay = 5e-4
C.nepochs = 600
C.niters_per_epoch = 1000
C.num_workers = 4
C.train_scale_array = [0.75, 1, 1.25]
"""Eval Config"""
C.eval_stride_rate = 5 / 6
C.eval_scale_array = [1, ]
C.eval_flip = False
C.eval_base_size = 1024
C.eval_crop_size = 1024
C.eval_height = 1024
C.eval_width = 2048
C.layers = 16
""" Train Config """
C.mode = "teacher" # "teacher" or "student"
if C.mode == "teacher":
##### train teacher model only ####################################
C.arch_idx = [1] # 0 for teacher
C.branch = [3]
C.width_mult_list = [4./12, 6./12, 8./12, 10./12, 1.,]
# C.stem_head_width = [(1, 1)]
C.stem_head_width = [(8./12, 8./12)]
C.load_path = "search-224x448_F12.L16_batch2-20200828-201547" # path to the searched directory
C.load_epoch = "last" # "last" or "int" (e.g. "30"): which epoch to load from the searched architecture
# C.batch_size = 12
C.batch_size = 4
C.Fch = 12
C.image_height = 512
C.image_width = 1024
C.save = "%dx%d_model_batch%d"%(C.image_height, C.image_width, C.batch_size)
elif C.mode == "student":
##### train student with KL distillation from teacher ##############
C.arch_idx = [0, 1] # 0 for teacher, 1 for student
C.branch = [2, 2]
C.width_mult_list = [4./12, 6./12, 8./12, 10./12, 1.,]
C.stem_head_width = [(1, 1), (8./12, 8./12),]
C.load_path = "fasterseg" # path to the searched directory
C.teacher_path = "fasterseg" # where to load the pretrained teacher's weight
C.load_epoch = "last" # "last" or "int" (e.g. "30")
C.batch_size = 12
C.Fch = 12
C.image_height = 512
C.image_width = 1024
C.save = "%dx%d_student_batch%d"%(C.image_height, C.image_width, C.batch_size)
########################################
C.is_test = False # if True, prediction files for the test set will be generated
C.is_eval = False # if True, the train.py will only do evaluation for once
C.eval_path = "fasterseg" # path to pretrained directory to be evaluated
|
Cream/CDARTS/CDARTS_segmentation/train/config_train.py/0
|
{
"file_path": "Cream/CDARTS/CDARTS_segmentation/train/config_train.py",
"repo_id": "Cream",
"token_count": 1549
}
| 300 |
import numpy as np
import torch
class Seg_Metrics(object):
def __init__(self, n_classes=19):
self.n_classes = n_classes
self.total_inter = np.zeros(n_classes)
self.total_union = np.zeros(n_classes)
def update(self, inter, union, N):
self.total_inter += inter * N
self.total_union += union * N
def get_scores(self):
idx = self.total_union > 0
IoU = 1.0 * self.total_inter[idx] / (np.spacing(1) + self.total_union[idx])
mIoU = IoU.mean()
return mIoU
    def reset(self):
        self.total_inter = np.zeros(self.n_classes)
        self.total_union = np.zeros(self.n_classes)
def batch_pix_accuracy(predict, target):
"""Batch Pixel Accuracy
Args:
predict: input 4D tensor
target: label 3D tensor
"""
_, predict = torch.max(predict, 1)
predict = predict.cpu().numpy() + 1
target = target.cpu().numpy() + 1
pixel_labeled = np.sum(target > 0)
pixel_correct = np.sum((predict == target)*(target > 0))
assert pixel_correct <= pixel_labeled, \
"Correct area should be smaller than Labeled"
return pixel_correct, pixel_labeled
def batch_intersection_union(predict, target, nclass):
"""Batch Intersection of Union
Args:
predict: input 4D tensor
target: label 3D tensor
nclass: number of categories (int)
"""
_, predict = torch.max(predict, 1)
mini = 1
maxi = nclass
nbins = nclass
predict = predict.cpu().numpy() + 1
target = target.cpu().numpy() + 1
k = (target >= 1) & (target <= nclass)
# predict = predict * (target > 0).astype(predict.dtype)
predict = predict * k.astype(predict.dtype)
intersection = predict * (predict == target)
# areas of intersection and union
area_inter, _ = np.histogram(intersection, bins=nbins, range=(mini, maxi))
area_pred, _ = np.histogram(predict, bins=nbins, range=(mini, maxi))
area_lab, _ = np.histogram(target, bins=nbins, range=(mini, maxi))
area_union = area_pred + area_lab - area_inter
assert (area_inter <= area_union).all(), \
"Intersection area should be smaller than Union area"
return area_inter, area_union
# ref https://github.com/CSAILVision/sceneparsing/blob/master/evaluationCode/utils_eval.py
def pixel_accuracy(im_pred, im_lab):
im_pred = np.asarray(im_pred)
im_lab = np.asarray(im_lab)
# Remove classes from unlabeled pixels in gt image.
# We should not penalize detections in unlabeled portions of the image.
pixel_labeled = np.sum(im_lab > 0)
pixel_correct = np.sum((im_pred == im_lab) * (im_lab > 0))
#pixel_accuracy = 1.0 * pixel_correct / pixel_labeled
return pixel_correct, pixel_labeled
def intersection_and_union(im_pred, im_lab, num_class):
im_pred = np.asarray(im_pred)
im_lab = np.asarray(im_lab)
# Remove classes from unlabeled pixels in gt image.
im_pred = im_pred * (im_lab > 0)
# Compute area intersection:
intersection = im_pred * (im_pred == im_lab)
area_inter, _ = np.histogram(intersection, bins=num_class-1,
range=(1, num_class - 1))
# Compute area union:
area_pred, _ = np.histogram(im_pred, bins=num_class-1,
range=(1, num_class - 1))
area_lab, _ = np.histogram(im_lab, bins=num_class-1,
range=(1, num_class - 1))
area_union = area_pred + area_lab - area_inter
return area_inter, area_union
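# --- Hedged usage sketch (not part of the original file) ---------------------
# Accumulates per-batch intersection/union into a running mIoU; the logits and
# labels below are random stand-ins for network output and ground truth.
if __name__ == '__main__':
    n_classes = 19
    metrics = Seg_Metrics(n_classes=n_classes)
    predict = torch.randn(2, n_classes, 8, 8)        # N x C x H x W logits
    target = torch.randint(0, n_classes, (2, 8, 8))  # N x H x W labels
    inter, union = batch_intersection_union(predict, target, n_classes)
    metrics.update(inter, union, N=2)
    print(metrics.get_scores())                      # mIoU over seen classes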
|
Cream/CDARTS/CDARTS_segmentation/train/seg_metrics.py/0
|
{
"file_path": "Cream/CDARTS/CDARTS_segmentation/train/seg_metrics.py",
"repo_id": "Cream",
"token_count": 1478
}
| 301 |
import torch
import numpy as np
import torchvision.datasets as dset
import torchvision.transforms as transforms
from datasets.data_utils import SubsetDistributedSampler
from datasets.data_utils import CIFAR10Policy, Cutout
def data_transforms_cifar(config, cutout=False):
CIFAR_MEAN = [0.49139968, 0.48215827, 0.44653124]
CIFAR_STD = [0.24703233, 0.24348505, 0.26158768]
if config.use_aa:
train_transform = transforms.Compose([
transforms.RandomCrop(32, padding=4, fill=128),
transforms.RandomHorizontalFlip(), CIFAR10Policy(),
transforms.ToTensor(),
transforms.Normalize(CIFAR_MEAN, CIFAR_STD),
])
else:
train_transform = transforms.Compose([
transforms.RandomCrop(32, padding=4),
transforms.RandomHorizontalFlip(),
transforms.ToTensor(),
transforms.Normalize(CIFAR_MEAN, CIFAR_STD),
])
if cutout:
train_transform.transforms.append(Cutout(config.cutout_length))
valid_transform = transforms.Compose([
transforms.ToTensor(),
transforms.Normalize(CIFAR_MEAN, CIFAR_STD),
])
return train_transform, valid_transform
def get_search_datasets(config):
dataset = config.dataset.lower()
if dataset == 'cifar10':
dset_cls = dset.CIFAR10
n_classes = 10
elif dataset == 'cifar100':
dset_cls = dset.CIFAR100
n_classes = 100
else:
raise Exception("Not support dataset!")
train_transform, valid_transform = data_transforms_cifar(config, cutout=False)
train_data = dset_cls(root=config.data_dir, train=True, download=True, transform=train_transform)
test_data = dset_cls(root=config.data_dir, train=False, download=True, transform=valid_transform)
num_train = len(train_data)
# num_train = 512
indices = list(range(num_train))
split_mid = int(np.floor(0.5 * num_train))
train_sampler = SubsetDistributedSampler(train_data, indices[:split_mid])
valid_sampler = SubsetDistributedSampler(train_data, indices[split_mid:num_train])
train_loader = torch.utils.data.DataLoader(
train_data, batch_size=config.batch_size,
sampler=train_sampler,
pin_memory=True, num_workers=config.workers)
valid_loader = torch.utils.data.DataLoader(
train_data, batch_size=config.batch_size,
sampler=valid_sampler,
pin_memory=True, num_workers=config.workers)
return [train_loader, valid_loader], [train_sampler, valid_sampler]
def get_augment_datasets(config):
dataset = config.dataset.lower()
if dataset == 'cifar10':
dset_cls = dset.CIFAR10
elif dataset == 'cifar100':
dset_cls = dset.CIFAR100
else:
raise Exception("Not support dataset!")
train_transform, valid_transform = data_transforms_cifar(config, cutout=True)
train_data = dset_cls(root=config.data_dir, train=True, download=True, transform=train_transform)
test_data = dset_cls(root=config.data_dir, train=False, download=True, transform=valid_transform)
train_sampler = torch.utils.data.distributed.DistributedSampler(train_data)
test_sampler = torch.utils.data.distributed.DistributedSampler(test_data)
train_loader = torch.utils.data.DataLoader(
train_data, batch_size=config.batch_size,
sampler=train_sampler,
pin_memory=True, num_workers=config.workers)
test_loader = torch.utils.data.DataLoader(
test_data, batch_size=config.batch_size,
sampler=test_sampler,
pin_memory=True, num_workers=config.workers)
return [train_loader, test_loader], [train_sampler, test_sampler]
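# --- Hedged usage sketch (not part of the original file) ---------------------
# Both search loaders draw from the *training* set, split 50/50 into a
# weight-update half and an architecture-update half (the usual DARTS bi-level
# split). The samplers are distributed, so a process group must exist first;
# the config object is a stand-in for the project's real config:
#
#   import torch.distributed as dist
#   from types import SimpleNamespace
#   dist.init_process_group('gloo', init_method='tcp://127.0.0.1:23456',
#                           rank=0, world_size=1)
#   config = SimpleNamespace(dataset='cifar10', data_dir='./data', use_aa=False,
#                            cutout_length=16, batch_size=64, workers=2)
#   (train_loader, valid_loader), _ = get_search_datasets(config)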
|
Cream/CDARTS/benchmark201/datasets/cifar.py/0
|
{
"file_path": "Cream/CDARTS/benchmark201/datasets/cifar.py",
"repo_id": "Cream",
"token_count": 1462
}
| 302 |
""" Genotypes
- Genotype: normal/reduce gene + normal/reduce cell output connection (concat)
- gene: discrete ops information (w/o output connection)
- dag: real ops (can be mixed or discrete, but Genotype has only discrete information itself)
"""
from collections import namedtuple
import torch
import torch.nn as nn
import torch.nn.functional as F
from copy import deepcopy
from models import ops
Genotype = namedtuple('Genotype', 'normal normal_concat reduce reduce_concat')
def to_dag(C_in, gene, reduction, bn_affine=True):
""" generate discrete ops from gene """
dag = nn.ModuleList()
for edges in gene:
row = nn.ModuleList()
for op_name, s_idx in edges:
# reduction cell & from input nodes => stride = 2
stride = 2 if reduction and s_idx < 2 else 1
op = ops.OPS[op_name](C_in, stride, bn_affine)
if not isinstance(op, ops.Identity): # Identity does not use drop path
op = nn.Sequential(
op,
ops.DropPath_()
)
op.s_idx = s_idx
row.append(op)
dag.append(row)
return dag
def from_str(s):
""" generate genotype from string
e.g. "Genotype(
normal=[[('sep_conv_3x3', 0), ('sep_conv_3x3', 1)],
[('sep_conv_3x3', 1), ('dil_conv_3x3', 2)],
[('sep_conv_3x3', 1), ('sep_conv_3x3', 2)],
[('sep_conv_3x3', 1), ('dil_conv_3x3', 4)]],
normal_concat=range(2, 6),
reduce=[[('max_pool_3x3', 0), ('max_pool_3x3', 1)],
[('max_pool_3x3', 0), ('skip_connect', 2)],
[('max_pool_3x3', 0), ('skip_connect', 2)],
[('max_pool_3x3', 0), ('skip_connect', 2)]],
reduce_concat=range(2, 6))"
"""
genotype = eval(s)
return genotype
def parse(alpha, beta, k):
"""
parse continuous alpha to discrete gene.
alpha is ParameterList:
ParameterList [
Parameter(n_edges1, n_ops),
Parameter(n_edges2, n_ops),
...
]
beta is ParameterList:
ParameterList [
Parameter(n_edges1),
Parameter(n_edges2),
...
]
gene is list:
[
[('node1_ops_1', node_idx), ..., ('node1_ops_k', node_idx)],
[('node2_ops_1', node_idx), ..., ('node2_ops_k', node_idx)],
...
]
each node has two edges (k=2) in CNN.
"""
gene = []
assert PRIMITIVES[-1] == 'none' # assume last PRIMITIVE is 'none'
# 1) Convert the mixed op to discrete edge (single op) by choosing top-1 weight edge
# 2) Choose top-k edges per node by edge score (top-1 weight in edge)
# output the connect idx[(node_idx, connect_idx, op_idx).... () ()]
connect_idx = []
for edges, w in zip(alpha, beta):
# edges: Tensor(n_edges, n_ops)
edge_max, primitive_indices = torch.topk((w.view(-1, 1) * edges)[:, :-1], 1) # ignore 'none'
topk_edge_values, topk_edge_indices = torch.topk(edge_max.view(-1), k)
node_gene = []
node_idx = []
for edge_idx in topk_edge_indices:
prim_idx = primitive_indices[edge_idx]
prim = PRIMITIVES[prim_idx]
node_gene.append((prim, edge_idx.item()))
node_idx.append((edge_idx.item(), prim_idx.item()))
gene.append(node_gene)
connect_idx.append(node_idx)
return gene, connect_idx
def parse_gumbel(alpha, beta, k):
"""
parse continuous alpha to discrete gene.
alpha is ParameterList:
ParameterList [
Parameter(n_edges1, n_ops),
Parameter(n_edges2, n_ops),
...
]
beta is ParameterList:
ParameterList [
Parameter(n_edges1),
Parameter(n_edges2),
...
]
gene is list:
[
[('node1_ops_1', node_idx), ..., ('node1_ops_k', node_idx)],
[('node2_ops_1', node_idx), ..., ('node2_ops_k', node_idx)],
...
]
each node has two edges (k=2) in CNN.
"""
gene = []
assert PRIMITIVES[-1] == 'none' # assume last PRIMITIVE is 'none'
# 1) Convert the mixed op to discrete edge (single op) by choosing top-1 weight edge
# 2) Choose top-k edges per node by edge score (top-1 weight in edge)
# output the connect idx[(node_idx, connect_idx, op_idx).... () ()]
connect_idx = []
for edges, w in zip(alpha, beta):
# edges: Tensor(n_edges, n_ops)
discrete_a = F.gumbel_softmax(edges[:, :-1].reshape(-1), tau=1, hard=True)
for i in range(k-1):
discrete_a = discrete_a + F.gumbel_softmax(edges[:, :-1].reshape(-1), tau=1, hard=True)
discrete_a = discrete_a.reshape(-1, len(PRIMITIVES)-1)
reserved_edge = (discrete_a>0).nonzero()
node_gene = []
node_idx = []
for i in range(reserved_edge.shape[0]):
edge_idx = reserved_edge[i][0].item()
prim_idx = reserved_edge[i][1].item()
prim = PRIMITIVES[prim_idx]
node_gene.append((prim, edge_idx))
node_idx.append((edge_idx, prim_idx))
gene.append(node_gene)
connect_idx.append(node_idx)
return gene, connect_idx
def get_combination(space, num):
combs = []
for i in range(num):
if i == 0:
for func in space:
combs.append( [(func, i)] )
else:
new_combs = []
for string in combs:
for func in space:
xstring = string + [(func, i)]
new_combs.append( xstring )
combs = new_combs
return combs
class Structure:
def __init__(self, genotype):
assert isinstance(genotype, list) or isinstance(genotype, tuple), 'invalid class of genotype : {:}'.format(type(genotype))
self.node_num = len(genotype) + 1
self.nodes = []
self.node_N = []
for idx, node_info in enumerate(genotype):
assert isinstance(node_info, list) or isinstance(node_info, tuple), 'invalid class of node_info : {:}'.format(type(node_info))
assert len(node_info) >= 1, 'invalid length : {:}'.format(len(node_info))
for node_in in node_info:
assert isinstance(node_in, list) or isinstance(node_in, tuple), 'invalid class of in-node : {:}'.format(type(node_in))
assert len(node_in) == 2 and node_in[1] <= idx, 'invalid in-node : {:}'.format(node_in)
self.node_N.append( len(node_info) )
self.nodes.append( tuple(deepcopy(node_info)) )
def tolist(self, remove_str):
        # Convert this class to a list; if remove_str is 'none', remove the 'none' operations.
        # Note that the input nodes are re-ordered in this function.
        # Returns the genotype list and a success flag (False means the cell is not connected).
genotypes = []
for node_info in self.nodes:
node_info = list( node_info )
node_info = sorted(node_info, key=lambda x: (x[1], x[0]))
node_info = tuple(filter(lambda x: x[0] != remove_str, node_info))
if len(node_info) == 0: return None, False
genotypes.append( node_info )
return genotypes, True
def node(self, index):
assert index > 0 and index <= len(self), 'invalid index={:} < {:}'.format(index, len(self))
return self.nodes[index]
def tostr(self):
strings = []
for node_info in self.nodes:
string = '|'.join([x[0]+'~{:}'.format(x[1]) for x in node_info])
string = '|{:}|'.format(string)
strings.append( string )
return '+'.join(strings)
def check_valid(self):
nodes = {0: True}
for i, node_info in enumerate(self.nodes):
sums = []
for op, xin in node_info:
if op == 'none' or nodes[xin] is False: x = False
else: x = True
sums.append( x )
nodes[i+1] = sum(sums) > 0
return nodes[len(self.nodes)]
def to_unique_str(self, consider_zero=False):
        # this is used to identify isomorphic cells, which requires prior knowledge of the operations
# two operations are special, i.e., none and skip_connect
nodes = {0: '0'}
for i_node, node_info in enumerate(self.nodes):
cur_node = []
for op, xin in node_info:
if consider_zero is None:
x = '('+nodes[xin]+')' + '@{:}'.format(op)
elif consider_zero:
if op == 'none' or nodes[xin] == '#': x = '#' # zero
elif op == 'skip_connect': x = nodes[xin]
else: x = '('+nodes[xin]+')' + '@{:}'.format(op)
else:
if op == 'skip_connect': x = nodes[xin]
else: x = '('+nodes[xin]+')' + '@{:}'.format(op)
cur_node.append(x)
nodes[i_node+1] = '+'.join( sorted(cur_node) )
return nodes[ len(self.nodes) ]
def check_valid_op(self, op_names):
for node_info in self.nodes:
for inode_edge in node_info:
#assert inode_edge[0] in op_names, 'invalid op-name : {:}'.format(inode_edge[0])
if inode_edge[0] not in op_names: return False
return True
def __repr__(self):
return ('{name}({node_num} nodes with {node_info})'.format(name=self.__class__.__name__, node_info=self.tostr(), **self.__dict__))
def __len__(self):
return len(self.nodes) + 1
def __getitem__(self, index):
return self.nodes[index]
@staticmethod
def str2structure(xstr):
assert isinstance(xstr, str), 'must take string (not {:}) as input'.format(type(xstr))
nodestrs = xstr.split('+')
genotypes = []
for i, node_str in enumerate(nodestrs):
inputs = list(filter(lambda x: x != '', node_str.split('|')))
for xinput in inputs: assert len(xinput.split('~')) == 2, 'invalid input length : {:}'.format(xinput)
inputs = ( xi.split('~') for xi in inputs )
input_infos = tuple( (op, int(IDX)) for (op, IDX) in inputs)
genotypes.append( input_infos )
return Structure( genotypes )
@staticmethod
def str2fullstructure(xstr, default_name='none'):
assert isinstance(xstr, str), 'must take string (not {:}) as input'.format(type(xstr))
nodestrs = xstr.split('+')
genotypes = []
for i, node_str in enumerate(nodestrs):
inputs = list(filter(lambda x: x != '', node_str.split('|')))
for xinput in inputs: assert len(xinput.split('~')) == 2, 'invalid input length : {:}'.format(xinput)
inputs = ( xi.split('~') for xi in inputs )
input_infos = list( (op, int(IDX)) for (op, IDX) in inputs)
all_in_nodes= list(x[1] for x in input_infos)
for j in range(i):
if j not in all_in_nodes: input_infos.append((default_name, j))
node_info = sorted(input_infos, key=lambda x: (x[1], x[0]))
genotypes.append( tuple(node_info) )
return Structure( genotypes )
@staticmethod
def gen_all(search_space, num, return_ori):
assert isinstance(search_space, list) or isinstance(search_space, tuple), 'invalid class of search-space : {:}'.format(type(search_space))
assert num >= 2, 'There should be at least two nodes in a neural cell instead of {:}'.format(num)
all_archs = get_combination(search_space, 1)
for i, arch in enumerate(all_archs):
all_archs[i] = [ tuple(arch) ]
for inode in range(2, num):
cur_nodes = get_combination(search_space, inode)
new_all_archs = []
for previous_arch in all_archs:
for cur_node in cur_nodes:
new_all_archs.append( previous_arch + [tuple(cur_node)] )
all_archs = new_all_archs
if return_ori:
return all_archs
else:
return [Structure(x) for x in all_archs]
ResNet_CODE = Structure(
[(('nor_conv_3x3', 0), ), # node-1
(('nor_conv_3x3', 1), ), # node-2
(('skip_connect', 0), ('skip_connect', 2))] # node-3
)
AllConv3x3_CODE = Structure(
[(('nor_conv_3x3', 0), ), # node-1
(('nor_conv_3x3', 0), ('nor_conv_3x3', 1)), # node-2
(('nor_conv_3x3', 0), ('nor_conv_3x3', 1), ('nor_conv_3x3', 2))] # node-3
)
AllFull_CODE = Structure(
[(('skip_connect', 0), ('nor_conv_1x1', 0), ('nor_conv_3x3', 0), ('avg_pool_3x3', 0)), # node-1
(('skip_connect', 0), ('nor_conv_1x1', 0), ('nor_conv_3x3', 0), ('avg_pool_3x3', 0), ('skip_connect', 1), ('nor_conv_1x1', 1), ('nor_conv_3x3', 1), ('avg_pool_3x3', 1)), # node-2
(('skip_connect', 0), ('nor_conv_1x1', 0), ('nor_conv_3x3', 0), ('avg_pool_3x3', 0), ('skip_connect', 1), ('nor_conv_1x1', 1), ('nor_conv_3x3', 1), ('avg_pool_3x3', 1), ('skip_connect', 2), ('nor_conv_1x1', 2), ('nor_conv_3x3', 2), ('avg_pool_3x3', 2))] # node-3
)
AllConv1x1_CODE = Structure(
[(('nor_conv_1x1', 0), ), # node-1
(('nor_conv_1x1', 0), ('nor_conv_1x1', 1)), # node-2
(('nor_conv_1x1', 0), ('nor_conv_1x1', 1), ('nor_conv_1x1', 2))] # node-3
)
AllIdentity_CODE = Structure(
[(('skip_connect', 0), ), # node-1
(('skip_connect', 0), ('skip_connect', 1)), # node-2
(('skip_connect', 0), ('skip_connect', 1), ('skip_connect', 2))] # node-3
)
architectures = {'resnet' : ResNet_CODE,
'all_c3x3': AllConv3x3_CODE,
'all_c1x1': AllConv1x1_CODE,
'all_idnt': AllIdentity_CODE,
'all_full': AllFull_CODE}
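# --- Hedged usage sketch (not part of the original file) ---------------------
# Structure round-trips the NAS-Bench-201-style architecture string parsed by
# str2structure: nodes are separated by '+', in-edges by '|', each edge being
# 'op~input_index':
#
#   arch = '|nor_conv_3x3~0|+|skip_connect~0|nor_conv_3x3~1|'
#   s = Structure.str2structure(arch)
#   s.tostr() == arch        # True
#   len(s), s.check_valid()  # (3, True) -- 3 nodes, all reachable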
|
Cream/CDARTS/benchmark201/utils/genotypes.py/0
|
{
"file_path": "Cream/CDARTS/benchmark201/utils/genotypes.py",
"repo_id": "Cream",
"token_count": 5956
}
| 303 |
import torch
import torch.nn as nn
import torch.nn.functional as F
import lib.utils.genotypes as gt
import logging
import copy
from lib.models import ops
from lib.models.search_cells import SearchCell
from lib.models.augment_cells import AugmentCell
from lib.models.aux_head import AuxiliaryHeadCIFAR, AuxiliaryHeadImageNet, DistillHeadCIFAR, DistillHeadImagenet
from lib.models.model_augment import ModelAug
class CDARTSController(nn.Module):
""" CDARTS Controller"""
def __init__(self, config, criterion, n_nodes=4, stem_multiplier=3, genotypes={}):
"""
args:
"""
super(CDARTSController, self).__init__()
# some settings
self.n_nodes = n_nodes
self.n_ops = len(gt.PRIMITIVES)
self.criterion = criterion
self.layer_num = config.layer_num
self.c_in = config.input_channels
self.num_classes = config.n_classes
# cifar10 or imagenet
self.model_type = config.model_type
self.stem_multiplier = stem_multiplier
self.init_channel = config.init_channels
self.res_stem = config.res_stem
self.ensemble_sum = config.ensemble_sum
self.use_ensemble_param = config.ensemble_param
self.use_beta = config.use_beta
self.bn_affine = config.bn_affine
self.repeat_cell = config.repeat_cell
self.fix_head = config.fix_head
self.share_fc = config.share_fc
self.sample_pretrain = config.sample_pretrain
if self.model_type == 'cifar':
self.layers = [3, 3, 2]
self.layers_reduction = [True, True, False]
self.augment_layers = [7, 7, 6]
self.nas_layers = nn.ModuleList([None, None, None])
elif self.model_type == 'imagenet':
if self.res_stem:
self.layers = [2, 2, 2, 2]
self.nas_layers = nn.ModuleList([None, None, None, None])
self.layers_reduction = [False, True, True, True]
self.augment_layers = [3, 4, 3, 4]
else:
self.layers = [3, 3, 2]
self.nas_layers = nn.ModuleList([None, None, None])
self.layers_reduction = [True, True, False]
self.augment_layers = [5, 5, 4]
else:
raise Exception("Wrong model type!")
# use genotypes to generate search layers
self.genotypes = genotypes
self.connects = {}
self.fc_super = None
self.fc_nas = None
self.distill_aux_c1 = None
self.distill_aux_c2 = None
self.feature_extractor = None
self.gap = nn.AdaptiveAvgPool2d(1)
self.super_layers = nn.ModuleList()
self.super_layers_arch = nn.ModuleList()
self.super_layers_pool = nn.ModuleList()
self.super_layers_pool_arch = nn.ModuleList()
self.model_main = None
self.build_init_model()
######################## ---------------------------- ########################
######################## Functions for update modules ########################
######################## ---------------------------- ########################
def build_init_model(self):
self.extractor_grad = True
if self.model_type == 'cifar':
self.feature_extractor = self.cifar_stem(self.init_channel * self.stem_multiplier)
reduction_p = False
elif self.model_type == 'imagenet':
if self.res_stem:
self.feature_extractor = self.resnet_stem(self.init_channel * self.stem_multiplier)
reduction_p = False
else:
self.feature_extractor = self.imagenet_stem(self.init_channel * self.stem_multiplier)
reduction_p = True
else:
raise Exception("error! not support now!")
c_p = self.init_channel * self.stem_multiplier
c_pp = self.init_channel * self.stem_multiplier
c_cur = self.init_channel
self.super_layers_pool_arch.append(self.pretrain_architecture_params(self.n_ops))
if self.repeat_cell:
self.super_layers_arch.append(self.add_architecture_params(self.n_ops))
for layer_idx in range(self.layer_num):
reduction = self.layers_reduction[layer_idx]
super_layer = self.add_super_layer(c_cur, c_p, c_pp, reduction_p, reduction, self.layers[layer_idx])
super_layer_pool = self.add_super_layer(c_cur, c_p, c_pp, reduction_p, reduction, self.augment_layers[layer_idx], is_slim=self.sample_pretrain)
super_layer_arch = self.add_architecture_params(self.n_ops)
self.freeze_unused_params(super_layer_arch, reduction, self.layers[layer_idx])
self.super_layers.append(super_layer)
self.super_layers_pool.append(super_layer_pool)
if not self.repeat_cell:
self.super_layers_arch.append(super_layer_arch)
if reduction:
c_p = c_cur * 2 * self.n_nodes
else:
c_p = c_cur * self.n_nodes
if self.res_stem:
c_pp = c_p
reduction_p = False
else:
c_pp = c_cur * self.n_nodes
reduction_p = reduction
if layer_idx == self.layer_num-3:
self.distill_aux_c1 = c_p
if layer_idx == self.layer_num-2:
self.distill_aux_c2 = c_p
if reduction:
c_cur = c_cur * 2
else:
c_cur = c_cur
self.fc_super = nn.Linear(c_p, self.num_classes)
if self.share_fc:
self.fc_nas = self.fc_super
else:
self.fc_nas = nn.Linear(c_p, self.num_classes)
if self.use_ensemble_param:
self.ensemble_param = nn.Parameter(0.333*torch.rand(3), requires_grad=True)
else:
self.ensemble_param = nn.Parameter(0.333*torch.ones(3), requires_grad=False)
if self.model_type == 'cifar':
self.distill_aux_head1 = DistillHeadCIFAR(self.distill_aux_c1, 6, self.num_classes, bn_affine=False)
self.distill_aux_head2 = DistillHeadCIFAR(self.distill_aux_c2, 6, self.num_classes, bn_affine=False)
elif self.model_type == 'imagenet':
if self.res_stem:
self.distill_aux_head1 = DistillHeadImagenet(self.distill_aux_c1, 14, self.num_classes, bn_affine=False)
self.distill_aux_head2 = DistillHeadImagenet(self.distill_aux_c2, 6, self.num_classes, bn_affine=False)
else:
self.distill_aux_head1 = DistillHeadImagenet(self.distill_aux_c1, 6, self.num_classes, bn_affine=False)
self.distill_aux_head2 = DistillHeadImagenet(self.distill_aux_c2, 5, self.num_classes, bn_affine=False)
else:
raise Exception("error! not support now!")
self.fix_structure()
def fix_structure(self):
if self.fix_head:
for n, p in self.distill_aux_head1.named_parameters():
p.requires_grad = False
for n, p in self.distill_aux_head2.named_parameters():
p.requires_grad = False
def fix_pre_layers(self, layer_idx=0):
for i in range(layer_idx):
for name, param in self.super_layers_arch[i].named_parameters():
param.requires_grad=False
def build_nas_layers(self, layer_idx, best_genotype, same_structure=False):
c_p = self.init_channel * self.stem_multiplier
c_pp = self.init_channel * self.stem_multiplier
c_cur = self.init_channel
if self.model_type == 'cifar':
reduction_p = False
elif self.model_type == 'imagenet':
if self.res_stem:
reduction_p = False
else:
reduction_p = True
else:
raise Exception("error! not support now!")
for i in range(self.layer_num):
reduction = self.layers_reduction[i]
if i == layer_idx:
break
if reduction:
c_p = c_cur * 2 * self.n_nodes
else:
c_p = c_cur * self.n_nodes
if self.res_stem:
c_pp = c_p
reduction_p = False
else:
c_pp = c_cur * self.n_nodes
reduction_p = reduction
if reduction:
c_cur = c_cur * 2
else:
c_cur = c_cur
        # once the search model is well trained, transfer its params from model_search to model_main
# genotype = self.generate_genotype(self.model_search.arch_params)
if same_structure:
nas_layer = self.generate_nas_layer(c_cur, c_p, c_pp, reduction_p, reduction, best_genotype, self.layers[layer_idx], bn_affine=self.bn_affine)
else:
nas_layer = self.generate_nas_layer(c_cur, c_p, c_pp, reduction_p, reduction, best_genotype, self.augment_layers[layer_idx], bn_affine=self.bn_affine)
self.genotypes[layer_idx] = best_genotype
self.nas_layers[layer_idx] = nas_layer
def build_augment_model(self, init_channel, genotypes_dict):
if len(genotypes_dict.keys()) == 0:
raise Exception("error! genotypes is empty!")
else:
self.extractor_grad = True
if self.model_type == 'cifar':
feature_extractor = self.cifar_stem(self.init_channel * self.stem_multiplier)
reduction_p = False
elif self.model_type == 'imagenet':
if self.res_stem:
feature_extractor = self.resnet_stem(self.init_channel * self.stem_multiplier)
reduction_p = False
else:
feature_extractor = self.imagenet_stem(self.init_channel * self.stem_multiplier)
reduction_p = True
else:
raise Exception("error! not support now!")
c_p = self.init_channel * self.stem_multiplier
c_pp = self.init_channel * self.stem_multiplier
c_cur = self.init_channel
for layer_idx, genotype in genotypes_dict.items():
reduction = self.layers_reduction[layer_idx]
nas_layer = self.generate_nas_layer(c_cur, c_p, c_pp, reduction_p, reduction, genotype, self.augment_layers[layer_idx])
self.nas_layers[layer_idx] = nas_layer
if reduction:
c_p = c_cur * 2 * self.n_nodes
else:
c_p = c_cur * self.n_nodes
if self.res_stem:
c_pp = c_p
reduction_p = False
else:
c_pp = c_cur * self.n_nodes
reduction_p = reduction
if reduction:
c_cur = c_cur * 2
else:
c_cur = c_cur
if layer_idx == self.layer_num-2:
c_aux = c_p
if self.model_type == 'cifar':
aux_head = AuxiliaryHeadCIFAR(c_aux, 5, self.num_classes)
elif self.model_type == 'imagenet':
if self.res_stem:
aux_head = AuxiliaryHeadImageNet(c_aux, 12, self.num_classes)
else:
aux_head = AuxiliaryHeadImageNet(c_aux, 5, self.num_classes)
else:
aux_head = None
# super_layers = copy.deepcopy(self.super_layers)
# super_layers_arch = copy.deepcopy(self.super_layers_arch)
nas_layers = copy.deepcopy(self.nas_layers)
fc = copy.deepcopy(self.fc_nas)
self.model_main = ModelAug(feature_extractor, nas_layers, fc, n_nodes=self.n_nodes, aux_head=aux_head)
def freeze_unused_params(self, super_layer_arch, reduction, cell_num):
if not reduction:
for name, param in super_layer_arch.named_parameters():
if name.startswith('1') or name.startswith('3'):
param.requires_grad=False
elif cell_num == 1 and reduction:
for name, param in super_layer_arch.named_parameters():
if name.startswith('0') or name.startswith('2'):
param.requires_grad=False
else:
pass
def param_copy(self, target_model, model):
if model:
for target_param, param in zip(target_model.parameters(), model.parameters()):
target_param.data.copy_(param.data)
def param_copy_plus(self, target_model, model):
model_dict_keys = model.state_dict().keys()
for n, p in target_model.named_parameters():
if n in model_dict_keys:
p.data.copy_(model.state_dict()[n])
def copy_params_from_super_layer(self, layer_idx):
super_layer = self.super_layers_pool[layer_idx]
nas_layer = self.nas_layers[layer_idx]
connect_dict = self.connects[layer_idx]
normal_cell_connect = connect_dict['normal']
reduce_cell_connect = connect_dict['reduce']
for super_cell, nas_cell in zip(super_layer, nas_layer):
# copy preproc0 and preproc1
self.param_copy_plus(nas_cell.preproc0, super_cell.preproc0)
self.param_copy_plus(nas_cell.preproc1, super_cell.preproc1)
if super_cell.reduction:
cell_connect = reduce_cell_connect
else:
cell_connect = normal_cell_connect
for i, (super_hidden, nas_hidden) in enumerate(zip(super_cell.dag, nas_cell.dag)):
hidden_connect = cell_connect[i]
# k = 2
for j in range(len(hidden_connect)):
connect = hidden_connect[j]
super_edge = super_hidden[connect[0]]
super_op = super_edge._ops[connect[1]]
nas_edge = nas_hidden[j]
if isinstance(nas_edge, ops.Identity):
break
nas_op = nas_edge[0]
# copy params
self.param_copy_plus(nas_op, super_op)
# self.param_copy(super_op, nas_op)
def copy_params_from_nas_layer(self, layer_idx):
super_layer = self.super_layers_pool[layer_idx]
nas_layer = self.nas_layers[layer_idx]
connect_dict = self.connects[layer_idx]
normal_cell_connect = connect_dict['normal']
reduce_cell_connect = connect_dict['reduce']
for super_cell, nas_cell in zip(super_layer, nas_layer):
# copy preproc0 and preproc1
self.param_copy_plus(super_cell.preproc0, nas_cell.preproc0)
self.param_copy_plus(super_cell.preproc1, nas_cell.preproc1)
if super_cell.reduction:
cell_connect = reduce_cell_connect
else:
cell_connect = normal_cell_connect
for i, (super_hidden, nas_hidden) in enumerate(zip(super_cell.dag, nas_cell.dag)):
hidden_connect = cell_connect[i]
# k = 2
for j in range(len(hidden_connect)):
connect = hidden_connect[j]
super_edge = super_hidden[connect[0]]
super_op = super_edge._ops[connect[1]]
nas_edge = nas_hidden[j]
if isinstance(nas_edge, ops.Identity):
break
nas_op = nas_edge[0]
# copy params
self.param_copy_plus(super_op, nas_op)
# self.param_copy(super_op, nas_op)
######################## -------------------------- ########################
######################## Functions for layer search ########################
######################## -------------------------- ########################
def add_super_layer(self, C_cur, C_p, C_pp, reduction_p=False, reduction_cur=False, cell_num=3, is_slim=False):
cells = nn.ModuleList()
# reduction_idx = (cell_num + 1) // 2 - 1
        # the first cell (block) does the downsampling
# reduction_idx = 0
if self.res_stem:
reduction_idx = 0
else:
reduction_idx = cell_num - 1
for i in range(cell_num):
if i == reduction_idx and reduction_cur:
C_cur *= 2
reduction = True
else:
reduction = False
cell = SearchCell(self.n_nodes, C_pp, C_p, C_cur, reduction_p, reduction, is_slim)
reduction_p = reduction
cells.append(cell)
C_cur_out = C_cur * self.n_nodes
C_pp, C_p = C_p, C_cur_out
return cells
def add_architecture_params(self, n_ops):
arch_params = nn.ModuleList()
alpha_normal = nn.ParameterList()
alpha_reduce = nn.ParameterList()
beta_normal = nn.ParameterList()
beta_reduce = nn.ParameterList()
for i in range(self.n_nodes):
alpha_normal.append(nn.Parameter(1e-3*torch.randn(i+2, n_ops)))
alpha_reduce.append(nn.Parameter(1e-3*torch.randn(i+2, n_ops)))
if self.use_beta:
beta_normal.append(nn.Parameter(1e-3*torch.randn(i+2)))
beta_reduce.append(nn.Parameter(1e-3*torch.randn(i+2)))
else:
beta_normal.append(nn.Parameter(1e-1*torch.ones(i+2), requires_grad=False))
beta_reduce.append(nn.Parameter(1e-1*torch.ones(i+2), requires_grad=False))
arch_params.append(alpha_normal)
arch_params.append(alpha_reduce)
arch_params.append(beta_normal)
arch_params.append(beta_reduce)
return arch_params
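    # Shape sketch (illustrative note, not from the original file): with n_nodes
    # intermediate nodes, alpha_normal[i] has shape (i + 2, n_ops) because node i
    # can draw from i + 2 inputs: the two cell inputs plus all i previous nodes.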
def pretrain_architecture_params(self, n_ops):
arch_params = nn.ModuleList()
alpha_normal = nn.ParameterList()
alpha_reduce = nn.ParameterList()
beta_normal = nn.ParameterList()
beta_reduce = nn.ParameterList()
for i in range(self.n_nodes):
alpha_normal.append(nn.Parameter(1e-3*torch.ones(i+2, n_ops), requires_grad=False))
alpha_reduce.append(nn.Parameter(1e-3*torch.ones(i+2, n_ops), requires_grad=False))
beta_normal.append(nn.Parameter(1e-1*torch.ones(i+2), requires_grad=False))
beta_reduce.append(nn.Parameter(1e-1*torch.ones(i+2), requires_grad=False))
arch_params.append(alpha_normal)
arch_params.append(alpha_reduce)
arch_params.append(beta_normal)
arch_params.append(beta_reduce)
return arch_params
######################## ---------------------------- ########################
######################## Functions for layer generate ########################
######################## ---------------------------- ########################
def generate_nas_layer(self, C_cur, C_p, C_pp, reduction_p, reduction_cur, genotype, cell_num=3, bn_affine=True):
cells = nn.ModuleList()
# reduction_idx = (cell_num + 1) // 2 - 1
        # the first cell (block) does the downsampling
# reduction_idx = 0
if self.res_stem:
reduction_idx = 0
else:
reduction_idx = cell_num - 1
for i in range(cell_num):
if i == reduction_idx and reduction_cur:
C_cur *= 2
reduction = True
else:
reduction = False
cell = AugmentCell(genotype, C_pp, C_p, C_cur, reduction_p, reduction, bn_affine)
reduction_p = reduction
cells.append(cell)
C_cur_out = C_cur * len(cell.concat)
C_pp, C_p = C_p, C_cur_out
return cells
######################## ---------------------------- ########################
######################## Functions for stem ########################
######################## ---------------------------- ########################
def resnet_stem(self, inplanes=64):
C_in = self.c_in
feature_extractor = nn.ModuleList()
stem = nn.Sequential(
nn.Conv2d(C_in, inplanes, kernel_size=7, stride=2, padding=3, bias=False),
nn.BatchNorm2d(inplanes),
nn.ReLU(inplace=True),
            # layer1 is concatenated with the maxpool
nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
)
feature_extractor.append(stem)
return feature_extractor
def cifar_stem(self, init_channel):
C_in = self.c_in
C_cur = init_channel
feature_extractor = nn.ModuleList()
stem = nn.Sequential(
nn.Conv2d(C_in, C_cur, 3, 1, 1, bias=False),
nn.BatchNorm2d(C_cur)
)
feature_extractor.append(stem)
return feature_extractor
def imagenet_stem(self, init_channel):
C_in = self.c_in
C_cur = init_channel
feature_extractor = nn.ModuleList()
stem0 = nn.Sequential(
nn.Conv2d(C_in, C_cur // 2, kernel_size=3, stride=2, padding=1, bias=False),
nn.BatchNorm2d(C_cur // 2),
nn.ReLU(inplace=True),
nn.Conv2d(C_cur // 2, C_cur, 3, stride=2, padding=1, bias=False),
nn.BatchNorm2d(C_cur),
)
stem1 = nn.Sequential(
nn.ReLU(inplace=True),
nn.Conv2d(C_cur, C_cur, 3, stride=2, padding=1, bias=False),
nn.BatchNorm2d(C_cur),
)
feature_extractor.append(stem0)
feature_extractor.append(stem1)
return feature_extractor
######################## ---------------------------- ########################
######################## Functions for forward ########################
######################## ---------------------------- ########################
def extract_features(self, im):
# feature_extractor is nn.ModuleList()
if len(self.feature_extractor) == 1:
s0 = self.feature_extractor[0](im)
s1 = s0
return [s0, s1]
elif len(self.feature_extractor) == 2:
s0 = self.feature_extractor[0](im)
s1 = self.feature_extractor[1](s0)
return [s0, s1]
else:
raise NotImplementedError
def init_arch_params(self, layer_idx):
init_arch_params = self.add_architecture_params(n_ops=len(ops.PRIMITIVES))
for i in range(layer_idx, len(self.super_layers_arch)):
target_arch = self.super_layers_arch[i]
self.param_copy(target_arch, init_arch_params)
for i in range(layer_idx, len(self.super_layers_pool_arch)):
target_arch = self.super_layers_pool_arch[i]
self.param_copy(target_arch, init_arch_params)
del init_arch_params
def freeze_arch_params(self, layer_idx=0):
for i in range(self.super_layers_num):
if i != layer_idx:
for name, param in self.super_layers_arch[i].named_parameters():
param.requires_grad=False
else:
for name, param in self.super_layers_arch[i].named_parameters():
param.requires_grad=True
def print_arch_params(self, logger, layer_idx=0):
        # temporarily remove log formats so the raw tensors print cleanly
if self.repeat_cell:
alpha_normal, alpha_reduce, beta_normal, beta_reduce = self.super_layers_arch[0]
else:
alpha_normal, alpha_reduce, beta_normal, beta_reduce = self.super_layers_arch[layer_idx]
org_formatters = []
for handler in logger.handlers:
org_formatters.append(handler.formatter)
handler.setFormatter(logging.Formatter("%(message)s"))
logger.info("####### ALPHA #######")
logger.info("# Alpha - normal")
for alpha in alpha_normal:
logger.info(F.softmax(alpha, dim=-1))
logger.info("\n# Alpha - reduce")
for alpha in alpha_reduce:
logger.info(F.softmax(alpha, dim=-1))
logger.info("#####################")
if self.use_beta:
logger.info("####### BETA #######")
logger.info("# Beta - normal")
for beta in beta_normal:
logger.info(F.softmax(beta, dim=-1))
logger.info("\n# Beta - reduce")
for beta in beta_reduce:
logger.info(F.softmax(beta, dim=-1))
            logger.info("#####################")
        # restore the original log formats
        for handler, formatter in zip(logger.handlers, org_formatters):
            handler.setFormatter(formatter)
def generate_genotype(self, layer_idx=0):
# arch_params list
if self.repeat_cell:
alpha_normal, alpha_reduce, beta_normal, beta_reduce = self.super_layers_arch[0]
else:
alpha_normal, alpha_reduce, beta_normal, beta_reduce = self.super_layers_arch[layer_idx]
weights_normal = [F.softmax(alpha, dim=-1) for alpha in alpha_normal]
weights_reduce = [F.softmax(alpha, dim=-1) for alpha in alpha_reduce]
weights_edge_normal = [F.softmax(beta, dim=0) for beta in beta_normal]
weights_edge_reduce = [F.softmax(beta, dim=0) for beta in beta_reduce]
gene_normal, connect_normal = gt.parse(weights_normal, weights_edge_normal, k=2)
gene_reduce, connect_reduce = gt.parse(weights_reduce, weights_edge_reduce, k=2)
connect_dict = {"normal": connect_normal, "reduce": connect_reduce}
concat = range(2, 2+self.n_nodes) # concat all intermediate nodes
return gt.Genotype(normal=gene_normal, normal_concat=concat, reduce=gene_reduce, reduce_concat=concat), connect_dict
def generate_genotype_gumbel(self, layer_idx=0):
# arch_params list
if self.repeat_cell:
alpha_normal, alpha_reduce, beta_normal, beta_reduce = self.super_layers_arch[0]
else:
alpha_normal, alpha_reduce, beta_normal, beta_reduce = self.super_layers_arch[layer_idx]
weights_normal = [F.softmax(alpha, dim=-1) for alpha in alpha_normal]
weights_reduce = [F.softmax(alpha, dim=-1) for alpha in alpha_reduce]
weights_edge_normal = [F.softmax(beta, dim=0) for beta in beta_normal]
weights_edge_reduce = [F.softmax(beta, dim=0) for beta in beta_reduce]
gene_normal, connect_normal = gt.parse_gumbel(weights_normal, weights_edge_normal, k=2)
gene_reduce, connect_reduce = gt.parse_gumbel(weights_reduce, weights_edge_reduce, k=2)
connect_dict = {"normal": connect_normal, "reduce": connect_reduce}
concat = range(2, 2+self.n_nodes) # concat all intermediate nodes
return gt.Genotype(normal=gene_normal, normal_concat=concat, reduce=gene_reduce, reduce_concat=concat), connect_dict
def get_aux_logits(self, idx, s1):
if idx == self.layer_num-3:
return self.distill_aux_head1(s1)
if idx == self.layer_num-2:
return self.distill_aux_head2(s1)
return None
def forward(self, x, layer_idx, super_flag=True, pretrain_flag=False, is_slim=False):
        # layer_idx: which stage we are in
        # if super_flag, forward the supernetwork; otherwise forward the nas network
        # if pretrain_flag, forward the supernetwork pool
if pretrain_flag:
super_layers_num = len(self.super_layers)
nas_layers_num = 0
super_layers = self.super_layers_pool
super_layers_arch = self.super_layers_pool_arch
else:
if super_flag:
super_layers = self.super_layers
super_layers_arch = self.super_layers_arch
nas_layers = self.nas_layers
nas_layers_num = len(self.nas_layers[:layer_idx])
super_layers_num = len(self.super_layers[layer_idx:])
else:
nas_layers = self.nas_layers
nas_layers_num = len(self.nas_layers)
super_layers_num = 0
outputs = []
s0, s1 = self.extract_features(x)
for i in range(nas_layers_num):
s0, s1 = self.forward_nas_layer(s0, s1, nas_layers[i])
logit = self.get_aux_logits(i, s1)
if logit is not None:
outputs.append(logit)
aux_logits = None
for j in range(super_layers_num):
k = nas_layers_num + j
if self.repeat_cell or pretrain_flag:
s0, s1 = self.forward_super_layer(s0, s1, super_layers[k], super_layers_arch[0], is_slim)
if k == self.layer_num-2:
aux_logits = self.distill_aux_head2(s1)
else:
s0, s1 = self.forward_super_layer(s0, s1, super_layers[k], super_layers_arch[k], is_slim)
if not pretrain_flag:
logit = self.get_aux_logits(k, s1)
if logit is not None:
outputs.append(logit)
out = self.gap(s1)
out = out.view(out.size(0), -1) # flatten
if super_flag:
logits = self.fc_super(out)
else:
logits = self.fc_nas(out)
if pretrain_flag:
return logits, aux_logits
outputs.append(logits)
logits_output = logits
ensemble_param = F.softmax(self.ensemble_param, dim=0)
if self.ensemble_sum:
em_output = ensemble_param[0] * outputs[0] + ensemble_param[1] * outputs[1] + ensemble_param[2] * outputs[2]
else:
em_output = torch.cat((ensemble_param[0] * outputs[0], ensemble_param[1] * outputs[1], ensemble_param[2] * outputs[2]), 0)
return logits_output, em_output
# return em_output, em_output
def process_alpha(self, alpha_param, beta_param):
weights_normal = [F.softmax(alpha, dim=-1) for alpha in alpha_param]
weights_edge_normal = [F.softmax(beta, dim=0) for beta in beta_param]
output_alpha = nn.ParameterList()
for alpha in weights_normal:
output_alpha.append(nn.Parameter(torch.zeros_like(alpha), requires_grad=False))
connect_idx = []
k = 2
for idx, (edges, w) in enumerate(zip(weights_normal, weights_edge_normal)):
# edges: Tensor(n_edges, n_ops)
edge_max, primitive_indices = torch.topk((w.view(-1, 1) * edges)[:, :-1], 1) # ignore 'none'
topk_edge_values, topk_edge_indices = torch.topk(edge_max.view(-1), k)
node_idx = []
for edge_idx in topk_edge_indices:
prim_idx = primitive_indices[edge_idx]
node_idx.append((edge_idx.item(), prim_idx.item()))
output_alpha[idx][edge_idx.item(), prim_idx.item()] = 1.
connect_idx.append(node_idx)
return output_alpha
def forward_super_layer(self, s0, s1, super_layer, arch_params, is_slim=False):
# arch_params: list
# super_layer: cells (2 / 3)
alpha_normal, alpha_reduce, beta_normal, beta_reduce = arch_params
if is_slim:
weights_normal = self.process_alpha(alpha_normal, beta_normal)
weights_edge_normal = [F.softmax(beta, dim=0) for beta in beta_normal]
weights_reduce = self.process_alpha(alpha_reduce, beta_reduce)
weights_edge_reduce = [F.softmax(beta, dim=0) for beta in beta_reduce]
else:
weights_normal = [F.softmax(alpha, dim=-1) for alpha in alpha_normal]
weights_edge_normal = [F.softmax(beta, dim=0) for beta in beta_normal]
weights_reduce = [F.softmax(alpha, dim=-1) for alpha in alpha_reduce]
weights_edge_reduce = [F.softmax(beta, dim=0) for beta in beta_reduce]
for cell in super_layer:
weights = weights_reduce if cell.reduction else weights_normal
weights_edge = weights_edge_reduce if cell.reduction else weights_edge_normal
s0, s1 = s1, cell(s0, s1, weights, weights_edge)
return s0, s1
def forward_nas_layer(self, s0, s1, nas_layer):
for cell in nas_layer:
s0, s1 = s1, cell(s0, s1)
return s0, s1
def loss(self, X, y):
logits = self.forward(X)
return self.criterion(logits, y)
def add_alpha_regularization(self, operations, weight_decay=0.0005, method='L2', normal=True, reduce=True):
if method == 'L2':
reg_loss = torch.tensor(0.).to(torch.device("cuda"))
for operation in operations:
if self.repeat_cell:
stage, operation = operation
stage = 0
else:
stage, operation = operation
if normal:
for node in self.super_layers_arch[stage][0]:
for connection in node:
reg_loss += connection[ops.PRIMITIVES.index(operation)] * \
connection[ops.PRIMITIVES.index(operation)]
if reduce:
for node in self.super_layers_arch[stage][1]:
for connection in node:
reg_loss += connection[ops.PRIMITIVES.index(operation)] * \
connection[ops.PRIMITIVES.index(operation)]
return reg_loss * weight_decay
elif method == 'L1':
reg_loss = torch.tensor(0.).cuda()
for operation in operations:
if self.repeat_cell:
stage, operation = operation
stage = 0
else:
stage, operation = operation
if normal:
for node in self.super_layers_arch[stage][0]:
for connection in node:
reg_loss += abs(connection[ops.PRIMITIVES.index(operation)])
if reduce:
for node in self.super_layers_arch[stage][1]:
for connection in node:
reg_loss += abs(connection[ops.PRIMITIVES.index(operation)])
return reg_loss * weight_decay
else:
raise ValueError('Method isn\'t supported')
# ---- end of file: Cream/CDARTS/lib/models/cdarts_controller.py ----
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
# Written by Hao Du and Houwen Peng
# email: [email protected] and [email protected]
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from yacs.config import CfgNode as CN
DEFAULT_CROP_PCT = 0.875
IMAGENET_DEFAULT_MEAN = (0.485, 0.456, 0.406)
IMAGENET_DEFAULT_STD = (0.229, 0.224, 0.225)
__C = CN()
cfg = __C
__C.AUTO_RESUME = True
__C.DATA_DIR = './data/imagenet'
__C.MODEL = 'cream'
__C.RESUME_PATH = './experiments/ckps/resume.pth.tar'
__C.SAVE_PATH = './experiments/ckps/'
__C.SEED = 42
__C.LOG_INTERVAL = 50
__C.RECOVERY_INTERVAL = 0
__C.WORKERS = 4
__C.NUM_GPU = 1
__C.SAVE_IMAGES = False
__C.AMP = False
__C.OUTPUT = 'output/path/'
__C.EVAL_METRICS = 'prec1'
__C.TTA = 0 # Test or inference time augmentation
__C.LOCAL_RANK = 0
__C.VERBOSE = False
# dataset configs
__C.DATASET = CN()
__C.DATASET.NUM_CLASSES = 1000
__C.DATASET.IMAGE_SIZE = 224 # image patch size
__C.DATASET.INTERPOLATION = 'bilinear' # Image resize interpolation type
__C.DATASET.BATCH_SIZE = 32 # batch size
__C.DATASET.NO_PREFECHTER = False
__C.DATASET.PIN_MEM = True
__C.DATASET.VAL_BATCH_MUL = 4
# model configs
__C.NET = CN()
__C.NET.SELECTION = 14
__C.NET.GP = 'avg' # type of global pool ["avg", "max", "avgmax", "avgmaxc"]
__C.NET.DROPOUT_RATE = 0.0 # dropout rate
# model ema parameters
__C.NET.EMA = CN()
__C.NET.EMA.USE = True
__C.NET.EMA.FORCE_CPU = False # force model ema to be tracked on CPU
__C.NET.EMA.DECAY = 0.9998
# optimizer configs
__C.OPT = 'sgd'
__C.OPT_EPS = 1e-2
__C.MOMENTUM = 0.9
__C.WEIGHT_DECAY = 1e-4
__C.OPTIMIZER = CN()
__C.OPTIMIZER.NAME = 'sgd'
__C.OPTIMIZER.MOMENTUM = 0.9
__C.OPTIMIZER.WEIGHT_DECAY = 1e-3
# scheduler configs
__C.SCHED = 'sgd'
__C.LR_NOISE_PCT = 0.67
__C.LR_NOISE_STD = 1.0
__C.WARMUP_LR = 1e-4
__C.MIN_LR = 1e-5
__C.EPOCHS = 200
__C.START_EPOCH = None
__C.DECAY_EPOCHS = 30.0
__C.WARMUP_EPOCHS = 3
__C.COOLDOWN_EPOCHS = 10
__C.PATIENCE_EPOCHS = 10
__C.DECAY_RATE = 0.1
__C.LR = 1e-2
__C.LR_NOISE = None
__C.META_LR = 1e-4
# data augmentation parameters
__C.AUGMENTATION = CN()
__C.AUGMENTATION.AA = 'rand-m9-mstd0.5'
__C.AUGMENTATION.COLOR_JITTER = 0.4
__C.AUGMENTATION.RE_PROB = 0.2 # random erase prob
__C.AUGMENTATION.RE_MODE = 'pixel' # random erase mode
__C.AUGMENTATION.MIXUP = 0.0 # mixup alpha
__C.AUGMENTATION.MIXUP_OFF_EPOCH = 0 # turn off mixup after this epoch
__C.AUGMENTATION.SMOOTHING = 0.1 # label smoothing parameters
# batch norm parameters (currently only works with gen_efficientnet-based models)
__C.BATCHNORM = CN()
__C.BATCHNORM.SYNC_BN = False
__C.BATCHNORM.BN_TF = False
__C.BATCHNORM.BN_MOMENTUM = 0.1 # batchnorm momentum override
__C.BATCHNORM.BN_EPS = 1e-5 # batchnorm eps override
# supernet training hyperparameters
__C.SUPERNET = CN()
__C.SUPERNET.UPDATE_ITER = 1300
__C.SUPERNET.SLICE = 4
__C.SUPERNET.POOL_SIZE = 10
__C.SUPERNET.RESUNIT = False
__C.SUPERNET.DIL_CONV = False
__C.SUPERNET.UPDATE_2ND = True
__C.SUPERNET.FLOPS_MAXIMUM = 600
__C.SUPERNET.FLOPS_MINIMUM = 0
__C.SUPERNET.PICK_METHOD = 'meta' # pick teacher method
__C.SUPERNET.META_STA_EPOCH = 20 # start using meta picking method
__C.SUPERNET.HOW_TO_PROB = 'pre_prob' # sample method
__C.SUPERNET.PRE_PROB = (0.05, 0.2, 0.05, 0.5, 0.05, 0.15) # sample prob in 'pre_prob'
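# Illustrative usage of this yacs config (a sketch; the yaml path is a placeholder):
# from lib.config import cfg
# cfg.merge_from_file('experiments/configs/train.yaml')
# cfg.freeze()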
# ---- end of file: Cream/Cream/lib/config.py ----
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
# Written by Hao Du and Houwen Peng
# email: [email protected] and [email protected]
def search_for_layer(flops_op_dict, arch_def, flops_minimum, flops_maximum):
sta_num = [1, 1, 1, 1, 1]
order = [2, 3, 4, 1, 0, 2, 3, 4, 1, 0]
limits = [3, 3, 3, 2, 2, 4, 4, 4, 4, 4]
size_factor = 224 // 32
base_min_flops = sum([flops_op_dict[i][0][0] for i in range(5)])
base_max_flops = sum([flops_op_dict[i][5][0] for i in range(5)])
if base_min_flops > flops_maximum:
while base_min_flops > flops_maximum and size_factor >= 2:
size_factor = size_factor - 1
flops_minimum = flops_minimum * (7. / size_factor)
flops_maximum = flops_maximum * (7. / size_factor)
if size_factor < 2:
return None, None, None
elif base_max_flops < flops_minimum:
cur_ptr = 0
while base_max_flops < flops_minimum and cur_ptr <= 9:
if sta_num[order[cur_ptr]] >= limits[cur_ptr]:
cur_ptr += 1
continue
base_max_flops = base_max_flops + \
flops_op_dict[order[cur_ptr]][5][1]
sta_num[order[cur_ptr]] += 1
if cur_ptr > 7 and base_max_flops < flops_minimum:
return None, None, None
cur_ptr = 0
while cur_ptr <= 9:
if sta_num[order[cur_ptr]] >= limits[cur_ptr]:
cur_ptr += 1
continue
base_max_flops = base_max_flops + flops_op_dict[order[cur_ptr]][5][1]
if base_max_flops <= flops_maximum:
sta_num[order[cur_ptr]] += 1
else:
break
arch_def = [item[:i] for i, item in zip([1] + sta_num + [1], arch_def)]
# print(arch_def)
return sta_num, arch_def, size_factor * 32
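if __name__ == '__main__':
    # Minimal smoke test (illustrative only: the numbers below are toy values, not
    # real per-op FLOPs). Each flops_op_dict[stage][choice] entry is a
    # (flops, delta_flops) pair, matching the lookups above.
    toy_flops = {i: {j: (10.0 + j, 5.0) for j in range(6)} for i in range(5)}
    toy_arch_def = [['block_cfg'] * 4 for _ in range(7)]
    sta_num, arch_def, resolution = search_for_layer(
        toy_flops, toy_arch_def, flops_minimum=0, flops_maximum=600)
    print(sta_num, resolution)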
# ---- end of file: Cream/Cream/lib/utils/search_structure_supernet.py ----
dataset_type = 'CocoDataset'
data_root = 'data/coco/'
img_norm_cfg = dict(
mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True)
train_pipeline = [
dict(type='LoadImageFromFile'),
dict(type='LoadAnnotations', with_bbox=True),
dict(type='Resize', img_scale=(1333, 800), keep_ratio=True),
dict(type='RandomFlip', flip_ratio=0.5),
dict(type='Normalize', **img_norm_cfg),
dict(type='Pad', size_divisor=32),
dict(type='DefaultFormatBundle'),
dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']),
]
test_pipeline = [
dict(type='LoadImageFromFile'),
dict(
type='MultiScaleFlipAug',
img_scale=(1333, 800),
flip=False,
transforms=[
dict(type='Resize', keep_ratio=True),
dict(type='RandomFlip'),
dict(type='Normalize', **img_norm_cfg),
dict(type='Pad', size_divisor=32),
dict(type='ImageToTensor', keys=['img']),
dict(type='Collect', keys=['img']),
])
]
data = dict(
samples_per_gpu=2,
workers_per_gpu=2,
train=dict(
type=dataset_type,
ann_file=data_root + 'annotations/instances_train2017.json',
img_prefix=data_root + 'train2017/',
pipeline=train_pipeline),
val=dict(
type=dataset_type,
ann_file=data_root + 'annotations/instances_val2017.json',
img_prefix=data_root + 'val2017/',
pipeline=test_pipeline),
test=dict(
type=dataset_type,
ann_file=data_root + 'annotations/instances_val2017.json',
img_prefix=data_root + 'val2017/',
pipeline=test_pipeline))
evaluation = dict(interval=1, metric='bbox')
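# Usage note (not part of the original config): _base_ dataset configs like this one
# are meant to be inherited by a full MMDetection config, e.g.
# _base_ = ['../_base_/datasets/coco_detection.py', '../_base_/default_runtime.py']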
# ---- end of file: Cream/EfficientViT/downstream/configs/_base_/datasets/coco_detection.py ----
#!/usr/bin/env bash
CONFIG=$1
CHECKPOINT=$2
GPUS=$3
PORT=${PORT:-29500}
PYTHONPATH="$(dirname $0)/..":$PYTHONPATH \
python -m torch.distributed.launch --nproc_per_node=$GPUS --master_port=$PORT \
$(dirname "$0")/test.py $CONFIG $CHECKPOINT --launcher pytorch ${@:4}
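# Example invocation (illustrative; the config/checkpoint paths are placeholders):
#   bash dist_test.sh configs/my_config.py work_dirs/ckpt.pth 8 --eval bbox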
# ---- end of file: Cream/EfficientViT/downstream/dist_test.sh ----
import torch
import torch.distributed as dist
import math
class RASampler(torch.utils.data.Sampler):
"""Sampler that restricts data loading to a subset of the dataset for distributed,
with repeated augmentation.
It ensures that different each augmented version of a sample will be visible to a
different process (GPU)
Heavily based on torch.utils.data.DistributedSampler
"""
def __init__(self, dataset, num_replicas=None, rank=None, shuffle=True):
if num_replicas is None:
if not dist.is_available():
raise RuntimeError("Requires distributed package to be available")
num_replicas = dist.get_world_size()
if rank is None:
if not dist.is_available():
raise RuntimeError("Requires distributed package to be available")
rank = dist.get_rank()
self.dataset = dataset
self.num_replicas = num_replicas
self.rank = rank
self.epoch = 0
self.num_samples = int(math.ceil(len(self.dataset) * 3.0 / self.num_replicas))
self.total_size = self.num_samples * self.num_replicas
# self.num_selected_samples = int(math.ceil(len(self.dataset) / self.num_replicas))
self.num_selected_samples = int(math.floor(len(self.dataset) // 256 * 256 / self.num_replicas))
self.shuffle = shuffle
def __iter__(self):
# deterministically shuffle based on epoch
g = torch.Generator()
g.manual_seed(self.epoch)
if self.shuffle:
indices = torch.randperm(len(self.dataset), generator=g).tolist()
else:
indices = list(range(len(self.dataset)))
# add extra samples to make it evenly divisible
indices = [ele for ele in indices for i in range(3)]
indices += indices[:(self.total_size - len(indices))]
assert len(indices) == self.total_size
# subsample
indices = indices[self.rank:self.total_size:self.num_replicas]
assert len(indices) == self.num_samples
return iter(indices[:self.num_selected_samples])
def __len__(self):
return self.num_selected_samples
def set_epoch(self, epoch):
self.epoch = epoch
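# Illustrative usage (a sketch; assumes torch.distributed is already initialized
# and `train_dataset`, `world_size`, `rank` are defined by the caller):
# sampler = RASampler(train_dataset, num_replicas=world_size, rank=rank)
# loader = torch.utils.data.DataLoader(train_dataset, sampler=sampler, batch_size=64)
# for epoch in range(num_epochs):
#     sampler.set_epoch(epoch)  # reseed the deterministic shuffle each epoch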
# ---- end of file: Cream/MiniViT/Mini-DeiT/samplers.py ----
import os
import torch
import numpy as np
import torch.distributed as dist
from torchvision import datasets, transforms
from timm.data.constants import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD
from timm.data import Mixup
from timm.data import create_transform
from timm.data.transforms import _pil_interp
try:
from timm.data import DatasetTar
except ImportError:
    # for newer versions of timm
from timm.data import ImageDataset as DatasetTar
from .cached_image_folder import CachedImageFolder
from .samplers import SubsetRandomSampler
def build_loader(config):
config.defrost()
dataset_train, config.MODEL.NUM_CLASSES = build_dataset(is_train=True, config=config)
config.freeze()
print(f"local rank {config.LOCAL_RANK} / global rank {dist.get_rank()} successfully build train dataset")
dataset_val, _ = build_dataset(is_train=False, config=config)
print(f"local rank {config.LOCAL_RANK} / global rank {dist.get_rank()} successfully build val dataset")
num_tasks = dist.get_world_size()
global_rank = dist.get_rank()
if config.DATA.ZIP_MODE and config.DATA.CACHE_MODE == 'part':
indices = np.arange(dist.get_rank(), len(dataset_train), dist.get_world_size())
sampler_train = SubsetRandomSampler(indices)
else:
sampler_train = torch.utils.data.DistributedSampler(
dataset_train, num_replicas=num_tasks, rank=global_rank, shuffle=True
)
indices = np.arange(dist.get_rank(), len(dataset_val), dist.get_world_size())
sampler_val = SubsetRandomSampler(indices)
data_loader_train = torch.utils.data.DataLoader(
dataset_train, sampler=sampler_train,
batch_size=config.DATA.BATCH_SIZE,
num_workers=config.DATA.NUM_WORKERS,
pin_memory=config.DATA.PIN_MEMORY,
drop_last=True,
)
data_loader_val = torch.utils.data.DataLoader(
dataset_val, sampler=sampler_val,
batch_size=config.DATA.BATCH_SIZE,
shuffle=False,
num_workers=config.DATA.NUM_WORKERS,
pin_memory=config.DATA.PIN_MEMORY,
drop_last=False
)
# setup mixup / cutmix
mixup_fn = None
mixup_active = config.AUG.MIXUP > 0 or config.AUG.CUTMIX > 0. or config.AUG.CUTMIX_MINMAX is not None
if mixup_active:
mixup_fn = Mixup(
mixup_alpha=config.AUG.MIXUP, cutmix_alpha=config.AUG.CUTMIX, cutmix_minmax=config.AUG.CUTMIX_MINMAX,
prob=config.AUG.MIXUP_PROB, switch_prob=config.AUG.MIXUP_SWITCH_PROB, mode=config.AUG.MIXUP_MODE,
label_smoothing=config.MODEL.LABEL_SMOOTHING, num_classes=config.MODEL.NUM_CLASSES)
return dataset_train, dataset_val, data_loader_train, data_loader_val, mixup_fn
def build_dataset(is_train, config):
transform = build_transform(is_train, config)
if config.DATA.DATASET == 'imagenet':
prefix = 'train' if is_train else 'val'
if config.DATA.LOAD_TAR:
data_dir = os.path.join(config.DATA.DATA_PATH, f'{prefix}.tar')
dataset = DatasetTar(data_dir, transform=transform)
else:
if config.DATA.ZIP_MODE:
ann_file = prefix + "_map.txt"
prefix = prefix + ".zip@/"
dataset = CachedImageFolder(config.DATA.DATA_PATH, ann_file, prefix, transform,
cache_mode=config.DATA.CACHE_MODE if is_train else 'part')
else:
root = os.path.join(config.DATA.DATA_PATH, prefix)
dataset = datasets.ImageFolder(root, transform=transform)
nb_classes = 1000
else:
raise NotImplementedError("We only support ImageNet Now.")
return dataset, nb_classes
def build_transform(is_train, config):
resize_im = config.DATA.IMG_SIZE > 32
if is_train:
# this should always dispatch to transforms_imagenet_train
transform = create_transform(
input_size=config.DATA.IMG_SIZE,
is_training=True,
color_jitter=config.AUG.COLOR_JITTER if config.AUG.COLOR_JITTER > 0 else None,
auto_augment=config.AUG.AUTO_AUGMENT if config.AUG.AUTO_AUGMENT != 'none' else None,
re_prob=config.AUG.REPROB,
re_mode=config.AUG.REMODE,
re_count=config.AUG.RECOUNT,
interpolation=config.DATA.INTERPOLATION,
)
if not resize_im:
# replace RandomResizedCropAndInterpolation with
# RandomCrop
transform.transforms[0] = transforms.RandomCrop(config.DATA.IMG_SIZE, padding=4)
return transform
t = []
if resize_im:
if config.TEST.CROP:
size = int((256 / 224) * config.DATA.IMG_SIZE)
t.append(
transforms.Resize(size, interpolation=_pil_interp(config.DATA.INTERPOLATION)),
# to maintain same ratio w.r.t. 224 images
)
t.append(transforms.CenterCrop(config.DATA.IMG_SIZE))
else:
t.append(
transforms.Resize((config.DATA.IMG_SIZE, config.DATA.IMG_SIZE),
interpolation=_pil_interp(config.DATA.INTERPOLATION))
)
t.append(transforms.ToTensor())
t.append(transforms.Normalize(IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD))
return transforms.Compose(t)
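# Illustrative call site (a sketch; `config` is the frozen yacs CfgNode used throughout):
# dataset_train, dataset_val, loader_train, loader_val, mixup_fn = build_loader(config)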
# ---- end of file: Cream/MiniViT/Mini-Swin/data/build.py ----
# Adapted from https://github.com/princeton-nlp/CoFiPruning/blob/main/models/l0_module.py
# MIT license
import math
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
class L0Module(nn.Module):
limit_a, limit_b, epsilon = -.1, 1.1, 1e-6
all_types = ["hidden_z", "heads_z", "mha_z", "intermediate_z", "ffn_z"]
def __init__(self, config,
start_sparsity=0.0,
target_sparsity=0.0,
lagrangian_warmup=0,
init_loga=0.5,
temperature=2. / 3.,
pruning_type=["hidden", "heads", "intermediate", "layer"],
magical_number=0.8, # from Wang et al. 2020
):
super(L0Module, self).__init__()
self.magical_number = magical_number
self.lagrangian_warmup = lagrangian_warmup
self.pruning_type = pruning_type
self.start_sparsity = start_sparsity
self.target_sparsity = target_sparsity
self.temperature = temperature
self.hidden_size = config.hidden_size
self.intermediate_size = config.intermediate_size
self.num_attention_heads = config.num_attention_heads
self.dim_per_head = self.hidden_size // self.num_attention_heads
self.num_hidden_layers = config.num_hidden_layers
self.params_per_head_layer = self.hidden_size * \
self.hidden_size * 4 + self.hidden_size * 4
self.params_per_head = self.params_per_head_layer // self.num_attention_heads
self.params_per_mlp_layer = self.hidden_size * self.intermediate_size * \
2 + self.hidden_size + self.intermediate_size
self.params_per_intermediate_dim = self.params_per_mlp_layer // self.intermediate_size
# we ignore the parameters in normalization layers (it takes a very small amount)
self.full_model_size = (
self.params_per_head_layer + self.params_per_mlp_layer) * self.num_hidden_layers
self.prunable_model_size = 0
init_loga = init_loga if isinstance(init_loga, float) else 0.5
self.loga_mean = math.log(
1.0 - self.epsilon - init_loga) - math.log(init_loga + self.epsilon)
self.types = []
self.z_logas = {}
self.parameters_per_dim = {}
self.sizes = {}
self.shapes = {}
self.hidden_loga = None
self.hidden_type = None
for t in pruning_type:
self.initialize_one_module(t)
self.lambda_1 = nn.Parameter(torch.tensor(10.00))
self.lambda_2 = nn.Parameter(torch.tensor(10.00))
def initialize_parameters(self, size, num_layer=None, mean=None):
if num_layer is not None:
loga = nn.Parameter(torch.Tensor(num_layer, size))
else:
loga = nn.Parameter(torch.Tensor(size))
mean = mean or self.loga_mean
# loga.data.normal_(mean, 1e-2)
loga.data.normal_(mean, 0)
return loga
def initialize_one_module(self, module_name):
default_mean = 10
if module_name == "intermediate":
self.intermediate_loga = self.initialize_parameters(
self.intermediate_size, self.num_hidden_layers, mean=default_mean)
self.add_one_module(
self.intermediate_loga, type_name="intermediate",
parameter_per_dim=self.params_per_intermediate_dim, size=self.intermediate_size,
shape=[self.num_hidden_layers, 1, 1, self.intermediate_size]
)
self.prunable_model_size += self.params_per_mlp_layer * self.num_hidden_layers
elif module_name == "heads":
self.heads_loga = self.initialize_parameters(
self.num_attention_heads, self.num_hidden_layers, mean=default_mean)
self.add_one_module(
self.heads_loga, type_name="heads",
parameter_per_dim=self.params_per_head, size=self.num_attention_heads,
shape=[self.num_hidden_layers, 1,
self.num_attention_heads, 1, 1]
)
self.prunable_model_size += self.params_per_head * \
self.num_hidden_layers * self.num_attention_heads
elif module_name == "hidden":
self.hidden_loga = self.initialize_parameters(
self.hidden_size, mean=default_mean)
self.add_one_module(
self.hidden_loga, type_name="hidden",
parameter_per_dim=self.hidden_size * 4 + self.hidden_size * 4 * 2,
size=self.hidden_size, shape=[self.hidden_size]
)
elif module_name == "layer":
self.ffn_loga = self.initialize_parameters(
self.num_hidden_layers, mean=default_mean)
self.add_one_module(
self.ffn_loga, type_name="ffn",
parameter_per_dim=self.params_per_mlp_layer, size=1,
shape=[self.num_hidden_layers]
)
self.mha_loga = self.initialize_parameters(
self.num_hidden_layers, mean=default_mean)
self.add_one_module(
self.mha_loga, type_name="mha",
parameter_per_dim=self.params_per_head * self.num_attention_heads, size=1,
shape=[self.num_hidden_layers]
)
# ! init the z_logas
def add_one_module(self, z_loga, type_name, parameter_per_dim, size, shape):
self.types.append(type_name)
self.z_logas[type_name] = z_loga
self.parameters_per_dim[type_name] = parameter_per_dim
self.sizes[type_name] = size
self.shapes[type_name] = shape
def constrain_parameters(self):
for key in self.z_logas:
self.z_logas[key].data.clamp_(
min=math.log(1e-2), max=math.log(1e2))
def cdf_qz(self, x, loga):
"""Implements the CDF of the 'stretched' concrete distribution"""
xn = (x - self.limit_a) / (self.limit_b - self.limit_a)
logits = math.log(xn) - math.log(1.0 - xn)
return torch.sigmoid(logits * self.temperature - loga).clamp(min=self.epsilon, max=1 - self.epsilon)
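    # Math sketch (hard-concrete gates, Louizos et al. 2018): with
    # xn = (x - limit_a) / (limit_b - limit_a), the return value above is
    # Q(s <= x) = sigmoid(temperature * log(xn / (1 - xn)) - log_alpha), so
    # score_loga(loga) = 1 - Q(s <= 0) is the probability that a gate stays nonzero.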
def score_loga(self, loga):
return 1.0 - self.cdf_qz(0.0, loga)
def get_num_parameters_and_constraint(self, hidden=False):
num_parameters = 0
layers = self.num_hidden_layers
hidden_size = self.hidden_size
heads = self.num_attention_heads
device = self.z_logas[self.types[0]].device
# 12 * 1 * 1
mha_score = self.score_loga(self.mha_loga).view(
-1, 1, 1) if "mha" in self.types else torch.ones([layers, 1, 1]).to(device)
# 12 * 12 * 1
heads_score = self.score_loga(self.heads_loga).unsqueeze(
dim=-1) if "heads" in self.types else torch.ones([layers, heads, 1]).to(device)
if "heads" not in self.parameters_per_dim:
self.parameters_per_dim["heads"] = self.params_per_head
if "intermediate" not in self.parameters_per_dim:
self.parameters_per_dim["intermediate"] = self.params_per_intermediate_dim
if hidden:
hidden_score = self.score_loga(
self.hidden_loga) if "hidden" in self.types else torch.ones([hidden_size]).to(device)
heads_score = (
heads_score * mha_score) if mha_score is not None else heads_score # 38+106
heads_score = heads_score.reshape(-1)
num_parameters += torch.outer(hidden_score, heads_score).sum(
) * self.parameters_per_dim["heads"] / self.hidden_size
else:
heads_score = heads_score * mha_score
num_parameters += heads_score.sum() * \
self.parameters_per_dim["heads"]
# 12 * 1
if 'ffn' in self.types:
ffn_score = self.score_loga(self.ffn_loga).unsqueeze(
dim=-1) if "ffn" in self.types else torch.ones([layers, 1]).to(device)
else:
ffn_score = 1
# 12 * 3072
intermediate_score = self.score_loga(self.intermediate_loga) if "intermediate" in self.types else torch.ones([
layers, hidden_size * 4]).to(device)
intermediate_score = intermediate_score * ffn_score
if hidden:
intermediate_score = intermediate_score.reshape(-1) # 13893+22971
num_parameters += torch.sum(torch.outer(hidden_score,
intermediate_score)) * 2
else:
num_parameters += intermediate_score.sum() * \
self.parameters_per_dim["intermediate"]
return num_parameters
def get_target_sparsity(self, pruned_steps):
target_sparsity = (self.target_sparsity - self.start_sparsity) * \
min(1, pruned_steps / self.lagrangian_warmup) + self.start_sparsity
return target_sparsity
def lagrangian_regularization(self, pruned_steps):
target_sparsity = self.get_target_sparsity(
pruned_steps) if self.lagrangian_warmup > 0 else self.target_sparsity
expect_sparsity = 1 - self.get_num_parameters_and_constraint(
"hidden" in self.types) / self.prunable_model_size
# lagrangian_loss = (
# self.lambda_1 * (expect_sparsity - target_sparsity).abs() +
# self.lambda_2 * (expect_sparsity - target_sparsity).square()
# )
zero = torch.tensor(0.0, device=expect_sparsity.device)
lagrangian_loss = (
self.lambda_1 * torch.maximum(target_sparsity - expect_sparsity, zero) +
self.lambda_2 *
torch.maximum(target_sparsity - expect_sparsity, zero).square()
)
return lagrangian_loss, expect_sparsity.detach().item(), target_sparsity
# during training
def _sample_z(self, loga):
# Uniform random numbers for the concrete distribution
u = torch.zeros_like(loga).uniform_(self.epsilon, 1.0 - self.epsilon)
# quantile concrete
z = torch.sigmoid(
(torch.log(u) - torch.log(1 - u) + loga) / self.temperature)
z = z * (self.limit_b - self.limit_a) + self.limit_a
z = F.hardtanh(z, min_val=0.0, max_val=1.0)
return z
# during inference
def _deterministic_z(self, size, loga, soft=True):
soft_mask = torch.sigmoid(
loga / self.temperature * self.magical_number)
if not soft:
return soft_mask
expected_num_zeros = size - self.score_loga(loga).sum().item()
num_zeros = round(expected_num_zeros)
if num_zeros > 0:
if soft_mask.ndim == 0:
soft_mask = torch.tensor(0).to(loga.device)
else:
_, indices = torch.topk(soft_mask, k=num_zeros, largest=False)
soft_mask[indices] = 0.
return soft_mask
def get_z_from_zs(self, zs):
numpified_zs = {}
# for t in self.all_types:
# name = t[:-2]
for t in self.types:
name = t
numpified_zs[name] = (zs[t].squeeze().detach().cpu(
).numpy() > 0) if t in zs else np.ones(self.shapes[name])
return numpified_zs
def calculate_model_size(self, zs):
if zs is None:
return {"pruned_sparsity": 0.0}
layers = self.num_hidden_layers
hidden_size = self.hidden_size
heads = self.num_attention_heads
device = self.z_logas[self.types[0]].device
numpified_zs = self.get_z_from_zs(zs)
hidden_z = numpified_zs["hidden"] if "hidden" in numpified_zs.keys() else np.ones([
hidden_size])
heads_z = numpified_zs["heads"] if "heads" in numpified_zs.keys() else np.ones([
layers, 1, heads, 1, 1])
mha_z = numpified_zs["mha"].reshape(-1, 1, 1, 1, 1) if "mha" in numpified_zs.keys(
) else np.ones([heads_z.shape[0], 1, 1, 1, 1])
intermediate_z = numpified_zs["intermediate"] if "intermediate" in numpified_zs.keys(
) else np.ones([layers, 1, 1, hidden_size * 4])
ffn_z = numpified_zs["ffn"].reshape(-1, 1, 1, 1) if "ffn" in numpified_zs.keys(
) else np.ones([heads_z.shape[0], 1, 1, 1])
remain_hidden = hidden_z.sum().item()
remain_intermediate = intermediate_z.reshape(
self.num_hidden_layers, self.intermediate_size).sum(-1).tolist()
remain_heads = heads_z.reshape(
self.num_hidden_layers, self.num_attention_heads).sum(-1).tolist()
heads = np.outer((heads_z * mha_z).reshape(-1), hidden_z).sum().item()
intermediate = np.outer(
(intermediate_z * ffn_z).reshape(-1), hidden_z).sum().item()
remain_model_size = heads * self.dim_per_head * 4 + intermediate * 2
pruned_model_size = self.prunable_model_size - remain_model_size
results = {
'mha': mha_z.reshape(-1).astype(int).tolist(),
'ffn': ffn_z.reshape(-1).astype(int).tolist(),
'remain_hidden': remain_hidden,
'remain_intermediate': remain_intermediate,
'remain_heads': remain_heads,
'pruned_params': pruned_model_size,
'remain_params': remain_model_size,
'pruned_sparsity': pruned_model_size / self.prunable_model_size
}
return results
def forward(self, soft=True):
zs = {f"{t}_z": [] for t in self.types}
if self.training:
for i, t in enumerate(self.types):
loga = self.z_logas[t]
z = self._sample_z(loga)
zs[f"{t}_z"] = z.reshape(self.shapes[t])
else:
for i, t in enumerate(self.types):
if t != "hidden": # hidden is not a per layer sample
tmp = []
for loga in self.z_logas[t]:
z = self._deterministic_z(
self.sizes[t], loga.detach(), soft=soft)
tmp.append(z.reshape(self.shapes[t][1:]))
zs[f"{t}_z"] = torch.stack(tmp)
else:
zs[f"{t}_z"] = self._deterministic_z(
self.sizes[t], self.hidden_loga.detach(), soft=soft)
return zs
@torch.no_grad()
def l0_mask(self):
zs = {f"{t}_z": [] for t in self.types}
# self.magical_number = 1.0
def get_mask(loga): return torch.sigmoid(
loga / self.temperature * self.magical_number)
for t in self.types:
if t == "hidden":
zs[f"{t}_z"] = get_mask(self.hidden_loga)
else:
tmp = []
loga_all_layers = self.z_logas[t]
for layer in range(len(loga_all_layers)):
loga = loga_all_layers[layer]
z = get_mask(loga)
tmp.append(z.reshape(self.shapes[t][1:]))
zs[f"{t}_z"] = torch.stack(tmp)
return zs
if __name__ == '__main__':
from collections import namedtuple
Config = namedtuple('Config', [
'hidden_size', 'intermediate_size', 'num_attention_heads', 'num_hidden_layers'])
config = Config(hidden_size=768, intermediate_size=4 * 768,
num_attention_heads=12, num_hidden_layers=12)
l0_module = L0Module(config, lagrangian_warmup=200, target_sparsity=0.5)
l0_module.train()
zs = l0_module()
l0_module.eval()
zs = l0_module()
result = l0_module.calculate_model_size(zs)
print(result)
# ---- end of file: Cream/TinyCLIP/src/open_clip/l0module.py ----
""" CLIP tokenizer
Copied from https://github.com/openai/CLIP. Originally MIT License, Copyright (c) 2021 OpenAI.
"""
import gzip
import html
import os
from functools import lru_cache
from typing import Union, List
import ftfy
import regex as re
import torch
@lru_cache()
def default_bpe():
return os.path.join(os.path.dirname(os.path.abspath(__file__)), "bpe_simple_vocab_16e6.txt.gz")
@lru_cache()
def bytes_to_unicode():
"""
    Returns a list of utf-8 bytes and a corresponding list of unicode strings.
    The reversible bpe codes work on unicode strings.
    This means you need a large # of unicode characters in your vocab if you want to avoid UNKs.
    When you're at something like a 10B token dataset you end up needing around 5K for decent coverage.
    This is a significant percentage of your normal, say, 32K bpe vocab.
    To avoid that, we want lookup tables between utf-8 bytes and unicode strings.
    The mapping also avoids whitespace/control characters that the bpe code barfs on.
"""
bs = list(range(ord("!"), ord("~") + 1)) + list(range(ord("¡"),
ord("¬") + 1)) + list(range(ord("®"), ord("ÿ") + 1))
cs = bs[:]
n = 0
for b in range(2**8):
if b not in bs:
bs.append(b)
cs.append(2**8 + n)
n += 1
cs = [chr(n) for n in cs]
return dict(zip(bs, cs))
def get_pairs(word):
"""Return set of symbol pairs in a word.
Word is represented as tuple of symbols (symbols being variable-length strings).
"""
pairs = set()
prev_char = word[0]
for char in word[1:]:
pairs.add((prev_char, char))
prev_char = char
return pairs
def basic_clean(text):
text = ftfy.fix_text(text)
text = html.unescape(html.unescape(text))
return text.strip()
def whitespace_clean(text):
text = re.sub(r'\s+', ' ', text)
text = text.strip()
return text
class SimpleTokenizer(object):
def __init__(self, bpe_path: str = default_bpe(), special_tokens=None):
self.byte_encoder = bytes_to_unicode()
self.byte_decoder = {v: k for k, v in self.byte_encoder.items()}
merges = gzip.open(bpe_path).read().decode("utf-8").split('\n')
merges = merges[1:49152 - 256 - 2 + 1]
merges = [tuple(merge.split()) for merge in merges]
vocab = list(bytes_to_unicode().values())
vocab = vocab + [v + '</w>' for v in vocab]
for merge in merges:
vocab.append(''.join(merge))
if not special_tokens:
special_tokens = ['<start_of_text>', '<end_of_text>']
else:
special_tokens = ['<start_of_text>',
'<end_of_text>'] + special_tokens
vocab.extend(special_tokens)
self.encoder = dict(zip(vocab, range(len(vocab))))
self.decoder = {v: k for k, v in self.encoder.items()}
self.bpe_ranks = dict(zip(merges, range(len(merges))))
self.cache = {t: t for t in special_tokens}
special = "|".join(special_tokens)
self.pat = re.compile(
special + r"""|'s|'t|'re|'ve|'m|'ll|'d|[\p{L}]+|[\p{N}]|[^\s\p{L}\p{N}]+""", re.IGNORECASE)
self.vocab_size = len(self.encoder)
self.all_special_ids = [self.encoder[t] for t in special_tokens]
def bpe(self, token):
if token in self.cache:
return self.cache[token]
word = tuple(token[:-1]) + (token[-1] + '</w>',)
pairs = get_pairs(word)
if not pairs:
return token + '</w>'
while True:
bigram = min(pairs, key=lambda pair: self.bpe_ranks.get(
pair, float('inf')))
if bigram not in self.bpe_ranks:
break
first, second = bigram
new_word = []
i = 0
while i < len(word):
try:
j = word.index(first, i)
new_word.extend(word[i:j])
i = j
                except ValueError:  # `first` does not occur in the rest of the word
new_word.extend(word[i:])
break
if word[i] == first and i < len(word) - 1 and word[i + 1] == second:
new_word.append(first + second)
i += 2
else:
new_word.append(word[i])
i += 1
new_word = tuple(new_word)
word = new_word
if len(word) == 1:
break
else:
pairs = get_pairs(word)
word = ' '.join(word)
self.cache[token] = word
return word
def encode(self, text):
bpe_tokens = []
text = whitespace_clean(basic_clean(text)).lower()
for token in re.findall(self.pat, text):
token = ''.join(self.byte_encoder[b]
for b in token.encode('utf-8'))
bpe_tokens.extend(self.encoder[bpe_token]
for bpe_token in self.bpe(token).split(' '))
return bpe_tokens
def decode(self, tokens):
text = ''.join([self.decoder[token] for token in tokens])
text = bytearray([self.byte_decoder[c] for c in text]).decode(
'utf-8', errors="replace").replace('</w>', ' ')
return text
_tokenizer = SimpleTokenizer()
def tokenize(texts: Union[str, List[str]], context_length: int = 77) -> torch.LongTensor:
"""
Returns the tokenized representation of given input string(s)
Parameters
----------
texts : Union[str, List[str]]
An input string or a list of input strings to tokenize
context_length : int
The context length to use; all CLIP models use 77 as the context length
Returns
-------
A two-dimensional tensor containing the resulting tokens, shape = [number of input strings, context_length]
"""
if isinstance(texts, str):
texts = [texts]
sot_token = _tokenizer.encoder["<start_of_text>"]
eot_token = _tokenizer.encoder["<end_of_text>"]
all_tokens = [[sot_token] +
_tokenizer.encode(text) + [eot_token] for text in texts]
result = torch.zeros(len(all_tokens), context_length, dtype=torch.long)
for i, tokens in enumerate(all_tokens):
if len(tokens) > context_length:
tokens = tokens[:context_length] # Truncate
tokens[-1] = eot_token
result[i, :len(tokens)] = torch.tensor(tokens)
return result
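# Illustrative usage (a sketch):
# tokens = tokenize(["a photo of a cat", "a photo of a dog"])
# tokens.shape  # torch.Size([2, 77]); each row is <start_of_text>, ..., <end_of_text>, then zero padding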
class HFTokenizer:
"""HuggingFace tokenizer wrapper"""
def __init__(self, tokenizer_name: str):
from transformers import AutoTokenizer
self.tokenizer = AutoTokenizer.from_pretrained(tokenizer_name)
def save_pretrained(self, dest):
self.tokenizer.save_pretrained(dest)
def __call__(self, texts: Union[str, List[str]], context_length: int = 77) -> torch.Tensor:
        # same cleaning as the default tokenizer, except for lowercasing;
        # adding lower() (for case-sensitive tokenizers) would make it more robust but less sensitive to nuance
if isinstance(texts, str):
texts = [texts]
texts = [whitespace_clean(basic_clean(text)) for text in texts]
input_ids = self.tokenizer(
texts,
return_tensors='pt',
max_length=context_length,
padding='max_length',
truncation=True,
).input_ids
return input_ids
# ---- end of file: Cream/TinyCLIP/src/open_clip/tokenizer.py ----
import torch
from contextlib import suppress
# amp_bfloat16 is more stable than amp (float16) for CLIP training
def get_autocast(precision):
if precision == 'amp':
return torch.cuda.amp.autocast
elif precision == 'amp_bfloat16':
return lambda: torch.cuda.amp.autocast(dtype=torch.bfloat16)
elif precision == 'fp32':
return lambda: torch.cuda.amp.autocast(enabled=False)
else:
return suppress
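# Illustrative usage (a sketch; `model` and `images` are placeholders):
# autocast = get_autocast('amp_bfloat16')
# with autocast():
#     logits = model(images)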
# ---- end of file: Cream/TinyCLIP/src/training/precision.py ----
""" Quick n Simple Image Folder, Tarfile based DataSet
Hacked together by / Copyright 2020 Ross Wightman
"""
import torch.utils.data as data
import os
import torch
import logging
from PIL import Image
from .parsers import create_parser
_logger = logging.getLogger(__name__)
_ERROR_RETRY = 50
class ImageDataset(data.Dataset):
def __init__(
self,
root,
parser=None,
class_map=None,
load_bytes=False,
transform=None,
target_transform=None,
):
if parser is None or isinstance(parser, str):
parser = create_parser(parser or '', root=root, class_map=class_map)
self.parser = parser
self.load_bytes = load_bytes
self.transform = transform
self.target_transform = target_transform
self._consecutive_errors = 0
def __getitem__(self, index):
img, target = self.parser[index]
try:
img = img.read() if self.load_bytes else Image.open(img).convert('RGB')
except Exception as e:
_logger.warning(f'Skipped sample (index {index}, file {self.parser.filename(index)}). {str(e)}')
self._consecutive_errors += 1
if self._consecutive_errors < _ERROR_RETRY:
return self.__getitem__((index + 1) % len(self.parser))
else:
raise e
self._consecutive_errors = 0
if self.transform is not None:
img = self.transform(img)
if target is None:
target = -1
elif self.target_transform is not None:
target = self.target_transform(target)
return img, target
def __len__(self):
return len(self.parser)
def filename(self, index, basename=False, absolute=False):
return self.parser.filename(index, basename, absolute)
def filenames(self, basename=False, absolute=False):
return self.parser.filenames(basename, absolute)
class IterableImageDataset(data.IterableDataset):
def __init__(
self,
root,
parser=None,
split='train',
is_training=False,
batch_size=None,
repeats=0,
download=False,
transform=None,
target_transform=None,
):
assert parser is not None
if isinstance(parser, str):
self.parser = create_parser(
parser, root=root, split=split, is_training=is_training,
batch_size=batch_size, repeats=repeats, download=download)
else:
self.parser = parser
self.transform = transform
self.target_transform = target_transform
self._consecutive_errors = 0
def __iter__(self):
for img, target in self.parser:
if self.transform is not None:
img = self.transform(img)
if self.target_transform is not None:
target = self.target_transform(target)
yield img, target
def __len__(self):
if hasattr(self.parser, '__len__'):
return len(self.parser)
else:
return 0
def filename(self, index, basename=False, absolute=False):
assert False, 'Filename lookup by index not supported, use filenames().'
def filenames(self, basename=False, absolute=False):
return self.parser.filenames(basename, absolute)
class AugMixDataset(torch.utils.data.Dataset):
"""Dataset wrapper to perform AugMix or other clean/augmentation mixes"""
def __init__(self, dataset, num_splits=2):
self.augmentation = None
self.normalize = None
self.dataset = dataset
if self.dataset.transform is not None:
self._set_transforms(self.dataset.transform)
self.num_splits = num_splits
def _set_transforms(self, x):
assert isinstance(x, (list, tuple)) and len(x) == 3, 'Expecting a tuple/list of 3 transforms'
self.dataset.transform = x[0]
self.augmentation = x[1]
self.normalize = x[2]
@property
def transform(self):
return self.dataset.transform
@transform.setter
def transform(self, x):
self._set_transforms(x)
def _normalize(self, x):
return x if self.normalize is None else self.normalize(x)
def __getitem__(self, i):
x, y = self.dataset[i] # all splits share the same dataset base transform
x_list = [self._normalize(x)] # first split only normalizes (this is the 'clean' split)
# run the full augmentation on the remaining splits
for _ in range(self.num_splits - 1):
x_list.append(self._normalize(self.augmentation(x)))
return tuple(x_list), y
def __len__(self):
return len(self.dataset)
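# Illustrative usage (a sketch; the three transforms are placeholders supplied by the
# caller, typically split out of a timm training transform pipeline):
# ds = ImageDataset('/data/train')
# aug_ds = AugMixDataset(ds, num_splits=3)
# aug_ds.transform = (base_tf, augmix_tf, normalize_tf)  # 1 clean split + 2 augmented splits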
# ---- end of file: Cream/TinyViT/data/augmentation/dataset.py ----
""" Random Erasing (Cutout)
Originally inspired by impl at https://github.com/zhunzhong07/Random-Erasing, Apache 2.0
Copyright Zhun Zhong & Liang Zheng
Hacked together by / Copyright 2020 Ross Wightman
"""
from .aug_random import random, np_random
import numpy as np
import math
import torch
def _get_pixels(per_pixel, rand_color, patch_size, dtype=torch.float32, device='cuda'):
# NOTE I've seen CUDA illegal memory access errors being caused by the normal_()
# paths, flip the order so normal is run on CPU if this becomes a problem
# Issue has been fixed in master https://github.com/pytorch/pytorch/issues/19508
if not per_pixel and not rand_color:
return torch.zeros((patch_size[0], 1, 1), dtype=dtype, device=device)
if per_pixel:
shape = patch_size
elif rand_color:
shape = (patch_size[0], 1, 1)
    # draw normal samples from a seeded NumPy generator (stands in for tensor.normal_())
seed = random.randint(0, 1 << 30)
bg = np.random.MT19937(seed)
g = np.random.Generator(bg)
x = g.normal(size=shape)
return torch.tensor(x, dtype=dtype, device=device)
class RandomErasing:
""" Randomly selects a rectangle region in an image and erases its pixels.
'Random Erasing Data Augmentation' by Zhong et al.
See https://arxiv.org/pdf/1708.04896.pdf
This variant of RandomErasing is intended to be applied to either a batch
or single image tensor after it has been normalized by dataset mean and std.
Args:
probability: Probability that the Random Erasing operation will be performed.
min_area: Minimum percentage of erased area wrt input image area.
max_area: Maximum percentage of erased area wrt input image area.
min_aspect: Minimum aspect ratio of erased area.
mode: pixel color mode, one of 'const', 'rand', or 'pixel'
'const' - erase block is constant color of 0 for all channels
'rand' - erase block is same per-channel random (normal) color
'pixel' - erase block is per-pixel random (normal) color
max_count: maximum number of erasing blocks per image, area per box is scaled by count.
per-image count is randomly chosen between 1 and this value.
"""
REF_H = 224
REF_W = 224
def __init__(
self,
probability=0.5, min_area=0.02, max_area=1/3, min_aspect=0.3, max_aspect=None,
mode='const', min_count=1, max_count=None, num_splits=0, device='cuda'):
self.probability = probability
self.min_area = min_area
self.max_area = max_area
max_aspect = max_aspect or 1 / min_aspect
self.log_aspect_ratio = (math.log(min_aspect), math.log(max_aspect))
self.min_count = min_count
self.max_count = max_count or min_count
self.num_splits = num_splits
self.mode = mode.lower()
self.rand_color = False
self.per_pixel = False
if self.mode == 'rand':
self.rand_color = True # per block random normal
elif self.mode == 'pixel':
self.per_pixel = True # per pixel random normal
else:
assert not self.mode or self.mode == 'const'
self.device = device
def _erase(self, img, chan, img_h, img_w, dtype):
if random.random() > self.probability:
return
count = self.min_count if self.min_count == self.max_count else \
random.randint(self.min_count, self.max_count)
ref_h, ref_w = self.REF_H, self.REF_W
ref_area = ref_h * ref_w
area = img_h * img_w
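# Boxes are sampled in a fixed 224x224 reference frame (REF_H x REF_W), then rescaled to the actual image size below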
for _ in range(count):
for attempt in range(10):
r1 = random.uniform(self.min_area, self.max_area)
target_area = r1 * ref_area / count
r2 = random.uniform(*self.log_aspect_ratio)
aspect_ratio = math.exp(r2)
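# Solve h*w = target_area with h/w = aspect_ratio: h = sqrt(area*ar), w = sqrt(area/ar)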
h = int(round(math.sqrt(target_area * aspect_ratio)))
w = int(round(math.sqrt(target_area / aspect_ratio)))
if w < ref_w and h < ref_h:
top = random.randint(0, ref_h - h)
left = random.randint(0, ref_w - w)
# ref -> now
top = min(int(round(top * img_h / ref_h)), img_h - 1)
left = min(int(round(left * img_w / ref_w)), img_w - 1)
h = min(int(round(h * img_h / ref_h)), img_h - top)
w = min(int(round(w * img_w / ref_w)), img_w - left)
img[:, top:top + h, left:left + w] = _get_pixels(
self.per_pixel, self.rand_color, (chan, h, w),
dtype=dtype, device=self.device)
break
def __call__(self, input):
if len(input.size()) == 3:
self._erase(input, *input.size(), input.dtype)
else:
batch_size, chan, img_h, img_w = input.size()
# skip first slice of batch if num_splits is set (for clean portion of samples)
batch_start = batch_size // self.num_splits if self.num_splits > 1 else 0
for i in range(batch_start, batch_size):
self._erase(input[i], chan, img_h, img_w, input.dtype)
return input
def __repr__(self):
# NOTE simplified state for repr
fs = self.__class__.__name__ + f'(p={self.probability}, mode={self.mode}'
fs += f', count=({self.min_count}, {self.max_count}))'
return fs
|
Cream/TinyViT/data/augmentation/random_erasing.py/0
|
{
"file_path": "Cream/TinyViT/data/augmentation/random_erasing.py",
"repo_id": "Cream",
"token_count": 2458
}
| 315 |
# --------------------------------------------------------
# TinyViT Learning rate scheduler
# Copyright (c) 2022 Microsoft
# Based on the code: Swin Transformer
# (https://github.com/microsoft/swin-transformer)
# --------------------------------------------------------
import torch
from timm.scheduler.cosine_lr import CosineLRScheduler
from timm.scheduler.step_lr import StepLRScheduler
from timm.scheduler.scheduler import Scheduler
# Modified for TinyViT
from tinyvit_utils import LRSchedulerWrapper
def build_scheduler(config, optimizer, n_iter_per_epoch):
num_steps = int(config.TRAIN.EPOCHS * n_iter_per_epoch)
warmup_steps = int(config.TRAIN.WARMUP_EPOCHS * n_iter_per_epoch)
decay_steps = int(
config.TRAIN.LR_SCHEDULER.DECAY_EPOCHS * n_iter_per_epoch)
lr_scheduler = None
if config.TRAIN.LR_SCHEDULER.NAME == 'cosine':
lr_scheduler = CosineLRScheduler(
optimizer,
t_initial=num_steps,
lr_min=config.TRAIN.MIN_LR,
warmup_lr_init=config.TRAIN.WARMUP_LR,
warmup_t=warmup_steps,
cycle_limit=1,
t_in_epochs=False,
)
elif config.TRAIN.LR_SCHEDULER.NAME == 'linear':
lr_scheduler = LinearLRScheduler(
optimizer,
t_initial=num_steps,
lr_min_rate=0.01,
warmup_lr_init=config.TRAIN.WARMUP_LR,
warmup_t=warmup_steps,
t_in_epochs=False,
)
elif config.TRAIN.LR_SCHEDULER.NAME == 'step':
lr_scheduler = StepLRScheduler(
optimizer,
decay_t=decay_steps,
decay_rate=config.TRAIN.LR_SCHEDULER.DECAY_RATE,
warmup_lr_init=config.TRAIN.WARMUP_LR,
warmup_t=warmup_steps,
t_in_epochs=False,
)
# Modified for TinyViT
if config.TRAIN.LAYER_LR_DECAY != 1.0:
lr_scheduler = LRSchedulerWrapper(lr_scheduler, optimizer)
return lr_scheduler
class LinearLRScheduler(Scheduler):
def __init__(self,
optimizer: torch.optim.Optimizer,
t_initial: int,
lr_min_rate: float,
warmup_t=0,
warmup_lr_init=0.,
t_in_epochs=True,
noise_range_t=None,
noise_pct=0.67,
noise_std=1.0,
noise_seed=42,
initialize=True,
) -> None:
super().__init__(
optimizer, param_group_field="lr",
noise_range_t=noise_range_t, noise_pct=noise_pct, noise_std=noise_std, noise_seed=noise_seed,
initialize=initialize)
self.t_initial = t_initial
self.lr_min_rate = lr_min_rate
self.warmup_t = warmup_t
self.warmup_lr_init = warmup_lr_init
self.t_in_epochs = t_in_epochs
if self.warmup_t:
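# Per-step LR increment for each param group, so LR ramps linearly from warmup_lr_init to the base value over warmup_t steps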
self.warmup_steps = [(v - warmup_lr_init) /
self.warmup_t for v in self.base_values]
super().update_groups(self.warmup_lr_init)
else:
self.warmup_steps = [1 for _ in self.base_values]
def _get_lr(self, t):
if t < self.warmup_t:
lrs = [self.warmup_lr_init + t * s for s in self.warmup_steps]
else:
t = t - self.warmup_t
total_t = self.t_initial - self.warmup_t
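# Linear decay from the base value v down to v * lr_min_rate over the remaining total_t steps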
lrs = [v - ((v - v * self.lr_min_rate) * (t / total_t))
for v in self.base_values]
return lrs
def get_epoch_values(self, epoch: int):
if self.t_in_epochs:
return self._get_lr(epoch)
else:
return None
def get_update_values(self, num_updates: int):
if not self.t_in_epochs:
return self._get_lr(num_updates)
else:
return None
|
Cream/TinyViT/lr_scheduler.py/0
|
{
"file_path": "Cream/TinyViT/lr_scheduler.py",
"repo_id": "Cream",
"token_count": 2032
}
| 316 |
Hiring research interns for neural architecture search projects: [email protected]
# Rethinking and Improving Relative Position Encoding for Vision Transformer
[[Paper]](https://openaccess.thecvf.com/content/ICCV2021/html/Wu_Rethinking_and_Improving_Relative_Position_Encoding_for_Vision_Transformer_ICCV_2021_paper.html)
Object Detection: DETR with iRPE
# Model Zoo
We equip DETR models with contextual product shared-head RPE, and report their mAP on the MSCOCO dataset.
- Absolute Position Encoding: Sinusoid
- Relative Position Encoding: iRPE (contextual product shared-head RPE)
enc\_rpe2d | Backbone | #Buckets | epoch | AP | AP\_50 | AP\_75 | AP\_S | AP\_M | AP\_L | Link | Log
----------------------- | --------- | -------- | ----- | ----- | ------ | ------ | ----- | ----- | ----- | ---- | ---
rpe-1.9-product-ctx-1-k | ResNet-50 | 7 x 7 | 150 | 0.409 | 0.614 | 0.429 | 0.195 | 0.443 | 0.605 | [link](https://github.com/wkcn/iRPE-model-zoo/releases/download/1.0/rpe-1.9-product-ctx-1-k.pth)| [log](https://github.com/wkcn/iRPE-model-zoo/releases/download/1.0/log_rpe-1.9-product-ctx-1-k.txt), [detail (188 MB)](https://github.com/wkcn/iRPE-model-zoo/releases/download/1.0/detail_rpe-1.9-product-ctx-1-k.log)
rpe-2.0-product-ctx-1-k | ResNet-50 | 9 x 9 | 150 | 0.410 | 0.615 | 0.434 | 0.192 | 0.445 | 0.608 | [link](https://github.com/wkcn/iRPE-model-zoo/releases/download/1.0/rpe-2.0-product-ctx-1-k.pth)| [log](https://github.com/wkcn/iRPE-model-zoo/releases/download/1.0/log_rpe-2.0-product-ctx-1-k.txt), [detail (188 MB)](https://github.com/wkcn/iRPE-model-zoo/releases/download/1.0/detail_rpe-2.0-product-ctx-1-k.log)
rpe-2.0-product-ctx-1-k | ResNet-50 | 9 x 9 | 300 | 0.422 | 0.623 | 0.446 | 0.205 | 0.457 | 0.613 | [link](https://github.com/wkcn/iRPE-model-zoo/releases/download/1.0/rpe-2.0-product-ctx-1-k_300epochs.pth)| [log](https://github.com/wkcn/iRPE-model-zoo/releases/download/1.0/log_rpe-2.0-product-ctx-1-k_300epochs.txt), [detail (375 MB)](https://github.com/wkcn/iRPE-model-zoo/releases/download/1.0/detail_rpe-2.0-product-ctx-1-k_300epochs.log)
`--enc_rpe2d` is the argument that specifies the configuration of the relative position encoding.
# Usage
## Setup
1. Install 3rd-party packages from [requirements.txt](./requirements.txt).
```bash
pip install -r ./requirements.txt
```
2. **[Optional, Recommend]** Build iRPE operators implemented by CUDA.
Although iRPE can be implemented with native PyTorch functions, the backward pass of PyTorch's index function is very slow. We provide CUDA operators for more efficient training and recommend building them.
`nvcc` is necessary to build CUDA operators.
```bash
cd rpe_ops/
python setup.py install --user
```
## Data Preparation
You can download the MSCOCO dataset from [`https://cocodataset.org/#download`](https://cocodataset.org/#download).
Please download the following files:
- [2017 Train images [118K/18GB]](http://images.cocodataset.org/zips/train2017.zip)
- [2017 Val images [5K/1GB]](http://images.cocodataset.org/zips/val2017.zip)
- [2017 Train/Val annotations [241MB]](http://images.cocodataset.org/annotations/annotations_trainval2017.zip)
After downloading, move the three archives into the same directory, then decompress the annotations archive with `unzip ./annotations_trainval2017.zip`. The image archives do **NOT** need to be decompressed.
The dataset should be laid out as follows:
```
coco_data
├── annotations
│ ├── captions_train2017.json
│ ├── captions_val2017.json
│ ├── instances_train2017.json
│ ├── instances_val2017.json
│ ├── person_keypoints_train2017.json
│ └── person_keypoints_val2017.json
├── train2017.zip
└── val2017.zip
```
The zip files `train2017.zip` and `val2017.zip` can also be decompressed, giving:
```
coco_data
├── annotations
│ ├── captions_train2017.json
│ ├── captions_val2017.json
│ ├── instances_train2017.json
│ ├── instances_val2017.json
│ ├── person_keypoints_train2017.json
│ └── person_keypoints_val2017.json
├── train2017
│ └── 000000000009.jpg
└── val2017
    └── 000000000009.jpg
```
## Argument for iRPE
We add an extra argument `--enc_rpe2d rpe-{ratio}-{method}-{mode}-{shared_head}-{rpe_on}` for iRPE, which adds relative position encoding to all the encoder layers.
Here is the format of the variables `ratio`, `method`, `mode`, `shared_head` and `rpe_on`.
```python
Parameters
----------
ratio: float
The ratio to control the number of buckets.
Example: 1.9, 2.0, 2.5, 3.0
For the product method,
ratio | The number of buckets
------|-----------------------
1.9 | 7 x 7
2.0 | 9 x 9
2.5 | 11 x 11
3.0 | 13 x 13
method: str
The method name of image relative position encoding.
Example: `euc` or `quant` or `cross` or `product`
euc: Euclidean method
quant: Quantization method
cross: Cross method
product: Product method
mode: str
The mode of image relative position encoding.
Example: `bias` or `ctx`
shared_head: bool
Whether to share weight among different heads.
Example: 0 or 1
0: Do not share encoding weight among different heads.
1: Share encoding weight among different heads.
rpe_on: str
Where RPE attaches.
"q": RPE on queries
"k": RPE on keys
"v": RPE on values
"qk": RPE on queries and keys
"qkv": RPE on queries, keys and values
```
If we want an image relative position encoding with contextual product shared-head `9 x 9` buckets, the argument is `--enc_rpe2d rpe-2.0-product-ctx-1-k`.
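The flag value can also be composed programmatically. Below is a minimal sketch; the helper `make_rpe_flag` is a hypothetical illustration, not part of this repository:
```python
def make_rpe_flag(ratio=2.0, method="product", mode="ctx", shared_head=1, rpe_on="k"):
    """Compose the value for --enc_rpe2d, e.g. 'rpe-2.0-product-ctx-1-k'."""
    return f"rpe-{ratio}-{method}-{mode}-{shared_head}-{rpe_on}"

assert make_rpe_flag() == "rpe-2.0-product-ctx-1-k"
```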
## Training
- Train a DETR-ResNet50 with iRPE (contextual product shared-head `9 x 9` buckets) for **150 epochs**:
```bash
python -m torch.distributed.launch --nproc_per_node=8 --use_env main.py --lr_drop 100 --epochs 150 --coco_path ./coco_data --enc_rpe2d rpe-2.0-product-ctx-1-k --output_dir ./output
```
- Train a DETR-ResNet50 with iRPE (contextual product shared-head `9 x 9` buckets) for **300 epochs**:
```bash
python -m torch.distributed.launch --nproc_per_node=8 --use_env main.py --lr_drop 200 --epochs 300 --coco_path ./coco_data --enc_rpe2d rpe-2.0-product-ctx-1-k --output_dir ./output
```
where `--nproc_per_node=8` means using 8 GPUs to train the model. `./coco_data` is the dataset folder, and `./output` is the model checkpoint folder.
## Evaluation
The command is similar to training; add the flags `--eval --resume <the checkpoint path>`.
```bash
python -m torch.distributed.launch --nproc_per_node=8 --use_env main.py --lr_drop 100 --epochs 150 --coco_path ./coco_data --enc_rpe2d rpe-2.0-product-ctx-1-k --output_dir ./output --eval --resume rpe-2.0-product-ctx-1-k.pth
```
## Code Structure
Our code is based on [DETR](https://github.com/facebookresearch/detr). The implementation of `MultiheadAttention` is based on PyTorch native operator ([module](https://github.com/pytorch/pytorch/blob/master/torch/nn/modules/activation.py), [function](https://github.com/pytorch/pytorch/blob/master/torch/nn/functional.py)). Thank you!
File | Description
-----|------------
[`models/rpe_attention/irpe.py`](./models/rpe_attention/irpe.py) | The implementation of image relative position encoding
[`models/rpe_attention/multi_head_attention.py`](./models/rpe_attention/multi_head_attention.py) | The nn.Module `MultiheadAttention` with iRPE
[`models/rpe_attention/rpe_attention_function.py`](./models/rpe_attention/rpe_attention_function.py) | The function `rpe_multi_head_attention_forward` with iRPE
[`rpe_ops`](./rpe_ops) | The CUDA implementation of iRPE operators for efficient training
# Citing iRPE
If this project is helpful for you, please cite it. Thank you! : )
```bibtex
@InProceedings{iRPE,
title = {Rethinking and Improving Relative Position Encoding for Vision Transformer},
author = {Wu, Kan and Peng, Houwen and Chen, Minghao and Fu, Jianlong and Chao, Hongyang},
booktitle = {Proceedings of the IEEE/CVF International Conference on Computer Vision (ICCV)},
month = {October},
year = {2021},
pages = {10033-10041}
}
```
# License
[Apache License](./LICENSE)
|
Cream/iRPE/DETR-with-iRPE/README.md/0
|
{
"file_path": "Cream/iRPE/DETR-with-iRPE/README.md",
"repo_id": "Cream",
"token_count": 3004
}
| 317 |
# Modify from https://github.com/pytorch/pytorch/blob/master/torch/nn/modules/activation.py
import warnings
from typing import Optional, Tuple
import torch
from torch import Tensor
from torch import nn
from torch.nn.init import xavier_uniform_
from torch.nn.init import constant_
from torch.nn.init import xavier_normal_
from torch.nn.parameter import Parameter
from torch.nn.modules.module import Module
from torch.nn import functional as F
from .rpe_attention_function import rpe_multi_head_attention_forward
from . import irpe
class RPEMultiheadAttention(nn.Module):
r"""Allows the model to jointly attend to information
from different representation subspaces.
See `Attention Is All You Need <https://arxiv.org/abs/1706.03762>`_
.. math::
\text{MultiHead}(Q, K, V) = \text{Concat}(head_1,\dots,head_h)W^O
where :math:`head_i = \text{Attention}(QW_i^Q, KW_i^K, VW_i^V)`.
Args:
embed_dim: total dimension of the model.
num_heads: parallel attention heads.
dropout: a Dropout layer on attn_output_weights. Default: 0.0.
bias: add bias as module parameter. Default: True.
add_bias_kv: add bias to the key and value sequences at dim=0.
add_zero_attn: add a new batch of zeros to the key and
value sequences at dim=1.
kdim: total number of features in key. Default: None.
vdim: total number of features in value. Default: None.
Note that if :attr:`kdim` and :attr:`vdim` are None, they will be set
to :attr:`embed_dim` such that query, key, and value have the same
number of features.
Examples::
>>> multihead_attn = nn.MultiheadAttention(embed_dim, num_heads)
>>> attn_output, attn_output_weights = multihead_attn(query, key, value)
"""
bias_k: Optional[torch.Tensor]
bias_v: Optional[torch.Tensor]
def __init__(self, embed_dim, num_heads, dropout=0., bias=True, add_bias_kv=False, add_zero_attn=False, kdim=None, vdim=None, rpe_config=None):
super().__init__()
self.embed_dim = embed_dim
self.kdim = kdim if kdim is not None else embed_dim
self.vdim = vdim if vdim is not None else embed_dim
self._qkv_same_embed_dim = self.kdim == embed_dim and self.vdim == embed_dim
self.num_heads = num_heads
self.dropout = dropout
self.head_dim = embed_dim // num_heads
assert self.head_dim * \
num_heads == self.embed_dim, "embed_dim must be divisible by num_heads"
if self._qkv_same_embed_dim is False:
self.q_proj_weight = Parameter(torch.Tensor(embed_dim, embed_dim))
self.k_proj_weight = Parameter(torch.Tensor(embed_dim, self.kdim))
self.v_proj_weight = Parameter(torch.Tensor(embed_dim, self.vdim))
self.register_parameter('in_proj_weight', None)
else:
self.in_proj_weight = Parameter(
torch.empty(3 * embed_dim, embed_dim))
self.register_parameter('q_proj_weight', None)
self.register_parameter('k_proj_weight', None)
self.register_parameter('v_proj_weight', None)
if bias:
self.in_proj_bias = Parameter(torch.empty(3 * embed_dim))
else:
self.register_parameter('in_proj_bias', None)
self.out_proj = nn.Linear(embed_dim, embed_dim, bias=True)
if add_bias_kv:
self.bias_k = Parameter(torch.empty(1, 1, embed_dim))
self.bias_v = Parameter(torch.empty(1, 1, embed_dim))
else:
self.bias_k = self.bias_v = None
self.add_zero_attn = add_zero_attn
self._reset_parameters()
self.rpe_q, self.rpe_k, self.rpe_v = \
irpe.build_rpe(rpe_config,
head_dim=self.head_dim,
num_heads=self.num_heads)
for c in 'qkv':
name = 'rpe_' + c
rpe = getattr(self, name)
if rpe is not None:
print(
f"The number of buckets on {name} in encoder:", rpe.num_buckets)
def _reset_parameters(self):
if self._qkv_same_embed_dim:
xavier_uniform_(self.in_proj_weight)
else:
xavier_uniform_(self.q_proj_weight)
xavier_uniform_(self.k_proj_weight)
xavier_uniform_(self.v_proj_weight)
if self.in_proj_bias is not None:
constant_(self.in_proj_bias, 0.)
constant_(self.out_proj.bias, 0.)
if self.bias_k is not None:
xavier_normal_(self.bias_k)
if self.bias_v is not None:
xavier_normal_(self.bias_v)
def __setstate__(self, state):
# Support loading old MultiheadAttention checkpoints generated by v1.1.0
if '_qkv_same_embed_dim' not in state:
state['_qkv_same_embed_dim'] = True
super().__setstate__(state)
def forward(self, query: Tensor, key: Tensor, value: Tensor, key_padding_mask: Optional[Tensor] = None,
need_weights: bool = True, attn_mask: Optional[Tensor] = None,
hw=None) -> Tuple[Tensor, Optional[Tensor]]:
r"""
Args:
query, key, value: map a query and a set of key-value pairs to an output.
See "Attention Is All You Need" for more details.
key_padding_mask: if provided, specified padding elements in the key will
be ignored by the attention. When given a binary mask and a value is True,
the corresponding value on the attention layer will be ignored. When given
a byte mask and a value is non-zero, the corresponding value on the attention
layer will be ignored
need_weights: output attn_output_weights.
attn_mask: 2D or 3D mask that prevents attention to certain positions. A 2D mask will be broadcasted for all
the batches while a 3D mask allows to specify a different mask for the entries of each batch.
Shapes for inputs:
- query: :math:`(L, N, E)` where L is the target sequence length, N is the batch size, E is
the embedding dimension.
- key: :math:`(S, N, E)`, where S is the source sequence length, N is the batch size, E is
the embedding dimension.
- value: :math:`(S, N, E)` where S is the source sequence length, N is the batch size, E is
the embedding dimension.
- key_padding_mask: :math:`(N, S)` where N is the batch size, S is the source sequence length.
If a ByteTensor is provided, the non-zero positions will be ignored while the position
with the zero positions will be unchanged. If a BoolTensor is provided, the positions with the
value of ``True`` will be ignored while the position with the value of ``False`` will be unchanged.
- attn_mask: if a 2D mask: :math:`(L, S)` where L is the target sequence length, S is the
source sequence length.
If a 3D mask: :math:`(N\cdot\text{num\_heads}, L, S)` where N is the batch size, L is the target sequence
length, S is the source sequence length. ``attn_mask`` ensures that position i is allowed to attend
the unmasked positions. If a ByteTensor is provided, the non-zero positions are not allowed to attend
while the zero positions will be unchanged. If a BoolTensor is provided, positions with ``True``
are not allowed to attend while ``False`` values will be unchanged. If a FloatTensor
is provided, it will be added to the attention weight.
- hw: (height, width) of the feature map
Shapes for outputs:
- attn_output: :math:`(L, N, E)` where L is the target sequence length, N is the batch size,
E is the embedding dimension.
- attn_output_weights: :math:`(N, L, S)` where N is the batch size,
L is the target sequence length, S is the source sequence length.
"""
if not self._qkv_same_embed_dim:
return rpe_multi_head_attention_forward(
query, key, value, self.embed_dim, self.num_heads,
self.in_proj_weight, self.in_proj_bias,
self.bias_k, self.bias_v, self.add_zero_attn,
self.dropout, self.out_proj.weight, self.out_proj.bias,
training=self.training,
key_padding_mask=key_padding_mask, need_weights=need_weights,
attn_mask=attn_mask, use_separate_proj_weight=True,
q_proj_weight=self.q_proj_weight, k_proj_weight=self.k_proj_weight,
v_proj_weight=self.v_proj_weight,
rpe_q=self.rpe_q, rpe_k=self.rpe_k, rpe_v=self.rpe_v, hw=hw)
else:
return rpe_multi_head_attention_forward(
query, key, value, self.embed_dim, self.num_heads,
self.in_proj_weight, self.in_proj_bias,
self.bias_k, self.bias_v, self.add_zero_attn,
self.dropout, self.out_proj.weight, self.out_proj.bias,
training=self.training,
key_padding_mask=key_padding_mask, need_weights=need_weights,
attn_mask=attn_mask,
rpe_q=self.rpe_q, rpe_k=self.rpe_k, rpe_v=self.rpe_v, hw=hw)
|
Cream/iRPE/DETR-with-iRPE/models/rpe_attention/multi_head_attention.py/0
|
{
"file_path": "Cream/iRPE/DETR-with-iRPE/models/rpe_attention/multi_head_attention.py",
"repo_id": "Cream",
"token_count": 4135
}
| 318 |
""" Vision Transformer (ViT) in PyTorch
A PyTorch implementation of Vision Transformers as described in
'An Image Is Worth 16 x 16 Words: Transformers for Image Recognition at Scale' - https://arxiv.org/abs/2010.11929
The official jax code is released and available at https://github.com/google-research/vision_transformer
Status/TODO:
* Models updated to be compatible with official impl. Args added to support backward compat for old PyTorch weights.
* Weights ported from official jax impl for 384x384 base and small models, 16x16 and 32x32 patches.
* Trained (supervised on ImageNet-1k) my custom 'small' patch model to 77.9, 'base' to 79.4 top-1 with this code.
* Hopefully find time and GPUs for SSL or unsupervised pretraining on OpenImages w/ ImageNet fine-tune in future.
Acknowledgments:
* The paper authors for releasing code and weights, thanks!
* I fixed my class token impl based on Phil Wang's https://github.com/lucidrains/vit-pytorch ... check it out
for some einops/einsum fun
* Simple transformer style inspired by Andrej Karpathy's https://github.com/karpathy/minGPT
* Bert reference code checks against Huggingface Transformers and Tensorflow Bert
Hacked together by / Copyright 2020 Ross Wightman
Adapted from timm 0.3.2
"""
import torch
import torch.nn as nn
from functools import partial
from timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD
from timm.models.helpers import load_pretrained
from timm.models.layers import DropPath, to_2tuple, trunc_normal_
from timm.models.resnet import resnet26d, resnet50d
from timm.models.registry import register_model
from timm.models.vision_transformer import _cfg, default_cfgs,\
Mlp, PatchEmbed
try:
from timm.models.vision_transformer import HybridEmbed
except ImportError:
# for higher version of timm
from timm.models.vision_transformer_hybrid import HybridEmbed
from irpe import build_rpe
class RPEAttention(nn.Module):
'''
Attention with image relative position encoding
'''
def __init__(self, dim, num_heads=8, qkv_bias=False, qk_scale=None, attn_drop=0., proj_drop=0., rpe_config=None):
super().__init__()
self.num_heads = num_heads
head_dim = dim // num_heads
# NOTE scale factor was wrong in my original version, can set manually to be compat with prev weights
self.scale = qk_scale or head_dim ** -0.5
self.qkv = nn.Linear(dim, dim * 3, bias=qkv_bias)
self.attn_drop = nn.Dropout(attn_drop)
self.proj = nn.Linear(dim, dim)
self.proj_drop = nn.Dropout(proj_drop)
# image relative position encoding
self.rpe_q, self.rpe_k, self.rpe_v = \
build_rpe(rpe_config,
head_dim=head_dim,
num_heads=num_heads)
def forward(self, x):
B, N, C = x.shape
qkv = self.qkv(x).reshape(B, N, 3, self.num_heads, C // self.num_heads).permute(2, 0, 3, 1, 4)
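# qkv has shape (3, B, num_heads, N, head_dim)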
q, k, v = qkv[0], qkv[1], qkv[2] # make torchscript happy (cannot use tensor as tuple)
q *= self.scale
attn = (q @ k.transpose(-2, -1))
# image relative position on keys
if self.rpe_k is not None:
attn += self.rpe_k(q)
# image relative position on queries
if self.rpe_q is not None:
attn += self.rpe_q(k * self.scale).transpose(2, 3)
attn = attn.softmax(dim=-1)
attn = self.attn_drop(attn)
out = attn @ v
# image relative position on values
if self.rpe_v is not None:
out += self.rpe_v(attn)
x = out.transpose(1, 2).reshape(B, N, C)
x = self.proj(x)
x = self.proj_drop(x)
return x
class RPEBlock(nn.Module):
def __init__(self, dim, num_heads, mlp_ratio=4., qkv_bias=False, qk_scale=None, drop=0., attn_drop=0.,
drop_path=0., act_layer=nn.GELU, norm_layer=nn.LayerNorm, rpe_config=None):
super().__init__()
self.norm1 = norm_layer(dim)
self.attn = RPEAttention(
dim, num_heads=num_heads, qkv_bias=qkv_bias, qk_scale=qk_scale, attn_drop=attn_drop, proj_drop=drop, rpe_config=rpe_config)
# NOTE: drop path for stochastic depth, we shall see if this is better than dropout here
self.drop_path = DropPath(drop_path) if drop_path > 0. else nn.Identity()
self.norm2 = norm_layer(dim)
mlp_hidden_dim = int(dim * mlp_ratio)
self.mlp = Mlp(in_features=dim, hidden_features=mlp_hidden_dim, act_layer=act_layer, drop=drop)
def forward(self, x):
x = x + self.drop_path(self.attn(self.norm1(x)))
x = x + self.drop_path(self.mlp(self.norm2(x)))
return x
class VisionTransformer(nn.Module):
""" Vision Transformer with support for patch or hybrid CNN input stage
and image relative position encoding
"""
def __init__(self, img_size=224, patch_size=16, in_chans=3, num_classes=1000, embed_dim=768, depth=12,
num_heads=12, mlp_ratio=4., qkv_bias=False, qk_scale=None, drop_rate=0., attn_drop_rate=0.,
drop_path_rate=0., hybrid_backbone=None, norm_layer=nn.LayerNorm, rpe_config=None):
super().__init__()
self.num_classes = num_classes
self.num_features = self.embed_dim = embed_dim # num_features for consistency with other models
if hybrid_backbone is not None:
self.patch_embed = HybridEmbed(
hybrid_backbone, img_size=img_size, in_chans=in_chans, embed_dim=embed_dim)
else:
self.patch_embed = PatchEmbed(
img_size=img_size, patch_size=patch_size, in_chans=in_chans, embed_dim=embed_dim)
num_patches = self.patch_embed.num_patches
self.cls_token = nn.Parameter(torch.zeros(1, 1, embed_dim))
self.pos_embed = nn.Parameter(torch.zeros(1, num_patches + 1, embed_dim))
self.pos_drop = nn.Dropout(p=drop_rate)
dpr = [x.item() for x in torch.linspace(0, drop_path_rate, depth)] # stochastic depth decay rule
self.blocks = nn.ModuleList([
RPEBlock(
dim=embed_dim, num_heads=num_heads, mlp_ratio=mlp_ratio, qkv_bias=qkv_bias, qk_scale=qk_scale,
drop=drop_rate, attn_drop=attn_drop_rate, drop_path=dpr[i], norm_layer=norm_layer, rpe_config=rpe_config)
for i in range(depth)])
self.norm = norm_layer(embed_dim)
# NOTE as per official impl, we could have a pre-logits representation dense layer + tanh here
#self.repr = nn.Linear(embed_dim, representation_size)
#self.repr_act = nn.Tanh()
# Classifier head
self.head = nn.Linear(embed_dim, num_classes) if num_classes > 0 else nn.Identity()
trunc_normal_(self.pos_embed, std=.02)
trunc_normal_(self.cls_token, std=.02)
self.apply(self._init_weights)
def _init_weights(self, m):
if isinstance(m, nn.Linear):
trunc_normal_(m.weight, std=.02)
if isinstance(m, nn.Linear) and m.bias is not None:
nn.init.constant_(m.bias, 0)
elif isinstance(m, nn.LayerNorm):
nn.init.constant_(m.bias, 0)
nn.init.constant_(m.weight, 1.0)
@torch.jit.ignore
def no_weight_decay(self):
return {'pos_embed', 'cls_token'}
def get_classifier(self):
return self.head
def reset_classifier(self, num_classes, global_pool=''):
self.num_classes = num_classes
self.head = nn.Linear(self.embed_dim, num_classes) if num_classes > 0 else nn.Identity()
def forward_features(self, x):
B = x.shape[0]
x = self.patch_embed(x)
cls_tokens = self.cls_token.expand(B, -1, -1) # stole cls_tokens impl from Phil Wang, thanks
x = torch.cat((cls_tokens, x), dim=1)
x = x + self.pos_embed
x = self.pos_drop(x)
for blk in self.blocks:
x = blk(x)
x = self.norm(x)
return x[:, 0]
def forward(self, x):
x = self.forward_features(x)
x = self.head(x)
return x
|
Cream/iRPE/DeiT-with-iRPE/rpe_vision_transformer.py/0
|
{
"file_path": "Cream/iRPE/DeiT-with-iRPE/rpe_vision_transformer.py",
"repo_id": "Cream",
"token_count": 3582
}
| 319 |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import logging
import time
import torch
from timm.data import Mixup
from torch.cuda.amp import autocast
from core.evaluate import accuracy
from utils.comm import comm
def train_one_epoch(config, train_loader, model, criterion, optimizer, epoch,
output_dir, tb_log_dir, writer_dict, scaler=None):
batch_time = AverageMeter()
data_time = AverageMeter()
losses = AverageMeter()
top1 = AverageMeter()
top5 = AverageMeter()
logging.info('=> switch to train mode')
model.train()
aug = config.AUG
mixup_fn = Mixup(
mixup_alpha=aug.MIXUP, cutmix_alpha=aug.MIXCUT,
cutmix_minmax=aug.MIXCUT_MINMAX if aug.MIXCUT_MINMAX else None,
prob=aug.MIXUP_PROB, switch_prob=aug.MIXUP_SWITCH_PROB,
mode=aug.MIXUP_MODE, label_smoothing=config.LOSS.LABEL_SMOOTHING,
num_classes=config.MODEL.NUM_CLASSES
) if aug.MIXUP_PROB > 0.0 else None
end = time.time()
for i, (x, y) in enumerate(train_loader):
# measure data loading time
data_time.update(time.time() - end)
# compute output
x = x.cuda(non_blocking=True)
y = y.cuda(non_blocking=True)
if mixup_fn:
x, y = mixup_fn(x, y)
with autocast(enabled=config.AMP.ENABLED):
if config.AMP.ENABLED and config.AMP.MEMORY_FORMAT == 'nhwc':
x = x.contiguous(memory_format=torch.channels_last)
y = y.contiguous(memory_format=torch.channels_last)
outputs = model(x)
loss = criterion(outputs, y)
# compute gradient and do update step
optimizer.zero_grad()
is_second_order = hasattr(optimizer, 'is_second_order') \
and optimizer.is_second_order
scaler.scale(loss).backward(create_graph=is_second_order)
if config.TRAIN.CLIP_GRAD_NORM > 0.0:
# Unscales the gradients of optimizer's assigned params in-place
scaler.unscale_(optimizer)
# Since the gradients of optimizer's assigned params are unscaled, clips as usual:
torch.nn.utils.clip_grad_norm_(
model.parameters(), config.TRAIN.CLIP_GRAD_NORM
)
scaler.step(optimizer)
scaler.update()
# measure accuracy and record loss
losses.update(loss.item(), x.size(0))
if mixup_fn:
y = torch.argmax(y, dim=1)
prec1, prec5 = accuracy(outputs, y, (1, 5))
top1.update(prec1, x.size(0))
top5.update(prec5, x.size(0))
# measure elapsed time
batch_time.update(time.time() - end)
end = time.time()
if i % config.PRINT_FREQ == 0:
msg = '=> Epoch[{0}][{1}/{2}]: ' \
'Time {batch_time.val:.3f}s ({batch_time.avg:.3f}s)\t' \
'Speed {speed:.1f} samples/s\t' \
'Data {data_time.val:.3f}s ({data_time.avg:.3f}s)\t' \
'Loss {loss.val:.5f} ({loss.avg:.5f})\t' \
'Accuracy@1 {top1.val:.3f} ({top1.avg:.3f})\t' \
'Accuracy@5 {top5.val:.3f} ({top5.avg:.3f})\t'.format(
epoch, i, len(train_loader),
batch_time=batch_time,
speed=x.size(0)/batch_time.val,
data_time=data_time, loss=losses, top1=top1, top5=top5)
logging.info(msg)
torch.cuda.synchronize()
if writer_dict and comm.is_main_process():
writer = writer_dict['writer']
global_steps = writer_dict['train_global_steps']
writer.add_scalar('train_loss', losses.avg, global_steps)
writer.add_scalar('train_top1', top1.avg, global_steps)
writer_dict['train_global_steps'] = global_steps + 1
@torch.no_grad()
def test(config, val_loader, model, criterion, output_dir, tb_log_dir,
writer_dict=None, distributed=False, real_labels=None,
valid_labels=None):
batch_time = AverageMeter()
losses = AverageMeter()
top1 = AverageMeter()
top5 = AverageMeter()
logging.info('=> switch to eval mode')
model.eval()
end = time.time()
for i, (x, y) in enumerate(val_loader):
# compute output
x = x.cuda(non_blocking=True)
y = y.cuda(non_blocking=True)
outputs = model(x)
if valid_labels:
outputs = outputs[:, valid_labels]
loss = criterion(outputs, y)
if real_labels and not distributed:
real_labels.add_result(outputs)
# measure accuracy and record loss
losses.update(loss.item(), x.size(0))
prec1, prec5 = accuracy(outputs, y, (1, 5))
top1.update(prec1, x.size(0))
top5.update(prec5, x.size(0))
# measure elapsed time
batch_time.update(time.time() - end)
end = time.time()
logging.info('=> synchronize...')
comm.synchronize()
top1_acc, top5_acc, loss_avg = map(
_meter_reduce if distributed else lambda x: x.avg,
[top1, top5, losses]
)
if real_labels and not distributed:
real_top1 = real_labels.get_accuracy(k=1)
real_top5 = real_labels.get_accuracy(k=5)
msg = '=> TEST using Reassessed labels:\t' \
'Error@1 {error1:.3f}%\t' \
'Error@5 {error5:.3f}%\t' \
'Accuracy@1 {top1:.3f}%\t' \
'Accuracy@5 {top5:.3f}%\t'.format(
top1=real_top1,
top5=real_top5,
error1=100-real_top1,
error5=100-real_top5
)
logging.info(msg)
if comm.is_main_process():
msg = '=> TEST:\t' \
'Loss {loss_avg:.4f}\t' \
'Error@1 {error1:.3f}%\t' \
'Error@5 {error5:.3f}%\t' \
'Accuracy@1 {top1:.3f}%\t' \
'Accuracy@5 {top5:.3f}%\t'.format(
loss_avg=loss_avg, top1=top1_acc,
top5=top5_acc, error1=100-top1_acc,
error5=100-top5_acc
)
logging.info(msg)
if writer_dict and comm.is_main_process():
writer = writer_dict['writer']
global_steps = writer_dict['valid_global_steps']
writer.add_scalar('valid_loss', loss_avg, global_steps)
writer.add_scalar('valid_top1', top1_acc, global_steps)
writer_dict['valid_global_steps'] = global_steps + 1
logging.info('=> switch to train mode')
model.train()
return top1_acc
def _meter_reduce(meter):
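# Reduce per-rank meter sums and counts onto rank 0, then return the global average (only meaningful on rank 0)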
rank = comm.local_rank
meter_sum = torch.FloatTensor([meter.sum]).cuda(rank)
meter_count = torch.FloatTensor([meter.count]).cuda(rank)
torch.distributed.reduce(meter_sum, 0)
torch.distributed.reduce(meter_count, 0)
meter_avg = meter_sum / meter_count
return meter_avg.item()
class AverageMeter(object):
"""Computes and stores the average and current value"""
def __init__(self):
self.reset()
def reset(self):
self.val = 0
self.avg = 0
self.sum = 0
self.count = 0
def update(self, val, n=1):
self.val = val
self.sum += val * n
self.count += n
self.avg = self.sum / self.count
|
CvT/lib/core/function.py/0
|
{
"file_path": "CvT/lib/core/function.py",
"repo_id": "CvT",
"token_count": 3597
}
| 320 |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import torch
from timm.scheduler import create_scheduler
def build_lr_scheduler(cfg, optimizer, begin_epoch):
if 'METHOD' not in cfg.TRAIN.LR_SCHEDULER:
raise ValueError('Please set TRAIN.LR_SCHEDULER.METHOD!')
elif cfg.TRAIN.LR_SCHEDULER.METHOD == 'MultiStep':
lr_scheduler = torch.optim.lr_scheduler.MultiStepLR(
optimizer,
cfg.TRAIN.LR_SCHEDULER.MILESTONES,
cfg.TRAIN.LR_SCHEDULER.GAMMA,
begin_epoch - 1)
elif cfg.TRAIN.LR_SCHEDULER.METHOD == 'CosineAnnealing':
lr_scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(
optimizer,
cfg.TRAIN.END_EPOCH,
cfg.TRAIN.LR_SCHEDULER.ETA_MIN,
begin_epoch - 1
)
elif cfg.TRAIN.LR_SCHEDULER.METHOD == 'CyclicLR':
lr_scheduler = torch.optim.lr_scheduler.CyclicLR(
optimizer,
base_lr=cfg.TRAIN.LR_SCHEDULER.BASE_LR,
max_lr=cfg.TRAIN.LR_SCHEDULER.MAX_LR,
step_size_up=cfg.TRAIN.LR_SCHEDULER.STEP_SIZE_UP
)
elif cfg.TRAIN.LR_SCHEDULER.METHOD == 'timm':
args = cfg.TRAIN.LR_SCHEDULER.ARGS
lr_scheduler, _ = create_scheduler(args, optimizer)
lr_scheduler.step(begin_epoch)
else:
raise ValueError('Unknown lr scheduler: {}'.format(
cfg.TRAIN.LR_SCHEDULER.METHOD))
return lr_scheduler
|
CvT/lib/scheduler/build.py/0
|
{
"file_path": "CvT/lib/scheduler/build.py",
"repo_id": "CvT",
"token_count": 789
}
| 321 |
"""
Copyright (C) Microsoft Corporation. All rights reserved.
Microsoft Corporation ("Microsoft") grants you a nonexclusive, perpetual,
royalty-free right to use, copy, and modify the software code provided by us
("Software Code"). You may not sublicense the Software Code or any use of it
(except to your affiliates and to vendors to perform work on your behalf)
through distribution, network access, service agreement, lease, rental, or
otherwise. This license does not purport to express any claim of ownership over
data you may have shared with Microsoft in the creation of the Software Code.
Unless applicable law gives you more rights, Microsoft reserves all other
rights not expressly granted herein, whether by implication, estoppel or
otherwise.
THE SOFTWARE CODE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
MICROSOFT OR ITS LICENSORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER
IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THE SOFTWARE CODE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
"""
import argparse
import json
import os
import time
import numpy as np
from msanomalydetector.util import average_filter
from srcnn.utils import *
class gen():
def __init__(self, win_siz, step, nums):
self.control = 0
self.win_siz = win_siz
self.step = step
self.number = nums
def generate_train_data(self, value, back_k=0):
def normalize(a):
amin = np.min(a)
amax = np.max(a)
a = (a - amin) / (amax - amin + 1e-5)
return 3 * a
if back_k <= 5:
back = back_k
else:
back = 5
length = len(value)
tmp = []
for pt in range(self.win_siz, length - back, self.step):
head = max(0, pt - self.win_siz)
tail = min(length - back, pt)
data = np.array(value[head:tail])
data = data.astype(np.float64)
data = normalize(data)
num = np.random.randint(1, self.number)
ids = np.random.choice(self.win_siz, num, replace=False)
lbs = np.zeros(self.win_siz, dtype=np.int64)
if (self.win_siz - 6) not in ids:
self.control += np.random.random()
else:
self.control = 0
if self.control > 100:
ids[0] = self.win_siz - 6
self.control = 0
mean = np.mean(data)
dataavg = average_filter(data)
var = np.var(data)
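# Inject synthetic anomalies: shift each chosen point by Gaussian noise scaled
# by the local average plus the series mean, capped by min(1 + var, 10)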
for id in ids:
data[id] += (dataavg[id] + mean) * np.random.randn() * min((1 + var), 10)
lbs[id] = 1
tmp.append([data.tolist(), lbs.tolist()])
return tmp
def auto(dic):
path_auto = os.getcwd() + '/auto.json'
auto = {}
for item, value in dic:
if value is not None:
auto[item] = value
with open(path_auto, 'w+') as f:
json.dump(auto, f)
def get_path(data):
dir_ = os.getcwd() + '/' + data + '/'
fadir = [_ for _ in os.listdir(dir_)]
print(fadir, 'fadir')
files = []
for eachdir in fadir:
files += [dir_ + eachdir + '/' + _ for _ in os.listdir(dir_ + eachdir)]
print(files, 'files')
return files
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='SRCNN')
parser.add_argument('--data', type=str, required=True, help='location of the data file')
parser.add_argument('--window', type=int, default=128, help='window size')
parser.add_argument('--step', type=int, default=64, help='step')
parser.add_argument('--seed', type=int, default=54321, help='random seed')
parser.add_argument('--num', type=int, default=10, help='upper limit value for the number of anomaly points')
args = parser.parse_args()
np.random.seed(args.seed)
auto(vars(args).items())
files = get_path(args.data)
train_data_path = os.getcwd() + '/' + args.data + '_' + str(args.window) + '_train.json'
total_time = 0
results = []
print("generating train data")
generator = gen(args.window, args.step, args.num)
for f in files:
print('reading', f)
in_timestamp, in_value = read_csv(f)
in_label = []
if len(in_value) < args.window:
print("value's length < window size", len(in_value), args.window)
continue
time_start = time.time()
train_data = generator.generate_train_data(in_value)
time_end = time.time()
total_time += time_end - time_start
results += train_data
print('file num:', len(files))
print('total fake data size:', len(results))
with open(train_data_path, 'w+') as f:
print(train_data_path)
json.dump(results, f)
|
anomalydetector/srcnn/generate_data.py/0
|
{
"file_path": "anomalydetector/srcnn/generate_data.py",
"repo_id": "anomalydetector",
"token_count": 2168
}
| 322 |
{
"version": "0.2.0",
"configurations": [
{
"name": "All-Toy-NoPareto",
"type": "python",
"request": "launch",
"program": "${cwd}/scripts/supergraph/main.py",
"console": "integratedTerminal"
},
{
"name": "All-Toy-Pareto",
"type": "python",
"request": "launch",
"program": "${cwd}/scripts/supergraph/main.py",
"console": "integratedTerminal",
"args": ["--nas.search.pareto.enabled", "True",
"--nas.search.seed_train.trainer.epochs", "1",
"--nas.search.post_train.trainer.epochs", "1"]
},
{
"name": "Darts-Full",
"type": "python",
"request": "launch",
"program": "${cwd}/scripts/supergraph/main.py",
"console": "integratedTerminal",
"args": ["--full", "--algos", "darts"]
},
{
"name": "Darts-Search-Toy",
"type": "python",
"request": "launch",
"program": "${cwd}/scripts/supergraph/main.py",
"console": "integratedTerminal",
"args": ["--no-eval", "--algos", "darts"]
},
{
"name": "Darts-Eval-Toy",
"type": "python",
"request": "launch",
"program": "${cwd}/scripts/supergraph/main.py",
"console": "integratedTerminal",
"args": ["--no-search", "--algos", "darts", "--nas.eval.final_desc_filename", "models/darts/final_model_desc1.yaml"]
},
{
"name": "Darts-E2E-Toy",
"type": "python",
"request": "launch",
"program": "${cwd}/scripts/supergraph/main.py",
"console": "integratedTerminal",
"args": ["--algos", "darts"]
},
{
"name": "Darts-Eval-ImageNet",
"type": "python",
"request": "launch",
"program": "${cwd}/scripts/supergraph/main.py",
"console": "integratedTerminal",
"args": ["--full", "--algos", "darts", "--datasets", "imagenet", "--no-search", "--nas.eval.final_desc_filename", "models/darts/final_model_desc1.yaml"]
},
{
"name": "DiDarts-E2E-Toy",
"type": "python",
"request": "launch",
"program": "${cwd}/scripts/supergraph/main.py",
"console": "integratedTerminal",
"args": ["--algos", "didarts"]
},
{
"name": "Darts-Food101-Toy",
"type": "python",
"request": "launch",
"program": "${cwd}/scripts/supergraph/main.py",
"console": "integratedTerminal",
"args": ["--algos", "darts", "--datasets", "food101"]
},
{
"name": "Darts-ImageNet-Eval-Toy",
"type": "python",
"request": "launch",
"program": "${cwd}/scripts/supergraph/main.py",
"console": "integratedTerminal",
"args": ["--no-search", "--algos", "darts", "--datasets", "imagenet", "--nas.eval.final_desc_filename", "models/darts/final_model_desc1.yaml"]
},
{
"name": "Petridish-Full",
"type": "python",
"request": "launch",
"program": "${cwd}/scripts/supergraph/main.py",
"console": "integratedTerminal",
"args": ["--full", "--algos", "petridish"]
},
{
"name": "Petridish-Eval-ImageNet",
"type": "python",
"request": "launch",
"program": "${cwd}/scripts/supergraph/main.py",
"console": "integratedTerminal",
"args": ["--full", "--algos", "petridish", "--datasets", "imagenet", "--no-search", "--nas.eval.final_desc_foldername", "models/petridish/pt_sweep_seed_36_epochs_600_scale_2.0/model_desc_gallery"]
},
{
"name": "Petridish-Toy",
"type": "python",
"request": "launch",
"program": "${cwd}/scripts/supergraph/main.py",
"console": "integratedTerminal",
"args": ["--algos", "petridish", "--nas.search.pareto.enabled", "True"]
},
{
"name": "Xnas-Full",
"type": "python",
"request": "launch",
"program": "${cwd}/scripts/supergraph/main.py",
"console": "integratedTerminal",
"args": ["--full", "--algos", "xnas"]
},
{
"name": "Xnas-Search-Toy",
"type": "python",
"request": "launch",
"program": "${cwd}/scripts/supergraph/main.py",
"console": "integratedTerminal",
"args": ["--no-eval", "--algos", "xnas"]
},
{
"name": "Xnas-E2E-Toy",
"type": "python",
"request": "launch",
"program": "${cwd}/scripts/supergraph/main.py",
"console": "integratedTerminal",
"args": ["--algos", "xnas"]
},
{
"name": "Divnas-Full",
"type": "python",
"request": "launch",
"program": "${cwd}/scripts/supergraph/main.py",
"console": "integratedTerminal",
"args": ["--full", "--algos", "divnas"]
},
{
"name": "Divnas-Search-Toy",
"type": "python",
"request": "launch",
"program": "${cwd}/scripts/supergraph/main.py",
"console": "integratedTerminal",
"args": ["--no-eval", "--algos", "divnas"]
},
{
"name": "Divnas-Eval-Full",
"type": "python",
"request": "launch",
"program": "${cwd}/scripts/supergraph/main.py",
"console": "integratedTerminal",
"args": ["--no-search", "--full", "--algos", "divnas", "--nas.eval.final_desc_filename", "models/final_model_desc.yaml"]
},
{
"name": "Divnas-E2E-Toy",
"type": "python",
"request": "launch",
"program": "${cwd}/scripts/supergraph/main.py",
"console": "integratedTerminal",
"args": ["--algos", "divnas"]
},
{
"name": "Gs-Full",
"type": "python",
"request": "launch",
"program": "${cwd}/scripts/supergraph/main.py",
"console": "integratedTerminal",
"args": ["--full", "--algos", "gs"]
},
{
"name": "Gs-Search-Toy",
"type": "python",
"request": "launch",
"program": "${cwd}/scripts/supergraph/main.py",
"console": "integratedTerminal",
"args": ["--no-eval", "--algos", "gs"]
},
{
"name": "Gs-E2E-Toy",
"type": "python",
"request": "launch",
"program": "${cwd}/scripts/supergraph/main.py",
"console": "integratedTerminal",
"args": ["--algos", "gs"]
},
{
"name": "Random-Full",
"type": "python",
"request": "launch",
"program": "${cwd}/scripts/supergraph/main.py",
"console": "integratedTerminal",
"args": ["--full", "--algos", "random"]
},
{
"name": "Random-Toy",
"type": "python",
"request": "launch",
"program": "${cwd}/scripts/supergraph/main.py",
"console": "integratedTerminal",
"args": ["--algos", "random"]
},
{
"name": "Resnet-Toy",
"type": "python",
"request": "launch",
"program": "${cwd}/scripts/supergraph/main.py",
"console": "integratedTerminal",
"args": ["--no-search", "--algos", "manual"]
},
{
"name": "Resnet-Full",
"type": "python",
"request": "launch",
"program": "${cwd}/scripts/supergraph/main.py",
"console": "integratedTerminal",
"args": ["--no-search", "--full", "--algos", "manual"]
},
{
"name": "Manual-E2E-Toy",
"type": "python",
"request": "launch",
"program": "${cwd}/scripts/supergraph/main.py",
"console": "integratedTerminal",
"args": ["--algos", "manual"]
},
{
"name": "TrainAug resnet50 cocob cifar10",
"type": "python",
"request": "launch",
"program": "${cwd}/scripts/supergraph/augmented_train.py",
"console": "integratedTerminal",
"args": ["--config", "confs/aug_cifar.yaml;confs/aug_cifar_cocob_resnet50.yaml",
"--aug", "fa_reduced_cifar10"
]
},
{
"name": "TrainAug resnet50 sgd cifar10",
"type": "python",
"request": "launch",
"program": "${cwd}/scripts/supergraph/augmented_train.py",
"console": "integratedTerminal",
"args": ["--config", "confs/aug_cifar.yaml;confs/aug_cifar_sgd_resnet50.yaml",
"--aug", "fa_reduced_cifar10"
]
},
{
"name": "Exprep",
"type": "python",
"request": "launch",
"program": "${cwd}/scripts/supergraph/reports/exprep.py",
"console": "integratedTerminal",
"args": ["--results-dir", "C:\\Users\\dedey\\Documents\\archaiphilly\\phillytools\\bilevel_default_20200521",
"--out-dir", "C:\\Users\\dedey\\archai_experiment_reports", "--collate"]
},
{
"name": "CurrentFile",
"type": "python",
"request": "launch",
"program": "${file}",
"console": "integratedTerminal",
"args":[
]
}
]
}
|
archai/.vscode/launch.json/0
|
{
"file_path": "archai/.vscode/launch.json",
"repo_id": "archai",
"token_count": 5507
}
| 323 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import contextlib
import os
import psutil
import ray
import torch
import torch.distributed as dist
from torch import Tensor, nn
from torch.backends import cudnn
from torch.cuda.amp import GradScaler
from torch.nn import SyncBatchNorm
from torch.nn.parallel import DistributedDataParallel
from torch.optim.optimizer import Optimizer
from archai.common import ml_utils, utils
from archai.common.config import Config
from archai.common.ordered_dict_logger import get_global_logger
from archai.supergraph.utils.multi_optim import MultiOptim
logger = get_global_logger()
class ApexUtils:
def __init__(self, apex_config:Config)->None:
# region conf vars
self._enabled = apex_config['enabled'] # global switch to disable anything apex
self._distributed_enabled = apex_config['distributed_enabled'] # enable/disable distributed mode
self._mixed_prec_enabled = apex_config['mixed_prec_enabled'] # enable/disable mixed precision
# torch.amp has default 'O1' optimization level and cannot be configured further
# torch.amp keeps BN in fp32
# There is no loss_scale option in torch.amp
self._sync_bn = apex_config['sync_bn'] # whether to replace BNs with sync BNs for the distributed model
self._scale_lr = apex_config['scale_lr'] # whether to scale LR with world size in distributed mode
self._min_world_size = apex_config['min_world_size'] # allows to confirm we are indeed in distributed setting
seed = apex_config['seed']
detect_anomaly = apex_config['detect_anomaly']
conf_gpu_ids = apex_config['gpus']
conf_ray = apex_config['ray']
self.ray_enabled = conf_ray['enabled']
self.ray_local_mode = conf_ray['local_mode']
# endregion
self._scaler = None
self._set_ranks(conf_gpu_ids)
#_log_info({'apex_config': apex_config.to_dict()})
self._log_info({'ray.enabled': self.is_ray(), 'apex.enabled': self._enabled})
self._log_info({'torch.distributed.is_available': dist.is_available(),
'apex.distributed_enabled': self._distributed_enabled,
'apex.mixed_prec_enabled': self._mixed_prec_enabled})
if dist.is_available():
# dist.* properties are otherwise not accessible
self._op_map = {'mean': dist.ReduceOp.SUM, 'sum': dist.ReduceOp.SUM,
'min': dist.ReduceOp.MIN, 'max': dist.ReduceOp.MAX}
self._log_info({'gloo_available': dist.is_gloo_available(),
'mpi_available': dist.is_mpi_available(),
'nccl_available': dist.is_nccl_available()})
if self.is_mixed():
# init enable mixed precision
assert cudnn.enabled, "Amp requires cudnn backend to be enabled."
self._scaler = GradScaler()
# enable distributed processing
if self.is_dist():
assert not self.is_ray(), "Ray is not yet enabled for Apex distributed mode"
assert dist.is_available() # distributed module is available
assert dist.is_nccl_available()
if not dist.is_initialized():
dist.init_process_group(backend='nccl', init_method='env://')
assert dist.is_initialized()
assert dist.get_world_size() == self.world_size
assert dist.get_rank() == self.global_rank
if self.is_ray():
assert not self.is_dist(), "Ray is not yet enabled for Apex distributed mode"
if not ray.is_initialized():
ray.init(local_mode=self.ray_local_mode, include_dashboard=False,
# for some reason Ray is detecting wrong number of GPUs
num_gpus=torch.cuda.device_count())
ray_cpus = ray.nodes()[0]['Resources']['CPU']
ray_gpus = ray.nodes()[0]['Resources']['GPU']
self._log_info({'ray_cpus': ray_cpus, 'ray_gpus':ray_gpus})
assert self.world_size >= 1
assert not self._min_world_size or self.world_size >= self._min_world_size
assert self.local_rank >= 0 and self.local_rank < self.world_size
assert self.global_rank >= 0 and self.global_rank < self.world_size
assert self._gpu < torch.cuda.device_count()
torch.cuda.set_device(self._gpu)
self.device = torch.device('cuda', self._gpu)
self._setup_gpus(seed, detect_anomaly)
self._log_info({'dist_initialized': dist.is_initialized() if dist.is_available() else False,
'world_size': self.world_size,
'gpu': self._gpu, 'gpu_ids':self.gpu_ids,
'local_rank': self.local_rank,
'global_rank': self.global_rank})
def _setup_gpus(self, seed:float, detect_anomaly:bool):
utils.setup_cuda(seed, local_rank=self.local_rank)
torch.autograd.set_detect_anomaly(detect_anomaly)
self._log_info({'set_detect_anomaly': detect_anomaly,
'is_anomaly_enabled': torch.is_anomaly_enabled()})
self._log_info({'gpu_names': utils.cuda_device_names(),
'gpu_count': torch.cuda.device_count(),
'CUDA_VISIBLE_DEVICES': os.environ['CUDA_VISIBLE_DEVICES']
if 'CUDA_VISIBLE_DEVICES' in os.environ else 'NotSet',
'cudnn.enabled': cudnn.enabled,
'cudnn.benchmark': cudnn.benchmark,
'cudnn.deterministic': cudnn.deterministic,
'cudnn.version': cudnn.version()
})
self._log_info({'memory': str(psutil.virtual_memory())})
self._log_info({'CPUs': str(psutil.cpu_count())})
# gpu_usage = os.popen(
# 'nvidia-smi --query-gpu=memory.total,memory.used --format=csv,nounits,noheader'
# ).read().split('\n')
# for i, line in enumerate(gpu_usage):
# vals = line.split(',')
# if len(vals) == 2:
# _log_info('GPU {} mem: {}, used: {}'.format(i, vals[0], vals[1]))
def _set_ranks(self, conf_gpu_ids:str)->None:
# this function needs to work even when torch.distributed is not available
if 'WORLD_SIZE' in os.environ:
self.world_size = int(os.environ['WORLD_SIZE'])
else:
self.world_size = 1
if 'LOCAL_RANK' in os.environ:
self.local_rank = int(os.environ['LOCAL_RANK'])
else:
self.local_rank = 0
if 'RANK' in os.environ:
self.global_rank = int(os.environ['RANK'])
else:
self.global_rank = 0
assert self.local_rank < torch.cuda.device_count(), \
f'local_rank={self.local_rank} but device_count={torch.cuda.device_count()}' \
' Possible cause may be Pytorch is not GPU enabled or you have too few GPUs'
self.gpu_ids = [int(i) for i in conf_gpu_ids.split(',') if i]
# which GPU to use, we will use only 1 GPU per process to avoid complications with apex
# remap if GPU IDs are specified
if len(self.gpu_ids):
assert len(self.gpu_ids) > self.local_rank
self._gpu = self.gpu_ids[self.local_rank]
else:
self._gpu = self.local_rank % torch.cuda.device_count()
def is_mixed(self)->bool:
return self._enabled and self._mixed_prec_enabled
def is_dist(self)->bool:
return self._enabled and self._distributed_enabled and self.world_size > 1
def is_master(self)->bool:
return self.global_rank == 0
def is_ray(self)->bool:
return self.ray_enabled
def _log_info(self, d:dict)->None:
if logger is not None:
logger.info(d, override_key=True)
def sync_devices(self)->None:
if self.is_dist():
torch.cuda.synchronize(self.device)
def barrier(self)->None:
if self.is_dist():
dist.barrier() # wait for all processes to come to this point
def reduce(self, val, op='mean'):
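# All-reduce val across workers; plain Python scalars are moved to a tensor and converted back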
if self.is_dist():
if not isinstance(val, Tensor):
rt = torch.tensor(val).to(self.device)
converted = True
else:
rt = val.clone().to(self.device)
converted = False
r_op = self._op_map[op]
dist.all_reduce(rt, op=r_op)
if op=='mean':
rt /= self.world_size
if converted and len(rt.shape)==0:
return rt.item()
return rt
else:
return val
def _get_one_optim(self, multi_optim:MultiOptim)->Optimizer:
assert len(multi_optim)==1, \
'Mixed precision is only supported for one optimizer' \
f' but {len(multi_optim)} optimizers were supplied'
return multi_optim[0].optim
def backward(self, loss:torch.Tensor)->None:
if self.is_mixed():
self._scaler.scale(loss).backward() # pyright: ignore[reportGeneralTypeIssues, reportOptionalMemberAccess]
else:
loss.backward()
def autocast(self):
if self.is_mixed():
return torch.cuda.amp.autocast()
else:
return contextlib.nullcontext()
def step(self, multi_optim:MultiOptim)->None:
if self.is_mixed():
# self._scaler.unscale_ will be called automatically if it isn't called yet from grad clipping
# https://pytorch.org/docs/stable/amp.html#torch.cuda.amp.GradScaler.step
for optim_shed in multi_optim:
self._scaler.step(optim_shed.optim) # pyright: ignore[reportOptionalMemberAccess]
self._scaler.update() # pyright: ignore[reportOptionalMemberAccess]
else:
multi_optim.step()
def to_amp(self, model:nn.Module, multi_optim:MultiOptim, batch_size:int)\
->nn.Module:
# convert BNs to sync BNs in distributed mode
if self.is_dist() and self._sync_bn:
model = SyncBatchNorm.convert_sync_batchnorm(model)
self._log_info({'BNs_converted': True})
model = model.to(self.device)
# scale LR
if self.is_dist() and self._scale_lr:
for optim_shed in multi_optim:
optim = optim_shed.optim
lr = ml_utils.get_optim_lr(optim)
scaled_lr = lr * self.world_size / float(batch_size)
ml_utils.set_optim_lr(optim, scaled_lr)
self._log_info({'lr_scaled': True, 'old_lr': lr, 'new_lr': scaled_lr})
if self.is_dist():
model = DistributedDataParallel(model, device_ids=[self._gpu], output_device=self._gpu)
return model
def clip_grad(self, clip:float, model:nn.Module, multi_optim:MultiOptim)->None:
if clip > 0.0:
if self.is_mixed():
# https://pytorch.org/docs/stable/notes/amp_examples.html#working-with-multiple-models-losses-and-optimizers
self._scaler.unscale_(multi_optim[0].optim) # pyright: ignore[reportOptionalMemberAccess]
nn.utils.clip_grad_norm_(model.parameters(), clip)
else:
nn.utils.clip_grad_norm_(model.parameters(), clip)
def state_dict(self):
if self.is_mixed():
return self._scaler.state_dict() # pyright: ignore[reportOptionalMemberAccess]
else:
return None
def load_state_dict(self, state_dict):
if self.is_mixed():
self._scaler.load_state_dict(state_dict) # pyright: ignore[reportOptionalMemberAccess]
|
archai/archai/common/apex_utils.py/0
|
{
"file_path": "archai/archai/common/apex_utils.py",
"repo_id": "archai",
"token_count": 5466
}
| 324 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
# adapted from https://github.com/ildoonet/pystopwatch2/blob/master/pystopwatch2/watch.py
import threading
import time
from collections import defaultdict
from enum import Enum
from typing import Optional
class _ClockState(Enum):
PAUSE = 0
RUN = 1
class _Clock:
tag_default = '__default1958__'
th_lock = threading.Lock()
def __init__(self):
self.prev_time = time.time()
self.sum = 0.
self.state = _ClockState.PAUSE
def __str__(self):
return 'state=%s elapsed=%.4f prev_time=%.8f' % (self.state, self.sum, self.prev_time)
def __repr__(self):
return self.__str__()
class StopWatch:
stopwatch:'StopWatch' = None
def __init__(self):
self.clocks = defaultdict(lambda: _Clock())
def start(self, tag=None):
if tag is None:
tag = _Clock.tag_default
with _Clock.th_lock:
clock = self.clocks[tag]
if clock.state == _ClockState.RUN:
return
clock.state = _ClockState.RUN
clock.prev_time = time.time()
    def pause(self, tag=None):
        if tag is None:
            tag = _Clock.tag_default
        with _Clock.th_lock:
            clock = self.clocks[tag]
            # accumulate time only if the clock was actually running;
            # otherwise pausing twice in a row would add spurious elapsed time
            if clock.state == _ClockState.RUN:
                clock.state = _ClockState.PAUSE
                clock.sum += time.time() - clock.prev_time
            return clock.sum
def clear(self, tag=None):
if tag is None:
tag = _Clock.tag_default
del self.clocks[tag]
def get_elapsed(self, tag=None):
if tag is None:
tag = _Clock.tag_default
clock = self.clocks[tag]
elapsed = clock.sum
if clock.state == _ClockState.RUN:
elapsed += time.time() - clock.prev_time
return elapsed
def keys(self):
return self.clocks.keys()
def __str__(self):
return '\n'.join(['%s: %s' % (k, v) for k, v in self.clocks.items()])
def __repr__(self):
return self.__str__()
@staticmethod
def set(instance:'StopWatch')->None:
StopWatch.stopwatch = instance
@staticmethod
def get()->'StopWatch':
return StopWatch.stopwatch
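# --- Usage sketch (illustrative addition, not part of the original file) ---
# A minimal demonstration of the tag-based timing API defined above; the
# sleep calls are stand-ins for real work.
def _stopwatch_sketch():
    sw = StopWatch()
    StopWatch.set(sw)            # install as the global instance
    sw.start('epoch')
    time.sleep(0.01)
    sw.pause('epoch')
    sw.start('epoch')            # resumes and keeps accumulating on the same tag
    time.sleep(0.01)
    print('elapsed: %.4f' % sw.get_elapsed('epoch'))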
|
archai/archai/common/stopwatch.py/0
|
{
"file_path": "archai/archai/common/stopwatch.py",
"repo_id": "archai",
"token_count": 1019
}
| 325 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
from typing import Callable, Optional
from overrides import overrides
from torch.utils.data import Dataset
from torchvision.datasets import ImageNet
from torchvision.transforms import ToTensor
from archai.api.dataset_provider import DatasetProvider
from archai.common.ordered_dict_logger import OrderedDictLogger
logger = OrderedDictLogger(source=__name__)
class ImageNetDatasetProvider(DatasetProvider):
"""ImageNet dataset provider."""
def __init__(
self,
root: Optional[str] = "dataroot",
) -> None:
"""Initialize ImageNet dataset provider.
Args:
            root: Root directory where the dataset is saved.
"""
super().__init__()
self.root = root
@overrides
def get_train_dataset(
self,
transform: Optional[Callable] = None,
target_transform: Optional[Callable] = None,
loader: Optional[Callable] = None,
) -> Dataset:
return ImageNet(
self.root,
split="train",
transform=transform or ToTensor(),
target_transform=target_transform,
loader=loader,
)
@overrides
def get_val_dataset(
self,
transform: Optional[Callable] = None,
target_transform: Optional[Callable] = None,
loader: Optional[Callable] = None,
) -> Dataset:
return ImageNet(
self.root,
split="val",
transform=transform or ToTensor(),
target_transform=target_transform,
loader=loader,
)
@overrides
def get_test_dataset(
self,
transform: Optional[Callable] = None,
target_transform: Optional[Callable] = None,
loader: Optional[Callable] = None,
) -> Dataset:
logger.warn("Testing set not available. Returning validation set ...")
return self.get_val_dataset(transform=transform, target_transform=target_transform, loader=loader)
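# --- Usage sketch (illustrative addition, not part of the original file) ---
# Assumes ImageNet is already extracted under a local directory (the path below
# is hypothetical); torchvision's ImageNet class does not download the data.
def _imagenet_provider_sketch():
    provider = ImageNetDatasetProvider(root="/data/imagenet")
    train_dataset = provider.get_train_dataset()   # defaults to ToTensor()
    image, label = train_dataset[0]
    print(image.shape, label)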
|
archai/archai/datasets/cv/imagenet_dataset_provider.py/0
|
{
"file_path": "archai/archai/datasets/cv/imagenet_dataset_provider.py",
"repo_id": "archai",
"token_count": 848
}
| 326 |
# Copyright (c) 2019-2020, NVIDIA CORPORATION.
# Licensed under the Apache License, Version 2.0.
# https://github.com/NVIDIA/DeepLearningExamples/blob/master/PyTorch/LanguageModeling/Transformer-XL/pytorch/data_utils.py
from typing import Generator, Iterator, List, Optional, Tuple
import numpy as np
import torch
from archai.common.distributed_utils import get_rank, get_world_size
from archai.datasets.nlp.tokenizer_utils.tokenizer_base import TokenizerBase
class LMOrderedIterator:
"""Iterator that provides contiguous batches of input tokens without padding."""
def __init__(
self,
input_ids: torch.LongTensor,
bsz: int,
bptt: int,
device: Optional[torch.device] = None,
mem_len: Optional[int] = 0,
ext_len: Optional[int] = 0,
warmup: Optional[bool] = True,
) -> None:
"""Initialize the iterator with the input sequence and batch parameters.
Args:
input_ids: Input sequence of tokens.
bsz: Batch size.
bptt: Sequence length (backpropagation through time).
device: Device to place the iterator.
mem_len: Length of memory (for Transformer-XL).
ext_len: Length of extended context (for Transformer-XL).
warmup: Whether warmup batches should be created.
"""
self.bsz = bsz
self.bptt = bptt
self.device = device or torch.device("cpu")
self.ext_len = ext_len
self.mem_len = mem_len
self.warmup = warmup
self.last_iter = None
        # Cleanly divides the inputs into batches and trims any leftover elements
n_step = input_ids.size(0) // bsz
input_ids = input_ids[: n_step * bsz]
self.input_ids = input_ids.view(bsz, -1).contiguous()
if self.device.type != "cpu":
self.input_ids = self.input_ids.pin_memory()
# Creates warmup batches if memory is being used
if mem_len and warmup:
self.warmup_batches = (mem_len + bptt - 1) // bptt
self.warmup_elems = self.warmup_batches * bptt
warmup_ids = self.input_ids.roll((self.warmup_elems, 1), (1, 0))[:, : self.warmup_elems]
self.input_ids = torch.cat((warmup_ids, self.input_ids), dim=-1)
# Chunks the inputs for distributed training (if available)
world_size = get_world_size()
rank = get_rank()
self.input_ids = self.input_ids.chunk(world_size, dim=0)[rank]
self.n_batch = (self.input_ids.size(1) + self.bptt - 1) // self.bptt
def roll(self, seed: int) -> None:
"""Roll the data according to a random seed.
This method shuffles the input sequence for each batch in the iterator by
rolling/shifting the data according to the specified seed. This is useful for
creating diverse training data and preventing overfitting.
Args:
seed: Seed used to roll/shift the data.
"""
rng = torch.Generator()
rng.manual_seed(seed)
for i in range(self.input_ids.size(0)):
shift = torch.randint(0, self.input_ids.size(1), (1,), generator=rng)
row = self.input_ids[i, :]
row = torch.cat((row[shift:], row[:shift]))
self.input_ids[i, :] = row
def get_batch(self, i: int, bptt: Optional[int] = None) -> Tuple[torch.LongTensor, torch.LongTensor, int, bool]:
"""Get a batch of `bptt` size.
Args:
            i: Starting token index of the batch.
bptt: Sequence length.
Returns:
Tuple of inputs, labels, sequence length and whether batch is from warmup.
"""
if bptt is None:
bptt = self.bptt
seq_len = min(bptt, self.input_ids.size(1) - 1 - i)
start_idx = max(0, i - self.ext_len)
end_idx = i + seq_len
input_ids = self.input_ids[:, start_idx:end_idx].to(self.device, non_blocking=True)
labels = self.input_ids[:, i + 1 : i + 1 + seq_len].to(self.device, non_blocking=True)
warmup = True
if self.mem_len and self.warmup:
warmup = i >= self.warmup_elems
return input_ids, labels, seq_len, warmup
def get_fixlen_iter(self, start: Optional[int] = 0) -> Generator[Tuple, None, None]:
"""Return a generator for generating fixed-length batches.
This method returns a generator that yields fixed-length batches of the specified size,
starting from the specified starting point. The batches are contiguous in the original
sequence.
Args:
start: Starting point for the generator.
Yields:
Fixed-length batches.
Example:
>>> for batch in iterator.get_fixlen_iter():
>>> # Process the batch.
>>> pass
"""
if start != 0:
start += self.bptt
for i in range(start, self.input_ids.size(1) - 1, self.bptt):
self.last_iter = i
yield self.get_batch(i)
def get_varlen_iter(
self,
start: Optional[int] = 0,
std: Optional[float] = 5.0,
min_len: Optional[int] = 5,
max_std: Optional[float] = 3.0,
) -> Generator[Tuple, None, None]:
"""Return a generator for generating variable-length batches.
This method returns a generator that yields variable-length batches of data,
starting from the specified starting point. The length of each batch is determined
by a Gaussian distribution with the specified mean and standard deviation.
Args:
start: Starting point for the generator.
std: Standard deviation.
min_len: Minimum length.
max_std: Max standard deviation.
Yields:
Variable-length batches.
Example:
>>> for batch in iterator.get_varlen_iter():
>>> # Process the batch.
>>> pass
"""
max_len = self.bptt + max_std * std
i = start
while True:
bptt = self.bptt if np.random.random() < 0.95 else self.bptt / 2.0
bptt = min(max_len, max(min_len, int(np.random.normal(bptt, std))))
            input_ids, labels, seq_len, _ = self.get_batch(i, bptt)  # get_batch returns a 4-tuple; the warmup flag is unused here
i += seq_len
yield input_ids, labels, seq_len
if i >= self.input_ids.size(1) - 2:
break
def __iter__(self) -> Generator[Tuple, None, None]:
return self.get_fixlen_iter()
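# --- Usage sketch (illustrative addition, not part of the original file) ---
# Builds the iterator from a synthetic token stream, assuming a single
# non-distributed process; the vocabulary size and shapes are arbitrary values.
def _lm_ordered_iterator_sketch():
    tokens = torch.randint(0, 1000, (10_000,), dtype=torch.long)
    iterator = LMOrderedIterator(tokens, bsz=4, bptt=32)
    input_ids, labels, seq_len, _ = iterator.get_batch(0)
    assert input_ids.shape == (4, 32) and labels.shape == (4, 32)
    for input_ids, labels, seq_len, _ in iterator.get_fixlen_iter():
        pass  # one full pass over the contiguous batches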
class LMMultiFileIterator:
"""Multi-file non-ordered iterator, i.e. tokens come from different
files but are contiguous.
"""
def __init__(
self,
paths: List[str],
vocab: TokenizerBase,
bsz: int,
bptt: int,
device: Optional[str] = "cpu",
mem_len: Optional[int] = 0,
ext_len: Optional[int] = 0,
n_chunks: Optional[int] = 16,
shuffle: Optional[bool] = False,
) -> None:
"""Initialize by adding support to multi-file inputs and sharding files
across GPUs, if distributed training is available.
Args:
paths: Paths to input files.
vocab: Vocabulary/tokenizer.
bsz: Batch size.
bptt: Sequence length (backpropagation through time).
device: Device to place the iterator.
mem_len: Length of memory (for Transformer-XL).
ext_len: Length of extended context (for Transformer-XL).
n_chunks: Number of chunks (to avoid out of memory).
shuffle: Whether shuffling should be used.
"""
self.vocab = vocab
self.bsz = bsz
self.bptt = bptt
self.device = device
self.ext_len = ext_len
self.n_chunks = n_chunks
self.shuffle = shuffle
self.last_iter = None
# For compatibility with LMOrderedIterator
self.n_batch = -1
# Divides self.paths into world-size chunks and picks chunk for corresponding rank
world_size = get_world_size()
rank = get_rank()
chunk_len = len(paths) // world_size + 1 # it causes a slight imbalance
paths_chunks = [paths[i : i + chunk_len] for i in range(0, len(paths), chunk_len)]
self.paths = paths_chunks[rank]
def roll(self, seed: Optional[int] = 0) -> None:
"""Backward compatibility for using same API."""
pass
def get_sequences(self, path: str) -> torch.LongTensor:
"""Get a tensor of sequences from an input file.
Args:
path: A path to the input file.
Returns:
Tensor with encoded inputs.
"""
sequences = self.vocab.encode_file(path)
if self.shuffle:
np.random.shuffle(sequences)
return sequences
def stream_iterator(self, iterator: Iterator) -> Generator[Tuple, None, None]:
"""Create a streaming-based iterator.
Args:
iterator: Iterator with chunks of sequences.
Yields:
Stream-based batch.
"""
input_ids = torch.LongTensor(self.bsz, self.bptt)
labels = torch.LongTensor(self.bsz, self.bptt)
n_retain = 0
while True:
# input_ids: [bsz x n_retain+bptt]
# labels: [bsz x bptt]
input_ids[:, n_retain:].fill_(-1)
labels.fill_(-1)
valid_batch = True
for i in range(self.bsz):
n_filled = 0
try:
while n_filled < self.bptt:
stream = torch.LongTensor([next(iterator) for _ in range(self.bptt + 1)])
# Number of new tokens to be filled in
n_tokens = min(len(stream) - 1, self.bptt - n_filled)
# First n_tokens are retained from last batch
input_ids[i, n_retain + n_filled : n_retain + n_filled + n_tokens] = stream[:n_tokens]
labels[i, n_filled : n_filled + n_tokens] = stream[1 : n_tokens + 1]
n_filled += n_tokens
except StopIteration:
valid_batch = False
break
if not valid_batch:
return
input_ids = input_ids.to(self.device)
labels = labels.to(self.device)
yield input_ids, labels, self.bptt, True
n_retain = min(input_ids.size(1), self.ext_len)
if n_retain > 0:
input_ids[:, :n_retain] = input_ids[:, -n_retain:]
input_ids.resize_(input_ids.size(0), n_retain + self.bptt)
def __iter__(self) -> Generator[Tuple, None, None]:
if self.shuffle:
np.random.shuffle(self.paths)
for path in self.paths:
sequences = self.get_sequences(path)
sequences_chunks = torch.chunk(sequences, self.n_chunks, 0)
for i in range(self.n_chunks):
iterator = iter(sequences_chunks[i])
for idx, batch in enumerate(self.stream_iterator(iterator)):
yield batch
self.last_iter = idx
|
archai/archai/datasets/nlp/nvidia_data_loader_utils.py/0
|
{
"file_path": "archai/archai/datasets/nlp/nvidia_data_loader_utils.py",
"repo_id": "archai",
"token_count": 5218
}
| 327 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import random
from pathlib import Path
from typing import Optional
from overrides import overrides
from archai.api.dataset_provider import DatasetProvider
from archai.common.ordered_dict_logger import OrderedDictLogger
from archai.discrete_search.api.search_objectives import SearchObjectives
from archai.discrete_search.api.search_results import SearchResults
from archai.discrete_search.api.search_space import DiscreteSearchSpace
from archai.discrete_search.api.searcher import Searcher
from archai.discrete_search.utils.multi_objective import get_non_dominated_sorting
logger = OrderedDictLogger(source=__name__)
class SuccessiveHalvingSearch(Searcher):
"""Successive Halving algorithm"""
def __init__(
self,
search_space: DiscreteSearchSpace,
objectives: SearchObjectives,
dataset_provider: DatasetProvider,
output_dir: str,
num_iters: Optional[int] = 10,
init_num_models: Optional[int] = 10,
init_budget: Optional[float] = 1.0,
budget_multiplier: Optional[float] = 2.0,
seed: Optional[int] = 1,
) -> None:
"""Initialize the Successive Halving.
Args:
search_space: Discrete search space.
search_objectives: Search objectives.
dataset_provider: Dataset provider.
output_dir: Output directory.
num_iters: Number of iterations.
init_num_models: Number of initial models to evaluate.
init_budget: Initial budget.
budget_multiplier: Budget multiplier.
seed: Random seed.
"""
super(SuccessiveHalvingSearch, self).__init__()
assert isinstance(search_space, DiscreteSearchSpace)
# Search parameters
self.search_space = search_space
self.objectives = objectives
self.dataset_provider = dataset_provider
self.output_dir = Path(output_dir)
self.num_iters = num_iters
self.init_num_models = init_num_models
self.init_budget = init_budget
self.budget_multiplier = budget_multiplier
self.output_dir.mkdir(exist_ok=True)
# Utils
self.iter_num = 0
self.num_sampled_models = 0
self.seed = seed
self.search_state = SearchResults(search_space, objectives)
self.rng = random.Random(seed)
self.output_dir.mkdir(exist_ok=True, parents=True)
@overrides
def search(self) -> SearchResults:
current_budget = self.init_budget
population = [self.search_space.random_sample() for _ in range(self.init_num_models)]
selected_models = population
for i in range(self.num_iters):
if len(selected_models) <= 1:
logger.info(f"Search ended. Architecture selected: {selected_models[0].archid}")
self.search_space.save_arch(selected_models[0], self.output_dir / "final_model")
break
self.on_start_iteration(i + 1)
logger.info(f"Iteration {i+1}/{self.num_iters}")
logger.info(f"Evaluating {len(selected_models)} models with budget {current_budget} ...")
results = self.objectives.eval_all_objs(
selected_models,
budgets={obj_name: current_budget for obj_name in self.objectives.objectives},
)
# Logs results and saves iteration models
self.search_state.add_iteration_results(
selected_models, results, extra_model_data={"budget": [current_budget] * len(selected_models)}
)
models_dir = self.output_dir / f"models_iter_{self.iter_num}"
models_dir.mkdir(exist_ok=True)
for model in selected_models:
self.search_space.save_arch(model, str(models_dir / f"{model.archid}"))
self.search_state.save_search_state(str(self.output_dir / f"search_state_{self.iter_num}.csv"))
self.search_state.save_all_2d_pareto_evolution_plots(self.output_dir)
# Keeps only the best `1/self.budget_multiplier` NDS frontiers
logger.info("Choosing models for the next iteration ...")
nds_frontiers = get_non_dominated_sorting(selected_models, results, self.objectives)
nds_frontiers = nds_frontiers[: int(len(nds_frontiers) * 1 / self.budget_multiplier)]
selected_models = [model for frontier in nds_frontiers for model in frontier["models"]]
logger.info(f"Kept {len(selected_models)} models for next iteration.")
# Update parameters for next iteration
self.iter_num += 1
current_budget = current_budget * self.budget_multiplier
return self.search_state
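# --- Illustrative sketch (addition, not part of the original file) ---
# Shows the budget/population schedule implied by the loop above, assuming the
# non-dominated-sorting step keeps roughly 1/budget_multiplier of the models
# each round (the exact number depends on the frontier sizes).
def _successive_halving_schedule_sketch(init_num_models=16, init_budget=1.0,
                                        budget_multiplier=2.0, num_iters=4):
    n, budget = init_num_models, init_budget
    for i in range(num_iters):
        print(f"iter {i + 1}: ~{n} models evaluated at budget {budget}")
        n = max(1, int(n / budget_multiplier))
        budget *= budget_multiplier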
|
archai/archai/discrete_search/algos/successive_halving.py/0
|
{
"file_path": "archai/archai/discrete_search/algos/successive_halving.py",
"repo_id": "archai",
"token_count": 1997
}
| 328 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import copy
import pathlib
import shutil
from typing import Any, Dict, Optional
import torch
from overrides import overrides
from archai.discrete_search.api.archai_model import ArchaiModel
from archai.discrete_search.api.model_evaluator import ModelEvaluator
from archai.discrete_search.search_spaces.nlp.transformer_flex.search_space import (
TransformerFlexSearchSpace,
)
from archai.onnx.export import export_to_onnx
from archai.onnx.export_utils import prepare_model_for_onnx
from archai.onnx.optimization import optimize_onnx
TMP_FOLDER = pathlib.Path("tmp")
class TransformerFlexOnnxMemory(ModelEvaluator):
"""Measure the memory usage of models from the Transformer-Flex search space."""
def __init__(
self,
search_space: TransformerFlexSearchSpace,
use_past: Optional[bool] = True,
validate: Optional[bool] = True,
share_weights: Optional[bool] = True,
opset: Optional[int] = 11,
optimize: Optional[bool] = True,
only_ort: Optional[bool] = False,
) -> None:
"""Initialize the evaluator.
Args:
search_space: The search space to use for loading the model.
use_past: Whether to include past key/values in the model.
validate: Whether to validate the exported model.
share_weights: Whether to share the embedding and softmax weights.
            opset: ONNX opset version to use for the export.
optimize: Whether to optimize the ONNX model.
only_ort: Whether to only apply ORT optimization.
"""
assert search_space.arch_type in ["codegen", "gpt2", "gpt2-flex"]
self.search_space = search_space
# Benchmark settings
self.use_past = use_past
self.validate = validate
self.share_weights = share_weights
self.opset = opset
self.optimize = optimize
self.only_ort = only_ort
def _load_and_prepare(self, config: Dict[str, Any]) -> torch.nn.Module:
config = copy.deepcopy(config)
if self.use_past:
config["use_cache"] = True
model = self.search_space._load_model_from_config(config)
return prepare_model_for_onnx(model, self.search_space.arch_type)
@overrides
def evaluate(self, arch: ArchaiModel, budget: Optional[float] = None) -> float:
model = self._load_and_prepare(arch.metadata["config"])
        # There is a bug in Python < 3.10 when using TemporaryFile on Windows,
        # so we manually save and remove the temporary file instead
TMP_FOLDER.mkdir(parents=True, exist_ok=True)
onnx_path = TMP_FOLDER / "model.onnx"
onnx_config = export_to_onnx(
model,
onnx_path.as_posix(),
task="causal-lm",
use_past=self.use_past,
validate=self.validate,
share_weights=self.share_weights,
opset=self.opset,
)
if self.optimize:
onnx_path = optimize_onnx(onnx_path.as_posix(), onnx_config, opt_level=0, only_ort=self.only_ort)
memory = pathlib.Path(onnx_path).stat().st_size / (1024**2)
shutil.rmtree(TMP_FOLDER)
return memory
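# --- Usage sketch (illustrative addition, not part of the original file) ---
# `search_space` and `arch` are assumed to come from an existing
# TransformerFlexSearchSpace; their construction is omitted here.
def _onnx_memory_sketch(search_space, arch):
    evaluator = TransformerFlexOnnxMemory(search_space, use_past=True, optimize=True)
    size_mb = evaluator.evaluate(arch)  # size of the exported ONNX file, in MiB
    print(f"ONNX model size: {size_mb:.2f} MiB")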
|
archai/archai/discrete_search/evaluators/nlp/transformer_flex_memory.py/0
|
{
"file_path": "archai/archai/discrete_search/evaluators/nlp/transformer_flex_memory.py",
"repo_id": "archai",
"token_count": 1378
}
| 329 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
from __future__ import annotations
import json
from collections import OrderedDict
from copy import deepcopy
from pathlib import Path
from typing import Any, Dict, Optional, Union
import yaml
def build_arch_config(config_dict: Dict[str, Any]) -> ArchConfig:
"""Build an `ArchConfig` object from a sampled config dictionary.
Args:
config_dict: Config dictionary
Returns:
`ArchConfig` object.
"""
ARCH_CONFIGS = {"default": ArchConfig, "config_list": ArchConfigList}
config_type = config_dict.get("_config_type", "default")
return ARCH_CONFIGS[config_type](config_dict)
class ArchConfig:
"""Store architecture configs."""
def __init__(self, config_dict: Dict[str, Union[dict, float, int, str]]) -> None:
"""Initialize the class.
Args:
config_dict: Configuration dictionary.
"""
# Set that stores all parameters used to build the model instance
self._used_params = set()
# Original config dictionary
self._config_dict = deepcopy(config_dict)
# ArchConfig nodes
self.nodes = OrderedDict()
for param_name, param in self._config_dict.items():
if isinstance(param, dict):
self.nodes[param_name] = build_arch_config(param)
else:
self.nodes[param_name] = param
def __repr__(self) -> str:
class ArchConfigJsonEncoder(json.JSONEncoder):
def default(self, o):
if isinstance(o, ArchConfig):
return o.to_dict(remove_metadata_info=True)
return super().default(o)
cls_name = self.__class__.__name__
return f"{cls_name}({json.dumps(self, cls=ArchConfigJsonEncoder, indent=4)})"
def __contains__(self, param_name: str) -> bool:
return param_name in self.nodes
def get_used_params(self) -> Dict[str, Union[Dict, bool]]:
"""Get the parameter usage tree.
Terminal nodes with value `True` represent architecture parameters that were used
by calling `ArchConfig.pick(param_name)`.
Returns:
Used parameters.
"""
used_params = OrderedDict()
for param_name, param in self.nodes.items():
used_params[param_name] = param_name in self._used_params
if isinstance(param, ArchConfig):
used_params[param_name] = param.get_used_params()
return used_params
def pick(self, param_name: str, default: Optional[Any] = None, record_usage: Optional[bool] = True) -> Any:
"""Pick an architecture parameter, possibly recording its usage.
Args:
param_name: Architecture parameter name
default: Default value to return if parameter is not found. If `None`, an
exception is raised.
record_usage: If this parameter should be recorded as 'used' in
`ArchConfig._used_params`.
Returns:
Parameter value.
"""
if param_name in self.nodes:
param_value = self.nodes[param_name]
else:
if default is None:
raise ValueError(
f"Architecture parameter {param_name} not found in config and "
f"no default value provided. Available parameters are: {self.nodes.keys()}"
)
param_value = default
if record_usage:
self._used_params.add(param_name)
return param_value
def to_dict(self, remove_metadata_info: Optional[bool] = False) -> OrderedDict:
"""Convert `ArchConfig` object to an ordered dictionary.
Args:
remove_metadata_info: If keys used to store extra metadata should be removed.
Returns:
Ordered dictionary.
"""
return OrderedDict(
(k, v.to_dict(remove_metadata_info)) if isinstance(v, ArchConfig) else (k, v)
for k, v in self.nodes.items()
if not remove_metadata_info or not k.startswith("_")
)
def to_file(self, path: str) -> None:
"""Save `ArchConfig` object to a file.
Args:
path: Path to save the file to.
"""
path = Path(path)
path = path.parent / f"{path.name}.json" if path.suffix == "" else path
d = self.to_dict()
if path.suffix == ".yaml":
yaml.dump(d, open(path, "w", encoding="utf-8"), default_flow_style=False, sort_keys=False)
elif path.suffix == ".json":
json.dump(d, open(path, "w", encoding="utf-8"), indent=4)
else:
raise ValueError(f"Unsupported file extension {path.suffix}")
@classmethod
def from_file(cls, path: str) -> ArchConfig:
"""Load `ArchConfig` object from a file.
Args:
path: Path to load the file from.
Returns:
`ArchConfig` object.
"""
path = Path(path)
path = path.parent / f"{path.name}.json" if path.suffix == "" else path
if path.suffix == ".yaml":
d = yaml.load(open(path, "r", encoding="utf-8"), Loader=yaml.Loader)
elif path.suffix == ".json":
d = json.load(open(path, "r", encoding="utf-8"))
else:
raise ValueError(f"Unsupported file extension {path.suffix}")
return build_arch_config(d)
class ArchConfigList(ArchConfig):
"""Store a list of architecture configs."""
def __init__(self, config: OrderedDict):
"""Initialize the class.
Args:
config: Configuration dictionary.
"""
super().__init__(config)
assert "_configs" in config
assert "_repeat_times" in config
self.max_size = config["_repeat_times"]
def __len__(self) -> int:
self._used_params.add("_repeat_times")
return self.max_size
def __getitem__(self, idx: int) -> ArchConfig:
if 0 <= idx < len(self):
self._used_params.add("_repeat_times")
return self.nodes["_configs"].pick(str(idx))
raise IndexError
def __iter__(self):
yield from [self[i] for i in range(len(self))]
def pick(self, param_name: str, record_usage: Optional[bool] = True) -> None:
raise ValueError(
"Attempted to use .pick in an ArchConfigList instance. "
"Select a config first using indexing (e.g `config_list[i]`)."
)
    def to_dict(self, remove_metadata_info: Optional[bool] = False) -> Union[OrderedDict, list]:
        if remove_metadata_info:
            # Returns a plain list of the repeated block configs, dropping metadata keys
            return [
                self.nodes["_configs"].pick(str(i), record_usage=False).to_dict(remove_metadata_info)
                for i in range(self.max_size)
            ]
        return super().to_dict(remove_metadata_info)
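# --- Usage sketch (illustrative addition, not part of the original file) ---
# Builds a config tree from a plain dict and shows parameter picking plus usage
# tracking; the parameter names and values are arbitrary illustration values.
def _arch_config_sketch():
    config = build_arch_config({
        "hidden_size": 256,
        "attention": {"num_heads": 8, "dropout": 0.1},
    })
    hidden_size = config.pick("hidden_size")                # marks 'hidden_size' as used
    num_heads = config.pick("attention").pick("num_heads")  # nested ArchConfig node
    # {'hidden_size': True, 'attention': {'num_heads': True, 'dropout': False}}
    print(config.get_used_params())
    config.to_file("arch_config.json")                      # or .yaml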
|
archai/archai/discrete_search/search_spaces/config/arch_config.py/0
|
{
"file_path": "archai/archai/discrete_search/search_spaces/config/arch_config.py",
"repo_id": "archai",
"token_count": 3014
}
| 330 |
# coding=utf-8
# Copyright 2022 Salesforce authors, The EleutherAI, and HuggingFace Teams. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" PyTorch CodeGen model."""
from typing import Optional, Tuple, Union
import torch
import torch.utils.checkpoint
from torch import nn
from torch.nn import CrossEntropyLoss
from transformers.activations import ACT2FN
from transformers.modeling_outputs import BaseModelOutputWithPast, CausalLMOutputWithPast
from transformers.modeling_utils import PreTrainedModel
from transformers.utils import add_code_sample_docstrings, add_start_docstrings, add_start_docstrings_to_model_forward, logging
from transformers.models.codegen.configuration_codegen import CodeGenConfig
from transformers.models.codegen.modeling_codegen import CodeGenPreTrainedModel
from archai.discrete_search.search_spaces.config import ArchConfig
from .block import CodeGenBlock
logger = logging.get_logger(__name__)
_CHECKPOINT_FOR_DOC = "Salesforce/codegen-2B-mono"
_CONFIG_FOR_DOC = "CodeGenConfig"
_TOKENIZER_FOR_DOC = "GPT2Tokenizer"
class CodeGenModel(CodeGenPreTrainedModel):
def __init__(self, arch_config: ArchConfig, hf_config):
super().__init__(hf_config)
self.hf_config = hf_config
self.hidden_size = arch_config.pick('hidden_size')
self.vocab_size = hf_config.vocab_size
self.wte = nn.Embedding(hf_config.vocab_size, self.hidden_size)
self.embd_pdrop = hf_config.embd_pdrop
self.resid_pdrop = hf_config.resid_pdrop
self.embed_dropout = nn.Dropout(hf_config.embd_pdrop)
self.h = nn.ModuleList([
CodeGenBlock(
block_config, hf_config, self.hidden_size
) for block_config in arch_config.pick('hidden_layers')
])
self.ln_f = nn.LayerNorm(self.hidden_size, eps=hf_config.layer_norm_epsilon)
self.rotary_dim = hf_config.rotary_dim
self.gradient_checkpointing = False
# Initialize weights and apply final processing
self.post_init()
def get_input_embeddings(self):
return self.wte
def set_input_embeddings(self, new_embeddings):
self.wte = new_embeddings
@add_code_sample_docstrings(
processor_class=_TOKENIZER_FOR_DOC,
checkpoint=_CHECKPOINT_FOR_DOC,
output_type=BaseModelOutputWithPast,
config_class=_CONFIG_FOR_DOC,
)
def forward(
self,
input_ids: Optional[torch.LongTensor] = None,
past_key_values: Optional[Tuple[Tuple[torch.Tensor]]] = None,
attention_mask: Optional[torch.FloatTensor] = None,
token_type_ids: Optional[torch.LongTensor] = None,
position_ids: Optional[torch.LongTensor] = None,
head_mask: Optional[torch.FloatTensor] = None,
inputs_embeds: Optional[torch.FloatTensor] = None,
use_cache: Optional[bool] = None,
output_attentions: Optional[bool] = None,
output_hidden_states: Optional[bool] = None,
return_dict: Optional[bool] = None,
) -> Union[Tuple, BaseModelOutputWithPast]:
output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
output_hidden_states = (
output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
)
use_cache = use_cache if use_cache is not None else self.config.use_cache
return_dict = return_dict if return_dict is not None else self.config.use_return_dict
if input_ids is not None and inputs_embeds is not None:
raise ValueError("You cannot specify both input_ids and inputs_embeds at the same time")
elif input_ids is not None:
input_shape = input_ids.size()
input_ids = input_ids.view(-1, input_shape[-1])
batch_size = input_ids.shape[0]
elif inputs_embeds is not None:
input_shape = inputs_embeds.size()[:-1]
batch_size = inputs_embeds.shape[0]
else:
raise ValueError("You have to specify either input_ids or inputs_embeds")
device = input_ids.device if input_ids is not None else inputs_embeds.device
if token_type_ids is not None:
token_type_ids = token_type_ids.view(-1, input_shape[-1])
if position_ids is not None:
position_ids = position_ids.view(-1, input_shape[-1])
if past_key_values is None:
past_length = 0
past_key_values = tuple([None] * len(self.h))
else:
past_length = past_key_values[0][0].size(-2)
if position_ids is None:
position_ids = torch.arange(past_length, input_shape[-1] + past_length, dtype=torch.long, device=device)
position_ids = position_ids.unsqueeze(0).view(-1, input_shape[-1])
bin_attention_mask = attention_mask
# Attention mask.
if attention_mask is not None:
if batch_size <= 0:
raise ValueError("batch_size has to be defined and > 0")
attention_mask = attention_mask.view(batch_size, -1)
# We save the binarized attention mask for LocalAttention and LSHAttention
bin_attention_mask = attention_mask.clone()
# We create a 3D attention mask from a 2D tensor mask.
# Sizes are [batch_size, 1, 1, to_seq_length]
# So we can broadcast to [batch_size, num_heads, from_seq_length, to_seq_length]
# this attention mask is more simple than the triangular masking of causal attention
# used in OpenAI GPT, we just need to prepare the broadcast dimension here.
attention_mask = attention_mask[:, None, None, :]
# Since attention_mask is 1.0 for positions we want to attend and 0.0 for
# masked positions, this operation will create a tensor which is 0.0 for
# positions we want to attend and the dtype's smallest value for masked positions.
# Since we are adding it to the raw scores before the softmax, this is
# effectively the same as removing these entirely.
attention_mask = attention_mask.to(dtype=self.dtype) # fp16 compatibility
bin_attention_mask = bin_attention_mask.to(dtype=self.dtype) # fp16 compatibility
attention_mask = (1.0 - attention_mask) * torch.finfo(self.dtype).min
# Prepare head mask if needed
# 1.0 in head_mask indicate we keep the head
# attention_probs has shape bsz x num_attention_heads x N x N
# head_mask has shape n_layer x batch x num_attention_heads x N x N
head_mask = self.get_head_mask(head_mask, len(self.h))
if inputs_embeds is None:
inputs_embeds = self.wte(input_ids)
hidden_states = inputs_embeds
if token_type_ids is not None:
token_type_embeds = self.wte(token_type_ids)
hidden_states = hidden_states + token_type_embeds
hidden_states = self.embed_dropout(hidden_states)
output_shape = input_shape + (hidden_states.size(-1),)
presents = () if use_cache else None
all_self_attentions = () if output_attentions else None
all_hidden_states = () if output_hidden_states else None
residual = None
for i, (block, layer_past) in enumerate(zip(self.h, past_key_values)):
if output_hidden_states:
all_hidden_states = all_hidden_states + (hidden_states,)
if self.gradient_checkpointing and self.training:
if use_cache:
logger.warning(
"`use_cache=True` is incompatible with `config.gradient_checkpointing=True`. Setting "
"`use_cache=False`..."
)
use_cache = False
def create_custom_forward(module):
def custom_forward(*inputs):
# None for past_key_value
return module(*inputs, use_cache, output_attentions)
return custom_forward
hidden_states = torch.utils.checkpoint.checkpoint(
create_custom_forward(block),
hidden_states,
None,
attention_mask,
head_mask[i],
bin_attention_mask
)
else:
hidden_states = block(
hidden_states,
layer_past=layer_past,
attention_mask=attention_mask,
head_mask=head_mask[i],
use_cache=use_cache,
output_attentions=output_attentions,
bin_attention_mask=bin_attention_mask
)
if use_cache is True:
raise NotImplementedError
if output_attentions:
raise NotImplementedError
hidden_states = self.ln_f(hidden_states)
hidden_states = hidden_states.view(output_shape)
# Add last hidden state
if output_hidden_states:
all_hidden_states = all_hidden_states + (hidden_states,)
if not return_dict:
return tuple(v for v in [hidden_states, presents, all_hidden_states, all_self_attentions] if v is not None)
return BaseModelOutputWithPast(
last_hidden_state=hidden_states,
past_key_values=presents,
hidden_states=all_hidden_states,
attentions=all_self_attentions,
)
class CodeGenForCausalLM(CodeGenPreTrainedModel):
_keys_to_ignore_on_load_missing = [r"h\.\d+\.attn\.masked_bias", r"h\.\d+\.attn\.bias"]
def __init__(self, arch_config: ArchConfig, hf_config):
super().__init__(hf_config)
self.config = hf_config
self.transformer = CodeGenModel(arch_config, hf_config)
self.lm_head = nn.Linear(arch_config.pick('hidden_size'), hf_config.vocab_size)
# Initialize weights and apply final processing
self.post_init()
def get_output_embeddings(self):
return self.lm_head
def set_output_embeddings(self, new_embeddings):
self.lm_head = new_embeddings
def prepare_inputs_for_generation(self, input_ids, past=None, **kwargs):
token_type_ids = kwargs.get("token_type_ids", None)
# only last token for inputs_ids if past is defined in kwargs
if past:
input_ids = input_ids[:, -1].unsqueeze(-1)
if token_type_ids is not None:
token_type_ids = token_type_ids[:, -1].unsqueeze(-1)
attention_mask = kwargs.get("attention_mask", None)
position_ids = kwargs.get("position_ids", None)
if attention_mask is not None and position_ids is None:
# create position_ids on the fly for batch generation
position_ids = attention_mask.long().cumsum(-1) - 1
position_ids.masked_fill_(attention_mask == 0, 1)
if past:
position_ids = position_ids[:, -1].unsqueeze(-1)
else:
position_ids = None
return {
"input_ids": input_ids,
"past_key_values": past,
"use_cache": kwargs.get("use_cache"),
"position_ids": position_ids,
"attention_mask": attention_mask,
"token_type_ids": token_type_ids,
}
def forward(
self,
input_ids: Optional[torch.LongTensor] = None,
past_key_values: Optional[Tuple[Tuple[torch.Tensor]]] = None,
attention_mask: Optional[torch.FloatTensor] = None,
token_type_ids: Optional[torch.LongTensor] = None,
position_ids: Optional[torch.LongTensor] = None,
head_mask: Optional[torch.FloatTensor] = None,
inputs_embeds: Optional[torch.FloatTensor] = None,
labels: Optional[torch.LongTensor] = None,
use_cache: Optional[bool] = None,
output_attentions: Optional[bool] = None,
output_hidden_states: Optional[bool] = None,
return_dict: Optional[bool] = None,
) -> Union[Tuple, CausalLMOutputWithPast]:
r"""
labels (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*):
Labels for language modeling. Note that the labels **are shifted** inside the model, i.e. you can set
`labels = input_ids` Indices are selected in `[-100, 0, ..., config.vocab_size]` All labels set to `-100`
are ignored (masked), the loss is only computed for labels in `[0, ..., config.vocab_size]`
"""
return_dict = return_dict if return_dict is not None else self.config.use_return_dict
transformer_outputs = self.transformer(
input_ids,
past_key_values=past_key_values,
attention_mask=attention_mask,
token_type_ids=token_type_ids,
position_ids=position_ids,
head_mask=head_mask,
inputs_embeds=inputs_embeds,
use_cache=use_cache,
output_attentions=output_attentions,
output_hidden_states=output_hidden_states,
return_dict=return_dict,
)
hidden_states = transformer_outputs[0]
# make sure sampling in fp16 works correctly and
# compute loss in fp32 to match with mesh-tf version
# https://github.com/EleutherAI/gpt-neo/blob/89ce74164da2fb16179106f54e2269b5da8db333/models/gpt2/gpt2.py#L179
lm_logits = self.lm_head(hidden_states).to(torch.float32)
loss = None
if labels is not None:
# Shift so that tokens < n predict n
shift_logits = lm_logits[..., :-1, :].contiguous()
shift_labels = labels[..., 1:].contiguous()
# Flatten the tokens
loss_fct = CrossEntropyLoss()
loss = loss_fct(shift_logits.view(-1, shift_logits.size(-1)), shift_labels.view(-1))
loss = loss.to(hidden_states.dtype)
if not return_dict:
output = (lm_logits,) + transformer_outputs[1:]
return ((loss,) + output) if loss is not None else output
return CausalLMOutputWithPast(
loss=loss,
logits=lm_logits,
past_key_values=transformer_outputs.past_key_values,
hidden_states=transformer_outputs.hidden_states,
attentions=transformer_outputs.attentions,
)
@staticmethod
def _reorder_cache(past: Tuple[Tuple[torch.Tensor]], beam_idx: torch.Tensor) -> Tuple[Tuple[torch.Tensor]]:
"""
This function is used to re-order the `past_key_values` cache if [`~PretrainedModel.beam_search`] or
[`~PretrainedModel.beam_sample`] is called. This is required to match `past_key_values` with the correct
beam_idx at every generation step.
"""
return tuple(
tuple(past_state.index_select(0, beam_idx.to(past_state.device)) for past_state in layer_past)
for layer_past in past
)
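# --- Illustrative sketch (addition, not part of the original file) ---
# A minimal numeric illustration of the shift-by-one loss computed in
# CodeGenForCausalLM.forward: logits at position t are scored against the token
# at position t + 1. The batch, sequence and vocabulary sizes are arbitrary.
def _causal_shift_sketch():
    lm_logits = torch.randn(2, 5, 11)                     # (batch, seq_len, vocab)
    labels = torch.randint(0, 11, (2, 5))
    shift_logits = lm_logits[..., :-1, :].contiguous()    # predictions for positions 0..3
    shift_labels = labels[..., 1:].contiguous()           # targets are the next tokens 1..4
    return CrossEntropyLoss()(shift_logits.view(-1, 11), shift_labels.view(-1))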
|
archai/archai/discrete_search/search_spaces/nlp/tfpp/backbones/codegen/model.py/0
|
{
"file_path": "archai/archai/discrete_search/search_spaces/nlp/tfpp/backbones/codegen/model.py",
"repo_id": "archai",
"token_count": 6869
}
| 331 |
from torch import nn
from archai.discrete_search.search_spaces.config import ArchConfig
class SeparableConv1d(nn.Module):
def __init__(self, arch_config: ArchConfig, hidden_size: int,
total_heads: int, op_heads: int, **kwargs):
super().__init__()
self.hidden_size = hidden_size
self.total_heads = total_heads
self.op_heads = op_heads
self.op_size = op_heads * (hidden_size // total_heads)
self.kernel_size = arch_config.pick('kernel_size')
self.conv_map_in = nn.Linear(hidden_size, self.op_size)
self.conv = nn.Conv1d(
self.op_size, self.op_size, self.kernel_size,
padding=(self.kernel_size-1), groups=self.op_size
)
self.act = nn.ReLU()
def forward(self, hidden_states, **kwargs):
out = self.act(self.conv_map_in(hidden_states))
out = self.act(self.conv(out.transpose(-1,-2)).transpose(-1,-2))
# Removes padding to get back the original sequence length
out = out[:, :hidden_states.shape[1], :]
return out, None
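# --- Usage sketch (illustrative addition, not part of the original file) ---
# Shows the expected shapes; the ArchConfig is built from a plain dict and the
# head counts are arbitrary illustration values.
def _separable_conv1d_sketch():
    import torch
    op = SeparableConv1d(ArchConfig({'kernel_size': 3}), hidden_size=64,
                         total_heads=8, op_heads=2)
    hidden_states = torch.randn(4, 10, 64)   # (batch, seq_len, hidden_size)
    out, _ = op(hidden_states)
    assert out.shape == (4, 10, 16)          # op_heads * (hidden_size // total_heads)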
|
archai/archai/discrete_search/search_spaces/nlp/tfpp/ops/sep_conv1d.py/0
|
{
"file_path": "archai/archai/discrete_search/search_spaces/nlp/tfpp/ops/sep_conv1d.py",
"repo_id": "archai",
"token_count": 520
}
| 332 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import torch
import torch.nn.functional as F
from overrides import overrides
from torch import nn
from archai.common.common import get_conf
from archai.supergraph.algos.gumbelsoftmax.gs_op import GsOp
from archai.supergraph.nas.finalizers import Finalizers
from archai.supergraph.nas.model_desc import EdgeDesc, NodeDesc
class GsFinalizers(Finalizers):
@overrides
def finalize_node(self, node:nn.ModuleList, node_index:int,
node_desc:NodeDesc, max_final_edges:int,
*args, **kwargs)->NodeDesc:
conf = get_conf()
gs_num_sample = conf['nas']['search']['model_desc']['cell']['gs']['num_sample']
# gather the alphas of all edges in this node
node_alphas = []
for edge in node:
if hasattr(edge._op, 'PRIMITIVES') and type(edge._op) == GsOp:
alphas = [alpha for op, alpha in edge._op.ops()]
node_alphas.extend(alphas)
# TODO: will creating a tensor from a list of tensors preserve the graph?
node_alphas = torch.Tensor(node_alphas)
assert node_alphas.nelement() > 0
# sample ops via gumbel softmax
sample_storage = []
for _ in range(gs_num_sample):
sampled = F.gumbel_softmax(node_alphas, tau=1, hard=True, eps=1e-10, dim=-1)
sample_storage.append(sampled)
samples_summed = torch.sum(torch.stack(sample_storage, dim=0), dim=0)
# send the sampled op weights to their
# respective edges to be used for edge level finalize
selected_edges = []
counter = 0
for _, edge in enumerate(node):
if hasattr(edge._op, 'PRIMITIVES') and type(edge._op) == GsOp:
this_edge_sampled_weights = samples_summed[counter:counter+len(edge._op.PRIMITIVES)]
counter += len(edge._op.PRIMITIVES)
# finalize the edge
if this_edge_sampled_weights.bool().any():
op_desc, _ = edge._op.finalize(this_edge_sampled_weights)
new_edge = EdgeDesc(op_desc, edge.input_ids)
selected_edges.append(new_edge)
# delete excess edges
if len(selected_edges) > max_final_edges:
# since these are sample edges there is no ordering
# amongst them so we just arbitrarily select a few
selected_edges = selected_edges[:max_final_edges]
return NodeDesc(selected_edges, node_desc.conv_params)
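# --- Illustrative sketch (addition, not part of the original file) ---
# Demonstrates the hard Gumbel-softmax sampling used in finalize_node: every
# draw is a one-hot vector, so summing draws counts how often each op wins,
# and ops with a nonzero count survive finalization.
def _gumbel_sampling_sketch(num_ops=4, num_samples=10):
    alphas = torch.randn(num_ops)
    samples = [F.gumbel_softmax(alphas, tau=1, hard=True, dim=-1)
               for _ in range(num_samples)]
    return torch.sum(torch.stack(samples, dim=0), dim=0)  # e.g. tensor([3., 0., 6., 1.])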
|
archai/archai/supergraph/algos/gumbelsoftmax/gs_finalizers.py/0
|
{
"file_path": "archai/archai/supergraph/algos/gumbelsoftmax/gs_finalizers.py",
"repo_id": "archai",
"token_count": 1140
}
| 333 |
# Copyright 2019 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: skip-file
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: model_metrics.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='model_metrics.proto',
package='nasbench',
syntax='proto2',
serialized_options=None,
serialized_pb=_b('\n\x13model_metrics.proto\x12\x08nasbench\"s\n\x0cModelMetrics\x12\x31\n\x0f\x65valuation_data\x18\x01 \x03(\x0b\x32\x18.nasbench.EvaluationData\x12\x1c\n\x14trainable_parameters\x18\x02 \x01(\x05\x12\x12\n\ntotal_time\x18\x03 \x01(\x01\"\xa3\x01\n\x0e\x45valuationData\x12\x15\n\rcurrent_epoch\x18\x01 \x01(\x01\x12\x15\n\rtraining_time\x18\x02 \x01(\x01\x12\x16\n\x0etrain_accuracy\x18\x03 \x01(\x01\x12\x1b\n\x13validation_accuracy\x18\x04 \x01(\x01\x12\x15\n\rtest_accuracy\x18\x05 \x01(\x01\x12\x17\n\x0f\x63heckpoint_path\x18\x06 \x01(\t')
)
_MODELMETRICS = _descriptor.Descriptor(
name='ModelMetrics',
full_name='nasbench.ModelMetrics',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='evaluation_data', full_name='nasbench.ModelMetrics.evaluation_data', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='trainable_parameters', full_name='nasbench.ModelMetrics.trainable_parameters', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='total_time', full_name='nasbench.ModelMetrics.total_time', index=2,
number=3, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=33,
serialized_end=148,
)
_EVALUATIONDATA = _descriptor.Descriptor(
name='EvaluationData',
full_name='nasbench.EvaluationData',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='current_epoch', full_name='nasbench.EvaluationData.current_epoch', index=0,
number=1, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='training_time', full_name='nasbench.EvaluationData.training_time', index=1,
number=2, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='train_accuracy', full_name='nasbench.EvaluationData.train_accuracy', index=2,
number=3, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='validation_accuracy', full_name='nasbench.EvaluationData.validation_accuracy', index=3,
number=4, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='test_accuracy', full_name='nasbench.EvaluationData.test_accuracy', index=4,
number=5, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='checkpoint_path', full_name='nasbench.EvaluationData.checkpoint_path', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=151,
serialized_end=314,
)
_MODELMETRICS.fields_by_name['evaluation_data'].message_type = _EVALUATIONDATA
DESCRIPTOR.message_types_by_name['ModelMetrics'] = _MODELMETRICS
DESCRIPTOR.message_types_by_name['EvaluationData'] = _EVALUATIONDATA
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
ModelMetrics = _reflection.GeneratedProtocolMessageType('ModelMetrics', (_message.Message,), dict(
DESCRIPTOR = _MODELMETRICS,
__module__ = 'model_metrics_pb2'
# @@protoc_insertion_point(class_scope:nasbench.ModelMetrics)
))
_sym_db.RegisterMessage(ModelMetrics)
EvaluationData = _reflection.GeneratedProtocolMessageType('EvaluationData', (_message.Message,), dict(
DESCRIPTOR = _EVALUATIONDATA,
__module__ = 'model_metrics_pb2'
# @@protoc_insertion_point(class_scope:nasbench.EvaluationData)
))
_sym_db.RegisterMessage(EvaluationData)
# @@protoc_insertion_point(module_scope)
|
archai/archai/supergraph/algos/nasbench101/model_metrics_pb2.py/0
|
{
"file_path": "archai/archai/supergraph/algos/nasbench101/model_metrics_pb2.py",
"repo_id": "archai",
"token_count": 2728
}
| 334 |
import os
from collections import namedtuple
import torch
import torch.nn as nn
import torch.nn.functional as F
__all__ = ['Inception3', 'inception_v3']
_InceptionOutputs = namedtuple('InceptionOutputs', ['logits', 'aux_logits'])
def inception_v3(pretrained=False, progress=True, device='cpu', **kwargs):
r"""Inception v3 model architecture from
`"Rethinking the Inception Architecture for Computer Vision" <https://arxiv.org/abs/1512.00567>`_.
.. note::
**Important**: In contrast to the other models the inception_v3 expects tensors with a size of
N x 3 x 299 x 299, so ensure your images are sized accordingly.
Args:
pretrained (bool): If True, returns a model pre-trained on ImageNet
progress (bool): If True, displays a progress bar of the download to stderr
        aux_logits (bool): If True, add an auxiliary branch that can improve training.
            Default: *False* in this CIFAR-10 variant
transform_input (bool): If True, preprocesses the input according to the method with which it
was trained on ImageNet. Default: *False*
"""
    model = Inception3(**kwargs)  # forward optional args such as aux_logits/transform_input
if pretrained:
script_dir = os.path.dirname(__file__)
state_dict = torch.load(script_dir + '/state_dicts/inception_v3.pt', map_location=device)
model.load_state_dict(state_dict)
return model
class Inception3(nn.Module):
## CIFAR10: aux_logits True->False
def __init__(self, num_classes=10, aux_logits=False, transform_input=False):
super(Inception3, self).__init__()
self.aux_logits = aux_logits
self.transform_input = transform_input
## CIFAR10: stride 2->1, padding 0 -> 1
self.Conv2d_1a_3x3 = BasicConv2d(3, 192, kernel_size=3, stride=1, padding=1)
# self.Conv2d_2a_3x3 = BasicConv2d(32, 32, kernel_size=3)
# self.Conv2d_2b_3x3 = BasicConv2d(32, 64, kernel_size=3, padding=1)
# self.Conv2d_3b_1x1 = BasicConv2d(64, 80, kernel_size=1)
# self.Conv2d_4a_3x3 = BasicConv2d(80, 192, kernel_size=3)
self.Mixed_5b = InceptionA(192, pool_features=32)
self.Mixed_5c = InceptionA(256, pool_features=64)
self.Mixed_5d = InceptionA(288, pool_features=64)
self.Mixed_6a = InceptionB(288)
self.Mixed_6b = InceptionC(768, channels_7x7=128)
self.Mixed_6c = InceptionC(768, channels_7x7=160)
self.Mixed_6d = InceptionC(768, channels_7x7=160)
self.Mixed_6e = InceptionC(768, channels_7x7=192)
if aux_logits:
self.AuxLogits = InceptionAux(768, num_classes)
self.Mixed_7a = InceptionD(768)
self.Mixed_7b = InceptionE(1280)
self.Mixed_7c = InceptionE(2048)
self.fc = nn.Linear(2048, num_classes)
# for m in self.modules():
# if isinstance(m, nn.Conv2d) or isinstance(m, nn.Linear):
# import scipy.stats as stats
# stddev = m.stddev if hasattr(m, 'stddev') else 0.1
# X = stats.truncnorm(-2, 2, scale=stddev)
# values = torch.as_tensor(X.rvs(m.weight.numel()), dtype=m.weight.dtype)
# values = values.view(m.weight.size())
# with torch.no_grad():
# m.weight.copy_(values)
# elif isinstance(m, nn.BatchNorm2d):
# nn.init.constant_(m.weight, 1)
# nn.init.constant_(m.bias, 0)
def forward(self, x):
if self.transform_input:
x_ch0 = torch.unsqueeze(x[:, 0], 1) * (0.229 / 0.5) + (0.485 - 0.5) / 0.5
x_ch1 = torch.unsqueeze(x[:, 1], 1) * (0.224 / 0.5) + (0.456 - 0.5) / 0.5
x_ch2 = torch.unsqueeze(x[:, 2], 1) * (0.225 / 0.5) + (0.406 - 0.5) / 0.5
x = torch.cat((x_ch0, x_ch1, x_ch2), 1)
# N x 3 x 299 x 299
x = self.Conv2d_1a_3x3(x)
## CIFAR10
# N x 32 x 149 x 149
# x = self.Conv2d_2a_3x3(x)
# N x 32 x 147 x 147
# x = self.Conv2d_2b_3x3(x)
# N x 64 x 147 x 147
# x = F.max_pool2d(x, kernel_size=3, stride=2)
# N x 64 x 73 x 73
# x = self.Conv2d_3b_1x1(x)
# N x 80 x 73 x 73
# x = self.Conv2d_4a_3x3(x)
# N x 192 x 71 x 71
# x = F.max_pool2d(x, kernel_size=3, stride=2)
# N x 192 x 35 x 35
x = self.Mixed_5b(x)
# N x 256 x 35 x 35
x = self.Mixed_5c(x)
# N x 288 x 35 x 35
x = self.Mixed_5d(x)
# N x 288 x 35 x 35
x = self.Mixed_6a(x)
# N x 768 x 17 x 17
x = self.Mixed_6b(x)
# N x 768 x 17 x 17
x = self.Mixed_6c(x)
# N x 768 x 17 x 17
x = self.Mixed_6d(x)
# N x 768 x 17 x 17
x = self.Mixed_6e(x)
# N x 768 x 17 x 17
if self.training and self.aux_logits:
aux = self.AuxLogits(x)
# N x 768 x 17 x 17
x = self.Mixed_7a(x)
# N x 1280 x 8 x 8
x = self.Mixed_7b(x)
# N x 2048 x 8 x 8
x = self.Mixed_7c(x)
# N x 2048 x 8 x 8
# Adaptive average pooling
x = F.adaptive_avg_pool2d(x, (1, 1))
# N x 2048 x 1 x 1
x = F.dropout(x, training=self.training)
# N x 2048 x 1 x 1
x = x.view(x.size(0), -1)
# N x 2048
x = self.fc(x)
# N x 1000 (num_classes)
if self.training and self.aux_logits:
            return _InceptionOutputs(x, aux)
return x
class InceptionA(nn.Module):
def __init__(self, in_channels, pool_features):
super(InceptionA, self).__init__()
self.branch1x1 = BasicConv2d(in_channels, 64, kernel_size=1)
self.branch5x5_1 = BasicConv2d(in_channels, 48, kernel_size=1)
self.branch5x5_2 = BasicConv2d(48, 64, kernel_size=5, padding=2)
self.branch3x3dbl_1 = BasicConv2d(in_channels, 64, kernel_size=1)
self.branch3x3dbl_2 = BasicConv2d(64, 96, kernel_size=3, padding=1)
self.branch3x3dbl_3 = BasicConv2d(96, 96, kernel_size=3, padding=1)
self.branch_pool = BasicConv2d(in_channels, pool_features, kernel_size=1)
def forward(self, x):
branch1x1 = self.branch1x1(x)
branch5x5 = self.branch5x5_1(x)
branch5x5 = self.branch5x5_2(branch5x5)
branch3x3dbl = self.branch3x3dbl_1(x)
branch3x3dbl = self.branch3x3dbl_2(branch3x3dbl)
branch3x3dbl = self.branch3x3dbl_3(branch3x3dbl)
branch_pool = F.avg_pool2d(x, kernel_size=3, stride=1, padding=1)
branch_pool = self.branch_pool(branch_pool)
outputs = [branch1x1, branch5x5, branch3x3dbl, branch_pool]
return torch.cat(outputs, 1)
class InceptionB(nn.Module):
def __init__(self, in_channels):
super(InceptionB, self).__init__()
self.branch3x3 = BasicConv2d(in_channels, 384, kernel_size=3, stride=2)
self.branch3x3dbl_1 = BasicConv2d(in_channels, 64, kernel_size=1)
self.branch3x3dbl_2 = BasicConv2d(64, 96, kernel_size=3, padding=1)
self.branch3x3dbl_3 = BasicConv2d(96, 96, kernel_size=3, stride=2)
def forward(self, x):
branch3x3 = self.branch3x3(x)
branch3x3dbl = self.branch3x3dbl_1(x)
branch3x3dbl = self.branch3x3dbl_2(branch3x3dbl)
branch3x3dbl = self.branch3x3dbl_3(branch3x3dbl)
branch_pool = F.max_pool2d(x, kernel_size=3, stride=2)
outputs = [branch3x3, branch3x3dbl, branch_pool]
return torch.cat(outputs, 1)
class InceptionC(nn.Module):
def __init__(self, in_channels, channels_7x7):
super(InceptionC, self).__init__()
self.branch1x1 = BasicConv2d(in_channels, 192, kernel_size=1)
c7 = channels_7x7
self.branch7x7_1 = BasicConv2d(in_channels, c7, kernel_size=1)
self.branch7x7_2 = BasicConv2d(c7, c7, kernel_size=(1, 7), padding=(0, 3))
self.branch7x7_3 = BasicConv2d(c7, 192, kernel_size=(7, 1), padding=(3, 0))
self.branch7x7dbl_1 = BasicConv2d(in_channels, c7, kernel_size=1)
self.branch7x7dbl_2 = BasicConv2d(c7, c7, kernel_size=(7, 1), padding=(3, 0))
self.branch7x7dbl_3 = BasicConv2d(c7, c7, kernel_size=(1, 7), padding=(0, 3))
self.branch7x7dbl_4 = BasicConv2d(c7, c7, kernel_size=(7, 1), padding=(3, 0))
self.branch7x7dbl_5 = BasicConv2d(c7, 192, kernel_size=(1, 7), padding=(0, 3))
self.branch_pool = BasicConv2d(in_channels, 192, kernel_size=1)
def forward(self, x):
branch1x1 = self.branch1x1(x)
branch7x7 = self.branch7x7_1(x)
branch7x7 = self.branch7x7_2(branch7x7)
branch7x7 = self.branch7x7_3(branch7x7)
branch7x7dbl = self.branch7x7dbl_1(x)
branch7x7dbl = self.branch7x7dbl_2(branch7x7dbl)
branch7x7dbl = self.branch7x7dbl_3(branch7x7dbl)
branch7x7dbl = self.branch7x7dbl_4(branch7x7dbl)
branch7x7dbl = self.branch7x7dbl_5(branch7x7dbl)
branch_pool = F.avg_pool2d(x, kernel_size=3, stride=1, padding=1)
branch_pool = self.branch_pool(branch_pool)
outputs = [branch1x1, branch7x7, branch7x7dbl, branch_pool]
return torch.cat(outputs, 1)
class InceptionD(nn.Module):
def __init__(self, in_channels):
super(InceptionD, self).__init__()
self.branch3x3_1 = BasicConv2d(in_channels, 192, kernel_size=1)
self.branch3x3_2 = BasicConv2d(192, 320, kernel_size=3, stride=2)
self.branch7x7x3_1 = BasicConv2d(in_channels, 192, kernel_size=1)
self.branch7x7x3_2 = BasicConv2d(192, 192, kernel_size=(1, 7), padding=(0, 3))
self.branch7x7x3_3 = BasicConv2d(192, 192, kernel_size=(7, 1), padding=(3, 0))
self.branch7x7x3_4 = BasicConv2d(192, 192, kernel_size=3, stride=2)
def forward(self, x):
branch3x3 = self.branch3x3_1(x)
branch3x3 = self.branch3x3_2(branch3x3)
branch7x7x3 = self.branch7x7x3_1(x)
branch7x7x3 = self.branch7x7x3_2(branch7x7x3)
branch7x7x3 = self.branch7x7x3_3(branch7x7x3)
branch7x7x3 = self.branch7x7x3_4(branch7x7x3)
branch_pool = F.max_pool2d(x, kernel_size=3, stride=2)
outputs = [branch3x3, branch7x7x3, branch_pool]
return torch.cat(outputs, 1)
class InceptionE(nn.Module):
def __init__(self, in_channels):
super(InceptionE, self).__init__()
self.branch1x1 = BasicConv2d(in_channels, 320, kernel_size=1)
self.branch3x3_1 = BasicConv2d(in_channels, 384, kernel_size=1)
self.branch3x3_2a = BasicConv2d(384, 384, kernel_size=(1, 3), padding=(0, 1))
self.branch3x3_2b = BasicConv2d(384, 384, kernel_size=(3, 1), padding=(1, 0))
self.branch3x3dbl_1 = BasicConv2d(in_channels, 448, kernel_size=1)
self.branch3x3dbl_2 = BasicConv2d(448, 384, kernel_size=3, padding=1)
self.branch3x3dbl_3a = BasicConv2d(384, 384, kernel_size=(1, 3), padding=(0, 1))
self.branch3x3dbl_3b = BasicConv2d(384, 384, kernel_size=(3, 1), padding=(1, 0))
self.branch_pool = BasicConv2d(in_channels, 192, kernel_size=1)
def forward(self, x):
branch1x1 = self.branch1x1(x)
branch3x3 = self.branch3x3_1(x)
branch3x3 = [
self.branch3x3_2a(branch3x3),
self.branch3x3_2b(branch3x3),
]
branch3x3 = torch.cat(branch3x3, 1)
branch3x3dbl = self.branch3x3dbl_1(x)
branch3x3dbl = self.branch3x3dbl_2(branch3x3dbl)
branch3x3dbl = [
self.branch3x3dbl_3a(branch3x3dbl),
self.branch3x3dbl_3b(branch3x3dbl),
]
branch3x3dbl = torch.cat(branch3x3dbl, 1)
branch_pool = F.avg_pool2d(x, kernel_size=3, stride=1, padding=1)
branch_pool = self.branch_pool(branch_pool)
outputs = [branch1x1, branch3x3, branch3x3dbl, branch_pool]
return torch.cat(outputs, 1)
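# Note: InceptionE widens the filter bank; within a branch the (1, 3) and (3, 1)
# outputs are computed in parallel and concatenated rather than stacked, giving
# 320 + 768 + 768 + 192 = 2048 output channels.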
class InceptionAux(nn.Module):
def __init__(self, in_channels, num_classes):
super(InceptionAux, self).__init__()
self.conv0 = BasicConv2d(in_channels, 128, kernel_size=1)
self.conv1 = BasicConv2d(128, 768, kernel_size=5)
self.conv1.stddev = 0.01
self.fc = nn.Linear(768, num_classes)
self.fc.stddev = 0.001
def forward(self, x):
# N x 768 x 17 x 17
x = F.avg_pool2d(x, kernel_size=5, stride=3)
# N x 768 x 5 x 5
x = self.conv0(x)
# N x 128 x 5 x 5
x = self.conv1(x)
# N x 768 x 1 x 1
# Adaptive average pooling
x = F.adaptive_avg_pool2d(x, (1, 1))
# N x 768 x 1 x 1
x = x.view(x.size(0), -1)
# N x 768
x = self.fc(x)
# N x 1000
return x
class BasicConv2d(nn.Module):
def __init__(self, in_channels, out_channels, **kwargs):
super(BasicConv2d, self).__init__()
self.conv = nn.Conv2d(in_channels, out_channels, bias=False, **kwargs)
self.bn = nn.BatchNorm2d(out_channels, eps=0.001)
def forward(self, x):
x = self.conv(x)
x = self.bn(x)
return F.relu(x, inplace=True)
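# Note: BasicConv2d is the Conv -> BatchNorm -> ReLU unit used by every branch above;
# the convolution sets bias=False because the subsequent BatchNorm's affine shift
# makes a separate conv bias redundant.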
|
archai/archai/supergraph/models/inception.py/0
|
{
"file_path": "archai/archai/supergraph/models/inception.py",
"repo_id": "archai",
"token_count": 6820
}
| 335 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import os
from typing import Callable, Optional, Type
import torch
from overrides import EnforceOverrides, overrides
from torch import Tensor
from archai.common import utils
from archai.common.config import Config
from archai.supergraph.datasets import data
from archai.supergraph.nas.model import Model
from archai.supergraph.nas.vis_model_desc import draw_model_desc
from archai.supergraph.utils.checkpoint import CheckPoint
from archai.supergraph.utils.trainer import Trainer
TArchTrainer = Optional[Type['ArchTrainer']]
class ArchTrainer(Trainer, EnforceOverrides):
def __init__(self, conf_train: Config, model: Model,
checkpoint:Optional[CheckPoint]) -> None:
super().__init__(conf_train, model, checkpoint)
self._l1_alphas = conf_train['l1_alphas']
self._plotsdir = conf_train['plotsdir']
# if l1 regularization is needed then cache alphas
if self._l1_alphas > 0.0:
self._alphas = list(self.model.all_owned().param_by_kind('alphas'))
@overrides
def compute_loss(self, lossfn: Callable,
y: Tensor, logits: Tensor,
aux_weight: float, aux_logits: Optional[Tensor]) -> Tensor:
loss = super().compute_loss(lossfn, y, logits,
aux_weight, aux_logits)
# add L1 alpha regularization
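        # i.e. loss = task_loss + l1_alphas * sum_a ||a||_1, a sparsity penalty
        # that pushes the architecture weights (alphas) toward zero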
if self._l1_alphas > 0.0:
l_extra = sum(torch.sum(a.abs()) for a in self._alphas)
loss += self._l1_alphas * l_extra
return loss
@overrides
def post_epoch(self, data_loaders:data.DataLoaders)->None:
super().post_epoch(data_loaders)
self._draw_model()
# TODO: move this outside as utility
def _draw_model(self) -> None:
if not self._plotsdir:
return
train_metrics = self.get_metrics()
if train_metrics:
best_train, best_val, best_test = train_metrics.run_metrics.best_epoch()
# if test is available and is best for this epoch then mark it as best
is_best = best_test and best_test.index==train_metrics.cur_epoch().index
# if val is available and is best for this epoch then mark it as best
is_best = is_best or best_val and best_val.index==train_metrics.cur_epoch().index
            # if neither val nor test is available then use train metrics
is_best = is_best or best_train.index==train_metrics.cur_epoch().index
if is_best:
# log model_desc as a image
plot_filepath = utils.full_path(os.path.join(
self._plotsdir,
f"EP{train_metrics.cur_epoch().index:03d}"),
create=True)
draw_model_desc(self.model.finalize(), filepath=plot_filepath,
caption=f"Epoch {train_metrics.cur_epoch().index}")
|
archai/archai/supergraph/nas/arch_trainer.py/0
|
{
"file_path": "archai/archai/supergraph/nas/arch_trainer.py",
"repo_id": "archai",
"token_count": 1344
}
| 336 |
import copy
import json
import os
import time
from collections import OrderedDict
from typing import Optional
import gorilla
import numpy as np
import ray
import torch
from hyperopt import hp
from ray.tune import register_trainable, run_experiments
from ray.tune.suggest import HyperOptSearch
from ray.tune.trial import Trial
from tqdm import tqdm
from archai.common.common import expdir_abspath
from archai.common.config import Config
from archai.common.ordered_dict_logger import get_global_logger
from archai.common.stopwatch import StopWatch
from archai.supergraph.datasets.augmentation import (
augment_list,
policy_decoder,
remove_deplicates,
)
from archai.supergraph.datasets.data import get_dataloaders
from archai.supergraph.models import get_model, num_class
from archai.supergraph.utils.augmented_trainer import train_and_eval
from archai.supergraph.utils.metrics import Accumulator
logger = get_global_logger()
# this method is an overridden version of ray.tune.trial_runner.TrialRunner.step, applied via monkey patching
def _step_w_log(self):
original = gorilla.get_original_attribute(ray.tune.trial_runner.TrialRunner, "step")
# collect counts by status for all trials
cnts = OrderedDict()
for status in [Trial.RUNNING, Trial.TERMINATED, Trial.PENDING, Trial.PAUSED, Trial.ERROR]:
cnt = len(list(filter(lambda x: x.status == status, self._trials)))
cnts[status] = cnt
# get the best top1 accuracy from all finished trials so far
best_top1_acc = 0.0
for trial in filter(lambda x: x.status == Trial.TERMINATED, self._trials):
if not trial.last_result: # TODO: why would this happen?
continue
best_top1_acc = max(best_top1_acc, trial.last_result["top1_valid"])
    # display best accuracy from all finished trials
logger.info("iter", self._iteration, "top1_acc=%.3f" % best_top1_acc, cnts, end="\r")
# call original step method
return original(self)
# override ray.tune.trial_runner.TrialRunner.step method so we can print best accuracy at each step
patch = gorilla.Patch(ray.tune.trial_runner.TrialRunner, "step", _step_w_log, settings=gorilla.Settings(allow_hit=True))
gorilla.apply(patch)
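# With the patch applied, every call to TrialRunner.step() now runs _step_w_log,
# which logs trial-status counts and the best accuracy so far, then delegates to
# the original step() retrieved via gorilla.get_original_attribute.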
@ray.remote(num_gpus=torch.cuda.device_count(), max_calls=1)
def _train_model(conf, dataroot, augment, val_ratio, val_fold, save_path=None, only_eval=False):
Config.set_inst(conf)
conf["autoaug"]["loader"]["aug"] = augment
model_type = conf["autoaug"]["model"]["type"]
result = train_and_eval(conf, val_ratio=val_ratio, val_fold=val_fold, save_path=save_path, only_eval=only_eval)
return model_type, val_fold, result
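# num_gpus reserves every local GPU for the task, and max_calls=1 makes Ray tear
# down the worker process after each call, so GPU memory is fully released
# between trainings.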
def _get_model_filepath(dataset, model, tag) -> Optional[str]:
filename = "%s_%s_%s.model" % (dataset, model, tag)
return expdir_abspath(filename)
def _train_no_aug(conf):
sw = StopWatch.get()
# region conf vars
conf_dataset = conf["dataset"]
dataroot = conf["dataroot"]
conf_loader = conf["autoaug"]["loader"]
conf_model = conf["autoaug"]["model"]
model_type = conf_model["type"]
ds_name = conf_dataset["name"]
aug = conf_loader["aug"]
val_ratio = conf_loader["val_ratio"]
epochs = conf_loader["epochs"]
cv_num = conf_loader["cv_num"]
# endregion
logger.info("----- Train without Augmentations cv=%d ratio(test)=%.1f -----" % (cv_num, val_ratio))
sw.start(tag="train_no_aug")
# for each fold, we will save model
save_paths = [_get_model_filepath(ds_name, model_type, "ratio%.1f_fold%d" % (val_ratio, i)) for i in range(cv_num)]
# Train model for each fold, save model in specified path, put result
# in reqs list. These models are trained with aug specified in config.
# TODO: configuration will be changed ('aug' key),
# but do we really need deepcopy everywhere?
reqs = [
# TODO: eliminate need for deep copy as only aug key is changed
_train_model.remote(
            copy.deepcopy(conf), dataroot, aug, val_ratio, i, save_path=save_paths[i], only_eval=True
)
for i in range(cv_num)
]
# we now probe saved models for each fold to check the epoch number
# they are on. When every fold crosses an epoch number, we update
# the progress.
tqdm_epoch = tqdm(range(epochs))
is_done = False
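    # The outer loop advances the progress bar one epoch at a time; the inner
    # while polls the saved checkpoints every 10 seconds until every fold has
    # reached that epoch, and is_done ends everything once all folds finish.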
for epoch in tqdm_epoch:
while True:
epochs_per_cv = OrderedDict()
for cv_idx in range(cv_num):
try:
if os.path.exists(save_paths[cv_idx]):
latest_ckpt = torch.load(save_paths[cv_idx])
if "epoch" not in latest_ckpt:
epochs_per_cv["cv%d" % (cv_idx + 1)] = epochs
continue
else:
continue
epochs_per_cv["cv%d" % (cv_idx + 1)] = latest_ckpt["epoch"]
except Exception:
continue
tqdm_epoch.set_postfix(epochs_per_cv)
if len(epochs_per_cv) == cv_num and min(epochs_per_cv.values()) >= epochs:
is_done = True
if len(epochs_per_cv) == cv_num and min(epochs_per_cv.values()) >= epoch:
break
time.sleep(10)
if is_done:
break
logger.info("getting results...")
pretrain_results = ray.get(reqs)
for r_model, r_cv, r_dict in pretrain_results:
logger.info(
"model=%s cv=%d top1_train=%.4f top1_valid=%.4f"
% (r_model, r_cv + 1, r_dict["top1_train"], r_dict["top1_valid"])
)
logger.info("processed in %.4f secs" % sw.pause("train_no_aug"))
def search(conf):
sw = StopWatch.get()
# region conf vars
conf_dataset = conf["dataset"]
dataroot = conf["dataroot"]
redis_ip = conf["redis"]
conf_loader = conf["autoaug"]["loader"]
conf_model = conf["autoaug"]["model"]
model_type = conf_model["type"]
ds_name = conf_dataset["name"]
aug = conf_loader["aug"]
val_ratio = conf_loader["val_ratio"]
epochs = conf_loader["epochs"]
val_fold = conf_loader["val_fold"]
cv_num = conf_loader["cv_num"]
num_policy = conf["autoaug"]["num_policy"]
num_op = conf["autoaug"]["num_op"]
num_search = conf["autoaug"]["num_search"]
num_result_per_cv = conf["autoaug"]["num_result_per_cv"]
smoke_test = conf["smoke_test"]
resume = conf["resume"]
# endregion
ray.init(
redis_address=redis_ip,
# allocate all GPUs on local node if cluster is not specified
num_gpus=torch.cuda.device_count() if not redis_ip else None,
)
# first train with no aug
_train_no_aug(conf)
# get values from config
num_samples = 4 if smoke_test else num_search
logger.info("----- Search Test-Time Augmentation Policies -----")
sw.start(tag="search")
save_paths = [_get_model_filepath(ds_name, model_type, "ratio%.1f_fold%d" % (val_ratio, i)) for i in range(cv_num)]
copied_c = copy.deepcopy(conf)
ops = augment_list(False)
space = {}
for i in range(num_policy):
for j in range(num_op):
space["policy_%d_%d" % (i, j)] = hp.choice("policy_%d_%d" % (i, j), list(range(0, len(ops))))
space["prob_%d_%d" % (i, j)] = hp.uniform("prob_%d_ %d" % (i, j), 0.0, 1.0)
space["level_%d_%d" % (i, j)] = hp.uniform("level_%d_ %d" % (i, j), 0.0, 1.0)
final_policy_set = []
total_computation = 0
reward_attr = "top1_valid" # top1_valid or minus_loss
    for _ in range(1):  # single pass here; increase the range to repeat the whole search
for val_fold in range(cv_num):
name = "search_%s_%s_fold%d_ratio%.1f" % (ds_name, model_type, val_fold, val_ratio)
# logger.info(name)
register_trainable(name, (lambda augs, rpt: _eval_tta(copy.deepcopy(copied_c), augs, rpt)))
algo = HyperOptSearch(space, max_concurrent=4 * 20, reward_attr=reward_attr)
exp_config = {
name: {
"run": name,
"num_samples": num_samples,
"resources_per_trial": {"gpu": 1},
"stop": {"training_iteration": num_policy},
"config": {
"dataroot": dataroot,
"save_path": save_paths[val_fold],
"val_ratio": val_ratio,
"val_fold": val_fold,
"num_op": num_op,
"num_policy": num_policy,
},
}
}
results = run_experiments(
exp_config,
search_alg=algo,
scheduler=None,
verbose=0,
queue_trials=True,
resume=resume,
raise_on_failed_trial=False,
)
results = [x for x in results if x.last_result is not None]
results = sorted(results, key=lambda x: x.last_result[reward_attr], reverse=True)
# calculate computation usage
for result in results:
total_computation += result.last_result["elapsed_time"]
for result in results[:num_result_per_cv]:
final_policy = policy_decoder(result.config, num_policy, num_op)
logger.info(
"loss=%.12f top1_valid=%.4f %s"
% (result.last_result["minus_loss"], result.last_result["top1_valid"], final_policy)
)
final_policy = remove_deplicates(final_policy)
final_policy_set.extend(final_policy)
logger.info(json.dumps(final_policy_set))
logger.info("final_policy=%d" % len(final_policy_set))
logger.info("processed in %.4f secs, gpu hours=%.4f" % (sw.pause("search"), total_computation / 3600.0))
logger.info(
"----- Train with Augmentations model=%s dataset=%s aug=%s ratio(test)=%.1f -----"
% (model_type, ds_name, aug, val_ratio)
)
sw.start(tag="train_aug")
num_experiments = 5
default_path = [
_get_model_filepath(ds_name, model_type, "ratio%.1f_default%d" % (val_ratio, _)) for _ in range(num_experiments)
]
augment_path = [
_get_model_filepath(ds_name, model_type, "ratio%.1f_augment%d" % (val_ratio, _)) for _ in range(num_experiments)
]
reqs = [
_train_model.remote(copy.deepcopy(copied_c), dataroot, aug, 0.0, 0, save_path=default_path[_], only_eval=True)
for _ in range(num_experiments)
] + [
_train_model.remote(copy.deepcopy(copied_c), dataroot, final_policy_set, 0.0, 0, save_path=augment_path[_])
for _ in range(num_experiments)
]
tqdm_epoch = tqdm(range(epochs))
is_done = False
for epoch in tqdm_epoch:
while True:
            epochs_per_exp = OrderedDict()
            for exp_idx in range(num_experiments):
                try:
                    if os.path.exists(default_path[exp_idx]):
                        latest_ckpt = torch.load(default_path[exp_idx])
                        epochs_per_exp["default_exp%d" % (exp_idx + 1)] = latest_ckpt["epoch"]
                except Exception:
                    pass
                try:
                    if os.path.exists(augment_path[exp_idx]):
                        latest_ckpt = torch.load(augment_path[exp_idx])
                        epochs_per_exp["augment_exp%d" % (exp_idx + 1)] = latest_ckpt["epoch"]
                except Exception:
                    pass
            tqdm_epoch.set_postfix(epochs_per_exp)
            if len(epochs_per_exp) == num_experiments * 2 and min(epochs_per_exp.values()) >= epochs:
                is_done = True
            if len(epochs_per_exp) == num_experiments * 2 and min(epochs_per_exp.values()) >= epoch:
break
time.sleep(10)
if is_done:
break
logger.info("getting results...")
final_results = ray.get(reqs)
for train_mode in ["default", "augment"]:
avg = 0.0
for _ in range(num_experiments):
r_model, r_cv, r_dict = final_results.pop(0)
logger.info("[%s] top1_train=%.4f top1_test=%.4f" % (train_mode, r_dict["top1_train"], r_dict["top1_test"]))
avg += r_dict["top1_test"]
avg /= num_experiments
logger.info("[%s] top1_test average=%.4f (#experiments=%d)" % (train_mode, avg, num_experiments))
logger.info("processed in %.4f secs" % sw.pause("train_aug"))
logger.info(sw)
def _eval_tta(conf, augment, reporter):
Config.set_inst(conf)
# region conf vars
conf_dataset = conf["dataset"]
conf_loader = conf["autoaug"]["loader"]
conf_model = conf["autoaug"]["model"]
ds_name = conf_dataset["name"]
cutout = conf_loader["cutout"]
n_workers = conf_loader["n_workers"]
# endregion
val_ratio, val_fold, save_path = augment["val_ratio"], augment["val_fold"], augment["save_path"]
# setup - provided augmentation rules
aug = policy_decoder(augment, augment["num_policy"], augment["num_op"])
# eval
model = get_model(conf_model, num_class(ds_name))
ckpt = torch.load(save_path)
if "model" in ckpt:
model.load_state_dict(ckpt["model"])
else:
model.load_state_dict(ckpt)
model.eval()
loaders = []
for _ in range(augment["num_policy"]):
tl, validloader, tl2 = get_dataloaders(
augment["dataroot"],
ds_name,
aug,
cutout,
load_train=True,
load_test=True,
val_ratio=val_ratio,
val_fold=val_fold,
n_workers=n_workers,
)
loaders.append(iter(validloader))
del tl, tl2 # TODO: why exclude validloader?
start_t = time.time()
metrics = Accumulator()
loss_fn = torch.nn.CrossEntropyLoss(reduction="none")
try:
while True:
losses = []
corrects = []
for loader in loaders:
data, label = next(loader)
data, label = data.cuda(), label.cuda()
pred = model(data)
loss = loss_fn(pred, label)
losses.append(loss.detach().cpu().numpy())
_, pred = pred.topk(1, 1, True, True)
pred = pred.t()
correct = pred.eq(label.view(1, -1).expand_as(pred)).detach().cpu().numpy()
corrects.append(correct)
del loss, correct, pred, data, label
losses = np.concatenate(losses)
losses_min = np.min(losses, axis=0).squeeze()
corrects = np.concatenate(corrects)
corrects_max = np.max(corrects, axis=0).squeeze()
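            # Scoring uses the best augmented view per sample: the minimum loss and
            # maximum correctness across the num_policy loaders, so a candidate
            # policy is rewarded for its best test-time augmentation.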
metrics.add_dict(
{"minus_loss": -1 * np.sum(losses_min), "correct": np.sum(corrects_max), "cnt": len(corrects_max)}
)
del corrects, corrects_max
except StopIteration:
pass
del model
metrics = metrics / "cnt"
gpu_secs = (time.time() - start_t) * torch.cuda.device_count()
reporter(minus_loss=metrics["minus_loss"], top1_valid=metrics["correct"], elapsed_time=gpu_secs, done=True)
return metrics["correct"]
|
archai/archai/supergraph/utils/augmented_searcher.py/0
|
{
"file_path": "archai/archai/supergraph/utils/augmented_searcher.py",
"repo_id": "archai",
"token_count": 7179
}
| 337 |
__include__: 'darts.yaml' # just use darts defaults
nas:
eval:
model_factory_spec: 'resnet18'
#darts loader/trainer
loader:
train_batch: 128 #96
cutout: 0
trainer:
aux_weight: 0.0
grad_clip: 0.0
drop_path_prob: 0.0 # probability that given edge will be dropped
epochs: 200
optimizer:
type: 'sgd'
lr: 0.0333 #0.025 # init learning rate
decay: 3.0e-4 # pytorch default is 0.0
momentum: 0.9 # pytorch default is 0.0
nesterov: False # pytorch default is False
warmup: null
lr_schedule:
type: 'cosine'
        min_lr: 0.001 # min learning rate to be set in eta_min param of scheduler
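        # cosine anneal: lr_t = min_lr + 0.5 * (lr - min_lr) * (1 + cos(pi * epoch / epochs))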
# WRN schedule
# loader:
# train_batch: 128
# cutout: 0
# trainer:
# aux_weight: 0.0
# grad_clip: 0.0
# drop_path_prob: 0.0 # probability that given edge will be dropped
# epochs: 200
# optimizer:
# type: 'sgd'
# lr: 0.1 # init learning rate
# decay: 5.0e-4 # pytorch default is 0.0
# momentum: 0.9 # pytorch default is 0.0
# lr_schedule:
# type: 'multi_step'
# milestones: [60, 120, 160]
# gamma: 0.2
# Multi-step LR notes:
# rule of thumb is to decay lr by 10x at 50% and 75% of epochs, as in densenet,
# but everyone seems to use their own schedule
# if epochs <= 100:
# return lr_scheduler.MultiStepLR(optimizer, [30, 60, 80])
# elif epochs <= 200: # wide resnet
# return lr_scheduler.MultiStepLR(optimizer, [60, 120, 160]) # gamma=0.2. wd=5e-4
# elif epochs <= 270: # autoaugment
# return lr_scheduler.MultiStepLR(optimizer, [90, 180, 240])
# elif epochs <= 300: # densenet
# return lr_scheduler.MultiStepLR(optimizer, [150, 225])
# else: # extrapolating for autoaug sched
# return lr_scheduler.MultiStepLR(optimizer, [i*90 for i in range(epochs//90)])
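# Worked example for the WRN schedule above (lr=0.1, gamma=0.2, milestones=[60, 120, 160]):
# lr is 0.1 for epochs 0-59, 0.02 for 60-119, 0.004 for 120-159, and 0.0008 from epoch 160 on.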
|
archai/confs/algos/manual.yaml/0
|
{
"file_path": "archai/confs/algos/manual.yaml",
"repo_id": "archai",
"token_count": 899
}
| 338 |