topic (string, 3-96 chars) | wiki (string, 33-127 chars) | url (string, 101-106 chars) | action (string, 7 distinct values) | sent (string, 34-223 chars) | annotation (string, 74-227 chars) | logic (string, 207-5.45k chars) | logic_str (string, 37-493 chars) | interpret (string, 43-471 chars) | num_func (string, 15 distinct values) | nid (string, 13 distinct values) | g_ids (string, 70-455 chars) | g_ids_features (string, 98-670 chars) | g_adj (string, 79-515 chars) | table_header (string, 40-458 chars) | table_cont (large string, 135-4.41k chars) |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
amstel gold race | https://en.wikipedia.org/wiki/Amstel_Gold_Race | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/1-1749567-2.html.csv | comparative | in the amstel gold race there are 4 more kilometers in keutenberg than in fromberg . | {'row_1': '15', 'row_2': '14', 'col': '3', 'col_other': '2', 'relation': 'diff', 'record_mentioned': 'no', 'diff_result': {'diff_value': '4', 'bigger': 'row1'}} | {'func': 'eq', 'args': [{'func': 'diff', 'args': [{'func': 'num_hop', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'name', 'keutenberg'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose name record fuzzily matches to keutenberg .', 'tostr': 'filter_eq { all_rows ; name ; keutenberg }'}, 'kilometer'], 'result': None, 'ind': 2, 'tostr': 'hop { filter_eq { all_rows ; name ; keutenberg } ; kilometer }', 'tointer': 'select the rows whose name record fuzzily matches to keutenberg . take the kilometer record of this row .'}, {'func': 'num_hop', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'name', 'fromberg'], 'result': None, 'ind': 1, 'tointer': 'select the rows whose name record fuzzily matches to fromberg .', 'tostr': 'filter_eq { all_rows ; name ; fromberg }'}, 'kilometer'], 'result': None, 'ind': 3, 'tostr': 'hop { filter_eq { all_rows ; name ; fromberg } ; kilometer }', 'tointer': 'select the rows whose name record fuzzily matches to fromberg . take the kilometer record of this row .'}], 'result': '4', 'ind': 4, 'tostr': 'diff { hop { filter_eq { all_rows ; name ; keutenberg } ; kilometer } ; hop { filter_eq { all_rows ; name ; fromberg } ; kilometer } }'}, '4'], 'result': True, 'ind': 5, 'tostr': 'eq { diff { hop { filter_eq { all_rows ; name ; keutenberg } ; kilometer } ; hop { filter_eq { all_rows ; name ; fromberg } ; kilometer } } ; 4 } = true', 'tointer': 'select the rows whose name record fuzzily matches to keutenberg . take the kilometer record of this row . select the rows whose name record fuzzily matches to fromberg . take the kilometer record of this row . the first record is 4 larger than the second record .'} | eq { diff { hop { filter_eq { all_rows ; name ; keutenberg } ; kilometer } ; hop { filter_eq { all_rows ; name ; fromberg } ; kilometer } } ; 4 } = true | select the rows whose name record fuzzily matches to keutenberg . take the kilometer record of this row . select the rows whose name record fuzzily matches to fromberg . take the kilometer record of this row . the first record is 4 larger than the second record . | 6 | 6 | {'eq_5': 5, 'result_6': 6, 'diff_4': 4, 'num_hop_2': 2, 'filter_str_eq_0': 0, 'all_rows_7': 7, 'name_8': 8, 'keutenberg_9': 9, 'kilometer_10': 10, 'num_hop_3': 3, 'filter_str_eq_1': 1, 'all_rows_11': 11, 'name_12': 12, 'fromberg_13': 13, 'kilometer_14': 14, '4_15': 15} | {'eq_5': 'eq', 'result_6': 'true', 'diff_4': 'diff', 'num_hop_2': 'num_hop', 'filter_str_eq_0': 'filter_str_eq', 'all_rows_7': 'all_rows', 'name_8': 'name', 'keutenberg_9': 'keutenberg', 'kilometer_10': 'kilometer', 'num_hop_3': 'num_hop', 'filter_str_eq_1': 'filter_str_eq', 'all_rows_11': 'all_rows', 'name_12': 'name', 'fromberg_13': 'fromberg', 'kilometer_14': 'kilometer', '4_15': '4'} | {'eq_5': [6], 'result_6': [], 'diff_4': [5], 'num_hop_2': [4], 'filter_str_eq_0': [2], 'all_rows_7': [0], 'name_8': [0], 'keutenberg_9': [0], 'kilometer_10': [2], 'num_hop_3': [4], 'filter_str_eq_1': [3], 'all_rows_11': [1], 'name_12': [1], 'fromberg_13': [1], 'kilometer_14': [3], '4_15': [5]} | ['number', 'name', 'kilometer', 'location', 'length ( in m )', 'average climb ( % )'] | [['17', 'plettenbergweg', '159', 'eys', '1000', '42'], ['18', 'eyserweg', '160', 'eys', '2200', '43'], ['19', 'hulsberg', '165', 'simpelveld', '1000', '77'], ['20', 'vrakelberg', '171', 'voerendaal', '700', '79'], ['21', 'sibbergrubbe', '179', 'valkenburg', '2100', '41'], ['22', 'cauberg', '184', 'valkenburg', '1200', '58'], ['23', 'geulhemmerweg', '188', 'valkenburg', '1000', '62'], ['24', 'bemelerberg', '201', 'margraten', '900', '50'], ['25', 'wolfsberg', '218', 'noorbeek', '800', '44'], ['26', 'loorberg', '224', 'slenaken', '1500', '55'], ['27', 'gulperberg', '232', 'gulpen', '700', '81'], ['28', 'kruisberg', '238', 'eys', '800', '75'], ['29', 'eyserbosweg', '240', 'eys', '1100', '81'], ['30', 'fromberg', '244', 'voerendaal', '1600', '40'], ['31', 'keutenberg', '248', 'valkenburg', '700', '94']] |
2010 - 11 slovak superliga | https://en.wikipedia.org/wiki/2010%E2%80%9311_Slovak_Superliga | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/1-27683516-3.html.csv | comparative | karol marko departed as manager before ivan vrabec departed . | {'row_1': '5', 'row_2': '7', 'col': '4', 'col_other': '2', 'relation': 'less', 'record_mentioned': 'no', 'diff_result': None} | {'func': 'less', 'args': [{'func': 'str_hop', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'outgoing manager', 'karol marko'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose outgoing manager record fuzzily matches to karol marko .', 'tostr': 'filter_eq { all_rows ; outgoing manager ; karol marko }'}, 'date of vacancy'], 'result': None, 'ind': 2, 'tostr': 'hop { filter_eq { all_rows ; outgoing manager ; karol marko } ; date of vacancy }', 'tointer': 'select the rows whose outgoing manager record fuzzily matches to karol marko . take the date of vacancy record of this row .'}, {'func': 'str_hop', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'outgoing manager', 'ivan vrabec'], 'result': None, 'ind': 1, 'tointer': 'select the rows whose outgoing manager record fuzzily matches to ivan vrabec .', 'tostr': 'filter_eq { all_rows ; outgoing manager ; ivan vrabec }'}, 'date of vacancy'], 'result': None, 'ind': 3, 'tostr': 'hop { filter_eq { all_rows ; outgoing manager ; ivan vrabec } ; date of vacancy }', 'tointer': 'select the rows whose outgoing manager record fuzzily matches to ivan vrabec . take the date of vacancy record of this row .'}], 'result': True, 'ind': 4, 'tostr': 'less { hop { filter_eq { all_rows ; outgoing manager ; karol marko } ; date of vacancy } ; hop { filter_eq { all_rows ; outgoing manager ; ivan vrabec } ; date of vacancy } } = true', 'tointer': 'select the rows whose outgoing manager record fuzzily matches to karol marko . take the date of vacancy record of this row . select the rows whose outgoing manager record fuzzily matches to ivan vrabec . take the date of vacancy record of this row . the first record is less than the second record .'} | less { hop { filter_eq { all_rows ; outgoing manager ; karol marko } ; date of vacancy } ; hop { filter_eq { all_rows ; outgoing manager ; ivan vrabec } ; date of vacancy } } = true | select the rows whose outgoing manager record fuzzily matches to karol marko . take the date of vacancy record of this row . select the rows whose outgoing manager record fuzzily matches to ivan vrabec . take the date of vacancy record of this row . the first record is less than the second record . | 5 | 5 | {'less_4': 4, 'result_5': 5, 'str_hop_2': 2, 'filter_str_eq_0': 0, 'all_rows_6': 6, 'outgoing manager_7': 7, 'karol marko_8': 8, 'date of vacancy_9': 9, 'str_hop_3': 3, 'filter_str_eq_1': 1, 'all_rows_10': 10, 'outgoing manager_11': 11, 'ivan vrabec_12': 12, 'date of vacancy_13': 13} | {'less_4': 'less', 'result_5': 'true', 'str_hop_2': 'str_hop', 'filter_str_eq_0': 'filter_str_eq', 'all_rows_6': 'all_rows', 'outgoing manager_7': 'outgoing manager', 'karol marko_8': 'karol marko', 'date of vacancy_9': 'date of vacancy', 'str_hop_3': 'str_hop', 'filter_str_eq_1': 'filter_str_eq', 'all_rows_10': 'all_rows', 'outgoing manager_11': 'outgoing manager', 'ivan vrabec_12': 'ivan vrabec', 'date of vacancy_13': 'date of vacancy'} | {'less_4': [5], 'result_5': [], 'str_hop_2': [4], 'filter_str_eq_0': [2], 'all_rows_6': [0], 'outgoing manager_7': [0], 'karol marko_8': [0], 'date of vacancy_9': [2], 'str_hop_3': [4], 'filter_str_eq_1': [3], 'all_rows_10': [1], 'outgoing manager_11': [1], 'ivan vrabec_12': [1], 'date of vacancy_13': [3]} | ['team', 'outgoing manager', 'manner of departure', 'date of vacancy', 'table', 'incoming manager', 'date of appointment'] | [['tatran prešov', 'roman pivarník', 'sacked', '22 august 2010', 'pre - season', 'ladislav pecko', '23 august 2010'], ['mfk košice', 'žarko djurović', 'mutual agreement', '28 september 2010', 'pre - season', 'štefan tarkovič', '28 september 2010'], ['mfk ružomberok', 'ladislav jurkemik', 'mutual agreement', '10 october 2010', 'pre - season', 'goran milojević', '11 octobert 2010'], ['slovan bratislava', 'jozef jankech', 'mutual agreement', '13 october 2010', 'pre - season', 'karel jarolím', '13 octobert 2010'], ['dukla banská bystrica', 'karol marko', 'mutual agreement', '30 october 2010', 'pre - season', 'štefan zaťko', '8 november 2010'], ['fc nitra', 'ivan galád', 'sacked', '24 november 2010', 'pre - season', 'ivan vrabec', '21 december 2010'], ['fc nitra', 'ivan vrabec', 'sacked', '13 march 2011', 'pre - season', 'cyril stachura', '14 march 2011'], ['spartak trnava', 'dušan radolský', 'sacked', '19 march 2011', 'pre - season', 'peter zelenský', '22 march 2011']] |
kumar sanu | https://en.wikipedia.org/wiki/Kumar_Sanu | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-1368369-1.html.csv | comparative | kurmar sanu 's song ek ladki ko dekha was from one year after the song yeh kaali kaali aankhen . | {'row_1': '5', 'row_2': '4', 'col': '1', 'col_other': '2', 'relation': 'greater', 'record_mentioned': 'no', 'diff_result': None} | {'func': 'greater', 'args': [{'func': 'num_hop', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'song', 'ek ladki ko dekha'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose song record fuzzily matches to ek ladki ko dekha .', 'tostr': 'filter_eq { all_rows ; song ; ek ladki ko dekha }'}, 'year'], 'result': None, 'ind': 2, 'tostr': 'hop { filter_eq { all_rows ; song ; ek ladki ko dekha } ; year }', 'tointer': 'select the rows whose song record fuzzily matches to ek ladki ko dekha . take the year record of this row .'}, {'func': 'num_hop', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'song', 'yeh kaali kaali aankhen'], 'result': None, 'ind': 1, 'tointer': 'select the rows whose song record fuzzily matches to yeh kaali kaali aankhen .', 'tostr': 'filter_eq { all_rows ; song ; yeh kaali kaali aankhen }'}, 'year'], 'result': None, 'ind': 3, 'tostr': 'hop { filter_eq { all_rows ; song ; yeh kaali kaali aankhen } ; year }', 'tointer': 'select the rows whose song record fuzzily matches to yeh kaali kaali aankhen . take the year record of this row .'}], 'result': True, 'ind': 4, 'tostr': 'greater { hop { filter_eq { all_rows ; song ; ek ladki ko dekha } ; year } ; hop { filter_eq { all_rows ; song ; yeh kaali kaali aankhen } ; year } } = true', 'tointer': 'select the rows whose song record fuzzily matches to ek ladki ko dekha . take the year record of this row . select the rows whose song record fuzzily matches to yeh kaali kaali aankhen . take the year record of this row . the first record is greater than the second record .'} | greater { hop { filter_eq { all_rows ; song ; ek ladki ko dekha } ; year } ; hop { filter_eq { all_rows ; song ; yeh kaali kaali aankhen } ; year } } = true | select the rows whose song record fuzzily matches to ek ladki ko dekha . take the year record of this row . select the rows whose song record fuzzily matches to yeh kaali kaali aankhen . take the year record of this row . the first record is greater than the second record . | 5 | 5 | {'greater_4': 4, 'result_5': 5, 'num_hop_2': 2, 'filter_str_eq_0': 0, 'all_rows_6': 6, 'song_7': 7, 'ek ladki ko dekha_8': 8, 'year_9': 9, 'num_hop_3': 3, 'filter_str_eq_1': 1, 'all_rows_10': 10, 'song_11': 11, 'yeh kaali kaali aankhen_12': 12, 'year_13': 13} | {'greater_4': 'greater', 'result_5': 'true', 'num_hop_2': 'num_hop', 'filter_str_eq_0': 'filter_str_eq', 'all_rows_6': 'all_rows', 'song_7': 'song', 'ek ladki ko dekha_8': 'ek ladki ko dekha', 'year_9': 'year', 'num_hop_3': 'num_hop', 'filter_str_eq_1': 'filter_str_eq', 'all_rows_10': 'all_rows', 'song_11': 'song', 'yeh kaali kaali aankhen_12': 'yeh kaali kaali aankhen', 'year_13': 'year'} | {'greater_4': [5], 'result_5': [], 'num_hop_2': [4], 'filter_str_eq_0': [2], 'all_rows_6': [0], 'song_7': [0], 'ek ladki ko dekha_8': [0], 'year_9': [2], 'num_hop_3': [4], 'filter_str_eq_1': [3], 'all_rows_10': [1], 'song_11': [1], 'yeh kaali kaali aankhen_12': [1], 'year_13': [3]} | ['year', 'song', 'film', 'music director ( s )', 'lyricist'] | [['1991', 'ab tere bin', 'aashiqui', 'nadeem - shravan', 'sameer'], ['1992', 'mera dil bhi kitna pagal hai', 'saajan', 'nadeem - shravan', 'sameer'], ['1993', 'sochenge tumhe pyaar', 'deewana', 'nadeem - shravan', 'sameer'], ['1994', 'yeh kaali kaali aankhen', 'baazigar', 'anu malik', 'rani malik'], ['1995', 'ek ladki ko dekha', '1942 : a love story', 'rd burman', 'javed akhtar']] |
1926 in brazilian football | https://en.wikipedia.org/wiki/1926_in_Brazilian_football | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-15385631-2.html.csv | count | in 1926 in brazilian football , when a team scored over 10 points , there were 2 teams that had 2 draws . | {'scope': 'subset', 'criterion': 'equal', 'value': '2', 'result': '2', 'col': '5', 'subset': {'col': '3', 'criterion': 'greater_than', 'value': '10'}} | {'func': 'eq', 'args': [{'func': 'count', 'args': [{'func': 'filter_eq', 'args': [{'func': 'filter_greater', 'args': ['all_rows', 'points', '10'], 'result': None, 'ind': 0, 'tostr': 'filter_greater { all_rows ; points ; 10 }', 'tointer': 'select the rows whose points record is greater than 10 .'}, 'drawn', '2'], 'result': None, 'ind': 1, 'tointer': 'select the rows whose points record is greater than 10 . among these rows , select the rows whose drawn record is equal to 2 .', 'tostr': 'filter_eq { filter_greater { all_rows ; points ; 10 } ; drawn ; 2 }'}], 'result': '2', 'ind': 2, 'tostr': 'count { filter_eq { filter_greater { all_rows ; points ; 10 } ; drawn ; 2 } }', 'tointer': 'select the rows whose points record is greater than 10 . among these rows , select the rows whose drawn record is equal to 2 . the number of such rows is 2 .'}, '2'], 'result': True, 'ind': 3, 'tostr': 'eq { count { filter_eq { filter_greater { all_rows ; points ; 10 } ; drawn ; 2 } } ; 2 } = true', 'tointer': 'select the rows whose points record is greater than 10 . among these rows , select the rows whose drawn record is equal to 2 . the number of such rows is 2 .'} | eq { count { filter_eq { filter_greater { all_rows ; points ; 10 } ; drawn ; 2 } } ; 2 } = true | select the rows whose points record is greater than 10 . among these rows , select the rows whose drawn record is equal to 2 . the number of such rows is 2 . | 4 | 4 | {'eq_3': 3, 'result_4': 4, 'count_2': 2, 'filter_eq_1': 1, 'filter_greater_0': 0, 'all_rows_5': 5, 'points_6': 6, '10_7': 7, 'drawn_8': 8, '2_9': 9, '2_10': 10} | {'eq_3': 'eq', 'result_4': 'true', 'count_2': 'count', 'filter_eq_1': 'filter_eq', 'filter_greater_0': 'filter_greater', 'all_rows_5': 'all_rows', 'points_6': 'points', '10_7': '10', 'drawn_8': 'drawn', '2_9': '2', '2_10': '2'} | {'eq_3': [4], 'result_4': [], 'count_2': [3], 'filter_eq_1': [2], 'filter_greater_0': [1], 'all_rows_5': [0], 'points_6': [0], '10_7': [0], 'drawn_8': [1], '2_9': [1], '2_10': [3]} | ['position', 'team', 'points', 'played', 'drawn', 'lost', 'against', 'difference'] | [['1', 'paulistano', '24', '14', '2', '1', '14', '41'], ['2', 'germnia', '18', '14', '2', '4', '28', '10'], ['3', 'independência', '17', '14', '3', '4', '30', '7'], ['4', 'antártica', '17', '14', '5', '3', '19', '6'], ['5', 'aa palmeiras', '15', '14', '3', '5', '24', '4'], ['6', 'atlético santista', '11', '14', '1', '8', '32', '- 2'], ['7', 'paulista', '8', '14', '4', '8', '46', '- 22'], ['8', 'britannia', '2', '14', '2', '12', '64', '- 44']] |
united states senate election in arizona , 2004 | https://en.wikipedia.org/wiki/United_States_Senate_election_in_Arizona%2C_2004 | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/1-19681738-1.html.csv | aggregation | mccain received an average of 73.54 % of the votes in all arizona counties in 2004 . | {'scope': 'all', 'col': '7', 'type': 'average', 'result': '73.54', 'subset': None} | {'func': 'round_eq', 'args': [{'func': 'avg', 'args': ['all_rows', 'mccain %'], 'result': '73.54', 'ind': 0, 'tostr': 'avg { all_rows ; mccain % }'}, '73.54'], 'result': True, 'ind': 1, 'tostr': 'round_eq { avg { all_rows ; mccain % } ; 73.54 } = true', 'tointer': 'the average of the mccain % record of all rows is 73.54 .'} | round_eq { avg { all_rows ; mccain % } ; 73.54 } = true | the average of the mccain % record of all rows is 73.54 . | 2 | 2 | {'eq_1': 1, 'result_2': 2, 'avg_0': 0, 'all_rows_3': 3, 'mccain %_4': 4, '73.54_5': 5} | {'eq_1': 'eq', 'result_2': 'true', 'avg_0': 'avg', 'all_rows_3': 'all_rows', 'mccain %_4': 'mccain %', '73.54_5': '73.54'} | {'eq_1': [2], 'result_2': [], 'avg_0': [1], 'all_rows_3': [0], 'mccain %_4': [0], '73.54_5': [1]} | ['county', 'starky', 'starky %', 'hancock', 'hancock %', 'mccain', 'mccain %', 'total'] | [['apache', '9588', '40.95 %', '905', '3.86 %', '12923', '55.19 %', '23416'], ['cochise', '9555', '21.80 %', '1394', '3.18 %', '32879', '75.02 %', '43828'], ['coconino', '13520', '26.58 %', '1504', '2.96 %', '35849', '70.47 %', '50873'], ['gila', '4291', '20.96 %', '632', '3.09 %', '15551', '75.95 %', '20474'], ['graham', '2000', '19.06 %', '322', '3.07 %', '8171', '77.87 %', '10493'], ['greenlee', '746', '25.03 %', '68', '2.28 %', '2166', '72.68 %', '2980'], ['la paz', '965', '19.51 %', '156', '3.15 %', '3826', '77.34 %', '4947'], ['maricopa', '216124', '18.58 %', '29769', '2.56 %', '917527', '78.86 %', '1163420'], ['mohave', '10423', '18.44 %', '1686', '2.98 %', '44402', '78.57 %', '56511'], ['navajo', '7434', '23.42 %', '1222', '3.85 %', '23091', '72.73 %', '31747'], ['pima', '89483', '25.17 %', '7980', '2.24 %', '258010', '72.58 %', '355473'], ['pinal', '13595', '21.45 %', '1692', '2.67 %', '48094', '75.88 %', '63381'], ['santa cruz', '3583', '31.60 %', '252', '2.22 %', '7502', '66.17 %', '11337'], ['yavapai', '14852', '17.41 %', '3160', '3.70 %', '67312', '78.89 %', '85324'], ['yuma', '8348', '22.28 %', '1056', '2.82 %', '28069', '74.90 %', '37473']] |
1987 toronto blue jays season | https://en.wikipedia.org/wiki/1987_Toronto_Blue_Jays_season | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-12207430-2.html.csv | aggregation | the average attendance for the 1987 toronto blue jays games was 22526 . | {'scope': 'all', 'col': '5', 'type': 'average', 'result': '22526', 'subset': None} | {'func': 'round_eq', 'args': [{'func': 'avg', 'args': ['all_rows', 'attendance'], 'result': '22526', 'ind': 0, 'tostr': 'avg { all_rows ; attendance }'}, '22526'], 'result': True, 'ind': 1, 'tostr': 'round_eq { avg { all_rows ; attendance } ; 22526 } = true', 'tointer': 'the average of the attendance record of all rows is 22526 .'} | round_eq { avg { all_rows ; attendance } ; 22526 } = true | the average of the attendance record of all rows is 22526 . | 2 | 2 | {'eq_1': 1, 'result_2': 2, 'avg_0': 0, 'all_rows_3': 3, 'attendance_4': 4, '22526_5': 5} | {'eq_1': 'eq', 'result_2': 'true', 'avg_0': 'avg', 'all_rows_3': 'all_rows', 'attendance_4': 'attendance', '22526_5': '22526'} | {'eq_1': [2], 'result_2': [], 'avg_0': [1], 'all_rows_3': [0], 'attendance_4': [0], '22526_5': [1]} | ['date', 'opponent', 'score', 'loss', 'attendance', 'record'] | [['april 6', 'indians', '7 - 3', 'candiotti ( 0 - 1 )', '40404', '1 - 0'], ['april 8', 'indians', '5 - 1', 'swindell ( 0 - 1 )', '20388', '2 - 0'], ['april 9', 'indians', '14 - 3', 'johnson ( 0 - 1 )', '21088', '2 - 1'], ['april 10', 'red sox', '3 - 0', 'stieb ( 0 - 1 )', '33679', '2 - 2'], ['april 11', 'red sox', '11 - 1', 'clemens ( 0 - 1 )', '33365', '3 - 2'], ['april 12', 'red sox', '8 - 3', 'clancy ( 1 - 1 )', '27521', '3 - 3'], ['april 14', 'white sox', '4 - 3 ( 13 )', 'mckeon ( 0 - 1 )', '17324', '4 - 3'], ['april 15', 'white sox', '5 - 0', 'stieb ( 0 - 2 )', '17285', '4 - 4'], ['april 16', 'red sox', '4 - 2', 'clemens ( 0 - 2 )', '22065', '5 - 4'], ['april 17', 'red sox', '10 - 5', 'stanley ( 1 - 2 )', '36400', '6 - 4'], ['april 18', 'red sox', '6 - 4', 'clancy ( 1 - 2 )', '39107', '6 - 5'], ['april 19', 'red sox', '4 - 1', 'johnson ( 0 - 2 )', '28140', '6 - 6'], ['april 20', 'indians', '8 - 7 ( 10 )', 'jones ( 0 - 1 )', '11164', '7 - 6'], ['april 21', 'indians', '5 - 0', 'key ( 3 - 1 )', '7203', '7 - 7'], ['april 22', 'indians', '6 - 3', 'carlton ( 1 - 2 )', '6000', '8 - 7'], ['april 24', 'white sox', '4 - 2 ( 10 )', 'james ( 0 - 1 )', '10105', '9 - 7'], ['april 25', 'white sox', '5 - 4', 'eichhorn ( 3 - 1 )', '18644', '9 - 8'], ['april 26', 'white sox', '5 - 2', 'deleã cubicn ( 2 - 2 )', '20443', '10 - 8'], ['april 28', 'twins', '5 - 1', 'viola ( 1 - 3 )', '21182', '11 - 8'], ['april 29', 'twins', '8 - 1', 'smithson ( 3 - 1 )', '19020', '12 - 8']] |
hey venus ! | https://en.wikipedia.org/wiki/Hey_Venus%21 | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-10647532-1.html.csv | majority | on most of the occasions , hey venus ! was released by rough trade records . | {'scope': 'all', 'col': '3', 'most_or_all': 'most', 'criterion': 'equal', 'value': 'rough trade records', 'subset': None} | {'func': 'most_str_eq', 'args': ['all_rows', 'label', 'rough trade records'], 'result': True, 'ind': 0, 'tointer': 'for the label records of all rows , most of them fuzzily match to rough trade records .', 'tostr': 'most_eq { all_rows ; label ; rough trade records } = true'} | most_eq { all_rows ; label ; rough trade records } = true | for the label records of all rows , most of them fuzzily match to rough trade records . | 1 | 1 | {'most_str_eq_0': 0, 'result_1': 1, 'all_rows_2': 2, 'label_3': 3, 'rough trade records_4': 4} | {'most_str_eq_0': 'most_str_eq', 'result_1': 'true', 'all_rows_2': 'all_rows', 'label_3': 'label', 'rough trade records_4': 'rough trade records'} | {'most_str_eq_0': [1], 'result_1': [], 'all_rows_2': [0], 'label_3': [0], 'rough trade records_4': [0]} | ['region', 'date', 'label', 'format', 'catalogue'] | [['united kingdom', '27 august 2007', 'rough trade records', 'vinyl record', 'rtradlp 346'], ['united kingdom', '27 august 2007', 'rough trade records', 'compact disc', 'rtradcd 346'], ['united kingdom', '27 august 2007', 'rough trade records', 'download', '-'], ['united states', '28 august 2007', 'rough trade america', 'vinyl record', 'rt - 346 - 1'], ['united states', '28 august 2007', 'rough trade america', 'download', 'rt - 346 - 5'], ['united states', '22 january 2008', 'rough trade america', 'double compact disc', 'rtradcd 423'], ['japan', '12 september 2007', 'rough trade japan', 'compact disc', 'xqcy - 1003']] |
wru division two east | https://en.wikipedia.org/wiki/WRU_Division_Two_East | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/1-12807904-3.html.csv | ordinal | the treorchy rfc club recorded the 2nd highest number of wins in the wru division two east league . | {'row': '3', 'col': '3', 'order': '2', 'col_other': '1', 'max_or_min': 'max_to_min', 'value_mentioned': 'no', 'scope': 'all', 'subset': None} | {'func': 'str_eq', 'args': [{'func': 'str_hop', 'args': [{'func': 'nth_argmax', 'args': ['all_rows', 'won', '2'], 'result': None, 'ind': 0, 'tostr': 'nth_argmax { all_rows ; won ; 2 }'}, 'club'], 'result': 'treorchy rfc', 'ind': 1, 'tostr': 'hop { nth_argmax { all_rows ; won ; 2 } ; club }'}, 'treorchy rfc'], 'result': True, 'ind': 2, 'tostr': 'eq { hop { nth_argmax { all_rows ; won ; 2 } ; club } ; treorchy rfc } = true', 'tointer': 'select the row whose won record of all rows is 2nd maximum . the club record of this row is treorchy rfc .'} | eq { hop { nth_argmax { all_rows ; won ; 2 } ; club } ; treorchy rfc } = true | select the row whose won record of all rows is 2nd maximum . the club record of this row is treorchy rfc . | 3 | 3 | {'str_eq_2': 2, 'result_3': 3, 'str_hop_1': 1, 'nth_argmax_0': 0, 'all_rows_4': 4, 'won_5': 5, '2_6': 6, 'club_7': 7, 'treorchy rfc_8': 8} | {'str_eq_2': 'str_eq', 'result_3': 'true', 'str_hop_1': 'str_hop', 'nth_argmax_0': 'nth_argmax', 'all_rows_4': 'all_rows', 'won_5': 'won', '2_6': '2', 'club_7': 'club', 'treorchy rfc_8': 'treorchy rfc'} | {'str_eq_2': [3], 'result_3': [], 'str_hop_1': [2], 'nth_argmax_0': [1], 'all_rows_4': [0], 'won_5': [0], '2_6': [0], 'club_7': [1], 'treorchy rfc_8': [2]} | ['club', 'played', 'won', 'drawn', 'lost', 'points for', 'points against', 'tries for', 'tries against', 'try bonus', 'losing bonus', 'points'] | [['club', 'played', 'won', 'drawn', 'lost', 'points for', 'points against', 'tries for', 'tries against', 'try bonus', 'losing bonus', 'points'], ['gilfach goch rfc', '22', '16', '1', '5', '560', '343', '65', '37', '7', '3', '76'], ['treorchy rfc', '22', '15', '0', '7', '636', '382', '79', '44', '10', '2', '72'], ['rhydyfelin rfc', '22', '13', '2', '7', '525', '431', '73', '51', '11', '4', '71'], ['mountain ash rfc', '22', '13', '3', '6', '404', '292', '50', '33', '6', '3', '67'], ['brynmawr rfc', '22', '11', '0', '11', '508', '406', '65', '47', '9', '7', '60'], ['ynysybwl rfc', '22', '10', '0', '12', '416', '453', '55', '54', '7', '5', '52'], ['llantrisant rfc', '22', '10', '1', '11', '438', '532', '54', '69', '5', '5', '52'], ['penallta rfc', '22', '11', '0', '11', '416', '488', '50', '63', '2', '2', '48'], ['llantwit fardre rfc', '22', '10', '1', '11', '392', '470', '50', '60', '2', '1', '45'], ['abercynon rfc', '22', '8', '0', '14', '418', '546', '41', '73', '5', '3', '40'], ['newport saracens rfc', '22', '6', '1', '15', '365', '453', '49', '56', '3', '6', '35'], ['garndiffaith rfc', '22', '4', '1', '17', '393', '675', '45', '89', '5', '4', '27']] |
southwestern conference ( illinois ) | https://en.wikipedia.org/wiki/Southwestern_Conference_%28Illinois%29 | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/1-27653955-1.html.csv | aggregation | the average enrollment across all schools in the southwestern conference in illinois is about 2282 . | {'scope': 'all', 'col': '5', 'type': 'average', 'result': '2282', 'subset': None} | {'func': 'round_eq', 'args': [{'func': 'avg', 'args': ['all_rows', 'enrollment'], 'result': '2282', 'ind': 0, 'tostr': 'avg { all_rows ; enrollment }'}, '2282'], 'result': True, 'ind': 1, 'tostr': 'round_eq { avg { all_rows ; enrollment } ; 2282 } = true', 'tointer': 'the average of the enrollment record of all rows is 2282 .'} | round_eq { avg { all_rows ; enrollment } ; 2282 } = true | the average of the enrollment record of all rows is 2282 . | 2 | 2 | {'eq_1': 1, 'result_2': 2, 'avg_0': 0, 'all_rows_3': 3, 'enrollment_4': 4, '2282_5': 5} | {'eq_1': 'eq', 'result_2': 'true', 'avg_0': 'avg', 'all_rows_3': 'all_rows', 'enrollment_4': 'enrollment', '2282_5': '2282'} | {'eq_1': [2], 'result_2': [], 'avg_0': [1], 'all_rows_3': [0], 'enrollment_4': [0], '2282_5': [1]} | ['school', 'location', 'mascot', 'colors', 'enrollment', 'ihsa classes 2 / 3 / 4', 'ihsa music class', 'ihsa football class', 'ihsa cheerleading class'] | [['alton high school', 'alton , il', 'redbirds', 'red , gray', '2135', 'aa / 3a / 4a', 'aa', '7a', 'large squad'], ['belleville east high school', 'belleville , il', 'lancers', 'columbia blue , navy blue', '2600', 'aa / 3a / 4a', 'aa', '8a', 'large squad'], ['belleville west high school', 'belleville , il', 'maroons', 'maroon , white', '2434', 'aa / 3a / 4a', 'aa', '7a', 'large squad'], ['collinsville high school', 'collinsville , il', 'kahoks', 'purple , white', '2020', 'aa / 3a / 4a', 'aa', '7a', 'large squad'], ['east st louis senior high school', 'east st louis , il', 'flyers / flyerettes', 'orange , blue', '2146', 'aa / 3a / 4a', 'aa', '7a', 'large squad'], ['edwardsville high school', 'edwardsville , il', 'tigers', 'orange , black', '2514', 'aa / 3a / 4a', 'aa', '8a', 'large squad'], ['granite city high school', 'granite city , il', 'warriors', 'red , black , white', '2129', 'aa / 3a / 4a', 'aa', '7a', 'large squad']] |
2008 thailand national games | https://en.wikipedia.org/wiki/2008_Thailand_National_Games | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-14892957-1.html.csv | ordinal | chonburi received the 2nd highest total medal count in the 2008 thailand national games . | {'row': '2', 'col': '6', 'order': '2', 'col_other': '2', 'max_or_min': 'max_to_min', 'value_mentioned': 'no', 'scope': 'all', 'subset': None} | {'func': 'str_eq', 'args': [{'func': 'str_hop', 'args': [{'func': 'nth_argmax', 'args': ['all_rows', 'total', '2'], 'result': None, 'ind': 0, 'tostr': 'nth_argmax { all_rows ; total ; 2 }'}, 'province'], 'result': 'chonburi', 'ind': 1, 'tostr': 'hop { nth_argmax { all_rows ; total ; 2 } ; province }'}, 'chonburi'], 'result': True, 'ind': 2, 'tostr': 'eq { hop { nth_argmax { all_rows ; total ; 2 } ; province } ; chonburi } = true', 'tointer': 'select the row whose total record of all rows is 2nd maximum . the province record of this row is chonburi .'} | eq { hop { nth_argmax { all_rows ; total ; 2 } ; province } ; chonburi } = true | select the row whose total record of all rows is 2nd maximum . the province record of this row is chonburi . | 3 | 3 | {'str_eq_2': 2, 'result_3': 3, 'str_hop_1': 1, 'nth_argmax_0': 0, 'all_rows_4': 4, 'total_5': 5, '2_6': 6, 'province_7': 7, 'chonburi_8': 8} | {'str_eq_2': 'str_eq', 'result_3': 'true', 'str_hop_1': 'str_hop', 'nth_argmax_0': 'nth_argmax', 'all_rows_4': 'all_rows', 'total_5': 'total', '2_6': '2', 'province_7': 'province', 'chonburi_8': 'chonburi'} | {'str_eq_2': [3], 'result_3': [], 'str_hop_1': [2], 'nth_argmax_0': [1], 'all_rows_4': [0], 'total_5': [0], '2_6': [0], 'province_7': [1], 'chonburi_8': [2]} | ['rank', 'province', 'gold', 'silver', 'bronze', 'total'] | [['1', 'bangkok', '125', '90', '76', '291'], ['2', 'chonburi', '44', '34', '48', '126'], ['3', 'chiang mai', '37', '34', '41', '112'], ['4', 'phitsanulok', '24', '15', '31', '70'], ['5', 'suphan buri', '21', '27', '24', '72'], ['6', 'nakhon ratchasima', '21', '21', '31', '73'], ['7', 'nakhon si thammarat', '11', '10', '15', '36'], ['8', 'khon kaen', '9', '15', '8', '32'], ['9', 'pathum thani', '9', '11', '9', '29'], ['10', 'si sa ket', '9', '6', '21', '36']] |
billy casper | https://en.wikipedia.org/wiki/Billy_Casper | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-1544891-3.html.csv | majority | for the tournaments that billy casper participated in , all of them were in the 1980s . | {'scope': 'all', 'col': '1', 'most_or_all': 'all', 'criterion': 'fuzzily_match', 'value': '198', 'subset': None} | {'func': 'all_str_eq', 'args': ['all_rows', 'date', '198'], 'result': True, 'ind': 0, 'tointer': 'for the date records of all rows , all of them fuzzily match to 198 .', 'tostr': 'all_eq { all_rows ; date ; 198 } = true'} | all_eq { all_rows ; date ; 198 } = true | for the date records of all rows , all of them fuzzily match to 198 . | 1 | 1 | {'all_str_eq_0': 0, 'result_1': 1, 'all_rows_2': 2, 'date_3': 3, '198_4': 4} | {'all_str_eq_0': 'all_str_eq', 'result_1': 'true', 'all_rows_2': 'all_rows', 'date_3': 'date', '198_4': '198'} | {'all_str_eq_0': [1], 'result_1': [], 'all_rows_2': [0], 'date_3': [0], '198_4': [0]} | ['date', 'tournament', 'winning score', 'margin of victory', 'runner ( s ) - up'] | [['aug 28 , 1982', 'shootout at jeremy ranch', '- 9 ( 74 + 71 + 69 + 65 = 279 )', '1 stroke', 'miller barber , don january'], ['sep 19 , 1982', 'merrill lynch / golf digest commemorative pro - am', '- 10 ( 68 + 7 + 68 = 206 )', 'playoff', 'bob toski'], ['jul 25 , 1983', 'us senior open', '+ 4 ( 73 + 73 + 69 + 73 = 288 )', 'playoff', 'rod funseth'], ['apr 22 , 1984', 'senior pga tour roundup', '- 14 ( 68 + 69 + 65 = 202 )', '2 strokes', 'bob stone'], ['mar 15 , 1987', 'del e webb arizona classic', '- 15 ( 68 + 65 + 68 = 201 )', '5 strokes', 'bob charles , dale douglass'], ['jun 28 , 1987', 'greater grand rapids open', '- 13 ( 69 + 68 + 63 = 200 )', '3 strokes', 'miller barber'], ['may 8 , 1988', 'vantage at the dominion', '- 14 ( 70 + 68 + 67 = 205 )', '1 stroke', 'chi - chi rodríguez'], ['jun 12 , 1988', 'mazda senior tournament players championship', '- 10 ( 69 + 68 + 74 + 67 = 278 )', '2 strokes', 'al geiberger'], ['oct 22 , 1989', 'transamerica senior golf championship', '- 9 ( 69 + 70 + 68 = 207 )', '3 strokes', 'al geiberger']] |
1969 italian grand prix | https://en.wikipedia.org/wiki/1969_Italian_Grand_Prix | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-1122409-1.html.csv | count | five driver finished with 68 laps in the 1969 italian grand prix . | {'scope': 'all', 'criterion': 'equal', 'value': '68', 'result': '5', 'col': '3', 'subset': None} | {'func': 'eq', 'args': [{'func': 'count', 'args': [{'func': 'filter_eq', 'args': ['all_rows', 'laps', '68'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose laps record is equal to 68 .', 'tostr': 'filter_eq { all_rows ; laps ; 68 }'}], 'result': '5', 'ind': 1, 'tostr': 'count { filter_eq { all_rows ; laps ; 68 } }', 'tointer': 'select the rows whose laps record is equal to 68 . the number of such rows is 5 .'}, '5'], 'result': True, 'ind': 2, 'tostr': 'eq { count { filter_eq { all_rows ; laps ; 68 } } ; 5 } = true', 'tointer': 'select the rows whose laps record is equal to 68 . the number of such rows is 5 .'} | eq { count { filter_eq { all_rows ; laps ; 68 } } ; 5 } = true | select the rows whose laps record is equal to 68 . the number of such rows is 5 . | 3 | 3 | {'eq_2': 2, 'result_3': 3, 'count_1': 1, 'filter_eq_0': 0, 'all_rows_4': 4, 'laps_5': 5, '68_6': 6, '5_7': 7} | {'eq_2': 'eq', 'result_3': 'true', 'count_1': 'count', 'filter_eq_0': 'filter_eq', 'all_rows_4': 'all_rows', 'laps_5': 'laps', '68_6': '68', '5_7': '5'} | {'eq_2': [3], 'result_3': [], 'count_1': [2], 'filter_eq_0': [1], 'all_rows_4': [0], 'laps_5': [0], '68_6': [0], '5_7': [2]} | ['driver', 'constructor', 'laps', 'time / retired', 'grid'] | [['jackie stewart', 'matra - ford', '68', '1:39:11.26', '3'], ['jochen rindt', 'lotus - ford', '68', '+ 0.08', '1'], ['jean - pierre beltoise', 'matra - ford', '68', '+ 0.17', '6'], ['bruce mclaren', 'mclaren - ford', '68', '+ 0.19', '5'], ['piers courage', 'brabham - ford', '68', '+ 33.44', '4'], ['pedro rodrã\xadguez', 'ferrari', '66', '+ 2 laps', '12'], ['denny hulme', 'mclaren - ford', '66', '+ 2 laps', '2'], ['jo siffert', 'lotus - ford', '64', 'engine', '8'], ['graham hill', 'lotus - ford', '63', 'halfshaft', '9'], ['jacky ickx', 'brabham - ford', '61', 'out of fuel', '15'], ['john surtees', 'brm', '60', 'not classified', '10'], ['jackie oliver', 'brm', '48', 'oil pressure', '11'], ['silvio moser', 'brabham - ford', '9', 'fuel leak', '13'], ['jack brabham', 'brabham - ford', '6', 'oil leak', '7'], ['john miles', 'lotus - ford', '3', 'engine', '14']] |
2008 issf world cup final ( rifle and pistol ) | https://en.wikipedia.org/wiki/2008_ISSF_World_Cup_Final_%28rifle_and_pistol%29 | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-18191407-15.html.csv | majority | the majority of the shooters had a prone score under 200 . | {'scope': 'all', 'col': '2', 'most_or_all': 'most', 'criterion': 'less_than', 'value': '200', 'subset': None} | {'func': 'most_less', 'args': ['all_rows', 'prone', '200'], 'result': True, 'ind': 0, 'tointer': 'for the prone records of all rows , most of them are less than 200 .', 'tostr': 'most_less { all_rows ; prone ; 200 } = true'} | most_less { all_rows ; prone ; 200 } = true | for the prone records of all rows , most of them are less than 200 . | 1 | 1 | {'most_less_0': 0, 'result_1': 1, 'all_rows_2': 2, 'prone_3': 3, '200_4': 4} | {'most_less_0': 'most_less', 'result_1': 'true', 'all_rows_2': 'all_rows', 'prone_3': 'prone', '200_4': '200'} | {'most_less_0': [1], 'result_1': [], 'all_rows_2': [0], 'prone_3': [0], '200_4': [0]} | ['shooter', 'prone', 'stand', 'kneel', 'qual'] | [['sonja pfeilschifter ( ger )', '199', '195', '196', '590'], ['olga dovgun ( kaz )', '200', '196', '193', '589'], ['lioubov galkina ( rus )', '199', '193', '194', '586'], ['yin wen ( chn )', '197', '195', '194', '586'], ['jamie beyerle ( usa )', '198', '188', '194', '580'], ['snježana pejčić ( cro )', '197', '193', '190', '580'], ['eglis yaima cruz ( cub )', '199', '186', '193', '578'], ['morgan hicks ( usa )', '196', '190', '192', '578'], ['du li ( chn )', '199', '189', '190', '578'], ['thanyalak chotphibunsin ( tha )', '197', '185', '194', '576'], ['kristina vestveit ( nor )', '195', '189', '191', '575'], ['adela sykorova ( cze )', '195', '187', '188', '570']] |
1932 vfl season | https://en.wikipedia.org/wiki/1932_VFL_season | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-10790099-8.html.csv | count | in round eight of the 1932 vfl season , the home team scored over 10 points twice when the crowd had less than 10000 people in it . | {'scope': 'subset', 'criterion': 'greater_than', 'value': '10', 'result': '2', 'col': '2', 'subset': {'col': '6', 'criterion': 'less_than', 'value': '10000'}} | {'func': 'eq', 'args': [{'func': 'count', 'args': [{'func': 'filter_greater', 'args': [{'func': 'filter_less', 'args': ['all_rows', 'crowd', '10000'], 'result': None, 'ind': 0, 'tostr': 'filter_less { all_rows ; crowd ; 10000 }', 'tointer': 'select the rows whose crowd record is less than 10000 .'}, 'home team score', '10'], 'result': None, 'ind': 1, 'tointer': 'select the rows whose crowd record is less than 10000 . among these rows , select the rows whose home team score record is greater than 10 .', 'tostr': 'filter_greater { filter_less { all_rows ; crowd ; 10000 } ; home team score ; 10 }'}], 'result': '2', 'ind': 2, 'tostr': 'count { filter_greater { filter_less { all_rows ; crowd ; 10000 } ; home team score ; 10 } }', 'tointer': 'select the rows whose crowd record is less than 10000 . among these rows , select the rows whose home team score record is greater than 10 . the number of such rows is 2 .'}, '2'], 'result': True, 'ind': 3, 'tostr': 'eq { count { filter_greater { filter_less { all_rows ; crowd ; 10000 } ; home team score ; 10 } } ; 2 } = true', 'tointer': 'select the rows whose crowd record is less than 10000 . among these rows , select the rows whose home team score record is greater than 10 . the number of such rows is 2 .'} | eq { count { filter_greater { filter_less { all_rows ; crowd ; 10000 } ; home team score ; 10 } } ; 2 } = true | select the rows whose crowd record is less than 10000 . among these rows , select the rows whose home team score record is greater than 10 . the number of such rows is 2 . | 4 | 4 | {'eq_3': 3, 'result_4': 4, 'count_2': 2, 'filter_greater_1': 1, 'filter_less_0': 0, 'all_rows_5': 5, 'crowd_6': 6, '10000_7': 7, 'home team score_8': 8, '10_9': 9, '2_10': 10} | {'eq_3': 'eq', 'result_4': 'true', 'count_2': 'count', 'filter_greater_1': 'filter_greater', 'filter_less_0': 'filter_less', 'all_rows_5': 'all_rows', 'crowd_6': 'crowd', '10000_7': '10000', 'home team score_8': 'home team score', '10_9': '10', '2_10': '2'} | {'eq_3': [4], 'result_4': [], 'count_2': [3], 'filter_greater_1': [2], 'filter_less_0': [1], 'all_rows_5': [0], 'crowd_6': [0], '10000_7': [0], 'home team score_8': [1], '10_9': [1], '2_10': [3]} | ['home team', 'home team score', 'away team', 'away team score', 'venue', 'crowd', 'date'] | [['richmond', '8.7 ( 55 )', 'south melbourne', '8.9 ( 57 )', 'punt road oval', '35000', '25 june 1932'], ['footscray', '10.7 ( 67 )', 'melbourne', '3.9 ( 27 )', 'western oval', '10000', '25 june 1932'], ['fitzroy', '13.15 ( 93 )', 'hawthorn', '9.9 ( 63 )', 'brunswick street oval', '7500', '25 june 1932'], ['geelong', '9.9 ( 63 )', 'essendon', '11.12 ( 78 )', 'corio oval', '9500', '25 june 1932'], ['st kilda', '12.16 ( 88 )', 'collingwood', '15.15 ( 105 )', 'junction oval', '9000', '25 june 1932'], ['north melbourne', '11.7 ( 73 )', 'carlton', '14.16 ( 100 )', 'arden street oval', '15000', '25 june 1932']] |
luster , norway | https://en.wikipedia.org/wiki/Luster%2C_Norway | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/1-178398-1.html.csv | superlative | the earliest church in luster to have been built was urnes stavkyrkje . | {'scope': 'all', 'col_superlative': '4', 'row_superlative': '4', 'value_mentioned': 'no', 'max_or_min': 'min', 'other_col': '3', 'subset': None} | {'func': 'str_eq', 'args': [{'func': 'str_hop', 'args': [{'func': 'argmin', 'args': ['all_rows', 'year built'], 'result': None, 'ind': 0, 'tostr': 'argmin { all_rows ; year built }'}, 'church name'], 'result': 'urnes stavkyrkje', 'ind': 1, 'tostr': 'hop { argmin { all_rows ; year built } ; church name }'}, 'urnes stavkyrkje'], 'result': True, 'ind': 2, 'tostr': 'eq { hop { argmin { all_rows ; year built } ; church name } ; urnes stavkyrkje } = true', 'tointer': 'select the row whose year built record of all rows is minimum . the church name record of this row is urnes stavkyrkje .'} | eq { hop { argmin { all_rows ; year built } ; church name } ; urnes stavkyrkje } = true | select the row whose year built record of all rows is minimum . the church name record of this row is urnes stavkyrkje . | 3 | 3 | {'str_eq_2': 2, 'result_3': 3, 'str_hop_1': 1, 'argmin_0': 0, 'all_rows_4': 4, 'year built_5': 5, 'church name_6': 6, 'urnes stavkyrkje_7': 7} | {'str_eq_2': 'str_eq', 'result_3': 'true', 'str_hop_1': 'str_hop', 'argmin_0': 'argmin', 'all_rows_4': 'all_rows', 'year built_5': 'year built', 'church name_6': 'church name', 'urnes stavkyrkje_7': 'urnes stavkyrkje'} | {'str_eq_2': [3], 'result_3': [], 'str_hop_1': [2], 'argmin_0': [1], 'all_rows_4': [0], 'year built_5': [0], 'church name_6': [1], 'urnes stavkyrkje_7': [2]} | ['parish ( prestegjeld )', 'sub - parish ( sogn )', 'church name', 'year built', 'location of the church'] | [['hafslo parish', 'hafslo', 'hafslo kyrkje', '1878', 'hafslo'], ['hafslo parish', 'hafslo', 'veitastrond kapell', '1928', 'veitastrond'], ['hafslo parish', 'solvorn', 'solvorn kyrkje', '1883', 'solvorn'], ['hafslo parish', 'solvorn', 'urnes stavkyrkje', '1130', 'urnes'], ['jostedal parish', 'fet og joranger', 'fet kyrkje', '1894', 'fet'], ['jostedal parish', 'fet og joranger', 'joranger kyrkje', '1660', 'joranger'], ['jostedal parish', 'gaupne', 'gaupne kyrkje', '1908', 'gaupne'], ['jostedal parish', 'gaupne', 'gaupne gamle kyrkje', '1647', 'gaupne'], ['jostedal parish', 'jostedal', 'jostedal kyrkje', '1660', 'jostedal'], ['luster parish', 'dale', 'dale kyrkje', '1250', 'luster'], ['luster parish', 'fortun', 'fortun kyrkje', '1879', 'fortun']] |
miss usa 1989 | https://en.wikipedia.org/wiki/Miss_USA_1989 | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/1-16268026-3.html.csv | count | two states scored a total of 8.432 in the swimsuit portion of the miss usa 1989 pageant . | {'scope': 'all', 'criterion': 'fuzzily_match', 'value': '8.432', 'result': '2', 'col': '4', 'subset': None} | {'func': 'eq', 'args': [{'func': 'count', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'swimsuit', '8.432'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose swimsuit record fuzzily matches to 8.432 .', 'tostr': 'filter_eq { all_rows ; swimsuit ; 8.432 }'}], 'result': '2', 'ind': 1, 'tostr': 'count { filter_eq { all_rows ; swimsuit ; 8.432 } }', 'tointer': 'select the rows whose swimsuit record fuzzily matches to 8.432 . the number of such rows is 2 .'}, '2'], 'result': True, 'ind': 2, 'tostr': 'eq { count { filter_eq { all_rows ; swimsuit ; 8.432 } } ; 2 } = true', 'tointer': 'select the rows whose swimsuit record fuzzily matches to 8.432 . the number of such rows is 2 .'} | eq { count { filter_eq { all_rows ; swimsuit ; 8.432 } } ; 2 } = true | select the rows whose swimsuit record fuzzily matches to 8.432 . the number of such rows is 2 . | 3 | 3 | {'eq_2': 2, 'result_3': 3, 'count_1': 1, 'filter_str_eq_0': 0, 'all_rows_4': 4, 'swimsuit_5': 5, '8.432_6': 6, '2_7': 7} | {'eq_2': 'eq', 'result_3': 'true', 'count_1': 'count', 'filter_str_eq_0': 'filter_str_eq', 'all_rows_4': 'all_rows', 'swimsuit_5': 'swimsuit', '8.432_6': '8.432', '2_7': '2'} | {'eq_2': [3], 'result_3': [], 'count_1': [2], 'filter_str_eq_0': [1], 'all_rows_4': [0], 'swimsuit_5': [0], '8.432_6': [0], '2_7': [2]} | ['state', 'preliminary average', 'interview', 'swimsuit', 'evening gown', 'semifinal average'] | [['new jersey', '8.510 ( 7 )', '8.626 ( 5 )', '8.712 ( 6 )', '9.165 ( 3 )', '8.834 ( 3 )'], ['colorado', '8.388 ( 10 )', '8.638 ( 4 )', '8.432 ( 9 )', '8.786 ( 5 )', '8.618 ( 7 )'], ['louisiana', '8.829 ( 2 )', '8.600 ( 6 )', '8.820 ( 4 )', '8.710 ( 7 )', '8.710 ( 5 )'], ['oklahoma', '8.662 ( 3 )', '8.880 ( 2 )', '8.762 ( 5 )', '9.214 ( 2 )', '8.952 ( 2 )'], ['california', '8.659 ( 4 )', '8.313 ( 8 )', '8.977 ( 2 )', '8.774 ( 6 )', '8.688 ( 6 )'], ['illinois', '8.501 ( 8 )', '7.988 ( 9 )', '8.432 ( 9 )', '8.681 ( 9 )', '8.367 ( 10 )'], ['texas', '9.084 ( 1 )', '9.425 ( 1 )', '9.535 ( 1 )', '9.601 ( 1 )', '9.520 ( 1 )'], ['pennsylvania', '8.580 ( 5 )', '8.534 ( 7 )', '8.467 ( 8 )', '8.613 ( 10 )', '8.538 ( 8 )'], ['arizona', '8.529 ( 6 )', '7.792 ( 10 )', '8.833 ( 3 )', '8.703 ( 8 )', '8.442 ( 9 )']] |
sidecarcross world championship | https://en.wikipedia.org/wiki/Sidecarcross_World_Championship | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/1-16729457-16.html.csv | superlative | daniãl willemsen / sven verbrugge 1 scored the highest number of points in the sidecarcross world championship . | {'scope': 'all', 'col_superlative': '5', 'row_superlative': '1', 'value_mentioned': 'no', 'max_or_min': 'max', 'other_col': '2', 'subset': None} | {'func': 'str_eq', 'args': [{'func': 'str_hop', 'args': [{'func': 'argmax', 'args': ['all_rows', 'points'], 'result': None, 'ind': 0, 'tostr': 'argmax { all_rows ; points }'}, 'driver / passenger'], 'result': 'daniãl willemsen / sven verbrugge 1', 'ind': 1, 'tostr': 'hop { argmax { all_rows ; points } ; driver / passenger }'}, 'daniãl willemsen / sven verbrugge 1'], 'result': True, 'ind': 2, 'tostr': 'eq { hop { argmax { all_rows ; points } ; driver / passenger } ; daniãl willemsen / sven verbrugge 1 } = true', 'tointer': 'select the row whose points record of all rows is maximum . the driver / passenger record of this row is daniãl willemsen / sven verbrugge 1 .'} | eq { hop { argmax { all_rows ; points } ; driver / passenger } ; daniãl willemsen / sven verbrugge 1 } = true | select the row whose points record of all rows is maximum . the driver / passenger record of this row is daniãl willemsen / sven verbrugge 1 . | 3 | 3 | {'str_eq_2': 2, 'result_3': 3, 'str_hop_1': 1, 'argmax_0': 0, 'all_rows_4': 4, 'points_5': 5, 'driver / passenger_6': 6, 'daniãl willemsen / sven verbrugge 1_7': 7} | {'str_eq_2': 'str_eq', 'result_3': 'true', 'str_hop_1': 'str_hop', 'argmax_0': 'argmax', 'all_rows_4': 'all_rows', 'points_5': 'points', 'driver / passenger_6': 'driver / passenger', 'daniãl willemsen / sven verbrugge 1_7': 'daniãl willemsen / sven verbrugge 1'} | {'str_eq_2': [3], 'result_3': [], 'str_hop_1': [2], 'argmax_0': [1], 'all_rows_4': [0], 'points_5': [0], 'driver / passenger_6': [1], 'daniãl willemsen / sven verbrugge 1_7': [2]} | ['position', 'driver / passenger', 'equipment', 'bike no', 'points'] | [['1', 'daniãl willemsen / sven verbrugge 1', 'zabel - wsp', '1', '487'], ['2', 'janis daiders / lauris daiders', 'zabel - vmc', '8', '478'], ['3', 'jan hendrickx / tim smeuninx', 'zabel - vmc', '3', '405'], ['4', 'maris rupeiks / kaspars stupelis 2', 'zabel - wsp', '5', '349'], ['5', 'etienne bax / ben van den bogaart', 'zabel - vmc', '4', '347'], ['6', 'ben adriaenssen / guennady auvray', 'ktm - vmc', '6', '346'], ['7', 'ewgeny scherbinin / haralds kurpnieks', 'zabel - wsp', '20', '321'], ['8', 'marko happich / meinrad schelbert', 'zabel - vmc', '15', '317'], ['9', 'joris hendrickx / kaspars liepins', 'ktm - vmc', '2', '315']] |
1970 vfl season | https://en.wikipedia.org/wiki/1970_VFL_season | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-1164217-13.html.csv | count | in the 1970 vfl season , among the games where home team scored above 12.00 , two of them had attendance below 20,000 . | {'scope': 'subset', 'criterion': 'less_than', 'value': '20000', 'result': '2', 'col': '6', 'subset': {'col': '2', 'criterion': 'greater_than', 'value': '12'}} | {'func': 'eq', 'args': [{'func': 'count', 'args': [{'func': 'filter_less', 'args': [{'func': 'filter_greater', 'args': ['all_rows', 'home team score', '12'], 'result': None, 'ind': 0, 'tostr': 'filter_greater { all_rows ; home team score ; 12 }', 'tointer': 'select the rows whose home team score record is greater than 12 .'}, 'crowd', '20000'], 'result': None, 'ind': 1, 'tointer': 'select the rows whose home team score record is greater than 12 . among these rows , select the rows whose crowd record is less than 20000 .', 'tostr': 'filter_less { filter_greater { all_rows ; home team score ; 12 } ; crowd ; 20000 }'}], 'result': '2', 'ind': 2, 'tostr': 'count { filter_less { filter_greater { all_rows ; home team score ; 12 } ; crowd ; 20000 } }', 'tointer': 'select the rows whose home team score record is greater than 12 . among these rows , select the rows whose crowd record is less than 20000 . the number of such rows is 2 .'}, '2'], 'result': True, 'ind': 3, 'tostr': 'eq { count { filter_less { filter_greater { all_rows ; home team score ; 12 } ; crowd ; 20000 } } ; 2 } = true', 'tointer': 'select the rows whose home team score record is greater than 12 . among these rows , select the rows whose crowd record is less than 20000 . the number of such rows is 2 .'} | eq { count { filter_less { filter_greater { all_rows ; home team score ; 12 } ; crowd ; 20000 } } ; 2 } = true | select the rows whose home team score record is greater than 12 . among these rows , select the rows whose crowd record is less than 20000 . the number of such rows is 2 . | 4 | 4 | {'eq_3': 3, 'result_4': 4, 'count_2': 2, 'filter_less_1': 1, 'filter_greater_0': 0, 'all_rows_5': 5, 'home team score_6': 6, '12_7': 7, 'crowd_8': 8, '20000_9': 9, '2_10': 10} | {'eq_3': 'eq', 'result_4': 'true', 'count_2': 'count', 'filter_less_1': 'filter_less', 'filter_greater_0': 'filter_greater', 'all_rows_5': 'all_rows', 'home team score_6': 'home team score', '12_7': '12', 'crowd_8': 'crowd', '20000_9': '20000', '2_10': '2'} | {'eq_3': [4], 'result_4': [], 'count_2': [3], 'filter_less_1': [2], 'filter_greater_0': [1], 'all_rows_5': [0], 'home team score_6': [0], '12_7': [0], 'crowd_8': [1], '20000_9': [1], '2_10': [3]} | ['home team', 'home team score', 'away team', 'away team score', 'venue', 'crowd', 'date'] | [['hawthorn', '16.17 ( 113 )', 'melbourne', '11.8 ( 74 )', 'glenferrie oval', '13196', '27 june 1970'], ['carlton', '12.21 ( 93 )', 'geelong', '11.14 ( 80 )', 'princes park', '25519', '27 june 1970'], ['south melbourne', '16.9 ( 105 )', 'north melbourne', '14.12 ( 96 )', 'lake oval', '12407', '27 june 1970'], ['st kilda', '11.14 ( 80 )', 'fitzroy', '8.11 ( 59 )', 'moorabbin oval', '17073', '27 june 1970'], ['footscray', '11.12 ( 78 )', 'essendon', '9.8 ( 62 )', 'western oval', '18817', '27 june 1970'], ['richmond', '11.13 ( 79 )', 'collingwood', '9.13 ( 67 )', 'vfl park', '23939', '27 june 1970']] |
spicy horse | https://en.wikipedia.org/wiki/Spicy_Horse | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-17089493-1.html.csv | count | there are three games released specifically for the ipad . | {'scope': 'all', 'criterion': 'fuzzily_match', 'value': 'ipad', 'result': '3', 'col': '3', 'subset': None} | {'func': 'eq', 'args': [{'func': 'count', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'platforms', 'ipad'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose platforms record fuzzily matches to ipad .', 'tostr': 'filter_eq { all_rows ; platforms ; ipad }'}], 'result': '3', 'ind': 1, 'tostr': 'count { filter_eq { all_rows ; platforms ; ipad } }', 'tointer': 'select the rows whose platforms record fuzzily matches to ipad . the number of such rows is 3 .'}, '3'], 'result': True, 'ind': 2, 'tostr': 'eq { count { filter_eq { all_rows ; platforms ; ipad } } ; 3 } = true', 'tointer': 'select the rows whose platforms record fuzzily matches to ipad . the number of such rows is 3 .'} | eq { count { filter_eq { all_rows ; platforms ; ipad } } ; 3 } = true | select the rows whose platforms record fuzzily matches to ipad . the number of such rows is 3 . | 3 | 3 | {'eq_2': 2, 'result_3': 3, 'count_1': 1, 'filter_str_eq_0': 0, 'all_rows_4': 4, 'platforms_5': 5, 'ipad_6': 6, '3_7': 7} | {'eq_2': 'eq', 'result_3': 'true', 'count_1': 'count', 'filter_str_eq_0': 'filter_str_eq', 'all_rows_4': 'all_rows', 'platforms_5': 'platforms', 'ipad_6': 'ipad', '3_7': '3'} | {'eq_2': [3], 'result_3': [], 'count_1': [2], 'filter_str_eq_0': [1], 'all_rows_4': [0], 'platforms_5': [0], 'ipad_6': [0], '3_7': [2]} | ['title', 'year', 'platforms', 'developer', 'notes'] | [["american mcgee 's grimm", '2008', 'pc', 'spicy horse', 'episodic mcgee takes on various grimm tales'], ['dexiq', '2009', 'ipad , iphone , ipod', 'spicy pony', 'puzzle game'], ['american mcgee presents akaneiro', '2010', 'ipad', 'spicy pony', 'little red riding hood in japan'], ["american mcgee 's crooked house", '2010', 'ipad , iphone , ipod', 'spicy pony', 'puzzle game'], ['alice : madness returns', '2011', 'playstation 3 , xbox 360 , pc', 'spicy horse', "sequel to american mcgee 's alice"], ['bighead bash', '2012', 'tba', 'spicy horse', 'side scrolling / battle multiplayer ( unreleased )'], ['crazy fairies', '2012', 'mobile , facebook', 'spicy horse', 'currently in the closed beta stage ( unreleased )'], ['akaneiro : demon hunters', '2013', 'browsers , tegra - powered tablets', 'spicy horse', 'a japanese / mcgee take on little red riding hood']] |
list of star wars : the clone wars episodes | https://en.wikipedia.org/wiki/List_of_Star_Wars%3A_The_Clone_Wars_episodes | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/1-19229713-6.html.csv | ordinal | for the star wars : the clone wars episodes , the one with the 2nd highest number of viewers was the one titled " sabotage . " . | {'row': '17', 'col': '8', 'order': '2', 'col_other': '3', 'max_or_min': 'max_to_min', 'value_mentioned': 'no', 'scope': 'all', 'subset': None} | {'func': 'str_eq', 'args': [{'func': 'str_hop', 'args': [{'func': 'nth_argmax', 'args': ['all_rows', 'us viewers ( million )', '2'], 'result': None, 'ind': 0, 'tostr': 'nth_argmax { all_rows ; us viewers ( million ) ; 2 }'}, 'title'], 'result': 'sabotage', 'ind': 1, 'tostr': 'hop { nth_argmax { all_rows ; us viewers ( million ) ; 2 } ; title }'}, 'sabotage'], 'result': True, 'ind': 2, 'tostr': 'eq { hop { nth_argmax { all_rows ; us viewers ( million ) ; 2 } ; title } ; sabotage } = true', 'tointer': 'select the row whose us viewers ( million ) record of all rows is 2nd maximum . the title record of this row is sabotage .'} | eq { hop { nth_argmax { all_rows ; us viewers ( million ) ; 2 } ; title } ; sabotage } = true | select the row whose us viewers ( million ) record of all rows is 2nd maximum . the title record of this row is sabotage . | 3 | 3 | {'str_eq_2': 2, 'result_3': 3, 'str_hop_1': 1, 'nth_argmax_0': 0, 'all_rows_4': 4, 'us viewers (million)_5': 5, '2_6': 6, 'title_7': 7, 'sabotage_8': 8} | {'str_eq_2': 'str_eq', 'result_3': 'true', 'str_hop_1': 'str_hop', 'nth_argmax_0': 'nth_argmax', 'all_rows_4': 'all_rows', 'us viewers (million)_5': 'us viewers ( million )', '2_6': '2', 'title_7': 'title', 'sabotage_8': 'sabotage'} | {'str_eq_2': [3], 'result_3': [], 'str_hop_1': [2], 'nth_argmax_0': [1], 'all_rows_4': [0], 'us viewers (million)_5': [0], '2_6': [0], 'title_7': [1], 'sabotage_8': [2]} | ['no', '-', 'title', 'directed by', 'written by', 'original air date', 'production code', 'us viewers ( million )'] | [['89', '1', 'revival', 'steward lee', 'chris collins', 'september 29 , 2012', '4.26', '1.94'], ['90', '2', 'a war on two fronts', 'dave filoni', 'chris collins', 'october 6 , 2012', '4.15', '1.71'], ['91', '3', 'front runners', 'steward lee', 'chris collins', 'october 13 , 2012', '4.16', '1.75'], ['92', '4', 'the soft war', 'kyle dunlevy', 'chris collins', 'october 20 , 2012', '4.17', '1.57'], ['93', '5', 'tipping points', 'bosco ng', 'chris collins', 'october 27 , 2012', '4.18', '1.42'], ['94', '6', 'the gathering', 'kyle dunlevy', 'christian taylor', 'november 3 , 2012', '4.22', '1.66'], ['95', '7', 'a test of strength', 'bosco ng', 'christian taylor', 'november 10 , 2012', '4.23', '1.74'], ['96', '8', 'bound for rescue', "brian kalin o'connell", 'christian taylor', 'november 17 , 2012', '4.24', '1.96'], ['97', '9', 'a necessary bond', 'danny keller', 'christian taylor', 'november 24 , 2012', '4.25', '1.39'], ['98', '10', 'secret weapons', 'danny keller', 'brent friedman', 'december 1 , 2012', '5.04', '1.46'], ['99', '11', 'a sunny day in the void', 'kyle dunlevy', 'brent friedman', 'december 8 , 2012', '5.05', '1.43'], ['100', '12', 'missing in action', 'steward lee', 'brent friedman', 'january 5 , 2013', '5.06', '1.74'], ['101', '13', 'point of no return', 'bosco ng', 'brent friedman', 'january 12 , 2013', '5.07', '1.47'], ['102', '14', 'eminence', 'kyle dunlevy', 'chris collins', 'january 19 , 2013', '5.01', '1.85'], ['103', '15', 'shades of reason', 'bosco ng', 'chris collins', 'january 26 , 2013', '5.02', '1.83'], ['104', '16', 'the lawless', "brian kalin o'connell", 'chris collins', 'february 2 , 2013', '5.03', '1.86'], ['105', '17', 'sabotage', "brian kalin o'connell", 'charles murray', 'february 9 , 2013', '5.08', '2.02'], ['106', '18', 'the jedi who knew too much', 'danny keller', 'charles murray', 'february 16 , 2013', '5.09', '1.64'], ['107', '19', 'to catch a jedi', 'kyle dunlevy', 'charles murray', 'february 23 , 2013', '5.10', '2.06']] |
kingco athletic conference | https://en.wikipedia.org/wiki/Kingco_Athletic_Conference | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/1-13759592-1.html.csv | comparative | ballard high school has a lower total enrollment than newport high school . | {'row_1': '1', 'row_2': '7', 'col': '5', 'col_other': '1', 'relation': 'less', 'record_mentioned': 'no', 'diff_result': None} | {'func': 'less', 'args': [{'func': 'num_hop', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'high school', 'ballard'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose high school record fuzzily matches to ballard .', 'tostr': 'filter_eq { all_rows ; high school ; ballard }'}, 'enrollment'], 'result': None, 'ind': 2, 'tostr': 'hop { filter_eq { all_rows ; high school ; ballard } ; enrollment }', 'tointer': 'select the rows whose high school record fuzzily matches to ballard . take the enrollment record of this row .'}, {'func': 'num_hop', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'high school', 'newport'], 'result': None, 'ind': 1, 'tointer': 'select the rows whose high school record fuzzily matches to newport .', 'tostr': 'filter_eq { all_rows ; high school ; newport }'}, 'enrollment'], 'result': None, 'ind': 3, 'tostr': 'hop { filter_eq { all_rows ; high school ; newport } ; enrollment }', 'tointer': 'select the rows whose high school record fuzzily matches to newport . take the enrollment record of this row .'}], 'result': True, 'ind': 4, 'tostr': 'less { hop { filter_eq { all_rows ; high school ; ballard } ; enrollment } ; hop { filter_eq { all_rows ; high school ; newport } ; enrollment } } = true', 'tointer': 'select the rows whose high school record fuzzily matches to ballard . take the enrollment record of this row . select the rows whose high school record fuzzily matches to newport . take the enrollment record of this row . the first record is less than the second record .'} | less { hop { filter_eq { all_rows ; high school ; ballard } ; enrollment } ; hop { filter_eq { all_rows ; high school ; newport } ; enrollment } } = true | select the rows whose high school record fuzzily matches to ballard . take the enrollment record of this row . select the rows whose high school record fuzzily matches to newport . take the enrollment record of this row . the first record is less than the second record . 
| 5 | 5 | {'less_4': 4, 'result_5': 5, 'num_hop_2': 2, 'filter_str_eq_0': 0, 'all_rows_6': 6, 'high school_7': 7, 'ballard_8': 8, 'enrollment_9': 9, 'num_hop_3': 3, 'filter_str_eq_1': 1, 'all_rows_10': 10, 'high school_11': 11, 'newport_12': 12, 'enrollment_13': 13} | {'less_4': 'less', 'result_5': 'true', 'num_hop_2': 'num_hop', 'filter_str_eq_0': 'filter_str_eq', 'all_rows_6': 'all_rows', 'high school_7': 'high school', 'ballard_8': 'ballard', 'enrollment_9': 'enrollment', 'num_hop_3': 'num_hop', 'filter_str_eq_1': 'filter_str_eq', 'all_rows_10': 'all_rows', 'high school_11': 'high school', 'newport_12': 'newport', 'enrollment_13': 'enrollment'} | {'less_4': [5], 'result_5': [], 'num_hop_2': [4], 'filter_str_eq_0': [2], 'all_rows_6': [0], 'high school_7': [0], 'ballard_8': [0], 'enrollment_9': [2], 'num_hop_3': [4], 'filter_str_eq_1': [3], 'all_rows_10': [1], 'high school_11': [1], 'newport_12': [1], 'enrollment_13': [3]} | ['high school', 'location', 'founded', 'affiliation', 'enrollment', 'nickname', 'division'] | [['ballard', 'seattle', '1903', 'public ( seattle ps )', '1649', 's beaver', 'crown'], ['bothell', 'bothell', '1959', 'public ( northshore sd )', '1800', 's cougar', 'crown'], ['eastlake', 'sammamish', '1993', 'public ( lake washington sd )', '1329', 'wolves', 'crest'], ['garfield', 'seattle', '1920', 'public ( seattle ps )', '1702', 's bulldog', 'crown'], ['inglemoor', 'kenmore', '1964', 'public ( northshore sd )', '1880', 'vikings', 'crown'], ['issaquah', 'issaquah', '1905', 'public ( issaquah sd )', '1844', 's eagle', 'crest'], ['newport', 'bellevue', '1964', 'public ( bellevue sd )', '1712', 's knight', 'crest'], ['redmond', 'redmond', '1965', 'public ( lake washington sd )', '1442', 'mustangs', 'crest'], ['roosevelt', 'seattle', '1922', 'public ( seattle ps )', '1710', 'rough riders', 'crown'], ['skyline', 'sammamish', '1997', 'public ( issaquah sd )', '1889', 'ns sparta', 'crest']] |
2010 - 11 danish 1st division | https://en.wikipedia.org/wiki/2010%E2%80%9311_Danish_1st_Division | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/1-27782699-3.html.csv | majority | most of the managers left a vacancy in the year 2010 . | {'scope': 'all', 'col': '4', 'most_or_all': 'most', 'criterion': 'fuzzily_match', 'value': '2010', 'subset': None} | {'func': 'most_str_eq', 'args': ['all_rows', 'date of vacancy', '2010'], 'result': True, 'ind': 0, 'tointer': 'for the date of vacancy records of all rows , most of them fuzzily match to 2010 .', 'tostr': 'most_eq { all_rows ; date of vacancy ; 2010 } = true'} | most_eq { all_rows ; date of vacancy ; 2010 } = true | for the date of vacancy records of all rows , most of them fuzzily match to 2010 . | 1 | 1 | {'most_str_eq_0': 0, 'result_1': 1, 'all_rows_2': 2, 'date of vacancy_3': 3, '2010_4': 4} | {'most_str_eq_0': 'most_str_eq', 'result_1': 'true', 'all_rows_2': 'all_rows', 'date of vacancy_3': 'date of vacancy', '2010_4': '2010'} | {'most_str_eq_0': [1], 'result_1': [], 'all_rows_2': [0], 'date of vacancy_3': [0], '2010_4': [0]} | ['team', 'outgoing manager', 'manner of departure', 'date of vacancy', 'replaced by', 'date of appointment', 'position in table'] | [['aarhus gf', 'erik rasmussen', 'sacked', '20 may 2010', 'peter sørensen', '1 july 2010', 'pre - season'], ['næstved bk', 'kim poulsen', 'mutual consent', '30 june 2010', 'brian flies', '1 july 2010', 'pre - season'], ['fc roskilde', 'martin jungsgaard', 'end of contract', '30 june 2010', 'carsten broe', '1 july 2010', 'pre - season'], ['hobro ik', 'søren kusk', 'end of contract', '30 june 2010', 'jan østergaard', '1 july 2010', 'pre - season'], ['ab', 'flemming christensen', 'end of contract', '30 june 2010', 'kasper kurland', '1 july 2010', 'pre - season'], ['fc fredericia', 'peter sørensen', 'signed by aarhus gf', '30 june 2010', 'thomas thomasberg', '1 july 2010', 'pre - season'], ['fc hjørring', 'thomas thomasberg', 'signed by fc fredericia', '30 june 2010', 'kim poulsen', '1 july 2010', 'pre - season'], ['hobro ik', 'jan østergaard', 'sacked', '2 november 2010', 'jens hammer sørensen', '2 november 2010', '11th'], ['viborg ff', 'lars søndergaard', 'sacked', '24 november 2010', 'steffen højer & søren frederiksen', '24 november 2010', '13th'], ['hobro ik', 'jens hammer sørensen', 'mutual consent', '26 november 2010', 'jakob michelsen', '8 january 2011', '11th'], ['hvidovre if', 'kenneth brylle larsen', 'end of contract', '31 december 2010', 'per nielsen', '1 january 2011', '14th'], ['kolding fc', 'jens letort', 'end of contract', '31 december 2010', 'kim fogh', '1 january 2011', '10th'], ['vejle bk', 'mats gren', 'sacked', '12 april 2011', 'viggo jensen', '14 april 2011', '3rd']] |
belgium women 's national rugby union team | https://en.wikipedia.org/wiki/Belgium_women%27s_national_rugby_union_team | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-13095097-1.html.csv | count | there were five years when the belgium women 's national rugby union team lost one game . | {'scope': 'all', 'criterion': 'equal', 'value': '1', 'result': '5', 'col': '4', 'subset': None} | {'func': 'eq', 'args': [{'func': 'count', 'args': [{'func': 'filter_eq', 'args': ['all_rows', 'lost', '1'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose lost record is equal to 1 .', 'tostr': 'filter_eq { all_rows ; lost ; 1 }'}], 'result': '5', 'ind': 1, 'tostr': 'count { filter_eq { all_rows ; lost ; 1 } }', 'tointer': 'select the rows whose lost record is equal to 1 . the number of such rows is 5 .'}, '5'], 'result': True, 'ind': 2, 'tostr': 'eq { count { filter_eq { all_rows ; lost ; 1 } } ; 5 } = true', 'tointer': 'select the rows whose lost record is equal to 1 . the number of such rows is 5 .'} | eq { count { filter_eq { all_rows ; lost ; 1 } } ; 5 } = true | select the rows whose lost record is equal to 1 . the number of such rows is 5 . | 3 | 3 | {'eq_2': 2, 'result_3': 3, 'count_1': 1, 'filter_eq_0': 0, 'all_rows_4': 4, 'lost_5': 5, '1_6': 6, '5_7': 7} | {'eq_2': 'eq', 'result_3': 'true', 'count_1': 'count', 'filter_eq_0': 'filter_eq', 'all_rows_4': 'all_rows', 'lost_5': 'lost', '1_6': '1', '5_7': '5'} | {'eq_2': [3], 'result_3': [], 'count_1': [2], 'filter_eq_0': [1], 'all_rows_4': [0], 'lost_5': [0], '1_6': [0], '5_7': [2]} | ['first game', 'played', 'drawn', 'lost', 'percentage'] | [['1988', '1', '0', '1', '0.00 %'], ['2001', '4', '1', '3', '12.50 %'], ['2006', '1', '0', '1', '0.00 %'], ['2007', '1', '0', '0', '100.00 %'], ['2000', '7', '0', '7', '0.00 %'], ['2006', '1', '0', '0', '100.00 %'], ['2007', '2', '0', '0', '100.00 %'], ['2006', '3', '0', '3', '0.00 %'], ['2009', '1', '0', '1', '0.00 %'], ['2010', '1', '0', '1', '0.00 %'], ['1986', '3', '0', '3', '0.00 %'], ['2011', '2', '0', '1', '50.00 %'], ['1986', '28', '1', '21', '23.21 %']] |
list of serbian submissions for the academy award for best foreign language film | https://en.wikipedia.org/wiki/List_of_Serbian_submissions_for_the_Academy_Award_for_Best_Foreign_Language_Film | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/1-22265716-1.html.csv | comparative | in terms of serbian submissions for the academy award for best foreign language film , vukovar poste restante was submitted one year before underground . | {'row_1': '1', 'row_2': '2', 'col': '1', 'col_other': '2', 'relation': 'less', 'record_mentioned': 'yes', 'diff_result': None} | {'func': 'and', 'args': [{'func': 'less', 'args': [{'func': 'str_hop', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'film title used in nomination', 'vukovar poste restante'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose film title used in nomination record fuzzily matches to vukovar poste restante .', 'tostr': 'filter_eq { all_rows ; film title used in nomination ; vukovar poste restante }'}, 'year ( ceremony )'], 'result': None, 'ind': 2, 'tostr': 'hop { filter_eq { all_rows ; film title used in nomination ; vukovar poste restante } ; year ( ceremony ) }', 'tointer': 'select the rows whose film title used in nomination record fuzzily matches to vukovar poste restante . take the year ( ceremony ) record of this row .'}, {'func': 'str_hop', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'film title used in nomination', 'underground'], 'result': None, 'ind': 1, 'tointer': 'select the rows whose film title used in nomination record fuzzily matches to underground .', 'tostr': 'filter_eq { all_rows ; film title used in nomination ; underground }'}, 'year ( ceremony )'], 'result': None, 'ind': 3, 'tostr': 'hop { filter_eq { all_rows ; film title used in nomination ; underground } ; year ( ceremony ) }', 'tointer': 'select the rows whose film title used in nomination record fuzzily matches to underground . take the year ( ceremony ) record of this row .'}], 'result': True, 'ind': 4, 'tostr': 'less { hop { filter_eq { all_rows ; film title used in nomination ; vukovar poste restante } ; year ( ceremony ) } ; hop { filter_eq { all_rows ; film title used in nomination ; underground } ; year ( ceremony ) } }', 'tointer': 'select the rows whose film title used in nomination record fuzzily matches to vukovar poste restante . take the year ( ceremony ) record of this row . select the rows whose film title used in nomination record fuzzily matches to underground . take the year ( ceremony ) record of this row . the first record is less than the second record .'}, {'func': 'and', 'args': [{'func': 'str_eq', 'args': [{'func': 'str_hop', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'film title used in nomination', 'vukovar poste restante'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose film title used in nomination record fuzzily matches to vukovar poste restante .', 'tostr': 'filter_eq { all_rows ; film title used in nomination ; vukovar poste restante }'}, 'year ( ceremony )'], 'result': None, 'ind': 2, 'tostr': 'hop { filter_eq { all_rows ; film title used in nomination ; vukovar poste restante } ; year ( ceremony ) }', 'tointer': 'select the rows whose film title used in nomination record fuzzily matches to vukovar poste restante . 
take the year ( ceremony ) record of this row .'}, '1994 ( 67th )'], 'result': True, 'ind': 5, 'tostr': 'eq { hop { filter_eq { all_rows ; film title used in nomination ; vukovar poste restante } ; year ( ceremony ) } ; 1994 ( 67th ) }', 'tointer': 'the year ( ceremony ) record of the first row is 1994 ( 67th ) .'}, {'func': 'str_eq', 'args': [{'func': 'str_hop', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'film title used in nomination', 'underground'], 'result': None, 'ind': 1, 'tointer': 'select the rows whose film title used in nomination record fuzzily matches to underground .', 'tostr': 'filter_eq { all_rows ; film title used in nomination ; underground }'}, 'year ( ceremony )'], 'result': None, 'ind': 3, 'tostr': 'hop { filter_eq { all_rows ; film title used in nomination ; underground } ; year ( ceremony ) }', 'tointer': 'select the rows whose film title used in nomination record fuzzily matches to underground . take the year ( ceremony ) record of this row .'}, '1995 ( 68th )'], 'result': True, 'ind': 6, 'tostr': 'eq { hop { filter_eq { all_rows ; film title used in nomination ; underground } ; year ( ceremony ) } ; 1995 ( 68th ) }', 'tointer': 'the year ( ceremony ) record of the second row is 1995 ( 68th ) .'}], 'result': True, 'ind': 7, 'tostr': 'and { eq { hop { filter_eq { all_rows ; film title used in nomination ; vukovar poste restante } ; year ( ceremony ) } ; 1994 ( 67th ) } ; eq { hop { filter_eq { all_rows ; film title used in nomination ; underground } ; year ( ceremony ) } ; 1995 ( 68th ) } }', 'tointer': 'the year ( ceremony ) record of the first row is 1994 ( 67th ) . the year ( ceremony ) record of the second row is 1995 ( 68th ) .'}], 'result': True, 'ind': 8, 'tostr': 'and { less { hop { filter_eq { all_rows ; film title used in nomination ; vukovar poste restante } ; year ( ceremony ) } ; hop { filter_eq { all_rows ; film title used in nomination ; underground } ; year ( ceremony ) } } ; and { eq { hop { filter_eq { all_rows ; film title used in nomination ; vukovar poste restante } ; year ( ceremony ) } ; 1994 ( 67th ) } ; eq { hop { filter_eq { all_rows ; film title used in nomination ; underground } ; year ( ceremony ) } ; 1995 ( 68th ) } } } = true', 'tointer': 'select the rows whose film title used in nomination record fuzzily matches to vukovar poste restante . take the year ( ceremony ) record of this row . select the rows whose film title used in nomination record fuzzily matches to underground . take the year ( ceremony ) record of this row . the first record is less than the second record . the year ( ceremony ) record of the first row is 1994 ( 67th ) . the year ( ceremony ) record of the second row is 1995 ( 68th ) .'} | and { less { hop { filter_eq { all_rows ; film title used in nomination ; vukovar poste restante } ; year ( ceremony ) } ; hop { filter_eq { all_rows ; film title used in nomination ; underground } ; year ( ceremony ) } } ; and { eq { hop { filter_eq { all_rows ; film title used in nomination ; vukovar poste restante } ; year ( ceremony ) } ; 1994 ( 67th ) } ; eq { hop { filter_eq { all_rows ; film title used in nomination ; underground } ; year ( ceremony ) } ; 1995 ( 68th ) } } } = true | select the rows whose film title used in nomination record fuzzily matches to vukovar poste restante . take the year ( ceremony ) record of this row . select the rows whose film title used in nomination record fuzzily matches to underground . take the year ( ceremony ) record of this row . 
the first record is less than the second record . the year ( ceremony ) record of the first row is 1994 ( 67th ) . the year ( ceremony ) record of the second row is 1995 ( 68th ) . | 13 | 9 | {'and_8': 8, 'result_9': 9, 'less_4': 4, 'str_hop_2': 2, 'filter_str_eq_0': 0, 'all_rows_10': 10, 'film title used in nomination_11': 11, 'vukovar poste restante_12': 12, 'year (ceremony)_13': 13, 'str_hop_3': 3, 'filter_str_eq_1': 1, 'all_rows_14': 14, 'film title used in nomination_15': 15, 'underground_16': 16, 'year (ceremony)_17': 17, 'and_7': 7, 'str_eq_5': 5, '1994 (67th)_18': 18, 'str_eq_6': 6, '1995 (68th)_19': 19} | {'and_8': 'and', 'result_9': 'true', 'less_4': 'less', 'str_hop_2': 'str_hop', 'filter_str_eq_0': 'filter_str_eq', 'all_rows_10': 'all_rows', 'film title used in nomination_11': 'film title used in nomination', 'vukovar poste restante_12': 'vukovar poste restante', 'year (ceremony)_13': 'year ( ceremony )', 'str_hop_3': 'str_hop', 'filter_str_eq_1': 'filter_str_eq', 'all_rows_14': 'all_rows', 'film title used in nomination_15': 'film title used in nomination', 'underground_16': 'underground', 'year (ceremony)_17': 'year ( ceremony )', 'and_7': 'and', 'str_eq_5': 'str_eq', '1994 (67th)_18': '1994 ( 67th )', 'str_eq_6': 'str_eq', '1995 (68th)_19': '1995 ( 68th )'} | {'and_8': [9], 'result_9': [], 'less_4': [8], 'str_hop_2': [4, 5], 'filter_str_eq_0': [2], 'all_rows_10': [0], 'film title used in nomination_11': [0], 'vukovar poste restante_12': [0], 'year (ceremony)_13': [2], 'str_hop_3': [4, 6], 'filter_str_eq_1': [3], 'all_rows_14': [1], 'film title used in nomination_15': [1], 'underground_16': [1], 'year (ceremony)_17': [3], 'and_7': [8], 'str_eq_5': [7], '1994 (67th)_18': [5], 'str_eq_6': [7], '1995 (68th)_19': [6]} | ['year ( ceremony )', 'film title used in nomination', 'original title', 'director', 'result'] | [['1994 ( 67th )', 'vukovar poste restante', 'вуковар , једна прича', 'boro drašković', 'not nominated'], ['1995 ( 68th )', 'underground', 'подземље', 'emir kusturica', 'not nominated'], ['1996 ( 69th )', 'pretty village , pretty flame', 'лепа села лепо горе', 'srđan dragojević', 'not nominated'], ['1997 ( 70th )', 'three summer days', 'три летња дана', 'mirjana vukomanović', 'not nominated'], ['1998 ( 71st )', 'powder keg', 'буре барута', 'goran paskaljević', 'not nominated'], ['1999 ( 72nd )', 'the white suit', 'бело одело', 'lazar ristovski', 'not nominated'], ['2000 ( 73rd )', 'sky hook', 'небеска удица', 'ljubiša samardžić', 'not nominated'], ['2001 ( 74th )', 'war live', 'рат уживо', 'darko bajić', 'not nominated']] |
north yorkshire | https://en.wikipedia.org/wiki/North_Yorkshire | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-143759-3.html.csv | ordinal | in north yorkshire , ripon has the highest population among those of civil parish definition . | {'scope': 'subset', 'row': '4', 'col': '3', 'order': '1', 'col_other': '2', 'max_or_min': 'max_to_min', 'value_mentioned': 'no', 'subset': {'col': '6', 'criterion': 'equal', 'value': 'civil parish'}} | {'func': 'str_eq', 'args': [{'func': 'str_hop', 'args': [{'func': 'nth_argmax', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'definition', 'civil parish'], 'result': None, 'ind': 0, 'tostr': 'filter_eq { all_rows ; definition ; civil parish }', 'tointer': 'select the rows whose definition record fuzzily matches to civil parish .'}, 'population', '1'], 'result': None, 'ind': 1, 'tostr': 'nth_argmax { filter_eq { all_rows ; definition ; civil parish } ; population ; 1 }'}, 'town'], 'result': 'ripon', 'ind': 2, 'tostr': 'hop { nth_argmax { filter_eq { all_rows ; definition ; civil parish } ; population ; 1 } ; town }'}, 'ripon'], 'result': True, 'ind': 3, 'tostr': 'eq { hop { nth_argmax { filter_eq { all_rows ; definition ; civil parish } ; population ; 1 } ; town } ; ripon } = true', 'tointer': 'select the rows whose definition record fuzzily matches to civil parish . select the row whose population record of these rows is 1st maximum . the town record of this row is ripon .'} | eq { hop { nth_argmax { filter_eq { all_rows ; definition ; civil parish } ; population ; 1 } ; town } ; ripon } = true | select the rows whose definition record fuzzily matches to civil parish . select the row whose population record of these rows is 1st maximum . the town record of this row is ripon . 
| 4 | 4 | {'str_eq_3': 3, 'result_4': 4, 'str_hop_2': 2, 'nth_argmax_1': 1, 'filter_str_eq_0': 0, 'all_rows_5': 5, 'definition_6': 6, 'civil parish_7': 7, 'population_8': 8, '1_9': 9, 'town_10': 10, 'ripon_11': 11} | {'str_eq_3': 'str_eq', 'result_4': 'true', 'str_hop_2': 'str_hop', 'nth_argmax_1': 'nth_argmax', 'filter_str_eq_0': 'filter_str_eq', 'all_rows_5': 'all_rows', 'definition_6': 'definition', 'civil parish_7': 'civil parish', 'population_8': 'population', '1_9': '1', 'town_10': 'town', 'ripon_11': 'ripon'} | {'str_eq_3': [4], 'result_4': [], 'str_hop_2': [3], 'nth_argmax_1': [2], 'filter_str_eq_0': [1], 'all_rows_5': [0], 'definition_6': [0], 'civil parish_7': [0], 'population_8': [1], '1_9': [1], 'town_10': [2], 'ripon_11': [3]} | ['rank', 'town', 'population', 'year', 'borough', 'definition'] | [['1', 'harrogate', '71594', '2001', 'harrogate', 'town'], ['2', 'scarborough', '50135', '2001', 'scarborough', 'town'], ['3', 'acomb', '22215', '2001', 'york', 'town'], ['4', 'ripon', '15922', '2001', 'harrogate', 'civil parish'], ['5', 'northallerton', '15741', '2001', 'hambleton', 'civil parish'], ['6', 'knaresborough', '14740', '2001', 'harrogate', 'civil parish'], ['7', 'skipton', '14313', '2001', 'craven', 'civil parish'], ['8', 'whitby', '13594', '2001', 'scarborough', 'civil parish'], ['9', 'selby', '13012', '2001', 'selby', 'civil parish'], ['10', 'richmond', '8178', '2001', 'richmondshire', 'civil parish'], ['11', 'tadcaster', '7000', '2001', 'selby', 'civil parish'], ['12', 'norton', '6943', '2001', 'ryedale', 'civil parish'], ['13', 'pickering', '6846', '2001', 'ryedale', 'civil parish'], ['14', 'filey', '6819', '2001', 'scarborough', 'civil parish'], ['15', 'sherburn - in - elmet', '6221', '2001', 'selby', 'civil parish']] |
ranked list of cuban provinces | https://en.wikipedia.org/wiki/Ranked_list_of_Cuban_provinces | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-1154469-1.html.csv | comparative | santiago de cuba has a higher population density than pinar del río . | {'row_1': '14', 'row_2': '12', 'col': '7', 'col_other': '1', 'relation': 'greater', 'record_mentioned': 'no', 'diff_result': None} | {'func': 'greater', 'args': [{'func': 'num_hop', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'province', 'santiago de cuba'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose province record fuzzily matches to santiago de cuba .', 'tostr': 'filter_eq { all_rows ; province ; santiago de cuba }'}, 'density'], 'result': None, 'ind': 2, 'tostr': 'hop { filter_eq { all_rows ; province ; santiago de cuba } ; density }', 'tointer': 'select the rows whose province record fuzzily matches to santiago de cuba . take the density record of this row .'}, {'func': 'num_hop', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'province', 'pinar del río'], 'result': None, 'ind': 1, 'tointer': 'select the rows whose province record fuzzily matches to pinar del río .', 'tostr': 'filter_eq { all_rows ; province ; pinar del río }'}, 'density'], 'result': None, 'ind': 3, 'tostr': 'hop { filter_eq { all_rows ; province ; pinar del río } ; density }', 'tointer': 'select the rows whose province record fuzzily matches to pinar del río . take the density record of this row .'}], 'result': True, 'ind': 4, 'tostr': 'greater { hop { filter_eq { all_rows ; province ; santiago de cuba } ; density } ; hop { filter_eq { all_rows ; province ; pinar del río } ; density } } = true', 'tointer': 'select the rows whose province record fuzzily matches to santiago de cuba . take the density record of this row . select the rows whose province record fuzzily matches to pinar del río . take the density record of this row . the first record is greater than the second record .'} | greater { hop { filter_eq { all_rows ; province ; santiago de cuba } ; density } ; hop { filter_eq { all_rows ; province ; pinar del río } ; density } } = true | select the rows whose province record fuzzily matches to santiago de cuba . take the density record of this row . select the rows whose province record fuzzily matches to pinar del río . take the density record of this row . the first record is greater than the second record . 
| 5 | 5 | {'greater_4': 4, 'result_5': 5, 'num_hop_2': 2, 'filter_str_eq_0': 0, 'all_rows_6': 6, 'province_7': 7, 'santiago de cuba_8': 8, 'density_9': 9, 'num_hop_3': 3, 'filter_str_eq_1': 1, 'all_rows_10': 10, 'province_11': 11, 'pinar del río_12': 12, 'density_13': 13} | {'greater_4': 'greater', 'result_5': 'true', 'num_hop_2': 'num_hop', 'filter_str_eq_0': 'filter_str_eq', 'all_rows_6': 'all_rows', 'province_7': 'province', 'santiago de cuba_8': 'santiago de cuba', 'density_9': 'density', 'num_hop_3': 'num_hop', 'filter_str_eq_1': 'filter_str_eq', 'all_rows_10': 'all_rows', 'province_11': 'province', 'pinar del río_12': 'pinar del río', 'density_13': 'density'} | {'greater_4': [5], 'result_5': [], 'num_hop_2': [4], 'filter_str_eq_0': [2], 'all_rows_6': [0], 'province_7': [0], 'santiago de cuba_8': [0], 'density_9': [2], 'num_hop_3': [4], 'filter_str_eq_1': [3], 'all_rows_10': [1], 'province_11': [1], 'pinar del río_12': [1], 'density_13': [3]} | ['province', 'capital', 'population ( 2005 )', 'population ( % )', 'area ( km square )', 'area ( % )', 'density'] | [['camagüey', 'camagüey', '784178', '7.02', '14134', '13.2', '50.22'], ['ciego de ávila', 'ciego de ávila', '411766', '3.68', '5962', '5.6', '60.70'], ['cienfuegos', 'cienfuegos', '395183', '3.54', '4149', '3.9', '94.54'], ['ciudad de la habana', 'havana', '2201610', '19.70', '740', '0.7', '3053.49'], ['granma', 'bayamo', '822452', '7.36', '8452', '7.9', '98.20'], ['guantánamo', 'guantánamo', '507118', '4.54', '6366', '6.0', '82.22'], ['holguín', 'holguín', '1021321', '9.14', '9105', '8.5', '109.90'], ['isla de la juventud', 'nueva gerona', '86559', '0.77', '2199', '2.1', '35.78'], ['la habana ( 1976 - 2011 )', 'havana', '711066', '6.36', '5669', '5.3', '124.06'], ['las tunas', 'las tunas', '525485', '4.70', '6373', '6.0', '79.77'], ['matanzas', 'matanzas', '670427', '6.00', '11669', '10', '56.80'], ['pinar del río', 'pinar del río', '726574', '6.50', '10860', '10.2', '66.63'], ['sancti spíritus', 'sancti spíritus', '460328', '4.12', '6737', '6.3', '68.33'], ['santiago de cuba', 'santiago de cuba', '1036281', '9.27', '6343', '5.9', '168.32'], ['villa clara', 'santa clara', '817395', '7.31', '8069', '7.6', '97.17'], ['cuba', 'havana', '11177743', '100', '106827', '100', '101.72']] |
1977 dallas cowboys season | https://en.wikipedia.org/wiki/1977_Dallas_Cowboys_season | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-15847691-2.html.csv | aggregation | the average number of first downs for all games during the 1977 dallas cowboys season is 19.42 . | {'scope': 'all', 'col': '8', 'type': 'average', 'result': '19.42', 'subset': None} | {'func': 'round_eq', 'args': [{'func': 'avg', 'args': ['all_rows', 'first downs'], 'result': '19.42', 'ind': 0, 'tostr': 'avg { all_rows ; first downs }'}, '19.42'], 'result': True, 'ind': 1, 'tostr': 'round_eq { avg { all_rows ; first downs } ; 19.42 } = true', 'tointer': 'the average of the first downs record of all rows is 19.42 .'} | round_eq { avg { all_rows ; first downs } ; 19.42 } = true | the average of the first downs record of all rows is 19.42 . | 2 | 2 | {'eq_1': 1, 'result_2': 2, 'avg_0': 0, 'all_rows_3': 3, 'first downs_4': 4, '19.42_5': 5} | {'eq_1': 'eq', 'result_2': 'true', 'avg_0': 'avg', 'all_rows_3': 'all_rows', 'first downs_4': 'first downs', '19.42_5': '19.42'} | {'eq_1': [2], 'result_2': [], 'avg_0': [1], 'all_rows_3': [0], 'first downs_4': [0], '19.42_5': [1]} | ['week', 'date', 'result', 'record', 'opponent', 'points for', 'points against', 'first downs', 'attendance'] | [['1', 'september 18', 'win', '1 - 0', 'minnesota vikings', '16', '10', '16', '47678'], ['2', 'september 25', 'win', '2 - 0', 'new york giants', '41', '21', '25', '64215'], ['3', 'october 2', 'win', '3 - 0', 'tampa bay buccaneers', '23', '7', '23', '55316'], ['4', 'october 9', 'win', '4 - 0', 'st louis cardinals', '30', '24', '22', '50129'], ['5', 'october 16', 'win', '5 - 0', 'washington redskins', '34', '16', '23', '62115'], ['6', 'october 23', 'win', '6 - 0', 'philadelphia eagles', '16', '10', '17', '65507'], ['7', 'october 30', 'win', '7 - 0', 'detroit lions', '37', '0', '20', '63160'], ['8', 'november 6', 'win', '8 - 0', 'new york giants', '24', '10', '13', '74532'], ['9', 'november 14', 'loss', '8 - 1', 'st louis cardinals', '17', '24', '16', '64038'], ['10', 'november 20', 'loss', '8 - 2', 'pittsburgh steelers', '13', '28', '20', '49761'], ['11', 'november 27', 'win', '9 - 2', 'washington redskins', '14', '7', '19', '55031'], ['12', 'december 4', 'win', '10 - 2', 'philadelphia eagles', '24', '14', '19', '60289'], ['13', 'december 12', 'win', '11 - 2', 'san francisco 49ers', '42', '35', '24', '55851'], ['14', 'december 18', 'win', '12 - 2', 'denver broncos', '14', '6', '15', '63752']] |
list of tallest buildings in albuquerque | https://en.wikipedia.org/wiki/List_of_tallest_buildings_in_Albuquerque | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-12982226-3.html.csv | majority | all buildings in gold avenue sw have a height of 180 ft or more in the list of tallest buildings in albuquerque . | {'scope': 'subset', 'col': '4', 'most_or_all': 'all', 'criterion': 'greater_than_eq', 'value': '180', 'subset': {'col': '2', 'criterion': 'fuzzily_match', 'value': 'gold avenue sw'}} | {'func': 'all_greater_eq', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'street address', 'gold avenue sw'], 'result': None, 'ind': 0, 'tostr': 'filter_eq { all_rows ; street address ; gold avenue sw }', 'tointer': 'select the rows whose street address record fuzzily matches to gold avenue sw .'}, 'height ft / m', '180'], 'result': True, 'ind': 1, 'tointer': 'select the rows whose street address record fuzzily matches to gold avenue sw . for the height ft / m records of these rows , all of them are greater than or equal to 180 .', 'tostr': 'all_greater_eq { filter_eq { all_rows ; street address ; gold avenue sw } ; height ft / m ; 180 } = true'} | all_greater_eq { filter_eq { all_rows ; street address ; gold avenue sw } ; height ft / m ; 180 } = true | select the rows whose street address record fuzzily matches to gold avenue sw . for the height ft / m records of these rows , all of them are greater than or equal to 180 . | 2 | 2 | {'all_greater_eq_1': 1, 'result_2': 2, 'filter_str_eq_0': 0, 'all_rows_3': 3, 'street address_4': 4, 'gold avenue sw_5': 5, 'height ft / m_6': 6, '180_7': 7} | {'all_greater_eq_1': 'all_greater_eq', 'result_2': 'true', 'filter_str_eq_0': 'filter_str_eq', 'all_rows_3': 'all_rows', 'street address_4': 'street address', 'gold avenue sw_5': 'gold avenue sw', 'height ft / m_6': 'height ft / m', '180_7': '180'} | {'all_greater_eq_1': [2], 'result_2': [], 'filter_str_eq_0': [1], 'all_rows_3': [0], 'street address_4': [0], 'gold avenue sw_5': [0], 'height ft / m_6': [1], '180_7': [1]} | ['name', 'street address', 'years as tallest', 'height ft / m', 'floors'] | [['first national bank building', '217 - 233 central avenue nw', '1922 - 1954', '141 / 43', '9'], ['simms building', '400 gold avenue sw', '1954 - 1961', '180 / 55', '13'], ['gold building', '320 gold avenue sw', '1961 - 1963', '203 / 62', '14'], ['bank of the west tower', '5301 central avenue ne', '1963 - 1966', '213 / 65', '17'], ['compass bank building', '505 marquette avenue nw', '1966 - 1990', '238 / 73', '18'], ['albuquerque plaza office tower', '201 third street nw', '1990 - present', '351 / 107', '22']] |
alto de l'angliru | https://en.wikipedia.org/wiki/Alto_de_L%27Angliru | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-1756060-2.html.csv | superlative | roberto heras is the fastest person to have ever ascended alto de l'angliru . | {'scope': 'all', 'col_superlative': '3', 'row_superlative': '1', 'value_mentioned': 'no', 'max_or_min': 'min', 'other_col': '5', 'subset': None} | {'func': 'str_eq', 'args': [{'func': 'str_hop', 'args': [{'func': 'argmin', 'args': ['all_rows', 'ascent time'], 'result': None, 'ind': 0, 'tostr': 'argmin { all_rows ; ascent time }'}, 'rider'], 'result': 'roberto heras ( esp )', 'ind': 1, 'tostr': 'hop { argmin { all_rows ; ascent time } ; rider }'}, 'roberto heras ( esp )'], 'result': True, 'ind': 2, 'tostr': 'eq { hop { argmin { all_rows ; ascent time } ; rider } ; roberto heras ( esp ) } = true', 'tointer': 'select the row whose ascent time record of all rows is minimum . the rider record of this row is roberto heras ( esp ) .'} | eq { hop { argmin { all_rows ; ascent time } ; rider } ; roberto heras ( esp ) } = true | select the row whose ascent time record of all rows is minimum . the rider record of this row is roberto heras ( esp ) . | 3 | 3 | {'str_eq_2': 2, 'result_3': 3, 'str_hop_1': 1, 'argmin_0': 0, 'all_rows_4': 4, 'ascent time_5': 5, 'rider_6': 6, 'roberto heras ( esp )_7': 7} | {'str_eq_2': 'str_eq', 'result_3': 'true', 'str_hop_1': 'str_hop', 'argmin_0': 'argmin', 'all_rows_4': 'all_rows', 'ascent time_5': 'ascent time', 'rider_6': 'rider', 'roberto heras ( esp )_7': 'roberto heras ( esp )'} | {'str_eq_2': [3], 'result_3': [], 'str_hop_1': [2], 'argmin_0': [1], 'all_rows_4': [0], 'ascent time_5': [0], 'rider_6': [1], 'roberto heras ( esp )_7': [2]} | ['rank', 'year', 'ascent time', 'speed', 'rider'] | [['1', '2000', '41:55', '18.32 km / h', 'roberto heras ( esp )'], ['2', '2013', '43:07', '17.81 km / h', 'chris horner ( usa )'], ['3', '2008', '43:12', '17.78 km / h', 'alberto contador ( esp )'], ['4', '2000', '43:24', '17.70 km / h', 'pavel tonkov ( rus )'], ['5', '2000', '43:24', '17.70 km / h', 'roberto laiseka ( esp )'], ['6', '2013', '43:35', '17.62 km / h', 'alejandro valverde ( esp )'], ['7', '2013', '43:35', '17.62 km / h', 'vincenzo nibali ( ita )'], ['8', '2008', '43:54', '17.49 km / h', 'alejandro valverde ( esp )'], ['9', '2002', '43:55', '17.49 km / h', 'roberto heras ( esp )'], ['10', '2011', '43:57', '17.47 km / h', 'juan jose cobo ( esp )'], ['11', '2008', '44:10', '17.39 km / h', 'joaquim rodriguez ( esp )'], ['12', '2000', '44:13', '17.37 km / h', 'raimondas rumšas ( lit )'], ['13', '2008', '44:17', '17.34 km / h', 'levi leipheimer ( usa )']] |
the bachelorette | https://en.wikipedia.org/wiki/The_Bachelorette | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-174953-1.html.csv | unique | season 3 was the only season that did not have a winner . | {'scope': 'all', 'row': '3', 'col': '5', 'col_other': '1', 'criterion': 'equal', 'value': 'none', 'subset': None} | {'func': 'and', 'args': [{'func': 'only', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'winner', 'none'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose winner record fuzzily matches to none .', 'tostr': 'filter_eq { all_rows ; winner ; none }'}], 'result': True, 'ind': 1, 'tostr': 'only { filter_eq { all_rows ; winner ; none } }', 'tointer': 'select the rows whose winner record fuzzily matches to none . there is only one such row in the table .'}, {'func': 'eq', 'args': [{'func': 'num_hop', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'winner', 'none'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose winner record fuzzily matches to none .', 'tostr': 'filter_eq { all_rows ; winner ; none }'}, 'season'], 'result': '3', 'ind': 2, 'tostr': 'hop { filter_eq { all_rows ; winner ; none } ; season }'}, '3'], 'result': True, 'ind': 3, 'tostr': 'eq { hop { filter_eq { all_rows ; winner ; none } ; season } ; 3 }', 'tointer': 'the season record of this unqiue row is 3 .'}], 'result': True, 'ind': 4, 'tostr': 'and { only { filter_eq { all_rows ; winner ; none } } ; eq { hop { filter_eq { all_rows ; winner ; none } ; season } ; 3 } } = true', 'tointer': 'select the rows whose winner record fuzzily matches to none . there is only one such row in the table . the season record of this unqiue row is 3 .'} | and { only { filter_eq { all_rows ; winner ; none } } ; eq { hop { filter_eq { all_rows ; winner ; none } ; season } ; 3 } } = true | select the rows whose winner record fuzzily matches to none . there is only one such row in the table . the season record of this unqiue row is 3 . 
| 6 | 5 | {'and_4': 4, 'result_5': 5, 'only_1': 1, 'filter_str_eq_0': 0, 'all_rows_6': 6, 'winner_7': 7, 'none_8': 8, 'eq_3': 3, 'num_hop_2': 2, 'season_9': 9, '3_10': 10} | {'and_4': 'and', 'result_5': 'true', 'only_1': 'only', 'filter_str_eq_0': 'filter_str_eq', 'all_rows_6': 'all_rows', 'winner_7': 'winner', 'none_8': 'none', 'eq_3': 'eq', 'num_hop_2': 'num_hop', 'season_9': 'season', '3_10': '3'} | {'and_4': [5], 'result_5': [], 'only_1': [4], 'filter_str_eq_0': [1, 2], 'all_rows_6': [0], 'winner_7': [0], 'none_8': [0], 'eq_3': [4], 'num_hop_2': [3], 'season_9': [2], '3_10': [3]} | ['season', 'premiered', 'bachelorette', 'profile', 'winner', 'runner ( s ) - up', 'proposal'] | [['1', 'january 8 , 2003', 'trista rehn', 'physical therapist', 'ryan sutter', 'charlie maher', 'yes'], ['2', 'january 14 , 2004', 'meredith phillips', 'makeup artist', 'ian mckee', 'matthew hickl', 'yes'], ['3', 'january 10 , 2005', 'jennifer schefft', 'publicist', 'none', 'jerry ferris and john paul merritt', 'no'], ['4', 'may 19 , 2008', 'deanna pappas', 'real estate agent', 'jesse csincsak', 'jason mesnick', 'yes'], ['5', 'may 18 , 2009', 'jillian harris', 'interior designer', 'ed swiderski', 'kiptyn locke', 'yes'], ['6', 'may 24 , 2010', 'ali fedotowsky', 'advertising account manager', 'roberto martinez', 'chris lambton', 'yes'], ['7', 'may 23 , 2011', 'ashley hebert', 'dental student', 'jp rosenbaum', 'ben flajnik', 'yes'], ['8', 'may 14 , 2012', 'emily maynard', "children 's hospital event planner", 'jef holm', 'arie luyendyk , jr', 'yes'], ['9', 'may 27 , 2013', 'desiree hartsock', 'bridal stylist', 'chris siegfried', 'drew kenney', 'yes']] |
1982 all - ireland senior hurling championship | https://en.wikipedia.org/wiki/1982_All-Ireland_Senior_Hurling_Championship | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-10577744-2.html.csv | count | two of the players were from kilkenny county . | {'scope': 'all', 'criterion': 'equal', 'value': 'kilkenny', 'result': '2', 'col': '3', 'subset': None} | {'func': 'eq', 'args': [{'func': 'count', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'county', 'kilkenny'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose county record fuzzily matches to kilkenny .', 'tostr': 'filter_eq { all_rows ; county ; kilkenny }'}], 'result': '2', 'ind': 1, 'tostr': 'count { filter_eq { all_rows ; county ; kilkenny } }', 'tointer': 'select the rows whose county record fuzzily matches to kilkenny . the number of such rows is 2 .'}, '2'], 'result': True, 'ind': 2, 'tostr': 'eq { count { filter_eq { all_rows ; county ; kilkenny } } ; 2 } = true', 'tointer': 'select the rows whose county record fuzzily matches to kilkenny . the number of such rows is 2 .'} | eq { count { filter_eq { all_rows ; county ; kilkenny } } ; 2 } = true | select the rows whose county record fuzzily matches to kilkenny . the number of such rows is 2 . | 3 | 3 | {'eq_2': 2, 'result_3': 3, 'count_1': 1, 'filter_str_eq_0': 0, 'all_rows_4': 4, 'county_5': 5, 'kilkenny_6': 6, '2_7': 7} | {'eq_2': 'eq', 'result_3': 'true', 'count_1': 'count', 'filter_str_eq_0': 'filter_str_eq', 'all_rows_4': 'all_rows', 'county_5': 'county', 'kilkenny_6': 'kilkenny', '2_7': '2'} | {'eq_2': [3], 'result_3': [], 'count_1': [2], 'filter_str_eq_0': [1], 'all_rows_4': [0], 'county_5': [0], 'kilkenny_6': [0], '2_7': [2]} | ['rank', 'player', 'county', 'tally', 'total', 'matches', 'average'] | [['1', 'pádraig horan', 'offaly', '5 - 17', '32', '4', '8.00'], ['2', 'billy fitzpatrick', 'kilkenny', '2 - 24', '30', '4', '7.50'], ['3', "tony o ' sullivan", 'cork', '0 - 28', '28', '4', '7.00'], ['4', 'p j molloy', 'galway', '3 - 11', '20', '2', '10.00'], ['5', 'christy heffernan', 'kilkenny', '3 - 9', '18', '4', '4.50'], ['5', 'pat horgan', 'cork', '0 - 18', '18', '4', '4.50']] |
zambia national under - 20 football team | https://en.wikipedia.org/wiki/Zambia_national_under-20_football_team | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-18177477-1.html.csv | ordinal | of the tournaments the zambia national under - 20 football team played in , the latest at woodlands stadium ndola was on 29 june 2008 . | {'scope': 'subset', 'row': '3', 'col': '1', 'order': '1', 'col_other': '3', 'max_or_min': 'max_to_min', 'value_mentioned': 'yes', 'subset': {'col': '3', 'criterion': 'equal', 'value': 'woodlands stadium ndola'}} | {'func': 'eq', 'args': [{'func': 'nth_max', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'location', 'woodlands stadium ndola'], 'result': None, 'ind': 0, 'tostr': 'filter_eq { all_rows ; location ; woodlands stadium ndola }', 'tointer': 'select the rows whose location record fuzzily matches to woodlands stadium ndola .'}, 'date', '1'], 'result': '29 june 2008', 'ind': 1, 'tostr': 'nth_max { filter_eq { all_rows ; location ; woodlands stadium ndola } ; date ; 1 }', 'tointer': 'select the rows whose location record fuzzily matches to woodlands stadium ndola . the 1st maximum date record of these rows is 29 june 2008 .'}, '29 june 2008'], 'result': True, 'ind': 2, 'tostr': 'eq { nth_max { filter_eq { all_rows ; location ; woodlands stadium ndola } ; date ; 1 } ; 29 june 2008 } = true', 'tointer': 'select the rows whose location record fuzzily matches to woodlands stadium ndola . the 1st maximum date record of these rows is 29 june 2008 .'} | eq { nth_max { filter_eq { all_rows ; location ; woodlands stadium ndola } ; date ; 1 } ; 29 june 2008 } = true | select the rows whose location record fuzzily matches to woodlands stadium ndola . the 1st maximum date record of these rows is 29 june 2008 . | 3 | 3 | {'eq_2': 2, 'result_3': 3, 'nth_max_1': 1, 'filter_str_eq_0': 0, 'all_rows_4': 4, 'location_5': 5, 'woodlands stadium ndola_6': 6, 'date_7': 7, '1_8': 8, '29 june 2008_9': 9} | {'eq_2': 'eq', 'result_3': 'true', 'nth_max_1': 'nth_max', 'filter_str_eq_0': 'filter_str_eq', 'all_rows_4': 'all_rows', 'location_5': 'location', 'woodlands stadium ndola_6': 'woodlands stadium ndola', 'date_7': 'date', '1_8': '1', '29 june 2008_9': '29 june 2008'} | {'eq_2': [3], 'result_3': [], 'nth_max_1': [2], 'filter_str_eq_0': [1], 'all_rows_4': [0], 'location_5': [0], 'woodlands stadium ndola_6': [0], 'date_7': [1], '1_8': [1], '29 june 2008_9': [2]} | ['date', 'tournament', 'location', 'home team', 'away team'] | [['28 may 2008', 'international friendly', 'king abdul aziz makkah', 'saudi arabia', 'zambia'], ['27 june 2008', '2009 african youth championship ( qualifiers )', 'woodlands stadium ndola', 'zambia', 'zambia'], ['29 june 2008', '2009 african youth championship ( qualifiers )', 'woodlands stadium ndola', 'zambia', 'mauritius'], ['13 july 2008', '2009 african youth championship ( qualifiers )', 'stade germain comarmond bambous', 'mauritius', 'zambia'], ['28 september 2008', '2009 african youth championship ( qualifiers )', 'alexandria', 'egypt', 'zambia']] |
estonia in the eurovision song contest 2002 | https://en.wikipedia.org/wiki/Estonia_in_the_Eurovision_Song_Contest_2002 | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-12676284-1.html.csv | unique | for estonians in the eurovision song contest in 2002 , the only artist with over 80 votes is sahlene . | {'scope': 'all', 'row': '9', 'col': '4', 'col_other': '2', 'criterion': 'greater_than', 'value': '80', 'subset': None} | {'func': 'and', 'args': [{'func': 'only', 'args': [{'func': 'filter_greater', 'args': ['all_rows', 'votes', '80'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose votes record is greater than 80 .', 'tostr': 'filter_greater { all_rows ; votes ; 80 }'}], 'result': True, 'ind': 1, 'tostr': 'only { filter_greater { all_rows ; votes ; 80 } }', 'tointer': 'select the rows whose votes record is greater than 80 . there is only one such row in the table .'}, {'func': 'str_eq', 'args': [{'func': 'str_hop', 'args': [{'func': 'filter_greater', 'args': ['all_rows', 'votes', '80'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose votes record is greater than 80 .', 'tostr': 'filter_greater { all_rows ; votes ; 80 }'}, 'artist'], 'result': 'sahlene', 'ind': 2, 'tostr': 'hop { filter_greater { all_rows ; votes ; 80 } ; artist }'}, 'sahlene'], 'result': True, 'ind': 3, 'tostr': 'eq { hop { filter_greater { all_rows ; votes ; 80 } ; artist } ; sahlene }', 'tointer': 'the artist record of this unqiue row is sahlene .'}], 'result': True, 'ind': 4, 'tostr': 'and { only { filter_greater { all_rows ; votes ; 80 } } ; eq { hop { filter_greater { all_rows ; votes ; 80 } ; artist } ; sahlene } } = true', 'tointer': 'select the rows whose votes record is greater than 80 . there is only one such row in the table . the artist record of this unqiue row is sahlene .'} | and { only { filter_greater { all_rows ; votes ; 80 } } ; eq { hop { filter_greater { all_rows ; votes ; 80 } ; artist } ; sahlene } } = true | select the rows whose votes record is greater than 80 . there is only one such row in the table . the artist record of this unqiue row is sahlene . | 6 | 5 | {'and_4': 4, 'result_5': 5, 'only_1': 1, 'filter_greater_0': 0, 'all_rows_6': 6, 'votes_7': 7, '80_8': 8, 'str_eq_3': 3, 'str_hop_2': 2, 'artist_9': 9, 'sahlene_10': 10} | {'and_4': 'and', 'result_5': 'true', 'only_1': 'only', 'filter_greater_0': 'filter_greater', 'all_rows_6': 'all_rows', 'votes_7': 'votes', '80_8': '80', 'str_eq_3': 'str_eq', 'str_hop_2': 'str_hop', 'artist_9': 'artist', 'sahlene_10': 'sahlene'} | {'and_4': [5], 'result_5': [], 'only_1': [4], 'filter_greater_0': [1, 2], 'all_rows_6': [0], 'votes_7': [0], '80_8': [0], 'str_eq_3': [4], 'str_hop_2': [3], 'artist_9': [2], 'sahlene_10': [3]} | ['draw', 'artist', 'song', 'votes', 'place'] | [['1', 'jaanika vilipo', "i 'm falling", '49', '5'], ['2', 'yvetta kadakas & ivo linna', 'computer love', '14', '10'], ['3', 'maarja kivi', 'a dream', '38', '7'], ['4', 'lea liitmaa & jaagup kreem', 'what if i fell', '31', '9'], ['5', 'gerli padar', 'need a little nothing', '60', '3'], ['6', 'hatuna & riina riistop', 'this is ( what luv can do )', '32', '8'], ['7', 'maarja tõkke', "i 'll never forget", '51', '4'], ['8', 'nightlight duo & cowboys', 'another country song', '65', '2'], ['9', 'sahlene', 'runaway', '85', '1'], ['10', 'julia hillens', "u ca n't", '39', '6']] |
wru division two west | https://en.wikipedia.org/wiki/WRU_Division_Two_West | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-12828723-4.html.csv | aggregation | all clubs in the wru division two west had an average try bonus of 5 . | {'scope': 'all', 'col': '9', 'type': 'average', 'result': '5', 'subset': None} | {'func': 'round_eq', 'args': [{'func': 'avg', 'args': ['all_rows', 'try bonus'], 'result': '5', 'ind': 0, 'tostr': 'avg { all_rows ; try bonus }'}, '5'], 'result': True, 'ind': 1, 'tostr': 'round_eq { avg { all_rows ; try bonus } ; 5 } = true', 'tointer': 'the average of the try bonus record of all rows is 5 .'} | round_eq { avg { all_rows ; try bonus } ; 5 } = true | the average of the try bonus record of all rows is 5 . | 2 | 2 | {'eq_1': 1, 'result_2': 2, 'avg_0': 0, 'all_rows_3': 3, 'try bonus_4': 4, '5_5': 5} | {'eq_1': 'eq', 'result_2': 'true', 'avg_0': 'avg', 'all_rows_3': 'all_rows', 'try bonus_4': 'try bonus', '5_5': '5'} | {'eq_1': [2], 'result_2': [], 'avg_0': [1], 'all_rows_3': [0], 'try bonus_4': [0], '5_5': [1]} | ['club', 'played', 'drawn', 'lost', 'points for', 'points against', 'tries for', 'tries against', 'try bonus', 'losing bonus', 'points'] | [['club', 'played', 'drawn', 'lost', 'points for', 'points against', 'tries for', 'tries against', 'try bonus', 'losing bonus', 'points'], ['bridgend athletic rfc', '22', '0', '6', '523', '303', '68', '31', '10', '4', '78'], ['builth wells rfc', '22', '0', '5', '473', '305', '57', '29', '7', '2', '77'], ['kidwelly rfc', '22', '1', '7', '532', '386', '63', '45', '5', '3', '66'], ['loughor rfc', '22', '1', '8', '532', '388', '69', '43', '9', '1', '64'], ['ammanford rfc', '22', '0', '9', '447', '394', '58', '51', '6', '4', '62'], ['waunarlwydd rfc', '22', '2', '8', '504', '439', '57', '55', '6', '3', '61'], ['pencoed rfc', '22', '0', '9', '425', '328', '53', '36', '4', '4', '60'], ['bp rfc', '22', '1', '12', '367', '358', '39', '43', '2', '7', '47'], ['mumbles rfc', '22', '2', '12', '373', '450', '50', '56', '4', '4', '44'], ['cwmavon rfc', '22', '2', '14', '332', '515', '39', '66', '3', '5', '36'], ['penclawdd rfc', '22', '1', '17', '263', '520', '28', '68', '1', '3', '22'], ['gorseinon rfc', '22', '0', '20', '340', '725', '48', '106', '3', '4', '15']] |
portugal in the eurovision song contest 1996 | https://en.wikipedia.org/wiki/Portugal_in_the_Eurovision_Song_Contest_1996 | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-18994360-1.html.csv | aggregation | the average number of points received by portuguese singers in the 1996 eurovision song contest was 58.1 . | {'scope': 'all', 'col': '4', 'type': 'average', 'result': '58', 'subset': None} | {'func': 'round_eq', 'args': [{'func': 'avg', 'args': ['all_rows', 'points'], 'result': '58', 'ind': 0, 'tostr': 'avg { all_rows ; points }'}, '58'], 'result': True, 'ind': 1, 'tostr': 'round_eq { avg { all_rows ; points } ; 58 } = true', 'tointer': 'the average of the points record of all rows is 58 .'} | round_eq { avg { all_rows ; points } ; 58 } = true | the average of the points record of all rows is 58 . | 2 | 2 | {'eq_1': 1, 'result_2': 2, 'avg_0': 0, 'all_rows_3': 3, 'points_4': 4, '58_5': 5} | {'eq_1': 'eq', 'result_2': 'true', 'avg_0': 'avg', 'all_rows_3': 'all_rows', 'points_4': 'points', '58_5': '58'} | {'eq_1': [2], 'result_2': [], 'avg_0': [1], 'all_rows_3': [0], 'points_4': [0], '58_5': [1]} | ['draw', 'singer', 'song', 'points', 'place'] | [['1', 'vnia maroti', 'start stop', '33', '10'], ['2', 'tó leal', 'eu mesmo', '42', '8'], ['3', 'patricia antunes', 'canto em português', '91', '2'], ['4', 'barbara reis', 'a minha ilha', '43', '7'], ['5', 'elaisa', 'ai a noite', '49', '6'], ['6', 'somseis', 'a canção da paz', '76', '3'], ['7', 'cristina castro pereira', 'ganhamos o ceu', '63', '4'], ['8', 'lúcia moniz', 'o meu coração não tem cor', '95', '1'], ['9', 'pedro miguéis', 'prazer em conhecer', '54', '5'], ['10', 'joão portugal', 'top model', '34', '9']] |
list of dr. floyd episodes | https://en.wikipedia.org/wiki/List_of_Dr._Floyd_episodes | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-10621888-3.html.csv | count | for dr. floyd episodes , there are three episodes with a run time of over 6:00 . | {'scope': 'all', 'criterion': 'greater_than', 'value': '6:00', 'result': '3', 'col': '4', 'subset': None} | {'func': 'eq', 'args': [{'func': 'count', 'args': [{'func': 'filter_greater', 'args': ['all_rows', 'run time', '6:00'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose run time record is greater than 6:00 .', 'tostr': 'filter_greater { all_rows ; run time ; 6:00 }'}], 'result': '3', 'ind': 1, 'tostr': 'count { filter_greater { all_rows ; run time ; 6:00 } }', 'tointer': 'select the rows whose run time record is greater than 6:00 . the number of such rows is 3 .'}, '3'], 'result': True, 'ind': 2, 'tostr': 'eq { count { filter_greater { all_rows ; run time ; 6:00 } } ; 3 } = true', 'tointer': 'select the rows whose run time record is greater than 6:00 . the number of such rows is 3 .'} | eq { count { filter_greater { all_rows ; run time ; 6:00 } } ; 3 } = true | select the rows whose run time record is greater than 6:00 . the number of such rows is 3 . | 3 | 3 | {'eq_2': 2, 'result_3': 3, 'count_1': 1, 'filter_greater_0': 0, 'all_rows_4': 4, 'run time_5': 5, '6:00_6': 6, '3_7': 7} | {'eq_2': 'eq', 'result_3': 'true', 'count_1': 'count', 'filter_greater_0': 'filter_greater', 'all_rows_4': 'all_rows', 'run time_5': 'run time', '6:00_6': '6:00', '3_7': '3'} | {'eq_2': [3], 'result_3': [], 'count_1': [2], 'filter_greater_0': [1], 'all_rows_4': [0], 'run time_5': [0], '6:00_6': [0], '3_7': [2]} | ['episode number', 'title', 'podcast date', 'run time', 'historical references'] | [['301', 'home sweet home !', 'august 1 , 2005', '6:07', 'none'], ['302', 'the adventures of lewis & clark !', 'august 8 , 2005', '4:16', 'meriwether lewis & william clark'], ['303', 'call of the wild !', 'august 14 , 2005', '4:49', 'meriwether lewis & william clark'], ['304', 'the greatest show on earth !', 'august 21 , 2005', '5:16', 'pt barnum'], ['305', 'hitting the bricks !', 'august 28 , 2005', '5:48', 'pt barnum'], ['306', 'fiji queasy !', 'september 4 , 2005', '4:59', 'pt barnum'], ['307', 'accident in time !', 'september 11 , 2005', '5:04', 'none'], ['308', "all 's wells that ends welles !", 'september 18 , 2005', '5:51', 'hg wells & orson welles'], ['309', 'jump the shark !', 'september 25 , 2005', '5:04', 'jumping the shark'], ['310', 'jump the shark ! part ii !', 'october 2 , 2005', '4:36', 'jumping the shark'], ['311', 'annie are you oakley are you oakley , annie !', 'october 9 , 2005', '6:13', 'annie oakley & buffalo bill cody'], ['312', 'reach for the sky !', 'october 16 , 2005', '5:52', 'annie oakley & buffalo bill cody'], ['313', 'as the worm turns !', 'october 23 , 2005', '6:31', 'none']] |
toppserien | https://en.wikipedia.org/wiki/Toppserien | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/1-2522473-1.html.csv | comparative | of the current toppserien teams , the sandviken team has been in the league longer than the kolbotn team . | {'row_1': '9', 'row_2': '5', 'col': '4', 'col_other': '1', 'relation': 'greater', 'record_mentioned': 'no', 'diff_result': None} | {'func': 'greater', 'args': [{'func': 'num_hop', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'team', 'sandviken'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose team record fuzzily matches to sandviken .', 'tostr': 'filter_eq { all_rows ; team ; sandviken }'}, 'in toppserien since'], 'result': None, 'ind': 2, 'tostr': 'hop { filter_eq { all_rows ; team ; sandviken } ; in toppserien since }', 'tointer': 'select the rows whose team record fuzzily matches to sandviken . take the in toppserien since record of this row .'}, {'func': 'num_hop', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'team', 'kolbotn'], 'result': None, 'ind': 1, 'tointer': 'select the rows whose team record fuzzily matches to kolbotn .', 'tostr': 'filter_eq { all_rows ; team ; kolbotn }'}, 'in toppserien since'], 'result': None, 'ind': 3, 'tostr': 'hop { filter_eq { all_rows ; team ; kolbotn } ; in toppserien since }', 'tointer': 'select the rows whose team record fuzzily matches to kolbotn . take the in toppserien since record of this row .'}], 'result': True, 'ind': 4, 'tostr': 'greater { hop { filter_eq { all_rows ; team ; sandviken } ; in toppserien since } ; hop { filter_eq { all_rows ; team ; kolbotn } ; in toppserien since } } = true', 'tointer': 'select the rows whose team record fuzzily matches to sandviken . take the in toppserien since record of this row . select the rows whose team record fuzzily matches to kolbotn . take the in toppserien since record of this row . the first record is greater than the second record .'} | greater { hop { filter_eq { all_rows ; team ; sandviken } ; in toppserien since } ; hop { filter_eq { all_rows ; team ; kolbotn } ; in toppserien since } } = true | select the rows whose team record fuzzily matches to sandviken . take the in toppserien since record of this row . select the rows whose team record fuzzily matches to kolbotn . take the in toppserien since record of this row . the first record is greater than the second record . 
| 5 | 5 | {'greater_4': 4, 'result_5': 5, 'num_hop_2': 2, 'filter_str_eq_0': 0, 'all_rows_6': 6, 'team_7': 7, 'sandviken_8': 8, 'in toppserien since_9': 9, 'num_hop_3': 3, 'filter_str_eq_1': 1, 'all_rows_10': 10, 'team_11': 11, 'kolbotn_12': 12, 'in toppserien since_13': 13} | {'greater_4': 'greater', 'result_5': 'true', 'num_hop_2': 'num_hop', 'filter_str_eq_0': 'filter_str_eq', 'all_rows_6': 'all_rows', 'team_7': 'team', 'sandviken_8': 'sandviken', 'in toppserien since_9': 'in toppserien since', 'num_hop_3': 'num_hop', 'filter_str_eq_1': 'filter_str_eq', 'all_rows_10': 'all_rows', 'team_11': 'team', 'kolbotn_12': 'kolbotn', 'in toppserien since_13': 'in toppserien since'} | {'greater_4': [5], 'result_5': [], 'num_hop_2': [4], 'filter_str_eq_0': [2], 'all_rows_6': [0], 'team_7': [0], 'sandviken_8': [0], 'in toppserien since_9': [2], 'num_hop_3': [4], 'filter_str_eq_1': [3], 'all_rows_10': [1], 'team_11': [1], 'kolbotn_12': [1], 'in toppserien since_13': [3]} | ['team', 'home city', 'home ground', 'in toppserien since', 'first appearance', 'seasons'] | [['amazon grimstad', 'grimstad', 'jj ugland stadion', '2006', '2006', '8'], ['arna - bjørnar', 'ytre arna ( bergen )', 'arna idrettspark', '2006', '1997', '16'], ['avaldsnes', 'avaldsnes', 'avaldsnes idrettssenter', '2013', '2013', '1'], ['klepp', 'kleppe ( stavanger )', 'klepp stadion', '1987', '1987', '27'], ['kolbotn', 'kolbotn ( oslo )', 'sofiemyr', '1995', '1995', '19'], ['lsk kvinner', 'lillestrøm ( oslo )', 'lsk - hallen', '1987', '1987', '27'], ['medkila', 'harstad', 'harstad stadion', '2013', '2004', '3'], ['røa', 'oslo', 'røabanen', '2001', '2001', '13'], ['sandviken', 'bergen', 'stemmemyren', '2011', '1987', '21'], ['stabæk', 'bærum ( oslo )', 'nadderud stadion', '2009', '2009', '5'], ['trondheims - ørn', 'trondheim', 'dnb nor arena', '1987', '1987', '27']] |
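The Toppserien record just above chains filter_eq, hop and greater to compare two single cells. A minimal way to mirror that chain, assuming hop simply takes the named cell of the first matching row, is:

# Sketch of  greater { hop { filter_eq { rows ; team ; sandviken } ; since } ;
#                      hop { filter_eq { rows ; team ; kolbotn } ; since } }
rows = [
    {"team": "kolbotn", "in toppserien since": 1995},
    {"team": "sandviken", "in toppserien since": 2011},
]

def hop(filtered_rows, column):
    """Take the requested cell of the first matching row (assumed semantics)."""
    return filtered_rows[0][column]

sandviken = hop([r for r in rows if r["team"] == "sandviken"], "in toppserien since")
kolbotn = hop([r for r in rows if r["team"] == "kolbotn"], "in toppserien since")
print(sandviken > kolbotn)  # greater -> True (2011 > 1995)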
list of chicago blackhawks statistics and records | https://en.wikipedia.org/wiki/List_of_Chicago_Blackhawks_statistics_and_records | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-16366700-2.html.csv | comparative | out of the chicago blackhawks ' players , stan mikita played in more regular season games than dennis hull . | {'row_1': '2', 'row_2': '5', 'col': '3', 'col_other': '1', 'relation': 'greater', 'record_mentioned': 'no', 'diff_result': None} | {'func': 'greater', 'args': [{'func': 'num_hop', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'name', 'stan mikita'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose name record fuzzily matches to stan mikita .', 'tostr': 'filter_eq { all_rows ; name ; stan mikita }'}, 'regular season'], 'result': None, 'ind': 2, 'tostr': 'hop { filter_eq { all_rows ; name ; stan mikita } ; regular season }', 'tointer': 'select the rows whose name record fuzzily matches to stan mikita . take the regular season record of this row .'}, {'func': 'num_hop', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'name', 'dennis hull'], 'result': None, 'ind': 1, 'tointer': 'select the rows whose name record fuzzily matches to dennis hull .', 'tostr': 'filter_eq { all_rows ; name ; dennis hull }'}, 'regular season'], 'result': None, 'ind': 3, 'tostr': 'hop { filter_eq { all_rows ; name ; dennis hull } ; regular season }', 'tointer': 'select the rows whose name record fuzzily matches to dennis hull . take the regular season record of this row .'}], 'result': True, 'ind': 4, 'tostr': 'greater { hop { filter_eq { all_rows ; name ; stan mikita } ; regular season } ; hop { filter_eq { all_rows ; name ; dennis hull } ; regular season } } = true', 'tointer': 'select the rows whose name record fuzzily matches to stan mikita . take the regular season record of this row . select the rows whose name record fuzzily matches to dennis hull . take the regular season record of this row . the first record is greater than the second record .'} | greater { hop { filter_eq { all_rows ; name ; stan mikita } ; regular season } ; hop { filter_eq { all_rows ; name ; dennis hull } ; regular season } } = true | select the rows whose name record fuzzily matches to stan mikita . take the regular season record of this row . select the rows whose name record fuzzily matches to dennis hull . take the regular season record of this row . the first record is greater than the second record . 
| 5 | 5 | {'greater_4': 4, 'result_5': 5, 'num_hop_2': 2, 'filter_str_eq_0': 0, 'all_rows_6': 6, 'name_7': 7, 'stan mikita_8': 8, 'regular season_9': 9, 'num_hop_3': 3, 'filter_str_eq_1': 1, 'all_rows_10': 10, 'name_11': 11, 'dennis hull_12': 12, 'regular season_13': 13} | {'greater_4': 'greater', 'result_5': 'true', 'num_hop_2': 'num_hop', 'filter_str_eq_0': 'filter_str_eq', 'all_rows_6': 'all_rows', 'name_7': 'name', 'stan mikita_8': 'stan mikita', 'regular season_9': 'regular season', 'num_hop_3': 'num_hop', 'filter_str_eq_1': 'filter_str_eq', 'all_rows_10': 'all_rows', 'name_11': 'name', 'dennis hull_12': 'dennis hull', 'regular season_13': 'regular season'} | {'greater_4': [5], 'result_5': [], 'num_hop_2': [4], 'filter_str_eq_0': [2], 'all_rows_6': [0], 'name_7': [0], 'stan mikita_8': [0], 'regular season_9': [2], 'num_hop_3': [4], 'filter_str_eq_1': [3], 'all_rows_10': [1], 'name_11': [1], 'dennis hull_12': [1], 'regular season_13': [3]} | ['name', 'years', 'regular season', 'playoffs', 'total'] | [['bobby hull', '1957 - 1972', '604', '62', '666'], ['stan mikita', '1959 - 1979', '541', '59', '600'], ['steve larmer', '1981 - 1993', '406', '45', '451'], ['denis savard', '1980 - 1990 1995 - 1997', '377', '61', '438'], ['dennis hull', '1964 - 1977', '298', '33', '331'], ['jeremy roenick', '1988 - 1996', '267', '35', '302'], ['tony amonte', '1994 - 2002', '268', '13', '281'], ['hubert pit martin', '1967 - 1977', '243', '26', '269'], ['bill mosienko', '1942 - 1955', '258', '10', '268'], ['ken wharram', '1952 - 1969', '252', '16', '268']] |
kjetil andré aamodt | https://en.wikipedia.org/wiki/Kjetil_Andr%C3%A9_Aamodt | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-1302729-1.html.csv | ordinal | kjetil andré aamodt 's 2nd lowest position in slalom was in the same year when he was 3rd overall . | {'scope': 'all', 'row': '14', 'col': '3', 'order': '2', 'col_other': '2', 'max_or_min': 'max_to_min', 'value_mentioned': 'no', 'subset': None} | {'func': 'str_eq', 'args': [{'func': 'str_hop', 'args': [{'func': 'nth_argmax', 'args': ['all_rows', 'slalom', '2'], 'result': None, 'ind': 0, 'tostr': 'nth_argmax { all_rows ; slalom ; 2 }'}, 'overall'], 'result': '3', 'ind': 1, 'tostr': 'hop { nth_argmax { all_rows ; slalom ; 2 } ; overall }'}, '3'], 'result': True, 'ind': 2, 'tostr': 'eq { hop { nth_argmax { all_rows ; slalom ; 2 } ; overall } ; 3 } = true', 'tointer': 'select the row whose slalom record of all rows is 2nd maximum . the overall record of this row is 3 .'} | eq { hop { nth_argmax { all_rows ; slalom ; 2 } ; overall } ; 3 } = true | select the row whose slalom record of all rows is 2nd maximum . the overall record of this row is 3 . | 3 | 3 | {'str_eq_2': 2, 'result_3': 3, 'str_hop_1': 1, 'nth_argmax_0': 0, 'all_rows_4': 4, 'slalom_5': 5, '2_6': 6, 'overall_7': 7, '3_8': 8} | {'str_eq_2': 'str_eq', 'result_3': 'true', 'str_hop_1': 'str_hop', 'nth_argmax_0': 'nth_argmax', 'all_rows_4': 'all_rows', 'slalom_5': 'slalom', '2_6': '2', 'overall_7': 'overall', '3_8': '3'} | {'str_eq_2': [3], 'result_3': [], 'str_hop_1': [2], 'nth_argmax_0': [1], 'all_rows_4': [0], 'slalom_5': [0], '2_6': [0], 'overall_7': [1], '3_8': [2]} | ['season', 'overall', 'slalom', 'giant slalom', 'super g', 'downhill', 'combined'] | [['1990', '39', '-', '14', '19', '-', '-'], ['1991', '17', '20', '10', '8', '-', '-'], ['1992', '13', '26', '11', '5', '-', '17'], ['1993', '2', '5', '1', '1', '28', '3'], ['1994', '1', '9', '2', '4', '10', '1'], ['1995', '5', '14', '4', '19', '13', '4'], ['1996', '10', '18', '14', '8', '44', '7'], ['1997', '2', '6', '2', '12', '24', '1'], ['1998', '4', '13', '9', '21', '12', '2'], ['1999', '2', '4', '4', '9', '5', '1'], ['2000', '2', '1', '9', '13', '13', '1'], ['2001', '7', '7', '16', '10', '36', '3'], ['2002', '2', '9', '16', '6', '6', '1'], ['2003', '3', '23', '14', '4', '7', '2'], ['2004', 'broken ankle in october 2003 , out for season', 'broken ankle in october 2003 , out for season', 'broken ankle in october 2003 , out for season', 'broken ankle in october 2003 , out for season', 'broken ankle in october 2003 , out for season', 'broken ankle in october 2003 , out for season'], ['2005', '26', '-', '40', '14', '28', '-'], ['2006', '8', '-', '-', '5', '6', '5']] |
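Ordinal records such as the Aamodt one rely on nth_argmax, i.e. picking the row that holds the n-th largest value of a column. One plausible reading, where the "-" slalom cells are skipped before ranking and the 2004 injury row is left out of this trimmed slice entirely, looks like this:

# Sketch of  eq { hop { nth_argmax { all_rows ; slalom ; 2 } ; overall } ; 3 }
rows = [("1990", "39", "-"), ("1991", "17", "20"), ("1992", "13", "26"),
        ("1993", "2", "5"), ("1994", "1", "9"), ("1995", "5", "14"),
        ("1996", "10", "18"), ("1997", "2", "6"), ("1998", "4", "13"),
        ("1999", "2", "4"), ("2000", "2", "1"), ("2001", "7", "7"),
        ("2002", "2", "9"), ("2003", "3", "23"), ("2005", "26", "-")]
        # (season, overall, slalom)

numeric = [r for r in rows if r[2].isdigit()]          # drop '-' cells
second_best = sorted(numeric, key=lambda r: int(r[2]), reverse=True)[1]  # nth_argmax, n = 2
print(second_best[1] == "3")  # overall record of that row -> True (season 2003)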
trevor taylor | https://en.wikipedia.org/wiki/Trevor_Taylor | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-1226576-1.html.csv | count | trevor taylor raced for team lotus four times between 1961 and 1963 . | {'scope': 'all', 'criterion': 'equal', 'value': 'team lotus', 'result': '4', 'col': '2', 'subset': None} | {'func': 'eq', 'args': [{'func': 'count', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'entrant', 'team lotus'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose entrant record fuzzily matches to team lotus .', 'tostr': 'filter_eq { all_rows ; entrant ; team lotus }'}], 'result': '4', 'ind': 1, 'tostr': 'count { filter_eq { all_rows ; entrant ; team lotus } }', 'tointer': 'select the rows whose entrant record fuzzily matches to team lotus . the number of such rows is 4 .'}, '4'], 'result': True, 'ind': 2, 'tostr': 'eq { count { filter_eq { all_rows ; entrant ; team lotus } } ; 4 } = true', 'tointer': 'select the rows whose entrant record fuzzily matches to team lotus . the number of such rows is 4 .'} | eq { count { filter_eq { all_rows ; entrant ; team lotus } } ; 4 } = true | select the rows whose entrant record fuzzily matches to team lotus . the number of such rows is 4 . | 3 | 3 | {'eq_2': 2, 'result_3': 3, 'count_1': 1, 'filter_str_eq_0': 0, 'all_rows_4': 4, 'entrant_5': 5, 'team lotus_6': 6, '4_7': 7} | {'eq_2': 'eq', 'result_3': 'true', 'count_1': 'count', 'filter_str_eq_0': 'filter_str_eq', 'all_rows_4': 'all_rows', 'entrant_5': 'entrant', 'team lotus_6': 'team lotus', '4_7': '4'} | {'eq_2': [3], 'result_3': [], 'count_1': [2], 'filter_str_eq_0': [1], 'all_rows_4': [0], 'entrant_5': [0], 'team lotus_6': [0], '4_7': [2]} | ['year', 'entrant', 'chassis', 'engine', 'points'] | [['1959', 'ace garage ( rotherham )', 'cooper t51', 'climax straight - 4', '0'], ['1961', 'team lotus', 'lotus 18', 'climax straight - 4', '0'], ['1962', 'team lotus', 'lotus 24', 'climax v8', '6'], ['1962', 'team lotus', 'lotus 25', 'climax v8', '6'], ['1963', 'team lotus', 'lotus 25', 'climax v8', '1'], ['1964', 'british racing partnership', 'brp 1', 'brm v8', '1'], ['1964', 'british racing partnership', 'brp 2', 'brm v8', '1'], ['1964', 'british racing partnership', 'lotus 24', 'brm v8', '1'], ['1966', 'aiden jones / paul emery', 'shannon', 'climax v8', '0']] |
2009 atp world tour masters 1000 | https://en.wikipedia.org/wiki/2009_ATP_World_Tour_Masters_1000 | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-17057363-1.html.csv | majority | the majority of 2009 atp world tour masters 1000 tournaments are played on a hard court surface . | {'scope': 'all', 'col': '6', 'most_or_all': 'most', 'criterion': 'equal', 'value': 'hard', 'subset': None} | {'func': 'most_str_eq', 'args': ['all_rows', 'court surface', 'hard'], 'result': True, 'ind': 0, 'tointer': 'for the court surface records of all rows , most of them fuzzily match to hard .', 'tostr': 'most_eq { all_rows ; court surface ; hard } = true'} | most_eq { all_rows ; court surface ; hard } = true | for the court surface records of all rows , most of them fuzzily match to hard . | 1 | 1 | {'most_str_eq_0': 0, 'result_1': 1, 'all_rows_2': 2, 'court surface_3': 3, 'hard_4': 4} | {'most_str_eq_0': 'most_str_eq', 'result_1': 'true', 'all_rows_2': 'all_rows', 'court surface_3': 'court surface', 'hard_4': 'hard'} | {'most_str_eq_0': [1], 'result_1': [], 'all_rows_2': [0], 'court surface_3': [0], 'hard_4': [0]} | ['tournament', 'country', 'location', 'current venue', 'began', 'court surface'] | [['indian wells masters', 'united states', 'indian wells', 'indian wells tennis garden', '1987', 'hard'], ['miami masters', 'united states', 'miami', 'tennis center at crandon park', '1987', 'hard'], ['monte carlo masters', 'monaco', 'roquebrune - cap - martin , france', 'monte carlo country club', '1897', 'clay'], ['rome masters', 'italy', 'rome', 'foro italico', '1930', 'clay'], ['madrid masters', 'spain', 'madrid', 'park manzanares', '2002', 'clay'], ['canada masters', 'canada', 'montreal / toronto', 'stade uniprix / rexall centre', '1881', 'hard'], ['cincinnati masters', 'united states', 'mason , ohio', 'lindner family tennis center', '1899', 'hard'], ['shanghai masters', 'china', 'shanghai', 'qi zhong stadium', '2009', 'hard'], ['paris masters', 'france', 'paris', 'palais omnisports de paris - bercy', '1968', 'hard ( i )']] |
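Several majority records use most_eq with what the interpretations call a fuzzy match. Reading that match as substring containment, which is an assumption since the matching rule is not spelled out in this file, the ATP surface claim can be checked as follows:

# Sketch of  most_eq { all_rows ; court surface ; hard }
surfaces = ["hard", "hard", "clay", "clay", "clay",
            "hard", "hard", "hard", "hard ( i )"]

matches = sum("hard" in s for s in surfaces)   # fuzzy match -> 6 of 9
print(matches > len(surfaces) / 2)             # most_eq -> True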
2008 in hip hop music | https://en.wikipedia.org/wiki/2008_in_hip_hop_music | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-16444986-2.html.csv | ordinal | artist ti had the second highest amount of 1st week sales in 2008 hip hop music . | {'row': '2', 'col': '4', 'order': '2', 'col_other': '2', 'max_or_min': 'max_to_min', 'value_mentioned': 'no', 'scope': 'all', 'subset': None} | {'func': 'str_eq', 'args': [{'func': 'str_hop', 'args': [{'func': 'nth_argmax', 'args': ['all_rows', '1st week sales', '2'], 'result': None, 'ind': 0, 'tostr': 'nth_argmax { all_rows ; 1st week sales ; 2 }'}, 'artist'], 'result': 'ti', 'ind': 1, 'tostr': 'hop { nth_argmax { all_rows ; 1st week sales ; 2 } ; artist }'}, 'ti'], 'result': True, 'ind': 2, 'tostr': 'eq { hop { nth_argmax { all_rows ; 1st week sales ; 2 } ; artist } ; ti } = true', 'tointer': 'select the row whose 1st week sales record of all rows is 2nd maximum . the artist record of this row is ti .'} | eq { hop { nth_argmax { all_rows ; 1st week sales ; 2 } ; artist } ; ti } = true | select the row whose 1st week sales record of all rows is 2nd maximum . the artist record of this row is ti . | 3 | 3 | {'str_eq_2': 2, 'result_3': 3, 'str_hop_1': 1, 'nth_argmax_0': 0, 'all_rows_4': 4, '1st week sales_5': 5, '2_6': 6, 'artist_7': 7, 'ti_8': 8} | {'str_eq_2': 'str_eq', 'result_3': 'true', 'str_hop_1': 'str_hop', 'nth_argmax_0': 'nth_argmax', 'all_rows_4': 'all_rows', '1st week sales_5': '1st week sales', '2_6': '2', 'artist_7': 'artist', 'ti_8': 'ti'} | {'str_eq_2': [3], 'result_3': [], 'str_hop_1': [2], 'nth_argmax_0': [1], 'all_rows_4': [0], '1st week sales_5': [0], '2_6': [0], 'artist_7': [1], 'ti_8': [2]} | ['number', 'artist', 'album', '1st week sales', '1st week position'] | [['1', 'lil wayne', 'tha carter iii', '1006000', '1'], ['2', 'ti', 'paper trail', '568000', '1'], ['3', 'kanye west', '808s and heartbreak', '450000', '1'], ['4', 'young jeezy', 'the recession', '260000', '1'], ['5', 'the game', 'lax', '240000', '2'], ['6', 'plies', 'definition of real', '215000', '2'], ['7', 'ludacris', 'theater of the mind', '214000', '5'], ['8', 'rick ross', 'trilla', '198000', '1'], ['9', 'nas', 'untitled', '187000', '1'], ['10', 'snoop dogg', "ego trippin '", '137000', '5']] |
shaun murphy ( snooker player ) | https://en.wikipedia.org/wiki/Shaun_Murphy_%28snooker_player%29 | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-1795208-5.html.csv | unique | the only time mark selby was a competitor was in the uk championship . | {'scope': 'all', 'row': '8', 'col': '4', 'col_other': '3', 'criterion': 'equal', 'value': 'mark selby', 'subset': None} | {'func': 'and', 'args': [{'func': 'only', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'opponent in the final', 'mark selby'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose opponent in the final record fuzzily matches to mark selby .', 'tostr': 'filter_eq { all_rows ; opponent in the final ; mark selby }'}], 'result': True, 'ind': 1, 'tostr': 'only { filter_eq { all_rows ; opponent in the final ; mark selby } }', 'tointer': 'select the rows whose opponent in the final record fuzzily matches to mark selby . there is only one such row in the table .'}, {'func': 'str_eq', 'args': [{'func': 'str_hop', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'opponent in the final', 'mark selby'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose opponent in the final record fuzzily matches to mark selby .', 'tostr': 'filter_eq { all_rows ; opponent in the final ; mark selby }'}, 'championship'], 'result': 'uk championship', 'ind': 2, 'tostr': 'hop { filter_eq { all_rows ; opponent in the final ; mark selby } ; championship }'}, 'uk championship'], 'result': True, 'ind': 3, 'tostr': 'eq { hop { filter_eq { all_rows ; opponent in the final ; mark selby } ; championship } ; uk championship }', 'tointer': 'the championship record of this unqiue row is uk championship .'}], 'result': True, 'ind': 4, 'tostr': 'and { only { filter_eq { all_rows ; opponent in the final ; mark selby } } ; eq { hop { filter_eq { all_rows ; opponent in the final ; mark selby } ; championship } ; uk championship } } = true', 'tointer': 'select the rows whose opponent in the final record fuzzily matches to mark selby . there is only one such row in the table . the championship record of this unqiue row is uk championship .'} | and { only { filter_eq { all_rows ; opponent in the final ; mark selby } } ; eq { hop { filter_eq { all_rows ; opponent in the final ; mark selby } ; championship } ; uk championship } } = true | select the rows whose opponent in the final record fuzzily matches to mark selby . there is only one such row in the table . the championship record of this unqiue row is uk championship . 
| 6 | 5 | {'and_4': 4, 'result_5': 5, 'only_1': 1, 'filter_str_eq_0': 0, 'all_rows_6': 6, 'opponent in the final_7': 7, 'mark selby_8': 8, 'str_eq_3': 3, 'str_hop_2': 2, 'championship_9': 9, 'uk championship_10': 10} | {'and_4': 'and', 'result_5': 'true', 'only_1': 'only', 'filter_str_eq_0': 'filter_str_eq', 'all_rows_6': 'all_rows', 'opponent in the final_7': 'opponent in the final', 'mark selby_8': 'mark selby', 'str_eq_3': 'str_eq', 'str_hop_2': 'str_hop', 'championship_9': 'championship', 'uk championship_10': 'uk championship'} | {'and_4': [5], 'result_5': [], 'only_1': [4], 'filter_str_eq_0': [1, 2], 'all_rows_6': [0], 'opponent in the final_7': [0], 'mark selby_8': [0], 'str_eq_3': [4], 'str_hop_2': [3], 'championship_9': [2], 'uk championship_10': [3]} | ['outcome', 'year', 'championship', 'opponent in the final', 'score'] | [['winner', '2005', 'world snooker championship', 'matthew stevens', '18 - 16'], ['runner - up', '2006', 'welsh open', 'stephen lee', '4 - 9'], ['winner', '2007', 'malta cup', 'ryan day', '9 - 4'], ['runner - up', '2008', 'china open', 'stephen maguire', '9 - 10'], ['winner', '2008', 'uk championship', 'marco fu', '10 - 9'], ['runner - up', '2009', 'world snooker championship', 'john higgins', '9 - 18'], ['winner', '2011', 'players tour championship - finals', 'martin gould', '4 - 0'], ['runner - up', '2012', 'uk championship', 'mark selby', '6 - 10']] |
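The unique-row pattern in the Shaun Murphy record, only followed by a hop into the single surviving row, can be sketched the same way; the tuple layout below is a trimmed copy of that record's table.

# Sketch of  only { filter_eq { rows ; opponent ; mark selby } }  and
#            eq { hop { ... ; championship } ; uk championship }
finals = [("2005", "world snooker championship", "matthew stevens"),
          ("2006", "welsh open", "stephen lee"),
          ("2007", "malta cup", "ryan day"),
          ("2008", "china open", "stephen maguire"),
          ("2008", "uk championship", "marco fu"),
          ("2009", "world snooker championship", "john higgins"),
          ("2011", "players tour championship - finals", "martin gould"),
          ("2012", "uk championship", "mark selby")]

selby_rows = [row for row in finals if row[2] == "mark selby"]  # filter_eq
print(len(selby_rows) == 1)                    # only -> True
print(selby_rows[0][1] == "uk championship")   # hop + eq -> True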
charmed ( season 3 ) | https://en.wikipedia.org/wiki/Charmed_%28season_3%29 | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/1-21165255-1.html.csv | ordinal | sin francisco had the second lowest number of viewers during season 3 of charmed . | {'row': '18', 'col': '8', 'order': '2', 'col_other': '3', 'max_or_min': 'min_to_max', 'value_mentioned': 'no', 'scope': 'all', 'subset': None} | {'func': 'str_eq', 'args': [{'func': 'str_hop', 'args': [{'func': 'nth_argmin', 'args': ['all_rows', 'us viewers ( millions )', '2'], 'result': None, 'ind': 0, 'tostr': 'nth_argmin { all_rows ; us viewers ( millions ) ; 2 }'}, 'title'], 'result': 'sin francisco', 'ind': 1, 'tostr': 'hop { nth_argmin { all_rows ; us viewers ( millions ) ; 2 } ; title }'}, 'sin francisco'], 'result': True, 'ind': 2, 'tostr': 'eq { hop { nth_argmin { all_rows ; us viewers ( millions ) ; 2 } ; title } ; sin francisco } = true', 'tointer': 'select the row whose us viewers ( millions ) record of all rows is 2nd minimum . the title record of this row is sin francisco .'} | eq { hop { nth_argmin { all_rows ; us viewers ( millions ) ; 2 } ; title } ; sin francisco } = true | select the row whose us viewers ( millions ) record of all rows is 2nd minimum . the title record of this row is sin francisco . | 3 | 3 | {'str_eq_2': 2, 'result_3': 3, 'str_hop_1': 1, 'nth_argmin_0': 0, 'all_rows_4': 4, 'us viewers (millions)_5': 5, '2_6': 6, 'title_7': 7, 'sin francisco_8': 8} | {'str_eq_2': 'str_eq', 'result_3': 'true', 'str_hop_1': 'str_hop', 'nth_argmin_0': 'nth_argmin', 'all_rows_4': 'all_rows', 'us viewers (millions)_5': 'us viewers ( millions )', '2_6': '2', 'title_7': 'title', 'sin francisco_8': 'sin francisco'} | {'str_eq_2': [3], 'result_3': [], 'str_hop_1': [2], 'nth_argmin_0': [1], 'all_rows_4': [0], 'us viewers (millions)_5': [0], '2_6': [0], 'title_7': [1], 'sin francisco_8': [2]} | ['no in series', 'no in season', 'title', 'directed by', 'written by', 'original air date', 'production code', 'us viewers ( millions )'] | [['45', '1', "the honeymoon 's over", 'jim conway', 'brad kern', 'october 5 , 2000', '4300045', '7.7'], ['46', '2', 'magic hour', 'john behring', 'zack estrin & chris levinson', 'october 12 , 2000', '4300046', '5.1'], ['47', '3', 'once upon a time', 'joel j feigenbaum', 'krista vernoff', 'october 19 , 2000', '4300047', '5.4'], ['48', '4', "all halliwell 's eve", 'anson williams', 'sheryl j anderson', 'october 26 , 2000', '4300048', '6.5'], ['49', '5', 'sight unseen', 'perry lang', 'william schmidt', 'november 2 , 2000', '4300049', '5.7'], ['50', '6', 'primrose empath', 'mel damski', 'daniel cerone', 'november 9 , 2000', '4300051', '6.1'], ['51', '7', 'power outage', 'craig zisk', 'monica breen & alison schapker', 'november 16 , 2000', '4300050', '5.7'], ['52', '8', 'sleuthing with the enemy', 'noel nosseck', 'peter hume', 'december 14 , 2000', '4300052', '5.5'], ['53', '9', 'coyote piper', 'chris long', 'krista vernoff', 'january 11 , 2001', '4300053', '5.1'], ['54', '10', 'we all scream for ice cream', 'allan kroeker', 'chris levinson & zack estrin', 'january 18 , 2001', '4300054', '5.4'], ['55', '11', 'blinded by the whitelighter', 'david straiton', 'nell scovell', 'january 25 , 2001', '4300055', '5.4'], ['56', '12', 'wrestling with demons', 'joel j feigenbaum', 'sheryl j anderson', 'february 1 , 2001', '4300056', '5.9'], ['57', '13', 'bride and gloom', 'chris long', 'william schmidt', 'february 8 , 2001', '4300057', '5.4'], ['58', '14', 'the good , the bad and the cursed', 
'shannen doherty', 'monica breen & alison schapker', 'february 15 , 2001', '4300058', '5.1'], ['59', '15', 'just harried', 'mel damski', 'daniel cerone', 'february 22 , 2001', '4300059', '5.8'], ['60', '16', 'death takes a halliwell', 'jon pare', 'krista vernoff', 'march 15 , 2001', '4300060', '5.4'], ['61', '17', 'pre - witched', 'david straiton', 'chris levinson & zack estrin', 'march 22 , 2001', '4300061', '5.1'], ['62', '18', 'sin francisco', 'joel j feigenbaum', 'nell scovell', 'april 19 , 2001', '4300062', '4.0'], ['63', '19', 'the demon who came in from the cold', 'anson williams', 'sheryl j anderson', 'april 26 , 2001', '4300063', '3.6'], ['64', '20', 'exit strategy', 'joel j feigenbaum', 'peter hume & daniel cerone', 'may 3 , 2001', '4300064', '4.1']] |
european parliament election , 1989 ( ireland ) | https://en.wikipedia.org/wiki/European_Parliament_election%2C_1989_%28Ireland%29 | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-13564557-2.html.csv | aggregation | in the 1989 european parliament election , the constituencies of ireland had an average of 13 candidates . | {'scope': 'all', 'col': '8', 'type': 'average', 'result': '13', 'subset': None} | {'func': 'round_eq', 'args': [{'func': 'avg', 'args': ['all_rows', 'candidates'], 'result': '13', 'ind': 0, 'tostr': 'avg { all_rows ; candidates }'}, '13'], 'result': True, 'ind': 1, 'tostr': 'round_eq { avg { all_rows ; candidates } ; 13 } = true', 'tointer': 'the average of the candidates record of all rows is 13 .'} | round_eq { avg { all_rows ; candidates } ; 13 } = true | the average of the candidates record of all rows is 13 . | 2 | 2 | {'eq_1': 1, 'result_2': 2, 'avg_0': 0, 'all_rows_3': 3, 'candidates_4': 4, '13_5': 5} | {'eq_1': 'eq', 'result_2': 'true', 'avg_0': 'avg', 'all_rows_3': 'all_rows', 'candidates_4': 'candidates', '13_5': '13'} | {'eq_1': [2], 'result_2': [], 'avg_0': [1], 'all_rows_3': [0], 'candidates_4': [0], '13_5': [1]} | ['constituency', 'electorate', 'turnout', 'spoilt', 'valid poll', 'quota', 'seats', 'candidates'] | [['connachtulster', '464661', '322664 ( 69.4 % )', '10362 ( 3.2 % )', '312302', '78076', '3', '13'], ['dublin', '711416', '455539 ( 64.0 % )', '7137 ( 1.5 % )', '448402', '89681', '4', '11'], ['leinster', '571694', '391697 ( 68.5 % )', '14106 ( 3.6 % )', '377591', '94398', '3', '15'], ['munster', '703913', '505219 ( 71.7 % )', '10786 ( 2.2 % )', '494433', '82406', '5', '15'], ['total', '2451684', '1675119 ( 68.3 % )', '42391 ( 2.6 % )', '1632728', 'n / a', '15', '44']] |
list of formula one driver records | https://en.wikipedia.org/wiki/List_of_Formula_One_driver_records | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-13599687-60.html.csv | majority | most of the drivers earned less than 70 % of the possible points . | {'scope': 'all', 'col': '5', 'most_or_all': 'most', 'criterion': 'less_than', 'value': '70', 'subset': None} | {'func': 'most_less', 'args': ['all_rows', 'percentage of possible points', '70'], 'result': True, 'ind': 0, 'tointer': 'for the percentage of possible points records of all rows , most of them are less than 70 .', 'tostr': 'most_less { all_rows ; percentage of possible points ; 70 } = true'} | most_less { all_rows ; percentage of possible points ; 70 } = true | for the percentage of possible points records of all rows , most of them are less than 70 . | 1 | 1 | {'most_less_0': 0, 'result_1': 1, 'all_rows_2': 2, 'percentage of possible points_3': 3, '70_4': 4} | {'most_less_0': 'most_less', 'result_1': 'true', 'all_rows_2': 'all_rows', 'percentage of possible points_3': 'percentage of possible points', '70_4': '70'} | {'most_less_0': [1], 'result_1': [], 'all_rows_2': [0], 'percentage of possible points_3': [0], '70_4': [0]} | ['driver', 'points', 'season', 'races', 'percentage of possible points'] | [['michael schumacher', '148', '2004', '18', '82.22 %'], ['michael schumacher', '144', '2002', '17', '84.71 %'], ['fernando alonso', '134', '2006', '18', '74.44 %'], ['fernando alonso', '133', '2005', '19', '70.00 %'], ['michael schumacher', '123', '2001', '17', '72.36 %'], ['michael schumacher', '121', '2006', '18', '67.22 %'], ['rubens barrichello', '114', '2004', '18', '63.33 %'], ['kimi räikkönen', '112', '2005', '19', '58.95 %'], ['kimi räikkönen', '110', '2007', '17', '64.71 %'], ['lewis hamilton', '109', '2007', '17', '64.12 %'], ['fernando alonso', '109', '2007', '17', '64.12 %']] |
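most_less works like most_eq but against a numeric threshold. In the driver-records row the cells carry a trailing " %", so some normalisation is needed; stripping it as below is my own choice rather than anything stated in the record.

# Sketch of  most_less { all_rows ; percentage of possible points ; 70 }
percentages = ["82.22 %", "84.71 %", "74.44 %", "70.00 %", "72.36 %",
               "67.22 %", "63.33 %", "58.95 %", "64.71 %", "64.12 %", "64.12 %"]

below_70 = [p for p in percentages if float(p.rstrip(" %")) < 70]
print(len(below_70) > len(percentages) / 2)   # most_less -> True (6 of 11)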
united states house of representatives elections , 1800 | https://en.wikipedia.org/wiki/United_States_House_of_Representatives_elections%2C_1800 | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/1-2668401-17.html.csv | count | eight virginia incumbents in the 1800 united states house of representatives elections were re-elected . | {'scope': 'all', 'criterion': 'equal', 'value': 're - elected', 'result': '8', 'col': '5', 'subset': None} | {'func': 'eq', 'args': [{'func': 'count', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'result', 're - elected'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose result record fuzzily matches to re - elected .', 'tostr': 'filter_eq { all_rows ; result ; re - elected }'}], 'result': '8', 'ind': 1, 'tostr': 'count { filter_eq { all_rows ; result ; re - elected } }', 'tointer': 'select the rows whose result record fuzzily matches to re - elected . the number of such rows is 8 .'}, '8'], 'result': True, 'ind': 2, 'tostr': 'eq { count { filter_eq { all_rows ; result ; re - elected } } ; 8 } = true', 'tointer': 'select the rows whose result record fuzzily matches to re - elected . the number of such rows is 8 .'} | eq { count { filter_eq { all_rows ; result ; re - elected } } ; 8 } = true | select the rows whose result record fuzzily matches to re - elected . the number of such rows is 8 . | 3 | 3 | {'eq_2': 2, 'result_3': 3, 'count_1': 1, 'filter_str_eq_0': 0, 'all_rows_4': 4, 'result_5': 5, 're - elected_6': 6, '8_7': 7} | {'eq_2': 'eq', 'result_3': 'true', 'count_1': 'count', 'filter_str_eq_0': 'filter_str_eq', 'all_rows_4': 'all_rows', 'result_5': 'result', 're - elected_6': 're - elected', '8_7': '8'} | {'eq_2': [3], 'result_3': [], 'count_1': [2], 'filter_str_eq_0': [1], 'all_rows_4': [0], 'result_5': [0], 're - elected_6': [0], '8_7': [2]} | ['district', 'incumbent', 'party', 'first elected', 'result', 'candidates'] | [['virginia 2', 'david holmes', 'democratic - republican', '1797', 're - elected', 'david holmes ( dr ) alexander sinclair ( f )'], ['virginia 4', 'abram trigg', 'democratic - republican', '1797', 're - elected', 'abram trigg ( dr )'], ['virginia 5', 'john j trigg', 'democratic - republican', '1797', 're - elected', 'john j trigg ( dr )'], ['virginia 6', 'matthew clay', 'democratic - republican', '1797', 're - elected', 'matthew clay ( dr )'], ['virginia 7', 'john randolph', 'democratic - republican', '1799', 're - elected', 'john randolph ( dr )'], ['virginia 8', 'samuel goode', 'federalist', '1799', 'democratic - republican gain', 'thomas claiborne ( dr )'], ['virginia 9', 'joseph eggleston', 'democratic - republican', '1798 ( special )', 'democratic - republican hold', 'william b giles ( dr )'], ['virginia 10', 'edwin gray', 'democratic - republican', '1799', 're - elected', 'edwin gray ( dr ) nicholas faulcon ( dr )'], ['virginia 12', 'thomas evans', 'federalist', '1797', 'retired federalist hold', 'john stratton ( f ) john page ( dr )'], ['virginia 13', 'littleton waller tazewell', 'democratic - republican', '1800 ( special )', 'retired democratic - republican hold', 'john clopton ( dr ) samuel tyler ( dr )'], ['virginia 14', 'samuel j cabell', 'democratic - republican', '1795', 're - elected', 'samuel j cabell ( dr )'], ['virginia 15', 'john dawson', 'democratic - republican', '1797', 're - elected', 'john dawson ( dr )'], ['virginia 18', 'john nicholas', 'democratic - republican', '1793', 'retired democratic - republican hold', 'philip r thompson ( dr ) john blackwell ( f 
)']] |
zdeněk zikán | https://en.wikipedia.org/wiki/Zden%C4%9Bk_Zik%C3%A1n | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-15813318-1.html.csv | superlative | zdeněk zikán achieved his highest score on 15 june 1958 . | {'scope': 'all', 'col_superlative': '3', 'row_superlative': '3', 'value_mentioned': 'no', 'max_or_min': 'max', 'other_col': '1', 'subset': None} | {'func': 'str_eq', 'args': [{'func': 'str_hop', 'args': [{'func': 'argmax', 'args': ['all_rows', 'score'], 'result': None, 'ind': 0, 'tostr': 'argmax { all_rows ; score }'}, 'date'], 'result': '15 june 1958', 'ind': 1, 'tostr': 'hop { argmax { all_rows ; score } ; date }'}, '15 june 1958'], 'result': True, 'ind': 2, 'tostr': 'eq { hop { argmax { all_rows ; score } ; date } ; 15 june 1958 } = true', 'tointer': 'select the row whose score record of all rows is maximum . the date record of this row is 15 june 1958 .'} | eq { hop { argmax { all_rows ; score } ; date } ; 15 june 1958 } = true | select the row whose score record of all rows is maximum . the date record of this row is 15 june 1958 . | 3 | 3 | {'str_eq_2': 2, 'result_3': 3, 'str_hop_1': 1, 'argmax_0': 0, 'all_rows_4': 4, 'score_5': 5, 'date_6': 6, '15 june 1958_7': 7} | {'str_eq_2': 'str_eq', 'result_3': 'true', 'str_hop_1': 'str_hop', 'argmax_0': 'argmax', 'all_rows_4': 'all_rows', 'score_5': 'score', 'date_6': 'date', '15 june 1958_7': '15 june 1958'} | {'str_eq_2': [3], 'result_3': [], 'str_hop_1': [2], 'argmax_0': [1], 'all_rows_4': [0], 'score_5': [0], 'date_6': [1], '15 june 1958_7': [2]} | ['date', 'venue', 'score', 'result', 'competition'] | [['2 april 1958', 'strahov stadium , prague , czechoslovakia', '3 - 2', 'win', 'friendly'], ['11 june 1958', 'olympiastadion , helsingborg , sweden', '2 - 2', 'draw', '1958 world cup'], ['15 june 1958', 'olympiastadion , helsingborg , sweden', '6 - 1', 'win', '1958 world cup'], ['17 june 1958', 'malmö stadion , malmö , sweden', '2 - 1', 'lost', '1958 world cup']] |
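Superlative records use argmax over a column that, in the Zikán case, holds score strings such as "6 - 1". How the executor orders such strings is not documented here; ranking by the first (own-team) goal count is an assumption that happens to reproduce the stated result.

# Sketch of  eq { hop { argmax { all_rows ; score } ; date } ; 15 june 1958 }
matches = [("2 april 1958", "3 - 2"), ("11 june 1958", "2 - 2"),
           ("15 june 1958", "6 - 1"), ("17 june 1958", "2 - 1")]

def goals_for(score):
    """First number of an 'x - y' score string (assumed ranking key)."""
    return int(score.split("-")[0])

best = max(matches, key=lambda m: goals_for(m[1]))  # argmax over score
print(best[0] == "15 june 1958")                    # hop + eq -> True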
vitantonio liuzzi | https://en.wikipedia.org/wiki/Vitantonio_Liuzzi | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-1393912-3.html.csv | superlative | force india f1 team had the highest points among the others in vitantonio liuzzi . | {'scope': 'all', 'col_superlative': '5', 'row_superlative': '5', 'value_mentioned': 'no', 'max_or_min': 'max', 'other_col': '2', 'subset': None} | {'func': 'str_eq', 'args': [{'func': 'str_hop', 'args': [{'func': 'argmax', 'args': ['all_rows', 'points'], 'result': None, 'ind': 0, 'tostr': 'argmax { all_rows ; points }'}, 'entrant'], 'result': 'force india f1 team', 'ind': 1, 'tostr': 'hop { argmax { all_rows ; points } ; entrant }'}, 'force india f1 team'], 'result': True, 'ind': 2, 'tostr': 'eq { hop { argmax { all_rows ; points } ; entrant } ; force india f1 team } = true', 'tointer': 'select the row whose points record of all rows is maximum . the entrant record of this row is force india f1 team .'} | eq { hop { argmax { all_rows ; points } ; entrant } ; force india f1 team } = true | select the row whose points record of all rows is maximum . the entrant record of this row is force india f1 team . | 3 | 3 | {'str_eq_2': 2, 'result_3': 3, 'str_hop_1': 1, 'argmax_0': 0, 'all_rows_4': 4, 'points_5': 5, 'entrant_6': 6, 'force india f1 team_7': 7} | {'str_eq_2': 'str_eq', 'result_3': 'true', 'str_hop_1': 'str_hop', 'argmax_0': 'argmax', 'all_rows_4': 'all_rows', 'points_5': 'points', 'entrant_6': 'entrant', 'force india f1 team_7': 'force india f1 team'} | {'str_eq_2': [3], 'result_3': [], 'str_hop_1': [2], 'argmax_0': [1], 'all_rows_4': [0], 'points_5': [0], 'entrant_6': [1], 'force india f1 team_7': [2]} | ['year', 'entrant', 'chassis', 'engine', 'points'] | [['2005', 'red bull racing', 'red bull rb1', 'cosworth tj2005 3.0 v10', '1'], ['2006', 'scuderia toro rosso', 'toro rosso str1', 'cosworth tj2006 3.0 v10 14 series', '1'], ['2007', 'scuderia toro rosso', 'toro rosso str2', 'ferrari 056 2.4 v8', '3'], ['2009', 'force india f1 team', 'force india vjm02', 'mercedes fo 108w 2.4 l v8', '0'], ['2010', 'force india f1 team', 'force india vjm03', 'mercedes fo 108x 2.4 v8', '21'], ['2011', 'hispania racing f1 team', 'hispania f111', 'cosworth ca2011 v8', '0']] |
1954 vfl season | https://en.wikipedia.org/wiki/1954_VFL_season | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-10773616-6.html.csv | count | there were 6 game venues used during the 1954 vfl season . | {'scope': 'all', 'criterion': 'all', 'value': 'n/a', 'result': '6', 'col': '5', 'subset': None} | {'func': 'eq', 'args': [{'func': 'count', 'args': [{'func': 'filter_all', 'args': ['all_rows', 'venue'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose venue record is arbitrary .', 'tostr': 'filter_all { all_rows ; venue }'}], 'result': '6', 'ind': 1, 'tostr': 'count { filter_all { all_rows ; venue } }', 'tointer': 'select the rows whose venue record is arbitrary . the number of such rows is 6 .'}, '6'], 'result': True, 'ind': 2, 'tostr': 'eq { count { filter_all { all_rows ; venue } } ; 6 } = true', 'tointer': 'select the rows whose venue record is arbitrary . the number of such rows is 6 .'} | eq { count { filter_all { all_rows ; venue } } ; 6 } = true | select the rows whose venue record is arbitrary . the number of such rows is 6 . | 3 | 3 | {'eq_2': 2, 'result_3': 3, 'count_1': 1, 'filter_all_0': 0, 'all_rows_4': 4, 'venue_5': 5, '6_6': 6} | {'eq_2': 'eq', 'result_3': 'true', 'count_1': 'count', 'filter_all_0': 'filter_all', 'all_rows_4': 'all_rows', 'venue_5': 'venue', '6_6': '6'} | {'eq_2': [3], 'result_3': [], 'count_1': [2], 'filter_all_0': [1], 'all_rows_4': [0], 'venue_5': [0], '6_6': [2]} | ['home team', 'home team score', 'away team', 'away team score', 'venue', 'crowd', 'date'] | [['north melbourne', '8.14 ( 62 )', 'south melbourne', '12.13 ( 85 )', 'arden street oval', '15000', '22 may 1954'], ['st kilda', '14.15 ( 99 )', 'melbourne', '12.6 ( 78 )', 'junction oval', '16000', '22 may 1954'], ['richmond', '10.12 ( 72 )', 'hawthorn', '11.12 ( 78 )', 'punt road oval', '23000', '22 may 1954'], ['footscray', '12.8 ( 80 )', 'essendon', '4.7 ( 31 )', 'western oval', '30000', '22 may 1954'], ['geelong', '13.17 ( 95 )', 'collingwood', '9.8 ( 62 )', 'kardinia park', '32500', '22 may 1954'], ['fitzroy', '8.10 ( 58 )', 'carlton', '15.10 ( 100 )', 'brunswick street oval', '20000', '22 may 1954']] |
geothermal power in new zealand | https://en.wikipedia.org/wiki/Geothermal_power_in_New_Zealand | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-15908826-1.html.csv | aggregation | the total capacity of the geothermal power stations in new zealand listed is 815.6 mw . | {'scope': 'all', 'col': '4', 'type': 'sum', 'result': '815.6', 'subset': None} | {'func': 'round_eq', 'args': [{'func': 'sum', 'args': ['all_rows', 'capacity ( mw )'], 'result': '815.6', 'ind': 0, 'tostr': 'sum { all_rows ; capacity ( mw ) }'}, '815.6'], 'result': True, 'ind': 1, 'tostr': 'round_eq { sum { all_rows ; capacity ( mw ) } ; 815.6 } = true', 'tointer': 'the sum of the capacity ( mw ) record of all rows is 815.6 .'} | round_eq { sum { all_rows ; capacity ( mw ) } ; 815.6 } = true | the sum of the capacity ( mw ) record of all rows is 815.6 . | 2 | 2 | {'eq_1': 1, 'result_2': 2, 'sum_0': 0, 'all_rows_3': 3, 'capacity (mw)_4': 4, '815.6_5': 5} | {'eq_1': 'eq', 'result_2': 'true', 'sum_0': 'sum', 'all_rows_3': 'all_rows', 'capacity (mw)_4': 'capacity ( mw )', '815.6_5': '815.6'} | {'eq_1': [2], 'result_2': [], 'sum_0': [1], 'all_rows_3': [0], 'capacity (mw)_4': [0], '815.6_5': [1]} | ['name', 'location', 'field', 'capacity ( mw )', 'annual generation ( average gwh )', 'commissioned'] | [['kawerau ( bope )', 'kawerau , bay of plenty', 'kawerau', '6.3', '35', '1989 , 1993'], ['kawerau ( ka24 )', 'kawerau , bay of plenty', 'kawerau', '8.3', '70', '2008'], ['kawerau ( mrp )', 'kawerau , bay of plenty', 'kawerau', '100', '800', '2008'], ['mokai', 'northwest of taupo', 'mokai', '112', '900', '2000'], ['nga awa purua', 'north of taupo', 'rotokawa', '140', '1100', '2010'], ['ngatamariki', 'north of taupo', 'ngatamariki', '82', '600 ( approx )', '2013'], ['ngawha', 'near kaikohe , northland', 'ngawha', '25', '78', '1998'], ['ohaaki', 'between rotorua and taupo', 'ohaaki', '70', '400', '1989'], ['poihipi', 'north of taupo', 'wairakie', '55', '350', '1997'], ['rotokawa', 'north of taupo', 'rotokawa', '33', '210', '1997'], ['te huka', 'north of taupo', 'tauhara', '23', '170 ( approx )', '2010'], ['wairakei', 'north of taupo', 'wairakei', '161', '1310', '1958 , 2005']] |
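Aggregation records wrap sum or avg in round_eq. Treating round_eq as equality after rounding to one decimal place, an interpretation rather than a documented definition, the geothermal capacity total checks out:

# Sketch of  round_eq { sum { all_rows ; capacity ( mw ) } ; 815.6 }
capacities = [6.3, 8.3, 100, 112, 140, 82, 25, 70, 55, 33, 23, 161]

total = sum(capacities)
print(round(total, 1) == 815.6)   # -> True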
list of intel core i7 microprocessors | https://en.wikipedia.org/wiki/List_of_Intel_Core_i7_microprocessors | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/1-18823880-10.html.csv | ordinal | the core i7 - 820qm model microprocessor has the third highest release price . | {'row': '3', 'col': '16', 'order': '3', 'col_other': '1', 'max_or_min': 'max_to_min', 'value_mentioned': 'no', 'scope': 'all', 'subset': None} | {'func': 'str_eq', 'args': [{'func': 'str_hop', 'args': [{'func': 'nth_argmax', 'args': ['all_rows', 'release price ( usd )', '3'], 'result': None, 'ind': 0, 'tostr': 'nth_argmax { all_rows ; release price ( usd ) ; 3 }'}, 'model number'], 'result': 'core i7 - 820qm', 'ind': 1, 'tostr': 'hop { nth_argmax { all_rows ; release price ( usd ) ; 3 } ; model number }'}, 'core i7 - 820qm'], 'result': True, 'ind': 2, 'tostr': 'eq { hop { nth_argmax { all_rows ; release price ( usd ) ; 3 } ; model number } ; core i7 - 820qm } = true', 'tointer': 'select the row whose release price ( usd ) record of all rows is 3rd maximum . the model number record of this row is core i7 - 820qm .'} | eq { hop { nth_argmax { all_rows ; release price ( usd ) ; 3 } ; model number } ; core i7 - 820qm } = true | select the row whose release price ( usd ) record of all rows is 3rd maximum . the model number record of this row is core i7 - 820qm . | 3 | 3 | {'str_eq_2': 2, 'result_3': 3, 'str_hop_1': 1, 'nth_argmax_0': 0, 'all_rows_4': 4, 'release price ( usd )_5': 5, '3_6': 6, 'model number_7': 7, 'core i7 - 820qm_8': 8} | {'str_eq_2': 'str_eq', 'result_3': 'true', 'str_hop_1': 'str_hop', 'nth_argmax_0': 'nth_argmax', 'all_rows_4': 'all_rows', 'release price ( usd )_5': 'release price ( usd )', '3_6': '3', 'model number_7': 'model number', 'core i7 - 820qm_8': 'core i7 - 820qm'} | {'str_eq_2': [3], 'result_3': [], 'str_hop_1': [2], 'nth_argmax_0': [1], 'all_rows_4': [0], 'release price ( usd )_5': [0], '3_6': [0], 'model number_7': [1], 'core i7 - 820qm_8': [2]} | ['model number', 'sspec number', 'frequency', 'turbo', 'cores', 'l2 cache', 'l3 cache', 'i / o bus', 'mult', 'memory', 'voltage', 'tdp', 'socket', 'release date', 'part number ( s )', 'release price ( usd )'] | [['core i7 - 720qm', 'slbly ( b1 )', '1.6 ghz', '1 / 1 / 6 / 9', '4', '4 256 kb', '6 mb', 'dmi', '12', '2 ddr3 - 1333', '0.65 - 1.4 v', '45 w', 'socketg1', 'september 2009', 'by80607002907ahbx80607i7720qm', '364'], ['core i7 - 740qm', 'slbqg ( b1 )', '1.73 ghz', '1 / 1 / 6 / 9', '4', '4 256 kb', '6 mb', 'dmi', '13', '2 ddr3 - 1333', '0.65 - 1.4 v', '45 w', 'socket g1', 'june 2010', 'by80607005259aabx80607i7740qm', '378'], ['core i7 - 820qm', 'slblx ( b1 )', '1.73 ghz', '1 / 1 / 8 / 10', '4', '4 256 kb', '8 mb', 'dmi', '13', '2 ddr3 - 1333', '0.65 - 1.4 v', '45 w', 'socket g1', 'september 2009', 'by80607002904ak', '546'], ['core i7 - 840qm', 'slbmp ( b1 )', '1.87 ghz', '1 / 1 / 8 / 10', '4', '4 256 kb', '8 mb', 'dmi', '14', '2 ddr3 - 1333', '0.65 - 1.4 v', '45 w', 'socket g1', 'june 2010', 'by80607002901aibx80607i7840qm', '568'], ['core i7 - 920xm', 'slblw ( b1 )', '2 ghz', '2 / 2 / 8 / 9', '4', '4 256 kb', '8 mb', 'dmi', '15', '2 ddr3 - 1333', '0.65 - 1.4 v', '55 w', 'socket g1', 'september 2009', 'by80607002529af', '1054']] |
federal government college ikot ekpene | https://en.wikipedia.org/wiki/Federal_Government_College_Ikot_Ekpene | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/1-11464746-1.html.csv | comparative | house gongola was founded later than house benue in the federal government college ikot ekpene . | {'row_1': '2', 'row_2': '1', 'col': '4', 'col_other': '1', 'relation': 'greater', 'record_mentioned': 'no', 'diff_result': None} | {'func': 'greater', 'args': [{'func': 'num_hop', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'house name', 'gongola'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose house name record fuzzily matches to gongola .', 'tostr': 'filter_eq { all_rows ; house name ; gongola }'}, 'founded'], 'result': None, 'ind': 2, 'tostr': 'hop { filter_eq { all_rows ; house name ; gongola } ; founded }', 'tointer': 'select the rows whose house name record fuzzily matches to gongola . take the founded record of this row .'}, {'func': 'num_hop', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'house name', 'benue'], 'result': None, 'ind': 1, 'tointer': 'select the rows whose house name record fuzzily matches to benue .', 'tostr': 'filter_eq { all_rows ; house name ; benue }'}, 'founded'], 'result': None, 'ind': 3, 'tostr': 'hop { filter_eq { all_rows ; house name ; benue } ; founded }', 'tointer': 'select the rows whose house name record fuzzily matches to benue . take the founded record of this row .'}], 'result': True, 'ind': 4, 'tostr': 'greater { hop { filter_eq { all_rows ; house name ; gongola } ; founded } ; hop { filter_eq { all_rows ; house name ; benue } ; founded } } = true', 'tointer': 'select the rows whose house name record fuzzily matches to gongola . take the founded record of this row . select the rows whose house name record fuzzily matches to benue . take the founded record of this row . the first record is greater than the second record .'} | greater { hop { filter_eq { all_rows ; house name ; gongola } ; founded } ; hop { filter_eq { all_rows ; house name ; benue } ; founded } } = true | select the rows whose house name record fuzzily matches to gongola . take the founded record of this row . select the rows whose house name record fuzzily matches to benue . take the founded record of this row . the first record is greater than the second record . 
| 5 | 5 | {'greater_4': 4, 'result_5': 5, 'num_hop_2': 2, 'filter_str_eq_0': 0, 'all_rows_6': 6, 'house name_7': 7, 'gongola_8': 8, 'founded_9': 9, 'num_hop_3': 3, 'filter_str_eq_1': 1, 'all_rows_10': 10, 'house name_11': 11, 'benue_12': 12, 'founded_13': 13} | {'greater_4': 'greater', 'result_5': 'true', 'num_hop_2': 'num_hop', 'filter_str_eq_0': 'filter_str_eq', 'all_rows_6': 'all_rows', 'house name_7': 'house name', 'gongola_8': 'gongola', 'founded_9': 'founded', 'num_hop_3': 'num_hop', 'filter_str_eq_1': 'filter_str_eq', 'all_rows_10': 'all_rows', 'house name_11': 'house name', 'benue_12': 'benue', 'founded_13': 'founded'} | {'greater_4': [5], 'result_5': [], 'num_hop_2': [4], 'filter_str_eq_0': [2], 'all_rows_6': [0], 'house name_7': [0], 'gongola_8': [0], 'founded_9': [2], 'num_hop_3': [4], 'filter_str_eq_1': [3], 'all_rows_10': [1], 'house name_11': [1], 'benue_12': [1], 'founded_13': [3]} | ['house name', 'composition', 'named after', 'founded', 'colours'] | [['benue', 'coed', 'river benue', '1973', 'yellow'], ['gongola', 'coed', 'gongola river', '1980', 'purple'], ['niger', 'coed', 'river niger', '1973', 'green'], ['rima', 'coed', 'rima river', '1980', 'brown'], ['ogun', 'coed', 'ogun river', '1980', 'blue']] |
1985 u.s. open ( golf ) | https://en.wikipedia.org/wiki/1985_U.S._Open_%28golf%29 | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-17231246-4.html.csv | majority | most of the players in the 1985 u.s. open golf tournament were representing the united states . | {'scope': 'all', 'col': '3', 'most_or_all': 'most', 'criterion': 'equal', 'value': 'united states', 'subset': None} | {'func': 'most_str_eq', 'args': ['all_rows', 'country', 'united states'], 'result': True, 'ind': 0, 'tointer': 'for the country records of all rows , most of them fuzzily match to united states .', 'tostr': 'most_eq { all_rows ; country ; united states } = true'} | most_eq { all_rows ; country ; united states } = true | for the country records of all rows , most of them fuzzily match to united states . | 1 | 1 | {'most_str_eq_0': 0, 'result_1': 1, 'all_rows_2': 2, 'country_3': 3, 'united states_4': 4} | {'most_str_eq_0': 'most_str_eq', 'result_1': 'true', 'all_rows_2': 'all_rows', 'country_3': 'country', 'united states_4': 'united states'} | {'most_str_eq_0': [1], 'result_1': [], 'all_rows_2': [0], 'country_3': [0], 'united states_4': [0]} | ['place', 'player', 'country', 'score', 'to par'] | [['1', 'tze - chung chen', 'taiwan', '65', '- 5'], ['2', 'fred couples', 'united states', '66', '- 4'], ['t3', 'andy bean', 'united states', '69', '- 1'], ['t3', 'rick fehr', 'united states', '69', '- 1'], ['t3', 'jay haas', 'united states', '69', '- 1'], ['t3', 'tom kite', 'united states', '69', '- 1'], ['t3', 'mike reid', 'united states', '69', '- 1'], ['t8', 'dave barr', 'canada', '70', 'e'], ['t8', 'bill glasson', 'united states', '70', 'e'], ['t8', 'skeeter heath', 'united states', '70', 'e'], ['t8', 'andy north', 'united states', '70', 'e'], ['t8', 'gene sauers', 'united states', '70', 'e'], ['t8', 'payne stewart', 'united states', '70', 'e'], ['t8', 'lanny wadkins', 'united states', '70', 'e']] |
list of childrens hospital episodes | https://en.wikipedia.org/wiki/List_of_Childrens_Hospital_episodes | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/1-28081876-4.html.csv | majority | all of the childrens hospital episodes had original air dates in the year 2010 . | {'scope': 'all', 'col': '6', 'most_or_all': 'all', 'criterion': 'fuzzily_match', 'value': '2010', 'subset': None} | {'func': 'all_str_eq', 'args': ['all_rows', 'original air date', '2010'], 'result': True, 'ind': 0, 'tointer': 'for the original air date records of all rows , all of them fuzzily match to 2010 .', 'tostr': 'all_eq { all_rows ; original air date ; 2010 } = true'} | all_eq { all_rows ; original air date ; 2010 } = true | for the original air date records of all rows , all of them fuzzily match to 2010 . | 1 | 1 | {'all_str_eq_0': 0, 'result_1': 1, 'all_rows_2': 2, 'original air date_3': 3, '2010_4': 4} | {'all_str_eq_0': 'all_str_eq', 'result_1': 'true', 'all_rows_2': 'all_rows', 'original air date_3': 'original air date', '2010_4': '2010'} | {'all_str_eq_0': [1], 'result_1': [], 'all_rows_2': [0], 'original air date_3': [0], '2010_4': [0]} | ['series no', 'season no', 'title', 'directed by', 'written by', 'original air date', 'production code'] | [['6', '1', 'i see her face everywhere', 'matt shakman', 'rob corddry', 'august 22 , 2010', '201'], ['7', '2', 'no one can replace her', 'matt shakman', 'rob corddry', 'august 29 , 2010', '202'], ['8', '3', 'i am not afraid of any ghost', 'bryan gordon', 'rob huebel', 'september 5 , 2010', '203'], ['9', '4', 'give a painted brother a break', 'rob schrab', 'paul scheer', 'september 12 , 2010', '205'], ['10', '5', 'joke overload', 'john inwood', 'jason mantzoukas', 'september 19 , 2010', '207'], ['11', '6', 'end of the middle', 'david wain', 'jonathan stern', 'september 26 , 2010', '206'], ['13', '8', 'hot enough for you', 'david wain', 'rob corddry & david wain', 'october 10 , 2010', '208'], ['14', '9', 'the coffee machine paid for itself', 'bryan gordon', 'ken marino & erica oyama', 'october 17 , 2010', '209'], ['16', '11', 'you know no one can hear you , right', 'ken marino', 'brian huskey and rob corddry', 'october 31 , 2010', '211']] |
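all_eq is the all-rows counterpart of most_eq. With the same substring reading of the fuzzy match assumed earlier, the Childrens Hospital air-date claim reduces to a single all() call:

# Sketch of  all_eq { all_rows ; original air date ; 2010 }
air_dates = ["august 22 , 2010", "august 29 , 2010", "september 5 , 2010",
             "september 12 , 2010", "september 19 , 2010", "september 26 , 2010",
             "october 10 , 2010", "october 17 , 2010", "october 31 , 2010"]

print(all("2010" in d for d in air_dates))   # all_eq -> True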
emerich dembrovschi | https://en.wikipedia.org/wiki/Emerich_Dembrovschi | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-11732849-1.html.csv | unique | the only international goal emerich dembrovschi scored outside of europe was on 10 june 1970 in mexico . | {'scope': 'all', 'row': '3', 'col': '2', 'col_other': '1', 'criterion': 'equal', 'value': 'estadio jalisco , guadalajara , mexico', 'subset': None} | {'func': 'and', 'args': [{'func': 'only', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'venue', 'estadio jalisco , guadalajara , mexico'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose venue record fuzzily matches to estadio jalisco , guadalajara , mexico .', 'tostr': 'filter_eq { all_rows ; venue ; estadio jalisco , guadalajara , mexico }'}], 'result': True, 'ind': 1, 'tostr': 'only { filter_eq { all_rows ; venue ; estadio jalisco , guadalajara , mexico } }', 'tointer': 'select the rows whose venue record fuzzily matches to estadio jalisco , guadalajara , mexico . there is only one such row in the table .'}, {'func': 'str_eq', 'args': [{'func': 'str_hop', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'venue', 'estadio jalisco , guadalajara , mexico'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose venue record fuzzily matches to estadio jalisco , guadalajara , mexico .', 'tostr': 'filter_eq { all_rows ; venue ; estadio jalisco , guadalajara , mexico }'}, 'date'], 'result': '10 june 1970', 'ind': 2, 'tostr': 'hop { filter_eq { all_rows ; venue ; estadio jalisco , guadalajara , mexico } ; date }'}, '10 june 1970'], 'result': True, 'ind': 3, 'tostr': 'eq { hop { filter_eq { all_rows ; venue ; estadio jalisco , guadalajara , mexico } ; date } ; 10 june 1970 }', 'tointer': 'the date record of this unqiue row is 10 june 1970 .'}], 'result': True, 'ind': 4, 'tostr': 'and { only { filter_eq { all_rows ; venue ; estadio jalisco , guadalajara , mexico } } ; eq { hop { filter_eq { all_rows ; venue ; estadio jalisco , guadalajara , mexico } ; date } ; 10 june 1970 } } = true', 'tointer': 'select the rows whose venue record fuzzily matches to estadio jalisco , guadalajara , mexico . there is only one such row in the table . the date record of this unqiue row is 10 june 1970 .'} | and { only { filter_eq { all_rows ; venue ; estadio jalisco , guadalajara , mexico } } ; eq { hop { filter_eq { all_rows ; venue ; estadio jalisco , guadalajara , mexico } ; date } ; 10 june 1970 } } = true | select the rows whose venue record fuzzily matches to estadio jalisco , guadalajara , mexico . there is only one such row in the table . the date record of this unqiue row is 10 june 1970 . 
| 6 | 5 | {'and_4': 4, 'result_5': 5, 'only_1': 1, 'filter_str_eq_0': 0, 'all_rows_6': 6, 'venue_7': 7, 'estadio jalisco , guadalajara , mexico_8': 8, 'str_eq_3': 3, 'str_hop_2': 2, 'date_9': 9, '10 june 1970_10': 10} | {'and_4': 'and', 'result_5': 'true', 'only_1': 'only', 'filter_str_eq_0': 'filter_str_eq', 'all_rows_6': 'all_rows', 'venue_7': 'venue', 'estadio jalisco , guadalajara , mexico_8': 'estadio jalisco , guadalajara , mexico', 'str_eq_3': 'str_eq', 'str_hop_2': 'str_hop', 'date_9': 'date', '10 june 1970_10': '10 june 1970'} | {'and_4': [5], 'result_5': [], 'only_1': [4], 'filter_str_eq_0': [1, 2], 'all_rows_6': [0], 'venue_7': [0], 'estadio jalisco , guadalajara , mexico_8': [0], 'str_eq_3': [4], 'str_hop_2': [3], 'date_9': [2], '10 june 1970_10': [3]} | ['date', 'venue', 'score', 'result', 'competition'] | [['3 september 1969', 'jna stadium , belgrade , yugoslavia', '0 - 1', '1 - 1', 'friendly'], ['16 november 1969', '23 august stadium , bucharest , romania', '1 - 0', '1 - 1', '1970 fifa world cup qualification'], ['10 june 1970', 'estadio jalisco , guadalajara , mexico', '3 - 2', '3 - 2', '1970 fifa world cup'], ['21 april 1971', 'vojvodina stadium , novi sad , yugoslavia', '0 - 1', '0 - 1', 'friendly'], ['22 september 1972', 'olympic stadium , helsinki , finland', '0 - 3', '0 - 4', 'uefa euro 1972 qual'], ['10 october 1971', 'idrætsparken stadium , copenhagen , denmark', '2 - 1', '2 - 1', '1972 summer olympics qual'], ['14 november 1971', '23 august stadium , bucharest , romania', '1 - 0', '2 - 1', 'uefa euro 1972 qual'], ['3 september 1972', 'stadionul central , craiova , romania', '1 - 1', '1 - 1', 'friendly'], ['29 october 1972', '23 august stadium , bucharest , romania', '2 - 0', '2 - 0', '1974 fifa world cup qualification']] |
giorgio zampori | https://en.wikipedia.org/wiki/Giorgio_Zampori | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-11138928-1.html.csv | majority | the majority of giorgio zampori 's positions were first place positions . | {'scope': 'all', 'col': '4', 'most_or_all': 'most', 'criterion': 'equal', 'value': '1st', 'subset': None} | {'func': 'most_str_eq', 'args': ['all_rows', 'position', '1st'], 'result': True, 'ind': 0, 'tointer': 'for the position records of all rows , most of them fuzzily match to 1st .', 'tostr': 'most_eq { all_rows ; position ; 1st } = true'} | most_eq { all_rows ; position ; 1st } = true | for the position records of all rows , most of them fuzzily match to 1st . | 1 | 1 | {'most_str_eq_0': 0, 'result_1': 1, 'all_rows_2': 2, 'position_3': 3, '1st_4': 4} | {'most_str_eq_0': 'most_str_eq', 'result_1': 'true', 'all_rows_2': 'all_rows', 'position_3': 'position', '1st_4': '1st'} | {'most_str_eq_0': [1], 'result_1': [], 'all_rows_2': [0], 'position_3': [0], '1st_4': [0]} | ['year', 'competition', 'venue', 'position', 'event'] | [['1909', 'world championships', 'turin', '3rd', 'rings'], ['1909', 'world championships', 'turin', '3rd', 'team all - round'], ['1911', 'world championships', 'luxembourg', '1st', 'parallel bars'], ['1911', 'world championships', 'luxembourg', '2nd', 'pommel horse'], ['1913', 'world championships', 'paris', '1st', 'parallel bars'], ['1913', 'world championships', 'paris', '1st', 'pommel horse'], ['1913', 'world championships', 'paris', '1st', 'rings'], ['1913', 'world championships', 'paris', '3rd', 'team all - round']] |
turkish airlines | https://en.wikipedia.org/wiki/Turkish_Airlines | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-167925-2.html.csv | aggregation | there has been a combined total of 899 fatalities in turkish airlines flight crashes . | {'scope': 'all', 'col': '6', 'type': 'sum', 'result': '899', 'subset': None} | {'func': 'round_eq', 'args': [{'func': 'sum', 'args': ['all_rows', 'fatalities'], 'result': '899', 'ind': 0, 'tostr': 'sum { all_rows ; fatalities }'}, '899'], 'result': True, 'ind': 1, 'tostr': 'round_eq { sum { all_rows ; fatalities } ; 899 } = true', 'tointer': 'the sum of the fatalities record of all rows is 899 .'} | round_eq { sum { all_rows ; fatalities } ; 899 } = true | the sum of the fatalities record of all rows is 899 . | 2 | 2 | {'eq_1': 1, 'result_2': 2, 'sum_0': 0, 'all_rows_3': 3, 'fatalities_4': 4, '899_5': 5} | {'eq_1': 'eq', 'result_2': 'true', 'sum_0': 'sum', 'all_rows_3': 'all_rows', 'fatalities_4': 'fatalities', '899_5': '899'} | {'eq_1': [2], 'result_2': [], 'sum_0': [1], 'all_rows_3': [0], 'fatalities_4': [0], '899_5': [1]} | ['date', 'flight', 'aircraft', 'registration', 'location', 'fatalities'] | [['17 february 1959', 'n / a', 'vickers viscount type 793', 'tc - sev', 'london', '14'], ['23 september 1961', '100', 'fokker f27 - 100', 'tc - tay', 'ankara', '28'], ['8 march 1962', 'n / a', 'fairchild f - 27', 'tc - kop', 'adana', '11'], ['3 february 1964', 'n / a', 'douglas c - 47', 'tc - eti', 'ankara', '3'], ['2 february 1969', 'n / a', 'vickers viscount type 794', 'tc - set', 'ankara', '0'], ['26 january 1974', 'n / a', 'fokker f28 - 1000', 'tc - jao', 'izmir', '66'], ['3 march 1974', '981', 'mcdonnell douglas dc - 10', 'tc - jav', 'fontaine - chaalis , oise', '346'], ['30 january 1975', '345', 'fokker f28 - 1000', 'tc - jap', 'istanbul', '42'], ['19 september 1976', '452', 'boeing 727', 'tc - jbh', 'isparta', '154'], ['23 december 1979', 'n / a', 'fokker f28 - 1000', 'tc - jat', 'ankara', '41'], ['16 january 1983', '158', 'boeing 727', 'tc - jbr', 'ankara', '47'], ['29 december 1994', '278', 'boeing 737', 'tc - jes', 'van', '57'], ['7 april 1999', '5904', 'boeing 737', 'tc - jep', 'ceyhan', '6'], ['8 january 2003', '634', 'avro rj - 100', 'tc - thg', 'diyarbakä ± r', '75'], ['25 february 2009', '1951', 'boeing 737', 'tc - jge', 'amsterdam', '9']] |
athletics at the 1998 central american and caribbean games | https://en.wikipedia.org/wiki/Athletics_at_the_1998_Central_American_and_Caribbean_Games | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-10535131-3.html.csv | comparative | guatemala won one more silver medal than el salvador in athletics at the 1998 central american and caribbean games . | {'row_1': '13', 'row_2': '14', 'col': '4', 'col_other': '2', 'relation': 'diff', 'record_mentioned': 'no', 'diff_result': {'diff_value': '1', 'bigger': 'row1'}} | {'func': 'eq', 'args': [{'func': 'diff', 'args': [{'func': 'num_hop', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'nation', 'guatemala'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose nation record fuzzily matches to guatemala .', 'tostr': 'filter_eq { all_rows ; nation ; guatemala }'}, 'silver'], 'result': None, 'ind': 2, 'tostr': 'hop { filter_eq { all_rows ; nation ; guatemala } ; silver }', 'tointer': 'select the rows whose nation record fuzzily matches to guatemala . take the silver record of this row .'}, {'func': 'num_hop', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'nation', 'el salvador'], 'result': None, 'ind': 1, 'tointer': 'select the rows whose nation record fuzzily matches to el salvador .', 'tostr': 'filter_eq { all_rows ; nation ; el salvador }'}, 'silver'], 'result': None, 'ind': 3, 'tostr': 'hop { filter_eq { all_rows ; nation ; el salvador } ; silver }', 'tointer': 'select the rows whose nation record fuzzily matches to el salvador . take the silver record of this row .'}], 'result': '1', 'ind': 4, 'tostr': 'diff { hop { filter_eq { all_rows ; nation ; guatemala } ; silver } ; hop { filter_eq { all_rows ; nation ; el salvador } ; silver } }'}, '1'], 'result': True, 'ind': 5, 'tostr': 'eq { diff { hop { filter_eq { all_rows ; nation ; guatemala } ; silver } ; hop { filter_eq { all_rows ; nation ; el salvador } ; silver } } ; 1 } = true', 'tointer': 'select the rows whose nation record fuzzily matches to guatemala . take the silver record of this row . select the rows whose nation record fuzzily matches to el salvador . take the silver record of this row . the first record is 1 larger than the second record .'} | eq { diff { hop { filter_eq { all_rows ; nation ; guatemala } ; silver } ; hop { filter_eq { all_rows ; nation ; el salvador } ; silver } } ; 1 } = true | select the rows whose nation record fuzzily matches to guatemala . take the silver record of this row . select the rows whose nation record fuzzily matches to el salvador . take the silver record of this row . the first record is 1 larger than the second record . 
| 6 | 6 | {'eq_5': 5, 'result_6': 6, 'diff_4': 4, 'num_hop_2': 2, 'filter_str_eq_0': 0, 'all_rows_7': 7, 'nation_8': 8, 'guatemala_9': 9, 'silver_10': 10, 'num_hop_3': 3, 'filter_str_eq_1': 1, 'all_rows_11': 11, 'nation_12': 12, 'el salvador_13': 13, 'silver_14': 14, '1_15': 15} | {'eq_5': 'eq', 'result_6': 'true', 'diff_4': 'diff', 'num_hop_2': 'num_hop', 'filter_str_eq_0': 'filter_str_eq', 'all_rows_7': 'all_rows', 'nation_8': 'nation', 'guatemala_9': 'guatemala', 'silver_10': 'silver', 'num_hop_3': 'num_hop', 'filter_str_eq_1': 'filter_str_eq', 'all_rows_11': 'all_rows', 'nation_12': 'nation', 'el salvador_13': 'el salvador', 'silver_14': 'silver', '1_15': '1'} | {'eq_5': [6], 'result_6': [], 'diff_4': [5], 'num_hop_2': [4], 'filter_str_eq_0': [2], 'all_rows_7': [0], 'nation_8': [0], 'guatemala_9': [0], 'silver_10': [2], 'num_hop_3': [4], 'filter_str_eq_1': [3], 'all_rows_11': [1], 'nation_12': [1], 'el salvador_13': [1], 'silver_14': [3], '1_15': [5]} | ['rank', 'nation', 'gold', 'silver', 'bronze', 'total'] | [['1', 'cuba', '19', '13', '12', '44'], ['2', 'mexico', '12', '9', '7', '28'], ['3', 'jamaica', '6', '9', '7', '22'], ['4', 'venezuela', '2', '4', '5', '11'], ['5', 'bahamas', '2', '2', '3', '7'], ['6', 'barbados', '1', '1', '1', '3'], ['7', 'dominican republic', '1', '0', '1', '2'], ['7', 'puerto rico', '1', '0', '1', '2'], ['9', 'us virgin islands', '1', '0', '0', '1'], ['9', 'suriname', '1', '0', '0', '1'], ['11', 'colombia', '0', '4', '6', '10'], ['12', 'trinidad and tobago', '0', '3', '0', '3'], ['13', 'guatemala', '0', '1', '2', '3'], ['14', 'el salvador', '0', '0', '1', '1']] |
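
Comparative records chain `filter_eq`, `hop`, and `diff`. A sketch of that pipeline under the same substring-matching assumption; `hop` is read here as taking the named column of the first matching row:

```python
# Illustrative sketch: filter_eq -> hop -> diff, as chained by the comparative records.
def filter_eq(rows, header, column, value):
    idx = header.index(column)
    return [row for row in rows if value.lower() in row[idx].lower()]

def hop(rows, header, column):
    # Take the named column of the first matching row.
    return rows[0][header.index(column)]

header = ["rank", "nation", "gold", "silver", "bronze", "total"]
rows = [
    ["13", "guatemala", "0", "1", "2", "3"],
    ["14", "el salvador", "0", "0", "1", "1"],
]
a = float(hop(filter_eq(rows, header, "nation", "guatemala"), header, "silver"))
b = float(hop(filter_eq(rows, header, "nation", "el salvador"), header, "silver"))
print(a - b == 1)  # True: guatemala has one more silver than el salvador
```
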
2007 - 08 sacramento kings season | https://en.wikipedia.org/wiki/2007%E2%80%9308_Sacramento_Kings_season | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-11965631-4.html.csv | aggregation | the average attendance for the 07-08 sacramento kings season was around 15000 . | {'scope': 'all', 'col': '6', 'type': 'average', 'result': '15000', 'subset': None} | {'func': 'round_eq', 'args': [{'func': 'avg', 'args': ['all_rows', 'attendance'], 'result': '15000', 'ind': 0, 'tostr': 'avg { all_rows ; attendance }'}, '15000'], 'result': True, 'ind': 1, 'tostr': 'round_eq { avg { all_rows ; attendance } ; 15000 } = true', 'tointer': 'the average of the attendance record of all rows is 15000 .'} | round_eq { avg { all_rows ; attendance } ; 15000 } = true | the average of the attendance record of all rows is 15000 . | 2 | 2 | {'eq_1': 1, 'result_2': 2, 'avg_0': 0, 'all_rows_3': 3, 'attendance_4': 4, '15000_5': 5} | {'eq_1': 'eq', 'result_2': 'true', 'avg_0': 'avg', 'all_rows_3': 'all_rows', 'attendance_4': 'attendance', '15000_5': '15000'} | {'eq_1': [2], 'result_2': [], 'avg_0': [1], 'all_rows_3': [0], 'attendance_4': [0], '15000_5': [1]} | ['date', 'visitor', 'score', 'home', 'leading scorer', 'attendance', 'record'] | [['2 november 2007', 'kings', '80 - 96', 'spurs', 'kevin martin ( 22 )', '17072', '0 - 2'], ['3 november 2007', 'kings', '102 - 123', 'mavericks', 'kevin martin ( 28 )', '20343', '0 - 3'], ['6 november 2007', 'supersonics', '98 - 104', 'kings', 'kevin martin ( 31 )', '14908', '1 - 3'], ['9 november 2007', 'cavaliers', '93 - 91', 'kings', 'kevin martin ( 32 )', '15293', '1 - 4'], ['10 november 2007', 'timberwolves', '93 - 100', 'kings', 'kevin martin ( 29 )', '13170', '2 - 4'], ['12 november 2007', 'kings', '93 - 117', 'jazz', 'john salmons ( 22 )', '19911', '2 - 5'], ['14 november 2007', 'kings', '103 - 108', 'timberwolves', 'kevin martin ( 22 )', '11656', '2 - 6'], ['16 november 2007', 'knicks', '118 - 123', 'kings', 'kevin martin ( 43 )', '12549', '3 - 6'], ['18 november 2007', 'pistons', '95 - 105', 'kings', 'beno udrih ( 23 )', '12978', '4 - 6'], ['20 november 2007', 'suns', '100 - 98', 'kings', 'ron artest ( 33 )', '13598', '4 - 7'], ['21 november 2007', 'kings', '111 - 127', 'suns', 'francisco garcia ( 31 )', '18422', '4 - 8'], ['23 november 2007', 'kings', '84 - 87', 'trail blazers', 'kevin martin ( 21 )', '19980', '4 - 9'], ['26 november 2007', 'spurs', '99 - 112', 'kings', 'beno udrih ( 27 )', '12587', '5 - 9']] |
chalid arrab | https://en.wikipedia.org/wiki/Chalid_Arrab | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-10363239-3.html.csv | majority | the majority of matches were won by the judges ' decision method . | {'scope': 'all', 'col': '4', 'most_or_all': 'most', 'criterion': 'fuzzily_match', 'value': 'decision', 'subset': None} | {'func': 'most_str_eq', 'args': ['all_rows', 'method', 'decision'], 'result': True, 'ind': 0, 'tointer': 'for the method records of all rows , most of them fuzzily match to decision .', 'tostr': 'most_eq { all_rows ; method ; decision } = true'} | most_eq { all_rows ; method ; decision } = true | for the method records of all rows , most of them fuzzily match to decision . | 1 | 1 | {'most_str_eq_0': 0, 'result_1': 1, 'all_rows_2': 2, 'method_3': 3, 'decision_4': 4} | {'most_str_eq_0': 'most_str_eq', 'result_1': 'true', 'all_rows_2': 'all_rows', 'method_3': 'method', 'decision_4': 'decision'} | {'most_str_eq_0': [1], 'result_1': [], 'all_rows_2': [0], 'method_3': [0], 'decision_4': [0]} | ['res', 'record', 'opponent', 'method', 'event', 'location'] | [['win', '7 - 3', 'hiromitsu kanehara', 'decision ( majority )', "hero 's 2005 in seoul", 'seoul , south korea'], ['win', '6 - 3', 'yukiya naito', 'decision ( unanimous )', "hero 's 1", 'saitama , saitama , japan'], ['loss', '5 - 3', 'kazuhiro nakamura', 'submission ( armbar )', 'pride bushido 3', 'yokohama , japan'], ['win', '5 - 2', 'rodney glunder', 'decision ( unanimous )', 'pride bushido 1', 'saitama , saitama , japan'], ['loss', '4 - 2', 'jeremy horn', 'decision ( unanimous )', '2h2h 6 - simply the best 6', 'rotterdam , netherlands'], ['win', '4 - 1', 'stanislav nuschik', 'ko ( punches )', 'm - 1 mfc - european championship 2002', 'saint petersburg , russia'], ['win', '3 - 1', 'roman zentsov', 'ko', 'm - 1 mfc - russia vs the world 2', 'saint petersburg , russia'], ['win', '2 - 1', 'peter varga', 'submission ( arm lock )', 'millenniumsports - veni vidi vici', 'veenendaal , netherlands'], ['loss', '1 - 1', 'ramazan mezhidov', 'submission ( rear naked choke )', 'iafc - pankration world championship 2000', 'moscow , russia'], ['win', '1 - 0', 'spartak kochnev', 'tko ( strikes )', 'iafc - pankration world championship 2000', 'moscow , russia']]
aveiro district | https://en.wikipedia.org/wiki/Aveiro_District | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-1794599-1.html.csv | comparative | santa maria da feira has a larger area in square kilometers than vale de cambra . | {'row_1': '15', 'row_2': '19', 'col': '2', 'col_other': '1', 'relation': 'greater', 'record_mentioned': 'yes', 'diff_result': None} | {'func': 'and', 'args': [{'func': 'greater', 'args': [{'func': 'num_hop', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'name', 'santa maria da feira'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose name record fuzzily matches to santa maria da feira .', 'tostr': 'filter_eq { all_rows ; name ; santa maria da feira }'}, 'area ( km square )'], 'result': None, 'ind': 2, 'tostr': 'hop { filter_eq { all_rows ; name ; santa maria da feira } ; area ( km square ) }', 'tointer': 'select the rows whose name record fuzzily matches to santa maria da feira . take the area ( km square ) record of this row .'}, {'func': 'num_hop', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'name', 'vale de cambra'], 'result': None, 'ind': 1, 'tointer': 'select the rows whose name record fuzzily matches to vale de cambra .', 'tostr': 'filter_eq { all_rows ; name ; vale de cambra }'}, 'area ( km square )'], 'result': None, 'ind': 3, 'tostr': 'hop { filter_eq { all_rows ; name ; vale de cambra } ; area ( km square ) }', 'tointer': 'select the rows whose name record fuzzily matches to vale de cambra . take the area ( km square ) record of this row .'}], 'result': True, 'ind': 4, 'tostr': 'greater { hop { filter_eq { all_rows ; name ; santa maria da feira } ; area ( km square ) } ; hop { filter_eq { all_rows ; name ; vale de cambra } ; area ( km square ) } }', 'tointer': 'select the rows whose name record fuzzily matches to santa maria da feira . take the area ( km square ) record of this row . select the rows whose name record fuzzily matches to vale de cambra . take the area ( km square ) record of this row . the first record is greater than the second record .'}, {'func': 'and', 'args': [{'func': 'eq', 'args': [{'func': 'num_hop', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'name', 'santa maria da feira'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose name record fuzzily matches to santa maria da feira .', 'tostr': 'filter_eq { all_rows ; name ; santa maria da feira }'}, 'area ( km square )'], 'result': None, 'ind': 2, 'tostr': 'hop { filter_eq { all_rows ; name ; santa maria da feira } ; area ( km square ) }', 'tointer': 'select the rows whose name record fuzzily matches to santa maria da feira . take the area ( km square ) record of this row .'}, '215.1'], 'result': True, 'ind': 5, 'tostr': 'eq { hop { filter_eq { all_rows ; name ; santa maria da feira } ; area ( km square ) } ; 215.1 }', 'tointer': 'the area ( km square ) record of the first row is 215.1 .'}, {'func': 'eq', 'args': [{'func': 'num_hop', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'name', 'vale de cambra'], 'result': None, 'ind': 1, 'tointer': 'select the rows whose name record fuzzily matches to vale de cambra .', 'tostr': 'filter_eq { all_rows ; name ; vale de cambra }'}, 'area ( km square )'], 'result': None, 'ind': 3, 'tostr': 'hop { filter_eq { all_rows ; name ; vale de cambra } ; area ( km square ) }', 'tointer': 'select the rows whose name record fuzzily matches to vale de cambra . 
take the area ( km square ) record of this row .'}, '146.5'], 'result': True, 'ind': 6, 'tostr': 'eq { hop { filter_eq { all_rows ; name ; vale de cambra } ; area ( km square ) } ; 146.5 }', 'tointer': 'the area ( km square ) record of the second row is 146.5 .'}], 'result': True, 'ind': 7, 'tostr': 'and { eq { hop { filter_eq { all_rows ; name ; santa maria da feira } ; area ( km square ) } ; 215.1 } ; eq { hop { filter_eq { all_rows ; name ; vale de cambra } ; area ( km square ) } ; 146.5 } }', 'tointer': 'the area ( km square ) record of the first row is 215.1 . the area ( km square ) record of the second row is 146.5 .'}], 'result': True, 'ind': 8, 'tostr': 'and { greater { hop { filter_eq { all_rows ; name ; santa maria da feira } ; area ( km square ) } ; hop { filter_eq { all_rows ; name ; vale de cambra } ; area ( km square ) } } ; and { eq { hop { filter_eq { all_rows ; name ; santa maria da feira } ; area ( km square ) } ; 215.1 } ; eq { hop { filter_eq { all_rows ; name ; vale de cambra } ; area ( km square ) } ; 146.5 } } } = true', 'tointer': 'select the rows whose name record fuzzily matches to santa maria da feira . take the area ( km square ) record of this row . select the rows whose name record fuzzily matches to vale de cambra . take the area ( km square ) record of this row . the first record is greater than the second record . the area ( km square ) record of the first row is 215.1 . the area ( km square ) record of the second row is 146.5 .'} | and { greater { hop { filter_eq { all_rows ; name ; santa maria da feira } ; area ( km square ) } ; hop { filter_eq { all_rows ; name ; vale de cambra } ; area ( km square ) } } ; and { eq { hop { filter_eq { all_rows ; name ; santa maria da feira } ; area ( km square ) } ; 215.1 } ; eq { hop { filter_eq { all_rows ; name ; vale de cambra } ; area ( km square ) } ; 146.5 } } } = true | select the rows whose name record fuzzily matches to santa maria da feira . take the area ( km square ) record of this row . select the rows whose name record fuzzily matches to vale de cambra . take the area ( km square ) record of this row . the first record is greater than the second record . the area ( km square ) record of the first row is 215.1 . the area ( km square ) record of the second row is 146.5 . 
| 13 | 9 | {'and_8': 8, 'result_9': 9, 'greater_4': 4, 'num_hop_2': 2, 'filter_str_eq_0': 0, 'all_rows_10': 10, 'name_11': 11, 'santa maria da feira_12': 12, 'area (km square)_13': 13, 'num_hop_3': 3, 'filter_str_eq_1': 1, 'all_rows_14': 14, 'name_15': 15, 'vale de cambra_16': 16, 'area (km square)_17': 17, 'and_7': 7, 'eq_5': 5, '215.1_18': 18, 'eq_6': 6, '146.5_19': 19} | {'and_8': 'and', 'result_9': 'true', 'greater_4': 'greater', 'num_hop_2': 'num_hop', 'filter_str_eq_0': 'filter_str_eq', 'all_rows_10': 'all_rows', 'name_11': 'name', 'santa maria da feira_12': 'santa maria da feira', 'area (km square)_13': 'area ( km square )', 'num_hop_3': 'num_hop', 'filter_str_eq_1': 'filter_str_eq', 'all_rows_14': 'all_rows', 'name_15': 'name', 'vale de cambra_16': 'vale de cambra', 'area (km square)_17': 'area ( km square )', 'and_7': 'and', 'eq_5': 'eq', '215.1_18': '215.1', 'eq_6': 'eq', '146.5_19': '146.5'} | {'and_8': [9], 'result_9': [], 'greater_4': [8], 'num_hop_2': [4, 5], 'filter_str_eq_0': [2], 'all_rows_10': [0], 'name_11': [0], 'santa maria da feira_12': [0], 'area (km square)_13': [2], 'num_hop_3': [4, 6], 'filter_str_eq_1': [3], 'all_rows_14': [1], 'name_15': [1], 'vale de cambra_16': [1], 'area (km square)_17': [3], 'and_7': [8], 'eq_5': [7], '215.1_18': [5], 'eq_6': [7], '146.5_19': [6]} | ['name', 'area ( km square )', 'pop', 'pop / area ( 1 / km square )', 'no p', 'no c / no t', 'subregion'] | [['águeda', '335.3', '47729', '148', '20', '1', 'baixo vouga'], ['albergaria - a - velha', '155.4', '25497', '164', '8', '0', 'baixo vouga'], ['anadia', '216.6', '31671', '146', '15', '1', 'baixo vouga'], ['arouca', '329.1', '24019', '73', '20', '0', 'entre douro e vouga'], ['aveiro', '199.9', '73626', '368', '14', '1', 'baixo vouga'], ['castelo de paiva', '115.0', '17089', '149', '9', '0 / 2', 'tmega'], ['espinho', '21.1', '31703', '1503', '5', '1 / 1', 'grande porto'], ['estarreja', '108.4', '28279', '261', '7', '1 / 3', 'baixo vouga'], ['ílhavo', '73.5', '39247', '534', '4', '2', 'baixo vouga'], ['mealhada', '110.7', '20496', '194', '8', '1', 'baixo vouga'], ['murtosa', '73.3', '9657', '132', '4', '0 / 1', 'baixo vouga'], ['oliveira de azeméis', '163.5', '71243', '436', '19', '1 / 9', 'entre douro e vouga'], ['oliveira do bairro', '87.3', '22365', '256', '6', '1', 'baixo vouga'], ['ovar', '147.4', '56715', '385', '8', '2 / 3', 'baixo vouga'], ['santa maria da feira', '215.1', '142295', '662', '31', '3 / 13', 'entre douro e vouga'], ['são joão da madeira', '7.9', '21538', '2726', '1', '1 / 0', 'entre douro e vouga'], ['sever do vouga', '129.6', '12940', '100', '9', '0', 'baixo vouga'], ['vagos', '169.9', '23205', '137', '11', '0 / 2', 'baixo vouga'], ['vale de cambra', '146.5', '22864', '169', '9', '1', 'entre douro e vouga']] |
volleyball at the 2004 summer olympics - men 's team rosters | https://en.wikipedia.org/wiki/Volleyball_at_the_2004_Summer_Olympics_%E2%80%93_Men%27s_team_rosters | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-15859432-3.html.csv | superlative | the tallest men 's volleyball player at the 2004 summer olympics was rodrigo santana . | {'scope': 'all', 'col_superlative': '3', 'row_superlative': '10', 'value_mentioned': 'no', 'max_or_min': 'max', 'other_col': '1', 'subset': None} | {'func': 'str_eq', 'args': [{'func': 'str_hop', 'args': [{'func': 'argmax', 'args': ['all_rows', 'height'], 'result': None, 'ind': 0, 'tostr': 'argmax { all_rows ; height }'}, 'name'], 'result': 'rodrigo santana', 'ind': 1, 'tostr': 'hop { argmax { all_rows ; height } ; name }'}, 'rodrigo santana'], 'result': True, 'ind': 2, 'tostr': 'eq { hop { argmax { all_rows ; height } ; name } ; rodrigo santana } = true', 'tointer': 'select the row whose height record of all rows is maximum . the name record of this row is rodrigo santana .'} | eq { hop { argmax { all_rows ; height } ; name } ; rodrigo santana } = true | select the row whose height record of all rows is maximum . the name record of this row is rodrigo santana . | 3 | 3 | {'str_eq_2': 2, 'result_3': 3, 'str_hop_1': 1, 'argmax_0': 0, 'all_rows_4': 4, 'height_5': 5, 'name_6': 6, 'rodrigo santana_7': 7} | {'str_eq_2': 'str_eq', 'result_3': 'true', 'str_hop_1': 'str_hop', 'argmax_0': 'argmax', 'all_rows_4': 'all_rows', 'height_5': 'height', 'name_6': 'name', 'rodrigo santana_7': 'rodrigo santana'} | {'str_eq_2': [3], 'result_3': [], 'str_hop_1': [2], 'argmax_0': [1], 'all_rows_4': [0], 'height_5': [0], 'name_6': [1], 'rodrigo santana_7': [2]} | ['name', 'date of birth', 'height', 'weight', 'spike', 'block'] | [['giovane gávio', '07.09.1970', '196', '89', '340', '322'], ['andré heller', '17.12.1975', '199', '93', '339', '321'], ['mauricio lima', '27.01.1968', '184', '79', '321', '304'], ['gilberto godoy filho', '23.12.1976', '192', '85', '325', '312'], ['andré nascimento', '04.03.1979', '195', '95', '340', '320'], ['sérgio dutra santos', '15.10.1975', '184', '78', '325', '310'], ['anderson rodrigues', '21.05.1974', '190', '95', '330', '321'], ['nalbert bitencourt', '09.03.1974', '195', '82', '329', '309'], ['gustavo endres', '23.08.1975', '203', '98', '337', '325'], ['rodrigo santana', '17.04.1979', '205', '85', '350', '328'], ['ricardo garcia', '19.11.1975', '191', '89', '337', '320'], ['dante amaral', '30.09.1980', '201', '86', '345', '327']] |
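
Superlative records use `argmax` (or `argmin`) on a numeric column and then `hop` to another column of the winning row. A sketch, assuming the column parses cleanly as a number:

```python
# Illustrative sketch: argmax over a numeric column, then read another column of that row.
def argmax(rows, header, column):
    idx = header.index(column)
    return max(rows, key=lambda row: float(row[idx]))

header = ["name", "date of birth", "height", "weight", "spike", "block"]
rows = [
    ["gustavo endres", "23.08.1975", "203", "98", "337", "325"],
    ["rodrigo santana", "17.04.1979", "205", "85", "350", "328"],
    ["dante amaral", "30.09.1980", "201", "86", "345", "327"],
]
tallest = argmax(rows, header, "height")
print(tallest[header.index("name")])  # rodrigo santana
```
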
north american x - 15 | https://en.wikipedia.org/wiki/North_American_X-15 | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/1-221315-3.html.csv | superlative | joseph a walker reached the highest max altitude among all pilots of the north american x - 15 . | {'scope': 'all', 'col_superlative': '8', 'row_superlative': '11', 'value_mentioned': 'no', 'max_or_min': 'max', 'other_col': '1', 'subset': None} | {'func': 'str_eq', 'args': [{'func': 'str_hop', 'args': [{'func': 'argmax', 'args': ['all_rows', 'max altitude ( miles )'], 'result': None, 'ind': 0, 'tostr': 'argmax { all_rows ; max altitude ( miles ) }'}, 'pilot'], 'result': 'joseph a walker', 'ind': 1, 'tostr': 'hop { argmax { all_rows ; max altitude ( miles ) } ; pilot }'}, 'joseph a walker'], 'result': True, 'ind': 2, 'tostr': 'eq { hop { argmax { all_rows ; max altitude ( miles ) } ; pilot } ; joseph a walker } = true', 'tointer': 'select the row whose max altitude ( miles ) record of all rows is maximum . the pilot record of this row is joseph a walker .'} | eq { hop { argmax { all_rows ; max altitude ( miles ) } ; pilot } ; joseph a walker } = true | select the row whose max altitude ( miles ) record of all rows is maximum . the pilot record of this row is joseph a walker . | 3 | 3 | {'str_eq_2': 2, 'result_3': 3, 'str_hop_1': 1, 'argmax_0': 0, 'all_rows_4': 4, 'max altitude (miles)_5': 5, 'pilot_6': 6, 'joseph a walker_7': 7} | {'str_eq_2': 'str_eq', 'result_3': 'true', 'str_hop_1': 'str_hop', 'argmax_0': 'argmax', 'all_rows_4': 'all_rows', 'max altitude (miles)_5': 'max altitude ( miles )', 'pilot_6': 'pilot', 'joseph a walker_7': 'joseph a walker'} | {'str_eq_2': [3], 'result_3': [], 'str_hop_1': [2], 'argmax_0': [1], 'all_rows_4': [0], 'max altitude (miles)_5': [0], 'pilot_6': [1], 'joseph a walker_7': [2]} | ['pilot', 'organization', 'total flights', 'usaf space flights', 'fai space flights', 'max mach', 'max speed ( mph )', 'max altitude ( miles )'] | [['michael j adams', 'us air force', '7', '1', '0', '5.59', '3822', '50.3'], ['neil armstrong', 'nasa', '7', '0', '0', '5.74', '3989', '39.2'], ['scott crossfield', 'north american aviation', '14', '0', '0', '2.97', '1959', '15.3'], ['william h dana', 'nasa', '16', '2', '0', '5.53', '3897', '58.1'], ['joseph h engle', 'us air force', '16', '3', '0', '5.71', '3887', '53.1'], ['william j pete knight', 'us air force', '16', '1', '0', '6.70', '4519', '53.1'], ['john b mckay', 'nasa', '29', '1', '0', '5.65', '3863', '55.9'], ['forrest s petersen', 'us navy', '5', '0', '0', '5.3', '3600', '19.2'], ['robert a rushworth', 'us air force', '34', '1', '0', '6.06', '4017', '53.9'], ['milton o thompson', 'nasa', '14', '0', '0', '5.48', '3723', '40.5'], ['joseph a walker', 'nasa', '25', '3', '2', '5.92', '4104', '67.0']]
1961 buffalo bills season | https://en.wikipedia.org/wiki/1961_Buffalo_Bills_season | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-14102379-4.html.csv | count | in the 1961 buffalo bills season , when the bills won , there were two times the game was at war memorial stadium . | {'scope': 'subset', 'criterion': 'equal', 'value': 'war memorial stadium', 'result': '2', 'col': '5', 'subset': {'col': '4', 'criterion': 'fuzzily_match', 'value': 'w'}} | {'func': 'eq', 'args': [{'func': 'count', 'args': [{'func': 'filter_str_eq', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'result', 'w'], 'result': None, 'ind': 0, 'tostr': 'filter_eq { all_rows ; result ; w }', 'tointer': 'select the rows whose result record fuzzily matches to w .'}, 'stadium', 'war memorial stadium'], 'result': None, 'ind': 1, 'tointer': 'select the rows whose result record fuzzily matches to w . among these rows , select the rows whose stadium record fuzzily matches to war memorial stadium .', 'tostr': 'filter_eq { filter_eq { all_rows ; result ; w } ; stadium ; war memorial stadium }'}], 'result': '2', 'ind': 2, 'tostr': 'count { filter_eq { filter_eq { all_rows ; result ; w } ; stadium ; war memorial stadium } }', 'tointer': 'select the rows whose result record fuzzily matches to w . among these rows , select the rows whose stadium record fuzzily matches to war memorial stadium . the number of such rows is 2 .'}, '2'], 'result': True, 'ind': 3, 'tostr': 'eq { count { filter_eq { filter_eq { all_rows ; result ; w } ; stadium ; war memorial stadium } } ; 2 } = true', 'tointer': 'select the rows whose result record fuzzily matches to w . among these rows , select the rows whose stadium record fuzzily matches to war memorial stadium . the number of such rows is 2 .'} | eq { count { filter_eq { filter_eq { all_rows ; result ; w } ; stadium ; war memorial stadium } } ; 2 } = true | select the rows whose result record fuzzily matches to w . among these rows , select the rows whose stadium record fuzzily matches to war memorial stadium . the number of such rows is 2 . 
| 4 | 4 | {'eq_3': 3, 'result_4': 4, 'count_2': 2, 'filter_str_eq_1': 1, 'filter_str_eq_0': 0, 'all_rows_5': 5, 'result_6': 6, 'w_7': 7, 'stadium_8': 8, 'war memorial stadium_9': 9, '2_10': 10} | {'eq_3': 'eq', 'result_4': 'true', 'count_2': 'count', 'filter_str_eq_1': 'filter_str_eq', 'filter_str_eq_0': 'filter_str_eq', 'all_rows_5': 'all_rows', 'result_6': 'result', 'w_7': 'w', 'stadium_8': 'stadium', 'war memorial stadium_9': 'war memorial stadium', '2_10': '2'} | {'eq_3': [4], 'result_4': [], 'count_2': [3], 'filter_str_eq_1': [2], 'filter_str_eq_0': [1], 'all_rows_5': [0], 'result_6': [0], 'w_7': [0], 'stadium_8': [1], 'war memorial stadium_9': [1], '2_10': [3]} | ['week', 'date', 'opponent', 'result', 'stadium', 'record', 'attendance'] | [['1', 'september 10 , 1961', 'denver broncos', 'l 22 - 10', 'war memorial stadium', '0 - 1', '16636'], ['2', 'september 17 , 1961', 'new york titans', 'w 41 - 31', 'war memorial stadium', '1 - 1', '15584'], ['3', 'september 23 , 1961', 'boston patriots', 'l 23 - 21', 'war memorial stadium', '1 - 2', '21504'], ['4', 'september 30 , 1961', 'san diego chargers', 'l 19 - 11', 'war memorial stadium', '1 - 3', '20742'], ['5', 'october 8 , 1961', 'houston oilers', 'w 22 - 12', 'jeppesen stadium', '2 - 3', '22761'], ['6', 'october 15 , 1961', 'dallas texans', 'w 27 - 24', 'war memorial stadium', '3 - 3', '20678'], ['7', 'october 22 , 1961', 'boston patriots', 'l 52 - 21', 'boston university field', '3 - 4', '9398'], ['8', 'october 29 , 1961', 'houston oilers', 'l 28 - 16', 'war memorial stadium', '3 - 5', '21237'], ['9', 'november 5 , 1961', 'oakland raiders', 'l 31 - 22', 'war memorial stadium', '3 - 6', '17027'], ['10', 'november 12 , 1961', 'dallas texans', 'w 30 - 20', 'cotton bowl', '4 - 6', '15000'], ['11', 'november 19 , 1961', 'denver broncos', 'w 23 - 10', 'bears stadium', '5 - 6', '7645'], ['12', 'november 23 , 1961', 'new york titans', 'l 21 - 14', 'polo grounds', '5 - 7', '12023'], ['13', 'december 3 , 1961', 'oakland raiders', 'w 26 - 21', 'candlestick park', '6 - 7', '8011'], ['14', 'december 10 , 1961', 'san diego chargers', 'l 28 - 10', 'balboa stadium', '6 - 8', '24486']] |
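
Count records nest one fuzzy filter inside another and compare the surviving row count to a constant. A sketch over a small subset of the table above, with the same substring-matching assumption:

```python
# Illustrative sketch: nested filter_eq followed by count, as in the count records.
def filter_eq(rows, header, column, value):
    idx = header.index(column)
    return [row for row in rows if value.lower() in row[idx].lower()]

header = ["week", "date", "opponent", "result", "stadium", "record", "attendance"]
rows = [
    ["2", "september 17 , 1961", "new york titans", "w 41 - 31", "war memorial stadium", "1 - 1", "15584"],
    ["5", "october 8 , 1961", "houston oilers", "w 22 - 12", "jeppesen stadium", "2 - 3", "22761"],
    ["6", "october 15 , 1961", "dallas texans", "w 27 - 24", "war memorial stadium", "3 - 3", "20678"],
]
wins = filter_eq(rows, header, "result", "w")
at_war_memorial = filter_eq(wins, header, "stadium", "war memorial stadium")
print(len(at_war_memorial) == 2)  # True for this toy subset
```
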
swimming at the 2000 summer olympics - women 's 200 metre breaststroke | https://en.wikipedia.org/wiki/Swimming_at_the_2000_Summer_Olympics_%E2%80%93_Women%27s_200_metre_breaststroke | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-12382876-4.html.csv | majority | the majority of swimmers in the women 's 200 metre breaststroke at the 2000 summer olympics , came in with a time of 2:31.71 or better . | {'scope': 'all', 'col': '5', 'most_or_all': 'all', 'criterion': 'less_than_eq', 'value': '2:31.71', 'subset': None} | {'func': 'all_less_eq', 'args': ['all_rows', 'time', '2:31.71'], 'result': True, 'ind': 0, 'tointer': 'for the time records of all rows , all of them are less than or equal to 2:31.71 .', 'tostr': 'all_less_eq { all_rows ; time ; 2:31.71 } = true'} | all_less_eq { all_rows ; time ; 2:31.71 } = true | for the time records of all rows , all of them are less than or equal to 2:31.71 . | 1 | 1 | {'all_less_eq_0': 0, 'result_1': 1, 'all_rows_2': 2, 'time_3': 3, '2:31.71_4': 4} | {'all_less_eq_0': 'all_less_eq', 'result_1': 'true', 'all_rows_2': 'all_rows', 'time_3': 'time', '2:31.71_4': '2:31.71'} | {'all_less_eq_0': [1], 'result_1': [], 'all_rows_2': [0], 'time_3': [0], '2:31.71_4': [0]} | ['rank', 'lane', 'name', 'nationality', 'time'] | [['1', '4', 'kristy kowal', 'united states', '2:25.46'], ['2', '6', 'sarah poewe', 'south africa', '2:25.54'], ['3', '7', 'luo xuejuan', 'china', '2:25.86'], ['4', '5', 'karine brãmond', 'france', '2:27.86'], ['5', '3', 'caroline hildreth', 'australia', '2:28.30'], ['6', '2', 'ku hyo - jin', 'south korea', '2:28.50'], ['7', '1', 'anne poleska', 'germany', '2:28.99'], ['8', '8', 'junko isoda', 'japan', '2:31.71']] |
the great british bake off | https://en.wikipedia.org/wiki/The_Great_British_Bake_Off | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/1-28962227-1.html.csv | count | four runners-up competed in a series that premiered on the 14th of august . | {'scope': 'all', 'criterion': 'fuzzily_match', 'value': '14 august', 'result': '4', 'col': '2', 'subset': None} | {'func': 'eq', 'args': [{'func': 'count', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'premiere', '14 august'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose premiere record fuzzily matches to 14 august .', 'tostr': 'filter_eq { all_rows ; premiere ; 14 august }'}], 'result': '4', 'ind': 1, 'tostr': 'count { filter_eq { all_rows ; premiere ; 14 august } }', 'tointer': 'select the rows whose premiere record fuzzily matches to 14 august . the number of such rows is 4 .'}, '4'], 'result': True, 'ind': 2, 'tostr': 'eq { count { filter_eq { all_rows ; premiere ; 14 august } } ; 4 } = true', 'tointer': 'select the rows whose premiere record fuzzily matches to 14 august . the number of such rows is 4 .'} | eq { count { filter_eq { all_rows ; premiere ; 14 august } } ; 4 } = true | select the rows whose premiere record fuzzily matches to 14 august . the number of such rows is 4 . | 3 | 3 | {'eq_2': 2, 'result_3': 3, 'count_1': 1, 'filter_str_eq_0': 0, 'all_rows_4': 4, 'premiere_5': 5, '14 august_6': 6, '4_7': 7} | {'eq_2': 'eq', 'result_3': 'true', 'count_1': 'count', 'filter_str_eq_0': 'filter_str_eq', 'all_rows_4': 'all_rows', 'premiere_5': 'premiere', '14 august_6': '14 august', '4_7': '4'} | {'eq_2': [3], 'result_3': [], 'count_1': [2], 'filter_str_eq_0': [1], 'all_rows_4': [0], 'premiere_5': [0], '14 august_6': [0], '4_7': [2]} | ['series', 'premiere', 'finale', 'runners - up', 'winner'] | [['1', '17 august 2010', '21 september 2010', 'miranda gore browne', 'edd kimber'], ['1', '17 august 2010', '21 september 2010', 'ruth clemens', 'edd kimber'], ['2', '14 august 2011', '4 october 2011', 'holly bell', 'joanne wheatley'], ['2', '14 august 2011', '4 october 2011', 'mary - anne boermans', 'joanne wheatley'], ['3', '14 august 2012', '16 october 2012', 'brendan lynch', 'john whaite'], ['3', '14 august 2012', '16 october 2012', 'james morton', 'john whaite'], ['4', '20 august 2013', '22 october 2013', 'kimberley wilson', 'frances quinn']] |
2005 pga championship | https://en.wikipedia.org/wiki/2005_PGA_Championship | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-12512153-4.html.csv | count | during the 2005 pga championship , with players from the united states , 5 players had scores of over 67 . | {'scope': 'subset', 'criterion': 'greater_than', 'value': '67', 'result': '5', 'col': '4', 'subset': {'col': '3', 'criterion': 'equal', 'value': 'united states'}} | {'func': 'eq', 'args': [{'func': 'count', 'args': [{'func': 'filter_greater', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'country', 'united states'], 'result': None, 'ind': 0, 'tostr': 'filter_eq { all_rows ; country ; united states }', 'tointer': 'select the rows whose country record fuzzily matches to united states .'}, 'score', '67'], 'result': None, 'ind': 1, 'tointer': 'select the rows whose country record fuzzily matches to united states . among these rows , select the rows whose score record is greater than 67 .', 'tostr': 'filter_greater { filter_eq { all_rows ; country ; united states } ; score ; 67 }'}], 'result': '5', 'ind': 2, 'tostr': 'count { filter_greater { filter_eq { all_rows ; country ; united states } ; score ; 67 } }', 'tointer': 'select the rows whose country record fuzzily matches to united states . among these rows , select the rows whose score record is greater than 67 . the number of such rows is 5 .'}, '5'], 'result': True, 'ind': 3, 'tostr': 'eq { count { filter_greater { filter_eq { all_rows ; country ; united states } ; score ; 67 } } ; 5 } = true', 'tointer': 'select the rows whose country record fuzzily matches to united states . among these rows , select the rows whose score record is greater than 67 . the number of such rows is 5 .'} | eq { count { filter_greater { filter_eq { all_rows ; country ; united states } ; score ; 67 } } ; 5 } = true | select the rows whose country record fuzzily matches to united states . among these rows , select the rows whose score record is greater than 67 . the number of such rows is 5 . 
| 4 | 4 | {'eq_3': 3, 'result_4': 4, 'count_2': 2, 'filter_greater_1': 1, 'filter_str_eq_0': 0, 'all_rows_5': 5, 'country_6': 6, 'united states_7': 7, 'score_8': 8, '67_9': 9, '5_10': 10} | {'eq_3': 'eq', 'result_4': 'true', 'count_2': 'count', 'filter_greater_1': 'filter_greater', 'filter_str_eq_0': 'filter_str_eq', 'all_rows_5': 'all_rows', 'country_6': 'country', 'united states_7': 'united states', 'score_8': 'score', '67_9': '67', '5_10': '5'} | {'eq_3': [4], 'result_4': [], 'count_2': [3], 'filter_greater_1': [2], 'filter_str_eq_0': [1], 'all_rows_5': [0], 'country_6': [0], 'united states_7': [0], 'score_8': [1], '67_9': [1], '5_10': [3]} | ['place', 'player', 'country', 'score', 'to par'] | [['t1', 'stephen ames', 'canada', '67', '- 3'], ['t1', 'stuart appleby', 'australia', '67', '- 3'], ['t1', 'ben curtis', 'united states', '67', '- 3'], ['t1', 'trevor immelman', 'south africa', '67', '- 3'], ['t1', 'phil mickelson', 'united states', '67', '- 3'], ['t1', 'rory sabbatini', 'south africa', '67', '- 3'], ['t7', 'ben crane', 'united states', '68', '- 2'], ['t7', 'steve elkington', 'australia', '68', '- 2'], ['t7', 'retief goosen', 'south africa', '68', '- 2'], ['t7', 'bernhard langer', 'germany', '68', '- 2'], ['t7', 'davis love iii', 'united states', '68', '- 2'], ['t7', 'greg owen', 'england', '68', '- 2'], ['t7', 'jesper parnevik', 'sweden', '68', '- 2'], ['t7', 'pat perez', 'united states', '68', '- 2'], ['t7', 'john rollins', 'united states', '68', '- 2'], ['t7', 'heath slocum', 'united states', '68', '- 2'], ['t7', 'lee westwood', 'england', '68', '- 2']] |
2008 - 09 temple owls men 's basketball team | https://en.wikipedia.org/wiki/2008%E2%80%9309_Temple_Owls_men%27s_basketball_team | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/1-30054758-5.html.csv | superlative | the most points scored in an owl 's game in the 08-09 season between february 5th and february 26th was 83 . | {'scope': 'all', 'col_superlative': '4', 'row_superlative': '1', 'value_mentioned': 'yes', 'max_or_min': 'max', 'other_col': '2', 'subset': None} | {'func': 'and', 'args': [{'func': 'eq', 'args': [{'func': 'max', 'args': ['all_rows', 'score'], 'result': 'l 83 - 74', 'ind': 0, 'tostr': 'max { all_rows ; score }', 'tointer': 'the maximum score record of all rows is l 83 - 74 .'}, 'l 83 - 74'], 'result': True, 'ind': 1, 'tostr': 'eq { max { all_rows ; score } ; l 83 - 74 }', 'tointer': 'the maximum score record of all rows is l 83 - 74 .'}, {'func': 'str_eq', 'args': [{'func': 'str_hop', 'args': [{'func': 'argmax', 'args': ['all_rows', 'score'], 'result': None, 'ind': 2, 'tostr': 'argmax { all_rows ; score }'}, 'date'], 'result': 'february 5', 'ind': 3, 'tostr': 'hop { argmax { all_rows ; score } ; date }'}, 'february 5'], 'result': True, 'ind': 4, 'tostr': 'eq { hop { argmax { all_rows ; score } ; date } ; february 5 }', 'tointer': 'the date record of the row with superlative score record is february 5 .'}], 'result': True, 'ind': 5, 'tostr': 'and { eq { max { all_rows ; score } ; l 83 - 74 } ; eq { hop { argmax { all_rows ; score } ; date } ; february 5 } } = true', 'tointer': 'the maximum score record of all rows is l 83 - 74 . the date record of the row with superlative score record is february 5 .'} | and { eq { max { all_rows ; score } ; l 83 - 74 } ; eq { hop { argmax { all_rows ; score } ; date } ; february 5 } } = true | the maximum score record of all rows is l 83 - 74 . the date record of the row with superlative score record is february 5 . 
| 6 | 6 | {'and_5': 5, 'result_6': 6, 'eq_1': 1, 'max_0': 0, 'all_rows_7': 7, 'score_8': 8, 'l 83 - 74_9': 9, 'str_eq_4': 4, 'str_hop_3': 3, 'argmax_2': 2, 'all_rows_10': 10, 'score_11': 11, 'date_12': 12, 'february 5_13': 13} | {'and_5': 'and', 'result_6': 'true', 'eq_1': 'eq', 'max_0': 'max', 'all_rows_7': 'all_rows', 'score_8': 'score', 'l 83 - 74_9': 'l 83 - 74', 'str_eq_4': 'str_eq', 'str_hop_3': 'str_hop', 'argmax_2': 'argmax', 'all_rows_10': 'all_rows', 'score_11': 'score', 'date_12': 'date', 'february 5_13': 'february 5'} | {'and_5': [6], 'result_6': [], 'eq_1': [5], 'max_0': [1], 'all_rows_7': [0], 'score_8': [0], 'l 83 - 74_9': [1], 'str_eq_4': [5], 'str_hop_3': [4], 'argmax_2': [3], 'all_rows_10': [2], 'score_11': [2], 'date_12': [3], 'february 5_13': [4]} | ['game', 'date', 'team', 'score', 'high points', 'high rebounds', 'high assists', 'location attendance', 'record'] | [['21', 'february 5', '9 xavier', 'l 83 - 74', 'olmos - 18', 'allen - 14', 'christmas - 8', 'cintas center , cincinnati , oh ( 10250 )', '12 - 9 ( 4 - 3 )'], ['22', 'february 8', 'rhode island', 'w 68 - 62', 'allen - 23', 'allen - 13', 'inge - 7', 'liacouras center , philadelphia , pa ( 5654 )', '13 - 9 ( 5 - 3 )'], ['23', 'february 12', "saint joseph 's", 'w 61 - 59', 'christmas - 19', 'christmas - 11', 'christmas / inge - 4', 'the palestra , philadelphia , pa ( 8722 )', '14 - 9 ( 6 - 3 )'], ['24', 'february 15', 'duquesne', 'w 78 - 73', 'fernandez - 19', 'craig williams - 6', 'christmas - 4', 'aj palumbo center , pittsburgh , pa ( 4029 )', '15 - 9 ( 7 - 3 )'], ['25', 'february 18', 'fordham', 'w 72 - 45', 'allen - 19', 'allen - 11', 'fernandez - 4', 'liacouras center , philadelphia , pa ( 3837 )', '16 - 9 ( 8 - 3 )'], ['26', 'february 22', 'st bonaventure', 'w 70 - 56', 'allen - 20', 'allen - 18', 'allen - 4', 'liacouras center , philadelphia , pa ( 7092 )', '17 - 9 ( 9 - 3 )'], ['27', 'february 26', 'la salle', 'l 70 - 63', 'christmas - 19', 'allen - 12', 'fernandez - 6', 'liacouras center , philadelphia , pa ( 6031 )', '17 - 10 ( 9 - 4 )']] |
stephanie vogt | https://en.wikipedia.org/wiki/Stephanie_Vogt | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-16306899-6.html.csv | unique | the tournament played on 31 october 2010 was the only tournament that stephanie vogt played in egypt . | {'scope': 'all', 'row': '7', 'col': '3', 'col_other': '2', 'criterion': 'equal', 'value': 'egypt', 'subset': None} | {'func': 'and', 'args': [{'func': 'only', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'tournament', 'egypt'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose tournament record fuzzily matches to egypt .', 'tostr': 'filter_eq { all_rows ; tournament ; egypt }'}], 'result': True, 'ind': 1, 'tostr': 'only { filter_eq { all_rows ; tournament ; egypt } }', 'tointer': 'select the rows whose tournament record fuzzily matches to egypt . there is only one such row in the table .'}, {'func': 'str_eq', 'args': [{'func': 'str_hop', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'tournament', 'egypt'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose tournament record fuzzily matches to egypt .', 'tostr': 'filter_eq { all_rows ; tournament ; egypt }'}, 'date'], 'result': '31 october 2010', 'ind': 2, 'tostr': 'hop { filter_eq { all_rows ; tournament ; egypt } ; date }'}, '31 october 2010'], 'result': True, 'ind': 3, 'tostr': 'eq { hop { filter_eq { all_rows ; tournament ; egypt } ; date } ; 31 october 2010 }', 'tointer': 'the date record of this unqiue row is 31 october 2010 .'}], 'result': True, 'ind': 4, 'tostr': 'and { only { filter_eq { all_rows ; tournament ; egypt } } ; eq { hop { filter_eq { all_rows ; tournament ; egypt } ; date } ; 31 october 2010 } } = true', 'tointer': 'select the rows whose tournament record fuzzily matches to egypt . there is only one such row in the table . the date record of this unqiue row is 31 october 2010 .'} | and { only { filter_eq { all_rows ; tournament ; egypt } } ; eq { hop { filter_eq { all_rows ; tournament ; egypt } ; date } ; 31 october 2010 } } = true | select the rows whose tournament record fuzzily matches to egypt . there is only one such row in the table . the date record of this unqiue row is 31 october 2010 . 
| 6 | 5 | {'and_4': 4, 'result_5': 5, 'only_1': 1, 'filter_str_eq_0': 0, 'all_rows_6': 6, 'tournament_7': 7, 'egypt_8': 8, 'str_eq_3': 3, 'str_hop_2': 2, 'date_9': 9, '31 october 2010_10': 10} | {'and_4': 'and', 'result_5': 'true', 'only_1': 'only', 'filter_str_eq_0': 'filter_str_eq', 'all_rows_6': 'all_rows', 'tournament_7': 'tournament', 'egypt_8': 'egypt', 'str_eq_3': 'str_eq', 'str_hop_2': 'str_hop', 'date_9': 'date', '31 october 2010_10': '31 october 2010'} | {'and_4': [5], 'result_5': [], 'only_1': [4], 'filter_str_eq_0': [1, 2], 'all_rows_6': [0], 'tournament_7': [0], 'egypt_8': [0], 'str_eq_3': [4], 'str_hop_2': [3], 'date_9': [2], '31 october 2010_10': [3]} | ['outcome', 'date', 'tournament', 'surface', 'opponent', 'score'] | [['winner', '24 june 2007', 'davos , switzerland', 'clay', 'jessica moore', '6 - 4 , 4 - 6 , 6 - 3'], ['runner - up', '19 august 2007', 'pesaro , italy', 'clay', 'polona hercog', '2 - 6 , 6 - 2 , 1 - 6'], ['runner - up', '28 october 2007', 'mexico city , mexico', 'hard', 'olivia sanchez', '6 - 2 , 2 - 6 , 2 - 6'], ['runner - up', '16 february 2008', 'majora , spain', 'clay', 'polona hercog', '6 - 4 , 1 - 6 , 3 - 6'], ['winner', '4 may 2008', 'makarska , croatia', 'clay', 'anastasia pivovarova', '6 - 2 , 6 - 3'], ['winner', '29 may 2010', 'velenje , slovenia', 'clay', 'pavla šmídová', '6 - 1 , 6 - 2'], ['winner', '31 october 2010', 'cairo , egypt', 'clay', 'maša zec peškirič', '6 - 1 , 6 - 3'], ['runner - up', '23 january 2011', 'andrézieux - bouthéon , france', 'hard', 'mona barthel', '3 - 6 , 6 - 3 , 4 - 6'], ['runner - up', '10 july 2011', 'aschaffenburg , germany', 'clay', 'florencia molinero', '6 - 7 ( 6 - 8 ) , 1 - 6'], ['winner', '11 september 2011', 'alphen aan den rijn , netherlands', 'clay', 'katarzyna piter', '6 - 2 , 6 - 4'], ['runner - up', '18 september 2011', 'rotterdam , netherlands', 'clay', 'dinah pfizenmaier', '6 - 3 , 1 - 6 , 1 - 6'], ['runner - up', '3 november 2012', 'netanya , israel', 'hard', 'anna karolína schmiedlová', '6 - 0 , 3 - 6 , 4 - 6'], ['winner', '10 march 2013', 'sutton , united kingdom', 'hard ( i )', 'carina witthöft', '3 - 6 , 6 - 4 , 6 - 3'], ['winner', '17 march 2013', 'bath , united kingdom', 'hard ( i )', 'an - sophie mestach', '7 - 6 ( 7 - 3 ) , 6 - 3'], ['winner', '13 july 2013', 'biarritz , france', 'clay', 'anna karolína schmiedlová', '1 - 6 , 6 - 3 , 6 - 2'], ['winner', '15 september 2013', 'podgorica , montenegro', 'clay', 'anett kontaveit', '6 - 4 , 6 - 3']] |
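
Unique records assert that exactly one row survives the filter (`only`) and then check another field of that row. A sketch under the same assumptions:

```python
# Illustrative sketch: the "only" uniqueness check plus a hop on the single surviving row.
def filter_eq(rows, header, column, value):
    idx = header.index(column)
    return [row for row in rows if value.lower() in row[idx].lower()]

header = ["outcome", "date", "tournament", "surface", "opponent", "score"]
rows = [
    ["winner", "29 may 2010", "velenje , slovenia", "clay", "pavla šmídová", "6 - 1 , 6 - 2"],
    ["winner", "31 october 2010", "cairo , egypt", "clay", "maša zec peškirič", "6 - 1 , 6 - 3"],
]
matches = filter_eq(rows, header, "tournament", "egypt")
is_only = len(matches) == 1
print(is_only and matches[0][header.index("date")] == "31 october 2010")  # True
```
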
wtbs - ld | https://en.wikipedia.org/wiki/WTBS-LD | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-1097268-1.html.csv | unique | channel 26.1 is the only wtbs-ld to have an aspect of 16:9 . | {'scope': 'all', 'row': '1', 'col': '3', 'col_other': '1', 'criterion': 'equal', 'value': '16:9', 'subset': None} | {'func': 'and', 'args': [{'func': 'only', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'aspect', '16:9'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose aspect record fuzzily matches to 16:9 .', 'tostr': 'filter_eq { all_rows ; aspect ; 16:9 }'}], 'result': True, 'ind': 1, 'tostr': 'only { filter_eq { all_rows ; aspect ; 16:9 } }', 'tointer': 'select the rows whose aspect record fuzzily matches to 16:9 . there is only one such row in the table .'}, {'func': 'eq', 'args': [{'func': 'num_hop', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'aspect', '16:9'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose aspect record fuzzily matches to 16:9 .', 'tostr': 'filter_eq { all_rows ; aspect ; 16:9 }'}, 'channel'], 'result': '26.1', 'ind': 2, 'tostr': 'hop { filter_eq { all_rows ; aspect ; 16:9 } ; channel }'}, '26.1'], 'result': True, 'ind': 3, 'tostr': 'eq { hop { filter_eq { all_rows ; aspect ; 16:9 } ; channel } ; 26.1 }', 'tointer': 'the channel record of this unqiue row is 26.1 .'}], 'result': True, 'ind': 4, 'tostr': 'and { only { filter_eq { all_rows ; aspect ; 16:9 } } ; eq { hop { filter_eq { all_rows ; aspect ; 16:9 } ; channel } ; 26.1 } } = true', 'tointer': 'select the rows whose aspect record fuzzily matches to 16:9 . there is only one such row in the table . the channel record of this unqiue row is 26.1 .'} | and { only { filter_eq { all_rows ; aspect ; 16:9 } } ; eq { hop { filter_eq { all_rows ; aspect ; 16:9 } ; channel } ; 26.1 } } = true | select the rows whose aspect record fuzzily matches to 16:9 . there is only one such row in the table . the channel record of this unqiue row is 26.1 . | 6 | 5 | {'and_4': 4, 'result_5': 5, 'only_1': 1, 'filter_str_eq_0': 0, 'all_rows_6': 6, 'aspect_7': 7, '16:9_8': 8, 'eq_3': 3, 'num_hop_2': 2, 'channel_9': 9, '26.1_10': 10} | {'and_4': 'and', 'result_5': 'true', 'only_1': 'only', 'filter_str_eq_0': 'filter_str_eq', 'all_rows_6': 'all_rows', 'aspect_7': 'aspect', '16:9_8': '16:9', 'eq_3': 'eq', 'num_hop_2': 'num_hop', 'channel_9': 'channel', '26.1_10': '26.1'} | {'and_4': [5], 'result_5': [], 'only_1': [4], 'filter_str_eq_0': [1, 2], 'all_rows_6': [0], 'aspect_7': [0], '16:9_8': [0], 'eq_3': [4], 'num_hop_2': [3], 'channel_9': [2], '26.1_10': [3]} | ['channel', 'video', 'aspect', 'psip short name', 'programming'] | [['26.1', '1080i', '16:9', 'mfox', 'mundofox'], ['26.2', '480i', '4:3', 'lwn', 'live well network'], ['26.4', '480i', '4:3', 'jtv', 'jewelry tv'], ['26.5', '480i', '4:3', 'f24news', 'france 24 blank screen'], ['26.8', '480i', '4:3', 'tuff tv', 'tuff tv']] |
amino acid | https://en.wikipedia.org/wiki/Amino_acid | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-1207-4.html.csv | superlative | the highest listed hydropathy index of standard amino acids is 4.5 . | {'scope': 'all', 'col_superlative': '6', 'row_superlative': '2', 'value_mentioned': 'yes', 'max_or_min': 'max', 'other_col': 'n/a', 'subset': None} | {'func': 'eq', 'args': [{'func': 'max', 'args': ['all_rows', 'hydropathy index'], 'result': '4.5', 'ind': 0, 'tostr': 'max { all_rows ; hydropathy index }', 'tointer': 'the maximum hydropathy index record of all rows is 4.5 .'}, '4.5'], 'result': True, 'ind': 1, 'tostr': 'eq { max { all_rows ; hydropathy index } ; 4.5 } = true', 'tointer': 'the maximum hydropathy index record of all rows is 4.5 .'} | eq { max { all_rows ; hydropathy index } ; 4.5 } = true | the maximum hydropathy index record of all rows is 4.5 . | 2 | 2 | {'eq_1': 1, 'result_2': 2, 'max_0': 0, 'all_rows_3': 3, 'hydropathy index_4': 4, '4.5_5': 5} | {'eq_1': 'eq', 'result_2': 'true', 'max_0': 'max', 'all_rows_3': 'all_rows', 'hydropathy index_4': 'hydropathy index', '4.5_5': '4.5'} | {'eq_1': [2], 'result_2': [], 'max_0': [1], 'all_rows_3': [0], 'hydropathy index_4': [0], '4.5_5': [1]} | ['amino acid', '3 - letter', '1 - letter', 'side - chain polarity', 'side - chain charge ( ph 7.4 )', 'hydropathy index'] | [['alanine', 'ala', 'a', 'nonpolar', 'neutral', '1.8'], ['arginine', 'arg', 'r', 'basic polar', 'positive', '4.5'], ['asparagine', 'asn', 'n', 'polar', 'neutral', '3.5'], ['aspartic acid', 'asp', 'd', 'acidic polar', 'negative', '3.5'], ['cysteine', 'cys', 'c', 'nonpolar', 'neutral', '2.5'], ['glutamic acid', 'glu', 'e', 'acidic polar', 'negative', '3.5'], ['glutamine', 'gln', 'q', 'polar', 'neutral', '3.5'], ['glycine', 'gly', 'g', 'nonpolar', 'neutral', '0.4'], ['histidine', 'his', 'h', 'basic polar', 'positive ( 10 % ) neutral ( 90 % )', '3.2'], ['isoleucine', 'ile', 'i', 'nonpolar', 'neutral', '4.5'], ['leucine', 'leu', 'l', 'nonpolar', 'neutral', '3.8'], ['lysine', 'lys', 'k', 'basic polar', 'positive', '3.9'], ['methionine', 'met', 'm', 'nonpolar', 'neutral', '1.9'], ['phenylalanine', 'phe', 'f', 'nonpolar', 'neutral', '2.8'], ['proline', 'pro', 'p', 'nonpolar', 'neutral', '1.6'], ['serine', 'ser', 's', 'polar', 'neutral', '0.8'], ['threonine', 'thr', 't', 'polar', 'neutral', '0.7'], ['tryptophan', 'trp', 'w', 'nonpolar', 'neutral', '0.9'], ['tyrosine', 'tyr', 'y', 'polar', 'neutral', '1.3'], ['valine', 'val', 'v', 'nonpolar', 'neutral', '4.2']] |
danny sullivan | https://en.wikipedia.org/wiki/Danny_Sullivan | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-1226454-1.html.csv | superlative | the best qualifying time that danny sullivan had was a time of 225.496 . | {'scope': 'all', 'col_superlative': '3', 'row_superlative': '12', 'value_mentioned': 'yes', 'max_or_min': 'max', 'other_col': 'n/a', 'subset': None} | {'func': 'eq', 'args': [{'func': 'max', 'args': ['all_rows', 'qual'], 'result': '225.496', 'ind': 0, 'tostr': 'max { all_rows ; qual }', 'tointer': 'the maximum qual record of all rows is 225.496 .'}, '225.496'], 'result': True, 'ind': 1, 'tostr': 'eq { max { all_rows ; qual } ; 225.496 } = true', 'tointer': 'the maximum qual record of all rows is 225.496 .'} | eq { max { all_rows ; qual } ; 225.496 } = true | the maximum qual record of all rows is 225.496 . | 2 | 2 | {'eq_1': 1, 'result_2': 2, 'max_0': 0, 'all_rows_3': 3, 'qual_4': 4, '225.496_5': 5} | {'eq_1': 'eq', 'result_2': 'true', 'max_0': 'max', 'all_rows_3': 'all_rows', 'qual_4': 'qual', '225.496_5': '225.496'} | {'eq_1': [2], 'result_2': [], 'max_0': [1], 'all_rows_3': [0], 'qual_4': [0], '225.496_5': [1]} | ['year', 'start', 'qual', 'rank', 'finish', 'laps'] | [['1982', '13', '196.292', '17', '14', '148'], ['1984', '28', '203.567', '17', '29', '57'], ['1985', '8', '210.298', '8', '1', '200'], ['1986', '2', '215.382', '2', '9', '197'], ['1987', '16', '210.271', '6', '13', '160'], ['1988', '2', '216.214', '2', '23', '101'], ['1989', '26', '216.027', '15', '28', '41'], ['1990', '9', '220.310', '9', '32', '19'], ['1991', '9', '218.343', '17', '10', '173'], ['1992', '8', '224.838', '9', '5', '199'], ['1993', '12', '219.428', '19', '33', '29'], ['1995', '18', '225.496', '29', '9', '199']] |
high - temperature superconductivity | https://en.wikipedia.org/wiki/High-temperature_superconductivity | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-101336-1.html.csv | aggregation | the average critical temperature of high - temperature superconductivity compounds is 99.8 degrees kelvin . | {'scope': 'all', 'col': '3', 'type': 'average', 'result': '99.8 degrees', 'subset': None} | {'func': 'round_eq', 'args': [{'func': 'avg', 'args': ['all_rows', 't c ( k )'], 'result': '99.8 degrees', 'ind': 0, 'tostr': 'avg { all_rows ; t c ( k ) }'}, '99.8 degrees'], 'result': True, 'ind': 1, 'tostr': 'round_eq { avg { all_rows ; t c ( k ) } ; 99.8 degrees } = true', 'tointer': 'the average of the t c ( k ) record of all rows is 99.8 degrees .'} | round_eq { avg { all_rows ; t c ( k ) } ; 99.8 degrees } = true | the average of the t c ( k ) record of all rows is 99.8 degrees . | 2 | 2 | {'eq_1': 1, 'result_2': 2, 'avg_0': 0, 'all_rows_3': 3, 't c (k)_4': 4, '99.8 degrees_5': 5} | {'eq_1': 'eq', 'result_2': 'true', 'avg_0': 'avg', 'all_rows_3': 'all_rows', 't c (k)_4': 't c ( k )', '99.8 degrees_5': '99.8 degrees'} | {'eq_1': [2], 'result_2': [], 'avg_0': [1], 'all_rows_3': [0], 't c (k)_4': [0], '99.8 degrees_5': [1]} | ['formula', 'notation', 't c ( k )', 'no of cu - o planes in unit cell', 'crystal structure'] | [['yba 2 cu 3 o 7', '123', '92', '2', 'orthorhombic'], ['bi 2 sr 2 cuo 6', 'bi - 2201', '20', '1', 'tetragonal'], ['bi 2 sr 2 cacu 2 o 8', 'bi - 2212', '85', '2', 'tetragonal'], ['bi 2 sr 2 ca 2 cu 3 o 6', 'bi - 2223', '110', '3', 'tetragonal'], ['tl 2 ba 2 cuo 6', 'tl - 2201', '80', '1', 'tetragonal'], ['tl 2 ba 2 cacu 2 o 8', 'tl - 2212', '108', '2', 'tetragonal'], ['tl 2 ba 2 ca 2 cu 3 o 10', 'tl - 2223', '125', '3', 'tetragonal'], ['tlba 2 ca 3 cu 4 o 11', 'tl - 1234', '122', '4', 'tetragonal'], ['hgba 2 cuo 4', 'hg - 1201', '94', '1', 'tetragonal'], ['hgba 2 cacu 2 o 6', 'hg - 1212', '128', '2', 'tetragonal'], ['hgba 2 ca 2 cu 3 o 8', 'hg - 1223', '134', '3', 'tetragonal']] |
list of army wives episodes | https://en.wikipedia.org/wiki/List_of_Army_Wives_episodes | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/1-11111116-6.html.csv | ordinal | of the episodes of army wives , the one with the second highest number of us viewers was the one titled line of departure . | {'row': '1', 'col': '7', 'order': '2', 'col_other': '3', 'max_or_min': 'max_to_min', 'value_mentioned': 'no', 'scope': 'all', 'subset': None} | {'func': 'str_eq', 'args': [{'func': 'str_hop', 'args': [{'func': 'nth_argmax', 'args': ['all_rows', 'us viewers ( million )', '2'], 'result': None, 'ind': 0, 'tostr': 'nth_argmax { all_rows ; us viewers ( million ) ; 2 }'}, 'title'], 'result': 'line of departure', 'ind': 1, 'tostr': 'hop { nth_argmax { all_rows ; us viewers ( million ) ; 2 } ; title }'}, 'line of departure'], 'result': True, 'ind': 2, 'tostr': 'eq { hop { nth_argmax { all_rows ; us viewers ( million ) ; 2 } ; title } ; line of departure } = true', 'tointer': 'select the row whose us viewers ( million ) record of all rows is 2nd maximum . the title record of this row is line of departure .'} | eq { hop { nth_argmax { all_rows ; us viewers ( million ) ; 2 } ; title } ; line of departure } = true | select the row whose us viewers ( million ) record of all rows is 2nd maximum . the title record of this row is line of departure . | 3 | 3 | {'str_eq_2': 2, 'result_3': 3, 'str_hop_1': 1, 'nth_argmax_0': 0, 'all_rows_4': 4, 'us viewers (million)_5': 5, '2_6': 6, 'title_7': 7, 'line of departure_8': 8} | {'str_eq_2': 'str_eq', 'result_3': 'true', 'str_hop_1': 'str_hop', 'nth_argmax_0': 'nth_argmax', 'all_rows_4': 'all_rows', 'us viewers (million)_5': 'us viewers ( million )', '2_6': '2', 'title_7': 'title', 'line of departure_8': 'line of departure'} | {'str_eq_2': [3], 'result_3': [], 'str_hop_1': [2], 'nth_argmax_0': [1], 'all_rows_4': [0], 'us viewers (million)_5': [0], '2_6': [0], 'title_7': [1], 'line of departure_8': [2]} | ['no in season', 'no in series', 'title', 'directed by', 'written by', 'original air date', 'us viewers ( million )'] | [['1', '69', 'line of departure', 'john t kretchmer', 'debra fordham', 'march 6 , 2011', '4.2'], ['2', '70', 'command presence', 'carl lawrence ludwig', 'karen maser', 'march 13 , 2011', '3.3'], ['3', '71', 'movement to contact', 'john t kretchmer', 'rebecca dameron', 'march 20 , 2011', '3.8'], ['4', '72', 'on behalf of a grateful nation', 'john terlesky', 'tj brady & rasheed newson', 'march 27 , 2011', '4.7'], ['5', '73', 'soldier on', 'rob spera', 'bill rinier', 'april 3 , 2011', '3.3'], ['6', '74', 'walking wounded', 'chris peppe', 'james stanley', 'april 10 , 2011', '3.4'], ['7', '75', 'strategic alliances', 'melanie mayron', 'td mitchell', 'april 17 , 2011', '3.4'], ['8', '76', 'supporting arms', 'emile levisetti', 'mary leah sutton', 'may 1 , 2011', '3.1'], ['9', '77', 'countermeasures', 'james bruce', 'debra fordham', 'may 8 , 2011', '3.3'], ['10', '78', 'battle buddies', 'brian mcnamara', 'tanya biank', 'may 15 , 2011', '3.2'], ['11', '79', 'drop zone', 'rob spera', 'james stanley', 'may 22 , 2011', '3.6'], ['12', '80', 'firefight', 'john terlesky', 'rasheed newson', 'june 5 , 2011', '3.4']] |
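
Ordinal records rank the rows and pick the n-th extreme with `nth_argmax` / `nth_argmin`. A sketch, assuming n is 1-based as the logic strings ("2nd maximum") suggest:

```python
# Illustrative sketch: nth_argmax = sort descending on a numeric column, take the n-th row.
def nth_argmax(rows, header, column, n):
    idx = header.index(column)
    ordered = sorted(rows, key=lambda row: float(row[idx]), reverse=True)
    return ordered[n - 1]  # assumes 1-based n

header = ["no in season", "no in series", "title", "directed by", "written by",
          "original air date", "us viewers ( million )"]
rows = [
    ["1", "69", "line of departure", "john t kretchmer", "debra fordham", "march 6 , 2011", "4.2"],
    ["4", "72", "on behalf of a grateful nation", "john terlesky", "tj brady & rasheed newson", "march 27 , 2011", "4.7"],
    ["11", "79", "drop zone", "rob spera", "james stanley", "may 22 , 2011", "3.6"],
]
second = nth_argmax(rows, header, "us viewers ( million )", 2)
print(second[header.index("title")])  # line of departure
```
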
arkansas rimrockers | https://en.wikipedia.org/wiki/Arkansas_RimRockers | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-1806054-1.html.csv | unique | 2004-05 was the only season that the arkansas rimrockers played in the aba league . | {'scope': 'all', 'row': '2', 'col': '2', 'col_other': '1', 'criterion': 'equal', 'value': 'aba', 'subset': None} | {'func': 'and', 'args': [{'func': 'only', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'league', 'aba'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose league record fuzzily matches to aba .', 'tostr': 'filter_eq { all_rows ; league ; aba }'}], 'result': True, 'ind': 1, 'tostr': 'only { filter_eq { all_rows ; league ; aba } }', 'tointer': 'select the rows whose league record fuzzily matches to aba . there is only one such row in the table .'}, {'func': 'str_eq', 'args': [{'func': 'str_hop', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'league', 'aba'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose league record fuzzily matches to aba .', 'tostr': 'filter_eq { all_rows ; league ; aba }'}, 'season'], 'result': '2004 - 05', 'ind': 2, 'tostr': 'hop { filter_eq { all_rows ; league ; aba } ; season }'}, '2004 - 05'], 'result': True, 'ind': 3, 'tostr': 'eq { hop { filter_eq { all_rows ; league ; aba } ; season } ; 2004 - 05 }', 'tointer': 'the season record of this unqiue row is 2004 - 05 .'}], 'result': True, 'ind': 4, 'tostr': 'and { only { filter_eq { all_rows ; league ; aba } } ; eq { hop { filter_eq { all_rows ; league ; aba } ; season } ; 2004 - 05 } } = true', 'tointer': 'select the rows whose league record fuzzily matches to aba . there is only one such row in the table . the season record of this unqiue row is 2004 - 05 .'} | and { only { filter_eq { all_rows ; league ; aba } } ; eq { hop { filter_eq { all_rows ; league ; aba } ; season } ; 2004 - 05 } } = true | select the rows whose league record fuzzily matches to aba . there is only one such row in the table . the season record of this unqiue row is 2004 - 05 . | 6 | 5 | {'and_4': 4, 'result_5': 5, 'only_1': 1, 'filter_str_eq_0': 0, 'all_rows_6': 6, 'league_7': 7, 'aba_8': 8, 'str_eq_3': 3, 'str_hop_2': 2, 'season_9': 9, '2004 - 05_10': 10} | {'and_4': 'and', 'result_5': 'true', 'only_1': 'only', 'filter_str_eq_0': 'filter_str_eq', 'all_rows_6': 'all_rows', 'league_7': 'league', 'aba_8': 'aba', 'str_eq_3': 'str_eq', 'str_hop_2': 'str_hop', 'season_9': 'season', '2004 - 05_10': '2004 - 05'} | {'and_4': [5], 'result_5': [], 'only_1': [4], 'filter_str_eq_0': [1, 2], 'all_rows_6': [0], 'league_7': [0], 'aba_8': [0], 'str_eq_3': [4], 'str_hop_2': [3], 'season_9': [2], '2004 - 05_10': [3]} | ['season', 'league', 'finish', 'wins', 'losses', 'pct'] | [['arkansas rimrockers', 'arkansas rimrockers', 'arkansas rimrockers', 'arkansas rimrockers', 'arkansas rimrockers', 'arkansas rimrockers'], ['2004 - 05', 'aba', '1st', '28', '5', '848'], ['2005 - 06', 'd - league', '5th', '24', '24', '500'], ['2006 - 07', 'd - league', '6th', '16', '34', '320'], ['regular season', 'regular season', 'regular season', '68', '63', '519'], ['playoffs', 'playoffs', 'playoffs', '4', '0', '1.000']] |
1984 washington redskins season | https://en.wikipedia.org/wiki/1984_Washington_Redskins_season | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-15085579-1.html.csv | count | the washington redskins played against the dallas cowboys 2 times during the 1984 season . | {'scope': 'all', 'criterion': 'equal', 'value': 'dallas cowboys', 'result': '2', 'col': '3', 'subset': None} | {'func': 'eq', 'args': [{'func': 'count', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'opponent', 'dallas cowboys'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose opponent record fuzzily matches to dallas cowboys .', 'tostr': 'filter_eq { all_rows ; opponent ; dallas cowboys }'}], 'result': '2', 'ind': 1, 'tostr': 'count { filter_eq { all_rows ; opponent ; dallas cowboys } }', 'tointer': 'select the rows whose opponent record fuzzily matches to dallas cowboys . the number of such rows is 2 .'}, '2'], 'result': True, 'ind': 2, 'tostr': 'eq { count { filter_eq { all_rows ; opponent ; dallas cowboys } } ; 2 } = true', 'tointer': 'select the rows whose opponent record fuzzily matches to dallas cowboys . the number of such rows is 2 .'} | eq { count { filter_eq { all_rows ; opponent ; dallas cowboys } } ; 2 } = true | select the rows whose opponent record fuzzily matches to dallas cowboys . the number of such rows is 2 . | 3 | 3 | {'eq_2': 2, 'result_3': 3, 'count_1': 1, 'filter_str_eq_0': 0, 'all_rows_4': 4, 'opponent_5': 5, 'dallas cowboys_6': 6, '2_7': 7} | {'eq_2': 'eq', 'result_3': 'true', 'count_1': 'count', 'filter_str_eq_0': 'filter_str_eq', 'all_rows_4': 'all_rows', 'opponent_5': 'opponent', 'dallas cowboys_6': 'dallas cowboys', '2_7': '2'} | {'eq_2': [3], 'result_3': [], 'count_1': [2], 'filter_str_eq_0': [1], 'all_rows_4': [0], 'opponent_5': [0], 'dallas cowboys_6': [0], '2_7': [2]} | ['week', 'date', 'opponent', 'result', 'attendance'] | [['1', 'september 2 , 1984', 'miami dolphins', 'l , 17 - 35', '52683'], ['2', 'september 10 , 1984', 'san francisco 49ers', 'l , 31 - 37', '59707'], ['3', 'september 16 , 1984', 'new york giants', 'w , 30 - 14', '52997'], ['4', 'september 23 , 1984', 'new england patriots', 'w , 26 - 10', '60503'], ['5', 'september 30 , 1984', 'philadelphia eagles', 'w , 20 - 0', '53064'], ['6', 'october 7 , 1984', 'indianapolis colts', 'w , 35 - 7', '60012'], ['7', 'october 14 , 1984', 'dallas cowboys', 'w , 34 - 14', '55431'], ['8', 'october 21 , 1984', 'st louis cardinals', 'l , 24 - 26', '50262'], ['9', 'october 28 , 1984', 'new york giants', 'l , 13 - 37', '76192'], ['10', 'november 5 , 1984', 'atlanta falcons', 'w , 14 - 27', '51301'], ['11', 'november 11 , 1984', 'detroit lions', 'w , 28 - 14', '50212'], ['12', 'november 18 , 1984', 'philadelphia eagles', 'l , 10 - 16', '63117'], ['13', 'november 25 , 1984', 'buffalo bills', 'w , 41 - 14', '51513'], ['14', 'november 29 , 1984', 'minnesota vikings', 'w , 31 - 17', '55017'], ['15', 'december 9 , 1984', 'dallas cowboys', 'w , 30 - 28', '64286'], ['16', 'december 16 , 1984', 'st louis cardinals', 'w , 29 - 27', '54299']] |
1981 denver broncos season | https://en.wikipedia.org/wiki/1981_Denver_Broncos_season | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/1-17972136-1.html.csv | majority | the majority of games during the 1981 denver broncos season were played at mile high stadium . | {'scope': 'all', 'col': '5', 'most_or_all': 'most', 'criterion': 'equal', 'value': 'mile high stadium', 'subset': None} | {'func': 'most_str_eq', 'args': ['all_rows', 'game site', 'mile high stadium'], 'result': True, 'ind': 0, 'tointer': 'for the game site records of all rows , most of them fuzzily match to mile high stadium .', 'tostr': 'most_eq { all_rows ; game site ; mile high stadium } = true'} | most_eq { all_rows ; game site ; mile high stadium } = true | for the game site records of all rows , most of them fuzzily match to mile high stadium . | 1 | 1 | {'most_str_eq_0': 0, 'result_1': 1, 'all_rows_2': 2, 'game site_3': 3, 'mile high stadium_4': 4} | {'most_str_eq_0': 'most_str_eq', 'result_1': 'true', 'all_rows_2': 'all_rows', 'game site_3': 'game site', 'mile high stadium_4': 'mile high stadium'} | {'most_str_eq_0': [1], 'result_1': [], 'all_rows_2': [0], 'game site_3': [0], 'mile high stadium_4': [0]} | ['week', 'date', 'opponent', 'result', 'game site', 'record', 'attendance'] | [['1', 'september 6', 'oakland raiders', 'w 9 - 7', 'mile high stadium', '1 - 0', '74796'], ['2', 'september 13', 'seattle seahawks', 'l 10 - 13', 'kingdome', '1 - 1', '58513'], ['3', 'september 20', 'baltimore colts', 'w 28 - 10', 'mile high stadium', '2 - 1', '74804'], ['4', 'september 27', 'san diego chargers', 'w 42 - 24', 'mile high stadium', '3 - 1', '74822'], ['5', 'october 4', 'oakland raiders', 'w 17 - 0', 'oakland - alameda county coliseum', '4 - 1', '51035'], ['6', 'october 11', 'detroit lions', 'w 27 - 21', 'mile high stadium', '5 - 1', '74816'], ['7', 'october 18', 'kansas city chiefs', 'l 14 - 28', 'arrowhead stadium', '5 - 2', '74672'], ['8', 'october 25', 'buffalo bills', 'l 7 - 9', 'rich stadium', '5 - 3', '77757'], ['9', 'november 2', 'minnesota vikings', 'w 19 - 17', 'mile high stadium', '6 - 3', '74834'], ['10', 'november 8', 'cleveland browns', 'w 23 - 20 ( ot )', 'mile high stadium', '7 - 3', '74859'], ['11', 'november 15', 'tampa bay buccaneers', 'w 24 - 7', 'tampa stadium', '8 - 3', '64518'], ['12', 'november 22', 'cincinnati bengals', 'l 21 - 38', 'riverfront stadium', '8 - 4', '57207'], ['13', 'november 29', 'san diego chargers', 'l 17 - 34', 'jack murphy stadium', '8 - 5', '51533'], ['14', 'december 6', 'kansas city chiefs', 'w 16 - 13', 'mile high stadium', '9 - 5', '74744'], ['15', 'december 13', 'seattle seahawks', 'w 23 - 13', 'mile high stadium', '10 - 5', '74527']] |
2005 pba draft | https://en.wikipedia.org/wiki/2005_PBA_draft | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-11779131-2.html.csv | count | there are three players who have the united states as their country of origin . | {'scope': 'all', 'criterion': 'equal', 'value': 'united states', 'result': '3', 'col': '3', 'subset': None} | {'func': 'eq', 'args': [{'func': 'count', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'country of origin', 'united states'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose country of origin record fuzzily matches to united states .', 'tostr': 'filter_eq { all_rows ; country of origin ; united states }'}], 'result': '3', 'ind': 1, 'tostr': 'count { filter_eq { all_rows ; country of origin ; united states } }', 'tointer': 'select the rows whose country of origin record fuzzily matches to united states . the number of such rows is 3 .'}, '3'], 'result': True, 'ind': 2, 'tostr': 'eq { count { filter_eq { all_rows ; country of origin ; united states } } ; 3 } = true', 'tointer': 'select the rows whose country of origin record fuzzily matches to united states . the number of such rows is 3 .'} | eq { count { filter_eq { all_rows ; country of origin ; united states } } ; 3 } = true | select the rows whose country of origin record fuzzily matches to united states . the number of such rows is 3 . | 3 | 3 | {'eq_2': 2, 'result_3': 3, 'count_1': 1, 'filter_str_eq_0': 0, 'all_rows_4': 4, 'country of origin_5': 5, 'united states_6': 6, '3_7': 7} | {'eq_2': 'eq', 'result_3': 'true', 'count_1': 'count', 'filter_str_eq_0': 'filter_str_eq', 'all_rows_4': 'all_rows', 'country of origin_5': 'country of origin', 'united states_6': 'united states', '3_7': '3'} | {'eq_2': [3], 'result_3': [], 'count_1': [2], 'filter_str_eq_0': [1], 'all_rows_4': [0], 'country of origin_5': [0], 'united states_6': [0], '3_7': [2]} | ['pick', 'player', 'country of origin', 'pba team', 'college'] | [['1', 'jay washington', 'united states', 'air21 express', 'eckerd'], ['2', 'alex cabagnot', 'united states', 'sta lucia realtors', 'hawaii - hilo'], ['3', 'dennis miranda', 'philippines', 'coca - cola tigers', 'feu'], ['4', 'jondan salvador', 'philippines', 'purefoods chunkee giants', 'st benilde'], ['5', 'mark cardona', 'philippines', 'air21 express', 'de la salle'], ['6', 'niã ± o canaleta', 'philippines', 'air21 express', 'ue'], ['7', 'michael holper', 'united states', 'barangay ginebra kings', 'san diego state'], ['8', 'paolo hubalde', 'philippines', 'san miguel beermen', 'ue'], ['9', 'leo najorda', 'philippines', 'red bull barako', 'san sebastian']] |
whobegotyou | https://en.wikipedia.org/wiki/Whobegotyou | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/1-30098144-2.html.csv | superlative | for races that whobegotyou participated in , the longest distance was 2500 meters . | {'scope': 'all', 'col_superlative': '6', 'row_superlative': '6', 'value_mentioned': 'yes', 'max_or_min': 'max', 'other_col': 'n/a', 'subset': None} | {'func': 'eq', 'args': [{'func': 'max', 'args': ['all_rows', 'distance'], 'result': '2500 m', 'ind': 0, 'tostr': 'max { all_rows ; distance }', 'tointer': 'the maximum distance record of all rows is 2500 m .'}, '2500 m'], 'result': True, 'ind': 1, 'tostr': 'eq { max { all_rows ; distance } ; 2500 m } = true', 'tointer': 'the maximum distance record of all rows is 2500 m .'} | eq { max { all_rows ; distance } ; 2500 m } = true | the maximum distance record of all rows is 2500 m . | 2 | 2 | {'eq_1': 1, 'result_2': 2, 'max_0': 0, 'all_rows_3': 3, 'distance_4': 4, '2500 m_5': 5} | {'eq_1': 'eq', 'result_2': 'true', 'max_0': 'max', 'all_rows_3': 'all_rows', 'distance_4': 'distance', '2500 m_5': '2500 m'} | {'eq_1': [2], 'result_2': [], 'max_0': [1], 'all_rows_3': [0], 'distance_4': [0], '2500 m_5': [1]} | ['result', 'date', 'race', 'venue', 'group', 'distance', 'weight ( kg )', 'jockey', 'winner / 2nd'] | [['2nd', '30 aug 2008', 'mcneil stakes', 'caulfield', 'g3', '1200 m', '55', 'm rodd', '1st - sugar babe'], ['7th', '20 sep 2008', 'guineas prelude', 'caulfield', 'g3', '1400 m', '54', 'm rodd', '1st - fernandina'], ['won', '26 sep 2008', 'bill stutt stakes', 'moonee valley', 'g2', '1600 m', '55.5', 'm rodd', '2nd - all american'], ['won', '11 oct 2008', 'caulfield guineas', 'caulfield', 'g1', '1600 m', '55.5', 'm rodd', '2nd - time thief'], ['won', '25 oct 2008', 'aami vase', 'moonee valley', 'g2', '2040 m', '55.5', 'm rodd', '2nd - buffett'], ['2nd', '01 nov 2008', 'victoria derby', 'flemington', 'g1', '2500 m', '55.5', 'm rodd', '1st - rebel raider'], ['2nd', '07 mar 2009', 'food and wine stakes', 'flemington', 'lr', '1400 m', '58.5', 'd oliver', '1st - rockpecker'], ['2nd', '21 mar 2009', 'phar lap stakes', 'rosehill', 'g2', '1500 m', '56.5', 'm rodd', '1st - heart of dreams'], ['9th', '04 apr 2009', 'george ryder stakes', 'rosehill', 'g1', '1500 m', '56.5', 'm rodd', '1st - vision and power'], ['3rd', '18 apr 2009', 'doncaster handicap', 'randwick', 'g1', '1600 m', '54.5', 'm rodd', '1st - vision and power']] |
1985 tampa bay buccaneers season | https://en.wikipedia.org/wiki/1985_Tampa_Bay_Buccaneers_season | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-11449311-2.html.csv | unique | the game on november 24 , 1985 was the only buccaneers game to go to overtime . | {'scope': 'all', 'row': '13', 'col': '4', 'col_other': '2', 'criterion': 'fuzzily_match', 'value': 'ot', 'subset': None} | {'func': 'and', 'args': [{'func': 'only', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'result', 'ot'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose result record fuzzily matches to ot .', 'tostr': 'filter_eq { all_rows ; result ; ot }'}], 'result': True, 'ind': 1, 'tostr': 'only { filter_eq { all_rows ; result ; ot } }', 'tointer': 'select the rows whose result record fuzzily matches to ot . there is only one such row in the table .'}, {'func': 'str_eq', 'args': [{'func': 'str_hop', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'result', 'ot'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose result record fuzzily matches to ot .', 'tostr': 'filter_eq { all_rows ; result ; ot }'}, 'date'], 'result': 'november 24 , 1985', 'ind': 2, 'tostr': 'hop { filter_eq { all_rows ; result ; ot } ; date }'}, 'november 24 , 1985'], 'result': True, 'ind': 3, 'tostr': 'eq { hop { filter_eq { all_rows ; result ; ot } ; date } ; november 24 , 1985 }', 'tointer': 'the date record of this unqiue row is november 24 , 1985 .'}], 'result': True, 'ind': 4, 'tostr': 'and { only { filter_eq { all_rows ; result ; ot } } ; eq { hop { filter_eq { all_rows ; result ; ot } ; date } ; november 24 , 1985 } } = true', 'tointer': 'select the rows whose result record fuzzily matches to ot . there is only one such row in the table . the date record of this unqiue row is november 24 , 1985 .'} | and { only { filter_eq { all_rows ; result ; ot } } ; eq { hop { filter_eq { all_rows ; result ; ot } ; date } ; november 24 , 1985 } } = true | select the rows whose result record fuzzily matches to ot . there is only one such row in the table . the date record of this unqiue row is november 24 , 1985 . | 6 | 5 | {'and_4': 4, 'result_5': 5, 'only_1': 1, 'filter_str_eq_0': 0, 'all_rows_6': 6, 'result_7': 7, 'ot_8': 8, 'str_eq_3': 3, 'str_hop_2': 2, 'date_9': 9, 'november 24 , 1985_10': 10} | {'and_4': 'and', 'result_5': 'true', 'only_1': 'only', 'filter_str_eq_0': 'filter_str_eq', 'all_rows_6': 'all_rows', 'result_7': 'result', 'ot_8': 'ot', 'str_eq_3': 'str_eq', 'str_hop_2': 'str_hop', 'date_9': 'date', 'november 24 , 1985_10': 'november 24 , 1985'} | {'and_4': [5], 'result_5': [], 'only_1': [4], 'filter_str_eq_0': [1, 2], 'all_rows_6': [0], 'result_7': [0], 'ot_8': [0], 'str_eq_3': [4], 'str_hop_2': [3], 'date_9': [2], 'november 24 , 1985_10': [3]} | ['week', 'date', 'opponent', 'result', 'kickoff', 'game site', 'attendance', 'record'] | [['week', 'date', 'opponent', 'result', 'kickoff', 'game site', 'attendance', 'record'], ['1', 'september 8 , 1985', 'chicago bears', 'l 38 - 28', '1:00', 'soldier field', '57828', '0 - 1'], ['2', 'september 15 , 1985', 'minnesota vikings', 'l 31 - 16', '4:00', 'tampa stadium', '46188', '0 - 2'], ['3', 'september 22 , 1985', 'new orleans saints', 'l 20 - 13', '1:00', 'louisiana superdome', '45320', '0 - 3'], ['4', 'september 29 , 1985', 'detroit lions', 'l 30 - 9', '1:00', 'pontiac silverdome', '45023', '0 - 4'], ['5', 'october 6 , 1985', 'chicago bears', 'l 27 - 19', '1:00', 'tampa stadium', '51795', '0 - 5'], ['6', 'october 13 , 1985', 'los angeles rams', 'l 31 - 27', '1:00', 'tampa stadium', '39607', '0 - 6'], ['7', 'october 20 , 1985', 'miami dolphins', 'l 41 - 38', '4:00', 'orange bowl', '62335', '0 - 7'], ['8', 'october 27 , 1985', 'new england patriots', 'l 32 - 14', '1:00', 'tampa stadium', '34661', '0 - 8'], ['9', 'november 3 , 1985', 'new york giants', 'l 22 - 20', '1:00', 'giants stadium', '72031', '0 - 9'], ['10', 'november 10 , 1985', 'st louis cardinals', 'w 16 - 0', '1:00', 'tampa stadium', '34736', '1 - 9'], ['11', 'november 17 , 1985', 'new york jets', 'l 62 - 28', '1:00', 'the meadowlands', '65344', '1 - 10'], ['12', 'november 24 , 1985', 'detroit lions', 'w 19 - 16 ot', '1:00', 'tampa stadium', '43471', '2 - 10'], ['13', 'december 1 , 1985', 'green bay packers', 'l 21 - 0', '1:00', 'lambeau field', '19856', '2 - 11'], ['14', 'december 8 , 1985', 'minnesota vikings', 'l 26 - 7', '4:00', 'hubert h humphrey metrodome', '51593', '2 - 12'], ['15', 'december 15 , 1985', 'indianapolis colts', 'l 31 - 23', '1:00', 'tampa stadium', '25577', '2 - 13'], ['16', 'december 22 , 1985', 'green bay packers', 'l 20 - 17', '1:00', 'tampa stadium', '33992', '2 - 14']]
eurobasket 1965 | https://en.wikipedia.org/wiki/EuroBasket_1965 | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-13841481-3.html.csv | ordinal | the best team in the eurobasket 1965 tournament had a positive point difference of +176 . | {'scope': 'all', 'row': '1', 'col': '1', 'order': '1', 'col_other': '7', 'max_or_min': 'min_to_max', 'value_mentioned': 'yes', 'subset': None} | {'func': 'and', 'args': [{'func': 'eq', 'args': [{'func': 'nth_min', 'args': ['all_rows', 'pos', '1'], 'result': '1', 'ind': 0, 'tostr': 'nth_min { all_rows ; pos ; 1 }', 'tointer': 'the 1st minimum pos record of all rows is 1 .'}, '1'], 'result': True, 'ind': 1, 'tostr': 'eq { nth_min { all_rows ; pos ; 1 } ; 1 }', 'tointer': 'the 1st minimum pos record of all rows is 1 .'}, {'func': 'str_eq', 'args': [{'func': 'str_hop', 'args': [{'func': 'nth_argmin', 'args': ['all_rows', 'pos', '1'], 'result': None, 'ind': 2, 'tostr': 'nth_argmin { all_rows ; pos ; 1 }'}, 'diff'], 'result': '+ 176', 'ind': 3, 'tostr': 'hop { nth_argmin { all_rows ; pos ; 1 } ; diff }'}, '+ 176'], 'result': True, 'ind': 4, 'tostr': 'eq { hop { nth_argmin { all_rows ; pos ; 1 } ; diff } ; + 176 }', 'tointer': 'the diff record of the row with 1st minimum pos record is + 176 .'}], 'result': True, 'ind': 5, 'tostr': 'and { eq { nth_min { all_rows ; pos ; 1 } ; 1 } ; eq { hop { nth_argmin { all_rows ; pos ; 1 } ; diff } ; + 176 } } = true', 'tointer': 'the 1st minimum pos record of all rows is 1 . the diff record of the row with 1st minimum pos record is + 176 .'} | and { eq { nth_min { all_rows ; pos ; 1 } ; 1 } ; eq { hop { nth_argmin { all_rows ; pos ; 1 } ; diff } ; + 176 } } = true | the 1st minimum pos record of all rows is 1 . the diff record of the row with 1st minimum pos record is + 176 . | 6 | 6 | {'and_5': 5, 'result_6': 6, 'eq_1': 1, 'nth_min_0': 0, 'all_rows_7': 7, 'pos_8': 8, '1_9': 9, '1_10': 10, 'str_eq_4': 4, 'str_hop_3': 3, 'nth_argmin_2': 2, 'all_rows_11': 11, 'pos_12': 12, '1_13': 13, 'diff_14': 14, '+ 176_15': 15} | {'and_5': 'and', 'result_6': 'true', 'eq_1': 'eq', 'nth_min_0': 'nth_min', 'all_rows_7': 'all_rows', 'pos_8': 'pos', '1_9': '1', '1_10': '1', 'str_eq_4': 'str_eq', 'str_hop_3': 'str_hop', 'nth_argmin_2': 'nth_argmin', 'all_rows_11': 'all_rows', 'pos_12': 'pos', '1_13': '1', 'diff_14': 'diff', '+ 176_15': '+ 176'} | {'and_5': [6], 'result_6': [], 'eq_1': [5], 'nth_min_0': [1], 'all_rows_7': [0], 'pos_8': [0], '1_9': [0], '1_10': [1], 'str_eq_4': [5], 'str_hop_3': [4], 'nth_argmin_2': [3], 'all_rows_11': [2], 'pos_12': [2], '1_13': [2], 'diff_14': [3], '+ 176_15': [4]} | ['pos', 'matches', 'wins', 'loses', 'results', 'points', 'diff'] | [['1', '7', '7', '0', '546:370', '14', '+ 176'], ['2', '7', '5', '2', '487:466', '10', '+ 21'], ['3', '7', '5', '2', '522:443', '10', '+ 79'], ['4', '7', '4', '3', '395:439', '8', '46'], ['5', '7', '3', '4', '394:458', '6', '64'], ['6', '7', '2', '5', '389:454', '4', '65'], ['7', '7', '2', '5', '477:464', '4', '+ 13'], ['8', '7', '0', '7', '364:478', '0', '114']] |
nassim akrour | https://en.wikipedia.org/wiki/Nassim_Akrour | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-1646697-2.html.csv | aggregation | the total points scored in the 2003 matches from both competitors was eight . | {'scope': 'subset', 'col': '3', 'type': 'sum', 'result': '8', 'subset': {'col': '1', 'criterion': 'fuzzily_match', 'value': '2003'}} | {'func': 'round_eq', 'args': [{'func': 'sum', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'date', '2003'], 'result': None, 'ind': 0, 'tostr': 'filter_eq { all_rows ; date ; 2003 }', 'tointer': 'select the rows whose date record fuzzily matches to 2003 .'}, 'score'], 'result': '8', 'ind': 1, 'tostr': 'sum { filter_eq { all_rows ; date ; 2003 } ; score }'}, '8'], 'result': True, 'ind': 2, 'tostr': 'round_eq { sum { filter_eq { all_rows ; date ; 2003 } ; score } ; 8 } = true', 'tointer': 'select the rows whose date record fuzzily matches to 2003 . the sum of the score record of these rows is 8 .'} | round_eq { sum { filter_eq { all_rows ; date ; 2003 } ; score } ; 8 } = true | select the rows whose date record fuzzily matches to 2003 . the sum of the score record of these rows is 8 . | 3 | 3 | {'eq_2': 2, 'result_3': 3, 'sum_1': 1, 'filter_str_eq_0': 0, 'all_rows_4': 4, 'date_5': 5, '2003_6': 6, 'score_7': 7, '8_8': 8} | {'eq_2': 'eq', 'result_3': 'true', 'sum_1': 'sum', 'filter_str_eq_0': 'filter_str_eq', 'all_rows_4': 'all_rows', 'date_5': 'date', '2003_6': '2003', 'score_7': 'score', '8_8': '8'} | {'eq_2': [3], 'result_3': [], 'sum_1': [2], 'filter_str_eq_0': [1], 'all_rows_4': [0], 'date_5': [0], '2003_6': [0], 'score_7': [1], '8_8': [2]} | ['date', 'venue', 'score', 'result', 'competition'] | [['january 14 , 2002', 'stade 5 juillet 1962 , algiers , algeria', '3 - 0', '4 - 0', 'friendly match'], ['january 25 , 2002', 'stade 26 mars , bamako , mali', '1 - 1', '2 - 2', '2002 african cup of nations'], ['october 11 , 2002', 'stade 19 mai 1956 , annaba , algeria', '1 - 0', '4 - 1', '2004 african cup of nations ( qualification )'], ['october 11 , 2002', 'stade 19 mai 1956 , annaba , algeria', '4 - 1', '4 - 1', '2004 african cup of nations ( qualification )'], ['march 29 , 2003', 'estádio da cidadela , luanda , angola', '1 - 1', '1 - 1', 'friendly match'], ['november 14 , 2003', 'stade 5 juillet 1962 , algiers , algeria', '6 - 0', '6 - 0', '2006 fifa world cup qualification']] |
premier league of bosnia and herzegovina | https://en.wikipedia.org/wiki/Premier_League_of_Bosnia_and_Herzegovina | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/1-1474099-1.html.csv | comparative | travnik spent more seasons in premier league a of bosnia and herzegovina than zvijezda did . | {'row_1': '12', 'row_2': '15', 'col': '5', 'col_other': '1', 'relation': 'greater', 'record_mentioned': 'no', 'diff_result': None} | {'func': 'greater', 'args': [{'func': 'num_hop', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'club', 'travnik'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose club record fuzzily matches to travnik .', 'tostr': 'filter_eq { all_rows ; club ; travnik }'}, 'number of seasons in premier league a'], 'result': None, 'ind': 2, 'tostr': 'hop { filter_eq { all_rows ; club ; travnik } ; number of seasons in premier league a }', 'tointer': 'select the rows whose club record fuzzily matches to travnik . take the number of seasons in premier league a record of this row .'}, {'func': 'num_hop', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'club', 'zvijezda'], 'result': None, 'ind': 1, 'tointer': 'select the rows whose club record fuzzily matches to zvijezda .', 'tostr': 'filter_eq { all_rows ; club ; zvijezda }'}, 'number of seasons in premier league a'], 'result': None, 'ind': 3, 'tostr': 'hop { filter_eq { all_rows ; club ; zvijezda } ; number of seasons in premier league a }', 'tointer': 'select the rows whose club record fuzzily matches to zvijezda . take the number of seasons in premier league a record of this row .'}], 'result': True, 'ind': 4, 'tostr': 'greater { hop { filter_eq { all_rows ; club ; travnik } ; number of seasons in premier league a } ; hop { filter_eq { all_rows ; club ; zvijezda } ; number of seasons in premier league a } } = true', 'tointer': 'select the rows whose club record fuzzily matches to travnik . take the number of seasons in premier league a record of this row . select the rows whose club record fuzzily matches to zvijezda . take the number of seasons in premier league a record of this row . the first record is greater than the second record .'} | greater { hop { filter_eq { all_rows ; club ; travnik } ; number of seasons in premier league a } ; hop { filter_eq { all_rows ; club ; zvijezda } ; number of seasons in premier league a } } = true | select the rows whose club record fuzzily matches to travnik . take the number of seasons in premier league a record of this row . select the rows whose club record fuzzily matches to zvijezda . take the number of seasons in premier league a record of this row . the first record is greater than the second record . | 5 | 5 | {'greater_4': 4, 'result_5': 5, 'num_hop_2': 2, 'filter_str_eq_0': 0, 'all_rows_6': 6, 'club_7': 7, 'travnik_8': 8, 'number of seasons in premier league a_9': 9, 'num_hop_3': 3, 'filter_str_eq_1': 1, 'all_rows_10': 10, 'club_11': 11, 'zvijezda_12': 12, 'number of seasons in premier league a_13': 13} | {'greater_4': 'greater', 'result_5': 'true', 'num_hop_2': 'num_hop', 'filter_str_eq_0': 'filter_str_eq', 'all_rows_6': 'all_rows', 'club_7': 'club', 'travnik_8': 'travnik', 'number of seasons in premier league a_9': 'number of seasons in premier league a', 'num_hop_3': 'num_hop', 'filter_str_eq_1': 'filter_str_eq', 'all_rows_10': 'all_rows', 'club_11': 'club', 'zvijezda_12': 'zvijezda', 'number of seasons in premier league a_13': 'number of seasons in premier league a'} | {'greater_4': [5], 'result_5': [], 'num_hop_2': [4], 'filter_str_eq_0': [2], 'all_rows_6': [0], 'club_7': [0], 'travnik_8': [0], 'number of seasons in premier league a_9': [2], 'num_hop_3': [4], 'filter_str_eq_1': [3], 'all_rows_10': [1], 'club_11': [1], 'zvijezda_12': [1], 'number of seasons in premier league a_13': [3]} | ['club', 'position in 2012 - 13', 'first season in top division', 'number of seasons in top division', 'number of seasons in premier league a', 'first season of current spell in top division', 'top division titles', 'last top division title'] | [['borac b', '003 3rd', '1961 - 62', '23', '9', '2008 - 09', '1', '2010 - 11'], ['čelik b , c', '004 4th', '1966 - 67', '30', '13', '2000 - 01', '3 d', '1996 - 97'], ['gošk ( r )', '015 15th', '2011 - 12', '2', '2', '2011 - 12', '0', 'n / a'], ['gradina ( r )', '016 16th', '2012 - 13', '1', '1', '2012 - 13', '0', 'n / a'], ['leotar b , c', '008 8th', '2002 - 03', '11', '11', '2002 - 03', '1', '2002 - 03'], ['olimpic', '005 5th', '2000 - 01', '6', '6', '2009 - 10', '0', 'n / a'], ['radnik', '012 12th', '2006 - 07', '3', '3', '2012 - 13', '1 e', '1998 - 99'], ['rudar', '011 11th', '2009 - 10', '4', '4', '2009 - 10', '0', 'n / a'], ['sarajevo b , c', '002 2nd', '1947 - 48', '55', '13', '1958 - 59', '4 f', '2006 - 07'], ['slavija', '007 7th', '1930', '17', '9', '2004 - 05', '0', 'n / a'], ['široki brijeg b , c', '006 6th', '2000 - 01', '13', '13', '2000 - 01', '6 g', '2005 - 06'], ['travnik', '014 14th', '2000 - 01', '10', '10', '2007 - 08', '0', 'n / a'], ['velež b', '013 13th', '1952 - 53', '48', '10', '2006 - 07', '0', 'n / a'], ['zrinjski b , c', '009 9th', '2000 - 01', '13', '13', '2000 - 01', '2', '2008 - 09'], ['zvijezda', '010 10th', '2008 - 09', '5', '5', '2008 - 09', '0', 'n / a']]
cycling at the 2008 summer olympics - men 's bmx | https://en.wikipedia.org/wiki/Cycling_at_the_2008_Summer_Olympics_%E2%80%93_Men%27s_BMX | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-18603914-7.html.csv | majority | most of the cyclist finished their second run within 40 seconds . | {'scope': 'all', 'col': '4', 'most_or_all': 'most', 'criterion': 'less_than', 'value': '40', 'subset': None} | {'func': 'most_less', 'args': ['all_rows', '2nd run', '40'], 'result': True, 'ind': 0, 'tointer': 'for the 2nd run records of all rows , most of them are less than 40 .', 'tostr': 'most_less { all_rows ; 2nd run ; 40 } = true'} | most_less { all_rows ; 2nd run ; 40 } = true | for the 2nd run records of all rows , most of them are less than 40 . | 1 | 1 | {'most_less_0': 0, 'result_1': 1, 'all_rows_2': 2, '2nd run_3': 3, '40_4': 4} | {'most_less_0': 'most_less', 'result_1': 'true', 'all_rows_2': 'all_rows', '2nd run_3': '2nd run', '40_4': '40'} | {'most_less_0': [1], 'result_1': [], 'all_rows_2': [0], '2nd run_3': [0], '40_4': [0]} | ['rank', 'name', '1st run', '2nd run', '3rd run', 'total'] | [['1', 'mike day ( usa )', '36.470 ( 1 )', '36.219 ( 1 )', '37.461 ( 3 )', '5'], ['2', 'sifiso nhlapo ( rsa )', '37.197 ( 3 )', '36.597 ( 3 )', '36.457 ( 2 )', '8'], ['3', 'donny robinson ( usa )', '36.832 ( 2 )', '36.462 ( 2 )', '56.249 ( 6 )', '10'], ['4', 'andrés jiménez caicedo ( col )', '37.363 ( 4 )', '36.862 ( 4 )', '44.507 ( 5 )', '13'], ['5', 'raymon van der biezen ( ned )', '55.121 ( 7 )', '37.258 ( 6 )', '36.200 ( 1 )', '14'], ['6', 'kyle bennett ( usa )', '43.518 ( 5 )', '37.200 ( 5 )', '43.897 ( 4 )', '14'], ['7', 'artūrs matisons ( lat )', '53.379 ( 6 )', '1:17.170 ( 8 )', 'dnf ( 8 )', '22'], ['8', 'marc willers ( nzl )', '1:22.619 ( 8 )', '43.256 ( 7 )', 'dnf ( 8 )', '23']] |
matthew riddle | https://en.wikipedia.org/wiki/Matthew_Riddle | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-18017829-2.html.csv | count | there are eight matches that started at 5:00 . | {'scope': 'all', 'criterion': 'equal', 'value': '5:00', 'result': '8', 'col': '7', 'subset': None} | {'func': 'eq', 'args': [{'func': 'count', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'time', '5:00'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose time record fuzzily matches to 5:00 .', 'tostr': 'filter_eq { all_rows ; time ; 5:00 }'}], 'result': '8', 'ind': 1, 'tostr': 'count { filter_eq { all_rows ; time ; 5:00 } }', 'tointer': 'select the rows whose time record fuzzily matches to 5:00 . the number of such rows is 8 .'}, '8'], 'result': True, 'ind': 2, 'tostr': 'eq { count { filter_eq { all_rows ; time ; 5:00 } } ; 8 } = true', 'tointer': 'select the rows whose time record fuzzily matches to 5:00 . the number of such rows is 8 .'} | eq { count { filter_eq { all_rows ; time ; 5:00 } } ; 8 } = true | select the rows whose time record fuzzily matches to 5:00 . the number of such rows is 8 . | 3 | 3 | {'eq_2': 2, 'result_3': 3, 'count_1': 1, 'filter_str_eq_0': 0, 'all_rows_4': 4, 'time_5': 5, '5:00_6': 6, '8_7': 7} | {'eq_2': 'eq', 'result_3': 'true', 'count_1': 'count', 'filter_str_eq_0': 'filter_str_eq', 'all_rows_4': 'all_rows', 'time_5': 'time', '5:00_6': '5:00', '8_7': '8'} | {'eq_2': [3], 'result_3': [], 'count_1': [2], 'filter_str_eq_0': [1], 'all_rows_4': [0], 'time_5': [0], '5:00_6': [0], '8_7': [2]} | ['res', 'record', 'opponent', 'method', 'event', 'round', 'time', 'location'] | [['nc', '7 - 3 ( 2 )', 'che mills', 'no contest', 'ufc on fuel tv : barao vs mcdonald', '3', '5:00', 'london , england'], ['win', '7 - 3 ( 1 )', 'john maguire', 'decision ( unanimous )', 'ufc 154', '3', '5:00', 'montreal , quebec , canada'], ['nc', '6 - 3 ( 1 )', 'chris clements', 'no contest', 'ufc 149', '3', '2:02', 'calgary , alberta , canada'], ['win', '6 - 3', 'henry martinez', 'decision ( split )', 'ufc 143', '3', '5:00', 'las vegas , nevada , united states'], ['loss', '5 - 3', 'lance benoist', 'decision ( unanimous )', 'ufc fight night : shields vs ellenberger', '3', '5:00', 'new orleans , louisiana , united states'], ['loss', '5 - 2', 'sean pierson', 'decision ( unanimous )', 'ufc 124', '3', '5:00', 'montreal , quebec , canada'], ['win', '5 - 1', 'damarques johnson', 'tko ( punches )', 'ufc live : jones vs matyushenko', '2', '4:29', 'san diego , california , united states'], ['win', '4 - 1', 'greg soto', 'dq ( illegal upkick )', 'ufc 111', '3', '1:30', 'newark , new jersey , united states'], ['loss', '3 - 1', 'nick osipczak', 'tko ( elbows & punches )', 'ufc 105', '3', '3:53', 'manchester , england'], ['win', '3 - 0', 'dan cramer', 'decision ( unanimous )', 'ufc 101', '3', '5:00', 'philadelphia , pennsylvania , united states'], ['win', '2 - 0', 'steve bruno', 'decision ( unanimous )', 'ufc fight night : lauzon vs stephens', '3', '5:00', 'tampa , florida , united states'], ['win', '1 - 0', 'dante rivera', 'decision ( unanimous )', 'the ultimate fighter 7 finale', '3', '5:00', 'las vegas , nevada , united states']] |
grado labs | https://en.wikipedia.org/wiki/Grado_Labs | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-1601027-1.html.csv | aggregation | grado labs average driver - matched db in this chart is 0.07 . | {'scope': 'all', 'col': '3', 'type': 'average', 'result': '0.07', 'subset': None} | {'func': 'round_eq', 'args': [{'func': 'avg', 'args': ['all_rows', 'driver - matched db'], 'result': '0.07', 'ind': 0, 'tostr': 'avg { all_rows ; driver - matched db }'}, '0.07'], 'result': True, 'ind': 1, 'tostr': 'round_eq { avg { all_rows ; driver - matched db } ; 0.07 } = true', 'tointer': 'the average of the driver - matched db record of all rows is 0.07 .'} | round_eq { avg { all_rows ; driver - matched db } ; 0.07 } = true | the average of the driver - matched db record of all rows is 0.07 . | 2 | 2 | {'eq_1': 1, 'result_2': 2, 'avg_0': 0, 'all_rows_3': 3, 'driver - matched db_4': 4, '0.07_5': 5} | {'eq_1': 'eq', 'result_2': 'true', 'avg_0': 'avg', 'all_rows_3': 'all_rows', 'driver - matched db_4': 'driver - matched db', '0.07_5': '0.07'} | {'eq_1': [2], 'result_2': [], 'avg_0': [1], 'all_rows_3': [0], 'driver - matched db_4': [0], '0.07_5': [1]} | ['headphone model', 'headphone class', 'driver - matched db', 'construction', 'earpads', 'termination', 'us msrp'] | [['igrado', 'prestige', '0.1', 'plastic', 'comfort pads', '1 / 8 ( 3.5 mm ) plug', '49'], ['sr60i', 'prestige', '0.1', 'plastic', 'comfort pads', '1 / 8 ( 3.5 mm ) plug with 1 / 4 adaptor', '79'], ['sr80i', 'prestige', '0.1', 'plastic', 'comfort pads', '1 / 8 ( 3.5 mm ) plug with 1 / 4 adaptor', '99'], ['sr125i', 'prestige', '0.1', 'plastic', 'comfort pads', '1 / 4 ( 6.5 mm ) plug', '150'], ['sr225i', 'prestige', '0.05', 'plastic', 'bowls', '1 / 4 ( 6.5 mm ) plug', '200'], ['sr325is', 'prestige', '0.05', 'aluminum alloy / plastic inner sleeve', 'bowls', '1 / 4 ( 6.5 mm ) plug', '295'], ['rs2i', 'reference', '0.05', 'hand - crafted mahogany', 'bowls', '1 / 4 ( 6.5 mm ) plug', '495'], ['rs1i', 'reference', '0.05', 'hand - crafted mahogany', 'bowls', '1 / 4 ( 6.5 mm ) plug', '695'], ['gs1000i', 'statement', '0.05', 'hand - crafted mahogany', 'circumaural bowls', '1 / 4 ( 6.5 mm ) plug', '995'], ['ps500', 'professional', '0.05', 'hand - crafted mahogany / aluminum', 'bowls', '1 / 4 ( 6.5 mm ) plug', '595'], ['ps1000', 'professional', '0.05', 'hand - crafted mahogany / aluminum', 'circumaural bowls', '1 / 4 ( 6.5 mm ) plug', '1695']] |
merlin ( series 2 ) | https://en.wikipedia.org/wiki/Merlin_%28series_2%29 | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/1-29063233-1.html.csv | majority | the majority of the episodes from merlin ( series 2 ) were seen by more than 6 million uk viewers . | {'scope': 'all', 'col': '7', 'most_or_all': 'most', 'criterion': 'greater_than', 'value': '6.0', 'subset': None} | {'func': 'most_greater', 'args': ['all_rows', 'uk viewers ( million )', '6.0'], 'result': True, 'ind': 0, 'tointer': 'for the uk viewers ( million ) records of all rows , most of them are greater than 6.0 .', 'tostr': 'most_greater { all_rows ; uk viewers ( million ) ; 6.0 } = true'} | most_greater { all_rows ; uk viewers ( million ) ; 6.0 } = true | for the uk viewers ( million ) records of all rows , most of them are greater than 6.0 . | 1 | 1 | {'most_greater_0': 0, 'result_1': 1, 'all_rows_2': 2, 'uk viewers (million)_3': 3, '6.0_4': 4} | {'most_greater_0': 'most_greater', 'result_1': 'true', 'all_rows_2': 'all_rows', 'uk viewers (million)_3': 'uk viewers ( million )', '6.0_4': '6.0'} | {'most_greater_0': [1], 'result_1': [], 'all_rows_2': [0], 'uk viewers (million)_3': [0], '6.0_4': [0]} | ['no overall', 'no for series', 'title', 'directed by', 'written by', 'original air date', 'uk viewers ( million )'] | [['14', '1', 'the curse of cornelius sigan', 'david moore', 'julian jones', '19 september 2009', '5.77'], ['15', '2', 'the once and future queen', 'jeremy webb', 'howard overman', '26 september 2009', '5.94'], ['16', '3', 'the nightmare begins', 'jeremy webb', 'ben vanstone', '3 october 2009', '6.09'], ['17', '4', 'lancelot and guinevere', 'david moore', 'howard overman', '10 october 2009', '5.69'], ['18', '5', 'beauty and the beast ( part 1 )', 'david moore', 'jake michie', '24 october 2009', '5.53'], ['19', '6', 'beauty and the beast ( part 2 )', 'metin huseyin', 'ben vanstone', '31 october 2009', '6.14'], ['20', '7', 'the witchfinder', 'jeremy webb', 'jake michie', '7 november 2009', '5.62'], ['21', '8', 'the sins of the father', 'metin huseyin', 'howard overman', '14 november 2009', '6.16'], ['22', '9', 'the lady of the lake', 'metin huseyin', 'julian jones', '21 november 2009', '6.30'], ['23', '10', 'sweet dreams', 'alice troughton', 'lucy watkins', '28 november 2009', '6.02'], ['24', '11', "the witch 's quickening", 'alice troughton', 'jake michie', '5 december 2009', '6.01'], ['25', '12', 'the fires of idirsholas', 'jeremy webb', 'julian jones', '12 december 2009', '6.01']] |
tamil nadu dr. m.g.r. medical university | https://en.wikipedia.org/wiki/Tamil_Nadu_Dr._M.G.R._Medical_University | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-11184686-1.html.csv | unique | madras medical college and research institute is the only affiliate college of tamil nadu dr mgr medical university to be established before the year 1900 . | {'scope': 'all', 'row': '2', 'col': '5', 'col_other': '1', 'criterion': 'less_than', 'value': '1900', 'subset': None} | {'func': 'and', 'args': [{'func': 'only', 'args': [{'func': 'filter_less', 'args': ['all_rows', 'estd', '1900'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose estd record is less than 1900 .', 'tostr': 'filter_less { all_rows ; estd ; 1900 }'}], 'result': True, 'ind': 1, 'tostr': 'only { filter_less { all_rows ; estd ; 1900 } }', 'tointer': 'select the rows whose estd record is less than 1900 . there is only one such row in the table .'}, {'func': 'str_eq', 'args': [{'func': 'str_hop', 'args': [{'func': 'filter_less', 'args': ['all_rows', 'estd', '1900'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose estd record is less than 1900 .', 'tostr': 'filter_less { all_rows ; estd ; 1900 }'}, 'college name'], 'result': 'madras medical college and research institute', 'ind': 2, 'tostr': 'hop { filter_less { all_rows ; estd ; 1900 } ; college name }'}, 'madras medical college and research institute'], 'result': True, 'ind': 3, 'tostr': 'eq { hop { filter_less { all_rows ; estd ; 1900 } ; college name } ; madras medical college and research institute }', 'tointer': 'the college name record of this unqiue row is madras medical college and research institute .'}], 'result': True, 'ind': 4, 'tostr': 'and { only { filter_less { all_rows ; estd ; 1900 } } ; eq { hop { filter_less { all_rows ; estd ; 1900 } ; college name } ; madras medical college and research institute } } = true', 'tointer': 'select the rows whose estd record is less than 1900 . there is only one such row in the table . the college name record of this unqiue row is madras medical college and research institute .'} | and { only { filter_less { all_rows ; estd ; 1900 } } ; eq { hop { filter_less { all_rows ; estd ; 1900 } ; college name } ; madras medical college and research institute } } = true | select the rows whose estd record is less than 1900 . there is only one such row in the table . the college name record of this unqiue row is madras medical college and research institute . | 6 | 5 | {'and_4': 4, 'result_5': 5, 'only_1': 1, 'filter_less_0': 0, 'all_rows_6': 6, 'estd_7': 7, '1900_8': 8, 'str_eq_3': 3, 'str_hop_2': 2, 'college name_9': 9, 'madras medical college and research institute_10': 10} | {'and_4': 'and', 'result_5': 'true', 'only_1': 'only', 'filter_less_0': 'filter_less', 'all_rows_6': 'all_rows', 'estd_7': 'estd', '1900_8': '1900', 'str_eq_3': 'str_eq', 'str_hop_2': 'str_hop', 'college name_9': 'college name', 'madras medical college and research institute_10': 'madras medical college and research institute'} | {'and_4': [5], 'result_5': [], 'only_1': [4], 'filter_less_0': [1, 2], 'all_rows_6': [0], 'estd_7': [0], '1900_8': [0], 'str_eq_3': [4], 'str_hop_2': [3], 'college name_9': [2], 'madras medical college and research institute_10': [3]} | ['college name', 'location', 'district', 'affiliation', 'estd'] | [['thanjavur medical college', 'thanjavur', 'thanjavur district', 'tamil nadu dr mgr medical university', '1958'], ['madras medical college and research institute', 'park town , chennai', 'chennai district', 'tamil nadu dr mgr medical university', '1835'], ['stanley medical college', 'royapuram , chennai', 'chennai district', 'tamil nadu dr mgr medical university', '1938'], ['government kilpauk medical college', 'chetput ( chennai ) chennai', 'chennai district', 'tamil nadu dr mgr medical university', '1960'], ['chengalpattu medical college', 'chengalpattu', 'kanchipuram district', 'tamil nadu dr mgr medical university', '1965'], ['madurai medical college', 'madurai', 'madurai district', 'tamil nadu dr mgr medical university', '1954'], ['coimbatore medical college', 'coimbatore', 'coimbatore', 'tamil nadu dr mgr medical university', '1966'], ['tirunelveli medical college', 'tirunelveli', 'tirunelveli district', 'tamil nadu dr mgr medical university', 'july 1966'], ['mohan kumaramangalam medical college', 'salem , tamil nadu', 'salem district', 'tamil nadu dr mgr medical university', '1990'], ['kapviswanatham government medical college', 'tiruchirappalli tamil nadu', 'tiruchirappalli district', 'tamil nadu dr mgr medical university', '1929'], ['thoothukudi medical college', 'thoothukudi', 'thoothukudi', 'tamil nadu dr mgr medical university', '1982'], ['government vellore medical college', 'bagayam', 'vellore', 'tamil nadu dr mgr medical university', '2005'], ['kanyakumari government medical college', 'kanniyakumari', 'kanniyakumari', 'tamil nadu dr mgr medical university', '1965'], ['government theni medical college', 'theni', 'theni', 'tamil nadu dr mgr medical university', '2006'], ['government dharmapuri medical college', 'coimbatore', 'coimbatore', 'tamil nadu dr mgr medical university', '1982'], ['government villupuram medical college', 'villupuram', 'villupuram', 'tamil nadu dr mgr medical university', '1965'], ['government thiruvarur medical college', 'thiruvarur', 'thiruvarur', 'tamil nadu dr mgr medical university', '2007'], ['government sivgangai medical college', 'sivgangai', 'sivgangai', 'tamil nadu dr mgr medical university', 'proposed']]
hoosier athletic conference | https://en.wikipedia.org/wiki/Hoosier_Athletic_Conference | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-18789626-1.html.csv | superlative | the school in the hoosier athletic conference that had the most people enrolled was twin lakes . | {'scope': 'all', 'col_superlative': '4', 'row_superlative': '6', 'value_mentioned': 'no', 'max_or_min': 'max', 'other_col': '1', 'subset': None} | {'func': 'str_eq', 'args': [{'func': 'str_hop', 'args': [{'func': 'argmax', 'args': ['all_rows', 'enrollment 08 - 09'], 'result': None, 'ind': 0, 'tostr': 'argmax { all_rows ; enrollment 08 - 09 }'}, 'school'], 'result': 'twin lakes', 'ind': 1, 'tostr': 'hop { argmax { all_rows ; enrollment 08 - 09 } ; school }'}, 'twin lakes'], 'result': True, 'ind': 2, 'tostr': 'eq { hop { argmax { all_rows ; enrollment 08 - 09 } ; school } ; twin lakes } = true', 'tointer': 'select the row whose enrollment 08 - 09 record of all rows is maximum . the school record of this row is twin lakes .'} | eq { hop { argmax { all_rows ; enrollment 08 - 09 } ; school } ; twin lakes } = true | select the row whose enrollment 08 - 09 record of all rows is maximum . the school record of this row is twin lakes . | 3 | 3 | {'str_eq_2': 2, 'result_3': 3, 'str_hop_1': 1, 'argmax_0': 0, 'all_rows_4': 4, 'enrollment 08 - 09_5': 5, 'school_6': 6, 'twin lakes_7': 7} | {'str_eq_2': 'str_eq', 'result_3': 'true', 'str_hop_1': 'str_hop', 'argmax_0': 'argmax', 'all_rows_4': 'all_rows', 'enrollment 08 - 09_5': 'enrollment 08 - 09', 'school_6': 'school', 'twin lakes_7': 'twin lakes'} | {'str_eq_2': [3], 'result_3': [], 'str_hop_1': [2], 'argmax_0': [1], 'all_rows_4': [0], 'enrollment 08 - 09_5': [0], 'school_6': [1], 'twin lakes_7': [2]} | ['school', 'city', 'team name', 'enrollment 08 - 09', 'ihsaa class', 'ihsaa class football', 'county', 'year joined'] | [['benton central', 'oxford', 'bisons', '612', 'aaa', 'aaa', '04 benton', '1968'], ['delphi community', 'delphi', 'oracles', '515', 'aa', 'aa', '08 carroll', '1998'], ['central catholic', 'lafayette', 'knights', '220', 'a', 'aa', '79 tippecanoe', '2011'], ['rensselaer central', 'rensselaer', 'bombers', '599', 'aaa', 'aa', '37 jasper', '1998'], ['tipton', 'tipton', 'blue devils', '549', 'aa', 'aa', '80 tipton', '2000'], ['twin lakes', 'monticello', 'indians', '805', 'aaa', 'aaa', '91 white', '1974'], ['west lafayette', 'west lafayette', 'red devils', '672', 'aaa', 'aaa', '79 tippecanoe', '1947']] |
list of how it 's made episodes | https://en.wikipedia.org/wiki/List_of_How_It%27s_Made_episodes | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-15187735-14.html.csv | ordinal | according to the list of how it 's made episodes , second earliest episode was coded s07e02 on netflix . | {'scope': 'all', 'row': '2', 'col': '1', 'order': '2', 'col_other': '3', 'max_or_min': 'min_to_max', 'value_mentioned': 'no', 'subset': None} | {'func': 'str_eq', 'args': [{'func': 'str_hop', 'args': [{'func': 'nth_argmin', 'args': ['all_rows', 'series ep', '2'], 'result': None, 'ind': 0, 'tostr': 'nth_argmin { all_rows ; series ep ; 2 }'}, 'netflix'], 'result': 's07e02', 'ind': 1, 'tostr': 'hop { nth_argmin { all_rows ; series ep ; 2 } ; netflix }'}, 's07e02'], 'result': True, 'ind': 2, 'tostr': 'eq { hop { nth_argmin { all_rows ; series ep ; 2 } ; netflix } ; s07e02 } = true', 'tointer': 'select the row whose series ep record of all rows is 2nd minimum . the netflix record of this row is s07e02 .'} | eq { hop { nth_argmin { all_rows ; series ep ; 2 } ; netflix } ; s07e02 } = true | select the row whose series ep record of all rows is 2nd minimum . the netflix record of this row is s07e02 . | 3 | 3 | {'str_eq_2': 2, 'result_3': 3, 'str_hop_1': 1, 'nth_argmin_0': 0, 'all_rows_4': 4, 'series ep_5': 5, '2_6': 6, 'netflix_7': 7, 's07e02_8': 8} | {'str_eq_2': 'str_eq', 'result_3': 'true', 'str_hop_1': 'str_hop', 'nth_argmin_0': 'nth_argmin', 'all_rows_4': 'all_rows', 'series ep_5': 'series ep', '2_6': '2', 'netflix_7': 'netflix', 's07e02_8': 's07e02'} | {'str_eq_2': [3], 'result_3': [], 'str_hop_1': [2], 'nth_argmin_0': [1], 'all_rows_4': [0], 'series ep_5': [0], '2_6': [0], 'netflix_7': [1], 's07e02_8': [2]} | ['series ep', 'episode', 'netflix', 'segment a', 'segment b', 'segment c', 'segment d'] | [['14 - 01', '170', 's07e01', 'mini gp motorcycles', 'fig cookies', 'tool boxes', 'pipe bends'], ['14 - 02', '171', 's07e02', 'western revolver s replica', 'arc trainers', 'used - oil furnaces', 'vegetable peelers and s pizza cutter'], ['14 - 03', '172', 's07e03', 'metal s golf club', 's waffle', 'custom wires and s cable', 'train s wheel'], ['14 - 04', '173', 's07e04', 's sail', 's walnut', 'wheel immobilizers', 'honeycomb structural panels'], ['14 - 05', '174', 's07e05', 's surfboard', 's sticker', 'sandwich s cookie', 'concrete roofing s tile'], ['14 - 06', '175', 's07e06', 'ski goggles', 'tower cranes', 'porcelain s figurine', 's diesel engine'], ['14 - 07', '176', 's07e07', 'stuffed s olive', 's astrolabe', 's western saddle ( part 1 )', 's western saddle ( part 2 )'], ['14 - 08', '177', 's07e08', 'custom running shoes', 's axe', 'racing s kart', 's animatronic'], ['14 - 09', '178', 's07e09', 's headphone', 's diving regulator', 'reflector light bulbs ( part 1 )', 'reflector light bulbs ( part 2 )'], ['14 - 10', '179', 's07e10', 's fly fishing reel', 'house paint', 's weaving loom', 's ice maker'], ['14 - 11', '180', 's07e11', 's graphite pencil lead', 's clarinet', 's special effect ( part 1 )', 's special effect ( part 2 )'], ['14 - 12', '181', 's07e12', 's air boat', 's onion', '3d metal printing', 's curved cabinet door'], ['14 - 13', '182', 's07e13', 's retractable ballpoint pen', 'solar salt', 's tuba ( part 1 )', 's tuba ( part 2 )']] |
manuela maleeva | https://en.wikipedia.org/wiki/Manuela_Maleeva | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-1092361-12.html.csv | count | manuela maleeva played on clay surface 2 times during qf round through the years 1983-1992 of her career . | {'scope': 'subset', 'criterion': 'equal', 'value': 'clay', 'result': '2', 'col': '5', 'subset': {'col': '2', 'criterion': 'equal', 'value': 'qf'}} | {'func': 'eq', 'args': [{'func': 'count', 'args': [{'func': 'filter_str_eq', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'round', 'qf'], 'result': None, 'ind': 0, 'tostr': 'filter_eq { all_rows ; round ; qf }', 'tointer': 'select the rows whose round record fuzzily matches to qf .'}, 'surface', 'clay'], 'result': None, 'ind': 1, 'tointer': 'select the rows whose round record fuzzily matches to qf . among these rows , select the rows whose surface record fuzzily matches to clay .', 'tostr': 'filter_eq { filter_eq { all_rows ; round ; qf } ; surface ; clay }'}], 'result': '2', 'ind': 2, 'tostr': 'count { filter_eq { filter_eq { all_rows ; round ; qf } ; surface ; clay } }', 'tointer': 'select the rows whose round record fuzzily matches to qf . among these rows , select the rows whose surface record fuzzily matches to clay . the number of such rows is 2 .'}, '2'], 'result': True, 'ind': 3, 'tostr': 'eq { count { filter_eq { filter_eq { all_rows ; round ; qf } ; surface ; clay } } ; 2 } = true', 'tointer': 'select the rows whose round record fuzzily matches to qf . among these rows , select the rows whose surface record fuzzily matches to clay . the number of such rows is 2 .'} | eq { count { filter_eq { filter_eq { all_rows ; round ; qf } ; surface ; clay } } ; 2 } = true | select the rows whose round record fuzzily matches to qf . among these rows , select the rows whose surface record fuzzily matches to clay . the number of such rows is 2 . | 4 | 4 | {'eq_3': 3, 'result_4': 4, 'count_2': 2, 'filter_str_eq_1': 1, 'filter_str_eq_0': 0, 'all_rows_5': 5, 'round_6': 6, 'qf_7': 7, 'surface_8': 8, 'clay_9': 9, '2_10': 10} | {'eq_3': 'eq', 'result_4': 'true', 'count_2': 'count', 'filter_str_eq_1': 'filter_str_eq', 'filter_str_eq_0': 'filter_str_eq', 'all_rows_5': 'all_rows', 'round_6': 'round', 'qf_7': 'qf', 'surface_8': 'surface', 'clay_9': 'clay', '2_10': '2'} | {'eq_3': [4], 'result_4': [], 'count_2': [3], 'filter_str_eq_1': [2], 'filter_str_eq_0': [1], 'all_rows_5': [0], 'round_6': [0], 'qf_7': [0], 'surface_8': [1], 'clay_9': [1], '2_10': [3]} | ['edition', 'round', 'date', 'partner', 'surface', 'opponents', 'result'] | [['1983 world group i', 'po', 'july 19 , 1983', 'marina kondova', 'clay', 'angela longo lindsay standen', '6 - 4 , 6 - 2'], ['1983 world group i', 'po', 'july 21 , 1983', 'adriana velcheva', 'clay', 'svetlana cherneva larisa savchenko', '4 - 6 , 3 - 6'], ['1984 world group i', 'r1', 'july 15 , 1984', 'katerina maleeva', 'clay', 'amanda brown anne hobbs', '7 - 6 , 7 - 5'], ['1984 world group i', 'r2', 'july 16 , 1984', 'katerina maleeva', 'clay', 'elena eliseenko larisa savchenko', '7 - 5 , 5 - 7 , 1 - 6'], ['1984 world group i', 'qf', 'july 18 , 1984', 'katerina maleeva', 'clay', 'sabrina goleš mima jaušovec', '3 - 6 , 1 - 6'], ['1985 world group i', 'r1', 'october 6 , 1985', 'katerina maleeva', 'hard', 'natalia egorova svetlana cherneva', '6 - 3 , 7 - 5'], ['1985 world group i', 'r2', 'october 8 , 1985', 'katerina maleeva', 'hard', 'sabrina goleš aila winkler', '6 - 4 , 7 - 6 ( 9 - 7 )'], ['1985 world group i', 'qf', 'october 10 , 1985', 'katerina maleeva', 'hard', 'jo durie anne hobbs', '4 - 5 , ret'], ['1985 world group i', 'sf', 'october 12 , 1985', 'katerina maleeva', 'hard', 'hana mandlíková helena suková', '3 - 6 , 6 - 7 ( 4 - 7 )'], ['1986 world group i', 'r1', 'july 20 , 1986', 'katerina maleeva', 'clay', 'svetlana cherneva larisa savchenko', '6 - 1 , 4 - 6 , 1 - 6'], ['1986 world group i', 'qf', 'july 23 , 1986', 'katerina maleeva', 'clay', 'bettina bunge claudia kohde - kilsch', '4 - 6 , 2 - 6'], ['1989 world group i', 'r1', 'october 1 , 1989', 'katerina maleeva', 'hard', 'kim il - soon lee jeong - myung', '7 - 5 , 6 - 0'], ['1989 world group i', 'r2', 'october 3 , 1989', 'katerina maleeva', 'hard', 'florencia labat mercedes paz', '6 - 1 , 3 - 6 , 6 - 1'], ['1989 world group i', 'qf', 'october 5 , 1989', 'katerina maleeva', 'hard', 'elizabeth smylie janine tremelling', '7 - 5 , 4 - 6 , 0 - 6'], ['↓ representing ↓', '↓ representing ↓', '↓ representing ↓', '↓ representing ↓', '↓ representing ↓', '↓ representing ↓', '↓ representing ↓'], ['1991 world group i', 'r2', 'july 24 , 1991', 'cathy caverzasio', 'hard', 'li fang yi jing - qian', '1 - 3 , ret'], ['1991 world group i', 'qf', 'july 25 , 1991', 'cathy caverzasio', 'hard', 'jana novotná regina rajchrtová', '6 - 4 , 2 - 1 , ret'], ['1992 world group i', 'r1', 'july 14 , 1992', 'michelle strebel', 'clay', 'maria lindström maria strandlund', '4 - 6 , 7 - 5 , 4 - 6']]
jacksonville jaguars draft history | https://en.wikipedia.org/wiki/Jacksonville_Jaguars_draft_history | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-15100419-2.html.csv | count | the jacksonville jaguars drafted four players in the wide receiver position . | {'scope': 'all', 'criterion': 'equal', 'value': 'wide receiver', 'result': '4', 'col': '5', 'subset': None} | {'func': 'eq', 'args': [{'func': 'count', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'position', 'wide receiver'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose position record fuzzily matches to wide receiver .', 'tostr': 'filter_eq { all_rows ; position ; wide receiver }'}], 'result': '4', 'ind': 1, 'tostr': 'count { filter_eq { all_rows ; position ; wide receiver } }', 'tointer': 'select the rows whose position record fuzzily matches to wide receiver . the number of such rows is 4 .'}, '4'], 'result': True, 'ind': 2, 'tostr': 'eq { count { filter_eq { all_rows ; position ; wide receiver } } ; 4 } = true', 'tointer': 'select the rows whose position record fuzzily matches to wide receiver . the number of such rows is 4 .'} | eq { count { filter_eq { all_rows ; position ; wide receiver } } ; 4 } = true | select the rows whose position record fuzzily matches to wide receiver . the number of such rows is 4 . | 3 | 3 | {'eq_2': 2, 'result_3': 3, 'count_1': 1, 'filter_str_eq_0': 0, 'all_rows_4': 4, 'position_5': 5, 'wide receiver_6': 6, '4_7': 7} | {'eq_2': 'eq', 'result_3': 'true', 'count_1': 'count', 'filter_str_eq_0': 'filter_str_eq', 'all_rows_4': 'all_rows', 'position_5': 'position', 'wide receiver_6': 'wide receiver', '4_7': '4'} | {'eq_2': [3], 'result_3': [], 'count_1': [2], 'filter_str_eq_0': [1], 'all_rows_4': [0], 'position_5': [0], 'wide receiver_6': [0], '4_7': [2]} | ['round', 'pick', 'overall', 'name', 'position', 'college'] | [['1', '2', '2', 'kevin hardy', 'linebacker', 'illinois'], ['2', '3', '33', 'tony brackens', 'defensive end', 'texas'], ['2', '30', '60', 'michael cheever', 'center', 'georgia tech'], ['3', '2', '63', 'aaron beasley', 'cornerback', 'west virginia'], ['4', '15', '110', 'reggie barlow', 'wide receiver', 'alabama state'], ['5', '14', '146', 'jimmy herndon', 'guard', 'houston'], ['6', '3', '170', 'john fisher', 'defensive back', 'missouri western'], ['6', '18', '185', 'chris doering', 'wide receiver', 'florida'], ['7', '18', '227', 'clarence jones', 'wide receiver', 'tennessee state'], ['7', '19', '228', 'gregory spann', 'wide receiver', 'jackson state']] |
comparison of brainwave entrainment software | https://en.wikipedia.org/wiki/Comparison_of_brainwave_entrainment_software | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/1-15038373-1.html.csv | majority | the majority of brainwave entrainment software programs have proprietary licenses . | {'scope': 'all', 'col': '5', 'most_or_all': 'most', 'criterion': 'equal', 'value': 'proprietary', 'subset': None} | {'func': 'most_str_eq', 'args': ['all_rows', 'license', 'proprietary'], 'result': True, 'ind': 0, 'tointer': 'for the license records of all rows , most of them fuzzily match to proprietary .', 'tostr': 'most_eq { all_rows ; license ; proprietary } = true'} | most_eq { all_rows ; license ; proprietary } = true | for the license records of all rows , most of them fuzzily match to proprietary . | 1 | 1 | {'most_str_eq_0': 0, 'result_1': 1, 'all_rows_2': 2, 'license_3': 3, 'proprietary_4': 4} | {'most_str_eq_0': 'most_str_eq', 'result_1': 'true', 'all_rows_2': 'all_rows', 'license_3': 'license', 'proprietary_4': 'proprietary'} | {'most_str_eq_0': [1], 'result_1': [], 'all_rows_2': [0], 'license_3': [0], 'proprietary_4': [0]} | ['software', 'version', 'operating systems', 'developer', 'license'] | [['beeone smod / hms', '3.1', 'windows', 'hemi - synths explorers', 'proprietary'], ['brainwave generator', '3.1', 'windows', 'noromaa solutions oy', 'proprietary'], ['gnaural', '1.0.20100707', 'freebsd , linux , mac os x , windows', 'gnaural', 'gpl'], ['brainigniter player', '6.0', 'windows', 'volition', 'proprietary'], ['neuro - programmer 3', '3.0.9.0', 'windows', 'transparent corp', 'proprietary'], ['mind workstation', '1.2.2.0', 'windows', 'transparent corp', 'proprietary'], ['sbagen', '1.4.4', 'dos , freebsd , linux , mac os x , windows , wince', 'uazu', 'gpl'], ['brainwave studio', '1.5', 'mac os x , ios', 'rcs software', 'proprietary'], ['discord', '3.2.1', 'linux', 'stan lysiak', 'gpl']] |
list of castle episodes | https://en.wikipedia.org/wiki/List_of_Castle_episodes | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/1-23958944-2.html.csv | aggregation | the show castle had an average of about 9.5 million us viewers from series 1-8 . | {'scope': 'all', 'col': '7', 'type': 'average', 'result': '9.15', 'subset': None} | {'func': 'round_eq', 'args': [{'func': 'avg', 'args': ['all_rows', 'us viewers ( in millions )'], 'result': '9.15', 'ind': 0, 'tostr': 'avg { all_rows ; us viewers ( in millions ) }'}, '9.15'], 'result': True, 'ind': 1, 'tostr': 'round_eq { avg { all_rows ; us viewers ( in millions ) } ; 9.15 } = true', 'tointer': 'the average of the us viewers ( in millions ) record of all rows is 9.15 .'} | round_eq { avg { all_rows ; us viewers ( in millions ) } ; 9.15 } = true | the average of the us viewers ( in millions ) record of all rows is 9.15 . | 2 | 2 | {'eq_1': 1, 'result_2': 2, 'avg_0': 0, 'all_rows_3': 3, 'us viewers (in millions)_4': 4, '9.15_5': 5} | {'eq_1': 'eq', 'result_2': 'true', 'avg_0': 'avg', 'all_rows_3': 'all_rows', 'us viewers (in millions)_4': 'us viewers ( in millions )', '9.15_5': '9.15'} | {'eq_1': [2], 'result_2': [], 'avg_0': [1], 'all_rows_3': [0], 'us viewers (in millions)_4': [0], '9.15_5': [1]} | ['no by series', 'title', 'directed by', 'written by', 'original air date', 'production number', 'us viewers ( in millions )'] | [['1', 'flowers for your grave', 'rob bowman', 'andrew w marlowe', 'march 9 , 2009', '101', '10.76'], ['2', 'nanny mcdead', 'john terlesky', 'barry schindel', 'march 16 , 2009', '103', '10.97'], ['3', 'hedge fund homeboys', 'rob bowman', 'david grae', 'march 23 , 2009', '104', '9.14'], ['4', 'hell hath no fury', 'rob bowman', 'andrew w marlowe', 'march 30 , 2009', '102', '9.09'], ['5', 'a chill goes through her veins', 'bryan spicer', 'charles murray', 'april 6 , 2009', '105', '9.03'], ['6', 'always buy retail', 'jamie babbit', 'gabrielle stanton & harry werksman', 'april 13 , 2009', '107', '7.73'], ['7', 'home is where the heart stops', 'dean white', 'will beall', 'april 20 , 2009', '106', '8.21'], ['8', 'ghosts', 'bryan spicer', 'moira kirland', 'april 27 , 2009', '108', '8.24']] |
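The aggregation row above can be checked the same way: compute the column average, then apply round_eq. The tolerance used below is an assumption, since the dataset only names the function.

    # avg over "us viewers ( in millions )" for the eight rows, then round_eq against 9.15.
    viewers = [10.76, 10.97, 9.14, 9.09, 9.03, 7.73, 8.21, 8.24]
    average = sum(viewers) / len(viewers)   # 9.14625
    print(round(average, 2))                # 9.15
    print(abs(average - 9.15) < 0.005)      # round_eq with a 0.005 tolerance -> True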
sat subject tests | https://en.wikipedia.org/wiki/SAT_subject_tests | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-1637315-1.html.csv | unique | for the sat subject tests , when the subject is math , the only time there were over 100000 students was when the test was sat subject test in mathematics level 2 . | {'scope': 'subset', 'row': '5', 'col': '5', 'col_other': '1', 'criterion': 'greater_than', 'value': '100000', 'subset': {'col': '1', 'criterion': 'fuzzily_match', 'value': 'math'}} | {'func': 'only', 'args': [{'func': 'filter_greater', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'test', 'math'], 'result': None, 'ind': 0, 'tostr': 'filter_eq { all_rows ; test ; math }', 'tointer': 'select the rows whose test record fuzzily matches to math .'}, 'number of students', '100000'], 'result': None, 'ind': 1, 'tointer': 'select the rows whose test record fuzzily matches to math . among these rows , select the rows whose number of students record is greater than 100000 .', 'tostr': 'filter_greater { filter_eq { all_rows ; test ; math } ; number of students ; 100000 }'}], 'result': True, 'ind': 2, 'tostr': 'only { filter_greater { filter_eq { all_rows ; test ; math } ; number of students ; 100000 } } = true', 'tointer': 'select the rows whose test record fuzzily matches to math . among these rows , select the rows whose number of students record is greater than 100000 . there is only one such row in the table .'} | only { filter_greater { filter_eq { all_rows ; test ; math } ; number of students ; 100000 } } = true | select the rows whose test record fuzzily matches to math . among these rows , select the rows whose number of students record is greater than 100000 . there is only one such row in the table . 
| 3 | 3 | {'only_2': 2, 'result_3': 3, 'filter_greater_1': 1, 'filter_str_eq_0': 0, 'all_rows_4': 4, 'test_5': 5, 'math_6': 6, 'number of students_7': 7, '100000_8': 8} | {'only_2': 'only', 'result_3': 'true', 'filter_greater_1': 'filter_greater', 'filter_str_eq_0': 'filter_str_eq', 'all_rows_4': 'all_rows', 'test_5': 'test', 'math_6': 'math', 'number of students_7': 'number of students', '100000_8': '100000'} | {'only_2': [3], 'result_3': [], 'filter_greater_1': [2], 'filter_str_eq_0': [1], 'all_rows_4': [0], 'test_5': [0], 'math_6': [0], 'number of students_7': [1], '100000_8': [1]} | ['test', 'subject', 'mean score', 'standard deviation', 'number of students'] | [['sat subject test in literature', 'literature', '576', '111', '120004'], ['sat subject test in united states history', 'us history', '608', '113', '126681'], ['sat subject test in world history', 'world history', '607', '118', '19688'], ['sat subject test in mathematics level 1', 'mathematics', '610', '100', '82827'], ['sat subject test in mathematics level 2', 'mathematics', '654', '107', '176472'], ['sat subject test in biology e / m', 'biology', 'e - 605 m - 635', '110 108', '86206 in total , 40076 ( e ) 46130 ( m )'], ['sat subject test in chemistry', 'chemistry', '648', '110', '76077'], ['sat subject test in physics', 'physics', '656', '105', '49608'], ['sat subject test in chinese with listening', 'chinese', '758', '67', '7294'], ['sat subject test in french', 'french', '622', '123', '10391'], ['sat subject test in french with listening', 'french', '646', '117', '2370'], ['sat subject test in german', 'german', '622', '135', '777'], ['sat subject test in german with listening', 'german', '611', '122', '770'], ['sat subject test in modern hebrew', 'modern hebrew', '623', '140', '491'], ['sat subject test in italian', 'italian', '666', '122', '737'], ['sat subject test in japanese with listening', 'japanese', '684', '113', '1966'], ['sat subject test in korean with listening', 'korean', '767', '57', '4273'], ['sat subject test in latin', 'latin', '611', '107', '3010'], ['sat subject test in spanish', 'spanish', '647', '117', '37762'], ['sat subject test in spanish with listening', 'spanish', '663', '107', '6399']] |
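For the unique ("only") pattern in the sat subject tests row, here is a sketch of the check over the two test rows whose name fuzzily matches "math"; the student counts are copied from table_cont and the threshold comes from the logic_str.

    # only { filter_greater { filter_eq { all_rows ; test ; math } ; number of students ; 100000 } }
    math_rows = [
        ("sat subject test in mathematics level 1", 82827),
        ("sat subject test in mathematics level 2", 176472),
    ]
    over = [name for name, n in math_rows if n > 100000]
    print(len(over) == 1 and over[0] == "sat subject test in mathematics level 2")  # True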
list of doctor who audio plays by big finish | https://en.wikipedia.org/wiki/List_of_Doctor_Who_audio_plays_by_Big_Finish | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/1-1620397-2.html.csv | majority | all of the doctor who audio plays by big finish were about the 6th doctor . | {'scope': 'all', 'col': '5', 'most_or_all': 'all', 'criterion': 'equal', 'value': '6th', 'subset': None} | {'func': 'all_str_eq', 'args': ['all_rows', 'doctor', '6th'], 'result': True, 'ind': 0, 'tointer': 'for the doctor records of all rows , all of them fuzzily match to 6th .', 'tostr': 'all_eq { all_rows ; doctor ; 6th } = true'} | all_eq { all_rows ; doctor ; 6th } = true | for the doctor records of all rows , all of them fuzzily match to 6th . | 1 | 1 | {'all_str_eq_0': 0, 'result_1': 1, 'all_rows_2': 2, 'doctor_3': 3, '6th_4': 4} | {'all_str_eq_0': 'all_str_eq', 'result_1': 'true', 'all_rows_2': 'all_rows', 'doctor_3': 'doctor', '6th_4': '6th'} | {'all_str_eq_0': [1], 'result_1': [], 'all_rows_2': [0], 'doctor_3': [0], '6th_4': [0]} | ['', 'series sorted', 'title', 'author', 'doctor', 'featuring', 'released'] | [['1', '6y / aa', 'the nightmare fair', 'graham williams ( adapted by john ainsworth )', '6th', 'peri , celestial toymaker', 'november 2009'], ['2', '6y / ab', 'mission to magnus', 'philip martin', '6th', 'peri , s ice warrior , sil', 'december 2009'], ['3', '6y / ac', 'leviathan', 'brian finch ( adapted by paul finch )', '6th', 'peri', 'january 2010'], ['5', '6y / ae', 'paradise 5', 'pj hammond and andy lane', '6th', 'peri', 'march 2010'], ['6', '6y / af', 'point of entry', 'barbara clegg and marc platt', '6th', 'peri', 'april 2010'], ['7', '6y / ag', 'the song of megaptera', 'pat mills', '6th', 'peri', 'may 2010']] |
miracle ( celine dion album ) | https://en.wikipedia.org/wiki/Miracle_%28Celine_Dion_album%29 | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-1097545-4.html.csv | majority | the album was released the most in october of 2004 . | {'scope': 'all', 'col': '2', 'most_or_all': 'most', 'criterion': 'fuzzily_match', 'value': 'october', 'subset': None} | {'func': 'most_str_eq', 'args': ['all_rows', 'date', 'october'], 'result': True, 'ind': 0, 'tointer': 'for the date records of all rows , most of them fuzzily match to october .', 'tostr': 'most_eq { all_rows ; date ; october } = true'} | most_eq { all_rows ; date ; october } = true | for the date records of all rows , most of them fuzzily match to october . | 1 | 1 | {'most_str_eq_0': 0, 'result_1': 1, 'all_rows_2': 2, 'date_3': 3, 'october_4': 4} | {'most_str_eq_0': 'most_str_eq', 'result_1': 'true', 'all_rows_2': 'all_rows', 'date_3': 'date', 'october_4': 'october'} | {'most_str_eq_0': [1], 'result_1': [], 'all_rows_2': [0], 'date_3': [0], 'october_4': [0]} | ['region', 'date', 'label', 'format', 'catalog'] | [['europe', 'october 11 , 2004', 'sony bmg , columbia', 'cd', 'col 518748 9'], ['europe', 'october 11 , 2004', 'sony bmg , columbia', 'cd / dvd', 'col 518748 7'], ['united states', 'october 12 , 2004', 'epic', 'cd', '5187482'], ['united states', 'october 12 , 2004', 'epic', 'cd / dvd', '5187487'], ['canada', 'october 12 , 2004', 'sony bmg , columbia', 'cd', '5187482'], ['canada', 'october 12 , 2004', 'sony bmg , columbia', 'cd / dvd', '5187487'], ['australia', 'october 15 , 2004', 'sony bmg , epic', 'cd', '5187482'], ['australia', 'october 15 , 2004', 'sony bmg , epic', 'cd / dvd', '5187487'], ['south korea', 'november 16 , 2004', 'sony bmg , columbia', 'cd', 'cpk - 3337'], ['japan', 'december 22 , 2004', 'sony music japan , epic', 'cd', '5187482']] |
1956 team speedway polish championship | https://en.wikipedia.org/wiki/1956_Team_Speedway_Polish_Championship | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-15432872-5.html.csv | unique | in the 1956 team speedway polish championship , for those that have over 10 points , the only one that lost 2 times is stal gorzów wlkp . | {'scope': 'subset', 'row': '2', 'col': '5', 'col_other': '1,3', 'criterion': 'equal', 'value': '2', 'subset': {'col': '3', 'criterion': 'greater_than', 'value': '10'}} | {'func': 'and', 'args': [{'func': 'only', 'args': [{'func': 'filter_eq', 'args': [{'func': 'filter_greater', 'args': ['all_rows', 'points', '10'], 'result': None, 'ind': 0, 'tostr': 'filter_greater { all_rows ; points ; 10 }', 'tointer': 'select the rows whose points record is greater than 10 .'}, 'lost', '2'], 'result': None, 'ind': 1, 'tointer': 'select the rows whose points record is greater than 10 . among these rows , select the rows whose lost record is equal to 2 .', 'tostr': 'filter_eq { filter_greater { all_rows ; points ; 10 } ; lost ; 2 }'}], 'result': True, 'ind': 2, 'tostr': 'only { filter_eq { filter_greater { all_rows ; points ; 10 } ; lost ; 2 } }', 'tointer': 'select the rows whose points record is greater than 10 . among these rows , select the rows whose lost record is equal to 2 . there is only one such row in the table .'}, {'func': 'str_eq', 'args': [{'func': 'str_hop', 'args': [{'func': 'filter_eq', 'args': [{'func': 'filter_greater', 'args': ['all_rows', 'points', '10'], 'result': None, 'ind': 0, 'tostr': 'filter_greater { all_rows ; points ; 10 }', 'tointer': 'select the rows whose points record is greater than 10 .'}, 'lost', '2'], 'result': None, 'ind': 1, 'tointer': 'select the rows whose points record is greater than 10 . among these rows , select the rows whose lost record is equal to 2 .', 'tostr': 'filter_eq { filter_greater { all_rows ; points ; 10 } ; lost ; 2 }'}, 'team'], 'result': 'stal gorzów wlkp', 'ind': 3, 'tostr': 'hop { filter_eq { filter_greater { all_rows ; points ; 10 } ; lost ; 2 } ; team }'}, 'stal gorzów wlkp'], 'result': True, 'ind': 4, 'tostr': 'eq { hop { filter_eq { filter_greater { all_rows ; points ; 10 } ; lost ; 2 } ; team } ; stal gorzów wlkp }', 'tointer': 'the team record of this unqiue row is stal gorzów wlkp .'}], 'result': True, 'ind': 5, 'tostr': 'and { only { filter_eq { filter_greater { all_rows ; points ; 10 } ; lost ; 2 } } ; eq { hop { filter_eq { filter_greater { all_rows ; points ; 10 } ; lost ; 2 } ; team } ; stal gorzów wlkp } } = true', 'tointer': 'select the rows whose points record is greater than 10 . among these rows , select the rows whose lost record is equal to 2 . there is only one such row in the table . the team record of this unqiue row is stal gorzów wlkp .'} | and { only { filter_eq { filter_greater { all_rows ; points ; 10 } ; lost ; 2 } } ; eq { hop { filter_eq { filter_greater { all_rows ; points ; 10 } ; lost ; 2 } ; team } ; stal gorzów wlkp } } = true | select the rows whose points record is greater than 10 . among these rows , select the rows whose lost record is equal to 2 . there is only one such row in the table . the team record of this unqiue row is stal gorzów wlkp . 
| 8 | 6 | {'and_5': 5, 'result_6': 6, 'only_2': 2, 'filter_eq_1': 1, 'filter_greater_0': 0, 'all_rows_7': 7, 'points_8': 8, '10_9': 9, 'lost_10': 10, '2_11': 11, 'str_eq_4': 4, 'str_hop_3': 3, 'team_12': 12, 'stal gorzów wlkp_13': 13} | {'and_5': 'and', 'result_6': 'true', 'only_2': 'only', 'filter_eq_1': 'filter_eq', 'filter_greater_0': 'filter_greater', 'all_rows_7': 'all_rows', 'points_8': 'points', '10_9': '10', 'lost_10': 'lost', '2_11': '2', 'str_eq_4': 'str_eq', 'str_hop_3': 'str_hop', 'team_12': 'team', 'stal gorzów wlkp_13': 'stal gorzów wlkp'} | {'and_5': [6], 'result_6': [], 'only_2': [5], 'filter_eq_1': [2, 3], 'filter_greater_0': [1], 'all_rows_7': [0], 'points_8': [0], '10_9': [0], 'lost_10': [1], '2_11': [1], 'str_eq_4': [5], 'str_hop_3': [4], 'team_12': [3], 'stal gorzów wlkp_13': [4]} | ['team', 'match', 'points', 'draw', 'lost'] | [['włókniarz częstochowa', '14', '26', '0', '1'], ['stal gorzów wlkp', '14', '23', '1', '2'], ['amk katowice', '14', '15', '1', '6'], ['resovia rzeszów', '14', '14', '0', '7'], ['sparta śrem', '14', '13', '1', '7'], ['gwardia poznań', '14', '12', '0', '8'], ['start gniezno', '14', '7', '1', '10'], ['kolejarz piła', '14', '2', '0', '13']] |
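The combined only + hop form in the speedway row can be sketched as two chained filters followed by a uniqueness and name check; this assumes the numeric columns are parsed as integers.

    # filter_greater(points > 10), then filter_eq(lost == 2), then only + hop(team).
    header = ["team", "match", "points", "draw", "lost"]
    rows = [
        ["włókniarz częstochowa", 14, 26, 0, 1],
        ["stal gorzów wlkp", 14, 23, 1, 2],
        ["amk katowice", 14, 15, 1, 6],
        ["resovia rzeszów", 14, 14, 0, 7],
        ["sparta śrem", 14, 13, 1, 7],
        ["gwardia poznań", 14, 12, 0, 8],
        ["start gniezno", 14, 7, 1, 10],
        ["kolejarz piła", 14, 2, 0, 13],
    ]
    candidates = [r for r in rows
                  if r[header.index("points")] > 10 and r[header.index("lost")] == 2]
    print(len(candidates) == 1 and candidates[0][0] == "stal gorzów wlkp")  # True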
main line broadcasting | https://en.wikipedia.org/wiki/Main_Line_Broadcasting | https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/1-19131921-1.html.csv | count | four of the main line broadcasting radio stations broadcast with an oldies type format . | {'scope': 'all', 'criterion': 'equal', 'value': 'oldies', 'result': '4', 'col': '6', 'subset': None} | {'func': 'eq', 'args': [{'func': 'count', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'format', 'oldies'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose format record fuzzily matches to oldies .', 'tostr': 'filter_eq { all_rows ; format ; oldies }'}], 'result': '4', 'ind': 1, 'tostr': 'count { filter_eq { all_rows ; format ; oldies } }', 'tointer': 'select the rows whose format record fuzzily matches to oldies . the number of such rows is 4 .'}, '4'], 'result': True, 'ind': 2, 'tostr': 'eq { count { filter_eq { all_rows ; format ; oldies } } ; 4 } = true', 'tointer': 'select the rows whose format record fuzzily matches to oldies . the number of such rows is 4 .'} | eq { count { filter_eq { all_rows ; format ; oldies } } ; 4 } = true | select the rows whose format record fuzzily matches to oldies . the number of such rows is 4 . | 3 | 3 | {'eq_2': 2, 'result_3': 3, 'count_1': 1, 'filter_str_eq_0': 0, 'all_rows_4': 4, 'format_5': 5, 'oldies_6': 6, '4_7': 7} | {'eq_2': 'eq', 'result_3': 'true', 'count_1': 'count', 'filter_str_eq_0': 'filter_str_eq', 'all_rows_4': 'all_rows', 'format_5': 'format', 'oldies_6': 'oldies', '4_7': '4'} | {'eq_2': [3], 'result_3': [], 'count_1': [2], 'filter_str_eq_0': [1], 'all_rows_4': [0], 'format_5': [0], 'oldies_6': [0], '4_7': [2]} | ['dma', 'market', 'station', 'frequency', 'branding', 'format'] | [['53', 'louisville , ky', 'wgzb - fm', '96.5', 'b96 .5', 'urban'], ['53', 'louisville , ky', 'wdjx - fm', '99.7', '99.7 djx', 'contemporary hit radio'], ['53', 'louisville , ky', 'wmjm - fm', '101.3', 'magic 101.3', 'urban ac'], ['53', 'louisville , ky', 'wxma - fm', '102.3', '102.3 the max', 'hot ac'], ['53', 'louisville , ky', 'wesi', '105.1', 'easy rock 105.1', 'soft adult contemporary'], ['56', 'richmond - petersburg , va', 'wlfv - fm', '93.1', '93.1 the wolf', 'southern country'], ['56', 'richmond - petersburg , va', 'wwlb - fm', '98.9', '98.9 liberty', 'variety hits'], ['56', 'richmond - petersburg , va', 'warv - fm', '100.3', 'big oldies 107.3', 'oldies'], ['56', 'richmond - petersburg , va', 'wbbt - fm', '107.3', 'big oldies 107.3', 'oldies'], ['60', 'dayton , oh', 'wrou - fm', '92.1', '92.1 wrou', 'urban ac'], ['60', 'dayton , oh', 'wgtz - fm', '92.9', 'fly 92.9', 'variety hits'], ['60', 'dayton , oh', 'wcli - fm', '101.5', 'click 101.5', 'modern hit music'], ['60', 'dayton , oh', 'wdht - fm', '102.9', 'hot 102.9', 'rhythmic contemporary'], ['60', 'dayton , oh', 'wing - am', '1410', 'espn 1410', 'sports'], ['166', 'hagerstown , md - chambersburg , pa', 'wqcm - fm', '94.3', '94.3 wqcm', 'rock'], ['166', 'hagerstown , md - chambersburg , pa', 'wikz - fm', '95.1', 'mix 95.1', 'adult contemporary'], ['166', 'hagerstown , md - chambersburg , pa', 'wdld - fm', '96.7', 'wild 96.7', 'rhythmic contemporary hit radio'], ['166', 'hagerstown , md - chambersburg , pa', 'wcha - am', '800', 'true oldies 96.3', 'oldies'], ['166', 'hagerstown , md - chambersburg , pa', 'whag - am', '1410', 'true oldies 96.3', 'oldies']] |