column            dtype          value stats
topic             string         lengths 3 to 96
wiki              string         lengths 33 to 127
url               string         lengths 101 to 106
action            string         7 classes
sent              string         lengths 34 to 223
annotation        string         lengths 74 to 227
logic             string         lengths 207 to 5.45k
logic_str         string         lengths 37 to 493
interpret         string         lengths 43 to 471
num_func          string         15 classes
nid               string         13 classes
g_ids             string         lengths 70 to 455
g_ids_features    string         lengths 98 to 670
g_adj             string         lengths 79 to 515
table_header      string         lengths 40 to 458
table_cont        large string   lengths 135 to 4.41k
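The nested fields in the records below (logic, g_ids, table_header, table_cont, and so on) are stored as Python-style string literals. A minimal sketch, assuming those strings are valid Python literals as they appear here, for turning one record's table fields back into a header list and indexed rows; the sample strings are copied (and the rows trimmed) from the 2006 - 07 isu junior grand prix record further down:

```python
import ast

# table_header / table_cont arrive as stringified Python lists;
# ast.literal_eval recovers the list objects without executing code.
table_header_str = "['rank', 'nation', 'gold', 'silver', 'bronze', 'total']"
table_cont_str = "[['1', 'united states', '24', '12', '8', '44'], ['2', 'russia', '5', '5', '6', '16']]"

header = ast.literal_eval(table_header_str)
rows = ast.literal_eval(table_cont_str)

# Index each cell by column name, which is convenient for the
# logic-form operations illustrated after the records below.
records = [dict(zip(header, row)) for row in rows]
print(records[0]['nation'])  # -> 'united states'
```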
list of bohemian consorts
https://en.wikipedia.org/wiki/List_of_Bohemian_consorts
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-10870631-9.html.csv
superlative
maria louisa of spain was the earliest bohemian consort from the house of hasburg-lorraine to become queen .
{'scope': 'all', 'col_superlative': '5', 'row_superlative': '1', 'value_mentioned': 'no', 'max_or_min': 'min', 'other_col': '1', 'subset': None}
{'func': 'str_eq', 'args': [{'func': 'str_hop', 'args': [{'func': 'argmin', 'args': ['all_rows', 'became queen'], 'result': None, 'ind': 0, 'tostr': 'argmin { all_rows ; became queen }'}, 'name'], 'result': 'maria louisa of spain', 'ind': 1, 'tostr': 'hop { argmin { all_rows ; became queen } ; name }'}, 'maria louisa of spain'], 'result': True, 'ind': 2, 'tostr': 'eq { hop { argmin { all_rows ; became queen } ; name } ; maria louisa of spain } = true', 'tointer': 'select the row whose became queen record of all rows is minimum . the name record of this row is maria louisa of spain .'}
eq { hop { argmin { all_rows ; became queen } ; name } ; maria louisa of spain } = true
select the row whose became queen record of all rows is minimum . the name record of this row is maria louisa of spain .
3
3
{'str_eq_2': 2, 'result_3': 3, 'str_hop_1': 1, 'argmin_0': 0, 'all_rows_4': 4, 'became queen_5': 5, 'name_6': 6, 'maria louisa of spain_7': 7}
{'str_eq_2': 'str_eq', 'result_3': 'true', 'str_hop_1': 'str_hop', 'argmin_0': 'argmin', 'all_rows_4': 'all_rows', 'became queen_5': 'became queen', 'name_6': 'name', 'maria louisa of spain_7': 'maria louisa of spain'}
{'str_eq_2': [3], 'result_3': [], 'str_hop_1': [2], 'argmin_0': [1], 'all_rows_4': [0], 'became queen_5': [0], 'name_6': [1], 'maria louisa of spain_7': [2]}
['name', 'father', 'birth', 'marriage', 'became queen', 'ceased to be queen', 'death', 'spouse']
[['maria louisa of spain', 'charles iii of spain', '24 november 1745', '16 february 1764', '20 february 1790', '1 march 1792', '15 may 1792', 'leopold ii'], ['maria teresa of the two sicilies', 'ferdinand i of the two sicilies', '6 june 1772', '15 august 1790', '1 march 1792', '13 april 1807', '13 april 1807', 'francis ii'], ['maria ludovika of austria - este', 'archduke ferdinand of austria - este', '14 december 1787', '6 january 1808', '6 january 1808', '7 april 1816', '7 april 1816', 'francis ii'], ['caroline augusta of bavaria', 'maximilian i joseph of bavaria', '8 february 1792', '29 october 1816', '29 october 1816', "2 march 1835 husband 's death", '9 february 1873', 'francis ii'], ['maria anna of sardinia', 'victor emmanuel i of sardinia', '19 september 1803', '12 february 1831', "2 march 1835 husband 's ascension", "2 december 1848 husband 's abdication", '4 may 1884', 'ferdinand v'], ['elisabeth of bavaria', 'maximilian joseph , duke in bavaria', '24 december 1837', '24 april 1854', '24 april 1854', '10 september 1898', '10 september 1898', 'francis joseph i'], ['zita of bourbon - parma', 'robert i , duke of parma', '9 may 1892', '13 june 1911', "21 november 1916 husband 's ascension", "11 november 1918 husband 's deposition", '14 march 1989', 'charles iii']]
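The logic form of this record composes argmin over the "became queen" column with a hop to "name" and a string equality. A minimal sketch of how that composition could be evaluated against the table above, assuming dates are compared after stripping trailing notes such as "husband 's ascension"; the helper names are illustrative, not the dataset's reference implementation:

```python
from datetime import datetime

# ('name', 'became queen') pairs copied from the table above.
rows = [
    ('maria louisa of spain', '20 february 1790'),
    ('maria teresa of the two sicilies', '1 march 1792'),
    ('maria ludovika of austria - este', '6 january 1808'),
    ('caroline augusta of bavaria', '29 october 1816'),
    ('maria anna of sardinia', "2 march 1835 husband 's ascension"),
    ('elisabeth of bavaria', '24 april 1854'),
    ('zita of bourbon - parma', "21 november 1916 husband 's ascension"),
]

def parse_date(cell):
    # Keep only the 'day month year' part; some cells carry extra notes.
    return datetime.strptime(' '.join(cell.split()[:3]), '%d %B %Y')

# argmin { all_rows ; became queen }  ->  row with the earliest date
earliest = min(rows, key=lambda r: parse_date(r[1]))
# hop { ... ; name } ; eq { ... ; maria louisa of spain }
assert earliest[0] == 'maria louisa of spain'
```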
conference carolinas
https://en.wikipedia.org/wiki/Conference_Carolinas
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-11658094-3.html.csv
superlative
east carolina university left the conference carolinas earlier than any other institution .
{'scope': 'all', 'col_superlative': '8', 'row_superlative': '5', 'value_mentioned': 'no', 'max_or_min': 'min', 'other_col': '1', 'subset': None}
{'func': 'str_eq', 'args': [{'func': 'str_hop', 'args': [{'func': 'argmin', 'args': ['all_rows', 'left'], 'result': None, 'ind': 0, 'tostr': 'argmin { all_rows ; left }'}, 'institution'], 'result': 'east carolina university', 'ind': 1, 'tostr': 'hop { argmin { all_rows ; left } ; institution }'}, 'east carolina university'], 'result': True, 'ind': 2, 'tostr': 'eq { hop { argmin { all_rows ; left } ; institution } ; east carolina university } = true', 'tointer': 'select the row whose left record of all rows is minimum . the institution record of this row is east carolina university .'}
eq { hop { argmin { all_rows ; left } ; institution } ; east carolina university } = true
select the row whose left record of all rows is minimum . the institution record of this row is east carolina university .
3
3
{'str_eq_2': 2, 'result_3': 3, 'str_hop_1': 1, 'argmin_0': 0, 'all_rows_4': 4, 'left_5': 5, 'institution_6': 6, 'east carolina university_7': 7}
{'str_eq_2': 'str_eq', 'result_3': 'true', 'str_hop_1': 'str_hop', 'argmin_0': 'argmin', 'all_rows_4': 'all_rows', 'left_5': 'left', 'institution_6': 'institution', 'east carolina university_7': 'east carolina university'}
{'str_eq_2': [3], 'result_3': [], 'str_hop_1': [2], 'argmin_0': [1], 'all_rows_4': [0], 'left_5': [0], 'institution_6': [1], 'east carolina university_7': [2]}
['institution', 'location', 'founded', 'type', 'enrollment', 'nickname', 'joined', 'left', 'current conference']
[['anderson university', 'anderson , south carolina', '1911', 'private', '2907', 'trojans', '1998', '2010', 'sac'], ['appalachian state university', 'boone , north carolina', '1899', 'public', '17589', 'mountaineers', '1930', '1967', 'socon ( sun belt in 2014 ) ( ncaa division i )'], ['catawba college', 'salisbury , north carolina', '1851', 'private', '1300', 'indians', '1930', '1975', 'sac'], ['coker college', 'hartsville , south carolina', '1908', 'private', '1200', 'cobras', '1991', '2013', 'sac'], ['east carolina university', 'greenville , north carolina', '1907', 'public', '27386', 'pirates', '1947', '1962', 'c - usa ( the american in 2014 ) ( ncaa division i )'], ['elon university', 'elon , north carolina', '1889', 'private', '6720', 'phoenix', '1930', '1975', 'socon ( caa in 2014 ) ( ncaa division i )'], ['guilford college', 'greensboro , north carolina', '1837', 'private', '2706', 'quakers', '1930', '1988', 'odac ( ncaa division iii )'], ['high point university', 'high point , north carolina', '1924', 'private', '4519', 'panthers', '1930', '1997', 'big south ( ncaa division i )'], ['lenoirrhyne university', 'hickory , north carolina', '1891', 'private', '1983', 'bears', '1930', '1975', 'sac'], ['longwood university', 'farmville , virginia', '1839', 'public', '4800', 'lancers', '1995', '2003', 'big south ( ncaa division i )'], ['mars hill college', 'mars hill , north carolina', '1856', 'private', '1370', 'lions', '1973', '1975', 'sac'], ['newberry college', 'newberry , south carolina', '1856', 'private', '949', 'wolves', '1961', '1972', 'sac'], ['university of north carolina at pembroke', 'pembroke , north carolina', '1887', 'public', '6433', 'braves', '1976', '1992', 'peach belt ( pbc )'], ['presbyterian college', 'clinton , south carolina', '1880', 'private', '1300', 'blue hose', '1965', '1972', 'big south ( ncaa division i )'], ['queens university of charlotte', 'charlotte , north carolina', '1857', 'private', '2386', 'royals', '1995', '2013', 'sac'], ['st andrews university', 'laurinburg , north carolina', '1958', 'private', '600', 'knights', '1988', '2012', 'aac ( naia )'], ['western carolina university', 'cullowhee , north carolina', '1889', 'public', '9608', 'catamounts', '1933', '1975', 'socon ( ncaa division i )'], ['wingate university', 'wingate , north carolina', '1896', 'private', '2700', 'bulldogs', '1979', '1989', 'sac']]
the sunday night project
https://en.wikipedia.org/wiki/The_Sunday_Night_Project
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-1590967-2.html.csv
count
four of the episodes aired in the month of january .
{'scope': 'all', 'criterion': 'fuzzily_match', 'value': 'january', 'result': '4', 'col': '2', 'subset': None}
{'func': 'eq', 'args': [{'func': 'count', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'air date', 'january'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose air date record fuzzily matches to january .', 'tostr': 'filter_eq { all_rows ; air date ; january }'}], 'result': '4', 'ind': 1, 'tostr': 'count { filter_eq { all_rows ; air date ; january } }', 'tointer': 'select the rows whose air date record fuzzily matches to january . the number of such rows is 4 .'}, '4'], 'result': True, 'ind': 2, 'tostr': 'eq { count { filter_eq { all_rows ; air date ; january } } ; 4 } = true', 'tointer': 'select the rows whose air date record fuzzily matches to january . the number of such rows is 4 .'}
eq { count { filter_eq { all_rows ; air date ; january } } ; 4 } = true
select the rows whose air date record fuzzily matches to january . the number of such rows is 4 .
3
3
{'eq_2': 2, 'result_3': 3, 'count_1': 1, 'filter_str_eq_0': 0, 'all_rows_4': 4, 'air date_5': 5, 'january_6': 6, '4_7': 7}
{'eq_2': 'eq', 'result_3': 'true', 'count_1': 'count', 'filter_str_eq_0': 'filter_str_eq', 'all_rows_4': 'all_rows', 'air date_5': 'air date', 'january_6': 'january', '4_7': '4'}
{'eq_2': [3], 'result_3': [], 'count_1': [2], 'filter_str_eq_0': [1], 'all_rows_4': [0], 'air date_5': [0], 'january_6': [0], '4_7': [2]}
['episode number', 'air date', 'guest host', 'musical guest ( song performed )', 'who knows the most about the guest host panelists']
[['1', '6 january 2006', 'billie piper', 'texas ( sleep )', 'jade goody and kenzie'], ['2', '13 january 2006', 'lorraine kelly', 'editors ( munich )', 'myleene klass and phil tufnell'], ['3', '20 january 2006', 'christian slater', "the kooks ( you do n't love me )", 'lady isabella hervey and fearne cotton'], ['4', '27 january 2006', 'denise van outen', 'boy kill boy ( back again )', 'bez and nadia almada'], ['5', '3 february 2006', 'michael barrymore', 'the ordinary boys ( boys will be boys )', 'nancy sorrell and samia smith'], ['6', '10 february 2006', 'jamie oliver', 'kubb ( grow )', 'tara palmer - tomkinson and chantelle houghton'], ['7', '17 february 2006', 'jessie wallace', 'hard - fi ( hard to beat )', 'caprice bourret and hilda braid'], ['8', '24 february 2006', 'trisha goddard', 'the automatic ( raoul )', 'faria alam and pete burns']]
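This record's count logic filters rows whose "air date" fuzzily matches "january" and compares the count to 4. A minimal sketch, treating "fuzzily matches" as a case-insensitive substring test, which is an assumption about the filter semantics:

```python
# 'air date' values copied from the table above.
air_dates = [
    '6 january 2006', '13 january 2006', '20 january 2006', '27 january 2006',
    '3 february 2006', '10 february 2006', '17 february 2006', '24 february 2006',
]

# filter_eq { all_rows ; air date ; january }  ->  substring match
january_rows = [d for d in air_dates if 'january' in d.lower()]

# eq { count { ... } ; 4 }
assert len(january_rows) == 4
```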
2008 atlanta dream season
https://en.wikipedia.org/wiki/2008_Atlanta_Dream_season
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-17103645-10.html.csv
unique
feenstra only had the high point total 1 time during the entire season .
{'scope': 'all', 'row': '6', 'col': '5', 'col_other': 'n/a', 'criterion': 'equal', 'value': 'feenstra', 'subset': None}
{'func': 'only', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'high points', 'feenstra'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose high points record fuzzily matches to feenstra .', 'tostr': 'filter_eq { all_rows ; high points ; feenstra }'}], 'result': True, 'ind': 1, 'tostr': 'only { filter_eq { all_rows ; high points ; feenstra } } = true', 'tointer': 'select the rows whose high points record fuzzily matches to feenstra . there is only one such row in the table .'}
only { filter_eq { all_rows ; high points ; feenstra } } = true
select the rows whose high points record fuzzily matches to feenstra . there is only one such row in the table .
2
2
{'only_1': 1, 'result_2': 2, 'filter_str_eq_0': 0, 'all_rows_3': 3, 'high points_4': 4, 'feenstra_5': 5}
{'only_1': 'only', 'result_2': 'true', 'filter_str_eq_0': 'filter_str_eq', 'all_rows_3': 'all_rows', 'high points_4': 'high points', 'feenstra_5': 'feenstra'}
{'only_1': [2], 'result_2': [], 'filter_str_eq_0': [1], 'all_rows_3': [0], 'high points_4': [0], 'feenstra_5': [0]}
['game', 'date', 'opponent', 'score', 'high points', 'high rebounds', 'high assists', 'location / attendance', 'record']
[['16', 'july 1', 'phoenix', '79 - 97', 'lennox ( 18 )', 'lovelace , young ( 7 )', 'latta ( 5 )', 'philips arena 9795', '0 - 16'], ['17', 'july 3', 'houston', '65 - 72', 'lennox ( 15 )', 'feenstra ( 9 )', 'haynie , strother ( 3 )', 'philips arena 7430', '0 - 17'], ['18', 'july 5', 'chicago', '91 - 84', 'lacy , latta ( 18 )', 'young ( 8 )', 'haynie ( 11 )', 'philips arena 8468', '1 - 17'], ['19', 'july 9', 'minnesota', '73 - 67', 'lennox ( 24 )', 'bales ( 11 )', 'haynie ( 5 )', 'target center 5893', '2 - 17'], ['20', 'july 11', 'san antonio', '74 - 82', 'lennox ( 22 )', 'bales ( 9 )', 'lennox ( 4 )', 'at & t center 10943', '2 - 18'], ['21', 'july 13', 'chicago', '66 - 79', 'feenstra ( 21 )', 'feenstra ( 8 )', 'feenstra , haynie , lennox ( 2 )', 'uic pavilion 2907', '2 - 19'], ['22', 'july 16', 'indiana', '81 - 77', 'castro marques ( 24 )', 'bales ( 11 )', 'haynie ( 7 )', 'conseco fieldhouse 9303', '3 - 19'], ['23', 'july 18', 'sacramento', '73 - 77', 'haynie ( 12 )', 'feenstra ( 8 )', 'haynie ( 4 )', 'arco arena 7236', '3 - 20'], ['24', 'july 19', 'phoenix', '84 - 110', 'latta ( 18 )', 'terry ( 11 )', 'latta ( 3 )', 'us airways center 7913', '3 - 21'], ['25', 'july 22', 'sacramento', '66 - 79', 'terry , latta ( 15 )', 'terry ( 9 )', 'haynie ( 4 )', 'philips arena 10431', '3 - 22'], ['26', 'july 25', 'washington', '75 - 81', 'castro marques ( 23 )', 'bales ( 7 )', 'haynie ( 3 )', 'philips arena 8279', '3 - 23'], ['27', 'july 27', 'new york', '76 - 86', 'lennox ( 18 )', 'desouza ( 11 )', 'latta ( 5 )', 'philips arena 8759', '3 - 24']]
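The "only" operator in this record asserts that exactly one row survives the filter: a single game lists feenstra for high points. A minimal sketch under the same substring-match assumption as above:

```python
# 'high points' values copied from the table above.
high_points = [
    'lennox ( 18 )', 'lennox ( 15 )', 'lacy , latta ( 18 )', 'lennox ( 24 )',
    'lennox ( 22 )', 'feenstra ( 21 )', 'castro marques ( 24 )', 'haynie ( 12 )',
    'latta ( 18 )', 'terry , latta ( 15 )', 'castro marques ( 23 )', 'lennox ( 18 )',
]

# only { filter_eq { all_rows ; high points ; feenstra } }
matches = [v for v in high_points if 'feenstra' in v]
assert len(matches) == 1
```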
2010 big east conference football season
https://en.wikipedia.org/wiki/2010_Big_East_Conference_football_season
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/1-28298589-4.html.csv
unique
the marshall versus west virgina game was the only one televised on espn2 .
{'scope': 'all', 'row': '1', 'col': '6', 'col_other': '3,4', 'criterion': 'equal', 'value': 'espn2', 'subset': None}
{'func': 'and', 'args': [{'func': 'only', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'broadcast', 'espn2'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose broadcast record fuzzily matches to espn2 .', 'tostr': 'filter_eq { all_rows ; broadcast ; espn2 }'}], 'result': True, 'ind': 1, 'tostr': 'only { filter_eq { all_rows ; broadcast ; espn2 } }', 'tointer': 'select the rows whose broadcast record fuzzily matches to espn2 . there is only one such row in the table .'}, {'func': 'and', 'args': [{'func': 'str_eq', 'args': [{'func': 'str_hop', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'broadcast', 'espn2'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose broadcast record fuzzily matches to espn2 .', 'tostr': 'filter_eq { all_rows ; broadcast ; espn2 }'}, 'visiting team'], 'result': 'no 23 west virginia', 'ind': 2, 'tostr': 'hop { filter_eq { all_rows ; broadcast ; espn2 } ; visiting team }'}, 'no 23 west virginia'], 'result': True, 'ind': 3, 'tostr': 'eq { hop { filter_eq { all_rows ; broadcast ; espn2 } ; visiting team } ; no 23 west virginia }', 'tointer': 'the visiting team record of this unqiue row is no 23 west virginia .'}, {'func': 'str_eq', 'args': [{'func': 'str_hop', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'broadcast', 'espn2'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose broadcast record fuzzily matches to espn2 .', 'tostr': 'filter_eq { all_rows ; broadcast ; espn2 }'}, 'home team'], 'result': 'marshall', 'ind': 4, 'tostr': 'hop { filter_eq { all_rows ; broadcast ; espn2 } ; home team }'}, 'marshall'], 'result': True, 'ind': 5, 'tostr': 'eq { hop { filter_eq { all_rows ; broadcast ; espn2 } ; home team } ; marshall }', 'tointer': 'the home team record of this unqiue row is marshall .'}], 'result': True, 'ind': 6, 'tostr': 'and { eq { hop { filter_eq { all_rows ; broadcast ; espn2 } ; visiting team } ; no 23 west virginia } ; eq { hop { filter_eq { all_rows ; broadcast ; espn2 } ; home team } ; marshall } }', 'tointer': 'the visiting team record of this unqiue row is no 23 west virginia . the home team record of this unqiue row is marshall .'}], 'result': True, 'ind': 7, 'tostr': 'and { only { filter_eq { all_rows ; broadcast ; espn2 } } ; and { eq { hop { filter_eq { all_rows ; broadcast ; espn2 } ; visiting team } ; no 23 west virginia } ; eq { hop { filter_eq { all_rows ; broadcast ; espn2 } ; home team } ; marshall } } } = true', 'tointer': 'select the rows whose broadcast record fuzzily matches to espn2 . there is only one such row in the table . the visiting team record of this unqiue row is no 23 west virginia . the home team record of this unqiue row is marshall .'}
and { only { filter_eq { all_rows ; broadcast ; espn2 } } ; and { eq { hop { filter_eq { all_rows ; broadcast ; espn2 } ; visiting team } ; no 23 west virginia } ; eq { hop { filter_eq { all_rows ; broadcast ; espn2 } ; home team } ; marshall } } } = true
select the rows whose broadcast record fuzzily matches to espn2 . there is only one such row in the table . the visiting team record of this unqiue row is no 23 west virginia . the home team record of this unqiue row is marshall .
10
8
{'and_7': 7, 'result_8': 8, 'only_1': 1, 'filter_str_eq_0': 0, 'all_rows_9': 9, 'broadcast_10': 10, 'espn2_11': 11, 'and_6': 6, 'str_eq_3': 3, 'str_hop_2': 2, 'visiting team_12': 12, 'no 23 west virginia_13': 13, 'str_eq_5': 5, 'str_hop_4': 4, 'home team_14': 14, 'marshall_15': 15}
{'and_7': 'and', 'result_8': 'true', 'only_1': 'only', 'filter_str_eq_0': 'filter_str_eq', 'all_rows_9': 'all_rows', 'broadcast_10': 'broadcast', 'espn2_11': 'espn2', 'and_6': 'and', 'str_eq_3': 'str_eq', 'str_hop_2': 'str_hop', 'visiting team_12': 'visiting team', 'no 23 west virginia_13': 'no 23 west virginia', 'str_eq_5': 'str_eq', 'str_hop_4': 'str_hop', 'home team_14': 'home team', 'marshall_15': 'marshall'}
{'and_7': [8], 'result_8': [], 'only_1': [7], 'filter_str_eq_0': [1, 2, 4], 'all_rows_9': [0], 'broadcast_10': [0], 'espn2_11': [0], 'and_6': [7], 'str_eq_3': [6], 'str_hop_2': [3], 'visiting team_12': [2], 'no 23 west virginia_13': [3], 'str_eq_5': [6], 'str_hop_4': [5], 'home team_14': [4], 'marshall_15': [5]}
['date', 'time', 'visiting team', 'home team', 'site', 'broadcast', 'result', 'attendance']
[['september 10', '7:00 pm', 'no 23 west virginia', 'marshall', 'joan c edwards stadium huntington , wv', 'espn2', 'w 24 - 21 ot', '41382'], ['september 11', '12:00 pm', 'south florida', 'no 8 florida', 'ben hill griffin stadium gainesville , fl', 'big east network', 'l 14 - 38', '90612'], ['september 11', '12:00 pm', 'indiana state', 'cincinnati', 'nippert stadium cincinnati , oh', 'fsohio', 'w 40 - 7', '30807'], ['september 11', '12:00 pm', 'texas southern', 'connecticut', 'rentschler field east hartford , ct', 'big east network', 'w 62 - 3', '37359'], ['september 11', '1:00 pm', 'new hampshire', 'pittsburgh', 'heinz field pittsburgh , pa', 'espn3.com', 'w 38 - 16', '50120'], ['september 11', '3:30 pm', 'eastern kentucky', 'louisville', "papa john 's cardinal stadium louisville , ky", 'big east network', 'w 23 - 13', '51427'], ['september 11', '7:00 pm', 'syracuse', 'washington', 'husky stadium seattle , wa', 'fsn northwest', 'l 20 - 41', '62418']]
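This record chains "only" with two hops on the unique surviving row, checking both the visiting team and the home team cells. A minimal sketch, again treating the fuzzy match as a substring test:

```python
# (visiting team, home team, broadcast) triples copied from the table above.
games = [
    ('no 23 west virginia', 'marshall', 'espn2'),
    ('south florida', 'no 8 florida', 'big east network'),
    ('indiana state', 'cincinnati', 'fsohio'),
    ('texas southern', 'connecticut', 'big east network'),
    ('new hampshire', 'pittsburgh', 'espn3.com'),
    ('eastern kentucky', 'louisville', 'big east network'),
    ('syracuse', 'washington', 'fsn northwest'),
]

# filter_eq { all_rows ; broadcast ; espn2 }
espn2_games = [g for g in games if 'espn2' in g[2]]

# only { ... } plus the two hop / eq checks on that unique row
assert len(espn2_games) == 1
visiting, home, _ = espn2_games[0]
assert visiting == 'no 23 west virginia' and home == 'marshall'
```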
1965 philadelphia eagles season
https://en.wikipedia.org/wiki/1965_Philadelphia_Eagles_season
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-18843092-2.html.csv
ordinal
during the 1965 philadelphia eagles season , 54714 people attended the second game played against the dallas cowboys .
{'scope': 'subset', 'row': '12', 'col': '2', 'order': '2', 'col_other': '3,5', 'max_or_min': 'min_to_max', 'value_mentioned': 'no', 'subset': {'col': '3', 'criterion': 'equal', 'value': 'dallas cowboys'}}
{'func': 'eq', 'args': [{'func': 'num_hop', 'args': [{'func': 'nth_argmin', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'opponent', 'dallas cowboys'], 'result': None, 'ind': 0, 'tostr': 'filter_eq { all_rows ; opponent ; dallas cowboys }', 'tointer': 'select the rows whose opponent record fuzzily matches to dallas cowboys .'}, 'date', '2'], 'result': None, 'ind': 1, 'tostr': 'nth_argmin { filter_eq { all_rows ; opponent ; dallas cowboys } ; date ; 2 }'}, 'attendance'], 'result': '54714', 'ind': 2, 'tostr': 'hop { nth_argmin { filter_eq { all_rows ; opponent ; dallas cowboys } ; date ; 2 } ; attendance }'}, '54714'], 'result': True, 'ind': 3, 'tostr': 'eq { hop { nth_argmin { filter_eq { all_rows ; opponent ; dallas cowboys } ; date ; 2 } ; attendance } ; 54714 } = true', 'tointer': 'select the rows whose opponent record fuzzily matches to dallas cowboys . select the row whose date record of these rows is 2nd minimum . the attendance record of this row is 54714 .'}
eq { hop { nth_argmin { filter_eq { all_rows ; opponent ; dallas cowboys } ; date ; 2 } ; attendance } ; 54714 } = true
select the rows whose opponent record fuzzily matches to dallas cowboys . select the row whose date record of these rows is 2nd minimum . the attendance record of this row is 54714 .
4
4
{'eq_3': 3, 'result_4': 4, 'num_hop_2': 2, 'nth_argmin_1': 1, 'filter_str_eq_0': 0, 'all_rows_5': 5, 'opponent_6': 6, 'dallas cowboys_7': 7, 'date_8': 8, '2_9': 9, 'attendance_10': 10, '54714_11': 11}
{'eq_3': 'eq', 'result_4': 'true', 'num_hop_2': 'num_hop', 'nth_argmin_1': 'nth_argmin', 'filter_str_eq_0': 'filter_str_eq', 'all_rows_5': 'all_rows', 'opponent_6': 'opponent', 'dallas cowboys_7': 'dallas cowboys', 'date_8': 'date', '2_9': '2', 'attendance_10': 'attendance', '54714_11': '54714'}
{'eq_3': [4], 'result_4': [], 'num_hop_2': [3], 'nth_argmin_1': [2], 'filter_str_eq_0': [1], 'all_rows_5': [0], 'opponent_6': [0], 'dallas cowboys_7': [0], 'date_8': [1], '2_9': [1], 'attendance_10': [2], '54714_11': [3]}
['week', 'date', 'opponent', 'result', 'attendance']
[['1', 'september 19 , 1965', 'st louis cardinals', 'w 34 - 27', '54260'], ['2', 'september 26 , 1965', 'new york giants', 'l 16 - 14', '57154'], ['3', 'october 3 , 1965', 'cleveland browns', 'l 35 - 17', '60759'], ['4', 'october 10 , 1965', 'dallas cowboys', 'w 35 - 24', '56249'], ['5', 'october 17 , 1965', 'new york giants', 'l 35 - 27', '62815'], ['6', 'october 24 , 1965', 'pittsburgh steelers', 'l 20 - 14', '56515'], ['7', 'october 31 , 1965', 'washington redskins', 'l 23 - 21', '50301'], ['8', 'november 7 , 1965', 'cleveland browns', 'l 38 - 34', '72807'], ['9', 'november 14 , 1965', 'washington redskins', 'w 21 - 14', '60444'], ['10', 'november 21 , 1965', 'baltimore colts', 'l 34 - 24', '60238'], ['11', 'november 28 , 1965', 'st louis cardinals', 'w 28 - 24', '28706'], ['12', 'december 5 , 1965', 'dallas cowboys', 'l 21 - 19', '54714'], ['13', 'december 12 , 1965', 'pittsburgh steelers', 'w 47 - 13', '22002'], ['14', 'december 19 , 1965', 'detroit lions', 'l 35 - 28', '56718']]
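The ordinal logic here first filters to games against the dallas cowboys, then takes the row with the 2nd-smallest date and reads its attendance. A minimal sketch over a trimmed slice of the table above, assuming dates sort chronologically after parsing and that "2nd minimum" is 1-indexed:

```python
from datetime import datetime

# A trimmed (date, opponent, attendance) slice of the table above.
games = [
    ('september 19 , 1965', 'st louis cardinals', '54260'),
    ('october 10 , 1965', 'dallas cowboys', '56249'),
    ('december 5 , 1965', 'dallas cowboys', '54714'),
    ('december 12 , 1965', 'pittsburgh steelers', '22002'),
]

# filter_eq { all_rows ; opponent ; dallas cowboys }
cowboys = [g for g in games if 'dallas cowboys' in g[1]]

# nth_argmin { ... ; date ; 2 }  ->  2nd-earliest date (1-indexed)
ordered = sorted(cowboys, key=lambda g: datetime.strptime(g[0], '%B %d , %Y'))
second_earliest = ordered[1]

# hop { ... ; attendance } ; eq { ... ; 54714 }
assert second_earliest[2] == '54714'
```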
2006 - 07 isu junior grand prix
https://en.wikipedia.org/wiki/2006%E2%80%9307_ISU_Junior_Grand_Prix
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-12392804-3.html.csv
comparative
at the 2006 - 07 isu junior grand prix , south korea won more bronze medals than estonia .
{'row_1': '7', 'row_2': '5', 'col': '5', 'col_other': '2', 'relation': 'greater', 'record_mentioned': 'no', 'diff_result': None}
{'func': 'greater', 'args': [{'func': 'num_hop', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'nation', 'south korea'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose nation record fuzzily matches to south korea .', 'tostr': 'filter_eq { all_rows ; nation ; south korea }'}, 'bronze'], 'result': None, 'ind': 2, 'tostr': 'hop { filter_eq { all_rows ; nation ; south korea } ; bronze }', 'tointer': 'select the rows whose nation record fuzzily matches to south korea . take the bronze record of this row .'}, {'func': 'num_hop', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'nation', 'estonia'], 'result': None, 'ind': 1, 'tointer': 'select the rows whose nation record fuzzily matches to estonia .', 'tostr': 'filter_eq { all_rows ; nation ; estonia }'}, 'bronze'], 'result': None, 'ind': 3, 'tostr': 'hop { filter_eq { all_rows ; nation ; estonia } ; bronze }', 'tointer': 'select the rows whose nation record fuzzily matches to estonia . take the bronze record of this row .'}], 'result': True, 'ind': 4, 'tostr': 'greater { hop { filter_eq { all_rows ; nation ; south korea } ; bronze } ; hop { filter_eq { all_rows ; nation ; estonia } ; bronze } } = true', 'tointer': 'select the rows whose nation record fuzzily matches to south korea . take the bronze record of this row . select the rows whose nation record fuzzily matches to estonia . take the bronze record of this row . the first record is greater than the second record .'}
greater { hop { filter_eq { all_rows ; nation ; south korea } ; bronze } ; hop { filter_eq { all_rows ; nation ; estonia } ; bronze } } = true
select the rows whose nation record fuzzily matches to south korea . take the bronze record of this row . select the rows whose nation record fuzzily matches to estonia . take the bronze record of this row . the first record is greater than the second record .
5
5
{'greater_4': 4, 'result_5': 5, 'num_hop_2': 2, 'filter_str_eq_0': 0, 'all_rows_6': 6, 'nation_7': 7, 'south korea_8': 8, 'bronze_9': 9, 'num_hop_3': 3, 'filter_str_eq_1': 1, 'all_rows_10': 10, 'nation_11': 11, 'estonia_12': 12, 'bronze_13': 13}
{'greater_4': 'greater', 'result_5': 'true', 'num_hop_2': 'num_hop', 'filter_str_eq_0': 'filter_str_eq', 'all_rows_6': 'all_rows', 'nation_7': 'nation', 'south korea_8': 'south korea', 'bronze_9': 'bronze', 'num_hop_3': 'num_hop', 'filter_str_eq_1': 'filter_str_eq', 'all_rows_10': 'all_rows', 'nation_11': 'nation', 'estonia_12': 'estonia', 'bronze_13': 'bronze'}
{'greater_4': [5], 'result_5': [], 'num_hop_2': [4], 'filter_str_eq_0': [2], 'all_rows_6': [0], 'nation_7': [0], 'south korea_8': [0], 'bronze_9': [2], 'num_hop_3': [4], 'filter_str_eq_1': [3], 'all_rows_10': [1], 'nation_11': [1], 'estonia_12': [1], 'bronze_13': [3]}
['rank', 'nation', 'gold', 'silver', 'bronze', 'total']
[['1', 'united states', '24', '12', '8', '44'], ['2', 'russia', '5', '5', '6', '16'], ['3', 'canada', '1', '2', '7', '10'], ['4', 'japan', '1', '4', '3', '8'], ['5', 'estonia', '1', '2', '1', '4'], ['5', 'italy', '0', '3', '1', '4'], ['6', 'south korea', '0', '0', '3', '3'], ['7', 'france', '0', '1', '1', '2'], ['7', 'ukraine', '0', '1', '1', '2'], ['8', 'spain', '0', '1', '0', '1'], ['8', 'china', '0', '1', '0', '1'], ['8', 'czech republic', '0', '0', '1', '1']]
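The comparative logic in this record hops the bronze cell of the south korea row and of the estonia row, then compares the two numbers. A minimal sketch over a trimmed slice of the medal table above; bronze_of is an illustrative helper, not part of the dataset:

```python
# A trimmed (nation, bronze) slice of the table above.
rows = [
    ('estonia', '1'),
    ('italy', '1'),
    ('south korea', '3'),
    ('france', '1'),
]

def bronze_of(nation):
    # hop { filter_eq { all_rows ; nation ; <nation> } ; bronze }
    return int(next(b for n, b in rows if n == nation))

# greater { hop(south korea) ; hop(estonia) }
assert bronze_of('south korea') > bronze_of('estonia')
```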
special cities of japan
https://en.wikipedia.org/wiki/Special_cities_of_Japan
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/1-1585609-2.html.csv
unique
shimizu is the only special city of japan that was merged into another location .
{'scope': 'all', 'row': '2', 'col': '4', 'col_other': '1', 'criterion': 'fuzzily_match', 'value': 'merge into shizuoka', 'subset': None}
{'func': 'and', 'args': [{'func': 'only', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'date of reclassification', 'merge into shizuoka'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose date of reclassification record fuzzily matches to merge into shizuoka .', 'tostr': 'filter_eq { all_rows ; date of reclassification ; merge into shizuoka }'}], 'result': True, 'ind': 1, 'tostr': 'only { filter_eq { all_rows ; date of reclassification ; merge into shizuoka } }', 'tointer': 'select the rows whose date of reclassification record fuzzily matches to merge into shizuoka . there is only one such row in the table .'}, {'func': 'str_eq', 'args': [{'func': 'str_hop', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'date of reclassification', 'merge into shizuoka'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose date of reclassification record fuzzily matches to merge into shizuoka .', 'tostr': 'filter_eq { all_rows ; date of reclassification ; merge into shizuoka }'}, 'name'], 'result': 'shimizu', 'ind': 2, 'tostr': 'hop { filter_eq { all_rows ; date of reclassification ; merge into shizuoka } ; name }'}, 'shimizu'], 'result': True, 'ind': 3, 'tostr': 'eq { hop { filter_eq { all_rows ; date of reclassification ; merge into shizuoka } ; name } ; shimizu }', 'tointer': 'the name record of this unqiue row is shimizu .'}], 'result': True, 'ind': 4, 'tostr': 'and { only { filter_eq { all_rows ; date of reclassification ; merge into shizuoka } } ; eq { hop { filter_eq { all_rows ; date of reclassification ; merge into shizuoka } ; name } ; shimizu } } = true', 'tointer': 'select the rows whose date of reclassification record fuzzily matches to merge into shizuoka . there is only one such row in the table . the name record of this unqiue row is shimizu .'}
and { only { filter_eq { all_rows ; date of reclassification ; merge into shizuoka } } ; eq { hop { filter_eq { all_rows ; date of reclassification ; merge into shizuoka } ; name } ; shimizu } } = true
select the rows whose date of reclassification record fuzzily matches to merge into shizuoka . there is only one such row in the table . the name record of this unqiue row is shimizu .
6
5
{'and_4': 4, 'result_5': 5, 'only_1': 1, 'filter_str_eq_0': 0, 'all_rows_6': 6, 'date of reclassification_7': 7, 'merge into shizuoka_8': 8, 'str_eq_3': 3, 'str_hop_2': 2, 'name_9': 9, 'shimizu_10': 10}
{'and_4': 'and', 'result_5': 'true', 'only_1': 'only', 'filter_str_eq_0': 'filter_str_eq', 'all_rows_6': 'all_rows', 'date of reclassification_7': 'date of reclassification', 'merge into shizuoka_8': 'merge into shizuoka', 'str_eq_3': 'str_eq', 'str_hop_2': 'str_hop', 'name_9': 'name', 'shimizu_10': 'shimizu'}
{'and_4': [5], 'result_5': [], 'only_1': [4], 'filter_str_eq_0': [1, 2], 'all_rows_6': [0], 'date of reclassification_7': [0], 'merge into shizuoka_8': [0], 'str_eq_3': [4], 'str_hop_2': [3], 'name_9': [2], 'shimizu_10': [3]}
['name', 'japanese', 'date of designation', 'date of reclassification', 'region', 'prefecture']
[['hakodate', '函館', '2000 - 11 - 01', '2005 - 10 - 01 ( core city )', 'hokkaido', 'hokkaido'], ['shimizu', '清水', '2001 - 04 - 01', '2003 - 04 - 01 ( merge into shizuoka )', 'chūbu', 'shizuoka'], ['shimonoseki', '下関', '2002 - 04 - 01', '2005 - 02 - 12 ( core city )', 'chūgoku', 'yamaguchi'], ['morioka', '盛岡', '2000 - 11 - 01', '2008 - 04 - 01 ( core city )', 'tōhoku', 'iwate'], ['kurume', '久留米', '2001 - 04 - 01', '2008 - 04 - 01 ( core city )', 'kyushu', 'fukuoka'], ['maebashi', '前橋', '2001 - 04 - 01', '2009 - 04 - 01 ( core city )', 'kantō', 'gunma'], ['ōtsu', '大津', '2001 - 04 - 01', '2009 - 04 - 01 ( core city )', 'kansai', 'shiga'], ['amagasaki', '尼崎', '2001 - 04 - 01', '2009 - 04 - 01 ( core city )', 'kansai', 'hyōgo'], ['takasaki', '高崎', '2001 - 04 - 01', '2011 - 04 - 01 ( core city )', 'kantō', 'gunma']]
con todo
https://en.wikipedia.org/wiki/Con_Todo
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/1-25614153-1.html.csv
superlative
the longest recorded song on con todo is the one titled rey salvador .
{'scope': 'all', 'col_superlative': '8', 'row_superlative': '7', 'value_mentioned': 'no', 'max_or_min': 'max', 'other_col': '2', 'subset': None}
{'func': 'str_eq', 'args': [{'func': 'str_hop', 'args': [{'func': 'argmax', 'args': ['all_rows', 'duration'], 'result': None, 'ind': 0, 'tostr': 'argmax { all_rows ; duration }'}, 'song'], 'result': 'rey salvador', 'ind': 1, 'tostr': 'hop { argmax { all_rows ; duration } ; song }'}, 'rey salvador'], 'result': True, 'ind': 2, 'tostr': 'eq { hop { argmax { all_rows ; duration } ; song } ; rey salvador } = true', 'tointer': 'select the row whose duration record of all rows is maximum . the song record of this row is rey salvador .'}
eq { hop { argmax { all_rows ; duration } ; song } ; rey salvador } = true
select the row whose duration record of all rows is maximum . the song record of this row is rey salvador .
3
3
{'str_eq_2': 2, 'result_3': 3, 'str_hop_1': 1, 'argmax_0': 0, 'all_rows_4': 4, 'duration_5': 5, 'song_6': 6, 'rey salvador_7': 7}
{'str_eq_2': 'str_eq', 'result_3': 'true', 'str_hop_1': 'str_hop', 'argmax_0': 'argmax', 'all_rows_4': 'all_rows', 'duration_5': 'duration', 'song_6': 'song', 'rey salvador_7': 'rey salvador'}
{'str_eq_2': [3], 'result_3': [], 'str_hop_1': [2], 'argmax_0': [1], 'all_rows_4': [0], 'duration_5': [0], 'song_6': [1], 'rey salvador_7': [2]}
['', 'song', 'english translation', 'original album', 'composer', 'worship leader', 'supporting vocal', 'duration']
[['1', 'para exaltarte', 'your name high', 'this is our god', 'joel houston', 'joel houston', 'none', '4:02'], ['2', 'correré', 'run', 'this is our god', 'joel houston', 'toni romero', 'none', '3:22'], ['3', 'hosanna', 'hosanna', 'saviour king', 'brooke fraser', 'darlene zschech', 'none', '6:08'], ['4', 'desde mi interior', 'from the inside out', 'unidos permanecemos', 'joel houston', 'jad gillies', 'none', '6:13'], ['5', 'canción del desierto', 'desert song', 'this is our god', 'brooke fraser', 'annie garratt', 'none', '4:16'], ['6', 'en la cruz', 'the cross', 'mighty to save', 'darlene zschech & reuben morgan', 'darlene zschech', 'none', '6:20'], ['7', 'rey salvador', 'saviour king', 'saviour king', 'marty sampson & mia fields', 'dave ware', 'none', '8:02'], ['8', 'poderoso para salvar', 'mighty to save', 'mighty to save', 'reuben morgan & ben fielding', 'reuben morgan', 'darlene zschech', '5:34'], ['9', 'soy libre', 'break free', 'saviour king', 'joel houston , matt crocker & scott ligertwood', 'matt crocker', 'none', '3:59'], ['10', 'poderoso', 'stronger', 'this is our god', 'reuben morgan & ben fielding', 'jad gillies', 'darlene zschech', '4:37'], ['11', 'sólo cristo', 'none but jesus', 'unidos permanecemos', 'brooke fraser', 'brooke fraser', 'none', '7:07'], ['12', 'es nuestro dios', 'this is our god', 'this is our god', 'reuben morgan', 'reuben morgan & darlene zschech', 'none', '6:10'], ['13', 'eres mi fortaleza', 'you are my strength', 'saviour king', 'reuben morgan', 'reuben morgan', 'none', '4:53']]
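The superlative logic here takes the row with the maximum duration and reads its song title; durations in this table are m:ss strings, so a numeric key is needed. A minimal sketch over a trimmed slice, assuming durations are compared after converting to seconds:

```python
# A trimmed (song, duration) slice of the track list above.
tracks = [
    ('hosanna', '6:08'),
    ('desde mi interior', '6:13'),
    ('en la cruz', '6:20'),
    ('rey salvador', '8:02'),
    ('sólo cristo', '7:07'),
]

def seconds(mm_ss):
    minutes, secs = mm_ss.split(':')
    return int(minutes) * 60 + int(secs)

# argmax { all_rows ; duration }  ->  longest track
longest = max(tracks, key=lambda t: seconds(t[1]))
# hop { ... ; song } ; eq { ... ; rey salvador }
assert longest[0] == 'rey salvador'
```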
thai clubs in the afc champions league
https://en.wikipedia.org/wiki/Thai_clubs_in_the_AFC_Champions_League
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-16593799-8.html.csv
unique
the kashima antlers were the only one of these teams to earn exactly 8 points .
{'scope': 'all', 'row': '17', 'col': '3', 'col_other': '2', 'criterion': 'fuzzily_match', 'value': '8', 'subset': None}
{'func': 'and', 'args': [{'func': 'only', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'score', '8'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose score record fuzzily matches to 8 .', 'tostr': 'filter_eq { all_rows ; score ; 8 }'}], 'result': True, 'ind': 1, 'tostr': 'only { filter_eq { all_rows ; score ; 8 } }', 'tointer': 'select the rows whose score record fuzzily matches to 8 . there is only one such row in the table .'}, {'func': 'str_eq', 'args': [{'func': 'str_hop', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'score', '8'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose score record fuzzily matches to 8 .', 'tostr': 'filter_eq { all_rows ; score ; 8 }'}, 'team 1'], 'result': 'kashima antlers', 'ind': 2, 'tostr': 'hop { filter_eq { all_rows ; score ; 8 } ; team 1 }'}, 'kashima antlers'], 'result': True, 'ind': 3, 'tostr': 'eq { hop { filter_eq { all_rows ; score ; 8 } ; team 1 } ; kashima antlers }', 'tointer': 'the team 1 record of this unqiue row is kashima antlers .'}], 'result': True, 'ind': 4, 'tostr': 'and { only { filter_eq { all_rows ; score ; 8 } } ; eq { hop { filter_eq { all_rows ; score ; 8 } ; team 1 } ; kashima antlers } } = true', 'tointer': 'select the rows whose score record fuzzily matches to 8 . there is only one such row in the table . the team 1 record of this unqiue row is kashima antlers .'}
and { only { filter_eq { all_rows ; score ; 8 } } ; eq { hop { filter_eq { all_rows ; score ; 8 } ; team 1 } ; kashima antlers } } = true
select the rows whose score record fuzzily matches to 8 . there is only one such row in the table . the team 1 record of this unqiue row is kashima antlers .
6
5
{'and_4': 4, 'result_5': 5, 'only_1': 1, 'filter_str_eq_0': 0, 'all_rows_6': 6, 'score_7': 7, '8_8': 8, 'str_eq_3': 3, 'str_hop_2': 2, 'team 1_9': 9, 'kashima antlers_10': 10}
{'and_4': 'and', 'result_5': 'true', 'only_1': 'only', 'filter_str_eq_0': 'filter_str_eq', 'all_rows_6': 'all_rows', 'score_7': 'score', '8_8': '8', 'str_eq_3': 'str_eq', 'str_hop_2': 'str_hop', 'team 1_9': 'team 1', 'kashima antlers_10': 'kashima antlers'}
{'and_4': [5], 'result_5': [], 'only_1': [4], 'filter_str_eq_0': [1, 2], 'all_rows_6': [0], 'score_7': [0], '8_8': [0], 'str_eq_3': [4], 'str_hop_2': [3], 'team 1_9': [2], 'kashima antlers_10': [3]}
['season', 'team 1', 'score', 'team 2', 'venue']
[['2004', 'krung thai bank', '0 - 2', 'dalian shide', 'thai - japanese stadium , thailand'], ['2004', 'psm makassar', '2 - 3', 'krung thai bank', 'mattoangin stadium , indonesia'], ['2004', 'hoang anh gia lai', '0 - 1', 'krung thai bank', 'pleiku stadium , vietnam'], ['2004', 'krung thai bank', '2 - 2', 'hoang anh gia lai', 'suphachalasai stadium , thailand'], ['2004', 'dalian shide', '3 - 1', 'krung thai bank', "dalian people 's stadium , china pr"], ['2004', 'krung thai bank', '1 - 2', 'psm makassar', 'thai - japanese stadium , thailand'], ['2005', 'krung thai bank', '2 - 1', 'pisico bình ðinh', 'n / a'], ['2005', 'krung thai bank', '0 - 2', "busan i ' park", 'n / a'], ['2005', 'krung thai bank', '2 - 1', 'persebaya surabaya', 'n / a'], ['2005', 'krung thai bank', '0 - 1', 'pisico bình ðinh', 'n / a'], ['2005', 'krung thai bank', '0 - 4', "busan i ' park", 'n / a'], ['2005', 'krung thai bank', '1 - 0', 'persebaya surabaya', 'n / a'], ['2008', 'krung thai bank', '1 - 9', 'kashima antlers', 'chulalongkorn university sports stadium , thailand'], ['2008', 'beijing guoan', '4 - 2', 'krung thai bank', 'beijing fengtai stadium , china pr'], ['2008', 'krung thai bank', '9 - 1', 'nam dinh fc', 'chulalongkorn university sports stadium , thailand'], ['2008', 'nam dinh fc', '2 - 2', 'krung thai bank', 'my dinh national stadium , vietnam'], ['2008', 'kashima antlers', '8 - 1', 'krung thai bank', 'kashima soccer stadium , japan'], ['2008', 'krung thai bank', '5 - 3', 'beijing guoan', 'rajamangala stadium , thailand']]
argentine primera división
https://en.wikipedia.org/wiki/Argentine_Primera_Divisi%C3%B3n
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-1211728-1.html.csv
superlative
godoy cruz has the newest first season in the argentine primera división .
{'scope': 'all', 'col_superlative': '5', 'row_superlative': '10', 'value_mentioned': 'no', 'max_or_min': 'max', 'other_col': '1', 'subset': None}
{'func': 'str_eq', 'args': [{'func': 'str_hop', 'args': [{'func': 'argmax', 'args': ['all_rows', 'first season'], 'result': None, 'ind': 0, 'tostr': 'argmax { all_rows ; first season }'}, 'club'], 'result': 'godoy cruz', 'ind': 1, 'tostr': 'hop { argmax { all_rows ; first season } ; club }'}, 'godoy cruz'], 'result': True, 'ind': 2, 'tostr': 'eq { hop { argmax { all_rows ; first season } ; club } ; godoy cruz } = true', 'tointer': 'select the row whose first season record of all rows is maximum . the club record of this row is godoy cruz .'}
eq { hop { argmax { all_rows ; first season } ; club } ; godoy cruz } = true
select the row whose first season record of all rows is maximum . the club record of this row is godoy cruz .
3
3
{'str_eq_2': 2, 'result_3': 3, 'str_hop_1': 1, 'argmax_0': 0, 'all_rows_4': 4, 'first season_5': 5, 'club_6': 6, 'godoy cruz_7': 7}
{'str_eq_2': 'str_eq', 'result_3': 'true', 'str_hop_1': 'str_hop', 'argmax_0': 'argmax', 'all_rows_4': 'all_rows', 'first season_5': 'first season', 'club_6': 'club', 'godoy cruz_7': 'godoy cruz'}
{'str_eq_2': [3], 'result_3': [], 'str_hop_1': [2], 'argmax_0': [1], 'all_rows_4': [0], 'first season_5': [0], 'club_6': [1], 'godoy cruz_7': [2]}
['club', 'district', 'area / province', 'stadium', 'first season', 'last title']
[['all boys', 'floresta', 'buenos aires', 'islas malvinas', '1923', '( none )'], ['argentinos juniors', 'la paternal', 'buenos aires', 'diego a maradona', '1922', '2010 clausura'], ['arsenal', 'sarandí', 'greater buenos aires', 'julio h grondona', '2002 - 03', '2012 clausura'], ['atlético de rafaela', 'rafaela', 'santa fe', 'nuevo monumental', '2003 apertura', '( none )'], ['belgrano ( c )', 'córdoba', 'córdoba', 'gigante de alberdi', '1991 apertura', '( none )'], ['boca juniors', 'la boca', 'buenos aires', 'alberto j armando', '1913', '2011 apertura'], ['colón', 'santa fe', 'santa fe', 'brigadier estanislao lópez', '1966', '( none )'], ['estudiantes ( lp )', 'la plata', 'buenos aires province', 'ciudad de la plata', '1912', '2010 apertura'], ['gimnasia y esgrima ( lp )', 'la plata', 'buenos aires province', 'juan c zerillo', '1916', '1929'], ['godoy cruz', 'mendoza', 'mendoza', 'malvinas argentinas', '2006 apertura', '( none )'], ['lanús', 'lanús', 'greater buenos aires', 'ciudad de lanús', '1920', '2007 apertura'], ["newell 's old boys", 'rosario', 'santa fe', 'marcelo bielsa', '1939', '2013 final'], ['olimpo', 'bahía blanca', 'buenos aires province', 'roberto carminatti', '2002 apertura', '( none )'], ['quilmes', 'quilmes', 'greater buenos aires', 'centenario josé l meiszner', '1893', '1978 metropolitano'], ['racing', 'avellaneda', 'greater buenos aires', 'presidente juan d perón', '1911', '2001 apertura'], ['river plate', 'belgrano', 'buenos aires', 'monumental vespucio liberti', '1909', '2008 clausura'], ['rosario central', 'rosario', 'santa fe', 'gigante de arroyito', '1939', '1986 - 87'], ['san lorenzo', 'boedo', 'buenos aires', 'pedro bidegain', '1915', '2007 clausura'], ['tigre', 'victoria', 'greater buenos aires', 'coliseo de victoria', '1913', '( none )'], ['vélez sarsfield', 'liniers', 'buenos aires', 'josé amalfitani', '1919', '2013 superfinal']]
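In this record the "first season" cells mix plain years with strings such as '2002 - 03' and '2006 apertura', so the superlative needs a numeric key; taking the leading year is an assumption about how these cells are ordered. A minimal sketch over a trimmed slice of the table above:

```python
# A trimmed (club, first season) slice of the table above.
clubs = [
    ('arsenal', '2002 - 03'),
    ('atlético de rafaela', '2003 apertura'),
    ('godoy cruz', '2006 apertura'),
    ('quilmes', '1893'),
    ('boca juniors', '1913'),
]

def first_year(cell):
    # Leading 4-digit year of values like '2006 apertura' or '2002 - 03'.
    return int(cell.split()[0])

# argmax { all_rows ; first season }  ->  newest first season
newest = max(clubs, key=lambda c: first_year(c[1]))
# hop { ... ; club } ; eq { ... ; godoy cruz }
assert newest[0] == 'godoy cruz'
```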
ranked lists of chilean regions
https://en.wikipedia.org/wiki/Ranked_lists_of_Chilean_regions
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/1-25042332-22.html.csv
superlative
the o'higgins chilean region has the highest primary education attainment at 95.41 .
{'scope': 'all', 'col_superlative': '3', 'row_superlative': '8', 'value_mentioned': 'yes', 'max_or_min': 'max', 'other_col': '1', 'subset': None}
{'func': 'and', 'args': [{'func': 'eq', 'args': [{'func': 'max', 'args': ['all_rows', 'primary ( 6 - 13 years )'], 'result': '95.41', 'ind': 0, 'tostr': 'max { all_rows ; primary ( 6 - 13 years ) }', 'tointer': 'the maximum primary ( 6 - 13 years ) record of all rows is 95.41 .'}, '95.41'], 'result': True, 'ind': 1, 'tostr': 'eq { max { all_rows ; primary ( 6 - 13 years ) } ; 95.41 }', 'tointer': 'the maximum primary ( 6 - 13 years ) record of all rows is 95.41 .'}, {'func': 'str_eq', 'args': [{'func': 'str_hop', 'args': [{'func': 'argmax', 'args': ['all_rows', 'primary ( 6 - 13 years )'], 'result': None, 'ind': 2, 'tostr': 'argmax { all_rows ; primary ( 6 - 13 years ) }'}, 'region'], 'result': "o'higgins", 'ind': 3, 'tostr': 'hop { argmax { all_rows ; primary ( 6 - 13 years ) } ; region }'}, "o'higgins"], 'result': True, 'ind': 4, 'tostr': "eq { hop { argmax { all_rows ; primary ( 6 - 13 years ) } ; region } ; o'higgins }", 'tointer': "the region record of the row with superlative primary ( 6 - 13 years ) record is o'higgins ."}], 'result': True, 'ind': 5, 'tostr': "and { eq { max { all_rows ; primary ( 6 - 13 years ) } ; 95.41 } ; eq { hop { argmax { all_rows ; primary ( 6 - 13 years ) } ; region } ; o'higgins } } = true", 'tointer': "the maximum primary ( 6 - 13 years ) record of all rows is 95.41 . the region record of the row with superlative primary ( 6 - 13 years ) record is o'higgins ."}
and { eq { max { all_rows ; primary ( 6 - 13 years ) } ; 95.41 } ; eq { hop { argmax { all_rows ; primary ( 6 - 13 years ) } ; region } ; o'higgins } } = true
the maximum primary ( 6 - 13 years ) record of all rows is 95.41 . the region record of the row with superlative primary ( 6 - 13 years ) record is o'higgins .
6
6
{'and_5': 5, 'result_6': 6, 'eq_1': 1, 'max_0': 0, 'all_rows_7': 7, 'primary (6 - 13 years)_8': 8, '95.41_9': 9, 'str_eq_4': 4, 'str_hop_3': 3, 'argmax_2': 2, 'all_rows_10': 10, 'primary (6 - 13 years)_11': 11, 'region_12': 12, "o'higgins_13": 13}
{'and_5': 'and', 'result_6': 'true', 'eq_1': 'eq', 'max_0': 'max', 'all_rows_7': 'all_rows', 'primary (6 - 13 years)_8': 'primary ( 6 - 13 years )', '95.41_9': '95.41', 'str_eq_4': 'str_eq', 'str_hop_3': 'str_hop', 'argmax_2': 'argmax', 'all_rows_10': 'all_rows', 'primary (6 - 13 years)_11': 'primary ( 6 - 13 years )', 'region_12': 'region', "o'higgins_13": "o'higgins"}
{'and_5': [6], 'result_6': [], 'eq_1': [5], 'max_0': [1], 'all_rows_7': [0], 'primary (6 - 13 years)_8': [0], '95.41_9': [1], 'str_eq_4': [5], 'str_hop_3': [4], 'argmax_2': [3], 'all_rows_10': [2], 'primary (6 - 13 years)_11': [2], 'region_12': [3], "o'higgins_13": [4]}
['region', 'preschool ( 0 - 5 years )', 'primary ( 6 - 13 years )', 'secondary ( 14 - 17 years )', 'tertiary ( 18 - 24 years )']
[['arica and parinacota', '42.92', '91.17', '76.65', '38.67'], ['tarapacá', '47.51', '94.52', '70.82', '28.16'], ['antofagasta', '38.13', '91.90', '70.78', '28.26'], ['atacama', '38.14', '94.13', '73.93', '23.01'], ['coquimbo', '47.43', '93.00', '68.95', '33.89'], ['valparaíso', '50.23', '91.37', '71.63', '42.96'], ['santiago', '43.15', '92.38', '72.91', '35.03'], ["o'higgins", '41.89', '95.41', '63.00', '28.60'], ['maule', '43.38', '93.10', '67.49', '26.31'], ['biobío', '40.76', '93.45', '71.83', '31.62'], ['araucanía', '45.49', '93.40', '73.25', '29.55'], ['los ríos', '38.49', '94.18', '69.83', '33.88'], ['los lagos', '40.42', '92.88', '71.43', '25.78'], ['aisén', '52.28', '94.39', '69.30', '22.42'], ['magallanes', '51.16', '94.40', '72.50', '43.87']]
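Because the claimed value is mentioned, this record's logic is a conjunction: the maximum of the primary-attainment column equals 95.41, and the argmax row's region is o'higgins. A minimal sketch over a trimmed slice of the table above:

```python
# A trimmed (region, primary 6 - 13 attainment) slice of the table above.
regions = [
    ('tarapacá', '94.52'),
    ("o'higgins", '95.41'),
    ('los ríos', '94.18'),
    ('magallanes', '94.40'),
]

values = [float(v) for _, v in regions]

# eq { max { all_rows ; primary ( 6 - 13 years ) } ; 95.41 }
assert max(values) == 95.41
# eq { hop { argmax { ... } ; region } ; o'higgins }
top_region = max(regions, key=lambda r: float(r[1]))[0]
assert top_region == "o'higgins"
```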
united states house of representatives elections , 1942
https://en.wikipedia.org/wiki/United_States_House_of_Representatives_elections%2C_1942
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/1-1342256-18.html.csv
majority
most of the people elected to the louisiana house of representatives in 1942 had been first elected in 1940 .
{'scope': 'all', 'col': '4', 'most_or_all': 'most', 'criterion': 'equal', 'value': '1940', 'subset': None}
{'func': 'most_eq', 'args': ['all_rows', 'first elected', '1940'], 'result': True, 'ind': 0, 'tointer': 'for the first elected records of all rows , most of them are equal to 1940 .', 'tostr': 'most_eq { all_rows ; first elected ; 1940 } = true'}
most_eq { all_rows ; first elected ; 1940 } = true
for the first elected records of all rows , most of them are equal to 1940 .
1
1
{'most_eq_0': 0, 'result_1': 1, 'all_rows_2': 2, 'first elected_3': 3, '1940_4': 4}
{'most_eq_0': 'most_eq', 'result_1': 'true', 'all_rows_2': 'all_rows', 'first elected_3': 'first elected', '1940_4': '1940'}
{'most_eq_0': [1], 'result_1': [], 'all_rows_2': [0], 'first elected_3': [0], '1940_4': [0]}
['district', 'incumbent', 'party', 'first elected', 'result', 'candidates']
[['louisiana 1', 'f edward hebert', 'democratic', '1940', 're - elected', 'f edward hebert ( d ) unopposed'], ['louisiana 2', 'hale boggs', 'democratic', '1940', 'lost renomination democratic hold', 'paul h maloney ( d ) unopposed'], ['louisiana 3', 'james r domengeaux', 'democratic', '1940', 're - elected', 'james r domengeaux ( d ) unopposed'], ['louisiana 4', 'overton brooks', 'democratic', '1936', 're - elected', 'overton brooks ( d ) unopposed'], ['louisiana 5', 'newt v mills', 'democratic', '1936', 'lost renomination democratic hold', 'charles e mckenzie ( d ) unopposed'], ['louisiana 6', 'jared y sanders , jr', 'democratic', '1940', 'lost renomination democratic hold', 'james h morrison ( d ) unopposed'], ['louisiana 7', 'vance plauche', 'democratic', '1940', 'retired democratic hold', 'henry d larcade , jr ( d ) unopposed']]
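The majority logic in this record checks that most "first elected" cells equal 1940. A minimal sketch, treating "most" as a strict majority, which is an assumption about the operator's threshold:

```python
# 'first elected' values copied from the table above.
first_elected = ['1940', '1940', '1940', '1936', '1936', '1940', '1940']

# most_eq { all_rows ; first elected ; 1940 }
matches = sum(1 for v in first_elected if v == '1940')
assert matches > len(first_elected) / 2   # 5 of 7 rows
```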
xxl ( mylène farmer song )
https://en.wikipedia.org/wiki/XXL_%28Myl%C3%A8ne_Farmer_song%29
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-14562754-1.html.csv
comparative
the german radio edit of the mylène farmer song " xxl " is shorter than the album version of the song .
{'row_1': '9', 'row_2': '1', 'col': '2', 'col_other': '1', 'relation': 'less', 'record_mentioned': 'no', 'diff_result': None}
{'func': 'less', 'args': [{'func': 'str_hop', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'version', 'german radio edit'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose version record fuzzily matches to german radio edit .', 'tostr': 'filter_eq { all_rows ; version ; german radio edit }'}, 'length'], 'result': None, 'ind': 2, 'tostr': 'hop { filter_eq { all_rows ; version ; german radio edit } ; length }', 'tointer': 'select the rows whose version record fuzzily matches to german radio edit . take the length record of this row .'}, {'func': 'str_hop', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'version', 'album version'], 'result': None, 'ind': 1, 'tointer': 'select the rows whose version record fuzzily matches to album version .', 'tostr': 'filter_eq { all_rows ; version ; album version }'}, 'length'], 'result': None, 'ind': 3, 'tostr': 'hop { filter_eq { all_rows ; version ; album version } ; length }', 'tointer': 'select the rows whose version record fuzzily matches to album version . take the length record of this row .'}], 'result': True, 'ind': 4, 'tostr': 'less { hop { filter_eq { all_rows ; version ; german radio edit } ; length } ; hop { filter_eq { all_rows ; version ; album version } ; length } } = true', 'tointer': 'select the rows whose version record fuzzily matches to german radio edit . take the length record of this row . select the rows whose version record fuzzily matches to album version . take the length record of this row . the first record is less than the second record .'}
less { hop { filter_eq { all_rows ; version ; german radio edit } ; length } ; hop { filter_eq { all_rows ; version ; album version } ; length } } = true
select the rows whose version record fuzzily matches to german radio edit . take the length record of this row . select the rows whose version record fuzzily matches to album version . take the length record of this row . the first record is less than the second record .
5
5
{'less_4': 4, 'result_5': 5, 'str_hop_2': 2, 'filter_str_eq_0': 0, 'all_rows_6': 6, 'version_7': 7, 'german radio edit_8': 8, 'length_9': 9, 'str_hop_3': 3, 'filter_str_eq_1': 1, 'all_rows_10': 10, 'version_11': 11, 'album version_12': 12, 'length_13': 13}
{'less_4': 'less', 'result_5': 'true', 'str_hop_2': 'str_hop', 'filter_str_eq_0': 'filter_str_eq', 'all_rows_6': 'all_rows', 'version_7': 'version', 'german radio edit_8': 'german radio edit', 'length_9': 'length', 'str_hop_3': 'str_hop', 'filter_str_eq_1': 'filter_str_eq', 'all_rows_10': 'all_rows', 'version_11': 'version', 'album version_12': 'album version', 'length_13': 'length'}
{'less_4': [5], 'result_5': [], 'str_hop_2': [4], 'filter_str_eq_0': [2], 'all_rows_6': [0], 'version_7': [0], 'german radio edit_8': [0], 'length_9': [2], 'str_hop_3': [4], 'filter_str_eq_1': [3], 'all_rows_10': [1], 'version_11': [1], 'album version_12': [1], 'length_13': [3]}
['version', 'length', 'album', 'remixed by', 'year']
[['album version', '4:45', 'anamorphosée , les mots', '-', '1995'], ['single version', '4:23', '-', 'laurent boutonnat', '1995'], ['no voice remix edit', '4:20', '-', 'laurent boutonnat , bertrand chtenet', '1995'], ['extra large remix', '5:02', '-', 'laurent boutonnat , bertrand chtenet', '1995'], ['distorded dance mix', '5:20', '-', 'laurent boutonnat , bertrand chtenet', '1995'], ['new remix edit', '4:25', '-', 'laurent boutonnat , bertrand chtenet', '1995'], ['single dance mix', '4:25', '-', 'laurent boutonnat , bertrand chtenet', '1995'], ['new remix edit ( germany )', '4:43', '-', 'laurent boutonnat , bertrand chtenet', '1995'], ['german radio edit', '3:54', '-', 'laurent boutonnat , bertrand chtenet', '1995'], ['music video', '4:22', 'music videos ii , music videos ii & iii', '-', '1995'], ['uk remix', '9:00', '-', 'richard dekkard', '1996'], ['live version ( recorded in 1996 )', '7:25', 'live à bercy', '-', '1996'], ['jxl remix', '6:06', 'remixes', 'junkie xl', '2003'], ['live version ( recorded in 2006 )', '5:28', "avant que l'ombre", '-', '2006'], ['live version ( recorded in 2009 )', '4:30', 'n degree5 on tour', '-', '2009']]
list of space telescopes
https://en.wikipedia.org/wiki/List_of_space_telescopes
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-15951109-4.html.csv
ordinal
the space telescope with the 2nd most recent launch date was corot .
{'row': '1', 'col': '3', 'order': '2', 'col_other': '1', 'max_or_min': 'max_to_min', 'value_mentioned': 'no', 'scope': 'all', 'subset': None}
{'func': 'str_eq', 'args': [{'func': 'str_hop', 'args': [{'func': 'nth_argmax', 'args': ['all_rows', 'launch date', '2'], 'result': None, 'ind': 0, 'tostr': 'nth_argmax { all_rows ; launch date ; 2 }'}, 'name'], 'result': 'corot', 'ind': 1, 'tostr': 'hop { nth_argmax { all_rows ; launch date ; 2 } ; name }'}, 'corot'], 'result': True, 'ind': 2, 'tostr': 'eq { hop { nth_argmax { all_rows ; launch date ; 2 } ; name } ; corot } = true', 'tointer': 'select the row whose launch date record of all rows is 2nd maximum . the name record of this row is corot .'}
eq { hop { nth_argmax { all_rows ; launch date ; 2 } ; name } ; corot } = true
select the row whose launch date record of all rows is 2nd maximum . the name record of this row is corot .
3
3
{'str_eq_2': 2, 'result_3': 3, 'str_hop_1': 1, 'nth_argmax_0': 0, 'all_rows_4': 4, 'launch date_5': 5, '2_6': 6, 'name_7': 7, 'corot_8': 8}
{'str_eq_2': 'str_eq', 'result_3': 'true', 'str_hop_1': 'str_hop', 'nth_argmax_0': 'nth_argmax', 'all_rows_4': 'all_rows', 'launch date_5': 'launch date', '2_6': '2', 'name_7': 'name', 'corot_8': 'corot'}
{'str_eq_2': [3], 'result_3': [], 'str_hop_1': [2], 'nth_argmax_0': [1], 'all_rows_4': [0], 'launch date_5': [0], '2_6': [0], 'name_7': [1], 'corot_8': [2]}
['name', 'space agency', 'launch date', 'terminated', 'location']
[['corot', 'cnes & esa', '27 december 2006', '2013', 'earth orbit ( 872 - 884 km )'], ['hipparcos', 'esa', '8 august 1989', 'march 1993', 'earth orbit ( 223 - 35632 km )'], ['hubble space telescope', 'nasa', '24 april 1990', '-', 'earth orbit ( 586.47 - 610.44 km )'], ['kepler mission', 'nasa', '6 march 2009', '-', 'earth - trailing heliocentric orbit'], ['most', 'csa', '30 june 2003', '-', 'earth orbit ( 819 - 832 km )'], ['swift gamma ray burst explorer', 'nasa', '20 november 2004', '-', 'earth orbit ( 585 - 604 km )']]
2008 - 09 guildford flames season
https://en.wikipedia.org/wiki/2008%E2%80%9309_Guildford_Flames_season
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-17120964-9.html.csv
count
in the 2008-09 guildford flames season , there were four home games .
{'scope': 'all', 'criterion': 'equal', 'value': 'home', 'result': '4', 'col': '3', 'subset': None}
{'func': 'eq', 'args': [{'func': 'count', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'venue', 'home'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose venue record fuzzily matches to home .', 'tostr': 'filter_eq { all_rows ; venue ; home }'}], 'result': '4', 'ind': 1, 'tostr': 'count { filter_eq { all_rows ; venue ; home } }', 'tointer': 'select the rows whose venue record fuzzily matches to home . the number of such rows is 4 .'}, '4'], 'result': True, 'ind': 2, 'tostr': 'eq { count { filter_eq { all_rows ; venue ; home } } ; 4 } = true', 'tointer': 'select the rows whose venue record fuzzily matches to home . the number of such rows is 4 .'}
eq { count { filter_eq { all_rows ; venue ; home } } ; 4 } = true
select the rows whose venue record fuzzily matches to home . the number of such rows is 4 .
3
3
{'eq_2': 2, 'result_3': 3, 'count_1': 1, 'filter_str_eq_0': 0, 'all_rows_4': 4, 'venue_5': 5, 'home_6': 6, '4_7': 7}
{'eq_2': 'eq', 'result_3': 'true', 'count_1': 'count', 'filter_str_eq_0': 'filter_str_eq', 'all_rows_4': 'all_rows', 'venue_5': 'venue', 'home_6': 'home', '4_7': '4'}
{'eq_2': [3], 'result_3': [], 'count_1': [2], 'filter_str_eq_0': [1], 'all_rows_4': [0], 'venue_5': [0], 'home_6': [0], '4_7': [2]}
['date', 'opponent', 'venue', 'result', 'attendance', 'competition', 'man of the match']
[['1st', 'milton keynes lightning', 'home', 'won 5 - 4', '1336', 'league', 'terry miles'], ['8th', 'bracknell bees', 'away', 'won 4 - 3 ( ot )', 'n / a', 'league', 'ollie bronnimann'], ['14th', 'peterborough phantoms', 'away', 'won 4 - 2', '493', 'league', 'n / a'], ['15th', 'slough jets', 'home', 'lost 3 - 2', '1634', 'league', 'joe watkins'], ['19th', 'slough jets', 'home', 'drawn 1 - 1', '2057', 'cup semi - final ( 1st leg )', 'david savage'], ['21st', 'peterborough phantoms', 'home', 'won 5 - 3', '1551', 'league', 'ben austin'], ['22nd', 'sheffield scimitars', 'away', 'lost 5 - 4', '627', 'league', 'n / a'], ['25th', 'slough jets', 'away', 'won 4 - 2', '702', 'cup semi - final ( 2nd leg )', 'n / a'], ['28th', 'wightlink raiders', 'away', 'lost 3 - 2 ( so )', '582', 'league', 'stuart potts']]
list of essex list a cricket records
https://en.wikipedia.org/wiki/List_of_Essex_List_A_cricket_records
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-11337751-4.html.csv
superlative
graham gooch and ken mcewan ranked 1st in runs scored for a wicket partnership .
{'scope': 'all', 'col_superlative': '2', 'row_superlative': '2', 'value_mentioned': 'no', 'max_or_min': 'max', 'other_col': '3', 'subset': None}
{'func': 'str_eq', 'args': [{'func': 'str_hop', 'args': [{'func': 'argmax', 'args': ['all_rows', 'runs'], 'result': None, 'ind': 0, 'tostr': 'argmax { all_rows ; runs }'}, 'batsmen'], 'result': 'graham gooch ken mcewan', 'ind': 1, 'tostr': 'hop { argmax { all_rows ; runs } ; batsmen }'}, 'graham gooch ken mcewan'], 'result': True, 'ind': 2, 'tostr': 'eq { hop { argmax { all_rows ; runs } ; batsmen } ; graham gooch ken mcewan } = true', 'tointer': 'select the row whose runs record of all rows is maximum . the batsmen record of this row is graham gooch ken mcewan .'}
eq { hop { argmax { all_rows ; runs } ; batsmen } ; graham gooch ken mcewan } = true
select the row whose runs record of all rows is maximum . the batsmen record of this row is graham gooch ken mcewan .
3
3
{'str_eq_2': 2, 'result_3': 3, 'str_hop_1': 1, 'argmax_0': 0, 'all_rows_4': 4, 'runs_5': 5, 'batsmen_6': 6, 'graham gooch ken mcewan_7': 7}
{'str_eq_2': 'str_eq', 'result_3': 'true', 'str_hop_1': 'str_hop', 'argmax_0': 'argmax', 'all_rows_4': 'all_rows', 'runs_5': 'runs', 'batsmen_6': 'batsmen', 'graham gooch ken mcewan_7': 'graham gooch ken mcewan'}
{'str_eq_2': [3], 'result_3': [], 'str_hop_1': [2], 'argmax_0': [1], 'all_rows_4': [0], 'runs_5': [0], 'batsmen_6': [1], 'graham gooch ken mcewan_7': [2]}
['wicket partnership', 'runs', 'batsmen', 'opponents', 'venue', 'season']
[['1st', '269', 'mark pettini jason gallian', 'v surrey', 'the oval', '2008'], ['2nd', '273', 'graham gooch ken mcewan', 'v nottinghamshire', 'nottingham', '1983'], ['3rd', '268', 'graham gooch keith fletcher', 'v sussex', 'hove', '1982'], ['4th', '151', 'ronnie irani paul grayson', 'v northamptonshire', 'northampton', '1999'], ['5th', '190', 'ravi bopara james foster', 'v leicestershire', 'leicester', '2008'], ['6th', '127', 'stuart law robert rollins', 'v hampshire', 'southampton', '1996'], ['7th', '92', 'brian edmeades stuart turner', 'v nottinghamshire', 'chelmsford', '1969'], ['8th', '109', 'ray east neil smith', 'v northamptonshire', 'chelmsford', '1977'], ['9th', '67', 'unknown ray east', 'v gloucestershire', 'chelmsford', '1973'], ['10th', '81', 'stuart turner ray east', 'v yorkshire', 'leeds', '1982']]
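Here the logic form is an argmax over the runs column followed by a hop to the batsmen column. A small sketch under the assumption that runs compare as plain integers; the (wicket partnership, runs, batsmen) triples are copied from the table above:

rows = [
    ('1st', 269, 'mark pettini jason gallian'),
    ('2nd', 273, 'graham gooch ken mcewan'),
    ('3rd', 268, 'graham gooch keith fletcher'),
    ('4th', 151, 'ronnie irani paul grayson'),
    ('5th', 190, 'ravi bopara james foster'),
    ('6th', 127, 'stuart law robert rollins'),
    ('7th', 92, 'brian edmeades stuart turner'),
    ('8th', 109, 'ray east neil smith'),
    ('9th', 67, 'unknown ray east'),
    ('10th', 81, 'stuart turner ray east'),
]

best = max(rows, key=lambda r: r[1])          # argmax over the runs column
assert best[2] == 'graham gooch ken mcewan'   # hop to the batsmen column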
international softball congress
https://en.wikipedia.org/wiki/International_Softball_Congress
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/1-18618672-2.html.csv
count
the nitehawks were the 2nd place team in the international softball congress a total of two times .
{'scope': 'all', 'criterion': 'fuzzily_match', 'value': 'nitehawks', 'result': '2', 'col': '3', 'subset': None}
{'func': 'eq', 'args': [{'func': 'count', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', '2nd place team', 'nitehawks'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose 2nd place team record fuzzily matches to nitehawks .', 'tostr': 'filter_eq { all_rows ; 2nd place team ; nitehawks }'}], 'result': '2', 'ind': 1, 'tostr': 'count { filter_eq { all_rows ; 2nd place team ; nitehawks } }', 'tointer': 'select the rows whose 2nd place team record fuzzily matches to nitehawks . the number of such rows is 2 .'}, '2'], 'result': True, 'ind': 2, 'tostr': 'eq { count { filter_eq { all_rows ; 2nd place team ; nitehawks } } ; 2 } = true', 'tointer': 'select the rows whose 2nd place team record fuzzily matches to nitehawks . the number of such rows is 2 .'}
eq { count { filter_eq { all_rows ; 2nd place team ; nitehawks } } ; 2 } = true
select the rows whose 2nd place team record fuzzily matches to nitehawks . the number of such rows is 2 .
3
3
{'eq_2': 2, 'result_3': 3, 'count_1': 1, 'filter_str_eq_0': 0, 'all_rows_4': 4, '2nd place team_5': 5, 'nitehawks_6': 6, '2_7': 7}
{'eq_2': 'eq', 'result_3': 'true', 'count_1': 'count', 'filter_str_eq_0': 'filter_str_eq', 'all_rows_4': 'all_rows', '2nd place team_5': '2nd place team', 'nitehawks_6': 'nitehawks', '2_7': '2'}
{'eq_2': [3], 'result_3': [], 'count_1': [2], 'filter_str_eq_0': [1], 'all_rows_4': [0], '2nd place team_5': [0], 'nitehawks_6': [0], '2_7': [2]}
['year', '1st place team', '2nd place team', '3rd place team', '4th place team', 'host location']
[['1951', 'hoak packers , fresno , ca', 'nitehawks , long beach , ca', 'robitaille motors , montreal , qc', 'wells motors , greeley , co', 'greeley , co'], ['1952', 'hoak packers , fresno , ca', 'nitehawks , long beach , ca', 'pointers , barbers point , hi', 'wyoming angus , johnstown , co', 'plainview , tx'], ['1953', 'nitehawks , long beach , ca', 'merchants , tampico , il', 'lions , lorenzo , tx', 'hoak packers , fresno , ca', 'selma , ca'], ['1954', 'hoak packers , fresno , ca', 'condors , dinuba , ca', 'nitehawks , long beach , ca', 'lions , lorenzo , tx', 'selma , ca'], ['1955', 'nitehawks , long beach , ca', 'condors , dinuba , ca', 'elites , new bedford , il', 'local 1014 chiefs , gary , in', 'new bedford , il'], ['1956', 'nitehawks , long beach , ca', 'siebren hybrids , geneseo , il', 'elites , new bedford , il', 'national cash register , dayton , oh', 'new bedford , il']]
1982 open championship
https://en.wikipedia.org/wiki/1982_Open_Championship
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-18166348-5.html.csv
aggregation
the average score for players from the united states for the 1982 open championship is 139.5 .
{'scope': 'subset', 'col': '4', 'type': 'average', 'result': '139.5', 'subset': {'col': '3', 'criterion': 'equal', 'value': 'united states'}}
{'func': 'round_eq', 'args': [{'func': 'avg', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'country', 'united states'], 'result': None, 'ind': 0, 'tostr': 'filter_eq { all_rows ; country ; united states }', 'tointer': 'select the rows whose country record fuzzily matches to united states .'}, 'score'], 'result': '139.5', 'ind': 1, 'tostr': 'avg { filter_eq { all_rows ; country ; united states } ; score }'}, '139.5'], 'result': True, 'ind': 2, 'tostr': 'round_eq { avg { filter_eq { all_rows ; country ; united states } ; score } ; 139.5 } = true', 'tointer': 'select the rows whose country record fuzzily matches to united states . the average of the score record of these rows is 139.5 .'}
round_eq { avg { filter_eq { all_rows ; country ; united states } ; score } ; 139.5 } = true
select the rows whose country record fuzzily matches to united states . the average of the score record of these rows is 139.5 .
3
3
{'eq_2': 2, 'result_3': 3, 'avg_1': 1, 'filter_str_eq_0': 0, 'all_rows_4': 4, 'country_5': 5, 'united states_6': 6, 'score_7': 7, '139.5_8': 8}
{'eq_2': 'eq', 'result_3': 'true', 'avg_1': 'avg', 'filter_str_eq_0': 'filter_str_eq', 'all_rows_4': 'all_rows', 'country_5': 'country', 'united states_6': 'united states', 'score_7': 'score', '139.5_8': '139.5'}
{'eq_2': [3], 'result_3': [], 'avg_1': [2], 'filter_str_eq_0': [1], 'all_rows_4': [0], 'country_5': [0], 'united states_6': [0], 'score_7': [1], '139.5_8': [2]}
['place', 'player', 'country', 'score', 'to par']
[['1', 'bobby clampett', 'united states', '67 + 66 = 133', '- 11'], ['2', 'nick price', 'zimbabwe', '69 + 69 = 138', '- 6'], ['t2', 'bernhard langer', 'west germany', '70 + 69 = 139', '- 5'], ['t2', 'des smyth', 'ireland', '70 + 69 = 139', '- 5'], ['t5', 'sandy lyle', 'scotland', '74 + 66 = 140', '- 4'], ['t5', 'tom watson', 'united states', '69 + 71 = 140', '- 4'], ['t7', 'ken brown', 'scotland', '70 = 71 = 141', '- 3'], ['t7', 'peter oosterhuis', 'england', '74 + 67 = 141', '- 3'], ['9', 'tom purtzer', 'united states', '76 + 66 = 142', '- 2'], ['t10', 'josé maria cañizares', 'spain', '71 + 72 = 143', '- 1'], ['t10', 'david graham', 'australia', '73 + 70 = 143', '- 1'], ['t10', 'bill rogers', 'united states', '73 + 70 = 143', '- 1']]
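The aggregation above averages the score column over the united states rows, where a score cell such as '67 + 66 = 133' carries its two-round total after the final '='. A sketch of that computation; the parsing rule is an assumption about how such cells are meant to be read:

rows = [
    ('bobby clampett', 'united states', '67 + 66 = 133'),
    ('nick price', 'zimbabwe', '69 + 69 = 138'),
    ('bernhard langer', 'west germany', '70 + 69 = 139'),
    ('des smyth', 'ireland', '70 + 69 = 139'),
    ('sandy lyle', 'scotland', '74 + 66 = 140'),
    ('tom watson', 'united states', '69 + 71 = 140'),
    ('ken brown', 'scotland', '70 = 71 = 141'),
    ('peter oosterhuis', 'england', '74 + 67 = 141'),
    ('tom purtzer', 'united states', '76 + 66 = 142'),
    ('josé maria cañizares', 'spain', '71 + 72 = 143'),
    ('david graham', 'australia', '73 + 70 = 143'),
    ('bill rogers', 'united states', '73 + 70 = 143'),
]  # (player, country, score) columns from the table above

def score_total(cell):
    # a score cell like '67 + 66 = 133' carries its total after the last '='
    return int(cell.rsplit('=', 1)[1])

us_totals = [score_total(r[2]) for r in rows if 'united states' in r[1]]
assert sum(us_totals) / len(us_totals) == 139.5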
1929 in brazilian football
https://en.wikipedia.org/wiki/1929_in_Brazilian_football
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-15372465-2.html.csv
unique
during the 1929 brazilian football games , antarctica was the only team to score 17 points .
{'scope': 'all', 'row': '9', 'col': '3', 'col_other': '2', 'criterion': 'equal', 'value': '17', 'subset': None}
{'func': 'and', 'args': [{'func': 'only', 'args': [{'func': 'filter_eq', 'args': ['all_rows', 'points', '17'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose points record is equal to 17 .', 'tostr': 'filter_eq { all_rows ; points ; 17 }'}], 'result': True, 'ind': 1, 'tostr': 'only { filter_eq { all_rows ; points ; 17 } }', 'tointer': 'select the rows whose points record is equal to 17 . there is only one such row in the table .'}, {'func': 'str_eq', 'args': [{'func': 'str_hop', 'args': [{'func': 'filter_eq', 'args': ['all_rows', 'points', '17'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose points record is equal to 17 .', 'tostr': 'filter_eq { all_rows ; points ; 17 }'}, 'team'], 'result': 'antártica', 'ind': 2, 'tostr': 'hop { filter_eq { all_rows ; points ; 17 } ; team }'}, 'antártica'], 'result': True, 'ind': 3, 'tostr': 'eq { hop { filter_eq { all_rows ; points ; 17 } ; team } ; antártica }', 'tointer': 'the team record of this unqiue row is antártica .'}], 'result': True, 'ind': 4, 'tostr': 'and { only { filter_eq { all_rows ; points ; 17 } } ; eq { hop { filter_eq { all_rows ; points ; 17 } ; team } ; antártica } } = true', 'tointer': 'select the rows whose points record is equal to 17 . there is only one such row in the table . the team record of this unqiue row is antártica .'}
and { only { filter_eq { all_rows ; points ; 17 } } ; eq { hop { filter_eq { all_rows ; points ; 17 } ; team } ; antártica } } = true
select the rows whose points record is equal to 17 . there is only one such row in the table . the team record of this unique row is antártica .
6
5
{'and_4': 4, 'result_5': 5, 'only_1': 1, 'filter_eq_0': 0, 'all_rows_6': 6, 'points_7': 7, '17_8': 8, 'str_eq_3': 3, 'str_hop_2': 2, 'team_9': 9, 'antártica_10': 10}
{'and_4': 'and', 'result_5': 'true', 'only_1': 'only', 'filter_eq_0': 'filter_eq', 'all_rows_6': 'all_rows', 'points_7': 'points', '17_8': '17', 'str_eq_3': 'str_eq', 'str_hop_2': 'str_hop', 'team_9': 'team', 'antártica_10': 'antártica'}
{'and_4': [5], 'result_5': [], 'only_1': [4], 'filter_eq_0': [1, 2], 'all_rows_6': [0], 'points_7': [0], '17_8': [0], 'str_eq_3': [4], 'str_hop_2': [3], 'team_9': [2], 'antártica_10': [3]}
['position', 'team', 'points', 'played', 'drawn', 'lost', 'against', 'difference']
[['1', 'paulistano', '30', '19', '2', '3', '15', '38'], ['2', 'ponte preta', '26', '20', '2', '6', '36', '19'], ['3', 'sc internacional de são paulo', '23', '18', '5', '4', '23', '11'], ['4', 'independência', '23', '20', '5', '7', '37', '5'], ['5', 'hespanha', '22', '20', '6', '6', '35', '11'], ['6', 'atlético santista', '19', '19', '5', '7', '28', '6'], ['7', 'germnia', '18', '18', '2', '8', '45', '- 7'], ['8', 'portuguesa santista', '18', '21', '4', '10', '40', '- 3'], ['9', 'antártica', '17', '21', '7', '9', '47', '- 17'], ['10', 'aa são bento', '16', '19', '6', '8', '32', '- 12'], ['11', 'aa das palmeiras', '11', '17', '1', '11', '50', '- 22'], ['12', 'ca paulista', '11', '20', '1', '14', '58', '- 29']]
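The unique pattern combines only (exactly one matching row) with a hop to another column. A short sketch against the (team, points) pairs from the table above:

rows = [
    ('paulistano', 30), ('ponte preta', 26), ('sc internacional de são paulo', 23),
    ('independência', 23), ('hespanha', 22), ('atlético santista', 19),
    ('germnia', 18), ('portuguesa santista', 18), ('antártica', 17),
    ('aa são bento', 16), ('aa das palmeiras', 11), ('ca paulista', 11),
]

matches = [r for r in rows if r[1] == 17]   # filter_eq on the points column
assert len(matches) == 1                    # only { ... }: exactly one such row
assert matches[0][0] == 'antártica'         # hop to the team column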
list of formula one driver records
https://en.wikipedia.org/wiki/List_of_Formula_One_driver_records
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-13599687-35.html.csv
count
10 drivers are listed in the formula one driver records .
{'scope': 'all', 'criterion': 'all', 'value': 'n/a', 'result': '10', 'col': '1', 'subset': None}
{'func': 'eq', 'args': [{'func': 'count', 'args': [{'func': 'filter_all', 'args': ['all_rows', 'driver'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose driver record is arbitrary .', 'tostr': 'filter_all { all_rows ; driver }'}], 'result': '10', 'ind': 1, 'tostr': 'count { filter_all { all_rows ; driver } }', 'tointer': 'select the rows whose driver record is arbitrary . the number of such rows is 10 .'}, '10'], 'result': True, 'ind': 2, 'tostr': 'eq { count { filter_all { all_rows ; driver } } ; 10 } = true', 'tointer': 'select the rows whose driver record is arbitrary . the number of such rows is 10 .'}
eq { count { filter_all { all_rows ; driver } } ; 10 } = true
select the rows whose driver record is arbitrary . the number of such rows is 10 .
3
3
{'eq_2': 2, 'result_3': 3, 'count_1': 1, 'filter_all_0': 0, 'all_rows_4': 4, 'driver_5': 5, '10_6': 6}
{'eq_2': 'eq', 'result_3': 'true', 'count_1': 'count', 'filter_all_0': 'filter_all', 'all_rows_4': 'all_rows', 'driver_5': 'driver', '10_6': '10'}
{'eq_2': [3], 'result_3': [], 'count_1': [2], 'filter_all_0': [1], 'all_rows_4': [0], 'driver_5': [0], '10_6': [2]}
['driver', 'front row starts', 'pole positions', 'entries', 'percentage']
[['juan manuel fangio', '48', '29', '52', '92.31 %'], ['jim clark', '48', '33', '73', '65.75 %'], ['ayrton senna', '87', '65', '162', '53.70 %'], ['sebastian vettel', '62', '43', '118', '52.54 %'], ['lewis hamilton', '57', '31', '127', '44.81 %'], ['alain prost', '86', '33', '202', '42.57 %'], ['jackie stewart', '42', '17', '100', '42.00 %'], ['damon hill', '47', '20', '122', '38.52 %'], ['michael schumacher', '116', '68', '308', '37.66 %'], ['nigel mansell', '56', '32', '191', '29.32 %']]
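filter_all keeps every row whose named column holds any value at all, so the count here reduces to the number of rows. A one-assert sketch over the driver column:

drivers = ['juan manuel fangio', 'jim clark', 'ayrton senna', 'sebastian vettel',
           'lewis hamilton', 'alain prost', 'jackie stewart', 'damon hill',
           'michael schumacher', 'nigel mansell']  # driver column from the table above

# filter_all: keep rows with a non-empty driver cell, then count
assert len([d for d in drivers if d]) == 10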
leonardo tavares
https://en.wikipedia.org/wiki/Leonardo_Tavares
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/1-22853654-9.html.csv
count
leonardo tavares had two matches in the year of 2004 .
{'scope': 'all', 'criterion': 'fuzzily_match', 'value': '2004', 'result': '2', 'col': '3', 'subset': None}
{'func': 'eq', 'args': [{'func': 'count', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'date', '2004'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose date record fuzzily matches to 2004 .', 'tostr': 'filter_eq { all_rows ; date ; 2004 }'}], 'result': '2', 'ind': 1, 'tostr': 'count { filter_eq { all_rows ; date ; 2004 } }', 'tointer': 'select the rows whose date record fuzzily matches to 2004 . the number of such rows is 2 .'}, '2'], 'result': True, 'ind': 2, 'tostr': 'eq { count { filter_eq { all_rows ; date ; 2004 } } ; 2 } = true', 'tointer': 'select the rows whose date record fuzzily matches to 2004 . the number of such rows is 2 .'}
eq { count { filter_eq { all_rows ; date ; 2004 } } ; 2 } = true
select the rows whose date record fuzzily matches to 2004 . the number of such rows is 2 .
3
3
{'eq_2': 2, 'result_3': 3, 'count_1': 1, 'filter_str_eq_0': 0, 'all_rows_4': 4, 'date_5': 5, '2004_6': 6, '2_7': 7}
{'eq_2': 'eq', 'result_3': 'true', 'count_1': 'count', 'filter_str_eq_0': 'filter_str_eq', 'all_rows_4': 'all_rows', 'date_5': 'date', '2004_6': '2004', '2_7': '2'}
{'eq_2': [3], 'result_3': [], 'count_1': [2], 'filter_str_eq_0': [1], 'all_rows_4': [0], 'date_5': [0], '2004_6': [0], '2_7': [2]}
['edition', 'round', 'date', 'against', 'surface', 'opponent', 'w - l', 'result']
[['2002 davis cup europe / africa group i', 'qf', '12 - 14 july 2002', 'belarus', 'carpet', 'alexander shvec', 'loss', '2 - 6 , 6 - 7 ( 4 - 7 )'], ['2002 davis cup europe / africa group i', 'gi play - offs', '20 - 22 september 2002', 'italy', 'carpet', 'davide sanguinetti', 'loss', '3 - 6 , 2 - 6 , 1 - 6'], ['2003 davis cup europe / africa group i', '1r', '4 - 6 april 2003', 'monaco', 'clay', 'emmanuel heussner', 'loss', '2 - 6 , 4 - 6'], ['2003 davis cup europe / africa group i', 'qf', '11 - 13 july 2003', 'south africa', 'hard', 'wesley moodie', 'loss', '3 - 6 , 4 - 6 , 6 - 1 , 6 - 7 ( 7 - 9 )'], ['2003 davis cup europe / africa group i', 'qf', '11 - 13 july 2003', 'south africa', 'hard', 'rik de voest', 'loss', '1 - 6 , 4 - 6'], ['2004 davis cup europe / africa group ii', '1r', '9 - 11 april 2004', 'tunisia', 'hard', 'haithem abid', 'win', '7 - 6 ( 7 - 5 ) , 6 - 1 , 4 - 6 , 6 - 2'], ['2004 davis cup europe / africa group ii', 'qf', '16 - 18 july 2004', 'serbia and montenegro', 'clay', 'boris pašanski', 'loss', '6 - 3 , 1 - 6 , 6 - 1 , 1 - 6 , 2 - 6'], ['2005 davis cup europe / africa group ii', '1r', '4 - 6 march 2005', 'estonia', 'carpet', 'alti vahkal', 'win', '3 - 6 , 2 - 6 , 6 - 2 , 7 - 6 ( 7 - 5 ) , 6 - 4'], ['2005 davis cup europe / africa group ii', 'sf', '23 - 25 september 2005', 'slovenia', 'clay', 'marko tkalec', 'win', '7 - 4 , 7 - 5'], ['2008 davis cup europe / africa group ii', '1r', '11 - 13 april 2008', 'tunisia', 'clay', 'slah mbarek', 'win', '6 - 2 , 6 - 4'], ['2008 davis cup europe / africa group ii', 'sf', '19 - 21 september 2008', 'ukraine', 'hard', 'ivan sergeyev', 'loss', '3 - 6 , 2 - 6 , 4 - 6'], ['2009 davis cup europe / africa group ii', 'gii play - offs', '10 - 12 july 2009', 'algeria', 'clay', 'valentin rahmine', 'win', '6 - 1 , 6 - 0'], ['2010 davis cup europe / africa group ii', '1r', '5 - 7 march 2010', 'denmark', 'clay', 'martin pedersen', 'loss', '5 - 7 , 4 - 6']]
list of azerbaijani submissions for the academy award for best foreign language film
https://en.wikipedia.org/wiki/List_of_Azerbaijani_submissions_for_the_Academy_Award_for_Best_Foreign_Language_Film
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-17155980-1.html.csv
superlative
caucasia was the earliest film submitted by azerbaijan for the academy award for best foreign language film .
{'scope': 'all', 'col_superlative': '1', 'row_superlative': '1', 'value_mentioned': 'no', 'max_or_min': 'min', 'other_col': '2', 'subset': None}
{'func': 'str_eq', 'args': [{'func': 'str_hop', 'args': [{'func': 'argmin', 'args': ['all_rows', 'year ( ceremony )'], 'result': None, 'ind': 0, 'tostr': 'argmin { all_rows ; year ( ceremony ) }'}, 'film title used in nomination'], 'result': 'caucasia', 'ind': 1, 'tostr': 'hop { argmin { all_rows ; year ( ceremony ) } ; film title used in nomination }'}, 'caucasia'], 'result': True, 'ind': 2, 'tostr': 'eq { hop { argmin { all_rows ; year ( ceremony ) } ; film title used in nomination } ; caucasia } = true', 'tointer': 'select the row whose year ( ceremony ) record of all rows is minimum . the film title used in nomination record of this row is caucasia .'}
eq { hop { argmin { all_rows ; year ( ceremony ) } ; film title used in nomination } ; caucasia } = true
select the row whose year ( ceremony ) record of all rows is minimum . the film title used in nomination record of this row is caucasia .
3
3
{'str_eq_2': 2, 'result_3': 3, 'str_hop_1': 1, 'argmin_0': 0, 'all_rows_4': 4, 'year (ceremony)_5': 5, 'film title used in nomination_6': 6, 'caucasia_7': 7}
{'str_eq_2': 'str_eq', 'result_3': 'true', 'str_hop_1': 'str_hop', 'argmin_0': 'argmin', 'all_rows_4': 'all_rows', 'year (ceremony)_5': 'year ( ceremony )', 'film title used in nomination_6': 'film title used in nomination', 'caucasia_7': 'caucasia'}
{'str_eq_2': [3], 'result_3': [], 'str_hop_1': [2], 'argmin_0': [1], 'all_rows_4': [0], 'year (ceremony)_5': [0], 'film title used in nomination_6': [1], 'caucasia_7': [2]}
['year ( ceremony )', 'film title used in nomination', 'original title', 'primary language ( s )', 'director', 'result']
[['2007 ( 80th )', 'caucasia', 'kavkaz ( кавказ )', 'russian', 'farid gumbatov', 'not nominated'], ['2008 ( 81st )', 'fortress', 'qala', 'azerbaijani', 'shamil najafzadeh', 'not nominated'], ['2010 ( 83rd )', 'the precinct', 'sahə', 'azerbaijani , russian', 'ilgar safat', 'not nominated'], ['2012 ( 85th )', 'buta', 'buta', 'azerbaijani', 'ilgar najaf', 'not nominated'], ['2013 ( 86th )', 'steppe man', 'çölçü', 'azerbaijani', 'shamil aliyev', 'tbd']]
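This superlative takes the argmin over the year ( ceremony ) column and hops to the film title. A sketch that reads the leading year out of a cell such as '2007 ( 80th )'; that parsing rule is an assumption, not part of the dataset:

rows = [
    ('2007 ( 80th )', 'caucasia'),
    ('2008 ( 81st )', 'fortress'),
    ('2010 ( 83rd )', 'the precinct'),
    ('2012 ( 85th )', 'buta'),
    ('2013 ( 86th )', 'steppe man'),
]  # (year ( ceremony ), film title used in nomination) columns from the table above

year = lambda cell: int(cell.split()[0])        # '2007 ( 80th )' -> 2007
earliest = min(rows, key=lambda r: year(r[0]))  # argmin over year ( ceremony )
assert earliest[1] == 'caucasia'                # hop to the film title column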
2005 open championship
https://en.wikipedia.org/wiki/2005_Open_Championship
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-16225902-5.html.csv
unique
tiger woods was the only person with 11 to par during the 2005 open championship .
{'scope': 'all', 'row': '1', 'col': '5', 'col_other': '2', 'criterion': 'equal', 'value': '11', 'subset': None}
{'func': 'and', 'args': [{'func': 'only', 'args': [{'func': 'filter_eq', 'args': ['all_rows', 'to par', '11'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose to par record is equal to 11 .', 'tostr': 'filter_eq { all_rows ; to par ; 11 }'}], 'result': True, 'ind': 1, 'tostr': 'only { filter_eq { all_rows ; to par ; 11 } }', 'tointer': 'select the rows whose to par record is equal to 11 . there is only one such row in the table .'}, {'func': 'str_eq', 'args': [{'func': 'str_hop', 'args': [{'func': 'filter_eq', 'args': ['all_rows', 'to par', '11'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose to par record is equal to 11 .', 'tostr': 'filter_eq { all_rows ; to par ; 11 }'}, 'player'], 'result': 'tiger woods', 'ind': 2, 'tostr': 'hop { filter_eq { all_rows ; to par ; 11 } ; player }'}, 'tiger woods'], 'result': True, 'ind': 3, 'tostr': 'eq { hop { filter_eq { all_rows ; to par ; 11 } ; player } ; tiger woods }', 'tointer': 'the player record of this unqiue row is tiger woods .'}], 'result': True, 'ind': 4, 'tostr': 'and { only { filter_eq { all_rows ; to par ; 11 } } ; eq { hop { filter_eq { all_rows ; to par ; 11 } ; player } ; tiger woods } } = true', 'tointer': 'select the rows whose to par record is equal to 11 . there is only one such row in the table . the player record of this unqiue row is tiger woods .'}
and { only { filter_eq { all_rows ; to par ; 11 } } ; eq { hop { filter_eq { all_rows ; to par ; 11 } ; player } ; tiger woods } } = true
select the rows whose to par record is equal to 11 . there is only one such row in the table . the player record of this unique row is tiger woods .
6
5
{'and_4': 4, 'result_5': 5, 'only_1': 1, 'filter_eq_0': 0, 'all_rows_6': 6, 'to par_7': 7, '11_8': 8, 'str_eq_3': 3, 'str_hop_2': 2, 'player_9': 9, 'tiger woods_10': 10}
{'and_4': 'and', 'result_5': 'true', 'only_1': 'only', 'filter_eq_0': 'filter_eq', 'all_rows_6': 'all_rows', 'to par_7': 'to par', '11_8': '11', 'str_eq_3': 'str_eq', 'str_hop_2': 'str_hop', 'player_9': 'player', 'tiger woods_10': 'tiger woods'}
{'and_4': [5], 'result_5': [], 'only_1': [4], 'filter_eq_0': [1, 2], 'all_rows_6': [0], 'to par_7': [0], '11_8': [0], 'str_eq_3': [4], 'str_hop_2': [3], 'player_9': [2], 'tiger woods_10': [3]}
['place', 'player', 'country', 'score', 'to par']
[['1', 'tiger woods', 'united states', '66 + 67 = 133', '11'], ['2', 'colin montgomerie', 'scotland', '71 + 66 = 137', '7'], ['t3', 'robert allenby', 'australia', '70 + 68 = 138', '6'], ['t3', 'brad faxon', 'united states', '72 + 66 = 138', '6'], ['t3', 'trevor immelman', 'south africa', '68 + 70 = 138', '6'], ['t3', 'peter lonard', 'australia', '68 + 70 = 138', '6'], ['t3', 'josé maría olazábal', 'spain', '68 + 70 = 138', '6'], ['t3', 'vijay singh', 'fiji', '69 + 69 = 138', '6'], ['t3', 'scott verplank', 'united states', '68 + 70 = 138', '6'], ['t10', 'bart bryant', 'united states', '69 + 70 = 139', '5'], ['t10', 'fred couples', 'united states', '68 + 71 = 139', '5'], ['t10', 'sergio garcía', 'spain', '70 + 69 = 139', '5'], ['t10', 'simon khan', 'england', '69 + 70 = 139', '5'], ['t10', 'bo van pelt', 'united states', '72 + 67 = 139', '5']]
2010 - 11 orlando magic season
https://en.wikipedia.org/wiki/2010%E2%80%9311_Orlando_Magic_season
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/1-27700530-13.html.csv
unique
in the 2010 - 11 orlando magic season , when dwight howard had the high points , the only time he shared the high rebounds with hedo türkoğlu was on march 23rd .
{'scope': 'subset', 'row': '12', 'col': '6', 'col_other': '2', 'criterion': 'equal', 'value': 'hedo türkoğlu', 'subset': {'col': '5', 'criterion': 'fuzzily_match', 'value': 'dwight howard'}}
{'func': 'and', 'args': [{'func': 'only', 'args': [{'func': 'filter_str_eq', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'high points', 'dwight howard'], 'result': None, 'ind': 0, 'tostr': 'filter_eq { all_rows ; high points ; dwight howard }', 'tointer': 'select the rows whose high points record fuzzily matches to dwight howard .'}, 'high rebounds', 'hedo türkoğlu'], 'result': None, 'ind': 1, 'tointer': 'select the rows whose high points record fuzzily matches to dwight howard . among these rows , select the rows whose high rebounds record fuzzily matches to hedo türkoğlu .', 'tostr': 'filter_eq { filter_eq { all_rows ; high points ; dwight howard } ; high rebounds ; hedo türkoğlu }'}], 'result': True, 'ind': 2, 'tostr': 'only { filter_eq { filter_eq { all_rows ; high points ; dwight howard } ; high rebounds ; hedo türkoğlu } }', 'tointer': 'select the rows whose high points record fuzzily matches to dwight howard . among these rows , select the rows whose high rebounds record fuzzily matches to hedo türkoğlu . there is only one such row in the table .'}, {'func': 'str_eq', 'args': [{'func': 'str_hop', 'args': [{'func': 'filter_str_eq', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'high points', 'dwight howard'], 'result': None, 'ind': 0, 'tostr': 'filter_eq { all_rows ; high points ; dwight howard }', 'tointer': 'select the rows whose high points record fuzzily matches to dwight howard .'}, 'high rebounds', 'hedo türkoğlu'], 'result': None, 'ind': 1, 'tointer': 'select the rows whose high points record fuzzily matches to dwight howard . among these rows , select the rows whose high rebounds record fuzzily matches to hedo türkoğlu .', 'tostr': 'filter_eq { filter_eq { all_rows ; high points ; dwight howard } ; high rebounds ; hedo türkoğlu }'}, 'date'], 'result': 'march 23', 'ind': 3, 'tostr': 'hop { filter_eq { filter_eq { all_rows ; high points ; dwight howard } ; high rebounds ; hedo türkoğlu } ; date }'}, 'march 23'], 'result': True, 'ind': 4, 'tostr': 'eq { hop { filter_eq { filter_eq { all_rows ; high points ; dwight howard } ; high rebounds ; hedo türkoğlu } ; date } ; march 23 }', 'tointer': 'the date record of this unqiue row is march 23 .'}], 'result': True, 'ind': 5, 'tostr': 'and { only { filter_eq { filter_eq { all_rows ; high points ; dwight howard } ; high rebounds ; hedo türkoğlu } } ; eq { hop { filter_eq { filter_eq { all_rows ; high points ; dwight howard } ; high rebounds ; hedo türkoğlu } ; date } ; march 23 } } = true', 'tointer': 'select the rows whose high points record fuzzily matches to dwight howard . among these rows , select the rows whose high rebounds record fuzzily matches to hedo türkoğlu . there is only one such row in the table . the date record of this unqiue row is march 23 .'}
and { only { filter_eq { filter_eq { all_rows ; high points ; dwight howard } ; high rebounds ; hedo türkoğlu } } ; eq { hop { filter_eq { filter_eq { all_rows ; high points ; dwight howard } ; high rebounds ; hedo türkoğlu } ; date } ; march 23 } } = true
select the rows whose high points record fuzzily matches to dwight howard . among these rows , select the rows whose high rebounds record fuzzily matches to hedo türkoğlu . there is only one such row in the table . the date record of this unique row is march 23 .
8
6
{'and_5': 5, 'result_6': 6, 'only_2': 2, 'filter_str_eq_1': 1, 'filter_str_eq_0': 0, 'all_rows_7': 7, 'high points_8': 8, 'dwight howard_9': 9, 'high rebounds_10': 10, 'hedo türkoğlu_11': 11, 'str_eq_4': 4, 'str_hop_3': 3, 'date_12': 12, 'march 23_13': 13}
{'and_5': 'and', 'result_6': 'true', 'only_2': 'only', 'filter_str_eq_1': 'filter_str_eq', 'filter_str_eq_0': 'filter_str_eq', 'all_rows_7': 'all_rows', 'high points_8': 'high points', 'dwight howard_9': 'dwight howard', 'high rebounds_10': 'high rebounds', 'hedo türkoğlu_11': 'hedo türkoğlu', 'str_eq_4': 'str_eq', 'str_hop_3': 'str_hop', 'date_12': 'date', 'march 23_13': 'march 23'}
{'and_5': [6], 'result_6': [], 'only_2': [5], 'filter_str_eq_1': [2, 3], 'filter_str_eq_0': [1], 'all_rows_7': [0], 'high points_8': [0], 'dwight howard_9': [0], 'high rebounds_10': [1], 'hedo türkoğlu_11': [1], 'str_eq_4': [5], 'str_hop_3': [4], 'date_12': [3], 'march 23_13': [4]}
['game', 'date', 'team', 'score', 'high points', 'high rebounds', 'high assists', 'location attendance', 'record']
[['61', 'march 1', 'new york', 'w 116 - 110 ( ot )', 'dwight howard ( 30 )', 'dwight howard ( 16 )', 'chris duhon ( 5 )', 'amway center 19131', '39 - 22'], ['62', 'march 3', 'miami', 'w 99 - 96 ( ot )', 'jason richardson ( 24 )', 'dwight howard ( 18 )', 'jameer nelson ( 7 )', 'american airlines arena 19600', '40 - 22'], ['63', 'march 4', 'chicago', 'l 81 - 89 ( ot )', 'dwight howard ( 20 )', 'dwight howard ( 10 )', 'jameer nelson ( 6 )', 'amway center 19207', '40 - 23'], ['64', 'march 7', 'portland', 'l 85 - 89 ( ot )', 'jason richardson ( 22 )', 'earl clark ( 9 )', 'jameer nelson ( 4 )', 'amway center 19001', '40 - 24'], ['65', 'march 9', 'sacramento', 'w 106 - 102 ( ot )', 'jameer nelson ( 26 )', 'dwight howard ( 15 )', 'jameer nelson ( 4 )', 'power balance pavilion 12728', '41 - 24'], ['66', 'march 11', 'golden state', 'l 120 - 123 ( ot )', 'jason richardson ( 30 )', 'dwight howard ( 21 )', 'jameer nelson ( 8 )', 'oracle arena 19596', '41 - 25'], ['67', 'march 13', 'phoenix', 'w 111 - 88 ( ot )', 'dwight howard ( 26 )', 'dwight howard ( 15 )', 'chris duhon ( 4 )', 'us airways center 18091', '42 - 25'], ['68', 'march 14', 'la lakers', 'l 84 - 97 ( ot )', 'dwight howard ( 22 )', 'dwight howard ( 15 )', 'jameer nelson ( 8 )', 'staples center 18997', '42 - 26'], ['69', 'march 16', 'milwaukee', 'w 93 - 89 ( ot )', 'dwight howard ( 31 )', 'dwight howard ( 22 )', 'hedo türkoğlu ( 5 )', 'bradley center 13831', '43 - 26'], ['70', 'march 18', 'denver', 'w 85 - 82 ( ot )', 'dwight howard ( 16 )', 'dwight howard ( 18 )', 'hedo türkoğlu ( 8 )', 'amway center 19113', '44 - 26'], ['71', 'march 21', 'cleveland', 'w 97 - 86 ( ot )', 'dwight howard ( 28 )', 'dwight howard ( 18 )', 'hedo türkoğlu ( 9 )', 'quicken loans arena 19343', '45 - 26'], ['72', 'march 23', 'new york', 'w 111 - 99 ( ot )', 'dwight howard ( 33 )', 'dwight howard , hedo türkoğlu ( 11 )', 'jameer nelson , hedo türkoğlu ( 4 )', 'madison square garden 19763', '46 - 26'], ['73', 'march 25', 'new jersey', 'w 95 - 85 ( ot )', 'dwight howard ( 21 )', 'dwight howard ( 14 )', 'hedo türkoğlu ( 13 )', 'amway center 19087', '47 - 26'], ['74', 'march 28', 'new york', 'l 106 - 113 ( ot )', 'dwight howard ( 29 )', 'dwight howard ( 18 )', 'gilbert arenas ( 5 )', 'madison square garden 19763', '47 - 27']]
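Chained filters narrow the table twice before the only / hop step. A sketch over the (date, high points, high rebounds) columns of the table above, again reading "fuzzily matches" as a substring test:

rows = [
    ('march 1',  'dwight howard ( 30 )',    'dwight howard ( 16 )'),
    ('march 3',  'jason richardson ( 24 )', 'dwight howard ( 18 )'),
    ('march 4',  'dwight howard ( 20 )',    'dwight howard ( 10 )'),
    ('march 7',  'jason richardson ( 22 )', 'earl clark ( 9 )'),
    ('march 9',  'jameer nelson ( 26 )',    'dwight howard ( 15 )'),
    ('march 11', 'jason richardson ( 30 )', 'dwight howard ( 21 )'),
    ('march 13', 'dwight howard ( 26 )',    'dwight howard ( 15 )'),
    ('march 14', 'dwight howard ( 22 )',    'dwight howard ( 15 )'),
    ('march 16', 'dwight howard ( 31 )',    'dwight howard ( 22 )'),
    ('march 18', 'dwight howard ( 16 )',    'dwight howard ( 18 )'),
    ('march 21', 'dwight howard ( 28 )',    'dwight howard ( 18 )'),
    ('march 23', 'dwight howard ( 33 )',    'dwight howard , hedo türkoğlu ( 11 )'),
    ('march 25', 'dwight howard ( 21 )',    'dwight howard ( 14 )'),
    ('march 28', 'dwight howard ( 29 )',    'dwight howard ( 18 )'),
]

howard_games = [r for r in rows if 'dwight howard' in r[1]]    # outer filter on high points
shared = [r for r in howard_games if 'hedo türkoğlu' in r[2]]  # inner filter on high rebounds
assert len(shared) == 1 and shared[0][0] == 'march 23'         # only + hop to date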
gabi rockmeier
https://en.wikipedia.org/wiki/Gabi_Rockmeier
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-13252602-1.html.csv
majority
most of gabi rockmeier 's races were the 4 x 100 meter relay .
{'scope': 'all', 'col': '5', 'most_or_all': 'most', 'criterion': 'equal', 'value': '4x100 m relay', 'subset': None}
{'func': 'most_str_eq', 'args': ['all_rows', 'extra', '4x100 m relay'], 'result': True, 'ind': 0, 'tointer': 'for the extra records of all rows , most of them fuzzily match to 4x100 m relay .', 'tostr': 'most_eq { all_rows ; extra ; 4x100 m relay } = true'}
most_eq { all_rows ; extra ; 4x100 m relay } = true
for the extra records of all rows , most of them fuzzily match to 4x100 m relay .
1
1
{'most_str_eq_0': 0, 'result_1': 1, 'all_rows_2': 2, 'extra_3': 3, '4x100 m relay_4': 4}
{'most_str_eq_0': 'most_str_eq', 'result_1': 'true', 'all_rows_2': 'all_rows', 'extra_3': 'extra', '4x100 m relay_4': '4x100 m relay'}
{'most_str_eq_0': [1], 'result_1': [], 'all_rows_2': [0], 'extra_3': [0], '4x100 m relay_4': [0]}
['year', 'tournament', 'venue', 'result', 'extra']
[['1991', 'european junior championships', 'thessaloniki , greece', '1st', '4x100 m relay'], ['1998', 'european championships', 'munich , germany', '7th', '200 m'], ['1998', 'european championships', 'munich , germany', '2nd', '4x100 m relay'], ['1998', 'world cup', 'johannesburg , south africa', '3rd', '4x100 m relay'], ['2000', 'olympic games', 'sydney , australia', '6th', '4x100 m relay'], ['2001', 'world championships', 'edmonton , canada', '1st', '4x100 m relay'], ['2002', 'european indoor championships', 'vienna , austria', '3rd', '200 m'], ['2002', 'european championships', 'munich , germany', '5th', '200 m'], ['2002', 'european championships', 'munich , germany', '2nd', '4x100 m relay'], ['2002', 'world cup', 'madrid , spain', '8th', '200 m'], ['2002', 'world cup', 'madrid , spain', '5th', '4x100 m relay']]
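most_eq asserts that one value accounts for the majority of a column. A sketch that treats "most" as more than half of the rows, which is one plausible reading and happens to hold here (7 of 11):

extras = ['4x100 m relay', '200 m', '4x100 m relay', '4x100 m relay', '4x100 m relay',
          '4x100 m relay', '200 m', '200 m', '4x100 m relay', '200 m', '4x100 m relay']
# extra column from the table above, in row order

relay = [e for e in extras if '4x100 m relay' in e]
assert len(relay) > len(extras) / 2  # 'most' read as a strict majority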
1939 vfl season
https://en.wikipedia.org/wiki/1939_VFL_season
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-10806852-13.html.csv
aggregation
the average crowd attendance for games in the 1939 vfl season was 14208 .
{'scope': 'all', 'col': '6', 'type': 'average', 'result': '14208', 'subset': None}
{'func': 'round_eq', 'args': [{'func': 'avg', 'args': ['all_rows', 'crowd'], 'result': '14208', 'ind': 0, 'tostr': 'avg { all_rows ; crowd }'}, '14208'], 'result': True, 'ind': 1, 'tostr': 'round_eq { avg { all_rows ; crowd } ; 14208 } = true', 'tointer': 'the average of the crowd record of all rows is 14208 .'}
round_eq { avg { all_rows ; crowd } ; 14208 } = true
the average of the crowd record of all rows is 14208 .
2
2
{'eq_1': 1, 'result_2': 2, 'avg_0': 0, 'all_rows_3': 3, 'crowd_4': 4, '14208_5': 5}
{'eq_1': 'eq', 'result_2': 'true', 'avg_0': 'avg', 'all_rows_3': 'all_rows', 'crowd_4': 'crowd', '14208_5': '14208'}
{'eq_1': [2], 'result_2': [], 'avg_0': [1], 'all_rows_3': [0], 'crowd_4': [0], '14208_5': [1]}
['home team', 'home team score', 'away team', 'away team score', 'venue', 'crowd', 'date']
[['footscray', '7.7 ( 49 )', 'richmond', '13.25 ( 103 )', 'western oval', '15000', '15 july 1939'], ['collingwood', '19.11 ( 125 )', 'south melbourne', '8.13 ( 61 )', 'victoria park', '10500', '15 july 1939'], ['carlton', '14.14 ( 98 )', 'geelong', '12.5 ( 77 )', 'princes park', '19000', '15 july 1939'], ['north melbourne', '18.11 ( 119 )', 'hawthorn', '11.16 ( 82 )', 'arden street oval', '8000', '15 july 1939'], ['st kilda', '16.19 ( 115 )', 'fitzroy', '13.6 ( 84 )', 'junction oval', '16500', '15 july 1939'], ['melbourne', '7.18 ( 60 )', 'essendon', '10.17 ( 77 )', 'mcg', '16247', '15 july 1939']]
northeast delta dental international
https://en.wikipedia.org/wiki/Northeast_Delta_Dental_International
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-15315276-1.html.csv
majority
the majority of northeast delta dental international tournaments are located in beaver meadow golf course .
{'scope': 'all', 'col': '6', 'most_or_all': 'most', 'criterion': 'equal', 'value': 'beaver meadow golf course', 'subset': None}
{'func': 'most_str_eq', 'args': ['all_rows', 'tournament location', 'beaver meadow golf course'], 'result': True, 'ind': 0, 'tointer': 'for the tournament location records of all rows , most of them fuzzily match to beaver meadow golf course .', 'tostr': 'most_eq { all_rows ; tournament location ; beaver meadow golf course } = true'}
most_eq { all_rows ; tournament location ; beaver meadow golf course } = true
for the tournament location records of all rows , most of them fuzzily match to beaver meadow golf course .
1
1
{'most_str_eq_0': 0, 'result_1': 1, 'all_rows_2': 2, 'tournament location_3': 3, 'beaver meadow golf course_4': 4}
{'most_str_eq_0': 'most_str_eq', 'result_1': 'true', 'all_rows_2': 'all_rows', 'tournament location_3': 'tournament location', 'beaver meadow golf course_4': 'beaver meadow golf course'}
{'most_str_eq_0': [1], 'result_1': [], 'all_rows_2': [0], 'tournament location_3': [0], 'beaver meadow golf course_4': [0]}
['year', 'dates', 'champion', 'country', 'score', 'tournament location', 'purse', "winner 's share"]
[['2013', 'jul 19 - 21', 'pk kongkraphan', 'thailand', '207 ( 9 )', 'beaver meadow golf course', '100000', '15000'], ['2012', 'jul 20 - 22', 'jenny gleason', 'united states', '211 ( 5 )', 'beaver meadow golf course', '100000', '15000'], ['2011', 'jul 22 - 24', 'jessica shepley', 'canada', '203 ( 13 )', 'beaver meadow golf course', '100000', '14000'], ['2010', 'jul 19 - 25', 'jenny shin', 'united states', '205 ( 11 )', 'beaver meadow golf course', '110000', '15400'], ['2009', 'jul 24 - 26', 'misun cho', 'south korea', '207 ( 9 )', 'beaver meadow golf course', '90000', '12600'], ['2008', 'jul 25 - 27', 'mo martin', 'united states', '204 ( 12 )', 'beaver meadow golf course', '80000', '11200'], ['2007', 'aug 3 - 5', 'ji min jeong', 'south korea', '209 ( 7 )', 'beaver meadow golf course', '75000', '10500'], ['2006', 'aug 4 - 6', 'charlotte mayorkas', 'united states', '207 ( 9 )', 'beaver meadow golf course', '70000', '9800'], ['2005', 'jul 22 - 24', 'kyeong bae', 'south korea', '209 ( 7 )', 'beaver meadow golf course', '65000', '9100'], ['2004', 'jul 16 - 18', 'erica blasberg', 'united states', '201 ( 15 )', 'canterbury woods country club', '65000', '9100']]
1961 houston oilers season
https://en.wikipedia.org/wiki/1961_Houston_Oilers_season
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-15991313-3.html.csv
majority
most of the games in the 1961 oilers season had an attendance of over 20000 .
{'scope': 'all', 'col': '5', 'most_or_all': 'most', 'criterion': 'greater_than', 'value': '20000', 'subset': None}
{'func': 'most_greater', 'args': ['all_rows', 'attendance', '20000'], 'result': True, 'ind': 0, 'tointer': 'for the attendance records of all rows , most of them are greater than 20000 .', 'tostr': 'most_greater { all_rows ; attendance ; 20000 } = true'}
most_greater { all_rows ; attendance ; 20000 } = true
for the attendance records of all rows , most of them are greater than 20000 .
1
1
{'most_greater_0': 0, 'result_1': 1, 'all_rows_2': 2, 'attendance_3': 3, '20000_4': 4}
{'most_greater_0': 'most_greater', 'result_1': 'true', 'all_rows_2': 'all_rows', 'attendance_3': 'attendance', '20000_4': '20000'}
{'most_greater_0': [1], 'result_1': [], 'all_rows_2': [0], 'attendance_3': [0], '20000_4': [0]}
['week', 'date', 'opponent', 'result', 'attendance']
[['1', 'september 9 , 1961', 'oakland raiders', 'w 55 - 0', '16231'], ['3', 'september 24 , 1961', 'san diego chargers', 'l 34 - 24', '29210'], ['4', 'october 1 , 1961', 'dallas texans', 'l 26 - 21', '28000'], ['5', 'october 8 , 1961', 'buffalo bills', 'l 22 - 12', '22761'], ['6', 'october 13 , 1961', 'boston patriots', 't 31 - 31', '15070'], ['7', 'october 22 , 1961', 'dallas texans', 'w 38 - 7', '21237'], ['8', 'october 29 , 1961', 'buffalo bills', 'w 28 - 16', '23228'], ['9', 'november 5 , 1961', 'denver broncos', 'w 55 - 14', '11564'], ['10', 'november 12 , 1961', 'boston patriots', 'w 27 - 15', '35649'], ['11', 'november 19 , 1961', 'new york titans', 'w 49 - 13', '33428'], ['12', 'november 26 , 1961', 'denver broncos', 'w 45 - 14', '27864'], ['13', 'december 3 , 1961', 'san diego chargers', 'w 33 - 13', '37845'], ['14', 'december 10 , 1961', 'new york titans', 'w 48 - 21', '9462'], ['15', 'december 17 , 1961', 'oakland raiders', 'w 47 - 16', '4821']]
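most_greater is the numeric counterpart: a majority of the attendance values must exceed the threshold. A sketch under the same more-than-half reading of "most" (9 of 14 here):

attendance = [16231, 29210, 28000, 22761, 15070, 21237, 23228,
              11564, 35649, 33428, 27864, 37845, 9462, 4821]  # attendance column, in row order

over = [a for a in attendance if a > 20000]
assert len(over) > len(attendance) / 2  # 'most' read as a strict majority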
list of generator rex episodes
https://en.wikipedia.org/wiki/List_of_Generator_Rex_episodes
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/1-26982362-2.html.csv
count
sam montes directed two of the episodes of generator rex that were written by man of action .
{'scope': 'subset', 'criterion': 'equal', 'value': 'sam montes', 'result': '2', 'col': '3', 'subset': {'col': '4', 'criterion': 'equal', 'value': 'man of action'}}
{'func': 'eq', 'args': [{'func': 'count', 'args': [{'func': 'filter_str_eq', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'written by', 'man of action'], 'result': None, 'ind': 0, 'tostr': 'filter_eq { all_rows ; written by ; man of action }', 'tointer': 'select the rows whose written by record fuzzily matches to man of action .'}, 'directed by', 'sam montes'], 'result': None, 'ind': 1, 'tointer': 'select the rows whose written by record fuzzily matches to man of action . among these rows , select the rows whose directed by record fuzzily matches to sam montes .', 'tostr': 'filter_eq { filter_eq { all_rows ; written by ; man of action } ; directed by ; sam montes }'}], 'result': '2', 'ind': 2, 'tostr': 'count { filter_eq { filter_eq { all_rows ; written by ; man of action } ; directed by ; sam montes } }', 'tointer': 'select the rows whose written by record fuzzily matches to man of action . among these rows , select the rows whose directed by record fuzzily matches to sam montes . the number of such rows is 2 .'}, '2'], 'result': True, 'ind': 3, 'tostr': 'eq { count { filter_eq { filter_eq { all_rows ; written by ; man of action } ; directed by ; sam montes } } ; 2 } = true', 'tointer': 'select the rows whose written by record fuzzily matches to man of action . among these rows , select the rows whose directed by record fuzzily matches to sam montes . the number of such rows is 2 .'}
eq { count { filter_eq { filter_eq { all_rows ; written by ; man of action } ; directed by ; sam montes } } ; 2 } = true
select the rows whose written by record fuzzily matches to man of action . among these rows , select the rows whose directed by record fuzzily matches to sam montes . the number of such rows is 2 .
4
4
{'eq_3': 3, 'result_4': 4, 'count_2': 2, 'filter_str_eq_1': 1, 'filter_str_eq_0': 0, 'all_rows_5': 5, 'written by_6': 6, 'man of action_7': 7, 'directed by_8': 8, 'sam montes_9': 9, '2_10': 10}
{'eq_3': 'eq', 'result_4': 'true', 'count_2': 'count', 'filter_str_eq_1': 'filter_str_eq', 'filter_str_eq_0': 'filter_str_eq', 'all_rows_5': 'all_rows', 'written by_6': 'written by', 'man of action_7': 'man of action', 'directed by_8': 'directed by', 'sam montes_9': 'sam montes', '2_10': '2'}
{'eq_3': [4], 'result_4': [], 'count_2': [3], 'filter_str_eq_1': [2], 'filter_str_eq_0': [1], 'all_rows_5': [0], 'written by_6': [0], 'man of action_7': [0], 'directed by_8': [1], 'sam montes_9': [1], '2_10': [3]}
['no in series', 'title', 'directed by', 'written by', 'original airdate', 'production code']
[['1', 'the day that everything changed', 'sam montes', 'man of action', 'april 23 , 2010', '693 - 001'], ['2', 'string theory', 'rick morales', 'man of action', 'april 30 , 2010', '693 - 002'], ['3', 'beyond the sea', 'chris graham', 'man of action', 'may 7 , 2010', '693 - 003'], ['4', 'lockdown', 'sam montes', 'scott sonneborn', 'may 14 , 2010', '693 - 004'], ['5', 'the architect', 'rick morales', 'amy wolfram', 'may 21 , 2010', '693 - 005'], ['6', 'frostbite', 'chris graham', 'marty isenberg', 'may 28 , 2010', '693 - 006'], ['7', 'leader of the pack', 'sam montes', 'alexx van dyne', 'june 4 , 2010', '693 - 007'], ['8', 'breach', 'chris graham', 'adam beechen', 'june 11 , 2010', '693 - 009'], ['9', 'dark passage', 'sam montes', 'marsha griffin', 'june 18 , 2010', '693 - 010'], ['10', 'the forgotten', 'rick morales', 'paul giacoppo', 'september 17 , 2010', '693 - 011'], ['11', 'operation : wingman', 'chris graham', 'eugene son', 'september 24 , 2010', '693 - 012'], ['12', 'rabble', 'sam montes', 'rob hoegee', 'october 1 , 2010', '693 - 013'], ['13', 'the hunter', 'rick morales', 'michael ryan', 'october 8 , 2010', '693 - 008'], ['14', 'gravity', 'rick morales', 'andrew robinson', 'october 15 , 2010', '693 - 014'], ['15', 'what lies beneath', 'chris graham', 'marsha griffin', 'october 22 , 2010', '693 - 015'], ['16', 'the swarm', 'sam montes', 'paul giacoppo', 'october 29 , 2010', '693 - 016'], ['17', 'basic', 'rick morales', 'scott sonneborn', 'november 5 , 2010', '693 - 017'], ['18', 'plague', 'chris graham', 'tad stones', 'november 12 , 2010', '693 - 018'], ['19', 'promises , promises', 'sam montes', 'man of action', 'november 19 , 2010', '693 - 019'], ['20', 'badlands', 'rick morales', 'eugene son', 'december 3 , 2010', '693 - 021']]
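The count here sits on top of two chained filters. A sketch over the (directed by, written by) pairs from the table above, with the substring reading of "fuzzily matches" as before:

pairs = [  # (directed by, written by) columns from the table above, in row order
    ('sam montes', 'man of action'), ('rick morales', 'man of action'),
    ('chris graham', 'man of action'), ('sam montes', 'scott sonneborn'),
    ('rick morales', 'amy wolfram'), ('chris graham', 'marty isenberg'),
    ('sam montes', 'alexx van dyne'), ('chris graham', 'adam beechen'),
    ('sam montes', 'marsha griffin'), ('rick morales', 'paul giacoppo'),
    ('chris graham', 'eugene son'), ('sam montes', 'rob hoegee'),
    ('rick morales', 'michael ryan'), ('rick morales', 'andrew robinson'),
    ('chris graham', 'marsha griffin'), ('sam montes', 'paul giacoppo'),
    ('rick morales', 'scott sonneborn'), ('chris graham', 'tad stones'),
    ('sam montes', 'man of action'), ('rick morales', 'eugene son'),
]

man_of_action = [p for p in pairs if 'man of action' in p[1]]  # outer filter on written by
montes = [p for p in man_of_action if 'sam montes' in p[0]]    # inner filter on directed by
assert len(montes) == 2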
2006 - 07 charlotte bobcats season
https://en.wikipedia.org/wiki/2006%E2%80%9307_Charlotte_Bobcats_season
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/1-19169116-8.html.csv
aggregation
the total points scored by powe for the charlotte bobcats in games where he was high scorer in the 2006 - 07 season was 39 .
{'scope': 'subset', 'col': '5', 'type': 'sum', 'result': '39', 'subset': {'col': '5', 'criterion': 'equal', 'value': 'powe'}}
{'func': 'round_eq', 'args': [{'func': 'sum', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'high points', 'powe'], 'result': None, 'ind': 0, 'tostr': 'filter_eq { all_rows ; high points ; powe }', 'tointer': 'select the rows whose high points record fuzzily matches to powe .'}, 'high points'], 'result': '39', 'ind': 1, 'tostr': 'sum { filter_eq { all_rows ; high points ; powe } ; high points }'}, '39'], 'result': True, 'ind': 2, 'tostr': 'round_eq { sum { filter_eq { all_rows ; high points ; powe } ; high points } ; 39 } = true', 'tointer': 'select the rows whose high points record fuzzily matches to powe . the sum of the high points record of these rows is 39 .'}
round_eq { sum { filter_eq { all_rows ; high points ; powe } ; high points } ; 39 } = true
select the rows whose high points record fuzzily matches to powe . the sum of the high points record of these rows is 39 .
3
3
{'eq_2': 2, 'result_3': 3, 'sum_1': 1, 'filter_str_eq_0': 0, 'all_rows_4': 4, 'high points_5': 5, 'powe_6': 6, 'high points_7': 7, '39_8': 8}
{'eq_2': 'eq', 'result_3': 'true', 'sum_1': 'sum', 'filter_str_eq_0': 'filter_str_eq', 'all_rows_4': 'all_rows', 'high points_5': 'high points', 'powe_6': 'powe', 'high points_7': 'high points', '39_8': '39'}
{'eq_2': [3], 'result_3': [], 'sum_1': [2], 'filter_str_eq_0': [1], 'all_rows_4': [0], 'high points_5': [0], 'powe_6': [0], 'high points_7': [1], '39_8': [2]}
['game', 'date', 'opponent', 'score', 'high points', 'high rebounds', 'high assists', 'location / attendance', 'record']
[['73', 'april 1', 'cleveland', '88 - 62', 'powe ( 17 )', 'powe ( 13 )', 'garnett ( 5 )', 'td banknorth garden 18624', '23 - 50'], ['74', 'april 4', 'milwaukee', '106 - 92', 'allen ( 22 )', 'perkins ( 9 )', 'rondo ( 10 )', 'united center 22225', '23 - 51'], ['75', 'april 6', 'miami', '92 - 77', 'garnett ( 20 )', 'garnett ( 11 )', 'rondo ( 6 )', 'td banknorth garden 18624', '23 - 52'], ['76', 'april 7', 'indiana', '101 - 78', 'powe ( 22 )', 'powe ( 9 )', 'rondo ( 5 )', 'charlotte bobcats arena 19403', '23 - 53'], ['77', 'april 10', 'atlanta', '107 - 104 ( ot )', 'garnett ( 21 )', 'perkins ( 10 )', 'pierce ( 4 )', 'bradley center 15921', '23 - 54'], ['78', 'april 11', 'philadelphia', '95 - 109', 'pierce ( 28 )', 'garnett ( 14 )', 'pierce , rondo ( 7 )', 'td banknorth garden 20173', '23 - 55'], ['79', 'april 13', 'milwaukee', '102 - 86', 'rondo ( 16 )', 'garnett , posey ( 8 )', 'rondo ( 10 )', 'td banknorth garden 18624', '23 - 56'], ['80', 'april 15', 'orlando', '99 - 89', 'garnett ( 24 )', 'davis ( 10 )', 'cassell , rondo ( 5 )', 'philips arena 20098', '23 - 57'], ['81', 'april 16', 'miami', '99 - 93', 'rondo ( 23 )', 'rondo ( 10 )', 'rondo ( 5 )', 'madison square garden 19763', '24 - 57']]
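The sum aggregation has to pull a number out of a cell such as 'powe ( 17 )' before adding. A sketch using a simple digit regex for that extraction (an assumption about how such cells are read):

import re

high_points = ['powe ( 17 )', 'allen ( 22 )', 'garnett ( 20 )', 'powe ( 22 )',
               'garnett ( 21 )', 'pierce ( 28 )', 'rondo ( 16 )',
               'garnett ( 24 )', 'rondo ( 23 )']  # high points column, in row order

def points(cell):
    # pull the number out of a cell like 'powe ( 17 )'
    return int(re.search(r'\d+', cell).group())

assert sum(points(c) for c in high_points if 'powe' in c) == 39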
orlando magic all - time roster
https://en.wikipedia.org/wiki/Orlando_Magic_all-time_roster
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/1-15621965-16.html.csv
count
according to the orlando magic all - time roster , 3 forwards started their time with the orlando magic in 2000 or later .
{'scope': 'subset', 'criterion': 'greater_than_eq', 'value': '2000', 'result': '3', 'col': '5', 'subset': {'col': '4', 'criterion': 'equal', 'value': 'forward'}}
{'func': 'eq', 'args': [{'func': 'count', 'args': [{'func': 'filter_greater_eq', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'position', 'forward'], 'result': None, 'ind': 0, 'tostr': 'filter_eq { all_rows ; position ; forward }', 'tointer': 'select the rows whose position record fuzzily matches to forward .'}, 'years in orlando', '2000'], 'result': None, 'ind': 1, 'tointer': 'select the rows whose position record fuzzily matches to forward . among these rows , select the rows whose years in orlando record is greater than or equal to 2000 .', 'tostr': 'filter_greater_eq { filter_eq { all_rows ; position ; forward } ; years in orlando ; 2000 }'}], 'result': '3', 'ind': 2, 'tostr': 'count { filter_greater_eq { filter_eq { all_rows ; position ; forward } ; years in orlando ; 2000 } }', 'tointer': 'select the rows whose position record fuzzily matches to forward . among these rows , select the rows whose years in orlando record is greater than or equal to 2000 . the number of such rows is 3 .'}, '3'], 'result': True, 'ind': 3, 'tostr': 'eq { count { filter_greater_eq { filter_eq { all_rows ; position ; forward } ; years in orlando ; 2000 } } ; 3 } = true', 'tointer': 'select the rows whose position record fuzzily matches to forward . among these rows , select the rows whose years in orlando record is greater than or equal to 2000 . the number of such rows is 3 .'}
eq { count { filter_greater_eq { filter_eq { all_rows ; position ; forward } ; years in orlando ; 2000 } } ; 3 } = true
select the rows whose position record fuzzily matches to forward . among these rows , select the rows whose years in orlando record is greater than or equal to 2000 . the number of such rows is 3 .
4
4
{'eq_3': 3, 'result_4': 4, 'count_2': 2, 'filter_greater_eq_1': 1, 'filter_str_eq_0': 0, 'all_rows_5': 5, 'position_6': 6, 'forward_7': 7, 'years in orlando_8': 8, '2000_9': 9, '3_10': 10}
{'eq_3': 'eq', 'result_4': 'true', 'count_2': 'count', 'filter_greater_eq_1': 'filter_greater_eq', 'filter_str_eq_0': 'filter_str_eq', 'all_rows_5': 'all_rows', 'position_6': 'position', 'forward_7': 'forward', 'years in orlando_8': 'years in orlando', '2000_9': '2000', '3_10': '3'}
{'eq_3': [4], 'result_4': [], 'count_2': [3], 'filter_greater_eq_1': [2], 'filter_str_eq_0': [1], 'all_rows_5': [0], 'position_6': [0], 'forward_7': [0], 'years in orlando_8': [1], '2000_9': [1], '3_10': [3]}
['player', 'no', 'nationality', 'position', 'years in orlando', 'school / club team']
[['j j redick', '7', 'united states', 'guard', '2006 - 2013', 'duke'], ['don reid', '52', 'united states', 'forward', '2000 - 2002', 'georgetown'], ['jerry reynolds', '35', 'united states', 'guard - forward', '1989 - 1992', 'louisiana state'], ['jason richardson', '23', 'united states', 'guard', '2010 - 2012', 'michigan state'], ['jeremy richardson', '32', 'united states', 'forward', '2008 - 2009', 'delta state'], ['quentin richardson', '5', 'united states', 'forward', '2010 - 2012', 'depaul'], ['stanley roberts', '53', 'united states', 'center', '1991 - 1992', 'louisiana state'], ['james robinson', '26', 'united states', 'guard', '2000 - 2001', 'alabama'], ["wayne ' tree ' rollins", '30', 'united states', 'center', '1993 - 1995', 'clemson'], ['sean rooks', '45', 'united states', 'center', '2004', 'arizona'], ['donald royal', '5', 'united states', 'forward', '1992 - 1996', 'notre dame']]
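filter_greater_eq compares the years in orlando column against 2000, which for a range cell such as '2006 - 2013' presumably means its starting year; that reading, and the substring match on position, are assumptions of this sketch:

rows = [
    ('j j redick', 'guard', '2006 - 2013'), ('don reid', 'forward', '2000 - 2002'),
    ('jerry reynolds', 'guard - forward', '1989 - 1992'), ('jason richardson', 'guard', '2010 - 2012'),
    ('jeremy richardson', 'forward', '2008 - 2009'), ('quentin richardson', 'forward', '2010 - 2012'),
    ('stanley roberts', 'center', '1991 - 1992'), ('james robinson', 'guard', '2000 - 2001'),
    ("wayne ' tree ' rollins", 'center', '1993 - 1995'), ('sean rooks', 'center', '2004'),
    ('donald royal', 'forward', '1992 - 1996'),
]  # (player, position, years in orlando) columns from the table above

start_year = lambda cell: int(cell.split('-')[0])  # '2006 - 2013' -> 2006, '2004' -> 2004
forwards = [r for r in rows if 'forward' in r[1]]  # filter_str_eq on position
recent = [r for r in forwards if start_year(r[2]) >= 2000]
assert len(recent) == 3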
list of top association football goal scorers by country
https://en.wikipedia.org/wiki/List_of_top_association_football_goal_scorers_by_country
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-1590321-78.html.csv
unique
of the top association football goal scorers , yordanos abay was the only one from ethiopia .
{'scope': 'all', 'row': '5', 'col': '3', 'col_other': '2', 'criterion': 'equal', 'value': 'ethiopia', 'subset': None}
{'func': 'and', 'args': [{'func': 'only', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'country', 'ethiopia'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose country record fuzzily matches to ethiopia .', 'tostr': 'filter_eq { all_rows ; country ; ethiopia }'}], 'result': True, 'ind': 1, 'tostr': 'only { filter_eq { all_rows ; country ; ethiopia } }', 'tointer': 'select the rows whose country record fuzzily matches to ethiopia . there is only one such row in the table .'}, {'func': 'str_eq', 'args': [{'func': 'str_hop', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'country', 'ethiopia'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose country record fuzzily matches to ethiopia .', 'tostr': 'filter_eq { all_rows ; country ; ethiopia }'}, 'player'], 'result': 'yordanos abay', 'ind': 2, 'tostr': 'hop { filter_eq { all_rows ; country ; ethiopia } ; player }'}, 'yordanos abay'], 'result': True, 'ind': 3, 'tostr': 'eq { hop { filter_eq { all_rows ; country ; ethiopia } ; player } ; yordanos abay }', 'tointer': 'the player record of this unqiue row is yordanos abay .'}], 'result': True, 'ind': 4, 'tostr': 'and { only { filter_eq { all_rows ; country ; ethiopia } } ; eq { hop { filter_eq { all_rows ; country ; ethiopia } ; player } ; yordanos abay } } = true', 'tointer': 'select the rows whose country record fuzzily matches to ethiopia . there is only one such row in the table . the player record of this unqiue row is yordanos abay .'}
and { only { filter_eq { all_rows ; country ; ethiopia } } ; eq { hop { filter_eq { all_rows ; country ; ethiopia } ; player } ; yordanos abay } } = true
select the rows whose country record fuzzily matches to ethiopia . there is only one such row in the table . the player record of this unique row is yordanos abay .
6
5
{'and_4': 4, 'result_5': 5, 'only_1': 1, 'filter_str_eq_0': 0, 'all_rows_6': 6, 'country_7': 7, 'ethiopia_8': 8, 'str_eq_3': 3, 'str_hop_2': 2, 'player_9': 9, 'yordanos abay_10': 10}
{'and_4': 'and', 'result_5': 'true', 'only_1': 'only', 'filter_str_eq_0': 'filter_str_eq', 'all_rows_6': 'all_rows', 'country_7': 'country', 'ethiopia_8': 'ethiopia', 'str_eq_3': 'str_eq', 'str_hop_2': 'str_hop', 'player_9': 'player', 'yordanos abay_10': 'yordanos abay'}
{'and_4': [5], 'result_5': [], 'only_1': [4], 'filter_str_eq_0': [1, 2], 'all_rows_6': [0], 'country_7': [0], 'ethiopia_8': [0], 'str_eq_3': [4], 'str_hop_2': [3], 'player_9': [2], 'yordanos abay_10': [3]}
['rank', 'player', 'country', 'goals', 'years']
[['1', 'ali al - nono', 'yemen', '146', "'99 -"], ['2', 'adel al - salimi', 'yemen', '136', "'97 - ' 11"], ['3', 'sharaf mahfood', 'yemen', '121', "'85 - ' 05"], ['4', 'fathi jabir', 'yemen', '108', "'97 - ' 08"], ['5', 'yordanos abay', 'ethiopia', '93', "'03 -"]]
libertine ( song )
https://en.wikipedia.org/wiki/Libertine_%28song%29
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-15204733-2.html.csv
comparative
the live version recorded in 1996 is longer than the live version recorded in 2000 .
{'row_1': '11', 'row_2': '12', 'col': '2', 'col_other': '5', 'relation': 'greater', 'record_mentioned': 'no', 'diff_result': None}
{'func': 'greater', 'args': [{'func': 'str_hop', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'year', '1996'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose year record fuzzily matches to 1996 .', 'tostr': 'filter_eq { all_rows ; year ; 1996 }'}, 'length'], 'result': None, 'ind': 2, 'tostr': 'hop { filter_eq { all_rows ; year ; 1996 } ; length }', 'tointer': 'select the rows whose year record fuzzily matches to 1996 . take the length record of this row .'}, {'func': 'str_hop', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'year', '2000'], 'result': None, 'ind': 1, 'tointer': 'select the rows whose year record fuzzily matches to 2000 .', 'tostr': 'filter_eq { all_rows ; year ; 2000 }'}, 'length'], 'result': None, 'ind': 3, 'tostr': 'hop { filter_eq { all_rows ; year ; 2000 } ; length }', 'tointer': 'select the rows whose year record fuzzily matches to 2000 . take the length record of this row .'}], 'result': True, 'ind': 4, 'tostr': 'greater { hop { filter_eq { all_rows ; year ; 1996 } ; length } ; hop { filter_eq { all_rows ; year ; 2000 } ; length } } = true', 'tointer': 'select the rows whose year record fuzzily matches to 1996 . take the length record of this row . select the rows whose year record fuzzily matches to 2000 . take the length record of this row . the first record is greater than the second record .'}
greater { hop { filter_eq { all_rows ; year ; 1996 } ; length } ; hop { filter_eq { all_rows ; year ; 2000 } ; length } } = true
select the rows whose year record fuzzily matches to 1996 . take the length record of this row . select the rows whose year record fuzzily matches to 2000 . take the length record of this row . the first record is greater than the second record .
5
5
{'greater_4': 4, 'result_5': 5, 'str_hop_2': 2, 'filter_str_eq_0': 0, 'all_rows_6': 6, 'year_7': 7, '1996_8': 8, 'length_9': 9, 'str_hop_3': 3, 'filter_str_eq_1': 1, 'all_rows_10': 10, 'year_11': 11, '2000_12': 12, 'length_13': 13}
{'greater_4': 'greater', 'result_5': 'true', 'str_hop_2': 'str_hop', 'filter_str_eq_0': 'filter_str_eq', 'all_rows_6': 'all_rows', 'year_7': 'year', '1996_8': '1996', 'length_9': 'length', 'str_hop_3': 'str_hop', 'filter_str_eq_1': 'filter_str_eq', 'all_rows_10': 'all_rows', 'year_11': 'year', '2000_12': '2000', 'length_13': 'length'}
{'greater_4': [5], 'result_5': [], 'str_hop_2': [4], 'filter_str_eq_0': [2], 'all_rows_6': [0], 'year_7': [0], '1996_8': [0], 'length_9': [2], 'str_hop_3': [4], 'filter_str_eq_1': [3], 'all_rows_10': [1], 'year_11': [1], '2000_12': [1], 'length_13': [3]}
['version', 'length', 'album', 'remixed by', 'year']
[['album version', '3:49', 'cendres de lune', 'laurent boutonnat', '1986'], ['single version', '3:30', '-', '-', '1986'], ['long version', '4:30', '-', 'laurent boutonnat', '1986'], ['instrumental', '3:31', 'les clips , music videos i', '-', '1986'], ['remix', '4:35', '-', 'laurent boutonnat', '1986'], ['new remix', '3:35', '-', 'thierry rogen', '1986'], ['soundtrack from the video', '3:22', 'les clips , music videos i', '-', '1986'], ['remix special club', '5:53', 'cendres de lune', 'laurent boutonnat', '1986'], ['live version ( recorded in 1989 )', '12:00', 'en concert', '-', '1989'], ['carnal sins remix', '7:00', 'dance remixes', 'laurent boutonnat', '1992'], ['live version ( recorded in 1996 )', '5:40', 'live à bercy', '-', '1996'], ['live version ( recorded in 2000 )', '0:30', 'mylenium tour', '-', '2000'], ['album version', '3:30', 'les mots', 'laurent boutonnat', '2001'], ['y - front remix', '4:02', 'remixes', 'y - front', '2003'], ['live version ( recorded in 2009 )', '5:35', 'n degree5 on tour', '-', '2009']]
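As a hedged illustration of how the `greater { hop { filter_eq ... } ; length }` form above could be evaluated, the minimal Python sketch below parses the mm:ss `length` strings and compares the 1996 and 2000 live versions. Only the two relevant rows are copied from the table, and the helper names (`filter_eq`, `hop`, `to_seconds`) are illustrative, not taken from any published evaluator.

```python
# Minimal sketch of the comparative claim, with just the two relevant rows.
rows = [
    {"version": "live version ( recorded in 1996 )", "length": "5:40", "year": "1996"},
    {"version": "live version ( recorded in 2000 )", "length": "0:30", "year": "2000"},
]

def filter_eq(rows, col, value):
    # "fuzzy" match: the value only needs to appear inside the cell
    return [r for r in rows if value in r[col]]

def hop(rows, col):
    # take the named column of the single selected row
    return rows[0][col]

def to_seconds(mmss):
    minutes, seconds = mmss.split(":")
    return int(minutes) * 60 + int(seconds)

len_1996 = hop(filter_eq(rows, "year", "1996"), "length")
len_2000 = hop(filter_eq(rows, "year", "2000"), "length")
print(to_seconds(len_1996) > to_seconds(len_2000))  # True: 5:40 > 0:30
```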
jacksonville jaguars draft history
https://en.wikipedia.org/wiki/Jacksonville_Jaguars_draft_history
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-15100419-2.html.csv
unique
jimmy herndon was the only player the jacksonville jaguars drafted from houston .
{'scope': 'all', 'row': '6', 'col': '6', 'col_other': '4', 'criterion': 'equal', 'value': 'houston', 'subset': None}
{'func': 'and', 'args': [{'func': 'only', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'college', 'houston'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose college record fuzzily matches to houston .', 'tostr': 'filter_eq { all_rows ; college ; houston }'}], 'result': True, 'ind': 1, 'tostr': 'only { filter_eq { all_rows ; college ; houston } }', 'tointer': 'select the rows whose college record fuzzily matches to houston . there is only one such row in the table .'}, {'func': 'str_eq', 'args': [{'func': 'str_hop', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'college', 'houston'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose college record fuzzily matches to houston .', 'tostr': 'filter_eq { all_rows ; college ; houston }'}, 'name'], 'result': 'jimmy herndon', 'ind': 2, 'tostr': 'hop { filter_eq { all_rows ; college ; houston } ; name }'}, 'jimmy herndon'], 'result': True, 'ind': 3, 'tostr': 'eq { hop { filter_eq { all_rows ; college ; houston } ; name } ; jimmy herndon }', 'tointer': 'the name record of this unqiue row is jimmy herndon .'}], 'result': True, 'ind': 4, 'tostr': 'and { only { filter_eq { all_rows ; college ; houston } } ; eq { hop { filter_eq { all_rows ; college ; houston } ; name } ; jimmy herndon } } = true', 'tointer': 'select the rows whose college record fuzzily matches to houston . there is only one such row in the table . the name record of this unqiue row is jimmy herndon .'}
and { only { filter_eq { all_rows ; college ; houston } } ; eq { hop { filter_eq { all_rows ; college ; houston } ; name } ; jimmy herndon } } = true
select the rows whose college record fuzzily matches to houston . there is only one such row in the table . the name record of this unique row is jimmy herndon .
6
5
{'and_4': 4, 'result_5': 5, 'only_1': 1, 'filter_str_eq_0': 0, 'all_rows_6': 6, 'college_7': 7, 'houston_8': 8, 'str_eq_3': 3, 'str_hop_2': 2, 'name_9': 9, 'jimmy herndon_10': 10}
{'and_4': 'and', 'result_5': 'true', 'only_1': 'only', 'filter_str_eq_0': 'filter_str_eq', 'all_rows_6': 'all_rows', 'college_7': 'college', 'houston_8': 'houston', 'str_eq_3': 'str_eq', 'str_hop_2': 'str_hop', 'name_9': 'name', 'jimmy herndon_10': 'jimmy herndon'}
{'and_4': [5], 'result_5': [], 'only_1': [4], 'filter_str_eq_0': [1, 2], 'all_rows_6': [0], 'college_7': [0], 'houston_8': [0], 'str_eq_3': [4], 'str_hop_2': [3], 'name_9': [2], 'jimmy herndon_10': [3]}
['round', 'pick', 'overall', 'name', 'position', 'college']
[['1', '2', '2', 'kevin hardy', 'linebacker', 'illinois'], ['2', '3', '33', 'tony brackens', 'defensive end', 'texas'], ['2', '30', '60', 'michael cheever', 'center', 'georgia tech'], ['3', '2', '63', 'aaron beasley', 'cornerback', 'west virginia'], ['4', '15', '110', 'reggie barlow', 'wide receiver', 'alabama state'], ['5', '14', '146', 'jimmy herndon', 'guard', 'houston'], ['6', '3', '170', 'john fisher', 'defensive back', 'missouri western'], ['6', '18', '185', 'chris doering', 'wide receiver', 'florida'], ['7', '18', '227', 'clarence jones', 'wide receiver', 'tennessee state'], ['7', '19', '228', 'gregory spann', 'wide receiver', 'jackson state']]
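The `and { only { filter_eq ... } ; eq { hop ... } }` pattern used for "unique" claims can be sketched as follows, assuming plain Python lists and only a few representative rows from the table; `only` simply checks that exactly one row survives the filter.

```python
# Minimal sketch of the "unique" pattern over (name, college) pairs.
rows = [
    ("kevin hardy", "illinois"),
    ("jimmy herndon", "houston"),
    ("john fisher", "missouri western"),
]

houston_rows = [r for r in rows if r[1] == "houston"]   # filter_eq on college
is_only = len(houston_rows) == 1                        # only: exactly one row survives
name_matches = houston_rows[0][0] == "jimmy herndon"    # hop on name + eq
print(is_only and name_matches)                         # True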
united states house of representatives elections , 1960
https://en.wikipedia.org/wiki/United_States_House_of_Representatives_elections%2C_1960
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/1-1341897-6.html.csv
majority
all incumbents of the 1960 united states house of representatives elections were from the democratic party .
{'scope': 'all', 'col': '3', 'most_or_all': 'all', 'criterion': 'equal', 'value': 'democratic', 'subset': None}
{'func': 'all_str_eq', 'args': ['all_rows', 'party', 'democratic'], 'result': True, 'ind': 0, 'tointer': 'for the party records of all rows , all of them fuzzily match to democratic .', 'tostr': 'all_eq { all_rows ; party ; democratic } = true'}
all_eq { all_rows ; party ; democratic } = true
for the party records of all rows , all of them fuzzily match to democratic .
1
1
{'all_str_eq_0': 0, 'result_1': 1, 'all_rows_2': 2, 'party_3': 3, 'democratic_4': 4}
{'all_str_eq_0': 'all_str_eq', 'result_1': 'true', 'all_rows_2': 'all_rows', 'party_3': 'party', 'democratic_4': 'democratic'}
{'all_str_eq_0': [1], 'result_1': [], 'all_rows_2': [0], 'party_3': [0], 'democratic_4': [0]}
['district', 'incumbent', 'party', 'first elected', 'result', 'candidates']
[['arkansas 1', 'ezekiel c gathings', 'democratic', '1938', 're - elected', 'ezekiel c gathings ( d ) unopposed'], ['arkansas 2', 'wilbur mills', 'democratic', '1938', 're - elected', 'wilbur mills ( d ) unopposed'], ['arkansas 3', 'james william trimble', 'democratic', '1944', 're - elected', 'james william trimble ( d ) unopposed'], ['arkansas 4', 'oren harris', 'democratic', '1940', 're - elected', 'oren harris ( d ) unopposed'], ['arkansas 5', 'dale alford', 'democratic', '1958', 're - elected', 'dale alford ( d ) 82.7 % l j churchill ( r ) 17.3 %']]
1946 vfl season
https://en.wikipedia.org/wiki/1946_VFL_season
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-10809368-5.html.csv
ordinal
during the 1946 vfl season , the 2nd largest crowd was when the home team was carlton .
{'row': '4', 'col': '6', 'order': '2', 'col_other': '1', 'max_or_min': 'max_to_min', 'value_mentioned': 'no', 'scope': 'all', 'subset': None}
{'func': 'str_eq', 'args': [{'func': 'str_hop', 'args': [{'func': 'nth_argmax', 'args': ['all_rows', 'crowd', '2'], 'result': None, 'ind': 0, 'tostr': 'nth_argmax { all_rows ; crowd ; 2 }'}, 'home team'], 'result': 'carlton', 'ind': 1, 'tostr': 'hop { nth_argmax { all_rows ; crowd ; 2 } ; home team }'}, 'carlton'], 'result': True, 'ind': 2, 'tostr': 'eq { hop { nth_argmax { all_rows ; crowd ; 2 } ; home team } ; carlton } = true', 'tointer': 'select the row whose crowd record of all rows is 2nd maximum . the home team record of this row is carlton .'}
eq { hop { nth_argmax { all_rows ; crowd ; 2 } ; home team } ; carlton } = true
select the row whose crowd record of all rows is 2nd maximum . the home team record of this row is carlton .
3
3
{'str_eq_2': 2, 'result_3': 3, 'str_hop_1': 1, 'nth_argmax_0': 0, 'all_rows_4': 4, 'crowd_5': 5, '2_6': 6, 'home team_7': 7, 'carlton_8': 8}
{'str_eq_2': 'str_eq', 'result_3': 'true', 'str_hop_1': 'str_hop', 'nth_argmax_0': 'nth_argmax', 'all_rows_4': 'all_rows', 'crowd_5': 'crowd', '2_6': '2', 'home team_7': 'home team', 'carlton_8': 'carlton'}
{'str_eq_2': [3], 'result_3': [], 'str_hop_1': [2], 'nth_argmax_0': [1], 'all_rows_4': [0], 'crowd_5': [0], '2_6': [0], 'home team_7': [1], 'carlton_8': [2]}
['home team', 'home team score', 'away team', 'away team score', 'venue', 'crowd', 'date']
[['geelong', '11.8 ( 74 )', 'st kilda', '13.17 ( 95 )', 'kardinia park', '9000', '18 may 1946'], ['essendon', '11.13 ( 79 )', 'fitzroy', '4.15 ( 39 )', 'windy hill', '17000', '18 may 1946'], ['collingwood', '2.16 ( 28 )', 'richmond', '14.10 ( 94 )', 'victoria park', '24000', '18 may 1946'], ['carlton', '11.15 ( 81 )', 'north melbourne', '9.11 ( 65 )', 'princes park', '19000', '18 may 1946'], ['south melbourne', '11.21 ( 87 )', 'hawthorn', '6.17 ( 53 )', 'junction oval', '10000', '18 may 1946'], ['melbourne', '12.13 ( 85 )', 'footscray', '22.14 ( 146 )', 'punt road oval', '18000', '18 may 1946']]
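The `nth_argmax` step above can be read as: sort the rows by the numeric `crowd` column in descending order and take the nth one. A minimal sketch, with the crowd column copied from the table and an illustrative `nth_argmax` helper:

```python
# Sketch: the row with the 2nd largest crowd has home team 'carlton'.
rows = [
    ("geelong", 9000), ("essendon", 17000), ("collingwood", 24000),
    ("carlton", 19000), ("south melbourne", 10000), ("melbourne", 18000),
]

def nth_argmax(rows, key, n):
    # n is 1-based: n == 1 returns the maximum, n == 2 the second largest, ...
    return sorted(rows, key=key, reverse=True)[n - 1]

second = nth_argmax(rows, key=lambda r: r[1], n=2)
print(second[0] == "carlton")  # True: 19000 is second only to 24000
```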
faysal bank t20 cup
https://en.wikipedia.org/wiki/Faysal_Bank_T20_Cup
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-16159444-1.html.csv
unique
in the faysal bank t20 cup , among the teams coached by manzoor elahi , the only one captained by abdur rauf was multan tigers .
{'scope': 'subset', 'row': '10', 'col': '5', 'col_other': '1', 'criterion': 'equal', 'value': 'abdur rauf', 'subset': {'col': '6', 'criterion': 'equal', 'value': 'manzoor elahi'}}
{'func': 'and', 'args': [{'func': 'only', 'args': [{'func': 'filter_str_eq', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'coach', 'manzoor elahi'], 'result': None, 'ind': 0, 'tostr': 'filter_eq { all_rows ; coach ; manzoor elahi }', 'tointer': 'select the rows whose coach record fuzzily matches to manzoor elahi .'}, 'captain', 'abdur rauf'], 'result': None, 'ind': 1, 'tointer': 'select the rows whose coach record fuzzily matches to manzoor elahi . among these rows , select the rows whose captain record fuzzily matches to abdur rauf .', 'tostr': 'filter_eq { filter_eq { all_rows ; coach ; manzoor elahi } ; captain ; abdur rauf }'}], 'result': True, 'ind': 2, 'tostr': 'only { filter_eq { filter_eq { all_rows ; coach ; manzoor elahi } ; captain ; abdur rauf } }', 'tointer': 'select the rows whose coach record fuzzily matches to manzoor elahi . among these rows , select the rows whose captain record fuzzily matches to abdur rauf . there is only one such row in the table .'}, {'func': 'str_eq', 'args': [{'func': 'str_hop', 'args': [{'func': 'filter_str_eq', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'coach', 'manzoor elahi'], 'result': None, 'ind': 0, 'tostr': 'filter_eq { all_rows ; coach ; manzoor elahi }', 'tointer': 'select the rows whose coach record fuzzily matches to manzoor elahi .'}, 'captain', 'abdur rauf'], 'result': None, 'ind': 1, 'tointer': 'select the rows whose coach record fuzzily matches to manzoor elahi . among these rows , select the rows whose captain record fuzzily matches to abdur rauf .', 'tostr': 'filter_eq { filter_eq { all_rows ; coach ; manzoor elahi } ; captain ; abdur rauf }'}, 'team'], 'result': 'multan tigers', 'ind': 3, 'tostr': 'hop { filter_eq { filter_eq { all_rows ; coach ; manzoor elahi } ; captain ; abdur rauf } ; team }'}, 'multan tigers'], 'result': True, 'ind': 4, 'tostr': 'eq { hop { filter_eq { filter_eq { all_rows ; coach ; manzoor elahi } ; captain ; abdur rauf } ; team } ; multan tigers }', 'tointer': 'the team record of this unqiue row is multan tigers .'}], 'result': True, 'ind': 5, 'tostr': 'and { only { filter_eq { filter_eq { all_rows ; coach ; manzoor elahi } ; captain ; abdur rauf } } ; eq { hop { filter_eq { filter_eq { all_rows ; coach ; manzoor elahi } ; captain ; abdur rauf } ; team } ; multan tigers } } = true', 'tointer': 'select the rows whose coach record fuzzily matches to manzoor elahi . among these rows , select the rows whose captain record fuzzily matches to abdur rauf . there is only one such row in the table . the team record of this unqiue row is multan tigers .'}
and { only { filter_eq { filter_eq { all_rows ; coach ; manzoor elahi } ; captain ; abdur rauf } } ; eq { hop { filter_eq { filter_eq { all_rows ; coach ; manzoor elahi } ; captain ; abdur rauf } ; team } ; multan tigers } } = true
select the rows whose coach record fuzzily matches to manzoor elahi . among these rows , select the rows whose captain record fuzzily matches to abdur rauf . there is only one such row in the table . the team record of this unique row is multan tigers .
8
6
{'and_5': 5, 'result_6': 6, 'only_2': 2, 'filter_str_eq_1': 1, 'filter_str_eq_0': 0, 'all_rows_7': 7, 'coach_8': 8, 'manzoor elahi_9': 9, 'captain_10': 10, 'abdur rauf_11': 11, 'str_eq_4': 4, 'str_hop_3': 3, 'team_12': 12, 'multan tigers_13': 13}
{'and_5': 'and', 'result_6': 'true', 'only_2': 'only', 'filter_str_eq_1': 'filter_str_eq', 'filter_str_eq_0': 'filter_str_eq', 'all_rows_7': 'all_rows', 'coach_8': 'coach', 'manzoor elahi_9': 'manzoor elahi', 'captain_10': 'captain', 'abdur rauf_11': 'abdur rauf', 'str_eq_4': 'str_eq', 'str_hop_3': 'str_hop', 'team_12': 'team', 'multan tigers_13': 'multan tigers'}
{'and_5': [6], 'result_6': [], 'only_2': [5], 'filter_str_eq_1': [2, 3], 'filter_str_eq_0': [1], 'all_rows_7': [0], 'coach_8': [0], 'manzoor elahi_9': [0], 'captain_10': [1], 'abdur rauf_11': [1], 'str_eq_4': [5], 'str_hop_3': [4], 'team_12': [3], 'multan tigers_13': [4]}
['team', 'city', 'inaugural season', 'final season', 'captain', 'coach']
[['abbottabad falcons', 'abbottabad , kp', '2005 - 06', '2012 - 13', 'younus khan', 'sajjad akbar'], ['lahore eagles', 'lahore , pj', '2004 - 05', '2012 - 13', 'taufeeq umar', 'manzoor elahi'], ['faisalabad wolves', 'faisalabad , pj', '2004 - 05', '2012 - 13', 'misbah - ul - haq', 'naveed anjum'], ['bahawalpur stags', 'bahawalpur , pj', '2012 - 13', '2012 - 13', 'bilal khilji', 'shahid anwar'], ['hyderabad hawks', 'hyderabad , sn', '2004 - 05', '2012 - 13', 'rizwan ahmed', 'shaukat mirza'], ['islamabad leopards', 'islamabad , ct', '2005 - 06', '2012 - 13', 'umar gul', 'taimoor azam'], ['afghan cheetahs', 'kabul , afghanistan', '2011 - 12', '2011 - 12', 'mohammad nabi', 'raees ahmadzai'], ['karachi dolphins', 'karachi , sn', '2004 - 05', '2012 - 13', 'shahid afridi', 'tauseef ahmed'], ['lahore lions', 'lahore , pj', '2004 - 05', '2012 - 13', 'mohammad hafeez', 'mohsin kamal'], ['multan tigers', 'multan , pj', '2004 - 05', '2012 - 13', 'abdur rauf', 'manzoor elahi'], ['peshawar panthers', 'peshawar , kp', '2004 - 05', '2012 - 13', 'akbar badshah', 'abdur rehman'], ['quetta bears', 'quetta , bl', '2004 - 05', '2012 - 13', 'gohar faiz', 'arshad khan'], ['rawalpindi rams', 'rawalpindi , pj', '2004 - 05', '2012 - 13', 'sohail tanvir', 'sabih azhar'], ['sialkot stallions', 'sialkot , pj', '2004 - 05', '2012 - 13', 'shoaib malik', 'ijaz ahmed jnr'], ['karachi zebras', 'karachi , sn', '2004 - 05', '2012 - 13', 'rameez raja', 'azam khan']]
gulf coast athletic conference
https://en.wikipedia.org/wiki/Gulf_Coast_Athletic_Conference
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/1-10577579-2.html.csv
superlative
the enrollment at southern university at new orleans is higher than the enrollment at any other institution in the gulf coast athletic conference .
{'scope': 'all', 'col_superlative': '7', 'row_superlative': '5', 'value_mentioned': 'no', 'max_or_min': 'max', 'other_col': '1', 'subset': None}
{'func': 'str_eq', 'args': [{'func': 'str_hop', 'args': [{'func': 'argmax', 'args': ['all_rows', 'enrollment'], 'result': None, 'ind': 0, 'tostr': 'argmax { all_rows ; enrollment }'}, 'institution'], 'result': 'southern university at new orleans', 'ind': 1, 'tostr': 'hop { argmax { all_rows ; enrollment } ; institution }'}, 'southern university at new orleans'], 'result': True, 'ind': 2, 'tostr': 'eq { hop { argmax { all_rows ; enrollment } ; institution } ; southern university at new orleans } = true', 'tointer': 'select the row whose enrollment record of all rows is maximum . the institution record of this row is southern university at new orleans .'}
eq { hop { argmax { all_rows ; enrollment } ; institution } ; southern university at new orleans } = true
select the row whose enrollment record of all rows is maximum . the institution record of this row is southern university at new orleans .
3
3
{'str_eq_2': 2, 'result_3': 3, 'str_hop_1': 1, 'argmax_0': 0, 'all_rows_4': 4, 'enrollment_5': 5, 'institution_6': 6, 'southern university at new orleans_7': 7}
{'str_eq_2': 'str_eq', 'result_3': 'true', 'str_hop_1': 'str_hop', 'argmax_0': 'argmax', 'all_rows_4': 'all_rows', 'enrollment_5': 'enrollment', 'institution_6': 'institution', 'southern university at new orleans_7': 'southern university at new orleans'}
{'str_eq_2': [3], 'result_3': [], 'str_hop_1': [2], 'argmax_0': [1], 'all_rows_4': [0], 'enrollment_5': [0], 'institution_6': [1], 'southern university at new orleans_7': [2]}
['institution', 'location', 'mens nickname', 'womens nickname', 'founded', 'type', 'enrollment', 'joined']
[['dillard university', 'new orleans , louisiana', 'bleu devils', 'lady bleu devils', '1869', 'private / ( methodist & church of christ )', '900', '1981'], ['edward waters college', 'jacksonville , florida', 'tigers', 'lady tigers', '1866', 'private / ( african methodist )', '800', '2010'], ['fisk university', 'nashville , tennessee', 'bulldogs', 'lady bulldogs', '1866', 'private / ( church of christ )', '800', '2010'], ['philander smith college', 'little rock , arkansas', 'panthers', 'lady panthers', '1864', 'private / ( methodist )', '700', '2011'], ['southern university at new orleans', 'new orleans , louisiana', 'black knights', 'lady knights', '1956', 'public', '3200', '1986'], ['talladega college', 'talladega , alabama', 'tornadoes', 'lady tornadoes', '1867', 'private / ( united church of christ )', '600', '1999 , 2011'], ['tougaloo college', 'tougaloo , mississippi', 'bulldogs', 'lady bulldogs', '1869', 'private / ( church of christ )', '900', '1981']]
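`argmax` is the n = 1 case of the same ordering idea; a short sketch using Python's built-in `max` over the enrollment column:

```python
# Sketch: the institution with the maximum enrollment.
enrollment = {
    "dillard university": 900,
    "edward waters college": 800,
    "fisk university": 800,
    "philander smith college": 700,
    "southern university at new orleans": 3200,
    "talladega college": 600,
    "tougaloo college": 900,
}
top = max(enrollment, key=enrollment.get)
print(top == "southern university at new orleans")  # True
```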
1939 vfl season
https://en.wikipedia.org/wiki/1939_VFL_season
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-10806852-6.html.csv
aggregation
the average score of the away teams in round 6 of the 1939 vfl season was 10.55 ( 74 ) .
{'scope': 'all', 'col': '4', 'type': 'average', 'result': '10.55 ( 74 )', 'subset': None}
{'func': 'round_eq', 'args': [{'func': 'avg', 'args': ['all_rows', 'away team score'], 'result': '10.55 ( 74 )', 'ind': 0, 'tostr': 'avg { all_rows ; away team score }'}, '10.55 ( 74 )'], 'result': True, 'ind': 1, 'tostr': 'round_eq { avg { all_rows ; away team score } ; 10.55 ( 74 ) } = true', 'tointer': 'the average of the away team score record of all rows is 10.55 ( 74 ) .'}
round_eq { avg { all_rows ; away team score } ; 10.55 ( 74 ) } = true
the average of the away team score record of all rows is 10.55 ( 74 ) .
2
2
{'eq_1': 1, 'result_2': 2, 'avg_0': 0, 'all_rows_3': 3, 'away team score_4': 4, '10.55 (74)_5': 5}
{'eq_1': 'eq', 'result_2': 'true', 'avg_0': 'avg', 'all_rows_3': 'all_rows', 'away team score_4': 'away team score', '10.55 (74)_5': '10.55 ( 74 )'}
{'eq_1': [2], 'result_2': [], 'avg_0': [1], 'all_rows_3': [0], 'away team score_4': [0], '10.55 (74)_5': [1]}
['home team', 'home team score', 'away team', 'away team score', 'venue', 'crowd', 'date']
[['melbourne', '19.23 ( 137 )', 'south melbourne', '3.12 ( 30 )', 'mcg', '16523', '27 may 1939'], ['collingwood', '14.14 ( 98 )', 'hawthorn', '12.7 ( 79 )', 'victoria park', '15000', '27 may 1939'], ['carlton', '8.13 ( 61 )', 'richmond', '9.14 ( 68 )', 'princes park', '34000', '27 may 1939'], ['st kilda', '16.18 ( 114 )', 'geelong', '10.16 ( 76 )', 'junction oval', '17000', '27 may 1939'], ['footscray', '11.13 ( 79 )', 'fitzroy', '15.10 ( 100 )', 'western oval', '13000', '27 may 1939'], ['north melbourne', '15.11 ( 101 )', 'essendon', '13.10 ( 88 )', 'arden street oval', '14500', '27 may 1939']]
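The `avg` here runs over formatted score strings such as '3.12 ( 30 )'. One plausible reading, consistent with the stated result 10.55 ( 74 ), is that the leading goals.behinds figure is averaged as a decimal and the bracketed points total is averaged separately; the sketch below reproduces both numbers under that assumption, which is an interpretation rather than a documented rule.

```python
# Sketch of the average, under the parsing assumption described above.
scores = ["3.12 ( 30 )", "12.7 ( 79 )", "9.14 ( 68 )",
          "10.16 ( 76 )", "15.10 ( 100 )", "13.10 ( 88 )"]

leading = [float(s.split(" ")[0]) for s in scores]             # 3.12, 12.7, ...
points = [int(s.split("( ")[1].rstrip(" )")) for s in scores]  # 30, 79, ...

print(round(sum(leading) / len(leading), 2))  # 10.55
print(round(sum(points) / len(points)))       # 74
```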
orlando magic all - time roster
https://en.wikipedia.org/wiki/Orlando_Magic_all-time_roster
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/1-15621965-14.html.csv
unique
victor oladipo is the only player currently playing for orlando magic .
{'scope': 'all', 'row': '1', 'col': '5', 'col_other': '1', 'criterion': 'fuzzily_match', 'value': 'present', 'subset': None}
{'func': 'and', 'args': [{'func': 'only', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'years in orlando', 'present'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose years in orlando record fuzzily matches to present .', 'tostr': 'filter_eq { all_rows ; years in orlando ; present }'}], 'result': True, 'ind': 1, 'tostr': 'only { filter_eq { all_rows ; years in orlando ; present } }', 'tointer': 'select the rows whose years in orlando record fuzzily matches to present . there is only one such row in the table .'}, {'func': 'str_eq', 'args': [{'func': 'str_hop', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'years in orlando', 'present'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose years in orlando record fuzzily matches to present .', 'tostr': 'filter_eq { all_rows ; years in orlando ; present }'}, 'player'], 'result': 'victor oladipo', 'ind': 2, 'tostr': 'hop { filter_eq { all_rows ; years in orlando ; present } ; player }'}, 'victor oladipo'], 'result': True, 'ind': 3, 'tostr': 'eq { hop { filter_eq { all_rows ; years in orlando ; present } ; player } ; victor oladipo }', 'tointer': 'the player record of this unqiue row is victor oladipo .'}], 'result': True, 'ind': 4, 'tostr': 'and { only { filter_eq { all_rows ; years in orlando ; present } } ; eq { hop { filter_eq { all_rows ; years in orlando ; present } ; player } ; victor oladipo } } = true', 'tointer': 'select the rows whose years in orlando record fuzzily matches to present . there is only one such row in the table . the player record of this unqiue row is victor oladipo .'}
and { only { filter_eq { all_rows ; years in orlando ; present } } ; eq { hop { filter_eq { all_rows ; years in orlando ; present } ; player } ; victor oladipo } } = true
select the rows whose years in orlando record fuzzily matches to present . there is only one such row in the table . the player record of this unique row is victor oladipo .
6
5
{'and_4': 4, 'result_5': 5, 'only_1': 1, 'filter_str_eq_0': 0, 'all_rows_6': 6, 'years in orlando_7': 7, 'present_8': 8, 'str_eq_3': 3, 'str_hop_2': 2, 'player_9': 9, 'victor oladipo_10': 10}
{'and_4': 'and', 'result_5': 'true', 'only_1': 'only', 'filter_str_eq_0': 'filter_str_eq', 'all_rows_6': 'all_rows', 'years in orlando_7': 'years in orlando', 'present_8': 'present', 'str_eq_3': 'str_eq', 'str_hop_2': 'str_hop', 'player_9': 'player', 'victor oladipo_10': 'victor oladipo'}
{'and_4': [5], 'result_5': [], 'only_1': [4], 'filter_str_eq_0': [1, 2], 'all_rows_6': [0], 'years in orlando_7': [0], 'present_8': [0], 'str_eq_3': [4], 'str_hop_2': [3], 'player_9': [2], 'victor oladipo_10': [3]}
['player', 'no', 'nationality', 'position', 'years in orlando', 'school / club team']
[['victor oladipo', '5', 'united states', 'guard', '2013 - present', 'indiana'], ['jawann oldham', '55', 'united states', 'center', '1989 - 1990', 'seattle'], ['kevin ollie', '3', 'united states', 'guard', '1998', 'connecticut'], ["shaquille o'neal", '32', 'united states', 'center', '1992 - 1996', 'louisiana state'], ['daniel orton', '21', 'united states', 'center', '2010 - 2012', 'kentucky'], ['bo outlaw', '45', 'united states', 'forward - center', '1997 - 2001', 'houston'], ['bo outlaw', '45', 'united states', 'forward - center', '2005 - 2008', 'houston'], ['doug overton', '11', 'united states', 'guard', '1998 - 1999', 'la salle']]
jack turner ( racing driver )
https://en.wikipedia.org/wiki/Jack_Turner_%28racing_driver%29
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-1252094-1.html.csv
comparative
jack turner had a higher rank in 1959 than he did in 1958 .
{'row_1': '4', 'row_2': '3', 'col': '4', 'col_other': '1', 'relation': 'less', 'record_mentioned': 'no', 'diff_result': None}
{'func': 'less', 'args': [{'func': 'num_hop', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'year', '1959'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose year record fuzzily matches to 1959 .', 'tostr': 'filter_eq { all_rows ; year ; 1959 }'}, 'rank'], 'result': None, 'ind': 2, 'tostr': 'hop { filter_eq { all_rows ; year ; 1959 } ; rank }', 'tointer': 'select the rows whose year record fuzzily matches to 1959 . take the rank record of this row .'}, {'func': 'num_hop', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'year', '1958'], 'result': None, 'ind': 1, 'tointer': 'select the rows whose year record fuzzily matches to 1958 .', 'tostr': 'filter_eq { all_rows ; year ; 1958 }'}, 'rank'], 'result': None, 'ind': 3, 'tostr': 'hop { filter_eq { all_rows ; year ; 1958 } ; rank }', 'tointer': 'select the rows whose year record fuzzily matches to 1958 . take the rank record of this row .'}], 'result': True, 'ind': 4, 'tostr': 'less { hop { filter_eq { all_rows ; year ; 1959 } ; rank } ; hop { filter_eq { all_rows ; year ; 1958 } ; rank } } = true', 'tointer': 'select the rows whose year record fuzzily matches to 1959 . take the rank record of this row . select the rows whose year record fuzzily matches to 1958 . take the rank record of this row . the first record is less than the second record .'}
less { hop { filter_eq { all_rows ; year ; 1959 } ; rank } ; hop { filter_eq { all_rows ; year ; 1958 } ; rank } } = true
select the rows whose year record fuzzily matches to 1959 . take the rank record of this row . select the rows whose year record fuzzily matches to 1958 . take the rank record of this row . the first record is less than the second record .
5
5
{'less_4': 4, 'result_5': 5, 'num_hop_2': 2, 'filter_str_eq_0': 0, 'all_rows_6': 6, 'year_7': 7, '1959_8': 8, 'rank_9': 9, 'num_hop_3': 3, 'filter_str_eq_1': 1, 'all_rows_10': 10, 'year_11': 11, '1958_12': 12, 'rank_13': 13}
{'less_4': 'less', 'result_5': 'true', 'num_hop_2': 'num_hop', 'filter_str_eq_0': 'filter_str_eq', 'all_rows_6': 'all_rows', 'year_7': 'year', '1959_8': '1959', 'rank_9': 'rank', 'num_hop_3': 'num_hop', 'filter_str_eq_1': 'filter_str_eq', 'all_rows_10': 'all_rows', 'year_11': 'year', '1958_12': '1958', 'rank_13': 'rank'}
{'less_4': [5], 'result_5': [], 'num_hop_2': [4], 'filter_str_eq_0': [2], 'all_rows_6': [0], 'year_7': [0], '1959_8': [0], 'rank_9': [2], 'num_hop_3': [4], 'filter_str_eq_1': [3], 'all_rows_10': [1], 'year_11': [1], '1958_12': [1], 'rank_13': [3]}
['year', 'start', 'qual', 'rank', 'finish', 'laps']
[['1956', '24', '142.394', '18', '25', '131'], ['1957', '19', '140.367', '25', '11', '200'], ['1958', '10', '143.438', '12', '25', '21'], ['1959', '14', '143.478', '11', '27', '47'], ['1961', '21', '144.904', '21', '25', '52'], ['1962', '25', '146.496', '25', '29', '17']]
fivb volleyball world championship
https://en.wikipedia.org/wiki/FIVB_Volleyball_World_Championship
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-1747960-4.html.csv
aggregation
the teams that participated in the fivb volleyball world championship won an average of about 4 medals each .
{'scope': 'all', 'col': '5', 'type': 'average', 'result': '3.75', 'subset': None}
{'func': 'round_eq', 'args': [{'func': 'avg', 'args': ['all_rows', 'total'], 'result': '3.75', 'ind': 0, 'tostr': 'avg { all_rows ; total }'}, '3.75'], 'result': True, 'ind': 1, 'tostr': 'round_eq { avg { all_rows ; total } ; 3.75 } = true', 'tointer': 'the average of the total record of all rows is 3.75 .'}
round_eq { avg { all_rows ; total } ; 3.75 } = true
the average of the total record of all rows is 3.75 .
2
2
{'eq_1': 1, 'result_2': 2, 'avg_0': 0, 'all_rows_3': 3, 'total_4': 4, '3.75_5': 5}
{'eq_1': 'eq', 'result_2': 'true', 'avg_0': 'avg', 'all_rows_3': 'all_rows', 'total_4': 'total', '3.75_5': '3.75'}
{'eq_1': [2], 'result_2': [], 'avg_0': [1], 'all_rows_3': [0], 'total_4': [0], '3.75_5': [1]}
['rank', 'gold', 'silver', 'bronze', 'total']
[['1', '7', '2', '4', '13'], ['2', '3', '3', '1', '7'], ['3', '3', '1', '0', '4'], ['4', '2', '2', '0', '4'], ['5', '1', '0', '0', '1'], ['6', '0', '3', '0', '3'], ['7', '0', '2', '2', '4'], ['8', '0', '1', '2', '3'], ['9', '0', '1', '1', '2'], ['10', '0', '1', '0', '1'], ['11', '0', '0', '2', '2'], ['13', '0', '0', '1', '1']]
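The `round_eq` comparison tolerates rounding: the exact average of the `total` column is 3.75, which the claim reads as "about 4". A minimal check, where the +-0.5 tolerance is only an assumed reading of "about":

```python
# Sketch: exact average of the 'total' column, plus a loose check for "about 4".
totals = [13, 7, 4, 4, 1, 3, 4, 3, 2, 1, 2, 1]
avg = sum(totals) / len(totals)
print(avg)                  # 3.75
print(abs(avg - 4) <= 0.5)  # True under an assumed +-0.5 tolerance for "about 4"
```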
list of asian academy award winners and nominees
https://en.wikipedia.org/wiki/List_of_Asian_Academy_Award_winners_and_nominees
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-11296015-5.html.csv
unique
the year 1957 is the only year in which the asian academy award was won .
{'scope': 'all', 'row': '2', 'col': '5', 'col_other': '1', 'criterion': 'fuzzily_match', 'value': 'won', 'subset': None}
{'func': 'and', 'args': [{'func': 'only', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'status', 'won'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose status record fuzzily matches to won .', 'tostr': 'filter_eq { all_rows ; status ; won }'}], 'result': True, 'ind': 1, 'tostr': 'only { filter_eq { all_rows ; status ; won } }', 'tointer': 'select the rows whose status record fuzzily matches to won . there is only one such row in the table .'}, {'func': 'str_eq', 'args': [{'func': 'str_hop', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'status', 'won'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose status record fuzzily matches to won .', 'tostr': 'filter_eq { all_rows ; status ; won }'}, 'year'], 'result': '1957', 'ind': 2, 'tostr': 'hop { filter_eq { all_rows ; status ; won } ; year }'}, '1957'], 'result': True, 'ind': 3, 'tostr': 'eq { hop { filter_eq { all_rows ; status ; won } ; year } ; 1957 }', 'tointer': 'the year record of this unqiue row is 1957 .'}], 'result': True, 'ind': 4, 'tostr': 'and { only { filter_eq { all_rows ; status ; won } } ; eq { hop { filter_eq { all_rows ; status ; won } ; year } ; 1957 } } = true', 'tointer': 'select the rows whose status record fuzzily matches to won . there is only one such row in the table . the year record of this unqiue row is 1957 .'}
and { only { filter_eq { all_rows ; status ; won } } ; eq { hop { filter_eq { all_rows ; status ; won } ; year } ; 1957 } } = true
select the rows whose status record fuzzily matches to won . there is only one such row in the table . the year record of this unique row is 1957 .
6
5
{'and_4': 4, 'result_5': 5, 'only_1': 1, 'filter_str_eq_0': 0, 'all_rows_6': 6, 'status_7': 7, 'won_8': 8, 'str_eq_3': 3, 'str_hop_2': 2, 'year_9': 9, '1957_10': 10}
{'and_4': 'and', 'result_5': 'true', 'only_1': 'only', 'filter_str_eq_0': 'filter_str_eq', 'all_rows_6': 'all_rows', 'status_7': 'status', 'won_8': 'won', 'str_eq_3': 'str_eq', 'str_hop_2': 'str_hop', 'year_9': 'year', '1957_10': '1957'}
{'and_4': [5], 'result_5': [], 'only_1': [4], 'filter_str_eq_0': [1, 2], 'all_rows_6': [0], 'status_7': [0], 'won_8': [0], 'str_eq_3': [4], 'str_hop_2': [3], 'year_9': [2], '1957_10': [3]}
['year', 'name', 'film', 'role', 'status']
[['year', 'name', 'film', 'role', 'status'], ['1957', 'miyoshi umeki', 'sayonara', 'katsumi kelly', 'won'], ['1985', 'meg tilly', 'agnes of god', 'sister agnes', 'nominated'], ['1994', 'jennifer tilly', 'bullets over broadway', 'olive neal', 'nominated'], ['2003', 'shohreh aghdashloo', 'house of sand and fog', 'nadereh behrani', 'nominated'], ['2006', 'rinko kikuchi', 'babel', 'chieko wataya', 'nominated'], ['2010', 'hailee steinfeld', 'true grit', 'mattie ross', 'nominated']]
kstp - tv
https://en.wikipedia.org/wiki/KSTP-TV
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-1406855-1.html.csv
majority
most of the channels are run by the same station , kstc - tv .
{'scope': 'all', 'col': '2', 'most_or_all': 'most', 'criterion': 'equal', 'value': 'kstc - tv', 'subset': None}
{'func': 'most_str_eq', 'args': ['all_rows', 'station', 'kstc - tv'], 'result': True, 'ind': 0, 'tointer': 'for the station records of all rows , most of them fuzzily match to kstc - tv .', 'tostr': 'most_eq { all_rows ; station ; kstc - tv } = true'}
most_eq { all_rows ; station ; kstc - tv } = true
for the station records of all rows , most of them fuzzily match to kstc - tv .
1
1
{'most_str_eq_0': 0, 'result_1': 1, 'all_rows_2': 2, 'station_3': 3, 'kstc - tv_4': 4}
{'most_str_eq_0': 'most_str_eq', 'result_1': 'true', 'all_rows_2': 'all_rows', 'station_3': 'station', 'kstc - tv_4': 'kstc - tv'}
{'most_str_eq_0': [1], 'result_1': [], 'all_rows_2': [0], 'station_3': [0], 'kstc - tv_4': [0]}
['channel', 'station', 'video', 'aspect', 'psip short name', 'programming']
[['5.1', 'kstp - tv', '720p', '16:9', 'kstpdt1', 'main kstp - tv programming / abc'], ['5.2', 'kstc - tv', '720p', '16:9', 'kstcdt2', 'main kstc - tv programming'], ['5.3', 'kstc - tv', '480i', '16:9', 'kstcdt3', 'me - tv'], ['5.4', 'kstc - tv', '480i', '16:9', 'kstcdt4', 'antenna tv'], ['5.5', 'kstc - tv', '480i', '16:9', 'kstpdt2', 'live well network'], ['5.6', 'kstc - tv', '480i', '16:9', 'kstcdt6', 'this tv']]
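`most_eq` asks whether a strict majority of a column's cells match the value. A minimal sketch over the `station` column, where the "fuzzy" match is taken to be a substring test (an assumption):

```python
# Sketch of most_eq over the station column (5 of the 6 cells match).
stations = ["kstp - tv", "kstc - tv", "kstc - tv", "kstc - tv", "kstc - tv", "kstc - tv"]

def most_eq(values, target):
    # strict majority of the cells fuzzily match the target
    return sum(1 for v in values if target in v) > len(values) / 2

print(most_eq(stations, "kstc - tv"))  # True
```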
united states house of representatives elections , 1974
https://en.wikipedia.org/wiki/United_States_House_of_Representatives_elections%2C_1974
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/1-1341690-13.html.csv
unique
harold r collier is the only incumbent who retired .
{'scope': 'all', 'row': '3', 'col': '5', 'col_other': '2', 'criterion': 'fuzzily_match', 'value': 'retired', 'subset': None}
{'func': 'and', 'args': [{'func': 'only', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'result', 'retired'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose result record fuzzily matches to retired .', 'tostr': 'filter_eq { all_rows ; result ; retired }'}], 'result': True, 'ind': 1, 'tostr': 'only { filter_eq { all_rows ; result ; retired } }', 'tointer': 'select the rows whose result record fuzzily matches to retired . there is only one such row in the table .'}, {'func': 'str_eq', 'args': [{'func': 'str_hop', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'result', 'retired'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose result record fuzzily matches to retired .', 'tostr': 'filter_eq { all_rows ; result ; retired }'}, 'incumbent'], 'result': 'harold r collier', 'ind': 2, 'tostr': 'hop { filter_eq { all_rows ; result ; retired } ; incumbent }'}, 'harold r collier'], 'result': True, 'ind': 3, 'tostr': 'eq { hop { filter_eq { all_rows ; result ; retired } ; incumbent } ; harold r collier }', 'tointer': 'the incumbent record of this unqiue row is harold r collier .'}], 'result': True, 'ind': 4, 'tostr': 'and { only { filter_eq { all_rows ; result ; retired } } ; eq { hop { filter_eq { all_rows ; result ; retired } ; incumbent } ; harold r collier } } = true', 'tointer': 'select the rows whose result record fuzzily matches to retired . there is only one such row in the table . the incumbent record of this unqiue row is harold r collier .'}
and { only { filter_eq { all_rows ; result ; retired } } ; eq { hop { filter_eq { all_rows ; result ; retired } ; incumbent } ; harold r collier } } = true
select the rows whose result record fuzzily matches to retired . there is only one such row in the table . the incumbent record of this unique row is harold r collier .
6
5
{'and_4': 4, 'result_5': 5, 'only_1': 1, 'filter_str_eq_0': 0, 'all_rows_6': 6, 'result_7': 7, 'retired_8': 8, 'str_eq_3': 3, 'str_hop_2': 2, 'incumbent_9': 9, 'harold r collier_10': 10}
{'and_4': 'and', 'result_5': 'true', 'only_1': 'only', 'filter_str_eq_0': 'filter_str_eq', 'all_rows_6': 'all_rows', 'result_7': 'result', 'retired_8': 'retired', 'str_eq_3': 'str_eq', 'str_hop_2': 'str_hop', 'incumbent_9': 'incumbent', 'harold r collier_10': 'harold r collier'}
{'and_4': [5], 'result_5': [], 'only_1': [4], 'filter_str_eq_0': [1, 2], 'all_rows_6': [0], 'result_7': [0], 'retired_8': [0], 'str_eq_3': [4], 'str_hop_2': [3], 'incumbent_9': [2], 'harold r collier_10': [3]}
['district', 'incumbent', 'party', 'first elected', 'result', 'candidates']
[['illinois 3', 'robert p hanrahan', 'republican', '1972', 'lost re - election democratic gain', 'marty russo ( d ) 52.6 % robert p hanrahan ( r ) 47.4 %'], ['illinois 4', 'ed derwinski', 'republican', '1958', 're - elected', 'ed derwinski ( r ) 59.2 % ronald a rodger ( d ) 40.8 %'], ['illinois 6', 'harold r collier', 'republican', '1956', 'retired republican hold', 'henry hyde ( r ) 53.4 % edward v hanrahan ( d ) 46.6 %'], ['illinois 9', 'sidney r yates', 'democratic', '1964', 're - elected', 'sidney r yates ( d ) unopposed'], ['illinois 10', 'samuel h young', 'republican', '1972', 'lost re - election democratic gain', 'abner j mikva ( d ) 50.9 % samuel h young ( r ) 49.1 %'], ['illinois 12', 'phil crane', 'republican', '1969', 're - elected', 'phil crane ( r ) 61.1 % betty c spence ( d ) 38.9 %'], ['illinois 19', 'tom railsback', 'republican', '1966', 're - elected', 'tom railsback ( r ) 65.3 % jim gende ( d ) 34.7 %'], ['illinois 20', 'paul findley', 'republican', '1960', 're - elected', 'paul findley ( r ) 54.8 % peter f mack ( d ) 45.2 %'], ['illinois 23', 'melvin price', 'democratic', '1944', 're - elected', 'melvin price ( d ) 80.5 % scott randolph ( r ) 19.5 %']]
aqeel khan
https://en.wikipedia.org/wiki/Aqeel_Khan
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-11976006-2.html.csv
count
of the singles titles aqeel khan competed in during 2004 , two were played on a hard surface .
{'scope': 'subset', 'criterion': 'equal', 'value': 'hard', 'result': '2', 'col': '3', 'subset': {'col': '1', 'criterion': 'fuzzily_match', 'value': '2004'}}
{'func': 'eq', 'args': [{'func': 'count', 'args': [{'func': 'filter_str_eq', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'date', '2004'], 'result': None, 'ind': 0, 'tostr': 'filter_eq { all_rows ; date ; 2004 }', 'tointer': 'select the rows whose date record fuzzily matches to 2004 .'}, 'surface', 'hard'], 'result': None, 'ind': 1, 'tointer': 'select the rows whose date record fuzzily matches to 2004 . among these rows , select the rows whose surface record fuzzily matches to hard .', 'tostr': 'filter_eq { filter_eq { all_rows ; date ; 2004 } ; surface ; hard }'}], 'result': '2', 'ind': 2, 'tostr': 'count { filter_eq { filter_eq { all_rows ; date ; 2004 } ; surface ; hard } }', 'tointer': 'select the rows whose date record fuzzily matches to 2004 . among these rows , select the rows whose surface record fuzzily matches to hard . the number of such rows is 2 .'}, '2'], 'result': True, 'ind': 3, 'tostr': 'eq { count { filter_eq { filter_eq { all_rows ; date ; 2004 } ; surface ; hard } } ; 2 } = true', 'tointer': 'select the rows whose date record fuzzily matches to 2004 . among these rows , select the rows whose surface record fuzzily matches to hard . the number of such rows is 2 .'}
eq { count { filter_eq { filter_eq { all_rows ; date ; 2004 } ; surface ; hard } } ; 2 } = true
select the rows whose date record fuzzily matches to 2004 . among these rows , select the rows whose surface record fuzzily matches to hard . the number of such rows is 2 .
4
4
{'eq_3': 3, 'result_4': 4, 'count_2': 2, 'filter_str_eq_1': 1, 'filter_str_eq_0': 0, 'all_rows_5': 5, 'date_6': 6, '2004_7': 7, 'surface_8': 8, 'hard_9': 9, '2_10': 10}
{'eq_3': 'eq', 'result_4': 'true', 'count_2': 'count', 'filter_str_eq_1': 'filter_str_eq', 'filter_str_eq_0': 'filter_str_eq', 'all_rows_5': 'all_rows', 'date_6': 'date', '2004_7': '2004', 'surface_8': 'surface', 'hard_9': 'hard', '2_10': '2'}
{'eq_3': [4], 'result_4': [], 'count_2': [3], 'filter_str_eq_1': [2], 'filter_str_eq_0': [1], 'all_rows_5': [0], 'date_6': [0], '2004_7': [0], 'surface_8': [1], 'hard_9': [1], '2_10': [3]}
['date', 'tournament', 'surface', 'opponent in the final', 'score']
[['15 august 2004', 'islamabad', 'clay', 'toshiaki sakai', '7 - 6 ( 3 ) 7 - 6 ( 5 )'], ['22 august 2004', 'lahore', 'grass', 'toshiaki sakai', '1 - 6 6 - 4 6 - 3'], ['5 september 2004', 'karachi', 'hard', 'tommaso sanna', '6 - 3 6 - 4'], ['12 september 2004', 'hyderabad', 'hard', 'tai - wei liu', '6 - 7 ( 5 ) 6 - 1 6 - 1'], ['20 august 2006', 'delhi', 'hard', 'ravishankar pathanjali', '7 - 6 ( 7 ) 6 - 4'], ['12 august 2007', 'ludhiana', 'hard', 'aditya madkekar', '6 - 3 7 - 6 ( 5 )'], ['29 october 2007', 'lahore', 'grass', 'divij sharan', '4 - 6 6 - 3 6 - 4']]
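The nested `filter_eq` chain followed by `count` can be sketched as two list comprehensions, where the "fuzzy" match on the date is a substring test for '2004':

```python
# Sketch: count the 2004 finals played on a hard surface (expected: 2).
rows = [
    ("15 august 2004", "clay"), ("22 august 2004", "grass"),
    ("5 september 2004", "hard"), ("12 september 2004", "hard"),
    ("20 august 2006", "hard"), ("12 august 2007", "hard"),
    ("29 october 2007", "grass"),
]
in_2004 = [r for r in rows if "2004" in r[0]]       # fuzzy filter on the date column
hard_2004 = [r for r in in_2004 if r[1] == "hard"]  # exact filter on the surface column
print(len(hard_2004) == 2)                          # True
```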
washington redskins draft history
https://en.wikipedia.org/wiki/Washington_Redskins_draft_history
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-17100961-8.html.csv
ordinal
charley holm was the second highest overall pick by the washington redskins .
{'row': '2', 'col': '3', 'order': '2', 'col_other': '4', 'max_or_min': 'min_to_max', 'value_mentioned': 'no', 'scope': 'all', 'subset': None}
{'func': 'str_eq', 'args': [{'func': 'str_hop', 'args': [{'func': 'nth_argmin', 'args': ['all_rows', 'overall', '2'], 'result': None, 'ind': 0, 'tostr': 'nth_argmin { all_rows ; overall ; 2 }'}, 'name'], 'result': 'charley holm', 'ind': 1, 'tostr': 'hop { nth_argmin { all_rows ; overall ; 2 } ; name }'}, 'charley holm'], 'result': True, 'ind': 2, 'tostr': 'eq { hop { nth_argmin { all_rows ; overall ; 2 } ; name } ; charley holm } = true', 'tointer': 'select the row whose overall record of all rows is 2nd minimum . the name record of this row is charley holm .'}
eq { hop { nth_argmin { all_rows ; overall ; 2 } ; name } ; charley holm } = true
select the row whose overall record of all rows is 2nd minimum . the name record of this row is charley holm .
3
3
{'str_eq_2': 2, 'result_3': 3, 'str_hop_1': 1, 'nth_argmin_0': 0, 'all_rows_4': 4, 'overall_5': 5, '2_6': 6, 'name_7': 7, 'charley holm_8': 8}
{'str_eq_2': 'str_eq', 'result_3': 'true', 'str_hop_1': 'str_hop', 'nth_argmin_0': 'nth_argmin', 'all_rows_4': 'all_rows', 'overall_5': 'overall', '2_6': '2', 'name_7': 'name', 'charley holm_8': 'charley holm'}
{'str_eq_2': [3], 'result_3': [], 'str_hop_1': [2], 'nth_argmin_0': [1], 'all_rows_4': [0], 'overall_5': [0], '2_6': [0], 'name_7': [1], 'charley holm_8': [2]}
['round', 'pick', 'overall', 'name', 'position', 'college']
[['1', '8', '8', 'i b hale', 'ot', 'texas christian'], ['3', '8', '23', 'charley holm', 'rb', 'alabama'], ['5', '8', '38', 'dick todd', 'rb', 'texas a & m'], ['6', '8', '48', 'dave anderson', 'rb', 'california'], ['7', '8', '58', 'quinton lumpkin', 'c', 'georgia'], ['8', '8', '68', 'bo russell', 'ot', 'auburn'], ['9', '8', '78', 'wilbur moore', 'hb', 'minnesota'], ['10', '8', '88', 'jim johnston', 'rb', 'washington'], ['11', '8', '98', 'jim german', 'rb', 'centre'], ['12', '8', '108', "bob o'mara", 'rb', 'duke'], ['13', '8', '118', 'steve slivinski', 'g', 'washington'], ['14', '8', '128', 'bob hoffman', 'rb', 'southern california'], ['15', '8', '138', 'eric tipton', 'rb', 'duke'], ['16', '8', '148', 'dick farman', 'ot', 'washington state'], ['17', '8', '158', 'clyde shugart', 'ot', 'iowa state'], ['18', '8', '168', 'boyd morgan', 'rb', 'southern california'], ['19', '8', '178', 'phil smith', 'ot', "st benedict 's"], ['20', '8', '188', 'paul coop', 'ot', 'centre'], ['21', '3', '193', 'matt kuber', 'g', 'villanova'], ['22', '3', '198', 'al cruver', 'rb', 'washington state']]
algeria at the 2008 summer olympics
https://en.wikipedia.org/wiki/Algeria_at_the_2008_Summer_Olympics
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/1-17427004-7.html.csv
majority
the majority of algerian athletes at the 2008 summer olympics did not advance to the quarterfinals .
{'scope': 'all', 'col': '5', 'most_or_all': 'most', 'criterion': 'equal', 'value': 'did not advance', 'subset': None}
{'func': 'most_str_eq', 'args': ['all_rows', 'quarterfinals', 'did not advance'], 'result': True, 'ind': 0, 'tointer': 'for the quarterfinals records of all rows , most of them fuzzily match to did not advance .', 'tostr': 'most_eq { all_rows ; quarterfinals ; did not advance } = true'}
most_eq { all_rows ; quarterfinals ; did not advance } = true
for the quarterfinals records of all rows , most of them fuzzily match to did not advance .
1
1
{'most_str_eq_0': 0, 'result_1': 1, 'all_rows_2': 2, 'quarterfinals_3': 3, 'did not advance_4': 4}
{'most_str_eq_0': 'most_str_eq', 'result_1': 'true', 'all_rows_2': 'all_rows', 'quarterfinals_3': 'quarterfinals', 'did not advance_4': 'did not advance'}
{'most_str_eq_0': [1], 'result_1': [], 'all_rows_2': [0], 'quarterfinals_3': [0], 'did not advance_4': [0]}
['athlete', 'event', 'round of 32', 'round of 16', 'quarterfinals', 'semifinals']
[['abdelhalim ouradi', 'bantamweight', 'nevin ( irl ) l 4 - 9', 'did not advance', 'did not advance', 'did not advance'], ['abdelkader chadi', 'featherweight', 'bye', 'adi ( tha ) w 7 - 6', 'kılıç ( tur ) l 6 - 13', 'did not advance'], ['hamza kramou', 'lightweight', 'ugás ( cub ) l 3 - 21', 'did not advance', 'did not advance', 'did not advance'], ['nabil kassel', 'middleweight', 'bye', 'sutherland ( irl ) l 14 - 21', 'did not advance', 'did not advance'], ['abdelhafid benchebla', 'light heavyweight', 'kumar ( ind ) w 23 - 3', 'yasser ( egy ) w 13 - 6', 'zhang xp ( chn ) l 7 - 12', 'did not advance'], ['abdelaziz touilbini', 'heavyweight', 'n / a', 'wilder ( usa ) l 4 - 10', 'did not advance', 'did not advance']]
greater antilles
https://en.wikipedia.org/wiki/Greater_Antilles
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/1-298550-1.html.csv
ordinal
the dominican republic has the second largest population in the greater antilles .
{'row': '3', 'col': '3', 'order': '2', 'col_other': '1', 'max_or_min': 'max_to_min', 'value_mentioned': 'no', 'scope': 'all', 'subset': None}
{'func': 'str_eq', 'args': [{'func': 'str_hop', 'args': [{'func': 'nth_argmax', 'args': ['all_rows', 'population ( 1 july 2005 est )', '2'], 'result': None, 'ind': 0, 'tostr': 'nth_argmax { all_rows ; population ( 1 july 2005 est ) ; 2 }'}, 'country with flag'], 'result': 'dominican republic', 'ind': 1, 'tostr': 'hop { nth_argmax { all_rows ; population ( 1 july 2005 est ) ; 2 } ; country with flag }'}, 'dominican republic'], 'result': True, 'ind': 2, 'tostr': 'eq { hop { nth_argmax { all_rows ; population ( 1 july 2005 est ) ; 2 } ; country with flag } ; dominican republic } = true', 'tointer': 'select the row whose population ( 1 july 2005 est ) record of all rows is 2nd maximum . the country with flag record of this row is dominican republic .'}
eq { hop { nth_argmax { all_rows ; population ( 1 july 2005 est ) ; 2 } ; country with flag } ; dominican republic } = true
select the row whose population ( 1 july 2005 est ) record of all rows is 2nd maximum . the country with flag record of this row is dominican republic .
3
3
{'str_eq_2': 2, 'result_3': 3, 'str_hop_1': 1, 'nth_argmax_0': 0, 'all_rows_4': 4, 'population (1 july 2005 est)_5': 5, '2_6': 6, 'country with flag_7': 7, 'dominican republic_8': 8}
{'str_eq_2': 'str_eq', 'result_3': 'true', 'str_hop_1': 'str_hop', 'nth_argmax_0': 'nth_argmax', 'all_rows_4': 'all_rows', 'population (1 july 2005 est)_5': 'population ( 1 july 2005 est )', '2_6': '2', 'country with flag_7': 'country with flag', 'dominican republic_8': 'dominican republic'}
{'str_eq_2': [3], 'result_3': [], 'str_hop_1': [2], 'nth_argmax_0': [1], 'all_rows_4': [0], 'population (1 july 2005 est)_5': [0], '2_6': [0], 'country with flag_7': [1], 'dominican republic_8': [2]}
['country with flag', 'area ( km square )', 'population ( 1 july 2005 est )', 'population density ( per km square )', 'capital']
[['cuba', '110860', '11346670', '102.4', 'havana'], ['cayman islands ( uk )', '264', '54878', '207.9', 'george town'], ['dominican republic', '48730', '8950034', '183.7', 'santo domingo'], ['haiti', '27750', '8121622', '292.7', 'port - au - prince'], ['jamaica', '10991', '2731832', '248.6', 'kingston'], ['puerto rico ( usa )', '9104', '3916632', '430.2', 'san juan']]
alexia dechaume - balleret
https://en.wikipedia.org/wiki/Alexia_Dechaume-Balleret
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-16570128-5.html.csv
majority
alexia dechaume was the winner in the majority of the listed matches .
{'scope': 'all', 'col': '1', 'most_or_all': 'most', 'criterion': 'equal', 'value': 'winner', 'subset': None}
{'func': 'most_str_eq', 'args': ['all_rows', 'outcome', 'winner'], 'result': True, 'ind': 0, 'tointer': 'for the outcome records of all rows , most of them fuzzily match to winner .', 'tostr': 'most_eq { all_rows ; outcome ; winner } = true'}
most_eq { all_rows ; outcome ; winner } = true
for the outcome records of all rows , most of them fuzzily match to winner .
1
1
{'most_str_eq_0': 0, 'result_1': 1, 'all_rows_2': 2, 'outcome_3': 3, 'winner_4': 4}
{'most_str_eq_0': 'most_str_eq', 'result_1': 'true', 'all_rows_2': 'all_rows', 'outcome_3': 'outcome', 'winner_4': 'winner'}
{'most_str_eq_0': [1], 'result_1': [], 'all_rows_2': [0], 'outcome_3': [0], 'winner_4': [0]}
['outcome', 'date', 'tournament', 'surface', 'partner', 'opponents', 'score']
[['winner', '25 september 1988', 'paris , france', 'clay', 'emmanuelle derly', 'louise field nathalie herreman', '6 - 0 , 6 - 2'], ['runner - up', '23 september 1990', 'paris , france', 'clay', 'nathalie herreman', 'kristin godridge kirrily sharpe', '6 - 4 , 3 - 6 , 1 - 6'], ['winner', '5 may 1991', 'taranto , italy', 'clay', 'florencia labat', 'laura golarsa ann grossman', '6 - 2 , 7 - 5'], ['runner - up', '22 september 1991', 'paris , france', 'clay', 'julie halard', 'petra langrová radomira zrubáková', '4 - 6 , 4 - 6'], ['winner', '12 july 1992', 'kitzbühel , austria', 'clay', 'florencia labat', 'amanda coetzer wiltrud probst', '6 - 3 , 6 - 3'], ['winner', '26 july 1992', 'san marino', 'clay', 'florencia labat', 'sandra cecchini laura garrone', '7 - 6 , 7 - 5'], ['winner', '30 august 1992', 'schenectady , new york , usa', 'hard', 'florencia labat', 'ginger helgeson shannan mccarthy', '6 - 3 , 1 - 6 , 6 - 2'], ['runner - up', '6 august 1995', 'san diego , california , usa', 'hard', 'sandrine testud', 'gigi fernández natalia zvereva', '2 - 6 , 1 - 6'], ['runner - up', '5 may 1996', 'bol , croatia', 'clay', 'alexandra fusai', 'laura montalvo paola suárez', '7 - 6 , 3 - 6 , 4 - 6'], ['winner', '20 april 1997', 'tokyo , japan', 'hard', 'rika hiraki', 'kerry - anne guse corina morariu', '6 - 4 , 6 - 2'], ['runner - up', '16 january 1999', 'hobart , australia', 'hard', 'émilie loit', 'mariaan de swardt elena tatarkova', '1 - 6 , 2 - 6']]
all - time saint louis athletica roster
https://en.wikipedia.org/wiki/All-time_Saint_Louis_Athletica_roster
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/1-23963781-3.html.csv
majority
most of the players on the all-time saint louis athletica roster are from the united states .
{'scope': 'all', 'col': '2', 'most_or_all': 'most', 'criterion': 'equal', 'value': 'united states', 'subset': None}
{'func': 'most_str_eq', 'args': ['all_rows', 'nationality', 'united states'], 'result': True, 'ind': 0, 'tointer': 'for the nationality records of all rows , most of them fuzzily match to united states .', 'tostr': 'most_eq { all_rows ; nationality ; united states } = true'}
most_eq { all_rows ; nationality ; united states } = true
for the nationality records of all rows , most of them fuzzily match to united states .
1
1
{'most_str_eq_0': 0, 'result_1': 1, 'all_rows_2': 2, 'nationality_3': 3, 'united states_4': 4}
{'most_str_eq_0': 'most_str_eq', 'result_1': 'true', 'all_rows_2': 'all_rows', 'nationality_3': 'nationality', 'united states_4': 'united states'}
{'most_str_eq_0': [1], 'result_1': [], 'all_rows_2': [0], 'nationality_3': [0], 'united states_4': [0]}
['name', 'nationality', 'position', 'appearances', 'starts', 'minutes', 'goals']
[['lori chalupny', 'united states', 'mf', '1', '1', '90', '0'], ['amanda cinalli', 'united states', 'fw', '1', '1', '90', '0'], ['niki cross', 'united states', 'df', '1', '0', '30', '0'], ['tina ellertson', 'united states', 'df', '1', '1', '90', '0'], ['kendall fletcher', 'united states', 'df', '1', '1', '90', '0'], ['stephanie logterman', 'united states', 'df', '1', '0', '26', '0'], ['kia mcneill', 'united states', 'df', '1', '1', '64', '0'], ['ashlee pistorius', 'united states', 'fw', '1', '0', '19', '0'], ['hope solo', 'united states', 'gk', '1', '1', '90', '0'], ['melissa tancredi', 'canada', 'fw', '1', '1', '90', '0'], ['sarah walsh', 'australia', 'fw', '1', '1', '90', '0'], ['elise weber', 'united states', 'df', '1', '1', '90', '0'], ['christie welsh', 'united states', 'fw', '1', '1', '71', '0']]
list of state leaders in 990s bc
https://en.wikipedia.org/wiki/List_of_state_leaders_in_990s_BC
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-17310478-8.html.csv
majority
all of the state leaders in 990s bc were sovereign .
{'scope': 'all', 'col': '2', 'most_or_all': 'all', 'criterion': 'equal', 'value': 'sovereign', 'subset': None}
{'func': 'all_str_eq', 'args': ['all_rows', 'type', 'sovereign'], 'result': True, 'ind': 0, 'tointer': 'for the type records of all rows , all of them fuzzily match to sovereign .', 'tostr': 'all_eq { all_rows ; type ; sovereign } = true'}
all_eq { all_rows ; type ; sovereign } = true
for the type records of all rows , all of them fuzzily match to sovereign .
1
1
{'all_str_eq_0': 0, 'result_1': 1, 'all_rows_2': 2, 'type_3': 3, 'sovereign_4': 4}
{'all_str_eq_0': 'all_str_eq', 'result_1': 'true', 'all_rows_2': 'all_rows', 'type_3': 'type', 'sovereign_4': 'sovereign'}
{'all_str_eq_0': [1], 'result_1': [], 'all_rows_2': [0], 'type_3': [0], 'sovereign_4': [0]}
['state', 'type', 'name', 'title', 'royal house', 'from']
[['cao', 'sovereign', 'zhong', 'lord', '-', '1002 bc'], ['lu', 'sovereign', 'bo qin', 'ruler', 'ji', '1043 bc'], ['lu', 'sovereign', 'kao', 'duke', 'ji', '997 bc'], ['lu', 'sovereign', 'yang', 'duke', 'ji', '993 bc'], ['qi', 'sovereign', 'ding', 'duke', '-', '999 bc']]
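`all_eq` is the universal counterpart of `most_eq`; Python's built-in `all` covers it directly in a minimal sketch over the `type` column:

```python
# Sketch: every 'type' cell matches 'sovereign'.
types = ["sovereign", "sovereign", "sovereign", "sovereign", "sovereign"]
print(all(t == "sovereign" for t in types))  # True
```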
1968 cleveland browns season
https://en.wikipedia.org/wiki/1968_Cleveland_Browns_season
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-10652150-1.html.csv
count
in the 1968 cleveland browns season , 3 players at the running back position were chosen after round 4 .
{'scope': 'subset', 'criterion': 'greater_than', 'value': '4', 'result': '3', 'col': '1', 'subset': {'col': '4', 'criterion': 'equal', 'value': 'running back'}}
{'func': 'eq', 'args': [{'func': 'count', 'args': [{'func': 'filter_greater', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'position', 'running back'], 'result': None, 'ind': 0, 'tostr': 'filter_eq { all_rows ; position ; running back }', 'tointer': 'select the rows whose position record fuzzily matches to running back .'}, 'round', '4'], 'result': None, 'ind': 1, 'tointer': 'select the rows whose position record fuzzily matches to running back . among these rows , select the rows whose round record is greater than 4 .', 'tostr': 'filter_greater { filter_eq { all_rows ; position ; running back } ; round ; 4 }'}], 'result': '3', 'ind': 2, 'tostr': 'count { filter_greater { filter_eq { all_rows ; position ; running back } ; round ; 4 } }', 'tointer': 'select the rows whose position record fuzzily matches to running back . among these rows , select the rows whose round record is greater than 4 . the number of such rows is 3 .'}, '3'], 'result': True, 'ind': 3, 'tostr': 'eq { count { filter_greater { filter_eq { all_rows ; position ; running back } ; round ; 4 } } ; 3 } = true', 'tointer': 'select the rows whose position record fuzzily matches to running back . among these rows , select the rows whose round record is greater than 4 . the number of such rows is 3 .'}
eq { count { filter_greater { filter_eq { all_rows ; position ; running back } ; round ; 4 } } ; 3 } = true
select the rows whose position record fuzzily matches to running back . among these rows , select the rows whose round record is greater than 4 . the number of such rows is 3 .
4
4
{'eq_3': 3, 'result_4': 4, 'count_2': 2, 'filter_greater_1': 1, 'filter_str_eq_0': 0, 'all_rows_5': 5, 'position_6': 6, 'running back_7': 7, 'round_8': 8, '4_9': 9, '3_10': 10}
{'eq_3': 'eq', 'result_4': 'true', 'count_2': 'count', 'filter_greater_1': 'filter_greater', 'filter_str_eq_0': 'filter_str_eq', 'all_rows_5': 'all_rows', 'position_6': 'position', 'running back_7': 'running back', 'round_8': 'round', '4_9': '4', '3_10': '3'}
{'eq_3': [4], 'result_4': [], 'count_2': [3], 'filter_greater_1': [2], 'filter_str_eq_0': [1], 'all_rows_5': [0], 'position_6': [0], 'running back_7': [0], 'round_8': [1], '4_9': [1], '3_10': [3]}
['round', 'overall', 'player', 'position', 'school / club team']
[['1', '21', 'marvin upshaw', 'defensive end', 'trinity'], ['2', '47', 'john garlington', 'linebacker', 'lsu'], ['3', '64', 'harry olszewski', 'guard', 'clemson'], ['3', '66', 'reece morrison', 'running back', 'texas state'], ['4', '104', 'wayne meylan', 'linebacker', 'nebraska'], ['5', '131', 'mike wempe', 'tackle', 'missouri'], ['5', '134', 'jackie jackson', 'running back', 'clemson'], ['6', '152', 'nate james', 'defensive back', 'florida a & m'], ['7', '186', 'dale brady', 'running back', 'memphis'], ['8', '212', 'tom schoen', 'defensive back', 'notre dame'], ['9', '238', 'david porter', 'defensive tackle', 'michigan'], ['10', '255', 'james greer', 'defensive end', 'stephen f austin'], ['10', '267', 'alvin mitchell', 'defensive back', 'morgan state'], ['11', '293', 'jim alcorn', 'quarterback', 'clarion'], ['12', '319', 'tom beutler', 'linebacker', 'toledo'], ['13', '348', 'terry sellers', 'defensive back', 'georgia'], ['14', '374', 'edgar whipps', 'running back', 'jackson state'], ['15', '400', 'bob baxter', 'flanker', 'memphis'], ['16', '429', 'dick sievert', 'defensive end', 'wisconsin - river falls'], ['17', '455', 'wayne mcduffie', 'center', 'florida state']]
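As a sanity check on the count claim, here is a short sketch over just the 'round' and 'position' columns of the draft table; the list comprehensions stand in for filter_eq and filter_greater and are an assumption about how the form is evaluated, not the dataset's own code.

# Evaluate: eq { count { filter_greater { filter_eq { all_rows ; position ; running back } ; round ; 4 } } ; 3 }
picks = [  # (round, position) for each pick in the table above
    (1, 'defensive end'), (2, 'linebacker'), (3, 'guard'), (3, 'running back'),
    (4, 'linebacker'), (5, 'tackle'), (5, 'running back'), (6, 'defensive back'),
    (7, 'running back'), (8, 'defensive back'), (9, 'defensive tackle'),
    (10, 'defensive end'), (10, 'defensive back'), (11, 'quarterback'),
    (12, 'linebacker'), (13, 'defensive back'), (14, 'running back'),
    (15, 'flanker'), (16, 'defensive end'), (17, 'center'),
]

running_backs = [p for p in picks if p[1] == 'running back']  # filter_eq
after_round_4 = [p for p in running_backs if p[0] > 4]        # filter_greater
print(len(after_round_4) == 3)  # True: the picks in rounds 5, 7 and 14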
statistics relating to enlargement of the european union
https://en.wikipedia.org/wiki/Statistics_relating_to_enlargement_of_the_European_Union
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/1-1307842-6.html.csv
comparative
in the statistics relating to enlargement of the european union , sweden has a larger population than austria .
{'row_1': '3', 'row_2': '1', 'col': '2', 'col_other': '1', 'relation': 'greater', 'record_mentioned': 'no', 'diff_result': None}
{'func': 'greater', 'args': [{'func': 'num_hop', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'member countries', 'sweden'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose member countries record fuzzily matches to sweden .', 'tostr': 'filter_eq { all_rows ; member countries ; sweden }'}, 'population'], 'result': None, 'ind': 2, 'tostr': 'hop { filter_eq { all_rows ; member countries ; sweden } ; population }', 'tointer': 'select the rows whose member countries record fuzzily matches to sweden . take the population record of this row .'}, {'func': 'num_hop', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'member countries', 'austria'], 'result': None, 'ind': 1, 'tointer': 'select the rows whose member countries record fuzzily matches to austria .', 'tostr': 'filter_eq { all_rows ; member countries ; austria }'}, 'population'], 'result': None, 'ind': 3, 'tostr': 'hop { filter_eq { all_rows ; member countries ; austria } ; population }', 'tointer': 'select the rows whose member countries record fuzzily matches to austria . take the population record of this row .'}], 'result': True, 'ind': 4, 'tostr': 'greater { hop { filter_eq { all_rows ; member countries ; sweden } ; population } ; hop { filter_eq { all_rows ; member countries ; austria } ; population } } = true', 'tointer': 'select the rows whose member countries record fuzzily matches to sweden . take the population record of this row . select the rows whose member countries record fuzzily matches to austria . take the population record of this row . the first record is greater than the second record .'}
greater { hop { filter_eq { all_rows ; member countries ; sweden } ; population } ; hop { filter_eq { all_rows ; member countries ; austria } ; population } } = true
select the rows whose member countries record fuzzily matches to sweden . take the population record of this row . select the rows whose member countries record fuzzily matches to austria . take the population record of this row . the first record is greater than the second record .
5
5
{'greater_4': 4, 'result_5': 5, 'num_hop_2': 2, 'filter_str_eq_0': 0, 'all_rows_6': 6, 'member countries_7': 7, 'sweden_8': 8, 'population_9': 9, 'num_hop_3': 3, 'filter_str_eq_1': 1, 'all_rows_10': 10, 'member countries_11': 11, 'austria_12': 12, 'population_13': 13}
{'greater_4': 'greater', 'result_5': 'true', 'num_hop_2': 'num_hop', 'filter_str_eq_0': 'filter_str_eq', 'all_rows_6': 'all_rows', 'member countries_7': 'member countries', 'sweden_8': 'sweden', 'population_9': 'population', 'num_hop_3': 'num_hop', 'filter_str_eq_1': 'filter_str_eq', 'all_rows_10': 'all_rows', 'member countries_11': 'member countries', 'austria_12': 'austria', 'population_13': 'population'}
{'greater_4': [5], 'result_5': [], 'num_hop_2': [4], 'filter_str_eq_0': [2], 'all_rows_6': [0], 'member countries_7': [0], 'sweden_8': [0], 'population_9': [2], 'num_hop_3': [4], 'filter_str_eq_1': [3], 'all_rows_10': [1], 'member countries_11': [1], 'austria_12': [1], 'population_13': [3]}
['member countries', 'population', 'area ( km square )', 'gdp ( billion us )', 'gdp per capita ( us )']
[['austria', '8206524', '83871', '145.238', '18048'], ['finland', '5261008', '338145', '80.955', '15859'], ['sweden', '9047752', '449964', '156.640', '17644'], ['accession countries', '22029977', '871980', '382.833', '17378'], ['existing members ( 1995 )', '350909402', '2495174', '5894.232', '16797']]
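The comparative form reduces to looking up two population figures and comparing them. A minimal sketch, with the values copied from the 'population' column above.

# Evaluate: greater { hop { ... ; sweden ; population } ; hop { ... ; austria ; population } }
population = {
    'austria': 8206524,
    'finland': 5261008,
    'sweden': 9047752,
}  # 'member countries' -> 'population', from the table above

print(population['sweden'] > population['austria'])  # True: 9047752 > 8206524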
2007 - 08 rangers f.c. season
https://en.wikipedia.org/wiki/2007%E2%80%9308_Rangers_F.C._season
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-11221038-3.html.csv
count
3 players left rangers f.c. during the winter transfer window of the 2007 - 08 season .
{'scope': 'all', 'criterion': 'equal', 'value': 'winter', 'result': '3', 'col': '5', 'subset': None}
{'func': 'eq', 'args': [{'func': 'count', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'transfer window', 'winter'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose transfer window record fuzzily matches to winter .', 'tostr': 'filter_eq { all_rows ; transfer window ; winter }'}], 'result': '3', 'ind': 1, 'tostr': 'count { filter_eq { all_rows ; transfer window ; winter } }', 'tointer': 'select the rows whose transfer window record fuzzily matches to winter . the number of such rows is 3 .'}, '3'], 'result': True, 'ind': 2, 'tostr': 'eq { count { filter_eq { all_rows ; transfer window ; winter } } ; 3 } = true', 'tointer': 'select the rows whose transfer window record fuzzily matches to winter . the number of such rows is 3 .'}
eq { count { filter_eq { all_rows ; transfer window ; winter } } ; 3 } = true
select the rows whose transfer window record fuzzily matches to winter . the number of such rows is 3 .
3
3
{'eq_2': 2, 'result_3': 3, 'count_1': 1, 'filter_str_eq_0': 0, 'all_rows_4': 4, 'transfer window_5': 5, 'winter_6': 6, '3_7': 7}
{'eq_2': 'eq', 'result_3': 'true', 'count_1': 'count', 'filter_str_eq_0': 'filter_str_eq', 'all_rows_4': 'all_rows', 'transfer window_5': 'transfer window', 'winter_6': 'winter', '3_7': '3'}
{'eq_2': [3], 'result_3': [], 'count_1': [2], 'filter_str_eq_0': [1], 'all_rows_4': [0], 'transfer window_5': [0], 'winter_6': [0], '3_7': [2]}
['nat', 'name', 'moving to', 'type', 'transfer window', 'transfer fee']
[['sco', 'martin ure', "queen 's park", 'end of contract', 'summer', 'n / a'], ['sco', 'scott hadden', 'ross county', 'end of contract', 'summer', 'n / a'], ['sco', 'steven campbell', 'free agent', 'end of contract', 'summer', 'n / a'], ['england', 'joe sagar', 'free agent', 'end of contract', 'summer', 'n / a'], ['sen', "makhtar n'diaye", 'free agent', 'end of contract', 'summer', 'n / a'], ['fra', 'antoine ponroy', 'free agent', 'end of contract', 'summer', 'n / a'], ['ger', 'stefan klos', 'retired', 'end of contract', 'summer', 'n / a'], ['croatia', 'dado pršo', 'retired', 'end of contract', 'summer', 'n / a'], ['sco', 'gavin rae', 'cardiff city', 'end of contract', 'summer', 'n / a'], ['sco', 'brian gilmour', 'queen of the south', 'end of contract', 'summer', 'n / a'], ['swe', 'karl svensson', 'caen', 'transfer', 'summer', '0.7 m'], ['eng', 'lee robinson', 'greenock morton', 'loan', 'summer', 'n / a'], ['cze', 'libor sionko', 'copenhagen', 'transfer', 'summer', '0.09 m'], ['slovakia', 'filip šebo', 'valenciennes', 'loan', 'summer', 'n / a'], ['sco', 'ian murray', 'norwich city', 'transfer', 'summer', 'free'], ['eng', 'ugo ehiogu', 'sheffield united', 'transfer', 'winter', 'free'], ['sco', 'alan hutton', 'tottenham hotspur', 'transfer', 'winter', '9 m'], ['nir', 'roy carroll', 'derby county', 'transfer', 'winter', 'free']]
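The count here is a single filter over the 'transfer window' column. A sketch, with that column transcribed row by row from the table above.

# Evaluate: eq { count { filter_eq { all_rows ; transfer window ; winter } } ; 3 }
windows = [
    'summer', 'summer', 'summer', 'summer', 'summer', 'summer',
    'summer', 'summer', 'summer', 'summer', 'summer', 'summer',
    'summer', 'summer', 'summer', 'winter', 'winter', 'winter',
]  # one entry per row, in table order

winter_departures = [w for w in windows if w == 'winter']  # filter_eq
print(len(winter_departures) == 3)  # True: ehiogu, hutton and carroll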
vladimir koman
https://en.wikipedia.org/wiki/Vladimir_Koman
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-10521952-3.html.csv
superlative
vladimir koman 's highest scoring match took place in october of 2010 .
{'scope': 'all', 'col_superlative': '4', 'row_superlative': '2', 'value_mentioned': 'no', 'max_or_min': 'max', 'other_col': '1', 'subset': None}
{'func': 'str_eq', 'args': [{'func': 'str_hop', 'args': [{'func': 'argmax', 'args': ['all_rows', 'result'], 'result': None, 'ind': 0, 'tostr': 'argmax { all_rows ; result }'}, 'date'], 'result': '8 october 2010', 'ind': 1, 'tostr': 'hop { argmax { all_rows ; result } ; date }'}, '8 october 2010'], 'result': True, 'ind': 2, 'tostr': 'eq { hop { argmax { all_rows ; result } ; date } ; 8 october 2010 } = true', 'tointer': 'select the row whose result record of all rows is maximum . the date record of this row is 8 october 2010 .'}
eq { hop { argmax { all_rows ; result } ; date } ; 8 october 2010 } = true
select the row whose result record of all rows is maximum . the date record of this row is 8 october 2010 .
3
3
{'str_eq_2': 2, 'result_3': 3, 'str_hop_1': 1, 'argmax_0': 0, 'all_rows_4': 4, 'result_5': 5, 'date_6': 6, '8 october 2010_7': 7}
{'str_eq_2': 'str_eq', 'result_3': 'true', 'str_hop_1': 'str_hop', 'argmax_0': 'argmax', 'all_rows_4': 'all_rows', 'result_5': 'result', 'date_6': 'date', '8 october 2010_7': '8 october 2010'}
{'str_eq_2': [3], 'result_3': [], 'str_hop_1': [2], 'argmax_0': [1], 'all_rows_4': [0], 'result_5': [0], 'date_6': [1], '8 october 2010_7': [2]}
['date', 'venue', 'score', 'result', 'competition']
[['7 september 2010', 'szusza stadium , budapest', '2 - 0', '2 - 1', 'uefa euro 2012 qualifying'], ['8 october 2010', 'puskás stadium , budapest', '6 - 0', '8 - 0', 'uefa euro 2012 qualifying'], ['7 june 2011', 'stadio olimpico , serravalle', '3 - 0', '3 - 0', 'uefa euro 2012 qualifying'], ['10 august 2011', 'puskás stadium , budapest', '1 - 0', '4 - 0', 'international friendly'], ['11 september 2011', 'puskás stadium , budapest', '4 - 0', '5 - 0', 'international friendly'], ['7 september 2012', 'estadi comunal , andorra la vella', '5 - 0', '5 - 0', '2014 fifa world cup qualifying'], ['16 october 2012', 'puskás stadium , budapest', '1 - 1', '3 - 1', '2014 fifa world cup qualifying']]
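argmax over the 'result' column needs a numeric reading of score strings such as '8 - 0'. The sketch below assumes the leading number (the goals scored by Koman's side) is the comparison key; that choice is mine, not something the record documents.

# Evaluate: eq { hop { argmax { all_rows ; result } ; date } ; 8 october 2010 }
matches = [
    ('7 september 2010', '2 - 1'), ('8 october 2010', '8 - 0'),
    ('7 june 2011', '3 - 0'), ('10 august 2011', '4 - 0'),
    ('11 september 2011', '5 - 0'), ('7 september 2012', '5 - 0'),
    ('16 october 2012', '3 - 1'),
]  # (date, result) columns only

def goals_for(result):
    # Assumed key: the first number in the 'result' string.
    return int(result.split('-')[0])

best = max(matches, key=lambda m: goals_for(m[1]))  # argmax over 'result'
print(best[0] == '8 october 2010')                  # True: the 8 - 0 win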
1962 oakland raiders season
https://en.wikipedia.org/wiki/1962_Oakland_Raiders_season
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-12676700-1.html.csv
comparative
the oakland raiders game attendance was higher on november 18 , 1962 , at 12500 , than on december 16 , 1962 , when it was 8000 .
{'row_1': '10', 'row_2': '14', 'col': '5', 'col_other': '2', 'relation': 'greater', 'record_mentioned': 'yes', 'diff_result': None}
{'func': 'and', 'args': [{'func': 'greater', 'args': [{'func': 'num_hop', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'date', 'november 18 , 1962'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose date record fuzzily matches to november 18 , 1962 .', 'tostr': 'filter_eq { all_rows ; date ; november 18 , 1962 }'}, 'attendance'], 'result': None, 'ind': 2, 'tostr': 'hop { filter_eq { all_rows ; date ; november 18 , 1962 } ; attendance }', 'tointer': 'select the rows whose date record fuzzily matches to november 18 , 1962 . take the attendance record of this row .'}, {'func': 'num_hop', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'date', 'december 16 , 1962'], 'result': None, 'ind': 1, 'tointer': 'select the rows whose date record fuzzily matches to december 16 , 1962 .', 'tostr': 'filter_eq { all_rows ; date ; december 16 , 1962 }'}, 'attendance'], 'result': None, 'ind': 3, 'tostr': 'hop { filter_eq { all_rows ; date ; december 16 , 1962 } ; attendance }', 'tointer': 'select the rows whose date record fuzzily matches to december 16 , 1962 . take the attendance record of this row .'}], 'result': True, 'ind': 4, 'tostr': 'greater { hop { filter_eq { all_rows ; date ; november 18 , 1962 } ; attendance } ; hop { filter_eq { all_rows ; date ; december 16 , 1962 } ; attendance } }', 'tointer': 'select the rows whose date record fuzzily matches to november 18 , 1962 . take the attendance record of this row . select the rows whose date record fuzzily matches to december 16 , 1962 . take the attendance record of this row . the first record is greater than the second record .'}, {'func': 'and', 'args': [{'func': 'eq', 'args': [{'func': 'num_hop', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'date', 'november 18 , 1962'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose date record fuzzily matches to november 18 , 1962 .', 'tostr': 'filter_eq { all_rows ; date ; november 18 , 1962 }'}, 'attendance'], 'result': None, 'ind': 2, 'tostr': 'hop { filter_eq { all_rows ; date ; november 18 , 1962 } ; attendance }', 'tointer': 'select the rows whose date record fuzzily matches to november 18 , 1962 . take the attendance record of this row .'}, '12500'], 'result': True, 'ind': 5, 'tostr': 'eq { hop { filter_eq { all_rows ; date ; november 18 , 1962 } ; attendance } ; 12500 }', 'tointer': 'the attendance record of the first row is 12500 .'}, {'func': 'eq', 'args': [{'func': 'num_hop', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'date', 'december 16 , 1962'], 'result': None, 'ind': 1, 'tointer': 'select the rows whose date record fuzzily matches to december 16 , 1962 .', 'tostr': 'filter_eq { all_rows ; date ; december 16 , 1962 }'}, 'attendance'], 'result': None, 'ind': 3, 'tostr': 'hop { filter_eq { all_rows ; date ; december 16 , 1962 } ; attendance }', 'tointer': 'select the rows whose date record fuzzily matches to december 16 , 1962 . take the attendance record of this row .'}, '8000'], 'result': True, 'ind': 6, 'tostr': 'eq { hop { filter_eq { all_rows ; date ; december 16 , 1962 } ; attendance } ; 8000 }', 'tointer': 'the attendance record of the second row is 8000 .'}], 'result': True, 'ind': 7, 'tostr': 'and { eq { hop { filter_eq { all_rows ; date ; november 18 , 1962 } ; attendance } ; 12500 } ; eq { hop { filter_eq { all_rows ; date ; december 16 , 1962 } ; attendance } ; 8000 } }', 'tointer': 'the attendance record of the first row is 12500 . 
the attendance record of the second row is 8000 .'}], 'result': True, 'ind': 8, 'tostr': 'and { greater { hop { filter_eq { all_rows ; date ; november 18 , 1962 } ; attendance } ; hop { filter_eq { all_rows ; date ; december 16 , 1962 } ; attendance } } ; and { eq { hop { filter_eq { all_rows ; date ; november 18 , 1962 } ; attendance } ; 12500 } ; eq { hop { filter_eq { all_rows ; date ; december 16 , 1962 } ; attendance } ; 8000 } } } = true', 'tointer': 'select the rows whose date record fuzzily matches to november 18 , 1962 . take the attendance record of this row . select the rows whose date record fuzzily matches to december 16 , 1962 . take the attendance record of this row . the first record is greater than the second record . the attendance record of the first row is 12500 . the attendance record of the second row is 8000 .'}
and { greater { hop { filter_eq { all_rows ; date ; november 18 , 1962 } ; attendance } ; hop { filter_eq { all_rows ; date ; december 16 , 1962 } ; attendance } } ; and { eq { hop { filter_eq { all_rows ; date ; november 18 , 1962 } ; attendance } ; 12500 } ; eq { hop { filter_eq { all_rows ; date ; december 16 , 1962 } ; attendance } ; 8000 } } } = true
select the rows whose date record fuzzily matches to november 18 , 1962 . take the attendance record of this row . select the rows whose date record fuzzily matches to december 16 , 1962 . take the attendance record of this row . the first record is greater than the second record . the attendance record of the first row is 12500 . the attendance record of the second row is 8000 .
13
9
{'and_8': 8, 'result_9': 9, 'greater_4': 4, 'num_hop_2': 2, 'filter_str_eq_0': 0, 'all_rows_10': 10, 'date_11': 11, 'november 18 , 1962_12': 12, 'attendance_13': 13, 'num_hop_3': 3, 'filter_str_eq_1': 1, 'all_rows_14': 14, 'date_15': 15, 'december 16 , 1962_16': 16, 'attendance_17': 17, 'and_7': 7, 'eq_5': 5, '12500_18': 18, 'eq_6': 6, '8000_19': 19}
{'and_8': 'and', 'result_9': 'true', 'greater_4': 'greater', 'num_hop_2': 'num_hop', 'filter_str_eq_0': 'filter_str_eq', 'all_rows_10': 'all_rows', 'date_11': 'date', 'november 18 , 1962_12': 'november 18 , 1962', 'attendance_13': 'attendance', 'num_hop_3': 'num_hop', 'filter_str_eq_1': 'filter_str_eq', 'all_rows_14': 'all_rows', 'date_15': 'date', 'december 16 , 1962_16': 'december 16 , 1962', 'attendance_17': 'attendance', 'and_7': 'and', 'eq_5': 'eq', '12500_18': '12500', 'eq_6': 'eq', '8000_19': '8000'}
{'and_8': [9], 'result_9': [], 'greater_4': [8], 'num_hop_2': [4, 5], 'filter_str_eq_0': [2], 'all_rows_10': [0], 'date_11': [0], 'november 18 , 1962_12': [0], 'attendance_13': [2], 'num_hop_3': [4, 6], 'filter_str_eq_1': [3], 'all_rows_14': [1], 'date_15': [1], 'december 16 , 1962_16': [1], 'attendance_17': [3], 'and_7': [8], 'eq_5': [7], '12500_18': [5], 'eq_6': [7], '8000_19': [6]}
['week', 'date', 'opponent', 'result', 'attendance']
[['1', 'september 9 , 1962', 'new york titans', 'l 28 - 17', '12893'], ['2', 'september 23 , 1962', 'dallas texans', 'l 26 - 16', '12500'], ['3', 'september 30 , 1962', 'san diego chargers', 'l 42 - 33', '13000'], ['4', 'october 5 , 1962', 'denver broncos', 'l 44 - 7', '22452'], ['5', 'october 14 , 1962', 'denver broncos', 'l 23 - 6', '7000'], ['6', 'october 20 , 1962', 'buffalo bills', 'l 14 - 6', '21037'], ['7', 'october 26 , 1962', 'boston patriots', 'l 26 - 16', '12514'], ['8', 'november 4 , 1962', 'new york titans', 'l 31 - 21', '18247'], ['9', 'november 11 , 1962', 'houston oilers', 'l 28 - 20', '11000'], ['10', 'november 18 , 1962', 'buffalo bills', 'l 10 - 6', '12500'], ['11', 'november 25 , 1962', 'dallas texans', 'l 35 - 7', '13557'], ['12', 'december 2 , 1962', 'san diego chargers', 'l 31 - 21', '17874'], ['13', 'december 9 , 1962', 'houston oilers', 'l 32 - 17', '27400'], ['14', 'december 16 , 1962', 'boston patriots', 'w 20 - 0', '8000']]
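The combined form above checks a comparison and two record values at once. A sketch, with the two attendance figures copied from the table.

# Evaluate: attendance on november 18 is greater than on december 16, and equals 12500 vs 8000.
attendance = {
    'november 18 , 1962': 12500,
    'december 16 , 1962': 8000,
}  # from the 'attendance' column above

first = attendance['november 18 , 1962']
second = attendance['december 16 , 1962']
print(first > second and first == 12500 and second == 8000)  # True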
2003 bridgeport barrage season
https://en.wikipedia.org/wiki/2003_Bridgeport_Barrage_season
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-12101799-1.html.csv
ordinal
the second to last game in the 2003 bridgeport barrage season took place at cawley memorial stadium .
{'row': '11', 'col': '1', 'order': '11', 'col_other': '4', 'max_or_min': 'min_to_max', 'value_mentioned': 'no', 'scope': 'all', 'subset': None}
{'func': 'str_eq', 'args': [{'func': 'str_hop', 'args': [{'func': 'nth_argmin', 'args': ['all_rows', 'date', '11'], 'result': None, 'ind': 0, 'tostr': 'nth_argmin { all_rows ; date ; 11 }'}, 'field'], 'result': 'cawley memorial stadium', 'ind': 1, 'tostr': 'hop { nth_argmin { all_rows ; date ; 11 } ; field }'}, 'cawley memorial stadium'], 'result': True, 'ind': 2, 'tostr': 'eq { hop { nth_argmin { all_rows ; date ; 11 } ; field } ; cawley memorial stadium } = true', 'tointer': 'select the row whose date record of all rows is 11th minimum . the field record of this row is cawley memorial stadium .'}
eq { hop { nth_argmin { all_rows ; date ; 11 } ; field } ; cawley memorial stadium } = true
select the row whose date record of all rows is 11th minimum . the field record of this row is cawley memorial stadium .
3
3
{'str_eq_2': 2, 'result_3': 3, 'str_hop_1': 1, 'nth_argmin_0': 0, 'all_rows_4': 4, 'date_5': 5, '11_6': 6, 'field_7': 7, 'cawley memorial stadium_8': 8}
{'str_eq_2': 'str_eq', 'result_3': 'true', 'str_hop_1': 'str_hop', 'nth_argmin_0': 'nth_argmin', 'all_rows_4': 'all_rows', 'date_5': 'date', '11_6': '11', 'field_7': 'field', 'cawley memorial stadium_8': 'cawley memorial stadium'}
{'str_eq_2': [3], 'result_3': [], 'str_hop_1': [2], 'nth_argmin_0': [1], 'all_rows_4': [0], 'date_5': [0], '11_6': [0], 'field_7': [1], 'cawley memorial stadium_8': [2]}
['date', 'opponent', 'home / away', 'field', 'result']
[['may 31', 'rattlers', 'away', 'bishop kearney field', 'l 13 - 23'], ['june 6', 'cannons', 'home', 'the ballpark at harbor yard', 'l 17 - 23'], ['june 12', 'bayhawks', 'home', 'the ballpark at harbor yard', 'l 14 - 21'], ['june 14', 'pride', 'away', 'commerce bank ballpark', 'l 9 - 16'], ['june 27', 'lizards', 'away', 'mitchel athletic complex', 'l 19 - 23'], ['july 12', 'lizards', 'home', 'the ballpark at harbor yard', 'l 16 - 17'], ['july 19', 'bayhawks', 'away', 'homewood field', 'w 22 - 17'], ['july 24', 'rattlers', 'home', 'the ballpark at harbor yard', 'l 19 - 21'], ['july 31', 'pride', 'home', 'the ballpark at harbor yard', 'l 14 - 22'], ['august 2', 'rattlers', 'away', 'bishop kearney field', 'l 13 - 28'], ['august 7', 'cannons', 'away', 'cawley memorial stadium', 'l 15 - 21'], ['august 14', 'rattlers', 'home', 'the ballpark at harbor yard', 'l 18 - 23']]
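nth_argmin over a date column needs the 'month day' strings turned into real dates. A sketch using datetime, assuming every game falls in 2003 (the season year); the (date, field) pairs are copied from the table above.

# Evaluate: hop { nth_argmin { all_rows ; date ; 11 } ; field }
from datetime import datetime

games = [
    ('may 31', 'bishop kearney field'), ('june 6', 'the ballpark at harbor yard'),
    ('june 12', 'the ballpark at harbor yard'), ('june 14', 'commerce bank ballpark'),
    ('june 27', 'mitchel athletic complex'), ('july 12', 'the ballpark at harbor yard'),
    ('july 19', 'homewood field'), ('july 24', 'the ballpark at harbor yard'),
    ('july 31', 'the ballpark at harbor yard'), ('august 2', 'bishop kearney field'),
    ('august 7', 'cawley memorial stadium'), ('august 14', 'the ballpark at harbor yard'),
]

def as_date(d):
    return datetime.strptime(d + ' 2003', '%B %d %Y')

eleventh = sorted(games, key=lambda g: as_date(g[0]))[11 - 1]  # 11th earliest date
print(eleventh[1] == 'cawley memorial stadium')                # True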
takayo hashi
https://en.wikipedia.org/wiki/Takayo_Hashi
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-10727601-2.html.csv
unique
for takayo hashi , when the location is tokyo , japan , the only time she lost was when the opponent was hitomi akano .
{'scope': 'subset', 'row': '13', 'col': '1', 'col_other': '3', 'criterion': 'equal', 'value': 'loss', 'subset': {'col': '7', 'criterion': 'fuzzily_match', 'value': 'tokyo'}}
{'func': 'and', 'args': [{'func': 'only', 'args': [{'func': 'filter_str_eq', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'location', 'tokyo'], 'result': None, 'ind': 0, 'tostr': 'filter_eq { all_rows ; location ; tokyo }', 'tointer': 'select the rows whose location record fuzzily matches to tokyo .'}, 'res', 'loss'], 'result': None, 'ind': 1, 'tointer': 'select the rows whose location record fuzzily matches to tokyo . among these rows , select the rows whose res record fuzzily matches to loss .', 'tostr': 'filter_eq { filter_eq { all_rows ; location ; tokyo } ; res ; loss }'}], 'result': True, 'ind': 2, 'tostr': 'only { filter_eq { filter_eq { all_rows ; location ; tokyo } ; res ; loss } }', 'tointer': 'select the rows whose location record fuzzily matches to tokyo . among these rows , select the rows whose res record fuzzily matches to loss . there is only one such row in the table .'}, {'func': 'str_eq', 'args': [{'func': 'str_hop', 'args': [{'func': 'filter_str_eq', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'location', 'tokyo'], 'result': None, 'ind': 0, 'tostr': 'filter_eq { all_rows ; location ; tokyo }', 'tointer': 'select the rows whose location record fuzzily matches to tokyo .'}, 'res', 'loss'], 'result': None, 'ind': 1, 'tointer': 'select the rows whose location record fuzzily matches to tokyo . among these rows , select the rows whose res record fuzzily matches to loss .', 'tostr': 'filter_eq { filter_eq { all_rows ; location ; tokyo } ; res ; loss }'}, 'opponent'], 'result': 'hitomi akano', 'ind': 3, 'tostr': 'hop { filter_eq { filter_eq { all_rows ; location ; tokyo } ; res ; loss } ; opponent }'}, 'hitomi akano'], 'result': True, 'ind': 4, 'tostr': 'eq { hop { filter_eq { filter_eq { all_rows ; location ; tokyo } ; res ; loss } ; opponent } ; hitomi akano }', 'tointer': 'the opponent record of this unqiue row is hitomi akano .'}], 'result': True, 'ind': 5, 'tostr': 'and { only { filter_eq { filter_eq { all_rows ; location ; tokyo } ; res ; loss } } ; eq { hop { filter_eq { filter_eq { all_rows ; location ; tokyo } ; res ; loss } ; opponent } ; hitomi akano } } = true', 'tointer': 'select the rows whose location record fuzzily matches to tokyo . among these rows , select the rows whose res record fuzzily matches to loss . there is only one such row in the table . the opponent record of this unqiue row is hitomi akano .'}
and { only { filter_eq { filter_eq { all_rows ; location ; tokyo } ; res ; loss } } ; eq { hop { filter_eq { filter_eq { all_rows ; location ; tokyo } ; res ; loss } ; opponent } ; hitomi akano } } = true
select the rows whose location record fuzzily matches to tokyo . among these rows , select the rows whose res record fuzzily matches to loss . there is only one such row in the table . the opponent record of this unique row is hitomi akano .
8
6
{'and_5': 5, 'result_6': 6, 'only_2': 2, 'filter_str_eq_1': 1, 'filter_str_eq_0': 0, 'all_rows_7': 7, 'location_8': 8, 'tokyo_9': 9, 'res_10': 10, 'loss_11': 11, 'str_eq_4': 4, 'str_hop_3': 3, 'opponent_12': 12, 'hitomi akano_13': 13}
{'and_5': 'and', 'result_6': 'true', 'only_2': 'only', 'filter_str_eq_1': 'filter_str_eq', 'filter_str_eq_0': 'filter_str_eq', 'all_rows_7': 'all_rows', 'location_8': 'location', 'tokyo_9': 'tokyo', 'res_10': 'res', 'loss_11': 'loss', 'str_eq_4': 'str_eq', 'str_hop_3': 'str_hop', 'opponent_12': 'opponent', 'hitomi akano_13': 'hitomi akano'}
{'and_5': [6], 'result_6': [], 'only_2': [5], 'filter_str_eq_1': [2, 3], 'filter_str_eq_0': [1], 'all_rows_7': [0], 'location_8': [0], 'tokyo_9': [0], 'res_10': [1], 'loss_11': [1], 'str_eq_4': [5], 'str_hop_3': [4], 'opponent_12': [3], 'hitomi akano_13': [4]}
['res', 'record', 'opponent', 'method', 'round', 'time', 'location']
[['win', '14 - 4', 'roxanne modafferi', 'decision ( unanimous )', '2', '5:00', 'koto , tokyo , japan'], ['loss', '13 - 4', 'cat zingano', 'ko ( slam )', '3', '4:42', 'denver , colorado , united states'], ['loss', '13 - 3', 'tara larosa', 'decision ( unanimous )', '5', '5:00', 'atlantic city , new jersey , united states'], ['loss', '13 - 2', 'sarah kaufman', 'decision ( unanimous )', '5', '5:00', 'san jose , california , united states'], ['win', '13 - 1', 'chisa yonezawa', 'submission ( rear - naked choke )', '2', '1:43', 'tokyo , japan'], ['win', '12 - 1', 'amanda buckner', 'decision ( unanimous )', '3', '5:00', 'los angeles , california , united states'], ['win', '11 - 1', 'hitomi akano', 'decision ( unanimous )', '3', '5:00', 'tokyo , japan'], ['win', '10 - 1', 'hee jin lee', 'decision ( unanimous )', '2', '5:00', 'tokyo , japan'], ['win', '9 - 1', 'sybil starr', 'submission ( armbar )', '1', '2:47', 'tokyo , japan'], ['win', '8 - 1', 'miki morifuji', 'decision ( unanimous )', '2', '5:00', 'tokyo , japan'], ['win', '7 - 1', 'kinuka sasaki', 'submission ( rear - naked choke )', '1', '2:31', 'tokyo , japan'], ['win', '6 - 1', 'kazuma morohoshi', 'decision ( unanimous )', '2', '5:00', 'tokyo , japan'], ['loss', '5 - 1', 'hitomi akano', 'submission ( armbar )', '1', '1:19', 'tokyo , japan'], ['win', '5 - 0', 'yukari', 'decision ( unanimous )', '2', '5:00', 'tokyo , japan'], ['win', '4 - 0', 'yoko hattori', 'decision ( unanimous )', '2', '5:00', 'tokyo , japan'], ['win', '3 - 0', 'yumiko sugimoto', 'decision ( unanimous )', '2', '5:00', 'tokyo , japan'], ['win', '2 - 0', 'mika harigai', 'submission ( rear - naked choke )', '2', '4:28', 'tokyo , japan'], ['win', '1 - 0', 'natsuko kikukawa', 'decision ( unanimous )', '2', '5:00', 'tokyo , japan']]
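The 'unique' form chains two fuzzy filters and then checks that exactly one row survives. The sketch below keeps only the rows that could possibly match (every omitted row is a win, so omitting them cannot change the result); the substring test is my reading of "fuzzily matches".

# Evaluate: exactly one tokyo loss, and its opponent is hitomi akano.
fights = [
    ('win', 'roxanne modafferi', 'koto , tokyo , japan'),
    ('loss', 'cat zingano', 'denver , colorado , united states'),
    ('loss', 'tara larosa', 'atlantic city , new jersey , united states'),
    ('loss', 'sarah kaufman', 'san jose , california , united states'),
    ('loss', 'hitomi akano', 'tokyo , japan'),
    # the remaining 13 rows of the table are all wins and are omitted here
]  # (res, opponent, location)

tokyo_losses = [f for f in fights if 'tokyo' in f[2] and f[0] == 'loss']
print(len(tokyo_losses) == 1 and tokyo_losses[0][1] == 'hitomi akano')  # True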
list of the colbert report episodes ( 2010 )
https://en.wikipedia.org/wiki/List_of_The_Colbert_Report_episodes_%282010%29
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/1-25691838-11.html.csv
superlative
the earliest airdate of the november 2010 episodes of the colbert report was on november 2 .
{'scope': 'all', 'col_superlative': '5', 'row_superlative': '1', 'value_mentioned': 'yes', 'max_or_min': 'min', 'other_col': 'n/a', 'subset': None}
{'func': 'eq', 'args': [{'func': 'min', 'args': ['all_rows', 'original airdate'], 'result': 'november 02', 'ind': 0, 'tostr': 'min { all_rows ; original airdate }', 'tointer': 'the minimum original airdate record of all rows is november 02 .'}, 'november 02'], 'result': True, 'ind': 1, 'tostr': 'eq { min { all_rows ; original airdate } ; november 02 } = true', 'tointer': 'the minimum original airdate record of all rows is november 02 .'}
eq { min { all_rows ; original airdate } ; november 02 } = true
the minimum original airdate record of all rows is november 02 .
2
2
{'eq_1': 1, 'result_2': 2, 'min_0': 0, 'all_rows_3': 3, 'original airdate_4': 4, 'november 02_5': 5}
{'eq_1': 'eq', 'result_2': 'true', 'min_0': 'min', 'all_rows_3': 'all_rows', 'original airdate_4': 'original airdate', 'november 02_5': 'november 02'}
{'eq_1': [2], 'result_2': [], 'min_0': [1], 'all_rows_3': [0], 'original airdate_4': [0], 'november 02_5': [1]}
['episode', 'the wãrd', 'guest', 'introductory phrase', 'original airdate', 'production code']
[['791', 'none', 'david frum , katrina vanden heuvel', 'shaka brah ! this is the colbert report !', 'november 02', '6139'], ['794', 'nothingness', 'reza aslan', 'none', 'november 08', '6142'], ['795', 'none', 'abbe lowell , cee lo green', 'none', 'november 09', '6143'], ['796', 'none', 'beri fox , martha stewart', 'none', 'november 10', '6144'], ['798', 'none', 'jeffrey goldberg , david stern', 'none', 'november 15', '6146'], ['801', 'none', 'salvatore giunta', 'none', 'november 18', '6149']]
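min over the 'original airdate' column only has to order days within november. A sketch that parses the day number explicitly rather than relying on string order.

# Evaluate: eq { min { all_rows ; original airdate } ; november 02 }
airdates = ['november 02', 'november 08', 'november 09',
            'november 10', 'november 15', 'november 18']

earliest = min(airdates, key=lambda d: int(d.split()[1]))  # all dates share the same month
print(earliest == 'november 02')  # True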
thawatchai damrong - ongtrakul
https://en.wikipedia.org/wiki/Thawatchai_Damrong-Ongtrakul
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-18458106-1.html.csv
unique
the match on october 7 , 1994 was the only one that thawatchai damrong - ongtrakul lost .
{'scope': 'all', 'row': '1', 'col': '4', 'col_other': '1', 'criterion': 'equal', 'value': 'lost', 'subset': None}
{'func': 'and', 'args': [{'func': 'only', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'result', 'lost'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose result record fuzzily matches to lost .', 'tostr': 'filter_eq { all_rows ; result ; lost }'}], 'result': True, 'ind': 1, 'tostr': 'only { filter_eq { all_rows ; result ; lost } }', 'tointer': 'select the rows whose result record fuzzily matches to lost . there is only one such row in the table .'}, {'func': 'str_eq', 'args': [{'func': 'str_hop', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'result', 'lost'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose result record fuzzily matches to lost .', 'tostr': 'filter_eq { all_rows ; result ; lost }'}, 'date'], 'result': 'october 7 , 1994', 'ind': 2, 'tostr': 'hop { filter_eq { all_rows ; result ; lost } ; date }'}, 'october 7 , 1994'], 'result': True, 'ind': 3, 'tostr': 'eq { hop { filter_eq { all_rows ; result ; lost } ; date } ; october 7 , 1994 }', 'tointer': 'the date record of this unqiue row is october 7 , 1994 .'}], 'result': True, 'ind': 4, 'tostr': 'and { only { filter_eq { all_rows ; result ; lost } } ; eq { hop { filter_eq { all_rows ; result ; lost } ; date } ; october 7 , 1994 } } = true', 'tointer': 'select the rows whose result record fuzzily matches to lost . there is only one such row in the table . the date record of this unqiue row is october 7 , 1994 .'}
and { only { filter_eq { all_rows ; result ; lost } } ; eq { hop { filter_eq { all_rows ; result ; lost } ; date } ; october 7 , 1994 } } = true
select the rows whose result record fuzzily matches to lost . there is only one such row in the table . the date record of this unique row is october 7 , 1994 .
6
5
{'and_4': 4, 'result_5': 5, 'only_1': 1, 'filter_str_eq_0': 0, 'all_rows_6': 6, 'result_7': 7, 'lost_8': 8, 'str_eq_3': 3, 'str_hop_2': 2, 'date_9': 9, 'october 7 , 1994_10': 10}
{'and_4': 'and', 'result_5': 'true', 'only_1': 'only', 'filter_str_eq_0': 'filter_str_eq', 'all_rows_6': 'all_rows', 'result_7': 'result', 'lost_8': 'lost', 'str_eq_3': 'str_eq', 'str_hop_2': 'str_hop', 'date_9': 'date', 'october 7 , 1994_10': 'october 7 , 1994'}
{'and_4': [5], 'result_5': [], 'only_1': [4], 'filter_str_eq_0': [1, 2], 'all_rows_6': [0], 'result_7': [0], 'lost_8': [0], 'str_eq_3': [4], 'str_hop_2': [3], 'date_9': [2], 'october 7 , 1994_10': [3]}
['date', 'venue', 'score', 'result', 'competition']
[['october 7 , 1994', 'hiroshima , japan', '4 - 5', 'lost', '1994 asian games'], ['february 16 , 1996', 'bangkok , thailand', '5 - 2', 'win', "king 's cup 1996"], ['june 29 , 1996', 'bangkok , thailand', '5 - 1', 'won', '1996 asian cup qualification'], ['december 4 , 1998', 'bangkok , thailand', '2 - 0', 'won', '1998 asian games'], ['december 14 , 1998', 'bangkok , thailand', '2 - 1', 'won', '1998 asian games'], ['august 12 , 1999', 'bandar seri begawan , brunei', '2 - 0', 'won', '1999 southeast asian games'], ['august 14 , 1999', 'bandar seri begawan , brunei', '2 - 0', 'won', '1999 southeast asian games'], ['april 4 , 2000', 'bangkok , thailand', '5 - 3', 'won', '2000 asian cup qualification'], ['april 6 , 2000', 'bangkok , thailand', '1 - 0', 'won', '2000 asian cup qualification'], ['may 13 , 2001', 'bangkok , thailand', '4 - 2', 'won', '2002 world cup qualification']]
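Another 'unique' check, this time with a single filter on the 'result' column. A sketch over the (date, result) pairs from the table; note that 'won' and 'win' do not fuzzily match 'lost', so only the first row survives.

# Evaluate: only one row whose result matches 'lost', and its date is october 7 , 1994.
results = [
    ('october 7 , 1994', 'lost'), ('february 16 , 1996', 'win'),
    ('june 29 , 1996', 'won'), ('december 4 , 1998', 'won'),
    ('december 14 , 1998', 'won'), ('august 12 , 1999', 'won'),
    ('august 14 , 1999', 'won'), ('april 4 , 2000', 'won'),
    ('april 6 , 2000', 'won'), ('may 13 , 2001', 'won'),
]

losses = [r for r in results if 'lost' in r[1]]                 # fuzzy filter_eq
print(len(losses) == 1 and losses[0][0] == 'october 7 , 1994')  # True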
sweden in the eurovision song contest 1959
https://en.wikipedia.org/wiki/Sweden_in_the_Eurovision_Song_Contest_1959
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-12204536-1.html.csv
ordinal
in the 1959 eurovision song contest , sweden 's staffan broms finished second with 79 points for the song " dags igen att vara kära " .
{'scope': 'all', 'row': '3', 'col': '5', 'order': '2', 'col_other': '2,3', 'max_or_min': 'max_to_min', 'value_mentioned': 'yes', 'subset': None}
{'func': 'and', 'args': [{'func': 'eq', 'args': [{'func': 'nth_max', 'args': ['all_rows', 'points', '2'], 'result': '79', 'ind': 0, 'tostr': 'nth_max { all_rows ; points ; 2 }', 'tointer': 'the 2nd maximum points record of all rows is 79 .'}, '79'], 'result': True, 'ind': 1, 'tostr': 'eq { nth_max { all_rows ; points ; 2 } ; 79 }', 'tointer': 'the 2nd maximum points record of all rows is 79 .'}, {'func': 'and', 'args': [{'func': 'str_eq', 'args': [{'func': 'str_hop', 'args': [{'func': 'nth_argmax', 'args': ['all_rows', 'points', '2'], 'result': None, 'ind': 2, 'tostr': 'nth_argmax { all_rows ; points ; 2 }'}, 'artist'], 'result': 'staffan broms', 'ind': 3, 'tostr': 'hop { nth_argmax { all_rows ; points ; 2 } ; artist }'}, 'staffan broms'], 'result': True, 'ind': 4, 'tostr': 'eq { hop { nth_argmax { all_rows ; points ; 2 } ; artist } ; staffan broms }', 'tointer': 'the artist record of the row with 2nd maximum points record is staffan broms .'}, {'func': 'str_eq', 'args': [{'func': 'str_hop', 'args': [{'func': 'nth_argmax', 'args': ['all_rows', 'points', '2'], 'result': None, 'ind': 2, 'tostr': 'nth_argmax { all_rows ; points ; 2 }'}, 'song'], 'result': 'dags igen att vara kära', 'ind': 5, 'tostr': 'hop { nth_argmax { all_rows ; points ; 2 } ; song }'}, 'dags igen att vara kära'], 'result': True, 'ind': 6, 'tostr': 'eq { hop { nth_argmax { all_rows ; points ; 2 } ; song } ; dags igen att vara kära }', 'tointer': 'the song record of the row with 2nd maximum points record is dags igen att vara kära .'}], 'result': True, 'ind': 7, 'tostr': 'and { eq { hop { nth_argmax { all_rows ; points ; 2 } ; artist } ; staffan broms } ; eq { hop { nth_argmax { all_rows ; points ; 2 } ; song } ; dags igen att vara kära } }', 'tointer': 'the artist record of the row with 2nd maximum points record is staffan broms . the song record of the row with 2nd maximum points record is dags igen att vara kära .'}], 'result': True, 'ind': 8, 'tostr': 'and { eq { nth_max { all_rows ; points ; 2 } ; 79 } ; and { eq { hop { nth_argmax { all_rows ; points ; 2 } ; artist } ; staffan broms } ; eq { hop { nth_argmax { all_rows ; points ; 2 } ; song } ; dags igen att vara kära } } } = true', 'tointer': 'the 2nd maximum points record of all rows is 79 . the artist record of the row with 2nd maximum points record is staffan broms . the song record of the row with 2nd maximum points record is dags igen att vara kära .'}
and { eq { nth_max { all_rows ; points ; 2 } ; 79 } ; and { eq { hop { nth_argmax { all_rows ; points ; 2 } ; artist } ; staffan broms } ; eq { hop { nth_argmax { all_rows ; points ; 2 } ; song } ; dags igen att vara kära } } } = true
the 2nd maximum points record of all rows is 79 . the artist record of the row with 2nd maximum points record is staffan broms . the song record of the row with 2nd maximum points record is dags igen att vara kära .
10
9
{'and_8': 8, 'result_9': 9, 'eq_1': 1, 'nth_max_0': 0, 'all_rows_10': 10, 'points_11': 11, '2_12': 12, '79_13': 13, 'and_7': 7, 'str_eq_4': 4, 'str_hop_3': 3, 'nth_argmax_2': 2, 'all_rows_14': 14, 'points_15': 15, '2_16': 16, 'artist_17': 17, 'staffan broms_18': 18, 'str_eq_6': 6, 'str_hop_5': 5, 'song_19': 19, 'dags igen att vara kära_20': 20}
{'and_8': 'and', 'result_9': 'true', 'eq_1': 'eq', 'nth_max_0': 'nth_max', 'all_rows_10': 'all_rows', 'points_11': 'points', '2_12': '2', '79_13': '79', 'and_7': 'and', 'str_eq_4': 'str_eq', 'str_hop_3': 'str_hop', 'nth_argmax_2': 'nth_argmax', 'all_rows_14': 'all_rows', 'points_15': 'points', '2_16': '2', 'artist_17': 'artist', 'staffan broms_18': 'staffan broms', 'str_eq_6': 'str_eq', 'str_hop_5': 'str_hop', 'song_19': 'song', 'dags igen att vara kära_20': 'dags igen att vara kära'}
{'and_8': [9], 'result_9': [], 'eq_1': [8], 'nth_max_0': [1], 'all_rows_10': [0], 'points_11': [0], '2_12': [0], '79_13': [1], 'and_7': [8], 'str_eq_4': [7], 'str_hop_3': [4], 'nth_argmax_2': [3, 5], 'all_rows_14': [2], 'points_15': [2], '2_16': [2], 'artist_17': [3], 'staffan broms_18': [4], 'str_eq_6': [7], 'str_hop_5': [6], 'song_19': [5], 'dags igen att vara kära_20': [6]}
['draw', 'artist', 'song', 'songwriters', 'points', 'place']
[['1', 'östen warnebring', 'kungsgatans blues', 'axel flyckt , sven - gunnar johnson', '67', '4th'], ['2', 'ulla christensson', 'lyckans soluppgång', 'dag lambert , bengt haslum', '52', '6th'], ['3', 'staffan broms', 'dags igen att vara kära', 'ulf källqvist , åke gerhard', '79', '2nd'], ['4', 'britt - inger dreilick', 'hösten är vår', 'gösta westerberg , fritz - gustaf', '52', '6th'], ['5', 'åke söhr', 'en miljon för dina', 'åke gerhard', '0', '8th'], ['6', 'britt damberg', 'nya fågelsången', 'sam samson , fritz - gustaf', '76', '3rd'], ['7', 'östen warnebring', 'någon saknar dig', 'britt lindeborg', '56', '5th'], ['8', 'siw malmkvist', 'augustin', 'bo harry sandin , åke gerhard', '105', '1st']]
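The ordinal form asks for the row with the 2nd-highest points and then checks three of its fields. A sketch over the (artist, song, points) columns copied from the table above.

# Evaluate: 2nd maximum points is 79, scored by staffan broms with 'dags igen att vara kära'.
entries = [
    ('östen warnebring', 'kungsgatans blues', 67),
    ('ulla christensson', 'lyckans soluppgång', 52),
    ('staffan broms', 'dags igen att vara kära', 79),
    ('britt - inger dreilick', 'hösten är vår', 52),
    ('åke söhr', 'en miljon för dina', 0),
    ('britt damberg', 'nya fågelsången', 76),
    ('östen warnebring', 'någon saknar dig', 56),
    ('siw malmkvist', 'augustin', 105),
]

second = sorted(entries, key=lambda e: e[2], reverse=True)[2 - 1]  # nth_argmax, n = 2
print(second == ('staffan broms', 'dags igen att vara kära', 79))  # True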
margalita chakhnashvili
https://en.wikipedia.org/wiki/Margalita_Chakhnashvili
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-12428755-2.html.csv
ordinal
the westende tournament was the third most recent tournament in which margalita chakhnashvili competed .
{'row': '3', 'col': '1', 'order': '3', 'col_other': '2', 'max_or_min': 'max_to_min', 'value_mentioned': 'no', 'scope': 'all', 'subset': None}
{'func': 'str_eq', 'args': [{'func': 'str_hop', 'args': [{'func': 'nth_argmax', 'args': ['all_rows', 'date', '3'], 'result': None, 'ind': 0, 'tostr': 'nth_argmax { all_rows ; date ; 3 }'}, 'tournament'], 'result': 'westende', 'ind': 1, 'tostr': 'hop { nth_argmax { all_rows ; date ; 3 } ; tournament }'}, 'westende'], 'result': True, 'ind': 2, 'tostr': 'eq { hop { nth_argmax { all_rows ; date ; 3 } ; tournament } ; westende } = true', 'tointer': 'select the row whose date record of all rows is 3rd maximum . the tournament record of this row is westende .'}
eq { hop { nth_argmax { all_rows ; date ; 3 } ; tournament } ; westende } = true
select the row whose date record of all rows is 3rd maximum . the tournament record of this row is westende .
3
3
{'str_eq_2': 2, 'result_3': 3, 'str_hop_1': 1, 'nth_argmax_0': 0, 'all_rows_4': 4, 'date_5': 5, '3_6': 6, 'tournament_7': 7, 'westende_8': 8}
{'str_eq_2': 'str_eq', 'result_3': 'true', 'str_hop_1': 'str_hop', 'nth_argmax_0': 'nth_argmax', 'all_rows_4': 'all_rows', 'date_5': 'date', '3_6': '3', 'tournament_7': 'tournament', 'westende_8': 'westende'}
{'str_eq_2': [3], 'result_3': [], 'str_hop_1': [2], 'nth_argmax_0': [1], 'all_rows_4': [0], 'date_5': [0], '3_6': [0], 'tournament_7': [1], 'westende_8': [2]}
['date', 'tournament', 'surface', 'tier', 'partner', 'opponents in the final', 'score']
[['may 8 , 2006', 'antalya - belek', 'clay', 'itf 10k', 'ipek şenoğlu', 'claire de gubernatis alexandra dulgheru', '6 - 4 , 6 - 3'], ['july 3 , 2006', 'mont de marson', 'clay', 'itf 25k', 'ioana raluca olaru', 'akgul amanmuradova nina bratchikova', '7 - 5 , 1 - 6 , 6 - 1'], ['august 21 , 2009', 'westende', 'hard', 'itf 25k', 'vasilisa davydova', 'emilie bacquet jasmin wöhr', '6 - 2 , 7 - 5'], ['june 12 , 2011', 'zlin', 'clay', 'itf 50k', 'yuliya beygelzimer', 'réka - luca jani katalin marosi', '3 - 6 , 6 - 1 ,'], ['june 3 , 2012', 'grado', 'clay', 'itf 25k', 'ekaterine gorgodze', 'claudia giovine anastasia grymalska', '7 - 6 ( 7 - 2 ) , 7 - 6 ( 7 - 1 )']]
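The 3rd-most-recent date is found the same way, but the 'month day , year' strings need parsing first. A sketch with the (date, tournament) pairs from the table above.

# Evaluate: hop { nth_argmax { all_rows ; date ; 3 } ; tournament }
from datetime import datetime

finals = [
    ('may 8 , 2006', 'antalya - belek'), ('july 3 , 2006', 'mont de marson'),
    ('august 21 , 2009', 'westende'), ('june 12 , 2011', 'zlin'),
    ('june 3 , 2012', 'grado'),
]

def as_date(d):
    return datetime.strptime(d, '%B %d , %Y')

third_latest = sorted(finals, key=lambda f: as_date(f[0]), reverse=True)[3 - 1]
print(third_latest[1] == 'westende')  # True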
atlantic coast collegiate hockey league
https://en.wikipedia.org/wiki/Atlantic_Coast_Collegiate_Hockey_League
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/1-16403890-1.html.csv
unique
among the members of the atlantic coast collegiate hockey league , georgetown university is the only member institution whose primary conference is the big east conference .
{'scope': 'all', 'row': '3', 'col': '7', 'col_other': '1', 'criterion': 'equal', 'value': 'big east conference', 'subset': None}
{'func': 'and', 'args': [{'func': 'only', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'primary conference', 'big east conference'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose primary conference record fuzzily matches to big east conference .', 'tostr': 'filter_eq { all_rows ; primary conference ; big east conference }'}], 'result': True, 'ind': 1, 'tostr': 'only { filter_eq { all_rows ; primary conference ; big east conference } }', 'tointer': 'select the rows whose primary conference record fuzzily matches to big east conference . there is only one such row in the table .'}, {'func': 'str_eq', 'args': [{'func': 'str_hop', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'primary conference', 'big east conference'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose primary conference record fuzzily matches to big east conference .', 'tostr': 'filter_eq { all_rows ; primary conference ; big east conference }'}, 'institution'], 'result': 'georgetown university', 'ind': 2, 'tostr': 'hop { filter_eq { all_rows ; primary conference ; big east conference } ; institution }'}, 'georgetown university'], 'result': True, 'ind': 3, 'tostr': 'eq { hop { filter_eq { all_rows ; primary conference ; big east conference } ; institution } ; georgetown university }', 'tointer': 'the institution record of this unqiue row is georgetown university .'}], 'result': True, 'ind': 4, 'tostr': 'and { only { filter_eq { all_rows ; primary conference ; big east conference } } ; eq { hop { filter_eq { all_rows ; primary conference ; big east conference } ; institution } ; georgetown university } } = true', 'tointer': 'select the rows whose primary conference record fuzzily matches to big east conference . there is only one such row in the table . the institution record of this unqiue row is georgetown university .'}
and { only { filter_eq { all_rows ; primary conference ; big east conference } } ; eq { hop { filter_eq { all_rows ; primary conference ; big east conference } ; institution } ; georgetown university } } = true
select the rows whose primary conference record fuzzily matches to big east conference . there is only one such row in the table . the institution record of this unique row is georgetown university .
6
5
{'and_4': 4, 'result_5': 5, 'only_1': 1, 'filter_str_eq_0': 0, 'all_rows_6': 6, 'primary conference_7': 7, 'big east conference_8': 8, 'str_eq_3': 3, 'str_hop_2': 2, 'institution_9': 9, 'georgetown university_10': 10}
{'and_4': 'and', 'result_5': 'true', 'only_1': 'only', 'filter_str_eq_0': 'filter_str_eq', 'all_rows_6': 'all_rows', 'primary conference_7': 'primary conference', 'big east conference_8': 'big east conference', 'str_eq_3': 'str_eq', 'str_hop_2': 'str_hop', 'institution_9': 'institution', 'georgetown university_10': 'georgetown university'}
{'and_4': [5], 'result_5': [], 'only_1': [4], 'filter_str_eq_0': [1, 2], 'all_rows_6': [0], 'primary conference_7': [0], 'big east conference_8': [0], 'str_eq_3': [4], 'str_hop_2': [3], 'institution_9': [2], 'georgetown university_10': [3]}
['institution', 'location', 'founded', 'affiliation', 'enrollment', 'team nickname', 'primary conference', 'home rink']
[['duke university', 'durham , nc', '1838', 'private / non - sectarian', '6496', 'blue devils', 'atlantic coast conference ( d - i )', 'triangle sports plex'], ['elon university', 'elon , nc', '1889', 'private', '5225', 'phoenix', 'southern conference ( d - i )', 'triangle sports plex / greensboro ice house'], ['georgetown university', 'washington , dc', '1789', 'private / catholic', '13612', 'hoyas', 'big east conference ( d - i )', 'kettler capitals iceplex'], ['george washington university', 'washington , dc', '1821', 'private', '6655', 'colonials', 'atlantic 10 conference ( d - i )', 'fort dupont ice arena / kettler capitals iceplex'], ['university of north carolina', 'chapel hill , nc', '1789', 'public', '17895', 'tar heels', 'atlantic coast conference ( d - i )', 'triangle sports plex'], ['north carolina state university', 'raleigh , nc', '1887', 'public', '24741', 'wolfpack', 'atlantic coast conference ( d - i )', 'raleigh center ice']]
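'big east conference' only fuzzily matches the table value 'big east conference ( d - i )', so the substring test matters here. A sketch over the (institution, primary conference) columns.

# Evaluate: exactly one member in the big east conference, and it is georgetown university.
members = [
    ('duke university', 'atlantic coast conference ( d - i )'),
    ('elon university', 'southern conference ( d - i )'),
    ('georgetown university', 'big east conference ( d - i )'),
    ('george washington university', 'atlantic 10 conference ( d - i )'),
    ('university of north carolina', 'atlantic coast conference ( d - i )'),
    ('north carolina state university', 'atlantic coast conference ( d - i )'),
]

big_east = [m for m in members if 'big east conference' in m[1]]  # fuzzy filter_eq
print(len(big_east) == 1 and big_east[0][0] == 'georgetown university')  # True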
gardline group
https://en.wikipedia.org/wiki/Gardline_group
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/1-28132970-5.html.csv
comparative
of the gardline group 's windfarm support vessels , smeaton array has a higher max speed than marianarray .
{'row_1': '7', 'row_2': '6', 'col': '3', 'col_other': '1', 'relation': 'greater', 'record_mentioned': 'no', 'diff_result': None}
{'func': 'greater', 'args': [{'func': 'str_hop', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'vessel', 'smeaton array'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose vessel record fuzzily matches to smeaton array .', 'tostr': 'filter_eq { all_rows ; vessel ; smeaton array }'}, 'max speed'], 'result': None, 'ind': 2, 'tostr': 'hop { filter_eq { all_rows ; vessel ; smeaton array } ; max speed }', 'tointer': 'select the rows whose vessel record fuzzily matches to smeaton array . take the max speed record of this row .'}, {'func': 'str_hop', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'vessel', 'marianarray'], 'result': None, 'ind': 1, 'tointer': 'select the rows whose vessel record fuzzily matches to marianarray .', 'tostr': 'filter_eq { all_rows ; vessel ; marianarray }'}, 'max speed'], 'result': None, 'ind': 3, 'tostr': 'hop { filter_eq { all_rows ; vessel ; marianarray } ; max speed }', 'tointer': 'select the rows whose vessel record fuzzily matches to marianarray . take the max speed record of this row .'}], 'result': True, 'ind': 4, 'tostr': 'greater { hop { filter_eq { all_rows ; vessel ; smeaton array } ; max speed } ; hop { filter_eq { all_rows ; vessel ; marianarray } ; max speed } } = true', 'tointer': 'select the rows whose vessel record fuzzily matches to smeaton array . take the max speed record of this row . select the rows whose vessel record fuzzily matches to marianarray . take the max speed record of this row . the first record is greater than the second record .'}
greater { hop { filter_eq { all_rows ; vessel ; smeaton array } ; max speed } ; hop { filter_eq { all_rows ; vessel ; marianarray } ; max speed } } = true
select the rows whose vessel record fuzzily matches to smeaton array . take the max speed record of this row . select the rows whose vessel record fuzzily matches to marianarray . take the max speed record of this row . the first record is greater than the second record .
5
5
{'greater_4': 4, 'result_5': 5, 'str_hop_2': 2, 'filter_str_eq_0': 0, 'all_rows_6': 6, 'vessel_7': 7, 'smeaton array_8': 8, 'max speed_9': 9, 'str_hop_3': 3, 'filter_str_eq_1': 1, 'all_rows_10': 10, 'vessel_11': 11, 'marianarray_12': 12, 'max speed_13': 13}
{'greater_4': 'greater', 'result_5': 'true', 'str_hop_2': 'str_hop', 'filter_str_eq_0': 'filter_str_eq', 'all_rows_6': 'all_rows', 'vessel_7': 'vessel', 'smeaton array_8': 'smeaton array', 'max speed_9': 'max speed', 'str_hop_3': 'str_hop', 'filter_str_eq_1': 'filter_str_eq', 'all_rows_10': 'all_rows', 'vessel_11': 'vessel', 'marianarray_12': 'marianarray', 'max speed_13': 'max speed'}
{'greater_4': [5], 'result_5': [], 'str_hop_2': [4], 'filter_str_eq_0': [2], 'all_rows_6': [0], 'vessel_7': [0], 'smeaton array_8': [0], 'max speed_9': [2], 'str_hop_3': [4], 'filter_str_eq_1': [3], 'all_rows_10': [1], 'vessel_11': [1], 'marianarray_12': [1], 'max speed_13': [3]}
['vessel', 'built', 'max speed', 'length', 'breadth', 'flag', 'propulsion']
[['gallion', '2010', '30 knots', '20 m', '6.5 m', 'united kingdom', 'prop'], ['gardian 1', '2010', '30 knots', '20 m', '6.5 m', 'united kingdom', 'prop'], ['gardian 2', '2010', '30 knots', '20 m', '6.5 m', 'united kingdom', 'prop'], ['gardian 7', '2010', '30 knots', '20 m', '6.5 m', 'united kingdom', 'prop'], ['gardian 9', '2010', '30 knots', '20 m', '6.5 m', 'united kingdom', 'prop'], ['marianarray', '2011', '26 knots', '17 m', '6 m', 'united kingdom', 'jet'], ['smeaton array', '2011', '30 knots', '20 m', '6 m', 'united kingdom', 'controllable pitch propeller']]
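Comparing '30 knots' with '26 knots' needs a numeric reading of the 'max speed' strings; taking the leading number is an assumption of this sketch.

# Evaluate: smeaton array's max speed is greater than marianarray's.
max_speed = {'smeaton array': '30 knots', 'marianarray': '26 knots'}  # from the table above

def knots(value):
    return int(value.split()[0])  # '30 knots' -> 30

print(knots(max_speed['smeaton array']) > knots(max_speed['marianarray']))  # True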
1954 vfl season
https://en.wikipedia.org/wiki/1954_VFL_season
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-10773616-14.html.csv
majority
all of the games were played on 31 july 1954 .
{'scope': 'all', 'col': '7', 'most_or_all': 'all', 'criterion': 'equal', 'value': '31 july 1954', 'subset': None}
{'func': 'all_str_eq', 'args': ['all_rows', 'date', '31 july 1954'], 'result': True, 'ind': 0, 'tointer': 'for the date records of all rows , all of them fuzzily match to 31 july 1954 .', 'tostr': 'all_eq { all_rows ; date ; 31 july 1954 } = true'}
all_eq { all_rows ; date ; 31 july 1954 } = true
for the date records of all rows , all of them fuzzily match to 31 july 1954 .
1
1
{'all_str_eq_0': 0, 'result_1': 1, 'all_rows_2': 2, 'date_3': 3, '31 july 1954_4': 4}
{'all_str_eq_0': 'all_str_eq', 'result_1': 'true', 'all_rows_2': 'all_rows', 'date_3': 'date', '31 july 1954_4': '31 july 1954'}
{'all_str_eq_0': [1], 'result_1': [], 'all_rows_2': [0], 'date_3': [0], '31 july 1954_4': [0]}
['home team', 'home team score', 'away team', 'away team score', 'venue', 'crowd', 'date']
[['collingwood', '12.13 ( 85 )', 'st kilda', '7.15 ( 57 )', 'victoria park', '16500', '31 july 1954'], ['carlton', '9.11 ( 65 )', 'richmond', '6.10 ( 46 )', 'princes park', '25863', '31 july 1954'], ['melbourne', '16.14 ( 110 )', 'hawthorn', '5.3 ( 33 )', 'mcg', '26708', '31 july 1954'], ['south melbourne', '6.5 ( 41 )', 'footscray', '11.12 ( 78 )', 'lake oval', '19500', '31 july 1954'], ['north melbourne', '8.19 ( 67 )', 'fitzroy', '8.6 ( 54 )', 'arden street oval', '11000', '31 july 1954'], ['geelong', '16.14 ( 110 )', 'essendon', '11.10 ( 76 )', 'kardinia park', '28158', '31 july 1954']]
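The all_eq check is a one-line pass over the 'date' column; the sketch keeps the home team alongside each date only so the rows stay recognisable.

# Evaluate: all_eq { all_rows ; date ; 31 july 1954 }
games = [
    ('collingwood', '31 july 1954'), ('carlton', '31 july 1954'),
    ('melbourne', '31 july 1954'), ('south melbourne', '31 july 1954'),
    ('north melbourne', '31 july 1954'), ('geelong', '31 july 1954'),
]  # (home team, date)

print(all(date == '31 july 1954' for _, date in games))  # True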
1930 british empire games
https://en.wikipedia.org/wiki/1930_British_Empire_Games
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-177520-1.html.csv
unique
the holder of rank three was the only one to have 8 bronze medals at the 1930 british empire games .
{'scope': 'all', 'row': '3', 'col': '4', 'col_other': '1', 'criterion': 'equal', 'value': '8', 'subset': None}
{'func': 'and', 'args': [{'func': 'only', 'args': [{'func': 'filter_eq', 'args': ['all_rows', 'bronze', '8'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose bronze record is equal to 8 .', 'tostr': 'filter_eq { all_rows ; bronze ; 8 }'}], 'result': True, 'ind': 1, 'tostr': 'only { filter_eq { all_rows ; bronze ; 8 } }', 'tointer': 'select the rows whose bronze record is equal to 8 . there is only one such row in the table .'}, {'func': 'str_eq', 'args': [{'func': 'str_hop', 'args': [{'func': 'filter_eq', 'args': ['all_rows', 'bronze', '8'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose bronze record is equal to 8 .', 'tostr': 'filter_eq { all_rows ; bronze ; 8 }'}, 'rank'], 'result': '3', 'ind': 2, 'tostr': 'hop { filter_eq { all_rows ; bronze ; 8 } ; rank }'}, '3'], 'result': True, 'ind': 3, 'tostr': 'eq { hop { filter_eq { all_rows ; bronze ; 8 } ; rank } ; 3 }', 'tointer': 'the rank record of this unqiue row is 3 .'}], 'result': True, 'ind': 4, 'tostr': 'and { only { filter_eq { all_rows ; bronze ; 8 } } ; eq { hop { filter_eq { all_rows ; bronze ; 8 } ; rank } ; 3 } } = true', 'tointer': 'select the rows whose bronze record is equal to 8 . there is only one such row in the table . the rank record of this unqiue row is 3 .'}
and { only { filter_eq { all_rows ; bronze ; 8 } } ; eq { hop { filter_eq { all_rows ; bronze ; 8 } ; rank } ; 3 } } = true
select the rows whose bronze record is equal to 8 . there is only one such row in the table . the rank record of this unique row is 3 .
6
5
{'and_4': 4, 'result_5': 5, 'only_1': 1, 'filter_eq_0': 0, 'all_rows_6': 6, 'bronze_7': 7, '8_8': 8, 'str_eq_3': 3, 'str_hop_2': 2, 'rank_9': 9, '3_10': 10}
{'and_4': 'and', 'result_5': 'true', 'only_1': 'only', 'filter_eq_0': 'filter_eq', 'all_rows_6': 'all_rows', 'bronze_7': 'bronze', '8_8': '8', 'str_eq_3': 'str_eq', 'str_hop_2': 'str_hop', 'rank_9': 'rank', '3_10': '3'}
{'and_4': [5], 'result_5': [], 'only_1': [4], 'filter_eq_0': [1, 2], 'all_rows_6': [0], 'bronze_7': [0], '8_8': [0], 'str_eq_3': [4], 'str_hop_2': [3], 'rank_9': [2], '3_10': [3]}
['rank', 'gold', 'silver', 'bronze', 'total']
[['1', '25', '22', '13', '60'], ['2', '20', '16', '18', '54'], ['3', '6', '4', '8', '18'], ['4', '3', '4', '2', '9'], ['5', '3', '4', '1', '8'], ['6', '2', '3', '5', '10'], ['7', '0', '2', '1', '3'], ['8', '0', '1', '1', '2'], ['9', '0', '1', '0', '1'], ['total', '59', '57', '49', '165']]
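Note that the filter runs over every row, including the 'total' row, whose 49 bronze medals do not interfere. A sketch over the (rank, bronze) columns copied from the table.

# Evaluate: only one row with 8 bronze medals, and its rank is 3.
medal_rows = [
    ('1', 13), ('2', 18), ('3', 8), ('4', 2), ('5', 1),
    ('6', 5), ('7', 1), ('8', 1), ('9', 0), ('total', 49),
]

eights = [r for r in medal_rows if r[1] == 8]     # filter_eq on bronze
print(len(eights) == 1 and eights[0][0] == '3')   # True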
tom nijssen
https://en.wikipedia.org/wiki/Tom_Nijssen
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-13662140-2.html.csv
comparative
tom nijssen had a winning record of 6 - 4 at the paris atp masters series , compared to the rome atp masters series , where he had a losing record of 0 - 8 .
{'row_1': '17', 'row_2': '12', 'col': '12', 'col_other': '1', 'relation': 'not_equal', 'record_mentioned': 'yes', 'diff_result': None}
{'func': 'and', 'args': [{'func': 'not_str_eq', 'args': [{'func': 'str_hop', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'tournament', 'paris'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose tournament record fuzzily matches to paris .', 'tostr': 'filter_eq { all_rows ; tournament ; paris }'}, 'career win - loss'], 'result': None, 'ind': 2, 'tostr': 'hop { filter_eq { all_rows ; tournament ; paris } ; career win - loss }', 'tointer': 'select the rows whose tournament record fuzzily matches to paris . take the career win - loss record of this row .'}, {'func': 'str_hop', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'tournament', 'rome'], 'result': None, 'ind': 1, 'tointer': 'select the rows whose tournament record fuzzily matches to rome .', 'tostr': 'filter_eq { all_rows ; tournament ; rome }'}, 'career win - loss'], 'result': None, 'ind': 3, 'tostr': 'hop { filter_eq { all_rows ; tournament ; rome } ; career win - loss }', 'tointer': 'select the rows whose tournament record fuzzily matches to rome . take the career win - loss record of this row .'}], 'result': True, 'ind': 4, 'tostr': 'not_eq { hop { filter_eq { all_rows ; tournament ; paris } ; career win - loss } ; hop { filter_eq { all_rows ; tournament ; rome } ; career win - loss } }', 'tointer': 'select the rows whose tournament record fuzzily matches to paris . take the career win - loss record of this row . select the rows whose tournament record fuzzily matches to rome . take the career win - loss record of this row . the first record does not match to the second record .'}, {'func': 'and', 'args': [{'func': 'str_eq', 'args': [{'func': 'str_hop', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'tournament', 'paris'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose tournament record fuzzily matches to paris .', 'tostr': 'filter_eq { all_rows ; tournament ; paris }'}, 'career win - loss'], 'result': None, 'ind': 2, 'tostr': 'hop { filter_eq { all_rows ; tournament ; paris } ; career win - loss }', 'tointer': 'select the rows whose tournament record fuzzily matches to paris . take the career win - loss record of this row .'}, '6 - 4'], 'result': True, 'ind': 5, 'tostr': 'eq { hop { filter_eq { all_rows ; tournament ; paris } ; career win - loss } ; 6 - 4 }', 'tointer': 'the career win - loss record of the first row is 6 - 4 .'}, {'func': 'str_eq', 'args': [{'func': 'str_hop', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'tournament', 'rome'], 'result': None, 'ind': 1, 'tointer': 'select the rows whose tournament record fuzzily matches to rome .', 'tostr': 'filter_eq { all_rows ; tournament ; rome }'}, 'career win - loss'], 'result': None, 'ind': 3, 'tostr': 'hop { filter_eq { all_rows ; tournament ; rome } ; career win - loss }', 'tointer': 'select the rows whose tournament record fuzzily matches to rome . take the career win - loss record of this row .'}, '0 - 8'], 'result': True, 'ind': 6, 'tostr': 'eq { hop { filter_eq { all_rows ; tournament ; rome } ; career win - loss } ; 0 - 8 }', 'tointer': 'the career win - loss record of the second row is 0 - 8 .'}], 'result': True, 'ind': 7, 'tostr': 'and { eq { hop { filter_eq { all_rows ; tournament ; paris } ; career win - loss } ; 6 - 4 } ; eq { hop { filter_eq { all_rows ; tournament ; rome } ; career win - loss } ; 0 - 8 } }', 'tointer': 'the career win - loss record of the first row is 6 - 4 . 
the career win - loss record of the second row is 0 - 8 .'}], 'result': True, 'ind': 8, 'tostr': 'and { not_eq { hop { filter_eq { all_rows ; tournament ; paris } ; career win - loss } ; hop { filter_eq { all_rows ; tournament ; rome } ; career win - loss } } ; and { eq { hop { filter_eq { all_rows ; tournament ; paris } ; career win - loss } ; 6 - 4 } ; eq { hop { filter_eq { all_rows ; tournament ; rome } ; career win - loss } ; 0 - 8 } } } = true', 'tointer': 'select the rows whose tournament record fuzzily matches to paris . take the career win - loss record of this row . select the rows whose tournament record fuzzily matches to rome . take the career win - loss record of this row . the first record does not match to the second record . the career win - loss record of the first row is 6 - 4 . the career win - loss record of the second row is 0 - 8 .'}
and { not_eq { hop { filter_eq { all_rows ; tournament ; paris } ; career win - loss } ; hop { filter_eq { all_rows ; tournament ; rome } ; career win - loss } } ; and { eq { hop { filter_eq { all_rows ; tournament ; paris } ; career win - loss } ; 6 - 4 } ; eq { hop { filter_eq { all_rows ; tournament ; rome } ; career win - loss } ; 0 - 8 } } } = true
select the rows whose tournament record fuzzily matches to paris . take the career win - loss record of this row . select the rows whose tournament record fuzzily matches to rome . take the career win - loss record of this row . the first record does not match to the second record . the career win - loss record of the first row is 6 - 4 . the career win - loss record of the second row is 0 - 8 .
13
9
{'and_8': 8, 'result_9': 9, 'not_str_eq_4': 4, 'str_hop_2': 2, 'filter_str_eq_0': 0, 'all_rows_10': 10, 'tournament_11': 11, 'paris_12': 12, 'career win - loss_13': 13, 'str_hop_3': 3, 'filter_str_eq_1': 1, 'all_rows_14': 14, 'tournament_15': 15, 'rome_16': 16, 'career win - loss_17': 17, 'and_7': 7, 'str_eq_5': 5, '6 - 4_18': 18, 'str_eq_6': 6, '0 - 8_19': 19}
{'and_8': 'and', 'result_9': 'true', 'not_str_eq_4': 'not_str_eq', 'str_hop_2': 'str_hop', 'filter_str_eq_0': 'filter_str_eq', 'all_rows_10': 'all_rows', 'tournament_11': 'tournament', 'paris_12': 'paris', 'career win - loss_13': 'career win - loss', 'str_hop_3': 'str_hop', 'filter_str_eq_1': 'filter_str_eq', 'all_rows_14': 'all_rows', 'tournament_15': 'tournament', 'rome_16': 'rome', 'career win - loss_17': 'career win - loss', 'and_7': 'and', 'str_eq_5': 'str_eq', '6 - 4_18': '6 - 4', 'str_eq_6': 'str_eq', '0 - 8_19': '0 - 8'}
{'and_8': [9], 'result_9': [], 'not_str_eq_4': [8], 'str_hop_2': [4, 5], 'filter_str_eq_0': [2], 'all_rows_10': [0], 'tournament_11': [0], 'paris_12': [0], 'career win - loss_13': [2], 'str_hop_3': [4, 6], 'filter_str_eq_1': [3], 'all_rows_14': [1], 'tournament_15': [1], 'rome_16': [1], 'career win - loss_17': [3], 'and_7': [8], 'str_eq_5': [7], '6 - 4_18': [5], 'str_eq_6': [7], '0 - 8_19': [6]}
['tournament', '1990', '1991', '1992', '1993', '1994', '1995', '1996', '1997', '1998', 'career sr', 'career win - loss']
[['grand slams', 'grand slams', 'grand slams', 'grand slams', 'grand slams', 'grand slams', 'grand slams', 'grand slams', 'grand slams', 'grand slams', 'grand slams', 'grand slams'], ['australian open', '1r', '1r', 'qf', '2r', 'qf', '1r', '1r', '3r', '1r', '0 / 11', '12 - 11'], ['french open', '1r', 'qf', '2r', '2r', '3r', '1r', '1r', '2r', 'a', '0 / 12', '13 - 12'], ['wimbledon', '2r', '3r', '1r', '1r', 'qf', '1r', '3r', '2r', '1r', '0 / 11', '12 - 11'], ['us open', 'a', '1r', '3r', '3r', 'qf', '2r', '2r', '1r', 'a', '0 / 8', '9 - 8'], ['grand slam sr', '0 / 3', '0 / 4', '0 / 4', '0 / 4', '0 / 4', '0 / 4', '0 / 4', '0 / 4', '0 / 2', '0 / 42', 'n / a'], ['annual win - loss', '2 - 3', '5 - 4', '6 - 4', '4 - 4', '11 - 4', '1 - 4', '3 - 4', '4 - 4', '0 - 2', 'n / a', '46 - 42'], ['atp masters series', 'atp masters series', 'atp masters series', 'atp masters series', 'atp masters series', 'atp masters series', 'atp masters series', 'atp masters series', 'atp masters series', 'atp masters series', 'atp masters series', 'atp masters series'], ['indian wells', 'a', 'a', '1r', 'a', 'a', 'a', '1r', 'a', 'a', '0 / 2', '0 - 2'], ['miami', '1r', '1r', '3r', '2r', '2r', '2r', '1r', '1r', '2r', '0 / 9', '2 - 9'], ['monte carlo', 'a', '1r', 'sf', '1r', '1r', 'sf', '1r', 'a', 'a', '0 / 6', '5 - 6'], ['rome', '1r', '1r', '1r', '1r', '1r', '1r', '1r', '1r', 'a', '0 / 8', '0 - 8'], ['hamburg', '1r', 'qf', 'sf', '1r', 'qf', '1r', '2r', 'a', 'a', '0 / 7', '5 - 7'], ['canada', 'a', 'a', 'a', 'a', 'a', 'a', 'a', 'a', 'a', '0 / 0', '0 - 0'], ['cincinnati', 'a', 'a', 'a', 'a', 'a', 'a', 'a', 'a', 'a', '0 / 0', '0 - 0'], ['stuttgart ( stockholm )', '1r', 'f', 'qf', 'qf', '2r', 'a', 'a', 'a', 'a', '0 / 5', '6 - 5'], ['paris', 'a', '1r', 'qf', 'f', '2r', 'a', 'a', 'a', 'a', '0 / 4', '6 - 4'], ['masters series sr', '0 / 4', '0 / 6', '0 / 7', '0 / 6', '0 / 6', '0 / 4', '0 / 5', '0 / 2', '0 / 1', '0 / 41', 'n / a'], ['annual win - loss', '0 - 4', '5 - 6', '8 - 7', '5 - 6', '1 - 6', '4 - 4', '1 - 5', '0 - 2', '0 - 1', 'n / a', '24 - 41'], ['year end ranking', '53', '23', '18', '25', '28', '74', '79', '96', '313', 'n / a', 'n / a']]
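Editorial sketch (not part of the record above): the string-valued comparative just read can be replayed in a few lines of Python. The dictionary below hard-codes only the two career win - loss cells the logic form touches; the variable names are illustrative assumptions, not dataset fields.

```python
# Hedged sketch of the comparative: hop the 'career win - loss' cell for the
# paris and rome rows, check the two strings differ (not_eq), and check each
# against its expected literal (eq).
career_win_loss = {"paris": "6 - 4", "rome": "0 - 8"}  # values copied from the table content above

paris, rome = career_win_loss["paris"], career_win_loss["rome"]
print(paris != rome and paris == "6 - 4" and rome == "0 - 8")  # True, matching the record's result
```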
turkish airlines
https://en.wikipedia.org/wiki/Turkish_Airlines
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-167925-2.html.csv
unique
the turkish airlines flight crash on 2 february 1969 was the only one with 0 fatalities .
{'scope': 'all', 'row': '5', 'col': '6', 'col_other': '1', 'criterion': 'equal', 'value': '0', 'subset': None}
{'func': 'and', 'args': [{'func': 'only', 'args': [{'func': 'filter_eq', 'args': ['all_rows', 'fatalities', '0'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose fatalities record is equal to 0 .', 'tostr': 'filter_eq { all_rows ; fatalities ; 0 }'}], 'result': True, 'ind': 1, 'tostr': 'only { filter_eq { all_rows ; fatalities ; 0 } }', 'tointer': 'select the rows whose fatalities record is equal to 0 . there is only one such row in the table .'}, {'func': 'str_eq', 'args': [{'func': 'str_hop', 'args': [{'func': 'filter_eq', 'args': ['all_rows', 'fatalities', '0'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose fatalities record is equal to 0 .', 'tostr': 'filter_eq { all_rows ; fatalities ; 0 }'}, 'date'], 'result': '2 february 1969', 'ind': 2, 'tostr': 'hop { filter_eq { all_rows ; fatalities ; 0 } ; date }'}, '2 february 1969'], 'result': True, 'ind': 3, 'tostr': 'eq { hop { filter_eq { all_rows ; fatalities ; 0 } ; date } ; 2 february 1969 }', 'tointer': 'the date record of this unique row is 2 february 1969 .'}], 'result': True, 'ind': 4, 'tostr': 'and { only { filter_eq { all_rows ; fatalities ; 0 } } ; eq { hop { filter_eq { all_rows ; fatalities ; 0 } ; date } ; 2 february 1969 } } = true', 'tointer': 'select the rows whose fatalities record is equal to 0 . there is only one such row in the table . the date record of this unique row is 2 february 1969 .'}
and { only { filter_eq { all_rows ; fatalities ; 0 } } ; eq { hop { filter_eq { all_rows ; fatalities ; 0 } ; date } ; 2 february 1969 } } = true
select the rows whose fatalities record is equal to 0 . there is only one such row in the table . the date record of this unique row is 2 february 1969 .
6
5
{'and_4': 4, 'result_5': 5, 'only_1': 1, 'filter_eq_0': 0, 'all_rows_6': 6, 'fatalities_7': 7, '0_8': 8, 'str_eq_3': 3, 'str_hop_2': 2, 'date_9': 9, '2 february 1969_10': 10}
{'and_4': 'and', 'result_5': 'true', 'only_1': 'only', 'filter_eq_0': 'filter_eq', 'all_rows_6': 'all_rows', 'fatalities_7': 'fatalities', '0_8': '0', 'str_eq_3': 'str_eq', 'str_hop_2': 'str_hop', 'date_9': 'date', '2 february 1969_10': '2 february 1969'}
{'and_4': [5], 'result_5': [], 'only_1': [4], 'filter_eq_0': [1, 2], 'all_rows_6': [0], 'fatalities_7': [0], '0_8': [0], 'str_eq_3': [4], 'str_hop_2': [3], 'date_9': [2], '2 february 1969_10': [3]}
['date', 'flight', 'aircraft', 'registration', 'location', 'fatalities']
[['17 february 1959', 'n / a', 'vickers viscount type 793', 'tc - sev', 'london', '14'], ['23 september 1961', '100', 'fokker f27 - 100', 'tc - tay', 'ankara', '28'], ['8 march 1962', 'n / a', 'fairchild f - 27', 'tc - kop', 'adana', '11'], ['3 february 1964', 'n / a', 'douglas c - 47', 'tc - eti', 'ankara', '3'], ['2 february 1969', 'n / a', 'vickers viscount type 794', 'tc - set', 'ankara', '0'], ['26 january 1974', 'n / a', 'fokker f28 - 1000', 'tc - jao', 'izmir', '66'], ['3 march 1974', '981', 'mcdonnell douglas dc - 10', 'tc - jav', 'fontaine - chaalis , oise', '346'], ['30 january 1975', '345', 'fokker f28 - 1000', 'tc - jap', 'istanbul', '42'], ['19 september 1976', '452', 'boeing 727', 'tc - jbh', 'isparta', '154'], ['23 december 1979', 'n / a', 'fokker f28 - 1000', 'tc - jat', 'ankara', '41'], ['16 january 1983', '158', 'boeing 727', 'tc - jbr', 'ankara', '47'], ['29 december 1994', '278', 'boeing 737', 'tc - jes', 'van', '57'], ['7 april 1999', '5904', 'boeing 737', 'tc - jep', 'ceyhan', '6'], ['8 january 2003', '634', 'avro rj - 100', 'tc - thg', 'diyarbakır', '75'], ['25 february 2009', '1951', 'boeing 737', 'tc - jge', 'amsterdam', '9']]
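Editorial sketch (not part of the record above): the unique pattern only { filter_eq { all_rows ; fatalities ; 0 } } plus the date hop can be checked directly against the crash table. Only the (date, fatalities) columns are reproduced, and treating fatalities as integers is an assumption of this sketch.

```python
# Hedged sketch: exactly one crash row has 0 fatalities, and its date is 2 february 1969.
rows = [
    ("17 february 1959", 14), ("23 september 1961", 28), ("8 march 1962", 11),
    ("3 february 1964", 3), ("2 february 1969", 0), ("26 january 1974", 66),
    ("3 march 1974", 346), ("30 january 1975", 42), ("19 september 1976", 154),
    ("23 december 1979", 41), ("16 january 1983", 47), ("29 december 1994", 57),
    ("7 april 1999", 6), ("8 january 2003", 75), ("25 february 2009", 9),
]

zero_fatality_dates = [date for date, fatalities in rows if fatalities == 0]            # filter_eq
claim = len(zero_fatality_dates) == 1 and zero_fatality_dates[0] == "2 february 1969"   # only + hop + eq
print(claim)  # True
```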
united states house of representatives elections , 1986
https://en.wikipedia.org/wiki/United_States_House_of_Representatives_elections%2C_1986
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/1-1341586-44.html.csv
unique
only one of the people elected to serve in the house of representatives in 1986 was originally elected in 1985 .
{'scope': 'all', 'row': '1', 'col': '4', 'col_other': 'n/a', 'criterion': 'equal', 'value': '1985', 'subset': None}
{'func': 'only', 'args': [{'func': 'filter_eq', 'args': ['all_rows', 'first elected', '1985'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose first elected record is equal to 1985 .', 'tostr': 'filter_eq { all_rows ; first elected ; 1985 }'}], 'result': True, 'ind': 1, 'tostr': 'only { filter_eq { all_rows ; first elected ; 1985 } } = true', 'tointer': 'select the rows whose first elected record is equal to 1985 . there is only one such row in the table .'}
only { filter_eq { all_rows ; first elected ; 1985 } } = true
select the rows whose first elected record is equal to 1985 . there is only one such row in the table .
2
2
{'only_1': 1, 'result_2': 2, 'filter_eq_0': 0, 'all_rows_3': 3, 'first elected_4': 4, '1985_5': 5}
{'only_1': 'only', 'result_2': 'true', 'filter_eq_0': 'filter_eq', 'all_rows_3': 'all_rows', 'first elected_4': 'first elected', '1985_5': '1985'}
{'only_1': [2], 'result_2': [], 'filter_eq_0': [1], 'all_rows_3': [0], 'first elected_4': [0], '1985_5': [0]}
['district', 'incumbent', 'party', 'first elected', 'result', 'candidates']
[['texas 1', 'jim chapman', 'democratic', '1985', 're - elected', 'jim chapman ( d ) unopposed'], ['texas 4', 'ralph hall', 'democratic', '1980', 're - elected', 'ralph hall ( d ) 71.7 % thomas blow ( r ) 28.3 %'], ['texas 6', 'joe barton', 'republican', '1984', 're - elected', 'joe barton ( r ) 55.8 % pete geren ( d ) 44.2 %'], ['texas 8', 'jack fields', 'republican', '1980', 're - elected', 'jack fields ( r ) 68.4 % blaine mann ( d ) 31.6 %'], ['texas 9', 'jack brooks', 'democratic', '1952', 're - elected', 'jack brooks ( d ) 61.5 % lisa d duperier ( r ) 38.5 %'], ['texas 11', 'marvin leath', 'democratic', '1978', 're - elected', 'marvin leath ( d ) unopposed'], ['texas 12', 'jim wright', 'democratic', '1954', 're - elected', 'jim wright ( d ) 68.7 % don mcneil ( r ) 31.3 %'], ['texas 13', 'beau boulter', 'republican', '1984', 're - elected', 'beau boulter ( r ) 64.9 % doug seal ( d ) 35.1 %'], ['texas 14', 'mac sweeney', 'republican', '1984', 're - elected', 'mac sweeney ( r ) 52.3 % greg laughlin ( d ) 47.7 %'], ['texas 15', 'kika de la garza', 'democratic', '1964', 're - elected', 'kika de la garza ( d ) unopposed'], ['texas 16', 'ronald d coleman', 'democratic', '1982', 're - elected', 'ronald d coleman ( d ) 65.7 % roy gillia ( r ) 34.3 %'], ['texas 17', 'charles stenholm', 'democratic', '1978', 're - elected', 'charles stenholm ( d ) unopposed'], ['texas 18', 'mickey leland', 'democratic', '1978', 're - elected', 'mickey leland ( d ) 90.2 % joanne kuniansky ( i ) 9.8 %'], ['texas 19', 'larry combest', 'republican', '1984', 're - elected', 'larry combest ( r ) 62.0 % gerald mccathern ( d ) 38.0 %'], ['texas 20', 'henry b gonzalez', 'democratic', '1961', 're - elected', 'henry b gonzalez ( d ) unopposed'], ['texas 22', 'tom delay', 'republican', '1984', 're - elected', 'tom delay ( r ) 71.8 % susan director ( d ) 28.2 %'], ['texas 23', 'albert bustamante', 'democratic', '1984', 're - elected', 'albert bustamante ( d ) 90.7 % ken hendrix ( l ) 9.3 %'], ['texas 24', 'martin frost', 'democratic', '1978', 're - elected', 'martin frost ( d ) 67.2 % bob burk ( r ) 32.8 %'], ['texas 25', 'michael a andrews', 'democratic', '1982', 're - elected', 'michael a andrews ( d ) unopposed'], ['texas 26', 'dick armey', 'republican', '1984', 're - elected', 'dick armey ( r ) 68.1 % george richardson ( d ) 31.9 %']]
european parliament election , 1989 ( ireland )
https://en.wikipedia.org/wiki/European_Parliament_election%2C_1989_%28Ireland%29
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-13564557-2.html.csv
ordinal
the constituency dublin had the 2nd highest quota in the 1989 european parliament election in ireland .
{'row': '2', 'col': '6', 'order': '2', 'col_other': '1', 'max_or_min': 'max_to_min', 'value_mentioned': 'no', 'scope': 'all', 'subset': None}
{'func': 'str_eq', 'args': [{'func': 'str_hop', 'args': [{'func': 'nth_argmax', 'args': ['all_rows', 'quota', '2'], 'result': None, 'ind': 0, 'tostr': 'nth_argmax { all_rows ; quota ; 2 }'}, 'constituency'], 'result': 'dublin', 'ind': 1, 'tostr': 'hop { nth_argmax { all_rows ; quota ; 2 } ; constituency }'}, 'dublin'], 'result': True, 'ind': 2, 'tostr': 'eq { hop { nth_argmax { all_rows ; quota ; 2 } ; constituency } ; dublin } = true', 'tointer': 'select the row whose quota record of all rows is 2nd maximum . the constituency record of this row is dublin .'}
eq { hop { nth_argmax { all_rows ; quota ; 2 } ; constituency } ; dublin } = true
select the row whose quota record of all rows is 2nd maximum . the constituency record of this row is dublin .
3
3
{'str_eq_2': 2, 'result_3': 3, 'str_hop_1': 1, 'nth_argmax_0': 0, 'all_rows_4': 4, 'quota_5': 5, '2_6': 6, 'constituency_7': 7, 'dublin_8': 8}
{'str_eq_2': 'str_eq', 'result_3': 'true', 'str_hop_1': 'str_hop', 'nth_argmax_0': 'nth_argmax', 'all_rows_4': 'all_rows', 'quota_5': 'quota', '2_6': '2', 'constituency_7': 'constituency', 'dublin_8': 'dublin'}
{'str_eq_2': [3], 'result_3': [], 'str_hop_1': [2], 'nth_argmax_0': [1], 'all_rows_4': [0], 'quota_5': [0], '2_6': [0], 'constituency_7': [1], 'dublin_8': [2]}
['constituency', 'electorate', 'turnout', 'spoilt', 'valid poll', 'quota', 'seats', 'candidates']
[['connachtulster', '464661', '322664 ( 69.4 % )', '10362 ( 3.2 % )', '312302', '78076', '3', '13'], ['dublin', '711416', '455539 ( 64.0 % )', '7137 ( 1.5 % )', '448402', '89681', '4', '11'], ['leinster', '571694', '391697 ( 68.5 % )', '14106 ( 3.6 % )', '377591', '94398', '3', '15'], ['munster', '703913', '505219 ( 71.7 % )', '10786 ( 2.2 % )', '494433', '82406', '5', '15'], ['total', '2451684', '1675119 ( 68.3 % )', '42391 ( 2.6 % )', '1632728', 'n / a', '15', '44']]
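Editorial sketch (not part of the record above): nth_argmax { all_rows ; quota ; 2 } ranks the quota column and takes the 2nd-largest row. Skipping the 'n / a' cell of the total row is an assumption this sketch makes explicit.

```python
# Hedged sketch of the ordinal pattern: constituency with the 2nd-highest quota.
quotas = {
    "connachtulster": "78076",
    "dublin": "89681",
    "leinster": "94398",
    "munster": "82406",
    "total": "n / a",   # non-numeric cell, ignored below
}

numeric = {name: int(q) for name, q in quotas.items() if q.isdigit()}
ranked = sorted(numeric, key=numeric.get, reverse=True)  # names ordered by quota, descending
print(ranked[1])  # 'dublin' -> matches the claim sentence
```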
peak uranium
https://en.wikipedia.org/wiki/Peak_uranium
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-15624586-2.html.csv
comparative
the united states of america has a higher percentage of world demand for uranium than south korea .
{'row_1': '1', 'row_2': '6', 'col': '3', 'col_other': '1', 'relation': 'greater', 'record_mentioned': 'no', 'diff_result': None}
{'func': 'greater', 'args': [{'func': 'str_hop', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'country', 'usa'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose country record fuzzily matches to usa .', 'tostr': 'filter_eq { all_rows ; country ; usa }'}, '% of world demand'], 'result': None, 'ind': 2, 'tostr': 'hop { filter_eq { all_rows ; country ; usa } ; % of world demand }', 'tointer': 'select the rows whose country record fuzzily matches to usa . take the % of world demand record of this row .'}, {'func': 'str_hop', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'country', 'south korea'], 'result': None, 'ind': 1, 'tointer': 'select the rows whose country record fuzzily matches to south korea .', 'tostr': 'filter_eq { all_rows ; country ; south korea }'}, '% of world demand'], 'result': None, 'ind': 3, 'tostr': 'hop { filter_eq { all_rows ; country ; south korea } ; % of world demand }', 'tointer': 'select the rows whose country record fuzzily matches to south korea . take the % of world demand record of this row .'}], 'result': True, 'ind': 4, 'tostr': 'greater { hop { filter_eq { all_rows ; country ; usa } ; % of world demand } ; hop { filter_eq { all_rows ; country ; south korea } ; % of world demand } } = true', 'tointer': 'select the rows whose country record fuzzily matches to usa . take the % of world demand record of this row . select the rows whose country record fuzzily matches to south korea . take the % of world demand record of this row . the first record is greater than the second record .'}
greater { hop { filter_eq { all_rows ; country ; usa } ; % of world demand } ; hop { filter_eq { all_rows ; country ; south korea } ; % of world demand } } = true
select the rows whose country record fuzzily matches to usa . take the % of world demand record of this row . select the rows whose country record fuzzily matches to south korea . take the % of world demand record of this row . the first record is greater than the second record .
5
5
{'greater_4': 4, 'result_5': 5, 'str_hop_2': 2, 'filter_str_eq_0': 0, 'all_rows_6': 6, 'country_7': 7, 'usa_8': 8, '% of world demand_9': 9, 'str_hop_3': 3, 'filter_str_eq_1': 1, 'all_rows_10': 10, 'country_11': 11, 'south korea_12': 12, '% of world demand_13': 13}
{'greater_4': 'greater', 'result_5': 'true', 'str_hop_2': 'str_hop', 'filter_str_eq_0': 'filter_str_eq', 'all_rows_6': 'all_rows', 'country_7': 'country', 'usa_8': 'usa', '% of world demand_9': '% of world demand', 'str_hop_3': 'str_hop', 'filter_str_eq_1': 'filter_str_eq', 'all_rows_10': 'all_rows', 'country_11': 'country', 'south korea_12': 'south korea', '% of world demand_13': '% of world demand'}
{'greater_4': [5], 'result_5': [], 'str_hop_2': [4], 'filter_str_eq_0': [2], 'all_rows_6': [0], 'country_7': [0], 'usa_8': [0], '% of world demand_9': [2], 'str_hop_3': [4], 'filter_str_eq_1': [3], 'all_rows_10': [1], 'country_11': [1], 'south korea_12': [1], '% of world demand_13': [3]}
['country', 'uranium required 2006 - 08', '% of world demand', 'indigenous mining production 2006', 'deficit ( - surplus )']
[['usa', 'tonnes ( 10 6lb )', '29.3 %', 'tonnes ( 10 6lb )', 'tonnes ( 10 6lb )'], ['france', 'tonnes ( 10 6lb )', '16.3 %', '0', 'tonnes ( 10 6lb )'], ['japan', 'tonnes ( 10 6lb )', '11.8 %', '0', 'tonnes ( 10 6lb )'], ['russia', 'tonnes ( 10 6lb )', '5.2 %', 'tonnes ( 10 6lb )', 'tonnes ( 10 6lb )'], ['germany', 'tonnes ( 10 6lb )', '5.2 %', 'tonnes ( 10 6lb )', 'tonnes ( 10 6lb )'], ['south korea', 'tonnes ( 10 6lb )', '4.8 %', '0', 'tonnes ( 10 6lb )'], ['uk', 'tonnes ( 10 6lb )', '3.4 %', '0', 'tonnes ( 10 6lb )'], ['rest of world', 'tonnes ( 10 6lb )', '24.0 %', 'tonnes ( 10 6lb )', 'tonnes ( 10 6lb )'], ['total', 'tonnes ( 10 6lb )', '100.0 %', 'tonnes ( 10 6lb )', 'tonnes ( 10 6lb )']]
2004 grand prix of road america
https://en.wikipedia.org/wiki/2004_Grand_Prix_of_Road_America
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-16759619-2.html.csv
unique
gastón mazzacane was the only driver to go off course in the 2004 grand prix of road america .
{'scope': 'all', 'row': '18', 'col': '4', 'col_other': '1', 'criterion': 'equal', 'value': 'off course', 'subset': None}
{'func': 'and', 'args': [{'func': 'only', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'time / retired', 'off course'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose time / retired record fuzzily matches to off course .', 'tostr': 'filter_eq { all_rows ; time / retired ; off course }'}], 'result': True, 'ind': 1, 'tostr': 'only { filter_eq { all_rows ; time / retired ; off course } }', 'tointer': 'select the rows whose time / retired record fuzzily matches to off course . there is only one such row in the table .'}, {'func': 'str_eq', 'args': [{'func': 'str_hop', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'time / retired', 'off course'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose time / retired record fuzzily matches to off course .', 'tostr': 'filter_eq { all_rows ; time / retired ; off course }'}, 'driver'], 'result': 'gastón mazzacane', 'ind': 2, 'tostr': 'hop { filter_eq { all_rows ; time / retired ; off course } ; driver }'}, 'gastón mazzacane'], 'result': True, 'ind': 3, 'tostr': 'eq { hop { filter_eq { all_rows ; time / retired ; off course } ; driver } ; gastón mazzacane }', 'tointer': 'the driver record of this unique row is gastón mazzacane .'}], 'result': True, 'ind': 4, 'tostr': 'and { only { filter_eq { all_rows ; time / retired ; off course } } ; eq { hop { filter_eq { all_rows ; time / retired ; off course } ; driver } ; gastón mazzacane } } = true', 'tointer': 'select the rows whose time / retired record fuzzily matches to off course . there is only one such row in the table . the driver record of this unique row is gastón mazzacane .'}
and { only { filter_eq { all_rows ; time / retired ; off course } } ; eq { hop { filter_eq { all_rows ; time / retired ; off course } ; driver } ; gastón mazzacane } } = true
select the rows whose time / retired record fuzzily matches to off course . there is only one such row in the table . the driver record of this unique row is gastón mazzacane .
6
5
{'and_4': 4, 'result_5': 5, 'only_1': 1, 'filter_str_eq_0': 0, 'all_rows_6': 6, 'time / retired_7': 7, 'off course_8': 8, 'str_eq_3': 3, 'str_hop_2': 2, 'driver_9': 9, 'gastón mazzacane_10': 10}
{'and_4': 'and', 'result_5': 'true', 'only_1': 'only', 'filter_str_eq_0': 'filter_str_eq', 'all_rows_6': 'all_rows', 'time / retired_7': 'time / retired', 'off course_8': 'off course', 'str_eq_3': 'str_eq', 'str_hop_2': 'str_hop', 'driver_9': 'driver', 'gastón mazzacane_10': 'gastón mazzacane'}
{'and_4': [5], 'result_5': [], 'only_1': [4], 'filter_str_eq_0': [1, 2], 'all_rows_6': [0], 'time / retired_7': [0], 'off course_8': [0], 'str_eq_3': [4], 'str_hop_2': [3], 'driver_9': [2], 'gastón mazzacane_10': [3]}
['driver', 'team', 'laps', 'time / retired', 'grid', 'points']
[['alex tagliani', 'rocketsports racing', '48', '1:45:07.288', '13', '33'], ['rodolfo lavín', 'forsythe racing', '48', '+ 1.855 secs', '10', '28'], ['sébastien bourdais', 'newman / haas racing', '48', '+ 2.767 secs', '1', '27'], ['ryan hunter - reay', 'herdez competition', '48', '+ 3.814 secs', '2', '24'], ['mario domínguez', 'herdez competition', '48', '+ 4.398 secs', '15', '21'], ['oriol servià', 'dale coyne racing', '48', '+ 6.390 secs', '8', '19'], ['justin wilson', 'mi - jack conquest racing', '48', '+ 8.500 secs', '9', '17'], ['jimmy vasser', 'pkv racing', '48', '+ 8.546 secs', '3', '15'], ['michel jourdain , jr', 'rusport', '48', '+ 9.056 secs', '11', '13'], ['guy smith', 'rocketsports racing', '48', '+ 9.997 secs', '16', '11'], ['mario haberfeld', 'walker racing', '48', '+ 16.725 secs', '12', '10'], ['paul tracy', 'forsythe racing', '48', '+ 26.616 secs', '6', '10'], ['a j allmendinger', 'rusport', '47', '+ 1 lap', '7', '8'], ['patrick carpentier', 'forsythe racing', '46', '+ 2 laps', '5', '7'], ['bruno junqueira', 'newman / haas racing', '46', '+ 2 laps', '4', '7'], ['roberto gonzález', 'pkv racing', '46', '+ 2 laps', '14', '5'], ['alex sperafico', 'mi - jack conquest racing', '46', '+ 2 laps', '17', '4'], ['gastón mazzacane', 'dale coyne racing', '29', 'off course', '18', '3']]
fugitive pieces ( film )
https://en.wikipedia.org/wiki/Fugitive_Pieces_%28film%29
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-17480544-1.html.csv
count
among the nominations that fugitive pieces ( film ) received in 2008 , 3 of them were wins .
{'scope': 'subset', 'criterion': 'equal', 'value': 'won', 'result': '3', 'col': '2', 'subset': {'col': '1', 'criterion': 'equal', 'value': '2008'}}
{'func': 'eq', 'args': [{'func': 'count', 'args': [{'func': 'filter_str_eq', 'args': [{'func': 'filter_eq', 'args': ['all_rows', 'year', '2008'], 'result': None, 'ind': 0, 'tostr': 'filter_eq { all_rows ; year ; 2008 }', 'tointer': 'select the rows whose year record is equal to 2008 .'}, 'nominated / won', 'won'], 'result': None, 'ind': 1, 'tointer': 'select the rows whose year record is equal to 2008 . among these rows , select the rows whose nominated / won record fuzzily matches to won .', 'tostr': 'filter_eq { filter_eq { all_rows ; year ; 2008 } ; nominated / won ; won }'}], 'result': '3', 'ind': 2, 'tostr': 'count { filter_eq { filter_eq { all_rows ; year ; 2008 } ; nominated / won ; won } }', 'tointer': 'select the rows whose year record is equal to 2008 . among these rows , select the rows whose nominated / won record fuzzily matches to won . the number of such rows is 3 .'}, '3'], 'result': True, 'ind': 3, 'tostr': 'eq { count { filter_eq { filter_eq { all_rows ; year ; 2008 } ; nominated / won ; won } } ; 3 } = true', 'tointer': 'select the rows whose year record is equal to 2008 . among these rows , select the rows whose nominated / won record fuzzily matches to won . the number of such rows is 3 .'}
eq { count { filter_eq { filter_eq { all_rows ; year ; 2008 } ; nominated / won ; won } } ; 3 } = true
select the rows whose year record is equal to 2008 . among these rows , select the rows whose nominated / won record fuzzily matches to won . the number of such rows is 3 .
4
4
{'eq_3': 3, 'result_4': 4, 'count_2': 2, 'filter_str_eq_1': 1, 'filter_eq_0': 0, 'all_rows_5': 5, 'year_6': 6, '2008_7': 7, 'nominated / won_8': 8, 'won_9': 9, '3_10': 10}
{'eq_3': 'eq', 'result_4': 'true', 'count_2': 'count', 'filter_str_eq_1': 'filter_str_eq', 'filter_eq_0': 'filter_eq', 'all_rows_5': 'all_rows', 'year_6': 'year', '2008_7': '2008', 'nominated / won_8': 'nominated / won', 'won_9': 'won', '3_10': '3'}
{'eq_3': [4], 'result_4': [], 'count_2': [3], 'filter_str_eq_1': [2], 'filter_eq_0': [1], 'all_rows_5': [0], 'year_6': [0], '2008_7': [0], 'nominated / won_8': [1], 'won_9': [1], '3_10': [3]}
['year', 'nominated / won', 'award / category', 'festival / organization', 'role']
[['2007', 'won', 'best actor', 'rome film festival', 'rade šerbedžija as athos'], ['2008', 'nominated', 'best supporting actor in a motion picture', 'satellite award', 'rade šerbedžija as athos'], ['2008', 'won', 'best film', 'sydney film festival', '-'], ['2008', 'won', 'audience award ( narrative feature )', 'sarasota film festival', '-'], ['2008', 'won', 'jury award', 'newport beach film festival', '-']]
soo line locomotives
https://en.wikipedia.org/wiki/Soo_Line_locomotives
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-17248696-6.html.csv
aggregation
the total quantity made of soo line locomotive models is 151 .
{'scope': 'all', 'col': '6', 'type': 'sum', 'result': '151', 'subset': None}
{'func': 'round_eq', 'args': [{'func': 'sum', 'args': ['all_rows', 'quantity made'], 'result': '151', 'ind': 0, 'tostr': 'sum { all_rows ; quantity made }'}, '151'], 'result': True, 'ind': 1, 'tostr': 'round_eq { sum { all_rows ; quantity made } ; 151 } = true', 'tointer': 'the sum of the quantity made record of all rows is 151 .'}
round_eq { sum { all_rows ; quantity made } ; 151 } = true
the sum of the quantity made record of all rows is 151 .
2
2
{'eq_1': 1, 'result_2': 2, 'sum_0': 0, 'all_rows_3': 3, 'quantity made_4': 4, '151_5': 5}
{'eq_1': 'eq', 'result_2': 'true', 'sum_0': 'sum', 'all_rows_3': 'all_rows', 'quantity made_4': 'quantity made', '151_5': '151'}
{'eq_1': [2], 'result_2': [], 'sum_0': [1], 'all_rows_3': [0], 'quantity made_4': [0], '151_5': [1]}
['class', 'wheel arrangement', 'fleet number ( s )', 'manufacturer', 'year made', 'quantity made', 'quantity preserved']
[['2 - 8 - 0 - ooooo - consolidation', '2 - 8 - 0 - ooooo - consolidation', '2 - 8 - 0 - ooooo - consolidation', '2 - 8 - 0 - ooooo - consolidation', '2 - 8 - 0 - ooooo - consolidation', '2 - 8 - 0 - ooooo - consolidation', '2 - 8 - 0 - ooooo - consolidation'], ['f - 1', '2 - 8 - 0', '403 - 405 , 407 - 412', 'schenectady', '1893', '9', '0'], ['f - 2', '2 - 8 - 0', '406', 'schenectady', '1893', '1', '0'], ['f - 3', '2 - 8 - 0', '413 - 416', 'schenectady', '1893', '4', '0'], ['f - 4', '2 - 8 - 0', '417', 'schenectady', '1893', '1', '0'], ['f - 6', '2 - 8 - 0', '400 - 402 , 418 - 427', 'rhode island', '1893', '13', '0'], ['f - 7', '2 - 8 - 0', '428 - 430', 'schenectady', '1900', '3', '0'], ['f - 8', '2 - 8 - 0', '431 - 444', 'alco - schenectady', '1902 - 1903', '14', '1'], ['f - 9', '2 - 8 - 0', '445 - 472', 'alco - schenectady', '1905 - 1906', '28', '1'], ['f - 10', '2 - 8 - 0', '473 - 474', 'alco - schenectady', '1909', '2', '0'], ['f - 11', '2 - 8 - 0', '475 - 484', 'alco - schenectady', '1910', '10', '0'], ['f - 12', '2 - 8 - 0', '485 - 499', 'alco - schenectady', '1912 - 1913', '15', '0'], ['f - 20', '2 - 8 - 0', '2400 - 2424', 'alco - schenectady', '1903 - 1907', '25', '1'], ['f - 21', '2 - 8 - 0', '2425 - 2428', 'alco - schenectady', '1909', '4', '1'], ['f - 22', '2 - 8 - 0', '2429 - 2443', 'alco - schenectady', '1911', '15', '1'], ['f - 23', '2 - 8 - 0', '2444 - 2450', 'alco - schenectady', '1914', '7', '0']]
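Editorial sketch (not part of the record above): sum { all_rows ; quantity made } = 151 only works out if the repeated section-label row is ignored; that skip rule is an assumption spelled out in the code.

```python
# Hedged sketch of the aggregation pattern over the 'quantity made' column.
quantity_made = [
    "2 - 8 - 0 - ooooo - consolidation",   # section-label row, not a count
    "9", "1", "4", "1", "13", "3", "14", "28",
    "2", "10", "15", "25", "4", "15", "7",
]

total = sum(int(q) for q in quantity_made if q.isdigit())  # skip non-numeric cells
print(total)  # 151
```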
tokyo skytree
https://en.wikipedia.org/wiki/Tokyo_Skytree
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/1-2638104-1.html.csv
ordinal
tv tokyo / teleto ( tx ) is broadcast on a lower channel number by tokyo skytree than fuji television .
{'row': '6', 'col': '1', 'order': '2', 'col_other': '2', 'max_or_min': 'max_to_min', 'value_mentioned': 'no', 'scope': 'all', 'subset': None}
{'func': 'str_eq', 'args': [{'func': 'str_hop', 'args': [{'func': 'nth_argmax', 'args': ['all_rows', 'channel', '2'], 'result': None, 'ind': 0, 'tostr': 'nth_argmax { all_rows ; channel ; 2 }'}, 'channel name'], 'result': 'tv tokyo / teleto ( tx )', 'ind': 1, 'tostr': 'hop { nth_argmax { all_rows ; channel ; 2 } ; channel name }'}, 'tv tokyo / teleto ( tx )'], 'result': True, 'ind': 2, 'tostr': 'eq { hop { nth_argmax { all_rows ; channel ; 2 } ; channel name } ; tv tokyo / teleto ( tx ) } = true', 'tointer': 'select the row whose channel record of all rows is 2nd maximum . the channel name record of this row is tv tokyo / teleto ( tx ) .'}
eq { hop { nth_argmax { all_rows ; channel ; 2 } ; channel name } ; tv tokyo / teleto ( tx ) } = true
select the row whose channel record of all rows is 2nd maximum . the channel name record of this row is tv tokyo / teleto ( tx ) .
3
3
{'str_eq_2': 2, 'result_3': 3, 'str_hop_1': 1, 'nth_argmax_0': 0, 'all_rows_4': 4, 'channel_5': 5, '2_6': 6, 'channel name_7': 7, 'tv tokyo / teleto (tx)_8': 8}
{'str_eq_2': 'str_eq', 'result_3': 'true', 'str_hop_1': 'str_hop', 'nth_argmax_0': 'nth_argmax', 'all_rows_4': 'all_rows', 'channel_5': 'channel', '2_6': '2', 'channel name_7': 'channel name', 'tv tokyo / teleto (tx)_8': 'tv tokyo / teleto ( tx )'}
{'str_eq_2': [3], 'result_3': [], 'str_hop_1': [2], 'nth_argmax_0': [1], 'all_rows_4': [0], 'channel_5': [0], '2_6': [0], 'channel name_7': [1], 'tv tokyo / teleto (tx)_8': [2]}
['channel', 'channel name', 'callsign', 'signal power', 'broadcast area']
[['1', 'nhk general tv / nhk g ( gtv )', 'joak - dtv', '10 kw', 'greater tokyo'], ['2', 'nhk educational tv / nhk e ( etv )', 'joab - dtv', '10 kw', 'all kantō'], ['4', 'nippon television / nittele ( ntv )', 'joax - dtv', '10 kw', 'all kantō'], ['5', 'tv asahi / tele - asa ( ex )', 'joex - dtv', '10 kw', 'all kantō'], ['6', 'tbs', 'jorx - dtv', '10 kw', 'all kantō'], ['7', 'tv tokyo / teleto ( tx )', 'jotx - dtv', '10 kw', 'all kantō'], ['8', 'fuji television ( cx )', 'jocx - dtv', '10 kw', 'all kantō']]
uyanış ( album )
https://en.wikipedia.org/wiki/Uyan%C4%B1%C5%9F_%28album%29
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-14436802-1.html.csv
majority
the majority of songs on the uyanış album are less than 5:00 in duration .
{'scope': 'all', 'col': '5', 'most_or_all': 'most', 'criterion': 'less_than', 'value': '5:00', 'subset': None}
{'func': 'most_less', 'args': ['all_rows', 'duration', '5:00'], 'result': True, 'ind': 0, 'tointer': 'for the duration records of all rows , most of them are less than 5:00 .', 'tostr': 'most_less { all_rows ; duration ; 5:00 } = true'}
most_less { all_rows ; duration ; 5:00 } = true
for the duration records of all rows , most of them are less than 5:00 .
1
1
{'most_less_0': 0, 'result_1': 1, 'all_rows_2': 2, 'duration_3': 3, '5:00_4': 4}
{'most_less_0': 'most_less', 'result_1': 'true', 'all_rows_2': 'all_rows', 'duration_3': 'duration', '5:00_4': '5:00'}
{'most_less_0': [1], 'result_1': [], 'all_rows_2': [0], 'duration_3': [0], '5:00_4': [0]}
['title', 'translation', 'lyricist', 'composer', 'duration']
[['uyanış ( aka yangın yeminler )', 'awakening', 'işın karaca & zeki güner', 'işın karaca & erdem yörük', '4:45'], ['5 dakika ( featuring ege çubukçu )', '5 minutes', 'sibel alaş', 'işın karaca & erdem yörük', '4:47'], ['gidemedim', "i could n't go", 'zeki güner', 'raşit gözdamla', '6:08'], ['sessiz hikayem', 'my silent story', 'işın karaca & zeki güner', 'işın karaca & erdem yörük', '4:23'], ['bilmece', 'puzzle', 'işın karaca & zeki güner', 'işın karaca', '3:31'], ['1000 yalan', '1000 lies', 'işın karaca & zeki güner', 'işın karaca & erdem yörük', '4:46'], ['ben bilirim', 'i know', 'işın karaca & zeki güner', 'işın karaca & erdem yörük', '4:06'], ['aşk çizgisi', 'love line', 'işın karaca & sibel alaş', 'işın karaca & erdem yörük', '4:25'], ['başrol', 'leading role', 'işın karaca & zeki güner & funky c', 'işın karaca & funky c', '4:31'], ['bambaşka biri', 'totally different person', 'fikret şenes', 'frederic mania', '4:57']]
1971 vfl season
https://en.wikipedia.org/wiki/1971_VFL_season
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-10826072-6.html.csv
count
in the 1971 vfl season , when the away team 's score was under 20 , there were three games where the crowd was under 20000 .
{'scope': 'subset', 'criterion': 'less_than', 'value': '20000', 'result': '3', 'col': '6', 'subset': {'col': '4', 'criterion': 'less_than', 'value': '20'}}
{'func': 'eq', 'args': [{'func': 'count', 'args': [{'func': 'filter_less', 'args': [{'func': 'filter_less', 'args': ['all_rows', 'away team score', '20'], 'result': None, 'ind': 0, 'tostr': 'filter_less { all_rows ; away team score ; 20 }', 'tointer': 'select the rows whose away team score record is less than 20 .'}, 'crowd', '20000'], 'result': None, 'ind': 1, 'tointer': 'select the rows whose away team score record is less than 20 . among these rows , select the rows whose crowd record is less than 20000 .', 'tostr': 'filter_less { filter_less { all_rows ; away team score ; 20 } ; crowd ; 20000 }'}], 'result': '3', 'ind': 2, 'tostr': 'count { filter_less { filter_less { all_rows ; away team score ; 20 } ; crowd ; 20000 } }', 'tointer': 'select the rows whose away team score record is less than 20 . among these rows , select the rows whose crowd record is less than 20000 . the number of such rows is 3 .'}, '3'], 'result': True, 'ind': 3, 'tostr': 'eq { count { filter_less { filter_less { all_rows ; away team score ; 20 } ; crowd ; 20000 } } ; 3 } = true', 'tointer': 'select the rows whose away team score record is less than 20 . among these rows , select the rows whose crowd record is less than 20000 . the number of such rows is 3 .'}
eq { count { filter_less { filter_less { all_rows ; away team score ; 20 } ; crowd ; 20000 } } ; 3 } = true
select the rows whose away team score record is less than 20 . among these rows , select the rows whose crowd record is less than 20000 . the number of such rows is 3 .
4
4
{'eq_3': 3, 'result_4': 4, 'count_2': 2, 'filter_less_1': 1, 'filter_less_0': 0, 'all_rows_5': 5, 'away team score_6': 6, '20_7': 7, 'crowd_8': 8, '20000_9': 9, '3_10': 10}
{'eq_3': 'eq', 'result_4': 'true', 'count_2': 'count', 'filter_less_1': 'filter_less', 'filter_less_0': 'filter_less', 'all_rows_5': 'all_rows', 'away team score_6': 'away team score', '20_7': '20', 'crowd_8': 'crowd', '20000_9': '20000', '3_10': '3'}
{'eq_3': [4], 'result_4': [], 'count_2': [3], 'filter_less_1': [2], 'filter_less_0': [1], 'all_rows_5': [0], 'away team score_6': [0], '20_7': [0], 'crowd_8': [1], '20000_9': [1], '3_10': [3]}
['home team', 'home team score', 'away team', 'away team score', 'venue', 'crowd', 'date']
[['north melbourne', '12.12 ( 84 )', 'richmond', '22.14 ( 146 )', 'arden street oval', '13047', '8 may 1971'], ['footscray', '18.5 ( 113 )', 'hawthorn', '16.11 ( 107 )', 'western oval', '19205', '8 may 1971'], ['st kilda', '14.25 ( 109 )', 'essendon', '9.11 ( 65 )', 'moorabbin oval', '21604', '8 may 1971'], ['melbourne', '10.12 ( 72 )', 'collingwood', '18.19 ( 127 )', 'mcg', '80231', '8 may 1971'], ['south melbourne', '13.14 ( 92 )', 'carlton', '14.10 ( 94 )', 'lake oval', '17171', '8 may 1971'], ['fitzroy', '20.16 ( 136 )', 'geelong', '13.13 ( 91 )', 'vfl park', '11432', '8 may 1971']]
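Editorial sketch (not part of the record above): the count pattern chains two filter_less steps. The away team score cells look like '16.11 ( 107 )'; reading the leading goals.behinds figure as the number compared against 20 is an assumption of this sketch.

```python
# Hedged sketch: count games with away team score < 20 and crowd < 20000.
games = [  # (away team score, crowd) for the six games listed above
    ("22.14 ( 146 )", 13047), ("16.11 ( 107 )", 19205), ("9.11 ( 65 )", 21604),
    ("18.19 ( 127 )", 80231), ("14.10 ( 94 )", 17171), ("13.13 ( 91 )", 11432),
]

low_scoring = [(s, c) for s, c in games if float(s.split()[0]) < 20]  # first filter_less
small_crowds = [(s, c) for s, c in low_scoring if c < 20000]          # second filter_less
print(len(small_crowds) == 3)  # True
```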
united states house of representatives elections in georgia , 2000
https://en.wikipedia.org/wiki/United_States_House_of_Representatives_elections_in_Georgia%2C_2000
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/1-26336739-1.html.csv
comparative
nathan deal has a first elected year which is earlier than that of charlie norwood .
{'row_1': '9', 'row_2': '10', 'col': '4', 'col_other': '2', 'relation': 'less', 'record_mentioned': 'no', 'diff_result': None}
{'func': 'less', 'args': [{'func': 'num_hop', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'incumbent', 'nathan deal'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose incumbent record fuzzily matches to nathan deal .', 'tostr': 'filter_eq { all_rows ; incumbent ; nathan deal }'}, 'elected'], 'result': None, 'ind': 2, 'tostr': 'hop { filter_eq { all_rows ; incumbent ; nathan deal } ; elected }', 'tointer': 'select the rows whose incumbent record fuzzily matches to nathan deal . take the elected record of this row .'}, {'func': 'num_hop', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'incumbent', 'charlie norwood'], 'result': None, 'ind': 1, 'tointer': 'select the rows whose incumbent record fuzzily matches to charlie norwood .', 'tostr': 'filter_eq { all_rows ; incumbent ; charlie norwood }'}, 'elected'], 'result': None, 'ind': 3, 'tostr': 'hop { filter_eq { all_rows ; incumbent ; charlie norwood } ; elected }', 'tointer': 'select the rows whose incumbent record fuzzily matches to charlie norwood . take the elected record of this row .'}], 'result': True, 'ind': 4, 'tostr': 'less { hop { filter_eq { all_rows ; incumbent ; nathan deal } ; elected } ; hop { filter_eq { all_rows ; incumbent ; charlie norwood } ; elected } } = true', 'tointer': 'select the rows whose incumbent record fuzzily matches to nathan deal . take the elected record of this row . select the rows whose incumbent record fuzzily matches to charlie norwood . take the elected record of this row . the first record is less than the second record .'}
less { hop { filter_eq { all_rows ; incumbent ; nathan deal } ; elected } ; hop { filter_eq { all_rows ; incumbent ; charlie norwood } ; elected } } = true
select the rows whose incumbent record fuzzily matches to nathan deal . take the elected record of this row . select the rows whose incumbent record fuzzily matches to charlie norwood . take the elected record of this row . the first record is less than the second record .
5
5
{'less_4': 4, 'result_5': 5, 'num_hop_2': 2, 'filter_str_eq_0': 0, 'all_rows_6': 6, 'incumbent_7': 7, 'nathan deal_8': 8, 'elected_9': 9, 'num_hop_3': 3, 'filter_str_eq_1': 1, 'all_rows_10': 10, 'incumbent_11': 11, 'charlie norwood_12': 12, 'elected_13': 13}
{'less_4': 'less', 'result_5': 'true', 'num_hop_2': 'num_hop', 'filter_str_eq_0': 'filter_str_eq', 'all_rows_6': 'all_rows', 'incumbent_7': 'incumbent', 'nathan deal_8': 'nathan deal', 'elected_9': 'elected', 'num_hop_3': 'num_hop', 'filter_str_eq_1': 'filter_str_eq', 'all_rows_10': 'all_rows', 'incumbent_11': 'incumbent', 'charlie norwood_12': 'charlie norwood', 'elected_13': 'elected'}
{'less_4': [5], 'result_5': [], 'num_hop_2': [4], 'filter_str_eq_0': [2], 'all_rows_6': [0], 'incumbent_7': [0], 'nathan deal_8': [0], 'elected_9': [2], 'num_hop_3': [4], 'filter_str_eq_1': [3], 'all_rows_10': [1], 'incumbent_11': [1], 'charlie norwood_12': [1], 'elected_13': [3]}
['district', 'incumbent', 'party', 'elected', 'status', 'result']
[["georgia 's 1st", 'jack kingston', 'republican', '1992', 're - elected', 'jack kingston ( r ) 69 % joyce marie griggs ( d ) 31 %'], ["georgia 's 2nd", 'sanford bishop', 'democratic', '1992', 're - elected', 'sanford bishop ( d ) 53 % dylan glenn ( r ) 47 %'], ["georgia 's 3rd", 'mac collins', 'republican', '1992', 're - elected', 'mac collins ( r ) 63 % gail notti ( d ) 37 %'], ["georgia 's 4th", 'cynthia mckinney', 'democratic', '1992', 're - elected', 'cynthia mckinney ( d ) 60 % sunny warren ( r ) 40 %'], ["georgia 's 5th", 'john lewis', 'democratic', '1986', 're - elected', 'john lewis ( d ) 77 % hank schwab ( r ) 23 %'], ["georgia 's 6th", 'johnny isakson', 'republican', '1999', 're - elected', 'johnny isakson ( r ) 75 % brett dehart ( d ) 25 %'], ["georgia 's 7th", 'bob barr', 'republican', '1994', 're - elected', 'bob barr ( r ) 54 % roger kahn ( d ) 46 %'], ["georgia 's 8th", 'saxby chambliss', 'republican', '1994', 're - elected', 'saxby chambliss ( r ) 59 % jim marshall ( d ) 41 %'], ["georgia 's 9th", 'nathan deal', 'republican', '1992', 're - elected', 'nathan deal ( r ) 75 % james harrington ( d ) 25 %'], ["georgia 's 10th", 'charlie norwood', 'republican', '1994', 're - elected', 'charlie norwood ( r ) 63 % marion freeman ( d ) 37 %']]
1984 washington redskins season
https://en.wikipedia.org/wiki/1984_Washington_Redskins_season
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-15085579-1.html.csv
comparative
the game played against the atlanta falcons had a lower attendance than the game played against the san francisco 49ers .
{'row_1': '10', 'row_2': '2', 'col': '5', 'col_other': '3', 'relation': 'less', 'record_mentioned': 'no', 'diff_result': None}
{'func': 'less', 'args': [{'func': 'num_hop', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'opponent', 'atlanta falcons'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose opponent record fuzzily matches to atlanta falcons .', 'tostr': 'filter_eq { all_rows ; opponent ; atlanta falcons }'}, 'attendance'], 'result': None, 'ind': 2, 'tostr': 'hop { filter_eq { all_rows ; opponent ; atlanta falcons } ; attendance }', 'tointer': 'select the rows whose opponent record fuzzily matches to atlanta falcons . take the attendance record of this row .'}, {'func': 'num_hop', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'opponent', 'san francisco 49ers'], 'result': None, 'ind': 1, 'tointer': 'select the rows whose opponent record fuzzily matches to san francisco 49ers .', 'tostr': 'filter_eq { all_rows ; opponent ; san francisco 49ers }'}, 'attendance'], 'result': None, 'ind': 3, 'tostr': 'hop { filter_eq { all_rows ; opponent ; san francisco 49ers } ; attendance }', 'tointer': 'select the rows whose opponent record fuzzily matches to san francisco 49ers . take the attendance record of this row .'}], 'result': True, 'ind': 4, 'tostr': 'less { hop { filter_eq { all_rows ; opponent ; atlanta falcons } ; attendance } ; hop { filter_eq { all_rows ; opponent ; san francisco 49ers } ; attendance } } = true', 'tointer': 'select the rows whose opponent record fuzzily matches to atlanta falcons . take the attendance record of this row . select the rows whose opponent record fuzzily matches to san francisco 49ers . take the attendance record of this row . the first record is less than the second record .'}
less { hop { filter_eq { all_rows ; opponent ; atlanta falcons } ; attendance } ; hop { filter_eq { all_rows ; opponent ; san francisco 49ers } ; attendance } } = true
select the rows whose opponent record fuzzily matches to atlanta falcons . take the attendance record of this row . select the rows whose opponent record fuzzily matches to san francisco 49ers . take the attendance record of this row . the first record is less than the second record .
5
5
{'less_4': 4, 'result_5': 5, 'num_hop_2': 2, 'filter_str_eq_0': 0, 'all_rows_6': 6, 'opponent_7': 7, 'atlanta falcons_8': 8, 'attendance_9': 9, 'num_hop_3': 3, 'filter_str_eq_1': 1, 'all_rows_10': 10, 'opponent_11': 11, 'san francisco 49ers_12': 12, 'attendance_13': 13}
{'less_4': 'less', 'result_5': 'true', 'num_hop_2': 'num_hop', 'filter_str_eq_0': 'filter_str_eq', 'all_rows_6': 'all_rows', 'opponent_7': 'opponent', 'atlanta falcons_8': 'atlanta falcons', 'attendance_9': 'attendance', 'num_hop_3': 'num_hop', 'filter_str_eq_1': 'filter_str_eq', 'all_rows_10': 'all_rows', 'opponent_11': 'opponent', 'san francisco 49ers_12': 'san francisco 49ers', 'attendance_13': 'attendance'}
{'less_4': [5], 'result_5': [], 'num_hop_2': [4], 'filter_str_eq_0': [2], 'all_rows_6': [0], 'opponent_7': [0], 'atlanta falcons_8': [0], 'attendance_9': [2], 'num_hop_3': [4], 'filter_str_eq_1': [3], 'all_rows_10': [1], 'opponent_11': [1], 'san francisco 49ers_12': [1], 'attendance_13': [3]}
['week', 'date', 'opponent', 'result', 'attendance']
[['1', 'september 2 , 1984', 'miami dolphins', 'l , 17 - 35', '52683'], ['2', 'september 10 , 1984', 'san francisco 49ers', 'l , 31 - 37', '59707'], ['3', 'september 16 , 1984', 'new york giants', 'w , 30 - 14', '52997'], ['4', 'september 23 , 1984', 'new england patriots', 'w , 26 - 10', '60503'], ['5', 'september 30 , 1984', 'philadelphia eagles', 'w , 20 - 0', '53064'], ['6', 'october 7 , 1984', 'indianapolis colts', 'w , 35 - 7', '60012'], ['7', 'october 14 , 1984', 'dallas cowboys', 'w , 34 - 14', '55431'], ['8', 'october 21 , 1984', 'st louis cardinals', 'l , 24 - 26', '50262'], ['9', 'october 28 , 1984', 'new york giants', 'l , 13 - 37', '76192'], ['10', 'november 5 , 1984', 'atlanta falcons', 'w , 14 - 27', '51301'], ['11', 'november 11 , 1984', 'detroit lions', 'w , 28 - 14', '50212'], ['12', 'november 18 , 1984', 'philadelphia eagles', 'l , 10 - 16', '63117'], ['13', 'november 25 , 1984', 'buffalo bills', 'w , 41 - 14', '51513'], ['14', 'november 29 , 1984', 'minnesota vikings', 'w , 31 - 17', '55017'], ['15', 'december 9 , 1984', 'dallas cowboys', 'w , 30 - 28', '64286'], ['16', 'december 16 , 1984', 'st louis cardinals', 'w , 29 - 27', '54299']]
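Editorial sketch (not part of the record above): the comparative pattern hops the attendance of each opponent's row and compares the two numbers; only the two rows the logic form uses are reproduced here.

```python
# Hedged sketch: the falcons game drew a smaller crowd than the 49ers game.
attendance = {
    "san francisco 49ers": 59707,  # week 2
    "atlanta falcons": 51301,      # week 10
}

print(attendance["atlanta falcons"] < attendance["san francisco 49ers"])  # True
```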
derek warwick
https://en.wikipedia.org/wiki/Derek_Warwick
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-1158017-1.html.csv
superlative
derek warwick scored the greatest amount of points in 1984 .
{'scope': 'all', 'col_superlative': '5', 'row_superlative': '5', 'value_mentioned': 'no', 'max_or_min': 'max', 'other_col': '1', 'subset': None}
{'func': 'eq', 'args': [{'func': 'num_hop', 'args': [{'func': 'argmax', 'args': ['all_rows', 'points'], 'result': None, 'ind': 0, 'tostr': 'argmax { all_rows ; points }'}, 'year'], 'result': '1984', 'ind': 1, 'tostr': 'hop { argmax { all_rows ; points } ; year }'}, '1984'], 'result': True, 'ind': 2, 'tostr': 'eq { hop { argmax { all_rows ; points } ; year } ; 1984 } = true', 'tointer': 'select the row whose points record of all rows is maximum . the year record of this row is 1984 .'}
eq { hop { argmax { all_rows ; points } ; year } ; 1984 } = true
select the row whose points record of all rows is maximum . the year record of this row is 1984 .
3
3
{'eq_2': 2, 'result_3': 3, 'num_hop_1': 1, 'argmax_0': 0, 'all_rows_4': 4, 'points_5': 5, 'year_6': 6, '1984_7': 7}
{'eq_2': 'eq', 'result_3': 'true', 'num_hop_1': 'num_hop', 'argmax_0': 'argmax', 'all_rows_4': 'all_rows', 'points_5': 'points', 'year_6': 'year', '1984_7': '1984'}
{'eq_2': [3], 'result_3': [], 'num_hop_1': [2], 'argmax_0': [1], 'all_rows_4': [0], 'points_5': [0], 'year_6': [1], '1984_7': [2]}
['year', 'entrant', 'chassis', 'engine', 'points']
[['1981', 'candy toleman motorsport', 'toleman tg181', 'hart 415t 1.5 l4 t', '0'], ['1982', 'candy toleman motorsport', 'toleman tg181c', 'hart 415t 1.5 l4 t', '0'], ['1982', 'candy toleman motorsport', 'toleman tg183', 'hart 415t 1.5 l4 t', '0'], ['1983', 'candy toleman motorsport', 'toleman tg183b', 'hart 415t 1.5 l4 t', '9'], ['1984', 'equipe renault elf', 'renault re50', 'renault ef4 1.5 v6 t', '23'], ['1985', 'equipe renault elf', 'renault re60', 'renault ef4b 1.5 v6 t', '5'], ['1985', 'equipe renault elf', 'renault re60b', 'renault ef15 1.5 v6 t', '5'], ['1986', 'motor racing developments', 'brabham bt55', 'bmw m12 / 13 / 1 1.5 l4 t', '0'], ['1987', 'usf & g arrows megatron', 'arrows a10', 'megatron m12 / 13 1.5 l4 t', '3'], ['1988', 'usf & g arrows megatron', 'arrows a10b', 'megatron m12 / 13 1.5 l4 t', '17'], ['1989', 'usf & g arrows', 'arrows a11', 'ford cosworth dfr 3.5 v8', '7'], ['1990', 'camel team lotus', 'lotus 102', 'lamborghini 3512 3.5 v12', '3'], ['1993', 'footwork mugen - honda', 'footwork fa13b', 'mugen - honda mf - 351 hb 3.5 v10', '4'], ['1993', 'footwork mugen - honda', 'footwork fa14', 'mugen - honda mf - 351 hb 3.5 v10', '4']]
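Editorial sketch (not part of the record above): argmax { all_rows ; points } followed by a hop to the year column; only (year, points) pairs are reproduced, and the tie-free maximum makes the hop unambiguous.

```python
# Hedged sketch of the superlative pattern: season with the most points.
seasons = [  # (year, points) pairs copied from the table content above
    ("1981", 0), ("1982", 0), ("1982", 0), ("1983", 9), ("1984", 23),
    ("1985", 5), ("1985", 5), ("1986", 0), ("1987", 3), ("1988", 17),
    ("1989", 7), ("1990", 3), ("1993", 4), ("1993", 4),
]

best_year, best_points = max(seasons, key=lambda row: row[1])  # argmax on points
print(best_year == "1984")  # True
```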
washington redskins draft history
https://en.wikipedia.org/wiki/Washington_Redskins_draft_history
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-17100961-32.html.csv
comparative
john childress was drafted higher by the washington redskins than claude crabb was .
{'row_1': '8', 'row_2': '17', 'col': '3', 'col_other': '4', 'relation': 'less', 'record_mentioned': 'no', 'diff_result': None}
{'func': 'less', 'args': [{'func': 'num_hop', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'name', 'john childress'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose name record fuzzily matches to john childress .', 'tostr': 'filter_eq { all_rows ; name ; john childress }'}, 'overall'], 'result': None, 'ind': 2, 'tostr': 'hop { filter_eq { all_rows ; name ; john childress } ; overall }', 'tointer': 'select the rows whose name record fuzzily matches to john childress . take the overall record of this row .'}, {'func': 'num_hop', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'name', 'claude crabb'], 'result': None, 'ind': 1, 'tointer': 'select the rows whose name record fuzzily matches to claude crabb .', 'tostr': 'filter_eq { all_rows ; name ; claude crabb }'}, 'overall'], 'result': None, 'ind': 3, 'tostr': 'hop { filter_eq { all_rows ; name ; claude crabb } ; overall }', 'tointer': 'select the rows whose name record fuzzily matches to claude crabb . take the overall record of this row .'}], 'result': True, 'ind': 4, 'tostr': 'less { hop { filter_eq { all_rows ; name ; john childress } ; overall } ; hop { filter_eq { all_rows ; name ; claude crabb } ; overall } } = true', 'tointer': 'select the rows whose name record fuzzily matches to john childress . take the overall record of this row . select the rows whose name record fuzzily matches to claude crabb . take the overall record of this row . the first record is less than the second record .'}
less { hop { filter_eq { all_rows ; name ; john childress } ; overall } ; hop { filter_eq { all_rows ; name ; claude crabb } ; overall } } = true
select the rows whose name record fuzzily matches to john childress . take the overall record of this row . select the rows whose name record fuzzily matches to claude crabb . take the overall record of this row . the first record is less than the second record .
5
5
{'less_4': 4, 'result_5': 5, 'num_hop_2': 2, 'filter_str_eq_0': 0, 'all_rows_6': 6, 'name_7': 7, 'john childress_8': 8, 'overall_9': 9, 'num_hop_3': 3, 'filter_str_eq_1': 1, 'all_rows_10': 10, 'name_11': 11, 'claude crabb_12': 12, 'overall_13': 13}
{'less_4': 'less', 'result_5': 'true', 'num_hop_2': 'num_hop', 'filter_str_eq_0': 'filter_str_eq', 'all_rows_6': 'all_rows', 'name_7': 'name', 'john childress_8': 'john childress', 'overall_9': 'overall', 'num_hop_3': 'num_hop', 'filter_str_eq_1': 'filter_str_eq', 'all_rows_10': 'all_rows', 'name_11': 'name', 'claude crabb_12': 'claude crabb', 'overall_13': 'overall'}
{'less_4': [5], 'result_5': [], 'num_hop_2': [4], 'filter_str_eq_0': [2], 'all_rows_6': [0], 'name_7': [0], 'john childress_8': [0], 'overall_9': [2], 'num_hop_3': [4], 'filter_str_eq_1': [3], 'all_rows_10': [1], 'name_11': [1], 'claude crabb_12': [1], 'overall_13': [3]}
['round', 'pick', 'overall', 'name', 'position', 'college']
[['1', '1', '1', 'ernie davis', 'rb', 'syracuse'], ['2', '1', '15', 'joe hernandez', 'wr', 'arizona'], ['3', '1', '29', 'bob mitinger', 'lb', 'penn state'], ['4', '1', '43', 'billy neighbors', 'g', 'alabama'], ['7', '1', '85', 'bert coan', 'hb', 'kansas'], ['8', '1', '99', 'ron hatcher', 'fb', 'michigan state'], ['9', '1', '113', 'dave viti', 'e', 'boston university'], ['10', '1', '127', 'john childress', 'g', 'arkansas'], ['11', '1', '141', 'carl palazzo', 'ot', 'adams state'], ['12', '1', '155', 'terry terrebonne', 'hb', 'tulane'], ['13', '1', '169', 'bill whisler', 'e', 'iowa'], ['14', '1', '183', 'jim costen', 'hb', 'south carolina'], ['15', '1', '197', 'len velia', 'ot', 'georgia'], ['16', '1', '211', 'tommy brooker', 'e', 'alabama'], ['17', '1', '225', 'allen miller', 'lb', 'ohio'], ['18', '1', '239', 'carl charon', 'db', 'michigan state'], ['19', '1', '253', 'claude crabb', 'db', 'colorado'], ['20', '1', '267', 'ed trancygier', 'qb', 'florida state']]
2003 belarusian premier league
https://en.wikipedia.org/wiki/2003_Belarusian_Premier_League
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-14748588-1.html.csv
unique
neman is the only team in the belarusian premier league that plays in the grodno location .
{'scope': 'all', 'row': '2', 'col': '2', 'col_other': '1', 'criterion': 'equal', 'value': 'grodno', 'subset': None}
{'func': 'and', 'args': [{'func': 'only', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'location', 'grodno'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose location record fuzzily matches to grodno .', 'tostr': 'filter_eq { all_rows ; location ; grodno }'}], 'result': True, 'ind': 1, 'tostr': 'only { filter_eq { all_rows ; location ; grodno } }', 'tointer': 'select the rows whose location record fuzzily matches to grodno . there is only one such row in the table .'}, {'func': 'str_eq', 'args': [{'func': 'str_hop', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'location', 'grodno'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose location record fuzzily matches to grodno .', 'tostr': 'filter_eq { all_rows ; location ; grodno }'}, 'team'], 'result': 'neman', 'ind': 2, 'tostr': 'hop { filter_eq { all_rows ; location ; grodno } ; team }'}, 'neman'], 'result': True, 'ind': 3, 'tostr': 'eq { hop { filter_eq { all_rows ; location ; grodno } ; team } ; neman }', 'tointer': 'the team record of this unique row is neman .'}], 'result': True, 'ind': 4, 'tostr': 'and { only { filter_eq { all_rows ; location ; grodno } } ; eq { hop { filter_eq { all_rows ; location ; grodno } ; team } ; neman } } = true', 'tointer': 'select the rows whose location record fuzzily matches to grodno . there is only one such row in the table . the team record of this unique row is neman .'}
and { only { filter_eq { all_rows ; location ; grodno } } ; eq { hop { filter_eq { all_rows ; location ; grodno } ; team } ; neman } } = true
select the rows whose location record fuzzily matches to grodno . there is only one such row in the table . the team record of this unique row is neman .
6
5
{'and_4': 4, 'result_5': 5, 'only_1': 1, 'filter_str_eq_0': 0, 'all_rows_6': 6, 'location_7': 7, 'grodno_8': 8, 'str_eq_3': 3, 'str_hop_2': 2, 'team_9': 9, 'neman_10': 10}
{'and_4': 'and', 'result_5': 'true', 'only_1': 'only', 'filter_str_eq_0': 'filter_str_eq', 'all_rows_6': 'all_rows', 'location_7': 'location', 'grodno_8': 'grodno', 'str_eq_3': 'str_eq', 'str_hop_2': 'str_hop', 'team_9': 'team', 'neman_10': 'neman'}
{'and_4': [5], 'result_5': [], 'only_1': [4], 'filter_str_eq_0': [1, 2], 'all_rows_6': [0], 'location_7': [0], 'grodno_8': [0], 'str_eq_3': [4], 'str_hop_2': [3], 'team_9': [2], 'neman_10': [3]}
['team', 'location', 'venue', 'capacity', 'position in 2002']
[['bate', 'borisov', 'city stadium , borisov', '5500', '1'], ['neman', 'grodno', 'neman', '6300', '2'], ['shakhtyor', 'soligorsk', 'stroitel', '5000', '3'], ['torpedo - ska', 'minsk', 'torpedo , minsk', '5200', '4'], ['torpedo', 'zhodino', 'torpedo , zhodino', '3020', '5'], ['gomel', 'gomel', 'central', '11800', '6'], ['dinamo minsk', 'minsk', 'dinamo , minsk', '41040', '7'], ['belshina', 'bobruisk', 'spartak , bobruisk', '3550', '8'], ['dnepr - transmash', 'mogilev', 'spartak , mogilev', '11200', '9'], ['dinamo brest', 'brest', 'osk brestskiy', '10080', '10'], ['slavia', 'mozyr', 'yunost', '5500', '11'], ['zvezda - va - bgu', 'minsk', 'traktor', '17600', '12'], ['molodechno - 2000', 'molodechno', 'city stadium , molodechno', '5500', '13'], ['darida', 'minsk raion', 'darida', '6000', 'first league , 1'], ['naftan', 'novopolotsk', 'atlant', '6500', 'first league , 2'], ['lokomotiv', 'minsk', 'lokomotiv', '800', 'first league , 3']]
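The logic string in the record above composes filter_eq, only, and hop into a "unique row" check. Below is a minimal sketch of that evaluation in Python; the helper names (filter_str_eq, only, str_hop) are illustrative stand-ins for the operators named in the logic form, the fuzzy match is assumed to be a case-insensitive substring test, and only a few (team, location) pairs from the table are reproduced.

def filter_str_eq(table, column, value):
    # "fuzzy" match approximated as case-insensitive substring containment
    return [row for row in table if value.lower() in row[column].lower()]

def only(rows):
    # true when exactly one row survived the filter
    return len(rows) == 1

def str_hop(rows, column):
    # read a column value off the (single) surviving row
    return rows[0][column]

teams = [
    {"team": "bate", "location": "borisov"},
    {"team": "neman", "location": "grodno"},
    {"team": "shakhtyor", "location": "soligorsk"},
    {"team": "dinamo minsk", "location": "minsk"},
    {"team": "gomel", "location": "gomel"},
]

grodno_rows = filter_str_eq(teams, "location", "grodno")
claim = only(grodno_rows) and str_hop(grodno_rows, "team") == "neman"
print(claim)  # True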
2008 in hip hop music
https://en.wikipedia.org/wiki/2008_in_hip_hop_music
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-16444986-2.html.csv
majority
in 2008 hip hop music , most of the top ten best selling hip hop artists sold at least 200,000 albums in their first week .
{'scope': 'all', 'col': '4', 'most_or_all': 'most', 'criterion': 'greater_than_eq', 'value': '200000', 'subset': None}
{'func': 'most_greater_eq', 'args': ['all_rows', '1st week sales', '200000'], 'result': True, 'ind': 0, 'tointer': 'for the 1st week sales records of all rows , most of them are greater than or equal to 200000 .', 'tostr': 'most_greater_eq { all_rows ; 1st week sales ; 200000 } = true'}
most_greater_eq { all_rows ; 1st week sales ; 200000 } = true
for the 1st week sales records of all rows , most of them are greater than or equal to 200000 .
1
1
{'most_greater_eq_0': 0, 'result_1': 1, 'all_rows_2': 2, '1st week sales_3': 3, '200000_4': 4}
{'most_greater_eq_0': 'most_greater_eq', 'result_1': 'true', 'all_rows_2': 'all_rows', '1st week sales_3': '1st week sales', '200000_4': '200000'}
{'most_greater_eq_0': [1], 'result_1': [], 'all_rows_2': [0], '1st week sales_3': [0], '200000_4': [0]}
['number', 'artist', 'album', '1st week sales', '1st week position']
[['1', 'lil wayne', 'tha carter iii', '1006000', '1'], ['2', 'ti', 'paper trail', '568000', '1'], ['3', 'kanye west', '808s and heartbreak', '450000', '1'], ['4', 'young jeezy', 'the recession', '260000', '1'], ['5', 'the game', 'lax', '240000', '2'], ['6', 'plies', 'definition of real', '215000', '2'], ['7', 'ludacris', 'theater of the mind', '214000', '5'], ['8', 'rick ross', 'trilla', '198000', '1'], ['9', 'nas', 'untitled', '187000', '1'], ['10', 'snoop dogg', "ego trippin '", '137000', '5']]
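The most_greater_eq form above reduces to a threshold count over one numeric column. A small sketch, under the assumption that "most" means strictly more than half of the rows; the sales figures are copied from the 1st week sales column.

first_week_sales = [1006000, 568000, 450000, 260000, 240000,
                    215000, 214000, 198000, 187000, 137000]

def most_greater_eq(values, threshold):
    # "most" taken as strictly more than half of the rows
    return sum(v >= threshold for v in values) > len(values) / 2

print(most_greater_eq(first_week_sales, 200000))  # True: 7 of the 10 rows qualify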
list of canadian provinces and territories by population
https://en.wikipedia.org/wiki/List_of_Canadian_provinces_and_territories_by_population
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-106104-1.html.csv
unique
manitoba is the only province with less than 10 % of the total canadian population .
{'scope': 'all', 'row': '5', 'col': '4', 'col_other': '2', 'criterion': 'less_than', 'value': '10.00 %', 'subset': None}
{'func': 'and', 'args': [{'func': 'only', 'args': [{'func': 'filter_less', 'args': ['all_rows', 'percent of national population', '10.00 %'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose percent of national population record is less than 10.00 % .', 'tostr': 'filter_less { all_rows ; percent of national population ; 10.00 % }'}], 'result': True, 'ind': 1, 'tostr': 'only { filter_less { all_rows ; percent of national population ; 10.00 % } }', 'tointer': 'select the rows whose percent of national population record is less than 10.00 % . there is only one such row in the table .'}, {'func': 'str_eq', 'args': [{'func': 'str_hop', 'args': [{'func': 'filter_less', 'args': ['all_rows', 'percent of national population', '10.00 %'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose percent of national population record is less than 10.00 % .', 'tostr': 'filter_less { all_rows ; percent of national population ; 10.00 % }'}, 'name'], 'result': 'manitoba', 'ind': 2, 'tostr': 'hop { filter_less { all_rows ; percent of national population ; 10.00 % } ; name }'}, 'manitoba'], 'result': True, 'ind': 3, 'tostr': 'eq { hop { filter_less { all_rows ; percent of national population ; 10.00 % } ; name } ; manitoba }', 'tointer': 'the name record of this unqiue row is manitoba .'}], 'result': True, 'ind': 4, 'tostr': 'and { only { filter_less { all_rows ; percent of national population ; 10.00 % } } ; eq { hop { filter_less { all_rows ; percent of national population ; 10.00 % } ; name } ; manitoba } } = true', 'tointer': 'select the rows whose percent of national population record is less than 10.00 % . there is only one such row in the table . the name record of this unqiue row is manitoba .'}
and { only { filter_less { all_rows ; percent of national population ; 10.00 % } } ; eq { hop { filter_less { all_rows ; percent of national population ; 10.00 % } ; name } ; manitoba } } = true
select the rows whose percent of national population record is less than 10.00 % . there is only one such row in the table . the name record of this unique row is manitoba .
6
5
{'and_4': 4, 'result_5': 5, 'only_1': 1, 'filter_less_0': 0, 'all_rows_6': 6, 'percent of national population_7': 7, '10.00%_8': 8, 'str_eq_3': 3, 'str_hop_2': 2, 'name_9': 9, 'manitoba_10': 10}
{'and_4': 'and', 'result_5': 'true', 'only_1': 'only', 'filter_less_0': 'filter_less', 'all_rows_6': 'all_rows', 'percent of national population_7': 'percent of national population', '10.00%_8': '10.00 %', 'str_eq_3': 'str_eq', 'str_hop_2': 'str_hop', 'name_9': 'name', 'manitoba_10': 'manitoba'}
{'and_4': [5], 'result_5': [], 'only_1': [4], 'filter_less_0': [1, 2], 'all_rows_6': [0], 'percent of national population_7': [0], '10.00%_8': [0], 'str_eq_3': [4], 'str_hop_2': [3], 'name_9': [2], 'manitoba_10': [3]}
['rank', 'name', 'population ( 2011 census )', 'percent of national population', '% growth ( 2006 - 11 )', 'land area ( km square )', 'population density ( / km 2 )', 'house of commons seats', 'house of commons seats ( % )', '2013 population ( july est )']
[['1', 'ontario', '12851821', '38.4 %', '5.7 %', '908607.67', '14.1', '106', '34.4 %', '13537994'], ['2', 'quebec', '7903001', '23.6 %', '4.7 %', '1356547.02', '5.8', '75', '24.4 %', '8155334'], ['3', 'british columbia', '4400057', '13.1 %', '7.0 %', '922509.29', '4.8', '36', '11.7 %', '4581978'], ['4', 'alberta', '3645257', '10.9 %', '10.8 %', '640081.87', '5.7', '28', '9.1 %', '4025074'], ['5', 'manitoba', '1208268', '3.6 %', '5.2 %', '552329.52', '2.2', '14', '4.5 %', '1265015']]
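filter_less in the record above compares numerically against a column stored as percent strings, so some parsing convention is implied. The sketch below simply strips the "%" sign before comparing; the parse_percent helper is an assumption of this sketch, not the dataset's actual parser.

rows = [
    ("ontario", "38.4 %"),
    ("quebec", "23.6 %"),
    ("british columbia", "13.1 %"),
    ("alberta", "10.9 %"),
    ("manitoba", "3.6 %"),
]

def parse_percent(text):
    # assumed numeric parse: drop the percent sign and surrounding spaces
    return float(text.replace("%", "").strip())

below_ten = [name for name, pct in rows if parse_percent(pct) < 10.00]
print(below_ten == ["manitoba"])  # True: manitoba is the unique survivor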
piia pantsu
https://en.wikipedia.org/wiki/Piia_Pantsu
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-16178073-1.html.csv
majority
piia pantsu rode the horse , ypäjä karuso in most of the competitions .
{'scope': 'all', 'col': '4', 'most_or_all': 'most', 'criterion': 'equal', 'value': 'ypäjä karuso', 'subset': None}
{'func': 'most_str_eq', 'args': ['all_rows', 'horse', 'ypäjä karuso'], 'result': True, 'ind': 0, 'tointer': 'for the horse records of all rows , most of them fuzzily match to ypäjä karuso .', 'tostr': 'most_eq { all_rows ; horse ; ypäjä karuso } = true'}
most_eq { all_rows ; horse ; ypäjä karuso } = true
for the horse records of all rows , most of them fuzzily match to ypäjä karuso .
1
1
{'most_str_eq_0': 0, 'result_1': 1, 'all_rows_2': 2, 'horse_3': 3, 'ypäjä karuso_4': 4}
{'most_str_eq_0': 'most_str_eq', 'result_1': 'true', 'all_rows_2': 'all_rows', 'horse_3': 'horse', 'ypäjä karuso_4': 'ypäjä karuso'}
{'most_str_eq_0': [1], 'result_1': [], 'all_rows_2': [0], 'horse_3': [0], 'ypäjä karuso_4': [0]}
['competition', 'place', 'year', 'horse', 'rank']
[['world championship', 'haag', '1994', 'cyna', '5'], ['european championship', 'rome', '1995', 'cyna', '4'], ['european championship', 'burghley', '1997', 'cyna', 'stopped in second event'], ['world championship', 'rooma', '1998', 'uppercut', '9'], ['european championship', 'luhmühlen', '1999', 'uppercut', '4'], ['olympics', 'sydney', '2000', 'uppercut', 'disqualification in second event'], ['european championship', 'pau', '2001', 'ypäjä karuso', '17'], ['world championship', 'jerez', '2002', 'ypäjä karuso', '3'], ['badminton horse trials', 'gloucestershire , england', '2003', 'ypäjä karuso', '2'], ['european championship', 'blenheim', '2005', 'ypäjä karuso', '6'], ['world cup final', 'malmö', '2005', 'ypäjä karuso', '3'], ['world championship', 'aachen', '2006', 'ypäjä karuso', 'stopped in second event'], ['finland championship', 'kerava , finland', '2007', 'ypäjä karuso', '1']]
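most_eq over a text column is the same majority count, but with a fuzzy string match instead of a numeric comparison. A sketch using the horse column reproduced from the table above; the containment-based match is an assumption of this sketch.

horses = ["cyna", "cyna", "cyna", "uppercut", "uppercut", "uppercut",
          "ypäjä karuso", "ypäjä karuso", "ypäjä karuso", "ypäjä karuso",
          "ypäjä karuso", "ypäjä karuso", "ypäjä karuso"]

def most_str_eq(values, target):
    # fuzzy match approximated as case-insensitive containment
    return sum(target.lower() in v.lower() for v in values) > len(values) / 2

print(most_str_eq(horses, "ypäjä karuso"))  # True: 7 of the 13 rows match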
united states house of representatives elections , 1910
https://en.wikipedia.org/wiki/United_States_House_of_Representatives_elections%2C_1910
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-1365780-7.html.csv
majority
all of the south carolina incumbents in the 1910 united states house of representatives elections were with the democratic party .
{'scope': 'all', 'col': '3', 'most_or_all': 'all', 'criterion': 'equal', 'value': 'democratic', 'subset': None}
{'func': 'all_str_eq', 'args': ['all_rows', 'party', 'democratic'], 'result': True, 'ind': 0, 'tointer': 'for the party records of all rows , all of them fuzzily match to democratic .', 'tostr': 'all_eq { all_rows ; party ; democratic } = true'}
all_eq { all_rows ; party ; democratic } = true
for the party records of all rows , all of them fuzzily match to democratic .
1
1
{'all_str_eq_0': 0, 'result_1': 1, 'all_rows_2': 2, 'party_3': 3, 'democratic_4': 4}
{'all_str_eq_0': 'all_str_eq', 'result_1': 'true', 'all_rows_2': 'all_rows', 'party_3': 'party', 'democratic_4': 'democratic'}
{'all_str_eq_0': [1], 'result_1': [], 'all_rows_2': [0], 'party_3': [0], 'democratic_4': [0]}
['district', 'incumbent', 'party', 'first elected', 'result']
[['south carolina 1', 'george swinton legarã', 'democratic', '1902', 're - elected'], ['south carolina 2', "james o ' h patterson", 'democratic', '1904', 'lost primary democratic hold'], ['south carolina 3', 'wyatt aiken', 'democratic', '1902', 're - elected'], ['south carolina 4', 'joseph t johnson', 'democratic', '1900', 're - elected'], ['south carolina 5', 'david e finley', 'democratic', '1898', 're - elected'], ['south carolina 6', 'j edwin ellerbe', 'democratic', '1904', 're - elected'], ['south carolina 7', 'asbury f lever', 'democratic', '1901 ( special )', 're - elected']]
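all_eq is the stricter variant of the previous pattern: every row must match, not just a majority. A one-liner over the party column of the seven South Carolina districts, with the same assumed containment match.

def all_str_eq(values, target):
    return all(target.lower() in v.lower() for v in values)

parties = ["democratic"] * 7  # the party column reads "democratic" for all seven districts
print(all_str_eq(parties, "democratic"))  # True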
2001 open championship
https://en.wikipedia.org/wiki/2001_Open_Championship
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-18017216-5.html.csv
aggregation
in the 2001 open championship , average points to par among united states contenders was 3.4 .
{'scope': 'subset', 'col': '5', 'type': 'average', 'result': '3.4', 'subset': {'col': '3', 'criterion': 'equal', 'value': 'united states'}}
{'func': 'round_eq', 'args': [{'func': 'avg', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'country', 'united states'], 'result': None, 'ind': 0, 'tostr': 'filter_eq { all_rows ; country ; united states }', 'tointer': 'select the rows whose country record fuzzily matches to united states .'}, 'to par'], 'result': '3.4', 'ind': 1, 'tostr': 'avg { filter_eq { all_rows ; country ; united states } ; to par }'}, '3.4'], 'result': True, 'ind': 2, 'tostr': 'round_eq { avg { filter_eq { all_rows ; country ; united states } ; to par } ; 3.4 } = true', 'tointer': 'select the rows whose country record fuzzily matches to united states . the average of the to par record of these rows is 3.4 .'}
round_eq { avg { filter_eq { all_rows ; country ; united states } ; to par } ; 3.4 } = true
select the rows whose country record fuzzily matches to united states . the average of the to par record of these rows is 3.4 .
3
3
{'eq_2': 2, 'result_3': 3, 'avg_1': 1, 'filter_str_eq_0': 0, 'all_rows_4': 4, 'country_5': 5, 'united states_6': 6, 'to par_7': 7, '3.4_8': 8}
{'eq_2': 'eq', 'result_3': 'true', 'avg_1': 'avg', 'filter_str_eq_0': 'filter_str_eq', 'all_rows_4': 'all_rows', 'country_5': 'country', 'united states_6': 'united states', 'to par_7': 'to par', '3.4_8': '3.4'}
{'eq_2': [3], 'result_3': [], 'avg_1': [2], 'filter_str_eq_0': [1], 'all_rows_4': [0], 'country_5': [0], 'united states_6': [0], 'to par_7': [1], '3.4_8': [2]}
['place', 'player', 'country', 'score', 'to par']
[['1', 'colin montgomerie', 'scotland', '65 + 70 = 135', '7'], ['2', 'pierre fulke', 'sweden', '69 + 67 = 136', '6'], ['t3', 'joe ogilvie', 'united states', '69 + 68 = 137', '5'], ['t3', 'greg owen', 'england', '69 + 68 = 137', '5'], ['t3', 'jesper parnevik', 'sweden', '69 + 68 = 137', '5'], ['t6', 'alex čejka', 'germany', '69 + 69 = 138', '4'], ['t6', 'niclas fasth', 'sweden', '69 + 69 = 138', '4'], ['t6', 'eduardo romero', 'argentina', '70 + 68 = 138', '4'], ['t9', 'billy andrade', 'united states', '69 + 70 = 139', '3'], ['t9', 'darren clarke', 'northern ireland', '70 + 69 = 139', '3'], ['t9', 'brad faxon', 'united states', '68 + 71 = 139', '3'], ['t9', 'raphaël jacquelin', 'france', '71 + 68 = 139', '3'], ['t9', "mark o'meara", 'united states', '70 + 69 = 139', '3'], ['t9', 'rory sabbatini', 'south africa', '70 + 69 = 139', '3'], ['t9', 'des smyth', 'ireland', '74 + 65 = 139', '3'], ['t9', 'tiger woods', 'united states', '71 + 68 = 139', '3']]
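The avg aggregation above filters to the united states rows and averages their to par values; round_eq then compares with rounding. A sketch with the relevant (player, country, to par) triples copied from the table, plus two non-matching rows to show the filter doing work.

rows = [
    ("joe ogilvie", "united states", 5),
    ("billy andrade", "united states", 3),
    ("brad faxon", "united states", 3),
    ("mark o'meara", "united states", 3),
    ("tiger woods", "united states", 3),
    ("colin montgomerie", "scotland", 7),
    ("pierre fulke", "sweden", 6),
]

us_to_par = [par for _, country, par in rows if country == "united states"]
average = sum(us_to_par) / len(us_to_par)
print(round(average, 1) == 3.4)  # True: (5 + 3 + 3 + 3 + 3) / 5 = 3.4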
2007 missouri tigers football team
https://en.wikipedia.org/wiki/2007_Missouri_Tigers_football_team
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-13237088-24.html.csv
unique
for the 2007 missouri tigers football team , of the players whose gain was under 100 , the only one who had a long of 18 was chase patton .
{'scope': 'subset', 'row': '9', 'col': '4', 'col_other': '1', 'criterion': 'equal', 'value': '18', 'subset': {'col': '2', 'criterion': 'less_than', 'value': '100'}}
{'func': 'and', 'args': [{'func': 'only', 'args': [{'func': 'filter_eq', 'args': [{'func': 'filter_less', 'args': ['all_rows', 'gain', '100'], 'result': None, 'ind': 0, 'tostr': 'filter_less { all_rows ; gain ; 100 }', 'tointer': 'select the rows whose gain record is less than 100 .'}, 'long', '18'], 'result': None, 'ind': 1, 'tointer': 'select the rows whose gain record is less than 100 . among these rows , select the rows whose long record is equal to 18 .', 'tostr': 'filter_eq { filter_less { all_rows ; gain ; 100 } ; long ; 18 }'}], 'result': True, 'ind': 2, 'tostr': 'only { filter_eq { filter_less { all_rows ; gain ; 100 } ; long ; 18 } }', 'tointer': 'select the rows whose gain record is less than 100 . among these rows , select the rows whose long record is equal to 18 . there is only one such row in the table .'}, {'func': 'str_eq', 'args': [{'func': 'str_hop', 'args': [{'func': 'filter_eq', 'args': [{'func': 'filter_less', 'args': ['all_rows', 'gain', '100'], 'result': None, 'ind': 0, 'tostr': 'filter_less { all_rows ; gain ; 100 }', 'tointer': 'select the rows whose gain record is less than 100 .'}, 'long', '18'], 'result': None, 'ind': 1, 'tointer': 'select the rows whose gain record is less than 100 . among these rows , select the rows whose long record is equal to 18 .', 'tostr': 'filter_eq { filter_less { all_rows ; gain ; 100 } ; long ; 18 }'}, 'name'], 'result': 'patton , chase', 'ind': 3, 'tostr': 'hop { filter_eq { filter_less { all_rows ; gain ; 100 } ; long ; 18 } ; name }'}, 'patton , chase'], 'result': True, 'ind': 4, 'tostr': 'eq { hop { filter_eq { filter_less { all_rows ; gain ; 100 } ; long ; 18 } ; name } ; patton , chase }', 'tointer': 'the name record of this unqiue row is patton , chase .'}], 'result': True, 'ind': 5, 'tostr': 'and { only { filter_eq { filter_less { all_rows ; gain ; 100 } ; long ; 18 } } ; eq { hop { filter_eq { filter_less { all_rows ; gain ; 100 } ; long ; 18 } ; name } ; patton , chase } } = true', 'tointer': 'select the rows whose gain record is less than 100 . among these rows , select the rows whose long record is equal to 18 . there is only one such row in the table . the name record of this unqiue row is patton , chase .'}
and { only { filter_eq { filter_less { all_rows ; gain ; 100 } ; long ; 18 } } ; eq { hop { filter_eq { filter_less { all_rows ; gain ; 100 } ; long ; 18 } ; name } ; patton , chase } } = true
select the rows whose gain record is less than 100 . among these rows , select the rows whose long record is equal to 18 . there is only one such row in the table . the name record of this unique row is patton , chase .
8
6
{'and_5': 5, 'result_6': 6, 'only_2': 2, 'filter_eq_1': 1, 'filter_less_0': 0, 'all_rows_7': 7, 'gain_8': 8, '100_9': 9, 'long_10': 10, '18_11': 11, 'str_eq_4': 4, 'str_hop_3': 3, 'name_12': 12, 'patton , chase_13': 13}
{'and_5': 'and', 'result_6': 'true', 'only_2': 'only', 'filter_eq_1': 'filter_eq', 'filter_less_0': 'filter_less', 'all_rows_7': 'all_rows', 'gain_8': 'gain', '100_9': '100', 'long_10': 'long', '18_11': '18', 'str_eq_4': 'str_eq', 'str_hop_3': 'str_hop', 'name_12': 'name', 'patton , chase_13': 'patton , chase'}
{'and_5': [6], 'result_6': [], 'only_2': [5], 'filter_eq_1': [2, 3], 'filter_less_0': [1], 'all_rows_7': [0], 'gain_8': [0], '100_9': [0], 'long_10': [1], '18_11': [1], 'str_eq_4': [5], 'str_hop_3': [4], 'name_12': [3], 'patton , chase_13': [4]}
['name', 'gain', 'loss', 'long', 'avg / g']
[['temple , tony', '1135', '96', '44', '86.6'], ['maclin , jeremy', '388', '13', '30', '26.8'], ['jackson , jimmy', '342', '11', '18', '23.6'], ['daniel , chase', '471', '218', '39', '18.1'], ['washington , derrick', '199', '15', '20', '15.3'], ['woods , marcus', '120', '0', '19', '20.0'], ['goldsmith , earl', '99', '7', '19', '7.7'], ['rucker , martin', '51', '1', '9', '3.6'], ['patton , chase', '35', '1', '18', '3.4'], ['alexander , danario', '36', '5', '19', '3.1'], ['coffman , chase', '8', '0', '8', '0.6'], ['saunders , tommy', '8', '11', '8', '0.2'], ['perry , jered', '0', '5', '0', '0.4'], ['team', '0', '42', '0', '4.2'], ['total', '2892', '425', '44', '176.2'], ['opponents', '2106', '397', '66', '122.1']]
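In this record two filters are chained before the uniqueness check: first gain < 100, then long == 18. The sketch below keeps only a handful of rows, deliberately including jackson , jimmy, whose long is also 18 but whose gain of 342 removes him from the subset.

rows = [
    ("temple , tony", 1135, 44),
    ("jackson , jimmy", 342, 18),
    ("goldsmith , earl", 99, 19),
    ("patton , chase", 35, 18),
    ("alexander , danario", 36, 19),
]  # (name, gain, long)

under_100 = [r for r in rows if r[1] < 100]      # drops jackson , jimmy
long_18 = [r for r in under_100 if r[2] == 18]   # leaves a single row
print(len(long_18) == 1 and long_18[0][0] == "patton , chase")  # True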
made ( tv series )
https://en.wikipedia.org/wiki/Made_%28TV_series%29
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/1-2140071-5.html.csv
comparative
the episode of made titled " richard is made into boyfriend material " premiered earlier than the episode titled " lawryn is made into a bmx biker " .
{'row_1': '2', 'row_2': '8', 'col': '4', 'col_other': '3', 'relation': 'less', 'record_mentioned': 'no', 'diff_result': None}
{'func': 'less', 'args': [{'func': 'str_hop', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'episode summary', 'richard is made into boyfriend material'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose episode summary record fuzzily matches to richard is made into boyfriend material .', 'tostr': 'filter_eq { all_rows ; episode summary ; richard is made into boyfriend material }'}, 'premier date'], 'result': None, 'ind': 2, 'tostr': 'hop { filter_eq { all_rows ; episode summary ; richard is made into boyfriend material } ; premier date }', 'tointer': 'select the rows whose episode summary record fuzzily matches to richard is made into boyfriend material . take the premier date record of this row .'}, {'func': 'str_hop', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'episode summary', 'lawryn is made into a bmx biker'], 'result': None, 'ind': 1, 'tointer': 'select the rows whose episode summary record fuzzily matches to lawryn is made into a bmx biker .', 'tostr': 'filter_eq { all_rows ; episode summary ; lawryn is made into a bmx biker }'}, 'premier date'], 'result': None, 'ind': 3, 'tostr': 'hop { filter_eq { all_rows ; episode summary ; lawryn is made into a bmx biker } ; premier date }', 'tointer': 'select the rows whose episode summary record fuzzily matches to lawryn is made into a bmx biker . take the premier date record of this row .'}], 'result': True, 'ind': 4, 'tostr': 'less { hop { filter_eq { all_rows ; episode summary ; richard is made into boyfriend material } ; premier date } ; hop { filter_eq { all_rows ; episode summary ; lawryn is made into a bmx biker } ; premier date } } = true', 'tointer': 'select the rows whose episode summary record fuzzily matches to richard is made into boyfriend material . take the premier date record of this row . select the rows whose episode summary record fuzzily matches to lawryn is made into a bmx biker . take the premier date record of this row . the first record is less than the second record .'}
less { hop { filter_eq { all_rows ; episode summary ; richard is made into boyfriend material } ; premier date } ; hop { filter_eq { all_rows ; episode summary ; lawryn is made into a bmx biker } ; premier date } } = true
select the rows whose episode summary record fuzzily matches to richard is made into boyfriend material . take the premier date record of this row . select the rows whose episode summary record fuzzily matches to lawryn is made into a bmx biker . take the premier date record of this row . the first record is less than the second record .
5
5
{'less_4': 4, 'result_5': 5, 'str_hop_2': 2, 'filter_str_eq_0': 0, 'all_rows_6': 6, 'episode summary_7': 7, 'richard is made into boyfriend material_8': 8, 'premier date_9': 9, 'str_hop_3': 3, 'filter_str_eq_1': 1, 'all_rows_10': 10, 'episode summary_11': 11, 'lawryn is made into a bmx biker_12': 12, 'premier date_13': 13}
{'less_4': 'less', 'result_5': 'true', 'str_hop_2': 'str_hop', 'filter_str_eq_0': 'filter_str_eq', 'all_rows_6': 'all_rows', 'episode summary_7': 'episode summary', 'richard is made into boyfriend material_8': 'richard is made into boyfriend material', 'premier date_9': 'premier date', 'str_hop_3': 'str_hop', 'filter_str_eq_1': 'filter_str_eq', 'all_rows_10': 'all_rows', 'episode summary_11': 'episode summary', 'lawryn is made into a bmx biker_12': 'lawryn is made into a bmx biker', 'premier date_13': 'premier date'}
{'less_4': [5], 'result_5': [], 'str_hop_2': [4], 'filter_str_eq_0': [2], 'all_rows_6': [0], 'episode summary_7': [0], 'richard is made into boyfriend material_8': [0], 'premier date_9': [2], 'str_hop_3': [4], 'filter_str_eq_1': [3], 'all_rows_10': [1], 'episode summary_11': [1], 'lawryn is made into a bmx biker_12': [1], 'premier date_13': [3]}
['season', 'episode', 'episode summary', 'premier date', 'external link', 'coach']
[['5', '1', 'selena is made into a surfer chick', 'october 6 , 2004', 'episode summary', 'brad'], ['5', '2', 'richard is made into boyfriend material', 'october 13 , 2004', 'episode summary', 'samantha house'], ['5', '3', 'abby is made into a hip hop dancer', 'october 20 , 2004', 'episode summary', 'cedric crowe'], ['5', '5', 'jackie is made into a talent show chowder', 'december 30 , 2004', 'episode summary', 'brian'], ['5', '6', 'krystle is made into miss junior', 'january 6 , 2005', 'episode summary', 'ceylone boothe - grooms'], ['5', '7', 'dov is made into a wrestler', 'january 13 , 2005', 'episode summary', 'gene mills , kurt angle'], ['5', '8', 'anna is made into a leading lady', 'january 20 , 2005', 'episode summary', "john o'connell"], ['5', '9', 'lawryn is made into a bmx biker', 'january 27 , 2005', 'episode summary', 'warwick stevenson'], ['5', '10', 'mack is made into a ballet dancer', 'february 3 , 2005', 'episode summary', 'christopher fleming'], ['5', '11', 'ian is made into a salsa dancer', 'february 10 , 2005', 'episode summary', 'shaun perry']]
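The less comparison in this record is over the premier date column, so the tokenised dates have to be parsed before they can be ordered. A sketch assuming a plain strptime parse of the "month day , year" format; only the two hopped values are reproduced.

from datetime import datetime

def parse_date(text):
    # table dates are tokenised like "october 13 , 2004"
    return datetime.strptime(text.replace(" ,", ","), "%B %d, %Y")

richard = parse_date("october 13 , 2004")  # richard is made into boyfriend material
lawryn = parse_date("january 27 , 2005")   # lawryn is made into a bmx biker
print(richard < lawryn)  # True: the richard episode premiered earlier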
list of superfund sites in alaska
https://en.wikipedia.org/wiki/List_of_Superfund_sites_in_Alaska
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-10834634-1.html.csv
count
there were 4 superfund sites located in fairbanks north star .
{'scope': 'all', 'criterion': 'equal', 'value': 'fairbanks north star', 'result': '4', 'col': '3', 'subset': None}
{'func': 'eq', 'args': [{'func': 'count', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'borough or census area', 'fairbanks north star'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose borough or census area record fuzzily matches to fairbanks north star .', 'tostr': 'filter_eq { all_rows ; borough or census area ; fairbanks north star }'}], 'result': '4', 'ind': 1, 'tostr': 'count { filter_eq { all_rows ; borough or census area ; fairbanks north star } }', 'tointer': 'select the rows whose borough or census area record fuzzily matches to fairbanks north star . the number of such rows is 4 .'}, '4'], 'result': True, 'ind': 2, 'tostr': 'eq { count { filter_eq { all_rows ; borough or census area ; fairbanks north star } } ; 4 } = true', 'tointer': 'select the rows whose borough or census area record fuzzily matches to fairbanks north star . the number of such rows is 4 .'}
eq { count { filter_eq { all_rows ; borough or census area ; fairbanks north star } } ; 4 } = true
select the rows whose borough or census area record fuzzily matches to fairbanks north star . the number of such rows is 4 .
3
3
{'eq_2': 2, 'result_3': 3, 'count_1': 1, 'filter_str_eq_0': 0, 'all_rows_4': 4, 'borough or census area_5': 5, 'fairbanks north star_6': 6, '4_7': 7}
{'eq_2': 'eq', 'result_3': 'true', 'count_1': 'count', 'filter_str_eq_0': 'filter_str_eq', 'all_rows_4': 'all_rows', 'borough or census area_5': 'borough or census area', 'fairbanks north star_6': 'fairbanks north star', '4_7': '4'}
{'eq_2': [3], 'result_3': [], 'count_1': [2], 'filter_str_eq_0': [1], 'all_rows_4': [0], 'borough or census area_5': [0], 'fairbanks north star_6': [0], '4_7': [2]}
['cerclis id', 'name', 'borough or census area', 'proposed', 'listed', 'construction completed', 'partially deleted', 'deleted']
[['ak4170024323', 'adak naval air station', 'aleutians west', '10 / 14 / 1992', '05 / 31 / 1994', '-', '-', '-'], ['ak8570028649', 'elmendorf air force base', 'anchorage', '07 / 14 / 1989', '08 / 30 / 1990', '-', '-', '-'], ['ak6214522157', 'fort richardson ( usarmy )', 'anchorage', '06 / 23 / 1993', '05 / 31 / 1994', '09 / 28 / 2006', '-', '-'], ['akd980978787', 'standard steel & metals salvage yard ( usdot )', 'anchorage', '07 / 14 / 1989', '08 / 30 / 1990', '09 / 16 / 1999', '-', '09 / 30 / 2002'], ['akd004904215', 'alaska battery enterprises', 'fairbanks north star', '06 / 24 / 1988', '03 / 31 / 1989', '03 / 02 / 1993', '-', '07 / 26 / 1996'], ['akd980988158', 'arctic surplus', 'fairbanks north star', '10 / 26 / 1989', '08 / 30 / 1990', '04 / 18 / 2005', '-', '09 / 25 / 2006'], ['ak1570028646', 'eielson air force base', 'fairbanks north star', '07 / 14 / 1989', '11 / 21 / 1989', '09 / 30 / 1998', '-', '-'], ['ak6210022426', 'fort wainwright', 'fairbanks north star', '07 / 14 / 1989', '08 / 30 / 1990', '09 / 27 / 2002', '-', '-'], ['ak0001897602', 'salt chuck mine', 'outer ketchikan', '09 / 23 / 2009', '03 / 04 / 2010', '-', '-', '-']]
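The count pattern just tallies the filtered rows and compares the tally against the stated number. A sketch over the borough or census area column copied from the table above.

boroughs = ["aleutians west", "anchorage", "anchorage", "anchorage",
            "fairbanks north star", "fairbanks north star",
            "fairbanks north star", "fairbanks north star",
            "outer ketchikan"]

matches = sum(b == "fairbanks north star" for b in boroughs)
print(matches == 4)  # True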
1972 miami dolphins season
https://en.wikipedia.org/wiki/1972_Miami_Dolphins_season
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-14202514-1.html.csv
majority
most of the 1972 miami dolphins season games were scheduled for the month of august .
{'scope': 'all', 'col': '2', 'most_or_all': 'most', 'criterion': 'fuzzily_match', 'value': 'august', 'subset': None}
{'func': 'most_str_eq', 'args': ['all_rows', 'date', 'august'], 'result': True, 'ind': 0, 'tointer': 'for the date records of all rows , most of them fuzzily match to august .', 'tostr': 'most_eq { all_rows ; date ; august } = true'}
most_eq { all_rows ; date ; august } = true
for the date records of all rows , most of them fuzzily match to august .
1
1
{'most_str_eq_0': 0, 'result_1': 1, 'all_rows_2': 2, 'date_3': 3, 'august_4': 4}
{'most_str_eq_0': 'most_str_eq', 'result_1': 'true', 'all_rows_2': 'all_rows', 'date_3': 'date', 'august_4': 'august'}
{'most_str_eq_0': [1], 'result_1': [], 'all_rows_2': [0], 'date_3': [0], 'august_4': [0]}
['week', 'date', 'opponent', 'result', 'record']
[['1', 'august 5 , 1972', 'detroit lions', 'l 23 - 31', '0 - 1'], ['2', 'august 12 , 1972', 'green bay packers', 'l 13 - 14', '0 - 2'], ['3', 'august 19 , 1972', 'cincinnati bengals', 'w 35 - 17', '1 - 2'], ['4', 'august 25 , 1972', 'atlanta falcons', 'w 24 - 10', '2 - 2'], ['5', 'august 31 , 1972', 'washington redskins', 'l 24 - 27', '2 - 3'], ['6', 'september 10 , 1972', 'minnesota vikings', 'w 21 - 19', '3 - 3']]
united states house of representatives elections , 1920
https://en.wikipedia.org/wiki/United_States_House_of_Representatives_elections%2C_1920
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/1-1342451-16.html.csv
ordinal
henry garland dupré was the second earliest first elected incumbent in the 1920 united states house of representatives elections .
{'row': '2', 'col': '4', 'order': '2', 'col_other': '1', 'max_or_min': 'min_to_max', 'value_mentioned': 'no', 'scope': 'all', 'subset': None}
{'func': 'str_eq', 'args': [{'func': 'str_hop', 'args': [{'func': 'nth_argmin', 'args': ['all_rows', 'first elected', '2'], 'result': None, 'ind': 0, 'tostr': 'nth_argmin { all_rows ; first elected ; 2 }'}, 'district'], 'result': 'louisiana 2', 'ind': 1, 'tostr': 'hop { nth_argmin { all_rows ; first elected ; 2 } ; district }'}, 'louisiana 2'], 'result': True, 'ind': 2, 'tostr': 'eq { hop { nth_argmin { all_rows ; first elected ; 2 } ; district } ; louisiana 2 } = true', 'tointer': 'select the row whose first elected record of all rows is 2nd minimum . the district record of this row is louisiana 2 .'}
eq { hop { nth_argmin { all_rows ; first elected ; 2 } ; district } ; louisiana 2 } = true
select the row whose first elected record of all rows is 2nd minimum . the district record of this row is louisiana 2 .
3
3
{'str_eq_2': 2, 'result_3': 3, 'str_hop_1': 1, 'nth_argmin_0': 0, 'all_rows_4': 4, 'first elected_5': 5, '2_6': 6, 'district_7': 7, 'louisiana 2_8': 8}
{'str_eq_2': 'str_eq', 'result_3': 'true', 'str_hop_1': 'str_hop', 'nth_argmin_0': 'nth_argmin', 'all_rows_4': 'all_rows', 'first elected_5': 'first elected', '2_6': '2', 'district_7': 'district', 'louisiana 2_8': 'louisiana 2'}
{'str_eq_2': [3], 'result_3': [], 'str_hop_1': [2], 'nth_argmin_0': [1], 'all_rows_4': [0], 'first elected_5': [0], '2_6': [0], 'district_7': [1], 'louisiana 2_8': [2]}
['district', 'incumbent', 'party', 'first elected', 'result', 'candidates']
[['louisiana 1', "james o'connor", 'democratic', '1918', 're - elected', "james o'connor ( d ) unopposed"], ['louisiana 2', 'henry garland dupré', 'democratic', '1908', 're - elected', 'henry garland dupré ( d ) unopposed'], ['louisiana 3', 'whitmell p martin', 'democratic', '1914', 're - elected', 'whitmell p martin ( d ) unopposed'], ['louisiana 4', 'john t watkins', 'democratic', '1904', 'lost renomination democratic hold', 'john n sandlin ( d ) unopposed'], ['louisiana 5', 'riley joseph wilson', 'democratic', '1914', 're - elected', 'riley joseph wilson ( d ) unopposed'], ['louisiana 6', 'jared y sanders , sr', 'democratic', '1916', 'retired to run for us senate democratic hold', 'george k favrot ( d ) unopposed'], ['louisiana 7', 'ladislas lazaro', 'democratic', '1912', 're - elected', 'ladislas lazaro ( d ) unopposed']]
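nth_argmin needs a numeric read of the first elected column, and one cell ("1901 ( special )") is not a plain integer, so the outcome depends on the parsing convention. The sketch below makes the assumption (ours, not necessarily the dataset's) that cells failing a plain int() parse are skipped; under that convention the 2nd minimum is louisiana 2, which matches the stated result.

rows = [
    ("louisiana 1", "1918"),
    ("louisiana 2", "1908"),
    ("louisiana 3", "1914"),
    ("louisiana 4", "1904"),
    ("louisiana 5", "1914"),
    ("louisiana 6", "1916"),
    ("louisiana 7", "1901 ( special )"),
]

def try_int(text):
    try:
        return int(text)
    except ValueError:
        return None  # non-numeric cells are skipped under this assumption

numeric = [(district, try_int(v)) for district, v in rows if try_int(v) is not None]
second_min = sorted(numeric, key=lambda r: r[1])[1]
print(second_min[0] == "louisiana 2")  # True under the parsing assumption above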
peak water
https://en.wikipedia.org/wiki/Peak_water
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/1-15909409-3.html.csv
comparative
thailand has a greater number of total freshwater withdrawal than ecuador does .
{'row_1': '13', 'row_2': '14', 'col': '2', 'col_other': '1', 'relation': 'greater', 'record_mentioned': 'no', 'diff_result': None}
{'func': 'greater', 'args': [{'func': 'num_hop', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', '', 'thailand'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose record fuzzily matches to thailand .', 'tostr': 'filter_eq { all_rows ; ; thailand }'}, 'total freshwater withdrawal'], 'result': None, 'ind': 2, 'tostr': 'hop { filter_eq { all_rows ; ; thailand } ; total freshwater withdrawal }', 'tointer': 'select the rows whose record fuzzily matches to thailand . take the total freshwater withdrawal record of this row .'}, {'func': 'num_hop', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', '', 'ecuador'], 'result': None, 'ind': 1, 'tointer': 'select the rows whose record fuzzily matches to ecuador .', 'tostr': 'filter_eq { all_rows ; ; ecuador }'}, 'total freshwater withdrawal'], 'result': None, 'ind': 3, 'tostr': 'hop { filter_eq { all_rows ; ; ecuador } ; total freshwater withdrawal }', 'tointer': 'select the rows whose record fuzzily matches to ecuador . take the total freshwater withdrawal record of this row .'}], 'result': True, 'ind': 4, 'tostr': 'greater { hop { filter_eq { all_rows ; ; thailand } ; total freshwater withdrawal } ; hop { filter_eq { all_rows ; ; ecuador } ; total freshwater withdrawal } } = true', 'tointer': 'select the rows whose record fuzzily matches to thailand . take the total freshwater withdrawal record of this row . select the rows whose record fuzzily matches to ecuador . take the total freshwater withdrawal record of this row . the first record is greater than the second record .'}
greater { hop { filter_eq { all_rows ; ; thailand } ; total freshwater withdrawal } ; hop { filter_eq { all_rows ; ; ecuador } ; total freshwater withdrawal } } = true
select the rows whose record fuzzily matches to thailand . take the total freshwater withdrawal record of this row . select the rows whose record fuzzily matches to ecuador . take the total freshwater withdrawal record of this row . the first record is greater than the second record .
5
5
{'greater_4': 4, 'result_5': 5, 'num_hop_2': 2, 'filter_str_eq_0': 0, 'all_rows_6': 6, '_7': 7, 'thailand_8': 8, 'total freshwater withdrawal_9': 9, 'num_hop_3': 3, 'filter_str_eq_1': 1, 'all_rows_10': 10, '_11': 11, 'ecuador_12': 12, 'total freshwater withdrawal_13': 13}
{'greater_4': 'greater', 'result_5': 'true', 'num_hop_2': 'num_hop', 'filter_str_eq_0': 'filter_str_eq', 'all_rows_6': 'all_rows', '_7': '', 'thailand_8': 'thailand', 'total freshwater withdrawal_9': 'total freshwater withdrawal', 'num_hop_3': 'num_hop', 'filter_str_eq_1': 'filter_str_eq', 'all_rows_10': 'all_rows', '_11': '', 'ecuador_12': 'ecuador', 'total freshwater withdrawal_13': 'total freshwater withdrawal'}
{'greater_4': [5], 'result_5': [], 'num_hop_2': [4], 'filter_str_eq_0': [2], 'all_rows_6': [0], '_7': [0], 'thailand_8': [0], 'total freshwater withdrawal_9': [2], 'num_hop_3': [4], 'filter_str_eq_1': [3], 'all_rows_10': [1], '_11': [1], 'ecuador_12': [1], 'total freshwater withdrawal_13': [3]}
['', 'total freshwater withdrawal', 'per capita withdrawal', 'domestic use', 'industrial use', 'agricultural use']
[['turkmenistan', '24.65', '5104', '2', '1', '98'], ['kazakhstan', '35', '2360', '2', '17', '82'], ['uzbekistan', '58.34', '2194', '5', '2', '93'], ['guyana', '1.64', '2187', '2', '1', '98'], ['hungary', '21.03', '2082', '9', '59', '32'], ['azerbaijan', '17.25', '2051', '5', '28', '68'], ['kyrgyzstan', '10.08', '1916', '3', '3', '94'], ['tajikistan', '11.96', '1837', '4', '5', '92'], ['usa', '477', '1600', '13', '46', '41'], ['suriname', '0.67', '1489', '4', '3', '93'], ['iraq', '42.7', '1482', '3', '5', '92'], ['canada', '44.72', '1386', '20', '69', '12'], ['thailand', '82.75', '1288', '2', '2', '95'], ['ecuador', '16.98', '1283', '12', '5', '82']]
lancashire county council election , 2009
https://en.wikipedia.org/wiki/Lancashire_County_Council_election%2C_2009
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-18992950-1.html.csv
superlative
the highest total in the lancashire county council election in 2009 , was for the labour party .
{'scope': 'all', 'col_superlative': '14', 'row_superlative': '1', 'value_mentioned': 'no', 'max_or_min': 'max', 'other_col': '1', 'subset': None}
{'func': 'str_eq', 'args': [{'func': 'str_hop', 'args': [{'func': 'argmax', 'args': ['all_rows', 'total'], 'result': None, 'ind': 0, 'tostr': 'argmax { all_rows ; total }'}, 'party'], 'result': 'labour', 'ind': 1, 'tostr': 'hop { argmax { all_rows ; total } ; party }'}, 'labour'], 'result': True, 'ind': 2, 'tostr': 'eq { hop { argmax { all_rows ; total } ; party } ; labour } = true', 'tointer': 'select the row whose total record of all rows is maximum . the party record of this row is labour .'}
eq { hop { argmax { all_rows ; total } ; party } ; labour } = true
select the row whose total record of all rows is maximum . the party record of this row is labour .
3
3
{'str_eq_2': 2, 'result_3': 3, 'str_hop_1': 1, 'argmax_0': 0, 'all_rows_4': 4, 'total_5': 5, 'party_6': 6, 'labour_7': 7}
{'str_eq_2': 'str_eq', 'result_3': 'true', 'str_hop_1': 'str_hop', 'argmax_0': 'argmax', 'all_rows_4': 'all_rows', 'total_5': 'total', 'party_6': 'party', 'labour_7': 'labour'}
{'str_eq_2': [3], 'result_3': [], 'str_hop_1': [2], 'argmax_0': [1], 'all_rows_4': [0], 'total_5': [0], 'party_6': [1], 'labour_7': [2]}
['party', 'burnley', 'chorley', 'fylde', 'hyndburn', 'lancaster', 'pendle', 'preston', 'ribble valley', 'rossendale', 'south ribble', 'west lancashire', 'wyre', 'total']
[['labour', '6', '4', '0', '6', '6', '1', '6', '0', '3', '5', '4', '3', '44'], ['conservative', '0', '3', '5', '0', '3', '2', '3', '3', '2', '1', '4', '5', '31'], ['liberal democrat', '0', '0', '0', '0', '0', '3', '1', '1', '0', '1', '0', '0', '6'], ['green', '0', '0', '0', '0', '1', '0', '0', '0', '0', '0', '0', '0', '1'], ['idle toad', '0', '0', '0', '0', '0', '0', '0', '0', '0', '1', '0', '0', '1'], ['independent', '0', '0', '1', '0', '0', '0', '0', '0', '0', '0', '0', '0', '1']]
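argmax here is a plain maximum over the total column followed by a hop to the party column. A sketch with the (party, total) pairs taken from the table.

results = [("labour", 44), ("conservative", 31), ("liberal democrat", 6),
           ("green", 1), ("idle toad", 1), ("independent", 1)]

top_party = max(results, key=lambda row: row[1])[0]
print(top_party == "labour")  # True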
triple crown ( basketball )
https://en.wikipedia.org/wiki/Triple_Crown_%28basketball%29
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-12072982-5.html.csv
majority
the majority of triple crowns had fiba korać cup ( 3rd tier ) as the european cup .
{'scope': 'all', 'col': '5', 'most_or_all': 'most', 'criterion': 'equal', 'value': 'fiba korać cup', 'subset': None}
{'func': 'most_str_eq', 'args': ['all_rows', 'european cup', 'fiba korać cup'], 'result': True, 'ind': 0, 'tointer': 'for the european cup records of all rows , most of them fuzzily match to fiba korać cup .', 'tostr': 'most_eq { all_rows ; european cup ; fiba korać cup } = true'}
most_eq { all_rows ; european cup ; fiba korać cup } = true
for the european cup records of all rows , most of them fuzzily match to fiba korać cup .
1
1
{'most_str_eq_0': 0, 'result_1': 1, 'all_rows_2': 2, 'european cup_3': 3, 'fiba korać cup_4': 4}
{'most_str_eq_0': 'most_str_eq', 'result_1': 'true', 'all_rows_2': 'all_rows', 'european cup_3': 'european cup', 'fiba korać cup_4': 'fiba korać cup'}
{'most_str_eq_0': [1], 'result_1': [], 'all_rows_2': [0], 'european cup_3': [0], 'fiba korać cup_4': [0]}
['season', 'club', 'national league', 'national cup', 'european cup']
[['1976 - 77', 'kk split ( jugoplastika )', 'yugoslav first federal league', 'yugoslav cup', 'fiba korać cup ( 3rd tier )'], ['1978 - 79', 'kk partizan', 'yugoslav first federal league', 'yugoslav cup', 'fiba korać cup ( 3rd tier )'], ['1982 - 83', 'limoges csp', 'french nationale 1', 'french federation cup', 'fiba korać cup ( 3rd tier )'], ['1986 - 87', 'fc barcelona', 'spanish acb league', "spanish king 's cup", 'fiba korać cup ( 3rd tier )'], ['1995 - 96', 'efes pilsen', 'turkish basketball league', 'turkish basketball cup', 'fiba korać cup ( 3rd tier )'], ['1999 - 00', 'limoges csp', 'french pro a league', 'french basketball cup', 'fiba korać cup ( 3rd tier )'], ['2004 - 05', 'csu asesoft ploiesti', 'romanian divizia a', 'romanian basketball cup', 'fiba eurocup challenge ( 4th tier )'], ['2011 - 12', 'beşiktaş', 'turkish basketball league', 'turkish basketball cup', 'fiba eurochallenge ( 3rd tier )']]
2009 - 10 big ten conference men 's basketball season
https://en.wikipedia.org/wiki/2009%E2%80%9310_Big_Ten_Conference_men%27s_basketball_season
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/1-23058971-8.html.csv
unique
the game between north carolina and michigan state is the only game to have more than 20000 in attendance .
{'scope': 'all', 'row': '5', 'col': '4', 'col_other': '1,2', 'criterion': 'greater_than', 'value': '20000', 'subset': None}
{'func': 'and', 'args': [{'func': 'only', 'args': [{'func': 'filter_greater', 'args': ['all_rows', 'attendance', '20000'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose attendance record is greater than 20000 .', 'tostr': 'filter_greater { all_rows ; attendance ; 20000 }'}], 'result': True, 'ind': 1, 'tostr': 'only { filter_greater { all_rows ; attendance ; 20000 } }', 'tointer': 'select the rows whose attendance record is greater than 20000 . there is only one such row in the table .'}, {'func': 'and', 'args': [{'func': 'str_eq', 'args': [{'func': 'str_hop', 'args': [{'func': 'filter_greater', 'args': ['all_rows', 'attendance', '20000'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose attendance record is greater than 20000 .', 'tostr': 'filter_greater { all_rows ; attendance ; 20000 }'}, 'acc team'], 'result': '10 north carolina', 'ind': 2, 'tostr': 'hop { filter_greater { all_rows ; attendance ; 20000 } ; acc team }'}, '10 north carolina'], 'result': True, 'ind': 3, 'tostr': 'eq { hop { filter_greater { all_rows ; attendance ; 20000 } ; acc team } ; 10 north carolina }', 'tointer': 'the acc team record of this unqiue row is 10 north carolina .'}, {'func': 'str_eq', 'args': [{'func': 'str_hop', 'args': [{'func': 'filter_greater', 'args': ['all_rows', 'attendance', '20000'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose attendance record is greater than 20000 .', 'tostr': 'filter_greater { all_rows ; attendance ; 20000 }'}, 'big ten team'], 'result': '9 michigan state', 'ind': 4, 'tostr': 'hop { filter_greater { all_rows ; attendance ; 20000 } ; big ten team }'}, '9 michigan state'], 'result': True, 'ind': 5, 'tostr': 'eq { hop { filter_greater { all_rows ; attendance ; 20000 } ; big ten team } ; 9 michigan state }', 'tointer': 'the big ten team record of this unqiue row is 9 michigan state .'}], 'result': True, 'ind': 6, 'tostr': 'and { eq { hop { filter_greater { all_rows ; attendance ; 20000 } ; acc team } ; 10 north carolina } ; eq { hop { filter_greater { all_rows ; attendance ; 20000 } ; big ten team } ; 9 michigan state } }', 'tointer': 'the acc team record of this unqiue row is 10 north carolina . the big ten team record of this unqiue row is 9 michigan state .'}], 'result': True, 'ind': 7, 'tostr': 'and { only { filter_greater { all_rows ; attendance ; 20000 } } ; and { eq { hop { filter_greater { all_rows ; attendance ; 20000 } ; acc team } ; 10 north carolina } ; eq { hop { filter_greater { all_rows ; attendance ; 20000 } ; big ten team } ; 9 michigan state } } } = true', 'tointer': 'select the rows whose attendance record is greater than 20000 . there is only one such row in the table . the acc team record of this unqiue row is 10 north carolina . the big ten team record of this unqiue row is 9 michigan state .'}
and { only { filter_greater { all_rows ; attendance ; 20000 } } ; and { eq { hop { filter_greater { all_rows ; attendance ; 20000 } ; acc team } ; 10 north carolina } ; eq { hop { filter_greater { all_rows ; attendance ; 20000 } ; big ten team } ; 9 michigan state } } } = true
select the rows whose attendance record is greater than 20000 . there is only one such row in the table . the acc team record of this unique row is 10 north carolina . the big ten team record of this unique row is 9 michigan state .
10
8
{'and_7': 7, 'result_8': 8, 'only_1': 1, 'filter_greater_0': 0, 'all_rows_9': 9, 'attendance_10': 10, '20000_11': 11, 'and_6': 6, 'str_eq_3': 3, 'str_hop_2': 2, 'acc team_12': 12, '10 north carolina_13': 13, 'str_eq_5': 5, 'str_hop_4': 4, 'big ten team_14': 14, '9 michigan state_15': 15}
{'and_7': 'and', 'result_8': 'true', 'only_1': 'only', 'filter_greater_0': 'filter_greater', 'all_rows_9': 'all_rows', 'attendance_10': 'attendance', '20000_11': '20000', 'and_6': 'and', 'str_eq_3': 'str_eq', 'str_hop_2': 'str_hop', 'acc team_12': 'acc team', '10 north carolina_13': '10 north carolina', 'str_eq_5': 'str_eq', 'str_hop_4': 'str_hop', 'big ten team_14': 'big ten team', '9 michigan state_15': '9 michigan state'}
{'and_7': [8], 'result_8': [], 'only_1': [7], 'filter_greater_0': [1, 2, 4], 'all_rows_9': [0], 'attendance_10': [0], '20000_11': [0], 'and_6': [7], 'str_eq_3': [6], 'str_hop_2': [3], 'acc team_12': [2], '10 north carolina_13': [3], 'str_eq_5': [6], 'str_hop_4': [5], 'big ten team_14': [4], '9 michigan state_15': [5]}
['acc team', 'big ten team', 'location', 'attendance', 'winner', 'challenge leader']
[['virginia', 'penn state', 'john paul jones arena charlottesville , va', '8898', 'penn state ( 69 - 66 )', 'big ten ( 1 - 0 )'], ['wake forest', '4 purdue', 'mackey arena west lafayette , in', '14123', 'purdue ( 69 - 58 )', 'big ten ( 2 - 0 )'], ['nc state', 'northwestern', 'rbc center raleigh , nc', '11913', 'northwestern ( 65 - 53 )', 'big ten ( 3 - 0 )'], ['maryland', 'indiana', 'assembly hall bloomington , in', '17039', 'maryland ( 80 - 68 )', 'big ten ( 3 - 1 )'], ['10 north carolina', '9 michigan state', 'dean smith center chapel hill , nc', '21346', 'north carolina ( 89 - 82 )', 'big ten ( 3 - 2 )'], ['virginia tech', 'iowa', 'carver - hawkeye arena iowa city , ia', '8755', 'virginia tech ( 70 - 64 )', 'tied ( 3 - 3 )'], ['18 clemson', 'illinois', 'littlejohn coliseum clemson , sc', '10000', 'illinois ( 76 - 74 )', 'big ten ( 4 - 3 )'], ['miami', 'minnesota', 'bankunited center coral gables , fl', '5157', 'miami ( 63 - 58 )', 'tied ( 4 - 4 )'], ['boston college', 'michigan', 'crisler arena ann arbor , mi', '10718', 'boston college ( 62 - 58 )', 'acc ( 5 - 4 )'], ['6 duke', 'wisconsin', 'kohl center madison , wi', '17230', 'wisconsin ( 73 - 69 )', 'tied ( 5 - 5 )']]
13th united states congress
https://en.wikipedia.org/wiki/13th_United_States_Congress
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/1-225096-4.html.csv
superlative
in the 13th united states congress , the successor that was seated the latest was from ohio 's 6th district .
{'scope': 'all', 'col_superlative': '5', 'row_superlative': '12', 'value_mentioned': 'no', 'max_or_min': 'max', 'other_col': '1', 'subset': None}
{'func': 'str_eq', 'args': [{'func': 'str_hop', 'args': [{'func': 'argmax', 'args': ['all_rows', 'date successor seated'], 'result': None, 'ind': 0, 'tostr': 'argmax { all_rows ; date successor seated }'}, 'district'], 'result': 'ohio 6th', 'ind': 1, 'tostr': 'hop { argmax { all_rows ; date successor seated } ; district }'}, 'ohio 6th'], 'result': True, 'ind': 2, 'tostr': 'eq { hop { argmax { all_rows ; date successor seated } ; district } ; ohio 6th } = true', 'tointer': 'select the row whose date successor seated record of all rows is maximum . the district record of this row is ohio 6th .'}
eq { hop { argmax { all_rows ; date successor seated } ; district } ; ohio 6th } = true
select the row whose date successor seated record of all rows is maximum . the district record of this row is ohio 6th .
3
3
{'str_eq_2': 2, 'result_3': 3, 'str_hop_1': 1, 'argmax_0': 0, 'all_rows_4': 4, 'date successor seated_5': 5, 'district_6': 6, 'ohio 6th_7': 7}
{'str_eq_2': 'str_eq', 'result_3': 'true', 'str_hop_1': 'str_hop', 'argmax_0': 'argmax', 'all_rows_4': 'all_rows', 'date successor seated_5': 'date successor seated', 'district_6': 'district', 'ohio 6th_7': 'ohio 6th'}
{'str_eq_2': [3], 'result_3': [], 'str_hop_1': [2], 'argmax_0': [1], 'all_rows_4': [0], 'date successor seated_5': [0], 'district_6': [1], 'ohio 6th_7': [2]}
['district', 'vacator', 'reason for change', 'successor', 'date successor seated']
[['new york 15th', 'vacant', 'rep - elect william dowse died on february 18 , 1813', 'john m bowers ( f )', 'seated june 21 , 1813'], ['pennsylvania 5th', 'robert whitehill ( dr )', 'died april 8 , 1813', 'john rea ( dr )', 'seated may 28 , 1813'], ['new york 2nd', 'egbert benson ( f )', 'resigned august 2 , 1813', 'william irving ( dr )', 'seated january 22 , 1814'], ['pennsylvania 3rd', 'john gloninger ( f )', 'resigned august 2 , 1813', 'edward crouch ( dr )', 'seated december 6 , 1813'], ['pennsylvania 7th', 'john m hyneman ( dr )', 'resigned august 2 , 1813', 'daniel udree ( dr )', 'seated december 6 , 1813'], ['illinois territory at - large', 'shadrach bond', 'until august 2 , 1813', 'benjamin stephenson', 'seated november 14 , 1814'], ['tennessee 5th', 'felix grundy ( dr )', 'resigned sometime in 1814', 'newton cannon ( dr )', 'seated october 15 , 1814'], ['kentucky 2nd', 'henry clay ( dr )', 'resigned january 19 , 1814', 'joseph h hawkins ( dr )', 'seated march 29 , 1814'], ['virginia 11th', 'john dawson ( dr )', 'died march 31 , 1814', 'philip barbour ( dr )', 'seated september 19 , 1814'], ['massachusetts 4th', 'william m richardson ( dr )', 'resigned april 18 , 1814', 'samuel dana ( dr )', 'seated september 22 , 1814'], ['new jersey 3rd', 'jacob hufty ( f )', 'died may 20 , 1814', 'thomas bines ( dr )', 'seated november 2 , 1814'], ['ohio 6th', 'reasin beall ( dr )', 'resigned june 7 , 1814', 'david clendenin ( dr )', 'seated december 22 , 1814'], ['pennsylvania 3rd', 'james whitehill ( dr )', 'resigned september 1 , 1814', 'amos slaymaker ( f )', 'seated december 12 , 1814']]
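argmax over the date successor seated column has to cope with the "seated " prefix on every cell. A sketch (only a few representative rows reproduced) that strips the prefix and parses the tokenised date before taking the maximum; the seated_date helper is an assumption of this sketch.

from datetime import datetime

def seated_date(text):
    # cells look like "seated december 22 , 1814"
    return datetime.strptime(text.removeprefix("seated ").replace(" ,", ","),
                             "%B %d, %Y")

rows = [
    ("new york 15th", "seated june 21 , 1813"),
    ("illinois territory at - large", "seated november 14 , 1814"),
    ("pennsylvania 3rd", "seated december 12 , 1814"),
    ("ohio 6th", "seated december 22 , 1814"),
]

latest = max(rows, key=lambda r: seated_date(r[1]))
print(latest[0] == "ohio 6th")  # True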
kakrapar atomic power station
https://en.wikipedia.org/wiki/Kakrapar_Atomic_Power_Station
https://raw.githubusercontent.com/wenhuchen/Table-Fact-Checking/master/data/all_csv/2-12901997-1.html.csv
unique
kakrapar 2 is the only unit of the kakrapar atomic power station that started construction in 1985 .
{'scope': 'all', 'row': '3', 'col': '4', 'col_other': '1', 'criterion': 'fuzzily_match', 'value': '1985', 'subset': None}
{'func': 'and', 'args': [{'func': 'only', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'construction start', '1985'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose construction start record fuzzily matches to 1985 .', 'tostr': 'filter_eq { all_rows ; construction start ; 1985 }'}], 'result': True, 'ind': 1, 'tostr': 'only { filter_eq { all_rows ; construction start ; 1985 } }', 'tointer': 'select the rows whose construction start record fuzzily matches to 1985 . there is only one such row in the table .'}, {'func': 'str_eq', 'args': [{'func': 'str_hop', 'args': [{'func': 'filter_str_eq', 'args': ['all_rows', 'construction start', '1985'], 'result': None, 'ind': 0, 'tointer': 'select the rows whose construction start record fuzzily matches to 1985 .', 'tostr': 'filter_eq { all_rows ; construction start ; 1985 }'}, 'unit'], 'result': 'kakrapar 2', 'ind': 2, 'tostr': 'hop { filter_eq { all_rows ; construction start ; 1985 } ; unit }'}, 'kakrapar 2'], 'result': True, 'ind': 3, 'tostr': 'eq { hop { filter_eq { all_rows ; construction start ; 1985 } ; unit } ; kakrapar 2 }', 'tointer': 'the unit record of this unqiue row is kakrapar 2 .'}], 'result': True, 'ind': 4, 'tostr': 'and { only { filter_eq { all_rows ; construction start ; 1985 } } ; eq { hop { filter_eq { all_rows ; construction start ; 1985 } ; unit } ; kakrapar 2 } } = true', 'tointer': 'select the rows whose construction start record fuzzily matches to 1985 . there is only one such row in the table . the unit record of this unqiue row is kakrapar 2 .'}
and { only { filter_eq { all_rows ; construction start ; 1985 } } ; eq { hop { filter_eq { all_rows ; construction start ; 1985 } ; unit } ; kakrapar 2 } } = true
select the rows whose construction start record fuzzily matches to 1985 . there is only one such row in the table . the unit record of this unique row is kakrapar 2 .
6
5
{'and_4': 4, 'result_5': 5, 'only_1': 1, 'filter_str_eq_0': 0, 'all_rows_6': 6, 'construction start_7': 7, '1985_8': 8, 'str_eq_3': 3, 'str_hop_2': 2, 'unit_9': 9, 'kakrapar 2_10': 10}
{'and_4': 'and', 'result_5': 'true', 'only_1': 'only', 'filter_str_eq_0': 'filter_str_eq', 'all_rows_6': 'all_rows', 'construction start_7': 'construction start', '1985_8': '1985', 'str_eq_3': 'str_eq', 'str_hop_2': 'str_hop', 'unit_9': 'unit', 'kakrapar 2_10': 'kakrapar 2'}
{'and_4': [5], 'result_5': [], 'only_1': [4], 'filter_str_eq_0': [1, 2], 'all_rows_6': [0], 'construction start_7': [0], '1985_8': [0], 'str_eq_3': [4], 'str_hop_2': [3], 'unit_9': [2], 'kakrapar 2_10': [3]}
['unit', 'type', 'gross mw', 'construction start', 'operation start']
[['phase i', 'phase i', 'phase i', 'phase i', 'phase i'], ['kakrapar 1', 'phwr', '220', '1 december 1984', '6 may 1993'], ['kakrapar 2', 'phwr', '220', '1 april 1985', '1 september 1995'], ['phase ii', 'phase ii', 'phase ii', 'phase ii', 'phase ii'], ['kakrapar 3', 'phwr', '700', '22 november 2010', '2015'], ['kakrapar 4', 'phwr', '700', '22 november 2010', '2016']]