image (string, 15 chars) · id (string, 8–12 chars) · conversations (string, 79–268 chars)
00000004545.jpg
4545_evjvqa
[{"from": "human", "value": "what color shirt is the person running next to the bike with the flag wearing?\n<image>"}, {"from": "gpt", "value": "wear a blue shirt"}]
00000004545.jpg
4545_evjvqa
[{"from": "human", "value": "where are the children going?\n<image>"}, {"from": "gpt", "value": "the children are marching"}]
00000004547.jpg
4547_evjvqa
[{"from": "human", "value": "what is the woman sitting in front of the store doing?\n<image>"}, {"from": "gpt", "value": "choosing goods"}]
00000004547.jpg
4547_evjvqa
[{"from": "human", "value": "how many people are there behind the woman sitting?\n<image>"}, {"from": "gpt", "value": "there is a person behind"}]
00000004549.jpg
4549_evjvqa
[{"from": "human", "value": "how many cars are about to go through the gate?\n<image>"}, {"from": "gpt", "value": "there are two cars about to pass through the gate"}]
00000004549.jpg
4549_evjvqa
[{"from": "human", "value": "what are the two sides of the road decorated with?\n<image>"}, {"from": "gpt", "value": "there are flags on both sides of the road"}]
00000004551.jpg
4551_evjvqa
[{"from": "human", "value": "what color are the flags everyone holding?\n<image>"}, {"from": "gpt", "value": "the flags are red"}]
00000004551.jpg
4551_evjvqa
[{"from": "human", "value": "what is on display on the yard?\n<image>"}, {"from": "gpt", "value": "the paintings are being displayed on the yard"}]
00000004551.jpg
4551_evjvqa
[{"from": "human", "value": "which side of the paintings row is everyone standing on?\n<image>"}, {"from": "gpt", "value": "everyone is standing to the left of the row of paintings"}]
00000004552.jpg
4552_evjvqa
[{"from": "human", "value": "where is the car parked?\n<image>"}, {"from": "gpt", "value": "the car is parked in front of a supermarket"}]
00000004553.jpg
4553_evjvqa
[{"from": "human", "value": "how big is the alley?\n<image>"}, {"from": "gpt", "value": "the alley is very narrow in size"}]
00000004553.jpg
4553_evjvqa
[{"from": "human", "value": "how many vehicles are placed in front of the alley?\n<image>"}, {"from": "gpt", "value": "there is a motorbike in front of the alley"}]
00000004553.jpg
4553_evjvqa
[{"from": "human", "value": "how many people are standing next to the vehicle in front of the alley?\n<image>"}, {"from": "gpt", "value": "there is a person standing next to the vehicle"}]
00000004555.jpg
4555_evjvqa
[{"from": "human", "value": "how many people are standing in front of the gate?\n<image>"}, {"from": "gpt", "value": "there is a person standing in front of the gate"}]
00000004555.jpg
4555_evjvqa
[{"from": "human", "value": "in which direction is the person leading the bicycle?\n<image>"}, {"from": "gpt", "value": "going out of the street"}]
00000004556.jpg
4556_evjvqa
[{"from": "human", "value": "how is the weather here?\n<image>"}, {"from": "gpt", "value": "it is raining"}]
00000004556.jpg
4556_evjvqa
[{"from": "human", "value": "where are people going?\n<image>"}, {"from": "gpt", "value": "heading towards the castle"}]
00000004556.jpg
4556_evjvqa
[{"from": "human", "value": "how are visitors avoiding getting wet?\n<image>"}, {"from": "gpt", "value": "by using umbrellas"}]
00000004557.jpg
4557_evjvqa
[{"from": "human", "value": "how many main dishes are on the table?\n<image>"}, {"from": "gpt", "value": "there are three main dishes on the table"}]
00000004557.jpg
4557_evjvqa
[{"from": "human", "value": "where is the sauce cup placed?\n<image>"}, {"from": "gpt", "value": "placed below"}]
00000004557.jpg
4557_evjvqa
[{"from": "human", "value": "where is the soup placed?\n<image>"}, {"from": "gpt", "value": "the soup is placed on the left"}]
00000004558.jpg
4558_evjvqa
[{"from": "human", "value": "where are two people standing?\n<image>"}, {"from": "gpt", "value": "standing in front of a church"}]
00000004558.jpg
4558_evjvqa
[{"from": "human", "value": "which side of the girl is the boy standing on?\n<image>"}, {"from": "gpt", "value": "standing in front of the girl"}]
00000004558.jpg
4558_evjvqa
[{"from": "human", "value": "what are two people doing?\n<image>"}, {"from": "gpt", "value": "are about to kiss each other"}]
00000004559.jpg
4559_evjvqa
[{"from": "human", "value": "how many people are sitting on the stairs?\n<image>"}, {"from": "gpt", "value": "there is a person sitting on the stairs"}]
00000004559.jpg
4559_evjvqa
[{"from": "human", "value": "what area are people standing in?\n<image>"}, {"from": "gpt", "value": "people are standing in front of a temple"}]
00000004559.jpg
4559_evjvqa
[{"from": "human", "value": "on which side of the girl sitting is the boy standing?\n<image>"}, {"from": "gpt", "value": "stand to the left of the girl"}]
00000004561.jpg
4561_evjvqa
[{"from": "human", "value": "what color is the basket?\n<image>"}, {"from": "gpt", "value": "the basket is blue"}]
00000004561.jpg
4561_evjvqa
[{"from": "human", "value": "how many handles does the basket have?\n<image>"}, {"from": "gpt", "value": "the basket has two handles"}]
00000004562.jpg
4562_evjvqa
[{"from": "human", "value": "what area is this?\n<image>"}, {"from": "gpt", "value": "this is a market"}]
00000004562.jpg
4562_evjvqa
[{"from": "human", "value": "which stall is the man in the hat standing at?\n<image>"}, {"from": "gpt", "value": "standing at the fruit stall"}]
00000004562.jpg
4562_evjvqa
[{"from": "human", "value": "what kind of fruit is on the left-hand side of the man?\n<image>"}, {"from": "gpt", "value": "on the left are grapefruits"}]
00000004566.jpg
4566_evjvqa
[{"from": "human", "value": "what is everyone doing?\n<image>"}, {"from": "gpt", "value": "getting ready to paddle canoes to sea"}]
00000004566.jpg
4566_evjvqa
[{"from": "human", "value": "how many red canoes are there?\n<image>"}, {"from": "gpt", "value": "there is one red canoe"}]
00000004567.jpg
4567_evjvqa
[{"from": "human", "value": "how many motorbikes are running on the road?\n<image>"}, {"from": "gpt", "value": "there is a motorbike running on the road"}]
00000004569.jpg
4569_evjvqa
[{"from": "human", "value": "what area is this?\n<image>"}, {"from": "gpt", "value": "this is a seaport"}]
00000004569.jpg
4569_evjvqa
[{"from": "human", "value": "what is the weather like at the seaport?\n<image>"}, {"from": "gpt", "value": "it is early in the morning"}]
00000004569.jpg
4569_evjvqa
[{"from": "human", "value": "what are the cranes doing?\n<image>"}, {"from": "gpt", "value": "moving the boxes on the ships"}]
00000004570.jpg
4570_evjvqa
[{"from": "human", "value": "where are the paintings put?\n<image>"}, {"from": "gpt", "value": "put in a museum"}]
00000004570.jpg
4570_evjvqa
[{"from": "human", "value": "what artefacts are exhibited in the showroom?\n<image>"}, {"from": "gpt", "value": "there are famous paintings exhibited in the room"}]
00000004570.jpg
4570_evjvqa
[{"from": "human", "value": "how many drawings are exhibited next to the entrance?\n<image>"}, {"from": "gpt", "value": "there are two drawings hanging on either side of the entrance"}]
00000004572.jpg
4572_evjvqa
[{"from": "human", "value": "what color top is the tour guide wearing?\n<image>"}, {"from": "gpt", "value": "wearing a blue top"}]
00000004572.jpg
4572_evjvqa
[{"from": "human", "value": "what are the students doing?\n<image>"}, {"from": "gpt", "value": "raising hands up high following the guide"}]
00000004574.jpg
4574_evjvqa
[{"from": "human", "value": "how many circular windows does the house have?\n<image>"}, {"from": "gpt", "value": "the house has two circular windows"}]
00000004574.jpg
4574_evjvqa
[{"from": "human", "value": "how many sets of tables and chairs are there in the kitchen of the house?\n<image>"}, {"from": "gpt", "value": "there are three sets of tables and chairs in the kitchen"}]
00000004574.jpg
4574_evjvqa
[{"from": "human", "value": "what is the main color of the house?\n<image>"}, {"from": "gpt", "value": "the main color of the house is yellow"}]
00000004578.jpg
4578_evjvqa
[{"from": "human", "value": "what products does the store sell?\n<image>"}, {"from": "gpt", "value": "the store sells all kinds of sports equipment"}]
00000004578.jpg
4578_evjvqa
[{"from": "human", "value": "how many motorbikes are parked next to the shop?\n<image>"}, {"from": "gpt", "value": "there is a motorbike parked next to the shop"}]
00000004578.jpg
4578_evjvqa
[{"from": "human", "value": "what kind of products are displayed on the left side of the store?\n<image>"}, {"from": "gpt", "value": "the left side of the store displays all kinds of treadmills"}]
00000004579.jpg
4579_evjvqa
[{"from": "human", "value": "what products does the store sell?\n<image>"}, {"from": "gpt", "value": "the store sells all kinds of clothes and fabrics"}]
00000004579.jpg
4579_evjvqa
[{"from": "human", "value": "on which side are the clothes sold?\n<image>"}, {"from": "gpt", "value": "the clothes are sold on the right side"}]
00000004579.jpg
4579_evjvqa
[{"from": "human", "value": "where is the fabric located in the store?\n<image>"}, {"from": "gpt", "value": "the fabric is placed on the left side of the store"}]
00000004589.jpg
4589_evjvqa
[{"from": "human", "value": "what color shirt is the man buying?\n<image>"}, {"from": "gpt", "value": "wearing a gray shirt"}]
00000004589.jpg
4589_evjvqa
[{"from": "human", "value": "where is the male adult standing?\n<image>"}, {"from": "gpt", "value": "standing in a supermarket"}]
00000004590.jpg
4590_evjvqa
[{"from": "human", "value": "where is the group of people standing?\n<image>"}, {"from": "gpt", "value": "standing in a museum"}]
00000004590.jpg
4590_evjvqa
[{"from": "human", "value": "what is the man doing?\n<image>"}, {"from": "gpt", "value": "giving a presentation to the students"}]
00000004590.jpg
4590_evjvqa
[{"from": "human", "value": "what is the little girl using her phone to take pictures of?\n<image>"}, {"from": "gpt", "value": "take a picture of an ancient statue"}]
00000004591.jpg
4591_evjvqa
[{"from": "human", "value": "wwhat is the bag made of?\n<image>"}, {"from": "gpt", "value": "made of leather"}]
00000004591.jpg
4591_evjvqa
[{"from": "human", "value": "what color is the bag?\n<image>"}, {"from": "gpt", "value": "brown"}]
00000004594.jpg
4594_evjvqa
[{"from": "human", "value": "how many people are standing on the chairs?\n<image>"}, {"from": "gpt", "value": "there are two people standing on the chairs"}]
00000004594.jpg
4594_evjvqa
[{"from": "human", "value": "what is the woman standing on the stool doing?\n<image>"}, {"from": "gpt", "value": "pulling out the roof of the shop"}]
00000001800.jpg
1800_evjvqa
[{"from": "human", "value": "where are people posing for pictures?\n<image>"}, {"from": "gpt", "value": "next to the painted wall"}]
00000001800.jpg
1800_evjvqa
[{"from": "human", "value": "how many motorbikes are parked on the sidewalk opposite the wall?\n<image>"}, {"from": "gpt", "value": "five"}]
00000001800.jpg
1800_evjvqa
[{"from": "human", "value": "how many motorbikes are there with people sitting on them?\n<image>"}, {"from": "gpt", "value": "three"}]
00000001800.jpg
1800_evjvqa
[{"from": "human", "value": "how many bicycles are there in the painting on the wall?\n<image>"}, {"from": "gpt", "value": "one"}]
00000001801.jpg
1801_evjvqa
[{"from": "human", "value": "what color is the car parked in the distance?\n<image>"}, {"from": "gpt", "value": "black"}]
00000001801.jpg
1801_evjvqa
[{"from": "human", "value": "what is the road surface covered with?\n<image>"}, {"from": "gpt", "value": "leaves"}]
00000001801.jpg
1801_evjvqa
[{"from": "human", "value": "how many lampposts are there on the road?\n<image>"}, {"from": "gpt", "value": "two"}]
00000001805.jpg
1805_evjvqa
[{"from": "human", "value": "which direction is the cyclo going?\n<image>"}, {"from": "gpt", "value": "to the city gate"}]
00000001805.jpg
1805_evjvqa
[{"from": "human", "value": "who are selling around the sidewalk on the left?\n<image>"}, {"from": "gpt", "value": "there are three women selling"}]
00000001805.jpg
1805_evjvqa
[{"from": "human", "value": "which sidewalk is the woman with the pedlar selling goods sitting on?\n<image>"}, {"from": "gpt", "value": "right sidewalk"}]
00000001805.jpg
1805_evjvqa
[{"from": "human", "value": "how many cyclos are going to the city gate?\n<image>"}, {"from": "gpt", "value": "there is a cyclo going to the city gate"}]
00000001807.jpg
1807_evjvqa
[{"from": "human", "value": "how many people are standing in the gallery?\n<image>"}, {"from": "gpt", "value": "two people"}]
00000001807.jpg
1807_evjvqa
[{"from": "human", "value": "what objects does the museum display?\n<image>"}, {"from": "gpt", "value": "bones and fossils"}]
00000001807.jpg
1807_evjvqa
[{"from": "human", "value": "what objects does this room display?\n<image>"}, {"from": "gpt", "value": "bones and fossils"}]
00000001807.jpg
1807_evjvqa
[{"from": "human", "value": "how many people are watching in the gallery?\n<image>"}, {"from": "gpt", "value": "two"}]
00000001808.jpg
1808_evjvqa
[{"from": "human", "value": "how many models of boat are there?\n<image>"}, {"from": "gpt", "value": "two models"}]
00000001808.jpg
1808_evjvqa
[{"from": "human", "value": "how many model boats carry flower pots?\n<image>"}, {"from": "gpt", "value": "one"}]
00000001809.jpg
1809_evjvqa
[{"from": "human", "value": "what stall is the man in the blue shirt standing at?\n<image>"}, {"from": "gpt", "value": "watermelon"}]
00000001809.jpg
1809_evjvqa
[{"from": "human", "value": "where is the man in the blue shirt standing?\n<image>"}, {"from": "gpt", "value": "watermelon stall"}]
00000001809.jpg
1809_evjvqa
[{"from": "human", "value": "what is the man in the blue shirt doing?\n<image>"}, {"from": "gpt", "value": "taking pictures"}]
00000001809.jpg
1809_evjvqa
[{"from": "human", "value": "who is the man in the blue shirt standing near?\n<image>"}, {"from": "gpt", "value": "a woman wearing a hat and a watch"}]
00000001814.jpg
1814_evjvqa
[{"from": "human", "value": "where are the two girls standing?\n<image>"}, {"from": "gpt", "value": "orchard"}]
00000001814.jpg
1814_evjvqa
[{"from": "human", "value": "what is the girl in the blue shirt holding in her hand?\n<image>"}, {"from": "gpt", "value": "conical hat"}]
00000001814.jpg
1814_evjvqa
[{"from": "human", "value": "where does the girl in the red shirt put the conical hat?\n<image>"}, {"from": "gpt", "value": "on her head"}]
00000001814.jpg
1814_evjvqa
[{"from": "human", "value": "how many girls stand here in the orchard?\n<image>"}, {"from": "gpt", "value": "two"}]
00000001814.jpg
1814_evjvqa
[{"from": "human", "value": "how are the two girls in the orchard dressed?\n<image>"}, {"from": "gpt", "value": "blouse"}]
00000001815.jpg
1815_evjvqa
[{"from": "human", "value": "what kind of products do people set up in front of the store?\n<image>"}, {"from": "gpt", "value": "fruit"}]
00000001815.jpg
1815_evjvqa
[{"from": "human", "value": "how many people are coming to buy fruit?\n<image>"}, {"from": "gpt", "value": "no one is coming to buy fruit"}]
00000001815.jpg
1815_evjvqa
[{"from": "human", "value": "how many people are standing near the fruit stall?\n<image>"}, {"from": "gpt", "value": "one person"}]
00000001819.jpg
1819_evjvqa
[{"from": "human", "value": "what is the counter right behind the counter selling food made from dragon fruit?\n<image>"}, {"from": "gpt", "value": "dragon fruit counter"}]
00000001820.jpg
1820_evjvqa
[{"from": "human", "value": "what is the man doing?\n<image>"}, {"from": "gpt", "value": "repairing the roof"}]
00000001820.jpg
1820_evjvqa
[{"from": "human", "value": "where is the man sitting?\n<image>"}, {"from": "gpt", "value": "on the roof"}]
00000001820.jpg
1820_evjvqa
[{"from": "human", "value": "what is the man sitting on the roof doing?\n<image>"}, {"from": "gpt", "value": "repairing the roof"}]
00000001821.jpg
1821_evjvqa
[{"from": "human", "value": "how many boats are on the water?\n<image>"}, {"from": "gpt", "value": "two"}]
00000001821.jpg
1821_evjvqa
[{"from": "human", "value": "how many people are on the boat nearby?\n<image>"}, {"from": "gpt", "value": "two"}]
00000001821.jpg
1821_evjvqa
[{"from": "human", "value": "how many people are sitting on the boat in the distance?\n<image>"}, {"from": "gpt", "value": "one"}]
00000001823.jpg
1823_evjvqa
[{"from": "human", "value": "where is the boy standing?\n<image>"}, {"from": "gpt", "value": "on a mountain"}]
00000001823.jpg
1823_evjvqa
[{"from": "human", "value": "where is the boy visiting?\n<image>"}, {"from": "gpt", "value": "on top of a mountain"}]
00000001826.jpg
1826_evjvqa
[{"from": "human", "value": "where is the girl in the black shirt near the woman standing?\n<image>"}, {"from": "gpt", "value": "vegetable counter"}]