Columns: image (string, length 15) | id (string, length 8–12) | conversations (string, length 79–268)
00000004661.jpg
4661_evjvqa
[{"from": "human", "value": "where are people coming from?\n<image>"}, {"from": "gpt", "value": "coming out of the market"}]
00000004662.jpg
4662_evjvqa
[{"from": "human", "value": "how many motorbikes are on the road?\n<image>"}, {"from": "gpt", "value": "there are two motorbikes running on the road"}]
00000004662.jpg
4662_evjvqa
[{"from": "human", "value": "what color shirt is the man jogging wearing?\n<image>"}, {"from": "gpt", "value": "wearing a yellow shirt"}]
00000004662.jpg
4662_evjvqa
[{"from": "human", "value": "how many people are standing on the sidewalk?\n<image>"}, {"from": "gpt", "value": "there are two people standing on the sidewalk"}]
00000004664.jpg
4664_evjvqa
[{"from": "human", "value": "where is everyone?\n<image>"}, {"from": "gpt", "value": "in the supermarket"}]
00000004664.jpg
4664_evjvqa
[{"from": "human", "value": "what are people doing?\n<image>"}, {"from": "gpt", "value": "shopping in the supermarket"}]
00000004664.jpg
4664_evjvqa
[{"from": "human", "value": "where is the child standing?\n<image>"}, {"from": "gpt", "value": "standing in a supermarket trolley"}]
00000004666.jpg
4666_evjvqa
[{"from": "human", "value": "how many flags are flown by the lakeside?\n<image>"}, {"from": "gpt", "value": "there are three flags flying by the lakeside"}]
00000004667.jpg
4667_evjvqa
[{"from": "human", "value": "where is everyone?\n<image>"}, {"from": "gpt", "value": "in a supermarket"}]
00000004667.jpg
4667_evjvqa
[{"from": "human", "value": "what color are the supermarket staff uniforms?\n<image>"}, {"from": "gpt", "value": "the uniforms are orange"}]
00000004667.jpg
4667_evjvqa
[{"from": "human", "value": "what shelf is the man in the white t-shirt standing next to?\n<image>"}, {"from": "gpt", "value": "standing next to the milk shelf"}]
00000004671.jpg
4671_evjvqa
[{"from": "human", "value": "what is the boy holding in his hand?\n<image>"}, {"from": "gpt", "value": "holding banknotes in his hand"}]
00000004671.jpg
4671_evjvqa
[{"from": "human", "value": "what color is the car behind the guy?\n<image>"}, {"from": "gpt", "value": "the car is blue"}]
00000004671.jpg
4671_evjvqa
[{"from": "human", "value": "what is hanging on the rearview mirror of the motorcycle?\n<image>"}, {"from": "gpt", "value": "bags of food boxes are hung on the rearview mirror of the motorbike"}]
00000004672.jpg
4672_evjvqa
[{"from": "human", "value": "what area is this?\n<image>"}, {"from": "gpt", "value": "this is a village by the river"}]
00000004672.jpg
4672_evjvqa
[{"from": "human", "value": "how many boats are running?\n<image>"}, {"from": "gpt", "value": "there is one boat running"}]
00000004672.jpg
4672_evjvqa
[{"from": "human", "value": "what time of day is it in this view of the village?\n<image>"}, {"from": "gpt", "value": "it is the afternoon"}]
00000004674.jpg
4674_evjvqa
[{"from": "human", "value": "how many people are in the group?\n<image>"}, {"from": "gpt", "value": "there are four people in the group"}]
00000004674.jpg
4674_evjvqa
[{"from": "human", "value": "what is behind the group of people?\n<image>"}, {"from": "gpt", "value": "behind them is a house next to the lake"}]
00000004674.jpg
4674_evjvqa
[{"from": "human", "value": "how many people are wearing yellow shirts?\n<image>"}, {"from": "gpt", "value": "there is one man in a yellow t-shirt"}]
00000004676.jpg
4676_evjvqa
[{"from": "human", "value": "what area is this?\n<image>"}, {"from": "gpt", "value": "this is a food shelf"}]
00000004677.jpg
4677_evjvqa
[{"from": "human", "value": "how many people are sitting by the roadside?\n<image>"}, {"from": "gpt", "value": "there are two people sitting on the side of the road"}]
00000004677.jpg
4677_evjvqa
[{"from": "human", "value": "how many motorbikes are parked next to the road?\n<image>"}, {"from": "gpt", "value": "there are two motorbikes parked next to the road"}]
00000004678.jpg
4678_evjvqa
[{"from": "human", "value": "how many girls are standing behind the product desk?\n<image>"}, {"from": "gpt", "value": "there are three girls"}]
00000004678.jpg
4678_evjvqa
[{"from": "human", "value": "how many products is the guy holding?\n<image>"}, {"from": "gpt", "value": "the guy is holding two products"}]
00000004678.jpg
4678_evjvqa
[{"from": "human", "value": "on which side of the girls is the boy standing?\n<image>"}, {"from": "gpt", "value": "standing to the left of the girls"}]
00000004683.jpg
4683_evjvqa
[{"from": "human", "value": "what area is this?\n<image>"}, {"from": "gpt", "value": "this is a gallery of paintings"}]
00000004683.jpg
4683_evjvqa
[{"from": "human", "value": "how many chairs are placed in the room?\n<image>"}, {"from": "gpt", "value": "there is one chair placed in the room"}]
00000004683.jpg
4683_evjvqa
[{"from": "human", "value": "how many paintings are on display in the room?\n<image>"}, {"from": "gpt", "value": "there are three paintings on display in the room"}]
00000004690.jpg
4690_evjvqa
[{"from": "human", "value": "how many canoes are there on the river?\n<image>"}, {"from": "gpt", "value": "there are two canoes on the river"}]
00000004690.jpg
4690_evjvqa
[{"from": "human", "value": "how many people are driving the canoes?\n<image>"}, {"from": "gpt", "value": "there are four people driving the canoes"}]
00000004690.jpg
4690_evjvqa
[{"from": "human", "value": "how many people are on each boat?\n<image>"}, {"from": "gpt", "value": "there are two people on each boat"}]
00000004691.jpg
4691_evjvqa
[{"from": "human", "value": "how many people are rowing?\n<image>"}, {"from": "gpt", "value": "there are two people rowing"}]
00000004691.jpg
4691_evjvqa
[{"from": "human", "value": "what area is this?\n<image>"}, {"from": "gpt", "value": "this is a river in the woods"}]
00000004691.jpg
4691_evjvqa
[{"from": "human", "value": "which side is the person at the back of the boat rowing towards?\n<image>"}, {"from": "gpt", "value": "swinging the paddle to the right"}]
00000004694.jpg
4694_evjvqa
[{"from": "human", "value": "what area is this?\n<image>"}, {"from": "gpt", "value": "this is a market"}]
00000004694.jpg
4694_evjvqa
[{"from": "human", "value": "in which hand is the woman in polka dots holding the bag?\n<image>"}, {"from": "gpt", "value": "holding the bag in her left hand"}]
00000004694.jpg
4694_evjvqa
[{"from": "human", "value": "how many children are walking in the market?\n<image>"}, {"from": "gpt", "value": "there are two children walking in the market"}]
00000004695.jpg
4695_evjvqa
[{"from": "human", "value": "how many people are standing at the sweet potato stand?\n<image>"}, {"from": "gpt", "value": "there are three people standing at the sweet potato stand"}]
00000004695.jpg
4695_evjvqa
[{"from": "human", "value": "what area are the three people standing in?\n<image>"}, {"from": "gpt", "value": "three people are standing at the supermarket sweet potato stand"}]
00000004696.jpg
4696_evjvqa
[{"from": "human", "value": "how many girls are looking at the phone?\n<image>"}, {"from": "gpt", "value": "there are two girls looking at the phone"}]
00000004696.jpg
4696_evjvqa
[{"from": "human", "value": "where are the two girls sitting?\n<image>"}, {"from": "gpt", "value": "two girls are sitting in a pub"}]
00000004696.jpg
4696_evjvqa
[{"from": "human", "value": "what color shirt does the person sitting to the left of the two girls wear?\n<image>"}, {"from": "gpt", "value": "wearing a red shirt"}]
00000004697.jpg
4697_evjvqa
[{"from": "human", "value": "how many people are sitting in front of the house?\n<image>"}, {"from": "gpt", "value": "there is one person sitting in front of the house"}]
00000004697.jpg
4697_evjvqa
[{"from": "human", "value": "what color are the students' uniform coats?\n<image>"}, {"from": "gpt", "value": "the uniform coats are blue"}]
00000004697.jpg
4697_evjvqa
[{"from": "human", "value": "how many lanterns are hanging behind the man?\n<image>"}, {"from": "gpt", "value": "there are three lanterns hanging"}]
00000004699.jpg
4699_evjvqa
[{"from": "human", "value": "what area is this?\n<image>"}, {"from": "gpt", "value": "this is a market"}]
00000004699.jpg
4699_evjvqa
[{"from": "human", "value": "what color is the woman's apron?\n<image>"}, {"from": "gpt", "value": "the apron is yellow"}]
00000004699.jpg
4699_evjvqa
[{"from": "human", "value": "what color shirt is the child standing next to the clothes stall wearing?\n<image>"}, {"from": "gpt", "value": "wearing a white t-shirt"}]
00000009601.jpg
9601_evjvqa
[{"from": "human", "value": "how many tiers does the left shelf have?\n<image>"}, {"from": "gpt", "value": "the left shelf has four tiers"}]
00000009603.jpg
9603_evjvqa
[{"from": "human", "value": "where is this vegetable stall located?\n<image>"}, {"from": "gpt", "value": "at the corner of the street"}]
00000009605.jpg
9605_evjvqa
[{"from": "human", "value": "on which side of the bus is the truck running?\n<image>"}, {"from": "gpt", "value": "the truck is running behind and to the left of the bus"}]
00000009607.jpg
9607_evjvqa
[{"from": "human", "value": "what color shirt is the woman without eyeglasses wearing?\n<image>"}, {"from": "gpt", "value": "the woman without eyeglasses is wearing a green shirt"}]
00000009609.jpg
9609_evjvqa
[{"from": "human", "value": "what does the announcement board look like?\n<image>"}, {"from": "gpt", "value": "the announcement board is rectangular, with a red background and white text"}]
00000009610.jpg
9610_evjvqa
[{"from": "human", "value": "how many cups are held in the hands?\n<image>"}, {"from": "gpt", "value": "there are three cups held in the hands"}]
00000009612.jpg
9612_evjvqa
[{"from": "human", "value": "what color are the phoenix flowers on the branches?\n<image>"}, {"from": "gpt", "value": "the phoenix flowers on the branches are red"}]
00000009613.jpg
9613_evjvqa
[{"from": "human", "value": "what is the man wearing black shirt and glasses holding in his right hand?\n<image>"}, {"from": "gpt", "value": "he is holding a smartphone in his right hand"}]
00000009614.jpg
9614_evjvqa
[{"from": "human", "value": "what time of the day is it in this city area?\n<image>"}, {"from": "gpt", "value": "it is evening in this city area"}]
00000009617.jpg
9617_evjvqa
[{"from": "human", "value": "where is the man wearing a red shirt standing?\n<image>"}, {"from": "gpt", "value": "the man wearing a red shirt is standing in the leftmost position"}]
00000009620.jpg
9620_evjvqa
[{"from": "human", "value": "what shape is the yellow lantern?\n<image>"}, {"from": "gpt", "value": "the yellow lantern is a sphere"}]
00000009622.jpg
9622_evjvqa
[{"from": "human", "value": "how many joss sticks are stuck on the green ceremony table?\n<image>"}, {"from": "gpt", "value": "there are three joss sticks stuck on the green ceremony table"}]
00000009631.jpg
9631_evjvqa
[{"from": "human", "value": "where is the flag hung relative to the entrance?\n<image>"}, {"from": "gpt", "value": "the flag is hung to the left of the entrance"}]
00000009632.jpg
9632_evjvqa
[{"from": "human", "value": "what posture is the young man in the white shirt in?\n<image>"}, {"from": "gpt", "value": "the young man in the white shirt is in a sitting posture"}]
00000009633.jpg
9633_evjvqa
[{"from": "human", "value": "what is the man wearing a floppy hat doing?\n<image>"}, {"from": "gpt", "value": "he is carrying stuff"}]
00000009637.jpg
9637_evjvqa
[{"from": "human", "value": "what do the roadside stalls sell?\n<image>"}, {"from": "gpt", "value": "clothing"}]
00000009640.jpg
9640_evjvqa
[{"from": "human", "value": "what is the woman in the black shirt doing?\n<image>"}, {"from": "gpt", "value": "she is putting stuff into the cart"}]
00000009641.jpg
9641_evjvqa
[{"from": "human", "value": "what is the sauce served in?\n<image>"}, {"from": "gpt", "value": "the sauce is served in a yellow bowl"}]
00000009642.jpg
9642_evjvqa
[{"from": "human", "value": "how many loaves of bread are on the plate?\n<image>"}, {"from": "gpt", "value": "there are two loaves of bread on the plate"}]
00000009643.jpg
9643_evjvqa
[{"from": "human", "value": "where can the red lantern be seen?\n<image>"}, {"from": "gpt", "value": "the red lantern is hung on the gate of the house on the left"}]
00000009648.jpg
9648_evjvqa
[{"from": "human", "value": "where is the stainless steel chair?\n<image>"}, {"from": "gpt", "value": "the stainless steel chair is behind the woman"}]
00000009649.jpg
9649_evjvqa
[{"from": "human", "value": "what is the person between the two people in black shirts wearing?\n<image>"}, {"from": "gpt", "value": "the person between the two people in black shirts is wearing a red ao dai"}]
00000009650.jpg
9650_evjvqa
[{"from": "human", "value": "how many men are on the stage?\n<image>"}, {"from": "gpt", "value": "there are five men on the stage"}]
00000009651.jpg
9651_evjvqa
[{"from": "human", "value": "where is the girl in the black dress standing?\n<image>"}, {"from": "gpt", "value": "she is standing on the steps in front of the store"}]
00000009654.jpg
9654_evjvqa
[{"from": "human", "value": "what is this man doing?\n<image>"}, {"from": "gpt", "value": "he is pushing the cart"}]
00000009656.jpg
9656_evjvqa
[{"from": "human", "value": "what are these stalls selling?\n<image>"}, {"from": "gpt", "value": "selling clothing and fabric"}]
00000009659.jpg
9659_evjvqa
[{"from": "human", "value": "what color suit is the man wearing?\n<image>"}, {"from": "gpt", "value": "the man is wearing a white suit"}]
00000009660.jpg
9660_evjvqa
[{"from": "human", "value": "what is the woman sitting in front of the black scooter selling?\n<image>"}, {"from": "gpt", "value": "she is selling fruit"}]
00000009664.jpg
9664_evjvqa
[{"from": "human", "value": "where is the kid in the yellow shirt sitting?\n<image>"}, {"from": "gpt", "value": "the kid in the yellow shirt is sitting in a cart"}]
00000009667.jpg
9667_evjvqa
[{"from": "human", "value": "how many motorbikes are parked on the sidewalk?\n<image>"}, {"from": "gpt", "value": "there are three motorbikes parked on the sidewalk"}]
00000009669.jpg
9669_evjvqa
[{"from": "human", "value": "how many christmas trees are displayed in this store?\n<image>"}, {"from": "gpt", "value": "there are three christmas trees displayed in this store"}]
00000009670.jpg
9670_evjvqa
[{"from": "human", "value": "what color is the scooter running next to the traffic light?\n<image>"}, {"from": "gpt", "value": "that scooter is brown"}]
00000009671.jpg
9671_evjvqa
[{"from": "human", "value": "what color handbag is the woman in the yellow shirt carrying?\n<image>"}, {"from": "gpt", "value": "she is carrying a red handbag"}]
00000009672.jpg
9672_evjvqa
[{"from": "human", "value": "what is the man wearing on his face?\n<image>"}, {"from": "gpt", "value": "a face mask and a mic"}]
00000009673.jpg
9673_evjvqa
[{"from": "human", "value": "what color shirt is the woman selecting vegetables wearing?\n<image>"}, {"from": "gpt", "value": "she is wearing a green shirt"}]
00000009674.jpg
9674_evjvqa
[{"from": "human", "value": "what does the flag hung on the power pole look like?\n<image>"}, {"from": "gpt", "value": "the flag hung on the power pole is rectangular, with a red background and a yellow star"}]
00000009677.jpg
9677_evjvqa
[{"from": "human", "value": "what is this group of people coming here for?\n<image>"}, {"from": "gpt", "value": "for a tour"}]
00000009683.jpg
9683_evjvqa
[{"from": "human", "value": "how many men are wearing black vests?\n<image>"}, {"from": "gpt", "value": "there are three men wearing black vests"}]
00000009685.jpg
9685_evjvqa
[{"from": "human", "value": "where is this couple standing?\n<image>"}, {"from": "gpt", "value": "they are standing in front of the temple gate"}]
00000009686.jpg
9686_evjvqa
[{"from": "human", "value": "what color ao dai is the girl wearing?\n<image>"}, {"from": "gpt", "value": "she is wearing a red ao dai"}]
00000009687.jpg
9687_evjvqa
[{"from": "human", "value": "what color are the walls of this building painted?\n<image>"}, {"from": "gpt", "value": "the walls of this building are painted yellow"}]
00000009689.jpg
9689_evjvqa
[{"from": "human", "value": "what is this road covered by?\n<image>"}, {"from": "gpt", "value": "this road is covered by a canopy"}]
00000009690.jpg
9690_evjvqa
[{"from": "human", "value": "what is hung on the balcony and at the entrance?\n<image>"}, {"from": "gpt", "value": "red lanterns"}]
00000009691.jpg
9691_evjvqa
[{"from": "human", "value": "what color tiles is the floor of this grocery store tiled with?\n<image>"}, {"from": "gpt", "value": "the floor of this grocery store is tiled with white tiles"}]
00000009694.jpg
9694_evjvqa
[{"from": "human", "value": "how many pigeons are flapping their wings?\n<image>"}, {"from": "gpt", "value": "there are four pigeons flapping their wings"}]
00000009695.jpg
9695_evjvqa
[{"from": "human", "value": "what is the woman in the red shirt doing while talking?\n<image>"}, {"from": "gpt", "value": "she is talking on the cellphone"}]
00000009697.jpg
9697_evjvqa
[{"from": "human", "value": "what color cap is the man wearing?\n<image>"}, {"from": "gpt", "value": "he is wearing a pink cap"}]
00000009700.jpg
9700_evjvqa
[{"from": "human", "value": "what color is the scooter running last?\n<image>"}, {"from": "gpt", "value": "that scooter is blue"}]
00000009702.jpg
9702_evjvqa
[{"from": "human", "value": "who is standing by the street?\n<image>"}, {"from": "gpt", "value": "the firefighters are standing by the street"}]
00000009705.jpg
9705_evjvqa
[{"from": "human", "value": "what color are the letters on the signboard of the shop?\n<image>"}, {"from": "gpt", "value": "the letters on the signboard of the shop are white"}]
00000009708.jpg
9708_evjvqa
[{"from": "human", "value": "what is portrayed on the poster?\n<image>"}, {"from": "gpt", "value": "uncle ho holding a child"}]