image (string, length 15) | id (string, length 8–12) | conversations (string, length 79–268) |
---|---|---|
00000003476.jpg | 3476_evjvqa | [{"from": "human", "value": "what are these two doing?\n<image>"}, {"from": "gpt", "value": "sitting and eating"}] |
00000003476.jpg | 3476_evjvqa | [{"from": "human", "value": "what vehicle do these two travel by?\n<image>"}, {"from": "gpt", "value": "by motorbike"}] |
00000003476.jpg | 3476_evjvqa | [{"from": "human", "value": "where is the boy wearing a white shirt parking the motorbike?\n<image>"}, {"from": "gpt", "value": "right next to where the girl is sitting"}] |
00000003476.jpg | 3476_evjvqa | [{"from": "human", "value": "who is the girl in the white dress sitting with?\n<image>"}, {"from": "gpt", "value": "with a boy"}] |
00000003476.jpg | 3476_evjvqa | [{"from": "human", "value": "who is the woman in the white dress eating out with?\n<image>"}, {"from": "gpt", "value": "with a boy sitting next to her"}] |
00000003476.jpg | 3476_evjvqa | [{"from": "human", "value": "what vehicle do the girl travel to the restaurant with the boy by?\n<image>"}, {"from": "gpt", "value": "by motorbike"}] |
00000003476.jpg | 3476_evjvqa | [{"from": "human", "value": "where is the motorcycle parked?\n<image>"}, {"from": "gpt", "value": "in front of a restaurant"}] |
00000003476.jpg | 3476_evjvqa | [{"from": "human", "value": "where do these two park their motorbike?\n<image>"}, {"from": "gpt", "value": "next to the seat"}] |
00000003476.jpg | 3476_evjvqa | [{"from": "human", "value": "what vehicle does the boy travel to eat out with the girl by?\n<image>"}, {"from": "gpt", "value": "the boy travels to eat out with the girl by motorbike"}] |
00000003477.jpg | 3477_evjvqa | [{"from": "human", "value": "what color umbrella are the two women using?\n<image>"}, {"from": "gpt", "value": "pink umbrella"}] |
00000003477.jpg | 3477_evjvqa | [{"from": "human", "value": "how many people are using umbrella?\n<image>"}, {"from": "gpt", "value": "two woman are using umbrella"}] |
00000003477.jpg | 3477_evjvqa | [{"from": "human", "value": "what color is the umbrella the two women are covering?\n<image>"}, {"from": "gpt", "value": "pink"}] |
00000003477.jpg | 3477_evjvqa | [{"from": "human", "value": "where are the two women walking?\n<image>"}, {"from": "gpt", "value": "walking on the sidewalk"}] |
00000003477.jpg | 3477_evjvqa | [{"from": "human", "value": "how many people are walking on the sidewalk?\n<image>"}, {"from": "gpt", "value": "three people are walking on the sidewalk"}] |
00000003478.jpg | 3478_evjvqa | [{"from": "human", "value": "where are these people visiting?\n<image>"}, {"from": "gpt", "value": "at a historical site"}] |
00000003481.jpg | 3481_evjvqa | [{"from": "human", "value": "what stall is the woman in the black shirt standing at?\n<image>"}, {"from": "gpt", "value": "fish sauce stall"}] |
00000003481.jpg | 3481_evjvqa | [{"from": "human", "value": "who is the woman wearing the black shirt buying fish sauce with?\n<image>"}, {"from": "gpt", "value": "with the man holding the red bag"}] |
00000003481.jpg | 3481_evjvqa | [{"from": "human", "value": "where is the man standing?\n<image>"}, {"from": "gpt", "value": "in a merchandise display area"}] |
00000003481.jpg | 3481_evjvqa | [{"from": "human", "value": "who is the man at the fish sauce stall with?\n<image>"}, {"from": "gpt", "value": "with the woman with the blue bag"}] |
00000003481.jpg | 3481_evjvqa | [{"from": "human", "value": "what is the woman doing?\n<image>"}, {"from": "gpt", "value": "buying fish sauce"}] |
00000003481.jpg | 3481_evjvqa | [{"from": "human", "value": "how many people come to see and buy at the fish sauce stall?\n<image>"}, {"from": "gpt", "value": "a man holding the red bag and a woman with the blue bag"}] |
00000003481.jpg | 3481_evjvqa | [{"from": "human", "value": "what is the woman in the pink shirt doing?\n<image>"}, {"from": "gpt", "value": "rearranging the fish sauce bottles"}] |
00000003481.jpg | 3481_evjvqa | [{"from": "human", "value": "where is the woman in the pink shirt standing?\n<image>"}, {"from": "gpt", "value": "at the fish sauce stall"}] |
00000003492.jpg | 3492_evjvqa | [{"from": "human", "value": "what are these three girls doing?\n<image>"}, {"from": "gpt", "value": "posing for a photo"}] |
00000003492.jpg | 3492_evjvqa | [{"from": "human", "value": "what is the girl in the black shirt doing?\n<image>"}, {"from": "gpt", "value": "holding a phone to take a picture"}] |
00000003492.jpg | 3492_evjvqa | [{"from": "human", "value": "where are these three girls taking pictures?\n<image>"}, {"from": "gpt", "value": "where there are three decorated christmas trees"}] |
00000003492.jpg | 3492_evjvqa | [{"from": "human", "value": "how many girls are taking pictures with the pine trees?\n<image>"}, {"from": "gpt", "value": "three girls"}] |
00000003492.jpg | 3492_evjvqa | [{"from": "human", "value": "who is the girl in the red shirt taking pictures with?\n<image>"}, {"from": "gpt", "value": "with the girl in the white shirt and the girl in the black shirt"}] |
00000003492.jpg | 3492_evjvqa | [{"from": "human", "value": "who is the girl in the white shirt taking pictures with?\n<image>"}, {"from": "gpt", "value": "with the girl in the red shirt and the girl in the black shirt"}] |
00000003492.jpg | 3492_evjvqa | [{"from": "human", "value": "what does the girl wearing the black shirt take pictures with?\n<image>"}, {"from": "gpt", "value": "takes pictures with phone"}] |
00000003492.jpg | 3492_evjvqa | [{"from": "human", "value": "what do these three girls use to take pictures?\n<image>"}, {"from": "gpt", "value": "use the phone"}] |
00000003492.jpg | 3492_evjvqa | [{"from": "human", "value": "who is the woman in the black shirt taking pictures for?\n<image>"}, {"from": "gpt", "value": "for the girl wearing the red shirt and the girl wearing the white shirt"}] |
00000003492.jpg | 3492_evjvqa | [{"from": "human", "value": "what occasion do these three girls go to take pictures?\n<image>"}, {"from": "gpt", "value": "on the occasion of christmas"}] |
00000003494.jpg | 3494_evjvqa | [{"from": "human", "value": "what specimens does this area display?\n<image>"}, {"from": "gpt", "value": "display a skeleton"}] |
00000003494.jpg | 3494_evjvqa | [{"from": "human", "value": "where are these people visiting?\n<image>"}, {"from": "gpt", "value": "in a fossil skeleton gallery in a museum"}] |
00000003494.jpg | 3494_evjvqa | [{"from": "human", "value": "what does this room display?\n<image>"}, {"from": "gpt", "value": "display a whale skeleton"}] |
00000003494.jpg | 3494_evjvqa | [{"from": "human", "value": "where are the two women going?\n<image>"}, {"from": "gpt", "value": "in a whale skeleton gallery"}] |
00000003494.jpg | 3494_evjvqa | [{"from": "human", "value": "where is this whale skeleton placed?\n<image>"}, {"from": "gpt", "value": "in a gallery in a museum"}] |
00000003497.jpg | 3497_evjvqa | [{"from": "human", "value": "where are these kids playing?\n<image>"}, {"from": "gpt", "value": "at the seaside"}] |
00000003497.jpg | 3497_evjvqa | [{"from": "human", "value": "where are the kids traveling?\n<image>"}, {"from": "gpt", "value": "traveling at sea"}] |
00000004500.jpg | 4500_evjvqa | [{"from": "human", "value": "what color is the man behind the carriage wearing?\n<image>"}, {"from": "gpt", "value": "wearing a red shirt"}] |
00000004500.jpg | 4500_evjvqa | [{"from": "human", "value": "how many traffic signs are there existing the red color?\n<image>"}, {"from": "gpt", "value": "there is a traffic sign existing the red color"}] |
00000004504.jpg | 4504_evjvqa | [{"from": "human", "value": "how many people are sitting by the river?\n<image>"}, {"from": "gpt", "value": "there are two people sitting by the river"}] |
00000004504.jpg | 4504_evjvqa | [{"from": "human", "value": "which hand is the girl holding the phone in?\n<image>"}, {"from": "gpt", "value": "holding the phone with the left hand"}] |
00000004504.jpg | 4504_evjvqa | [{"from": "human", "value": "which side of the other person does the one holding the phone by the river sit on?\n<image>"}, {"from": "gpt", "value": "on the left of the other"}] |
00000004506.jpg | 4506_evjvqa | [{"from": "human", "value": "what color is the gate of the building?\n<image>"}, {"from": "gpt", "value": "the gate is blue"}] |
00000004506.jpg | 4506_evjvqa | [{"from": "human", "value": "how many people are in the group taking photos?\n<image>"}, {"from": "gpt", "value": "the group of people taking photos has four people"}] |
00000004506.jpg | 4506_evjvqa | [{"from": "human", "value": "which hand is the person holding the phone to take photos use to hold the phone?\n<image>"}, {"from": "gpt", "value": "holding with left hand"}] |
00000004507.jpg | 4507_evjvqa | [{"from": "human", "value": "what color pants does the man model wear?\n<image>"}, {"from": "gpt", "value": "wear blue pants"}] |
00000004507.jpg | 4507_evjvqa | [{"from": "human", "value": "how is the market?\n<image>"}, {"from": "gpt", "value": "the market is very crowded"}] |
00000004507.jpg | 4507_evjvqa | [{"from": "human", "value": "where is the man model placed?\n<image>"}, {"from": "gpt", "value": "placed in the center of the market"}] |
00000004509.jpg | 4509_evjvqa | [{"from": "human", "value": "what color shirt is the woman wearing?\n<image>"}, {"from": "gpt", "value": "wear a white ao dai"}] |
00000004509.jpg | 4509_evjvqa | [{"from": "human", "value": "what is the woman in white doing?\n<image>"}, {"from": "gpt", "value": "telephoning"}] |
00000004510.jpg | 4510_evjvqa | [{"from": "human", "value": "where is the statue located?\n<image>"}, {"from": "gpt", "value": "located in a museum"}] |
00000004510.jpg | 4510_evjvqa | [{"from": "human", "value": "what exhibits does this area display?\n<image>"}, {"from": "gpt", "value": "exhibiting works of art"}] |
00000004510.jpg | 4510_evjvqa | [{"from": "human", "value": "how many human figures does the statue contain?\n<image>"}, {"from": "gpt", "value": "the statue is composed of two people"}] |
00000004511.jpg | 4511_evjvqa | [{"from": "human", "value": "what color shirt is the woman riding a bicycle wearing?\n<image>"}, {"from": "gpt", "value": "wearing a yellow shirt"}] |
00000004511.jpg | 4511_evjvqa | [{"from": "human", "value": "which hand is the woman holding the plastic bag in?\n<image>"}, {"from": "gpt", "value": "holding in the left hand"}] |
00000004511.jpg | 4511_evjvqa | [{"from": "human", "value": "what color shirt is the woman sitting at the vegetable stall on the right wearing?\n<image>"}, {"from": "gpt", "value": "wear a blue shirt"}] |
00000004513.jpg | 4513_evjvqa | [{"from": "human", "value": "what area are people in?\n<image>"}, {"from": "gpt", "value": "in the supermarket"}] |
00000004513.jpg | 4513_evjvqa | [{"from": "human", "value": "what is everyone doing?\n<image>"}, {"from": "gpt", "value": "shopping in the supermarket"}] |
00000004513.jpg | 4513_evjvqa | [{"from": "human", "value": "how is the situation in the supermarket?\n<image>"}, {"from": "gpt", "value": "the supermarket is very crowded right now"}] |
00000004516.jpg | 4516_evjvqa | [{"from": "human", "value": "what is the woman doing?\n<image>"}, {"from": "gpt", "value": "calculating the bill"}] |
00000004516.jpg | 4516_evjvqa | [{"from": "human", "value": "how many customers are shopping at the woman's store?\n<image>"}, {"from": "gpt", "value": "there is a customer shopping at the store"}] |
00000004518.jpg | 4518_evjvqa | [{"from": "human", "value": "what color top is the man sitting on the roof of the car wearing?\n<image>"}, {"from": "gpt", "value": "wear a blue top"}] |
00000004518.jpg | 4518_evjvqa | [{"from": "human", "value": "how many people are on the left side of the bus?\n<image>"}, {"from": "gpt", "value": "there is a person standing on the left side of the bus"}] |
00000004520.jpg | 4520_evjvqa | [{"from": "human", "value": "what color is the supermarket staff uniform?\n<image>"}, {"from": "gpt", "value": "the uniform is blue"}] |
00000004520.jpg | 4520_evjvqa | [{"from": "human", "value": "in which hand is the employee holding the cart?\n<image>"}, {"from": "gpt", "value": "holding the shopping cart in her left hand"}] |
00000004520.jpg | 4520_evjvqa | [{"from": "human", "value": "what is the staff doing?\n<image>"}, {"from": "gpt", "value": "inspecting the goods"}] |
00000004521.jpg | 4521_evjvqa | [{"from": "human", "value": "what area is this?\n<image>"}, {"from": "gpt", "value": "this is a temple"}] |
00000004521.jpg | 4521_evjvqa | [{"from": "human", "value": "how many pillars are there at the entrance to the temple?\n<image>"}, {"from": "gpt", "value": "the entrance has four pillars"}] |
00000004521.jpg | 4521_evjvqa | [{"from": "human", "value": "what color is the roof of the temple?\n<image>"}, {"from": "gpt", "value": "the roof is red"}] |
00000004522.jpg | 4522_evjvqa | [{"from": "human", "value": "where are two girls standing?\n<image>"}, {"from": "gpt", "value": "standing in a model gallery"}] |
00000004522.jpg | 4522_evjvqa | [{"from": "human", "value": "what color backpack is the girl on the right carrying?\n<image>"}, {"from": "gpt", "value": "the girl is carrying a black backpack"}] |
00000004522.jpg | 4522_evjvqa | [{"from": "human", "value": "what are the models in front of two girls?\n<image>"}, {"from": "gpt", "value": "in front of the two girls are animal models"}] |
00000004524.jpg | 4524_evjvqa | [{"from": "human", "value": "where are people standing?\n<image>"}, {"from": "gpt", "value": "standing in the supermarket"}] |
00000004524.jpg | 4524_evjvqa | [{"from": "human", "value": "what is the guy doing?\n<image>"}, {"from": "gpt", "value": "selecting an item"}] |
00000004524.jpg | 4524_evjvqa | [{"from": "human", "value": "where does the young male stand compared to the two women?\n<image>"}, {"from": "gpt", "value": "the young male standing between two women"}] |
00000004528.jpg | 4528_evjvqa | [{"from": "human", "value": "what does the photo depict?\n<image>"}, {"from": "gpt", "value": "the photo depicts a floating market"}] |
00000004528.jpg | 4528_evjvqa | [{"from": "human", "value": "what do the boats on the market primarily carry?\n<image>"}, {"from": "gpt", "value": "the cargo boats primarily carry flowers and fruits"}] |
00000004528.jpg | 4528_evjvqa | [{"from": "human", "value": "what color are the flowers on the cargo boats?\n<image>"}, {"from": "gpt", "value": "flowers are yellow"}] |
00000004531.jpg | 4531_evjvqa | [{"from": "human", "value": "what are the kids doing?\n<image>"}, {"from": "gpt", "value": "going to hide in an underground tunnel"}] |
00000004531.jpg | 4531_evjvqa | [{"from": "human", "value": "how many children are at the crater?\n<image>"}, {"from": "gpt", "value": "there are four children at the crater"}] |
00000004531.jpg | 4531_evjvqa | [{"from": "human", "value": "what shape is the mouth of the hole?\n<image>"}, {"from": "gpt", "value": "the mouths of the holes are round"}] |
00000004536.jpg | 4536_evjvqa | [{"from": "human", "value": "how is the market?\n<image>"}, {"from": "gpt", "value": "the market is very crowded"}] |
00000004536.jpg | 4536_evjvqa | [{"from": "human", "value": "what color shirt is the woman with the scarf on head wearing?\n<image>"}, {"from": "gpt", "value": "wearing a black shirt"}] |
00000004536.jpg | 4536_evjvqa | [{"from": "human", "value": "where is the woman in the blue-and-white jacket sitting?\n<image>"}, {"from": "gpt", "value": "sitting at the vegetable stall"}] |
00000004537.jpg | 4537_evjvqa | [{"from": "human", "value": "what color shirt are the people holding the placards wearing?\n<image>"}, {"from": "gpt", "value": "wearing red t-shirts"}] |
00000004537.jpg | 4537_evjvqa | [{"from": "human", "value": "what color shirt are the row wearing in front of the people holding placards?\n<image>"}, {"from": "gpt", "value": "wearing blue t-shirts"}] |
00000004537.jpg | 4537_evjvqa | [{"from": "human", "value": "how many black people are there wearing white shirt?\n<image>"}, {"from": "gpt", "value": "there are two black people wearing white t-shirts"}] |
00000004541.jpg | 4541_evjvqa | [{"from": "human", "value": "how many umbrellas are there in front of the shop?\n<image>"}, {"from": "gpt", "value": "there are two umbrellas in front of the shop"}] |
00000004541.jpg | 4541_evjvqa | [{"from": "human", "value": "what color are the umbrellas in front of the shop?\n<image>"}, {"from": "gpt", "value": "the umbrellas are white"}] |
00000004541.jpg | 4541_evjvqa | [{"from": "human", "value": "where is the rabbit statue placed?\n<image>"}, {"from": "gpt", "value": "the rabbit statue is placed next to the doll on the table in front of the shop"}] |
00000004542.jpg | 4542_evjvqa | [{"from": "human", "value": "where is everyone?\n<image>"}, {"from": "gpt", "value": "in the supermarket"}] |
00000004542.jpg | 4542_evjvqa | [{"from": "human", "value": "what is the guy wearing an orange shirt doing?\n<image>"}, {"from": "gpt", "value": "cleaning the floor"}] |
00000004542.jpg | 4542_evjvqa | [{"from": "human", "value": "how many people wear blue shirts?\n<image>"}, {"from": "gpt", "value": "there are two people in blue"}] |
00000004543.jpg | 4543_evjvqa | [{"from": "human", "value": "what is the main color of the house?\n<image>"}, {"from": "gpt", "value": "the house's main color is yellow"}] |
00000004543.jpg | 4543_evjvqa | [{"from": "human", "value": "what color is the car parked next to the house?\n<image>"}, {"from": "gpt", "value": "the car is black"}] |
00000004543.jpg | 4543_evjvqa | [{"from": "human", "value": "how many circular windows does the house have?\n<image>"}, {"from": "gpt", "value": "the house has two circular windows"}] |
00000004545.jpg | 4545_evjvqa | [{"from": "human", "value": "what color shirt is the kid riding a bike with a flag wearing?\n<image>"}, {"from": "gpt", "value": "the child is in white shirt"}] |
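
A minimal sketch (not part of the dataset itself) of how a row like those above could be parsed: each `conversations` cell is a JSON list of turns with `"from"`/`"value"` fields and an `<image>` placeholder in the human turn, as shown in the rows. The variable names and the hard-coded example row below are illustrative assumptions, not an official loader for this dataset.

```python
import json

# Illustrative row copied from the table above; in practice it would come
# from the dataset loader (e.g. a CSV/JSONL iterator).
row = {
    "image": "00000003476.jpg",
    "id": "3476_evjvqa",
    "conversations": '[{"from": "human", "value": "what are these two doing?\\n<image>"}, '
                     '{"from": "gpt", "value": "sitting and eating"}]',
}

# Parse the JSON-encoded conversation and split it into a QA pair.
turns = json.loads(row["conversations"])
question = turns[0]["value"].replace("\n<image>", "").strip()
answer = turns[1]["value"]
print(question, "->", answer)  # what are these two doing? -> sitting and eating
```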