Columns: image (string, 15 chars), id (string, 8-12 chars), conversations (JSON string, 79-268 chars)
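Each record below spans three lines: the image filename, the sample id, and the conversations cell, which is itself a JSON list of human/gpt turns. A minimal parsing sketch in Python, assuming this listing is saved verbatim to a plain-text file (the name evjvqa_rows.txt is hypothetical) with the schema line above as its first line:

import json
from itertools import islice

def read_records(path):
    # Yield one dict per three-line record: image filename, sample id,
    # conversations JSON. Assumes the flat layout shown in this listing.
    with open(path, encoding="utf-8") as f:
        lines = [line.rstrip("\n") for line in f if line.strip()]
    for image, sample_id, conv_json in zip(*[iter(lines[1:])] * 3):  # skip header
        yield {"image": image, "id": sample_id, "conversations": json.loads(conv_json)}

for rec in islice(read_records("evjvqa_rows.txt"), 2):
    q = rec["conversations"][0]["value"].replace("\n<image>", "").strip()
    a = rec["conversations"][1]["value"].strip()
    print(f'{rec["image"]} | {q} -> {a}')

Grouping with zip(*[iter(...)] * 3) simply consumes the flat line stream three at a time, so this sketch breaks if any record gains or loses a line.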
00000007398.jpg
7398_evjvqa
[{"from": "human", "value": "what color is the store assistant's apron?\n<image>"}, {"from": "gpt", "value": "it is dark green. "}]
00000007398.jpg
7398_evjvqa
[{"from": "human", "value": "how many bags of rice are red?\n<image>"}, {"from": "gpt", "value": "two of them are red. "}]
00000007399.jpg
7399_evjvqa
[{"from": "human", "value": "what color is her shirt?\n<image>"}, {"from": "gpt", "value": "it is black with polka dots."}]
00000007399.jpg
7399_evjvqa
[{"from": "human", "value": "what color is the young girl sitting behind the lady dressed in?\n<image>"}, {"from": "gpt", "value": "she is dressed in white. "}]
00000003408.jpg
3408_evjvqa
[{"from": "human", "value": "where is the girl in the black shirt standing?\n<image>"}, {"from": "gpt", "value": "on a balcony"}]
00000003408.jpg
3408_evjvqa
[{"from": "human", "value": "what color shirt does the girl standing on the balcony wear?\n<image>"}, {"from": "gpt", "value": "wears black shirt"}]
00000003409.jpg
3409_evjvqa
[{"from": "human", "value": "how many vehicles are coming here to fill up?\n<image>"}, {"from": "gpt", "value": "a vehicle is coming here to fill up"}]
00000003412.jpg
3412_evjvqa
[{"from": "human", "value": "where is this girl sitting?\n<image>"}, {"from": "gpt", "value": "on a tree branch"}]
00000003412.jpg
3412_evjvqa
[{"from": "human", "value": "where is this girl putting her hands on?\n<image>"}, {"from": "gpt", "value": "on a tree branch"}]
00000003412.jpg
3412_evjvqa
[{"from": "human", "value": "where is this girl?\n<image>"}, {"from": "gpt", "value": "on a tree branch in a garden"}]
00000003423.jpg
3423_evjvqa
[{"from": "human", "value": "how many trucks are ahead of the white car?\n<image>"}, {"from": "gpt", "value": "one truck are ahead of the white car"}]
00000003426.jpg
3426_evjvqa
[{"from": "human", "value": "what items does this store sell?\n<image>"}, {"from": "gpt", "value": "sells noel decorations"}]
00000003428.jpg
3428_evjvqa
[{"from": "human", "value": "where are these people going?\n<image>"}, {"from": "gpt", "value": "going shopping"}]
00000003428.jpg
3428_evjvqa
[{"from": "human", "value": "where are these people standing?\n<image>"}, {"from": "gpt", "value": "in the supermarket"}]
00000003428.jpg
3428_evjvqa
[{"from": "human", "value": "what products does this area mainly sell?\n<image>"}, {"from": "gpt", "value": "fruits and vegetables"}]
00000003428.jpg
3428_evjvqa
[{"from": "human", "value": "what do the counters in this area sell?\n<image>"}, {"from": "gpt", "value": "sell mainly fruits and vegetables"}]
00000003428.jpg
3428_evjvqa
[{"from": "human", "value": "what products do the counters here display?\n<image>"}, {"from": "gpt", "value": "display fruits and vegetables"}]
00000003430.jpg
3430_evjvqa
[{"from": "human", "value": "where is the man in the brown shirt walking at?\n<image>"}, {"from": "gpt", "value": "walking at a market"}]
00000003430.jpg
3430_evjvqa
[{"from": "human", "value": "where is the man in the brown shirt going?\n<image>"}, {"from": "gpt", "value": "going to the market to buy things"}]
00000003430.jpg
3430_evjvqa
[{"from": "human", "value": "what do the stalls on the roadside sell?\n<image>"}, {"from": "gpt", "value": "sell mainly fruits"}]
00000003430.jpg
3430_evjvqa
[{"from": "human", "value": "what do the stalls on the roadside sell?\n<image>"}, {"from": "gpt", "value": "sell fruits"}]
00000003431.jpg
3431_evjvqa
[{"from": "human", "value": "where are these men standing?\n<image>"}, {"from": "gpt", "value": "on a fishing boat"}]
00000003431.jpg
3431_evjvqa
[{"from": "human", "value": "what are these men doing?\n<image>"}, {"from": "gpt", "value": "preparing to trawl"}]
00000003432.jpg
3432_evjvqa
[{"from": "human", "value": "wwhere is this specimen placed?\n<image>"}, {"from": "gpt", "value": "in a glass cage in a gallery room"}]
00000003432.jpg
3432_evjvqa
[{"from": "human", "value": "what does this room display?\n<image>"}, {"from": "gpt", "value": "display the marine specimens"}]
00000003432.jpg
3432_evjvqa
[{"from": "human", "value": "where is this specimen preserved?\n<image>"}, {"from": "gpt", "value": "in a glass cage"}]
00000003434.jpg
3434_evjvqa
[{"from": "human", "value": "what vehicle is the woman wearing a conical hat traveling by?\n<image>"}, {"from": "gpt", "value": "the woman is traveling by bicycle"}]
00000003434.jpg
3434_evjvqa
[{"from": "human", "value": "what vehicle is the man in the red shirt traveling by?\n<image>"}, {"from": "gpt", "value": "the man is walking"}]
00000003434.jpg
3434_evjvqa
[{"from": "human", "value": "how many people are traveling by motorbike?\n<image>"}, {"from": "gpt", "value": "four people are traveling by motorbike"}]
00000003434.jpg
3434_evjvqa
[{"from": "human", "value": "how many motorbikes are runnng into the market?\n<image>"}, {"from": "gpt", "value": "three motorbikes are running into the market"}]
00000003434.jpg
3434_evjvqa
[{"from": "human", "value": "how many people are traveling by bicycle?\n<image>"}, {"from": "gpt", "value": "a woman wearing the conical hat is traveling by bicycle"}]
00000003434.jpg
3434_evjvqa
[{"from": "human", "value": "how many motorbikes are carrying only one person?\n<image>"}, {"from": "gpt", "value": "two motorbikes are carrying only one person"}]
00000003434.jpg
3434_evjvqa
[{"from": "human", "value": "how many motorbikes are carrying two people?\n<image>"}, {"from": "gpt", "value": "a motorbike is carrying two people"}]
00000003438.jpg
3438_evjvqa
[{"from": "human", "value": "where are these people hanging out?\n<image>"}, {"from": "gpt", "value": "in a decorated yard"}]
00000003438.jpg
3438_evjvqa
[{"from": "human", "value": "what occasion do these people go out for?\n<image>"}, {"from": "gpt", "value": "for christmas"}]
00000003438.jpg
3438_evjvqa
[{"from": "human", "value": "what holiday is the scene here decorated for?\n<image>"}, {"from": "gpt", "value": "christmas holiday"}]
00000003439.jpg
3439_evjvqa
[{"from": "human", "value": "how many lights are placed in this room?\n<image>"}, {"from": "gpt", "value": "one light is placed"}]
00000003439.jpg
3439_evjvqa
[{"from": "human", "value": "how many red chairs are there in the room?\n<image>"}, {"from": "gpt", "value": "one red chairs is in the room"}]
00000003442.jpg
3442_evjvqa
[{"from": "human", "value": "how many cars are parked in front of the restaurant?\n<image>"}, {"from": "gpt", "value": "two cars are parked in front of the restaurant"}]
00000003444.jpg
3444_evjvqa
[{"from": "human", "value": "what items does the stall where the woman in the blue shirt is going sell?\n<image>"}, {"from": "gpt", "value": "sells clothes"}]
00000003444.jpg
3444_evjvqa
[{"from": "human", "value": "what items does the stall where the woman in the blue shirt is passing sell?\n<image>"}, {"from": "gpt", "value": "sells clothes"}]
00000003444.jpg
3444_evjvqa
[{"from": "human", "value": "what vehicles do people travel to this street mainly by?\n<image>"}, {"from": "gpt", "value": "they mainly walk"}]
00000003445.jpg
3445_evjvqa
[{"from": "human", "value": "what is the man in the gray shirt buying?\n<image>"}, {"from": "gpt", "value": "buying glass and ceramic items"}]
00000003445.jpg
3445_evjvqa
[{"from": "human", "value": "what stall is this man standing at?\n<image>"}, {"from": "gpt", "value": "standing at a ceramics stall"}]
00000003445.jpg
3445_evjvqa
[{"from": "human", "value": "how many people are standing inside the ceramics stall?\n<image>"}, {"from": "gpt", "value": "a woman is standing inside the ceramics stall"}]
00000003446.jpg
3446_evjvqa
[{"from": "human", "value": "where do the stall owners in this market sit?\n<image>"}, {"from": "gpt", "value": "in front of their stall"}]
00000003446.jpg
3446_evjvqa
[{"from": "human", "value": "what items does this area sell?\n<image>"}, {"from": "gpt", "value": "mainly jam and sweets"}]
00000003446.jpg
3446_evjvqa
[{"from": "human", "value": "what do the stalls on both side of the way sell?\n<image>"}, {"from": "gpt", "value": "sell jam and sweets"}]
00000003449.jpg
3449_evjvqa
[{"from": "human", "value": "where does this gate lead?\n<image>"}, {"from": "gpt", "value": "leading into an alley"}]
00000003449.jpg
3449_evjvqa
[{"from": "human", "value": "what items does the shop on the left side of the gate leading into the alley sell in the courtyard?\n<image>"}, {"from": "gpt", "value": "sells banh mi"}]
00000003449.jpg
3449_evjvqa
[{"from": "human", "value": "what store is on the right of the gate leading into the alley?\n<image>"}, {"from": "gpt", "value": "fabric and clothing store"}]
00000003449.jpg
3449_evjvqa
[{"from": "human", "value": "what items do the store on the right of the gate leading into the alley sell?\n<image>"}, {"from": "gpt", "value": "sells fabrics and clothing"}]
00000003449.jpg
3449_evjvqa
[{"from": "human", "value": "what objects are placed in front of the store next to the alley?\n<image>"}, {"from": "gpt", "value": "two mannequins"}]
00000003449.jpg
3449_evjvqa
[{"from": "human", "value": "what items does the shop next to the store place the two mannequins in the yard sell?\n<image>"}, {"from": "gpt", "value": "sells clothing"}]
00000003449.jpg
3449_evjvqa
[{"from": "human", "value": "what does the front of the house on the left side of a gate leading into the alley sell?\n<image>"}, {"from": "gpt", "value": "sells banh mi"}]
00000003450.jpg
3450_evjvqa
[{"from": "human", "value": "where is this woman standing?\n<image>"}, {"from": "gpt", "value": "at the vegetable counter in a supermarket"}]
00000003450.jpg
3450_evjvqa
[{"from": "human", "value": "what counter is this woman standing and choosing goods at?\n<image>"}, {"from": "gpt", "value": "vegetable counter"}]
00000003450.jpg
3450_evjvqa
[{"from": "human", "value": "what is the woman in the black shirt intending to buy?\n<image>"}, {"from": "gpt", "value": "buy some vegetables"}]
00000003450.jpg
3450_evjvqa
[{"from": "human", "value": "what does the counter where the woman in the black shirt is standing sell?\n<image>"}, {"from": "gpt", "value": "sell fresh vegetables"}]
00000003451.jpg
3451_evjvqa
[{"from": "human", "value": "where are these people standing?\n<image>"}, {"from": "gpt", "value": "at the main lobby of a museum"}]
00000003455.jpg
3455_evjvqa
[{"from": "human", "value": "where are these trees planted?\n<image>"}, {"from": "gpt", "value": "on a sidewalk"}]
00000003456.jpg
3456_evjvqa
[{"from": "human", "value": "where are these two girls sitting?\n<image>"}, {"from": "gpt", "value": "on the rooftop of a cafe"}]
00000003456.jpg
3456_evjvqa
[{"from": "human", "value": "what are these two girls doing?\n<image>"}, {"from": "gpt", "value": "having drinks on the rooftop of a cafe"}]
00000003456.jpg
3456_evjvqa
[{"from": "human", "value": "how many people are sitting at the table on the rooftop?\n<image>"}, {"from": "gpt", "value": "two girls are sitting on the rooftop"}]
00000003456.jpg
3456_evjvqa
[{"from": "human", "value": "where is the table these two girls are sitting at in the cafe?\n<image>"}, {"from": "gpt", "value": "on the rooftop of the cafe"}]
00000003459.jpg
3459_evjvqa
[{"from": "human", "value": "what is this man doing?\n<image>"}, {"from": "gpt", "value": "standing and speaking"}]
00000003459.jpg
3459_evjvqa
[{"from": "human", "value": "where is this man standing?\n<image>"}, {"from": "gpt", "value": "standing in a meeting room"}]
00000003459.jpg
3459_evjvqa
[{"from": "human", "value": "what event is this man attending?\n<image>"}, {"from": "gpt", "value": "this man is attending a meeting"}]
00000003463.jpg
3463_evjvqa
[{"from": "human", "value": "what area is this?\n<image>"}, {"from": "gpt", "value": "this is the area of an ancient citadel"}]
00000003468.jpg
3468_evjvqa
[{"from": "human", "value": "what is the man in the blue shirt doing?\n<image>"}, {"from": "gpt", "value": "holding a camcorder"}]
00000003468.jpg
3468_evjvqa
[{"from": "human", "value": "how many people are using the camcorder?\n<image>"}, {"from": "gpt", "value": "a man wearing the blue shirt is using the camcorder"}]
00000003468.jpg
3468_evjvqa
[{"from": "human", "value": "what does the man in the blue shirt use the camcorder for?\n<image>"}, {"from": "gpt", "value": "for recording"}]
00000003468.jpg
3468_evjvqa
[{"from": "human", "value": "how many people are sitting at the table?\n<image>"}, {"from": "gpt", "value": "four people are sitting at the table"}]
00000003468.jpg
3468_evjvqa
[{"from": "human", "value": "where are these people sitting?\n<image>"}, {"from": "gpt", "value": "at a table in a cafe"}]
00000003468.jpg
3468_evjvqa
[{"from": "human", "value": "how many chairs are already occupied?\n<image>"}, {"from": "gpt", "value": "four chairs are already occupied"}]
00000003468.jpg
3468_evjvqa
[{"from": "human", "value": "how many unoccupied chairs are there?\n<image>"}, {"from": "gpt", "value": "there is an unoccupied chair"}]
00000003471.jpg
3471_evjvqa
[{"from": "human", "value": "how many people go to the market by bicycle?\n<image>"}, {"from": "gpt", "value": "a woman goes to the market by bicycle"}]
00000003471.jpg
3471_evjvqa
[{"from": "human", "value": "which direction is the woman leading the bicycle going?\n<image>"}, {"from": "gpt", "value": "going into the market"}]
00000003472.jpg
3472_evjvqa
[{"from": "human", "value": "how many people are walking on the railway?\n<image>"}, {"from": "gpt", "value": "a man is walking on the railway"}]
00000003472.jpg
3472_evjvqa
[{"from": "human", "value": "where is the man in the distance walking?\n<image>"}, {"from": "gpt", "value": "walking on the railway"}]
00000003473.jpg
3473_evjvqa
[{"from": "human", "value": "what is the woman in the red shirt standing near the fresh meat counter doing?\n<image>"}, {"from": "gpt", "value": "cleaning the floor"}]
00000003473.jpg
3473_evjvqa
[{"from": "human", "value": "what does the woman standing near the fresh meat counter do at the supermarket?\n<image>"}, {"from": "gpt", "value": "staff"}]
00000003473.jpg
3473_evjvqa
[{"from": "human", "value": "how many trolleys does the man in the red shirt going to the supermarket use?\n<image>"}, {"from": "gpt", "value": "use two trolleys"}]
00000003473.jpg
3473_evjvqa
[{"from": "human", "value": "what items did the man in the red shirt choose to put in the trolley?\n<image>"}, {"from": "gpt", "value": "the man choose vegetables"}]
00000003473.jpg
3473_evjvqa
[{"from": "human", "value": "what products does the counter opposite the vegetable counter sell?\n<image>"}, {"from": "gpt", "value": "sells fresh meat"}]
00000003473.jpg
3473_evjvqa
[{"from": "human", "value": "what products does the counter opposite the fresh meat counter sell?\n<image>"}, {"from": "gpt", "value": "sell vegetables"}]
00000003473.jpg
3473_evjvqa
[{"from": "human", "value": "where is the woman cleaning the floor?\n<image>"}, {"from": "gpt", "value": "near the fresh meat counter in the supermarket"}]
00000003473.jpg
3473_evjvqa
[{"from": "human", "value": "what is the woman in the pink shirt buying?\n<image>"}, {"from": "gpt", "value": "buying vegetables"}]
00000003473.jpg
3473_evjvqa
[{"from": "human", "value": "where are the supermarket staff in blue shirts standing?\n<image>"}, {"from": "gpt", "value": "near the vegetable counter"}]
00000003473.jpg
3473_evjvqa
[{"from": "human", "value": "what color uniform is the supermarket staff standing inside the fresh meat counter wearing?\n<image>"}, {"from": "gpt", "value": "yellow"}]
00000003474.jpg
3474_evjvqa
[{"from": "human", "value": "where are these people visiting?\n<image>"}, {"from": "gpt", "value": "in an old pagoda"}]
00000003474.jpg
3474_evjvqa
[{"from": "human", "value": "how many umbrellas are used to block the sun?\n<image>"}, {"from": "gpt", "value": "four umbrellas are used to block the sun"}]
00000003474.jpg
3474_evjvqa
[{"from": "human", "value": "how many groups of visitors use umbrellas?\n<image>"}, {"from": "gpt", "value": "two groups of visitors use umbrellas"}]
00000003474.jpg
3474_evjvqa
[{"from": "human", "value": "where is the woman riding the motorbike?\n<image>"}, {"from": "gpt", "value": "in front of an old pagoda"}]
00000003474.jpg
3474_evjvqa
[{"from": "human", "value": "how many motorbikes are running on the road?\n<image>"}, {"from": "gpt", "value": "two motorbikes are running on the road"}]
00000003474.jpg
3474_evjvqa
[{"from": "human", "value": "how many people wear conical hats?\n<image>"}, {"from": "gpt", "value": "a woman wear conical hats"}]
00000003474.jpg
3474_evjvqa
[{"from": "human", "value": "what is the woman in the conical hat doing?\n<image>"}, {"from": "gpt", "value": "pushing a cart"}]
00000003474.jpg
3474_evjvqa
[{"from": "human", "value": "where is the woman in the conical hat pushing the cart?\n<image>"}, {"from": "gpt", "value": "in front of a pagoda gate"}]
00000003474.jpg
3474_evjvqa
[{"from": "human", "value": "where is the woman riding the motorcycle passing by?\n<image>"}, {"from": "gpt", "value": "by the gate of a temple"}]
00000003476.jpg
3476_evjvqa
[{"from": "human", "value": "where are these two sitting?\n<image>"}, {"from": "gpt", "value": "in front of a restaurant"}]