image (stringlengths 15-15) | id (stringlengths 8-12) | conversations (stringlengths 79-268) |
---|---|---|
00000007324.jpg | 7324_evjvqa | [{"from": "human", "value": "how many baskets are placed on the table?\n<image>"}, {"from": "gpt", "value": "there are two baskets placed on the table. "}] |
00000007324.jpg | 7324_evjvqa | [{"from": "human", "value": "where do they put the hotpot?\n<image>"}, {"from": "gpt", "value": "the hotpot is put between the twin baskets. "}] |
00000007324.jpg | 7324_evjvqa | [{"from": "human", "value": "how many plates of food are on the table?\n<image>"}, {"from": "gpt", "value": "there are three plates of food on the table. "}] |
00000007325.jpg | 7325_evjvqa | [{"from": "human", "value": "how many people are standing at the gate?\n<image>"}, {"from": "gpt", "value": "there are three people standing at the gate."}] |
00000007325.jpg | 7325_evjvqa | [{"from": "human", "value": "what is the roof of the house at the back made of?\n<image>"}, {"from": "gpt", "value": "the roof is made of leaves."}] |
00000007325.jpg | 7325_evjvqa | [{"from": "human", "value": "how many people are wearing the conical hats?\n<image>"}, {"from": "gpt", "value": "there are two people wearing the conical hats. "}] |
00000007327.jpg | 7327_evjvqa | [{"from": "human", "value": "what color is the moving motorbike?\n<image>"}, {"from": "gpt", "value": "it is red."}] |
00000007327.jpg | 7327_evjvqa | [{"from": "human", "value": "what color is the shirt that the man standing at the rear of the motorbikes is wearing?\n<image>"}, {"from": "gpt", "value": "he is wearing a white shirt. "}] |
00000007332.jpg | 7332_evjvqa | [{"from": "human", "value": "how many items is she holding?\n<image>"}, {"from": "gpt", "value": "she is holding one item."}] |
00000007332.jpg | 7332_evjvqa | [{"from": "human", "value": "to which side of the girl is the man in red standing?\n<image>"}, {"from": "gpt", "value": "he is standing to the left of the girl."}] |
00000007332.jpg | 7332_evjvqa | [{"from": "human", "value": "what color is her coat?\n<image>"}, {"from": "gpt", "value": "it is black."}] |
00000007334.jpg | 7334_evjvqa | [{"from": "human", "value": "what are they hanging on the ceiling of the store?\n<image>"}, {"from": "gpt", "value": "they are hanging lanterns on the ceiling of the store."}] |
00000007334.jpg | 7334_evjvqa | [{"from": "human", "value": "how many rows of dining sets are there in the store?\n<image>"}, {"from": "gpt", "value": "there are two rows of dining sets."}] |
00000007335.jpg | 7335_evjvqa | [{"from": "human", "value": "what color is her shirt?\n<image>"}, {"from": "gpt", "value": "it is black."}] |
00000007335.jpg | 7335_evjvqa | [{"from": "human", "value": "which hand is she raising?\n<image>"}, {"from": "gpt", "value": "she is raising her left hand."}] |
00000007336.jpg | 7336_evjvqa | [{"from": "human", "value": "where is she facing?\n<image>"}, {"from": "gpt", "value": "she is facing the painting. "}] |
00000007336.jpg | 7336_evjvqa | [{"from": "human", "value": "what is on the left of the painting?\n<image>"}, {"from": "gpt", "value": "on the left of the painting lies a carrying pole with a basket. "}] |
00000007339.jpg | 7339_evjvqa | [{"from": "human", "value": "what animal is drawn on the bus?\n<image>"}, {"from": "gpt", "value": "the snake is drawn on the bus."}] |
00000007339.jpg | 7339_evjvqa | [{"from": "human", "value": "what color is the bus?\n<image>"}, {"from": "gpt", "value": "the bus is yellow."}] |
00000007339.jpg | 7339_evjvqa | [{"from": "human", "value": "what color are the letters on the bus?\n<image>"}, {"from": "gpt", "value": "they are red."}] |
00000007341.jpg | 7341_evjvqa | [{"from": "human", "value": "what color is the coat that the woman standing in front of the produce stall is wearing?\n<image>"}, {"from": "gpt", "value": "she is wearing a blue coat."}] |
00000007341.jpg | 7341_evjvqa | [{"from": "human", "value": "what color is the handbag that the woman in the blue coat is wearing?\n<image>"}, {"from": "gpt", "value": "it is black."}] |
00000007341.jpg | 7341_evjvqa | [{"from": "human", "value": "what stall is next to the produce stall?\n<image>"}, {"from": "gpt", "value": "the liquor stall. "}] |
00000007345.jpg | 7345_evjvqa | [{"from": "human", "value": "what color is the flag hung from the temple gate?\n<image>"}, {"from": "gpt", "value": "it is red."}] |
00000007345.jpg | 7345_evjvqa | [{"from": "human", "value": "where is the censer situated?\n<image>"}, {"from": "gpt", "value": "the censer is situated at the foot of the flagpole."}] |
00000007346.jpg | 7346_evjvqa | [{"from": "human", "value": "what is the girl in red doing?\n<image>"}, {"from": "gpt", "value": "she is charging the customer. "}] |
00000007346.jpg | 7346_evjvqa | [{"from": "human", "value": "how many customers are standing in front of the counter?\n<image>"}, {"from": "gpt", "value": "there is one customer."}] |
00000007346.jpg | 7346_evjvqa | [{"from": "human", "value": "how many employees are standing behind the counter?\n<image>"}, {"from": "gpt", "value": "there are two employees."}] |
00000007348.jpg | 7348_evjvqa | [{"from": "human", "value": "which hand is the girl in pink using to pick up the item?\n<image>"}, {"from": "gpt", "value": "the girl in pink is using her left hand to pick up the item."}] |
00000007348.jpg | 7348_evjvqa | [{"from": "human", "value": "with which hand is the girl in black holding the item?\n<image>"}, {"from": "gpt", "value": "the girl in black is holding the item with her left hand."}] |
00000007348.jpg | 7348_evjvqa | [{"from": "human", "value": "to which side of the girl in pink is the girl in black standing?\n<image>"}, {"from": "gpt", "value": "she is standing to the right of the girl in pink."}] |
00000007350.jpg | 7350_evjvqa | [{"from": "human", "value": "what boxes are stacked in front of the market?\n<image>"}, {"from": "gpt", "value": "the fruit boxes are stacked in front of the market."}] |
00000007350.jpg | 7350_evjvqa | [{"from": "human", "value": "how many people are standing on the left of the fruit boxes?\n<image>"}, {"from": "gpt", "value": "there is one person standing on the left of the fruit boxes."}] |
00000007351.jpg | 7351_evjvqa | [{"from": "human", "value": "with which hand is he holding the grocery basket?\n<image>"}, {"from": "gpt", "value": "he is holding the grocery basket with his left hand."}] |
00000007351.jpg | 7351_evjvqa | [{"from": "human", "value": "to which side of the man is the woman standing?\n<image>"}, {"from": "gpt", "value": "she is standing to the left of the man."}] |
00000007351.jpg | 7351_evjvqa | [{"from": "human", "value": "what is he wearing on his left wrist?\n<image>"}, {"from": "gpt", "value": "he is wearing a watch on his left wrist. "}] |
00000007353.jpg | 7353_evjvqa | [{"from": "human", "value": "how many people are putting up hoods?\n<image>"}, {"from": "gpt", "value": "there is one person putting up the hood. "}] |
00000007353.jpg | 7353_evjvqa | [{"from": "human", "value": "how many groups of customers are eating in?\n<image>"}, {"from": "gpt", "value": "there are two groups of customers eating in."}] |
00000007354.jpg | 7354_evjvqa | [{"from": "human", "value": "how many lanterns are hung from the house with the black gate?\n<image>"}, {"from": "gpt", "value": "there are two lanterns."}] |
00000007354.jpg | 7354_evjvqa | [{"from": "human", "value": "how many trashcans are there on the side of the street?\n<image>"}, {"from": "gpt", "value": "there are two trashcans on the side of the street."}] |
00000007354.jpg | 7354_evjvqa | [{"from": "human", "value": "how many blue vehicles are there in the neighborhood?\n<image>"}, {"from": "gpt", "value": "there are two blue vehicles in the neighborhood."}] |
00000007355.jpg | 7355_evjvqa | [{"from": "human", "value": "what is he doing?\n<image>"}, {"from": "gpt", "value": "he is showing her the pictures in the camera."}] |
00000007355.jpg | 7355_evjvqa | [{"from": "human", "value": "to which side of the guy is the girl sitting?\n<image>"}, {"from": "gpt", "value": "she is sitting to the left of the guy."}] |
00000007355.jpg | 7355_evjvqa | [{"from": "human", "value": "what is on her left wrist?\n<image>"}, {"from": "gpt", "value": "there is a watch on her left wrist."}] |
00000007357.jpg | 7357_evjvqa | [{"from": "human", "value": "what is she doing?\n<image>"}, {"from": "gpt", "value": "she is pushing a trolley."}] |
00000007357.jpg | 7357_evjvqa | [{"from": "human", "value": "where is the child standing?\n<image>"}, {"from": "gpt", "value": "the child is standing in the trolley."}] |
00000007357.jpg | 7357_evjvqa | [{"from": "human", "value": "what is behind the child?\n<image>"}, {"from": "gpt", "value": "behind the child lies a handbag."}] |
00000007359.jpg | 7359_evjvqa | [{"from": "human", "value": "what color are the lights inside the building?\n<image>"}, {"from": "gpt", "value": "the lights inside the building are yellow."}] |
00000007359.jpg | 7359_evjvqa | [{"from": "human", "value": "how many colors do the stairs to the building have?\n<image>"}, {"from": "gpt", "value": "the stairs have two colors."}] |
00000007363.jpg | 7363_evjvqa | [{"from": "human", "value": "what is the man in white doing?\n<image>"}, {"from": "gpt", "value": "he is taking the plastic bag out of the water barrel. "}] |
00000007363.jpg | 7363_evjvqa | [{"from": "human", "value": "how many people are standing in front of the man in white?\n<image>"}, {"from": "gpt", "value": "there is one person standing in front of the man in white. "}] |
00000007363.jpg | 7363_evjvqa | [{"from": "human", "value": "what color is the basket at his foot?\n<image>"}, {"from": "gpt", "value": "it is blue. "}] |
00000007364.jpg | 7364_evjvqa | [{"from": "human", "value": "how many girls are standing in front of the store shelves?\n<image>"}, {"from": "gpt", "value": "there are two girls standing in front of the store shelves."}] |
00000007364.jpg | 7364_evjvqa | [{"from": "human", "value": "which hand is the girl wearing the black mask holding the box with?\n<image>"}, {"from": "gpt", "value": "she is holding the box with her right hand. "}] |
00000007364.jpg | 7364_evjvqa | [{"from": "human", "value": "who is holding the grocery basket?\n<image>"}, {"from": "gpt", "value": "the grocery basket is held by the girl wearing the white mask. "}] |
00000007365.jpg | 7365_evjvqa | [{"from": "human", "value": "with which hand is she holding the grocery basket?\n<image>"}, {"from": "gpt", "value": "she is holding the grocery basket with her left hand."}] |
00000007369.jpg | 7369_evjvqa | [{"from": "human", "value": "what color is the t-shirt that the girl pushing the trolley is wearing?\n<image>"}, {"from": "gpt", "value": "she is wearing a black t-shirt."}] |
00000007369.jpg | 7369_evjvqa | [{"from": "human", "value": "which section is the girl in the black t-shirt standing at?\n<image>"}, {"from": "gpt", "value": "she is standing at the produce section."}] |
00000007371.jpg | 7371_evjvqa | [{"from": "human", "value": "how many stools are placed by the price board?\n<image>"}, {"from": "gpt", "value": "there are two stools placed by the price board."}] |
00000007371.jpg | 7371_evjvqa | [{"from": "human", "value": "where is the red stool situated?\n<image>"}, {"from": "gpt", "value": "it is situated between the blue stool and the price board. "}] |
00000007372.jpg | 7372_evjvqa | [{"from": "human", "value": "how many people are sitting on the canoe?\n<image>"}, {"from": "gpt", "value": "there are two people sitting on the canoe."}] |
00000007372.jpg | 7372_evjvqa | [{"from": "human", "value": "what color is the life jacket?\n<image>"}, {"from": "gpt", "value": "it is orange."}] |
00000007372.jpg | 7372_evjvqa | [{"from": "human", "value": "what color is the shirt that the paddler is wearing?\n<image>"}, {"from": "gpt", "value": "it is black."}] |
00000007373.jpg | 7373_evjvqa | [{"from": "human", "value": "how many mannequins does the pillion carry?\n<image>"}, {"from": "gpt", "value": "two mannequins."}] |
00000007373.jpg | 7373_evjvqa | [{"from": "human", "value": "how many motorbikes are running behind the car?\n<image>"}, {"from": "gpt", "value": "there are four motorbikes running behind the car."}] |
00000007374.jpg | 7374_evjvqa | [{"from": "human", "value": "what is the woman with the glasses doing?\n<image>"}, {"from": "gpt", "value": "she is selling produce to the customer. "}] |
00000007374.jpg | 7374_evjvqa | [{"from": "human", "value": "which hand is the customer using to point at the produce?\n<image>"}, {"from": "gpt", "value": "he is using his right hand."}] |
00000007374.jpg | 7374_evjvqa | [{"from": "human", "value": "how many customers are standing at the woman with the glasses' stall?\n<image>"}, {"from": "gpt", "value": "there is one customer standing in front of her stall. "}] |
00000007380.jpg | 7380_evjvqa | [{"from": "human", "value": "what color is the shirt that the woman holding the bowl is wearing?\n<image>"}, {"from": "gpt", "value": "she is wearing a black shirt."}] |
00000007380.jpg | 7380_evjvqa | [{"from": "human", "value": "what color is the bowl that she is holding?\n<image>"}, {"from": "gpt", "value": "it is green."}] |
00000007380.jpg | 7380_evjvqa | [{"from": "human", "value": "how many people are standing next to the woman in black?\n<image>"}, {"from": "gpt", "value": "there are two people standing next to the woman in black. "}] |
00000007382.jpg | 7382_evjvqa | [{"from": "human", "value": "what color is her dress?\n<image>"}, {"from": "gpt", "value": "it is white."}] |
00000007382.jpg | 7382_evjvqa | [{"from": "human", "value": "to which side of the woman is the man standing?\n<image>"}, {"from": "gpt", "value": "he is standing opposite her. "}] |
00000007382.jpg | 7382_evjvqa | [{"from": "human", "value": "where does he place his hands?\n<image>"}, {"from": "gpt", "value": "he places his hands on her hip."}] |
00000007383.jpg | 7383_evjvqa | [{"from": "human", "value": "what is the market gate made from?\n<image>"}, {"from": "gpt", "value": "it is made from bamboo. "}] |
00000007383.jpg | 7383_evjvqa | [{"from": "human", "value": "what color are the beds of flowers at the foot of the gate?\n<image>"}, {"from": "gpt", "value": "they are yellow."}] |
00000007384.jpg | 7384_evjvqa | [{"from": "human", "value": "what color is the motorbike that she is driving?\n<image>"}, {"from": "gpt", "value": "it is blue."}] |
00000007384.jpg | 7384_evjvqa | [{"from": "human", "value": "what is the man walking ahead of the motorbike doing?\n<image>"}, {"from": "gpt", "value": "he is loading the product."}] |
00000007385.jpg | 7385_evjvqa | [{"from": "human", "value": "how many paintings are drawn above the market entrance?\n<image>"}, {"from": "gpt", "value": "there are three paintings drawn above the market entrance. "}] |
00000007385.jpg | 7385_evjvqa | [{"from": "human", "value": "what colors are the paintings above the market entrance?\n<image>"}, {"from": "gpt", "value": "they are purple and orange."}] |
00000007386.jpg | 7386_evjvqa | [{"from": "human", "value": "what color are the chairs?\n<image>"}, {"from": "gpt", "value": "they are white."}] |
00000007386.jpg | 7386_evjvqa | [{"from": "human", "value": "how many coffee grinders are drawn on the picture?\n<image>"}, {"from": "gpt", "value": "there are four coffee grinders drawn on the picture. "}] |
00000007389.jpg | 7389_evjvqa | [{"from": "human", "value": "how many canoes are drifting on the river?\n<image>"}, {"from": "gpt", "value": "there is one canoe drifting on the river."}] |
00000007389.jpg | 7389_evjvqa | [{"from": "human", "value": "how many unused canoes are there on the river?\n<image>"}, {"from": "gpt", "value": "there are two unused canoes on the river. "}] |
00000007389.jpg | 7389_evjvqa | [{"from": "human", "value": "which side is the paddler holding the paddle on?\n<image>"}, {"from": "gpt", "value": "the paddler is holding the paddle on the left."}] |
00000007390.jpg | 7390_evjvqa | [{"from": "human", "value": "what color is she dressed in?\n<image>"}, {"from": "gpt", "value": "she is dressed in pastel green. "}] |
00000007390.jpg | 7390_evjvqa | [{"from": "human", "value": "to which side of the woman is the man standing?\n<image>"}, {"from": "gpt", "value": "he is standing opposite her."}] |
00000007390.jpg | 7390_evjvqa | [{"from": "human", "value": "what is he holding in his right hand?\n<image>"}, {"from": "gpt", "value": "he is holding her hand. "}] |
00000007391.jpg | 7391_evjvqa | [{"from": "human", "value": "where is she looking at?\n<image>"}, {"from": "gpt", "value": "she is looking at the ship model."}] |
00000007391.jpg | 7391_evjvqa | [{"from": "human", "value": "what animal is the ship modelled after?\n<image>"}, {"from": "gpt", "value": "the ship is modelled after a dragon."}] |
00000007391.jpg | 7391_evjvqa | [{"from": "human", "value": "what color is her scarf?\n<image>"}, {"from": "gpt", "value": "it is red. "}] |
00000007394.jpg | 7394_evjvqa | [{"from": "human", "value": "what is she doing?\n<image>"}, {"from": "gpt", "value": "she is posing for the picture."}] |
00000007394.jpg | 7394_evjvqa | [{"from": "human", "value": "what color are her pants?\n<image>"}, {"from": "gpt", "value": "they are white. "}] |
00000007395.jpg | 7395_evjvqa | [{"from": "human", "value": "what does the bag in front of the girl in red contain?\n<image>"}, {"from": "gpt", "value": "the bag in front of the girl in red contains chillies."}] |
00000007395.jpg | 7395_evjvqa | [{"from": "human", "value": "to which side of the girl in red is the girl in yellow standing?\n<image>"}, {"from": "gpt", "value": "she is standing opposite the girl in red. "}] |
00000007395.jpg | 7395_evjvqa | [{"from": "human", "value": "what color is the man on the right of the girl in red dressed in?\n<image>"}, {"from": "gpt", "value": "he is dressed in black."}] |
00000007397.jpg | 7397_evjvqa | [{"from": "human", "value": "what is the man in the white shirt wearing across his chest?\n<image>"}, {"from": "gpt", "value": "a camera. "}] |
00000007397.jpg | 7397_evjvqa | [{"from": "human", "value": "to which side of the man in blue is the man in white standing?\n<image>"}, {"from": "gpt", "value": "he is standing to the left of the man in white. "}] |
00000007397.jpg | 7397_evjvqa | [{"from": "human", "value": "in which hand is the woman in the black t-shirt holding the bag?\n<image>"}, {"from": "gpt", "value": "she is holding the bag in her left hand."}] |
00000007398.jpg | 7398_evjvqa | [{"from": "human", "value": "what color is the store assistant's uniform? \n<image>"}, {"from": "gpt", "value": "it is green."}] |
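
Each `conversations` cell above is itself a JSON string: a list of alternating `human`/`gpt` turns, where the human turn embeds an `<image>` placeholder marking where the image is injected into the prompt. Below is a minimal parsing sketch in Python; the `row` tuple is copied from the first row of the table, and the variable names are illustrative, not part of any published loader API.

```python
import json

# One raw row from the table above: image filename, sample id, and the
# conversations field (a JSON-encoded list of human/gpt turns).
row = (
    "00000007324.jpg",
    "7324_evjvqa",
    '[{"from": "human", "value": "how many baskets are placed on the table?\\n<image>"},'
    ' {"from": "gpt", "value": "there are two baskets placed on the table. "}]',
)

image, sample_id, conversations_json = row
turns = json.loads(conversations_json)

for turn in turns:
    # Each turn carries a speaker role ("human" or "gpt") and the text;
    # strip the <image> placeholder and trailing whitespace from the value.
    speaker = turn["from"]
    text = turn["value"].replace("<image>", "").strip()
    print(f"{image} ({sample_id}) [{speaker}]: {text}")
```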