Columns: image (string, length 15) | id (string, length 8-12) | conversations (string, length 79-268)
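Each record below pairs an image file with a ShareGPT-style conversation list, where the "<image>" token marks where the picture is injected into the human turn. As a minimal sketch, assuming the dump keeps this three-lines-per-record layout (image file name, record id, conversations JSON) with the header stripped, the following Python regroups the flattened lines into records; the path evjvqa_dump.txt and the helper read_records are hypothetical names, not part of the dataset:

import json

def read_records(path):
    # Assumed layout: three non-empty lines per record, in header order:
    # image file name, record id, then the conversations JSON array.
    with open(path, encoding="utf-8") as f:
        lines = [ln.strip() for ln in f if ln.strip()]
    for image, record_id, convo in zip(lines[0::3], lines[1::3], lines[2::3]):
        yield {"image": image, "id": record_id,
               "conversations": json.loads(convo)}

for record in read_records("evjvqa_dump.txt"):  # hypothetical path
    for turn in record["conversations"]:
        # "<image>" marks where the picture enters the prompt; drop it for display.
        text = turn["value"].replace("<image>", "").strip()
        print(f'{record["id"]} [{turn["from"]}]: {text}')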
00000004495.jpg
4495_evjvqa
[{"from": "human", "value": "where is the place the woman in conical hat selling?\n<image>"}, {"from": "gpt", "value": "on the sidewalk"}]
00000004100.jpg
4100_evjvqa
[{"from": "human", "value": "where is this girl sitting?\n<image>"}, {"from": "gpt", "value": "beside a pink wall"}]
00000004100.jpg
4100_evjvqa
[{"from": "human", "value": "what kind of plants do people plant beside the wall?\n<image>"}, {"from": "gpt", "value": "people plant cactuses"}]
00000004100.jpg
4100_evjvqa
[{"from": "human", "value": "where are these cactuses planted?\n<image>"}, {"from": "gpt", "value": "beside a wall"}]
00000004102.jpg
4102_evjvqa
[{"from": "human", "value": "what is the man in vietnamese silk pajamas doing?\n<image>"}, {"from": "gpt", "value": "pulling a rickshaw"}]
00000004102.jpg
4102_evjvqa
[{"from": "human", "value": "what does the man wearing a conical hat do?\n<image>"}, {"from": "gpt", "value": "towing"}]
00000004102.jpg
4102_evjvqa
[{"from": "human", "value": "where is the man in the conical hat going?\n<image>"}, {"from": "gpt", "value": "going on a street"}]
00000004104.jpg
4104_evjvqa
[{"from": "human", "value": "what is the woman in pink shirt doing?\n<image>"}, {"from": "gpt", "value": "selecting fruits"}]
00000004104.jpg
4104_evjvqa
[{"from": "human", "value": "what item is the woman in pink shirt selecting?\n<image>"}, {"from": "gpt", "value": "selecting fruits"}]
00000004110.jpg
4110_evjvqa
[{"from": "human", "value": "where is the car parked?\n<image>"}, {"from": "gpt", "value": "on the roadside, in front of an apartment complex"}]
00000004110.jpg
4110_evjvqa
[{"from": "human", "value": "how many cars are parked on the roadside?\n<image>"}, {"from": "gpt", "value": "there is one car parked on the roadside"}]
00000004110.jpg
4110_evjvqa
[{"from": "human", "value": "where are the children playing?\n<image>"}, {"from": "gpt", "value": "at the courtyard in front of the apartment complex"}]
00000004115.jpg
4115_evjvqa
[{"from": "human", "value": "what are these woman doing?\n<image>"}, {"from": "gpt", "value": "selecting fruits"}]
00000004115.jpg
4115_evjvqa
[{"from": "human", "value": "how many people are selecting fruits?\n<image>"}, {"from": "gpt", "value": "there are four women selecting fruits"}]
00000004115.jpg
4115_evjvqa
[{"from": "human", "value": "where are these women selecting goods?\n<image>"}, {"from": "gpt", "value": "at a fruit counter"}]
00000004116.jpg
4116_evjvqa
[{"from": "human", "value": "what color are the bags that these people use motorbikes to carry?\n<image>"}, {"from": "gpt", "value": "yellow bags"}]
00000004116.jpg
4116_evjvqa
[{"from": "human", "value": "where do these people use motorbikes to go?\n<image>"}, {"from": "gpt", "value": "to go to the supermarket"}]
00000004116.jpg
4116_evjvqa
[{"from": "human", "value": "what means of transportation do these people go to the supermarket by?\n<image>"}, {"from": "gpt", "value": "by motorbikes"}]
00000004119.jpg
4119_evjvqa
[{"from": "human", "value": "what is this girl doing?\n<image>"}, {"from": "gpt", "value": "selecting fruits"}]
00000004119.jpg
4119_evjvqa
[{"from": "human", "value": "what counter is the girl in black outfit standing at?\n<image>"}, {"from": "gpt", "value": "the girl in black outfit is standing at the apple counter"}]
00000004119.jpg
4119_evjvqa
[{"from": "human", "value": "how many people are selecting apples?\n<image>"}, {"from": "gpt", "value": "there is one girl in black outfit selecting apples"}]
00000004120.jpg
4120_evjvqa
[{"from": "human", "value": "where are these people sitting?\n<image>"}, {"from": "gpt", "value": "in a restaurant"}]
00000004121.jpg
4121_evjvqa
[{"from": "human", "value": "how many people are singing on the stage?\n<image>"}, {"from": "gpt", "value": "there are six people singing on the stage"}]
00000004121.jpg
4121_evjvqa
[{"from": "human", "value": "what repertoire are the people sitting on the road following?\n<image>"}, {"from": "gpt", "value": "they are following an outdoor concert"}]
00000004121.jpg
4121_evjvqa
[{"from": "human", "value": "where is this performance held?\n<image>"}, {"from": "gpt", "value": "outdoor"}]
00000004121.jpg
4121_evjvqa
[{"from": "human", "value": "how do the women singing on stage dress?\n<image>"}, {"from": "gpt", "value": "holding conical hats in hands and wearing brown ao dai"}]
00000004126.jpg
4126_evjvqa
[{"from": "human", "value": "what does this room display?\n<image>"}, {"from": "gpt", "value": "airplane models"}]
00000004126.jpg
4126_evjvqa
[{"from": "human", "value": "where are these airplane models displayed?\n<image>"}, {"from": "gpt", "value": "in a room"}]
00000004127.jpg
4127_evjvqa
[{"from": "human", "value": "what are these people using mobile phones for?\n<image>"}, {"from": "gpt", "value": "for filming and taking pictures"}]
00000004127.jpg
4127_evjvqa
[{"from": "human", "value": "where are these people standing?\n<image>"}, {"from": "gpt", "value": "on a walking street"}]
00000004128.jpg
4128_evjvqa
[{"from": "human", "value": "how many people go behind the girl in pink coat?\n<image>"}, {"from": "gpt", "value": "there is one girl going behind the girl in pink coat"}]
00000004128.jpg
4128_evjvqa
[{"from": "human", "value": "how many friends go ahead of the girl in yellow coat?\n<image>"}, {"from": "gpt", "value": "there is one girl going ahead of the girl in yellow coat"}]
00000004128.jpg
4128_evjvqa
[{"from": "human", "value": "what is the girl going ahead holding?\n<image>"}, {"from": "gpt", "value": "she is holding a phone"}]
00000004130.jpg
4130_evjvqa
[{"from": "human", "value": "where are these people standing?\n<image>"}, {"from": "gpt", "value": "on the courtyard in front of a buddha statue"}]
00000004130.jpg
4130_evjvqa
[{"from": "human", "value": "what are the people who are on the green grass in front of the buddha statue doing?\n<image>"}, {"from": "gpt", "value": "praying in front of a buddha statue"}]
00000004130.jpg
4130_evjvqa
[{"from": "human", "value": "in order to celebrate in front of the buddha statue, where must the people attending the ceremony kneel?\n<image>"}, {"from": "gpt", "value": "on the grass in front of the statue"}]
00000004131.jpg
4131_evjvqa
[{"from": "human", "value": "where does this road lead?\n<image>"}, {"from": "gpt", "value": "into a village"}]
00000004131.jpg
4131_evjvqa
[{"from": "human", "value": "how many people are driving motorbikes on the road?\n<image>"}, {"from": "gpt", "value": "there is one person driving motorbike on the road"}]
00000004132.jpg
4132_evjvqa
[{"from": "human", "value": "what area is this?\n<image>"}, {"from": "gpt", "value": "this is a park"}]
00000004132.jpg
4132_evjvqa
[{"from": "human", "value": "where are the two women sitting?\n<image>"}, {"from": "gpt", "value": "on the bench at the house"}]
00000004133.jpg
4133_evjvqa
[{"from": "human", "value": "how many people is the cyclo carrying?\n<image>"}, {"from": "gpt", "value": "the cyclo is carrying two people"}]
00000004133.jpg
4133_evjvqa
[{"from": "human", "value": "what means of transportation are the man and woman moving on the street by?\n<image>"}, {"from": "gpt", "value": "by a cyclo"}]
00000004133.jpg
4133_evjvqa
[{"from": "human", "value": "where are the woman in white shirt and the man in black shirt sitting on the roadside?\n<image>"}, {"from": "gpt", "value": "sitting in a cafe"}]
00000004133.jpg
4133_evjvqa
[{"from": "human", "value": "where are the lanterns hung for decoration?\n<image>"}, {"from": "gpt", "value": "above a street"}]
00000004134.jpg
4134_evjvqa
[{"from": "human", "value": "where are these items displayed for sale?\n<image>"}, {"from": "gpt", "value": "at a grocery store"}]
00000004135.jpg
4135_evjvqa
[{"from": "human", "value": "what are these two women doing?\n<image>"}, {"from": "gpt", "value": "introducing products of a stall"}]
00000004135.jpg
4135_evjvqa
[{"from": "human", "value": "where are these two women standing?\n<image>"}, {"from": "gpt", "value": "in a stall"}]
00000004136.jpg
4136_evjvqa
[{"from": "human", "value": "in which direction is the motorbike at the citadel gate going?\n<image>"}, {"from": "gpt", "value": "in the direction heading out of the citadel gate"}]
00000004136.jpg
4136_evjvqa
[{"from": "human", "value": "what color is the umbrella that the man pushing the hawker cart uses?\n<image>"}, {"from": "gpt", "value": "a purple umbrella"}]
00000004136.jpg
4136_evjvqa
[{"from": "human", "value": "in which direction is the street vendor pushing the hawker cart?\n<image>"}, {"from": "gpt", "value": "in the direction heading out of the citadel gate"}]
00000004138.jpg
4138_evjvqa
[{"from": "human", "value": "which girl is holding a mobile phone?\n<image>"}, {"from": "gpt", "value": "the girl in white shirt is holding a mobile phone"}]
00000004138.jpg
4138_evjvqa
[{"from": "human", "value": "what is the girl in black shirt doing?\n<image>"}, {"from": "gpt", "value": "taking a photo with the girl standing next to"}]
00000004138.jpg
4138_evjvqa
[{"from": "human", "value": "what are these two girls doing?\n<image>"}, {"from": "gpt", "value": "posing for a photo"}]
00000004138.jpg
4138_evjvqa
[{"from": "human", "value": "whom does the girl holding the mobile phone take the photos with?\n<image>"}, {"from": "gpt", "value": "with the girl in black shirt standing next to"}]
00000004140.jpg
4140_evjvqa
[{"from": "human", "value": "where are these people walking?\n<image>"}, {"from": "gpt", "value": "in a crowded fairground"}]
00000004141.jpg
4141_evjvqa
[{"from": "human", "value": "how many cars are parked by the roadside?\n<image>"}, {"from": "gpt", "value": "there is one car parked by the roadside"}]
00000004149.jpg
4149_evjvqa
[{"from": "human", "value": "how many vehicles are parked by the roadside?\n<image>"}, {"from": "gpt", "value": "there is one vehicle parked by the roadside"}]
00000004166.jpg
4166_evjvqa
[{"from": "human", "value": "what items do these shops sell?\n<image>"}, {"from": "gpt", "value": "these shops sell christmas decorations"}]
00000004166.jpg
4166_evjvqa
[{"from": "human", "value": "what do people ride here to buy?\n<image>"}, {"from": "gpt", "value": "to buy christmas decorations"}]
00000004166.jpg
4166_evjvqa
[{"from": "human", "value": "where is the man holding the stick standing?\n<image>"}, {"from": "gpt", "value": "in front of a shop selling christmas decorations"}]
00000004169.jpg
4169_evjvqa
[{"from": "human", "value": "where is this girl standing?\n<image>"}, {"from": "gpt", "value": "the girl is standing on the balcony of a house"}]
00000004169.jpg
4169_evjvqa
[{"from": "human", "value": "where is the girl standing on the balcony?\n<image>"}, {"from": "gpt", "value": "the girl is standing next to a lamp"}]
00000004173.jpg
4173_evjvqa
[{"from": "human", "value": "what do people build on both sides of the road going into the palace?\n<image>"}, {"from": "gpt", "value": "people build lakes"}]
00000004173.jpg
4173_evjvqa
[{"from": "human", "value": "where does the path in the middle lead?\n<image>"}, {"from": "gpt", "value": "it leads into the courtyard of a palace"}]
00000004174.jpg
4174_evjvqa
[{"from": "human", "value": "how do the woman travel on the street?\n<image>"}, {"from": "gpt", "value": "the woman travel on the street by a bicycle"}]
00000004174.jpg
4174_evjvqa
[{"from": "human", "value": "what does the woman wearing a conical hat do?\n<image>"}, {"from": "gpt", "value": "the woman wearing a conical hat work as a florist"}]
00000004174.jpg
4174_evjvqa
[{"from": "human", "value": "how does the woman wearing a conical hat travel to sell flowers?\n<image>"}, {"from": "gpt", "value": "the woman travel to sell flowers by bicycle"}]
00000004174.jpg
4174_evjvqa
[{"from": "human", "value": "where is the woman selling flower going?\n<image>"}, {"from": "gpt", "value": "by the roadside, past a building"}]
00000004175.jpg
4175_evjvqa
[{"from": "human", "value": "where are these people travelling?\n<image>"}, {"from": "gpt", "value": "these people are travelling at the sea"}]
00000004175.jpg
4175_evjvqa
[{"from": "human", "value": "where are these women standing?\n<image>"}, {"from": "gpt", "value": "they are standing on the beach"}]
00000004175.jpg
4175_evjvqa
[{"from": "human", "value": "how many women are standing on the beach?\n<image>"}, {"from": "gpt", "value": "there are three women standing on the beach"}]
00000004177.jpg
4177_evjvqa
[{"from": "human", "value": "what items does this store specialize in?\n<image>"}, {"from": "gpt", "value": "fitness equipment"}]
00000004178.jpg
4178_evjvqa
[{"from": "human", "value": "what goods are these boats used for transporting?\n<image>"}, {"from": "gpt", "value": "for transporting kinds of fruits, vegetables"}]
00000004178.jpg
4178_evjvqa
[{"from": "human", "value": "what way are these fruit items transported by?\n<image>"}, {"from": "gpt", "value": "transported by river"}]
00000004179.jpg
4179_evjvqa
[{"from": "human", "value": "where is this billboard located?\n<image>"}, {"from": "gpt", "value": "outside a wall of a building under construction"}]
00000004180.jpg
4180_evjvqa
[{"from": "human", "value": "where is this man sitting?\n<image>"}, {"from": "gpt", "value": "this man is sitting in a meeting room"}]
00000004180.jpg
4180_evjvqa
[{"from": "human", "value": "what is this man doing?\n<image>"}, {"from": "gpt", "value": "this man is attending a meeting"}]
00000004180.jpg
4180_evjvqa
[{"from": "human", "value": "how many men are sitting in the room?\n<image>"}, {"from": "gpt", "value": "there is one man sitting in the room"}]
00000004181.jpg
4181_evjvqa
[{"from": "human", "value": "what are the counters here decorated with?\n<image>"}, {"from": "gpt", "value": "with bubbles"}]
00000004181.jpg
4181_evjvqa
[{"from": "human", "value": "what do these counters sell?\n<image>"}, {"from": "gpt", "value": "fruits, agricultural products"}]
00000004185.jpg
4185_evjvqa
[{"from": "human", "value": "what is the woman in yellow shirt standing to do?\n<image>"}, {"from": "gpt", "value": "to sell rice"}]
00000004185.jpg
4185_evjvqa
[{"from": "human", "value": "where is the woman in yellow shirt standing?\n<image>"}, {"from": "gpt", "value": "in a restaurant selling rice"}]
00000004191.jpg
4191_evjvqa
[{"from": "human", "value": "where is this chicken going?\n<image>"}, {"from": "gpt", "value": "on a well mouth"}]
00000004193.jpg
4193_evjvqa
[{"from": "human", "value": "where is this tv located?\n<image>"}, {"from": "gpt", "value": "in a supermarket"}]
00000004193.jpg
4193_evjvqa
[{"from": "human", "value": "what items do these counters sell?\n<image>"}, {"from": "gpt", "value": "kinds of fruits"}]
00000004193.jpg
4193_evjvqa
[{"from": "human", "value": "what items does this area sell?\n<image>"}, {"from": "gpt", "value": "this area sells kinds of fruits"}]
00000004194.jpg
4194_evjvqa
[{"from": "human", "value": "from which direction is this scene taken?\n<image>"}, {"from": "gpt", "value": "the direction looking down from above"}]
00000004195.jpg
4195_evjvqa
[{"from": "human", "value": "where is the woman riding the bicycle running out from?\n<image>"}, {"from": "gpt", "value": "from an alley"}]
00000004195.jpg
4195_evjvqa
[{"from": "human", "value": "in which direction is the woman on the bicycle running?\n<image>"}, {"from": "gpt", "value": "towards the right hand side of the alley"}]
00000004195.jpg
4195_evjvqa
[{"from": "human", "value": "how many cars are parked by the left roadside?\n<image>"}, {"from": "gpt", "value": "there is one car parked"}]
00000004199.jpg
4199_evjvqa
[{"from": "human", "value": "where is this girl standing?\n<image>"}, {"from": "gpt", "value": "in a field of flowers"}]
00000004199.jpg
4199_evjvqa
[{"from": "human", "value": "what is the girl holding in her hand?\n<image>"}, {"from": "gpt", "value": "the girl is holding a bouquet"}]
00000001301.jpg
1301_evjvqa
[{"from": "human", "value": "what is thw woman holding the knife doing?\n<image>"}, {"from": "gpt", "value": "she is cutting pork to sell to the customer"}]
00000001301.jpg
1301_evjvqa
[{"from": "human", "value": "what is the woman standing opposite the woman holding the knife doing?\n<image>"}, {"from": "gpt", "value": "she is buying pork"}]
00000001301.jpg
1301_evjvqa
[{"from": "human", "value": "which product is the woman in brown overcoat buying?\n<image>"}, {"from": "gpt", "value": "pork"}]
00000001301.jpg
1301_evjvqa
[{"from": "human", "value": "what does the stall sell?\n<image>"}, {"from": "gpt", "value": "pork"}]
00000001303.jpg
1303_evjvqa
[{"from": "human", "value": "which direction is the woman looking?\n<image>"}, {"from": "gpt", "value": "menu"}]
00000001303.jpg
1303_evjvqa
[{"from": "human", "value": "who is the girl in green shirt going with?\n<image>"}, {"from": "gpt", "value": "with the man wearing sky blue with horizontal stripes"}]
00000001303.jpg
1303_evjvqa
[{"from": "human", "value": "who is the bald man standing with?\n<image>"}, {"from": "gpt", "value": "the woman is in green shirt"}]
00000001305.jpg
1305_evjvqa
[{"from": "human", "value": "which side are the two girls standing talking compared to the woman in black dress?\n<image>"}, {"from": "gpt", "value": "in the behind"}]