Columns: image (string, 15 chars), id (string, 8–12 chars), conversations (string, 79–268 chars)
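A minimal sketch of how one might consume a row of this table, assuming each conversations cell is a JSON-encoded list of turns; the row literal below is copied from the first entry, and the loop is a hypothetical helper, not this dataset's official loader:

```python
import json

# Hypothetical row dict mirroring the columns above; values are copied from
# the first entry in this table. The dataset's real loader may differ.
row = {
    "image": "00000001305.jpg",
    "id": "1305_evjvqa",
    "conversations": (
        '[{"from": "human", "value": "what is the woman in the black dress '
        'doing with the stroller?\\n<image>"}, '
        '{"from": "gpt", "value": "going shopping"}]'
    ),
}

# Each turn carries a "from" field ("human" = question, "gpt" = answer) and a
# "value" field whose text may embed an <image> placeholder for the picture.
for turn in json.loads(row["conversations"]):
    text = turn["value"].replace("<image>", "").strip()
    print(f'{turn["from"]}: {text}')
```

Running this prints each question followed by its answer, which is how every row below should be read.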
00000001305.jpg
1305_evjvqa
[{"from": "human", "value": "what is the woman in the black dress doing with the stroller?\n<image>"}, {"from": "gpt", "value": "to shopping"}]
00000001305.jpg
1305_evjvqa
[{"from": "human", "value": "where is the woman in the black dress for the chosen product?\n<image>"}, {"from": "gpt", "value": "in the stroller"}]
00000001312.jpg
1312_evjvqa
[{"from": "human", "value": "how many cars are on this road?\n<image>"}, {"from": "gpt", "value": "only one"}]
00000001312.jpg
1312_evjvqa
[{"from": "human", "value": "how many directions is this road?\n<image>"}, {"from": "gpt", "value": "just one"}]
00000001312.jpg
1312_evjvqa
[{"from": "human", "value": "how many directions can vehicles travel on this route?\n<image>"}, {"from": "gpt", "value": "just one"}]
00000001313.jpg
1313_evjvqa
[{"from": "human", "value": "why some people hold the umbrellas?\n<image>"}, {"from": "gpt", "value": "to sundshade"}]
00000001313.jpg
1313_evjvqa
[{"from": "human", "value": "how many people are wearing conical hat?\n<image>"}, {"from": "gpt", "value": "only one"}]
00000001313.jpg
1313_evjvqa
[{"from": "human", "value": "what color umbrella is the person standing on the right side holding?\n<image>"}, {"from": "gpt", "value": "red"}]
00000001318.jpg
1318_evjvqa
[{"from": "human", "value": "what is the woman wearing conical hat doing?\n<image>"}, {"from": "gpt", "value": "she sitting and scooping food into bowls for the customers"}]
00000001318.jpg
1318_evjvqa
[{"from": "human", "value": "what is the woman wearing conical hat holding?\n<image>"}, {"from": "gpt", "value": "the ladle and the bowl"}]
00000001318.jpg
1318_evjvqa
[{"from": "human", "value": "what is the woman who is scooping food into the bowl for the customers wearing?\n<image>"}, {"from": "gpt", "value": "the conical hat"}]
00000001322.jpg
1322_evjvqa
[{"from": "human", "value": "what is the man behind the podium for?\n<image>"}, {"from": "gpt", "value": "he is speaking at the conference"}]
00000001322.jpg
1322_evjvqa
[{"from": "human", "value": "what do the speakers wearing at the conference wear?\n<image>"}, {"from": "gpt", "value": "vest and tie"}]
00000001328.jpg
1328_evjvqa
[{"from": "human", "value": "where is the shopping activity taken place?\n<image>"}, {"from": "gpt", "value": "in the supermarket"}]
00000001328.jpg
1328_evjvqa
[{"from": "human", "value": "what is the man in sky blue shirt doing at the stall?\n<image>"}, {"from": "gpt", "value": "buying dragon fruits"}]
00000001343.jpg
1343_evjvqa
[{"from": "human", "value": "how do these two women move in the flower garden?\n<image>"}, {"from": "gpt", "value": "the boat"}]
00000001343.jpg
1343_evjvqa
[{"from": "human", "value": "who are on the boat in the middle of the flower garden?\n<image>"}, {"from": "gpt", "value": "two women are wearing conical hats"}]
00000001343.jpg
1343_evjvqa
[{"from": "human", "value": "where are these two women sailing?\n<image>"}, {"from": "gpt", "value": "in the middle of the flower garden"}]
00000001354.jpg
1354_evjvqa
[{"from": "human", "value": "what is the woman doing?\n<image>"}, {"from": "gpt", "value": "she is surfing"}]
00000001354.jpg
1354_evjvqa
[{"from": "human", "value": "what sport is the girl playing?\n<image>"}, {"from": "gpt", "value": "surfing"}]
00000001354.jpg
1354_evjvqa
[{"from": "human", "value": "what does this girl surf with?\n<image>"}, {"from": "gpt", "value": "the surfboard"}]
00000001357.jpg
1357_evjvqa
[{"from": "human", "value": "what is the girl in the light blue pants leaning on?\n<image>"}, {"from": "gpt", "value": "the bus model"}]
00000001357.jpg
1357_evjvqa
[{"from": "human", "value": "what is the girl standing on the bus model carrying?\n<image>"}, {"from": "gpt", "value": "the crossbody bag"}]
00000001357.jpg
1357_evjvqa
[{"from": "human", "value": "what color is the bus model painted in?\n<image>"}, {"from": "gpt", "value": "sky blue"}]
00000001365.jpg
1365_evjvqa
[{"from": "human", "value": "what does the discount sign look like?\n<image>"}, {"from": "gpt", "value": "it has rectangle shape, pink background, black border and white text"}]
00000001383.jpg
1383_evjvqa
[{"from": "human", "value": "who is the girl in white standing with?\n<image>"}, {"from": "gpt", "value": "nobody"}]
00000001383.jpg
1383_evjvqa
[{"from": "human", "value": "what does the girl standing look like?\n<image>"}, {"from": "gpt", "value": "she is wearing white shirt, blue dress, white shoes and wide-brimmed hat"}]
00000001383.jpg
1383_evjvqa
[{"from": "human", "value": "what is the place behind the girl?\n<image>"}, {"from": "gpt", "value": "cau bridge, hoi an"}]
00000001388.jpg
1388_evjvqa
[{"from": "human", "value": "what is the man sitting next to the pillar going to do?\n<image>"}, {"from": "gpt", "value": "smoking"}]
00000001392.jpg
1392_evjvqa
[{"from": "human", "value": "what are the main items this store sells?\n<image>"}, {"from": "gpt", "value": "type os shoes"}]
00000001392.jpg
1392_evjvqa
[{"from": "human", "value": "what is the woman with the handbag sitting on?\n<image>"}, {"from": "gpt", "value": "the red chair"}]
00000001392.jpg
1392_evjvqa
[{"from": "human", "value": "where can the red chair be found?\n<image>"}, {"from": "gpt", "value": "it is seated by the woman with the handbag"}]
00000001392.jpg
1392_evjvqa
[{"from": "human", "value": "what is the woman in the red chair wearing?\n<image>"}, {"from": "gpt", "value": "the handbag"}]
00000001393.jpg
1393_evjvqa
[{"from": "human", "value": "who is speaking at the event?\n<image>"}, {"from": "gpt", "value": "the woman is in white, holding microphone"}]
00000001393.jpg
1393_evjvqa
[{"from": "human", "value": "what is the person on the right of the woman in the white shirt holding?\n<image>"}, {"from": "gpt", "value": "bunch of flowers"}]
00000001394.jpg
1394_evjvqa
[{"from": "human", "value": "how do people go to the market?\n<image>"}, {"from": "gpt", "value": "walking or riding a motorbike"}]
00000001394.jpg
1394_evjvqa
[{"from": "human", "value": "what kind of goods do the stalls where the man in the black cap sells?\n<image>"}, {"from": "gpt", "value": "raw fish"}]
00000001397.jpg
1397_evjvqa
[{"from": "human", "value": "how many lanes does this road have?\n<image>"}, {"from": "gpt", "value": "four"}]
00000001397.jpg
1397_evjvqa
[{"from": "human", "value": "which lane has the most cars running?\n<image>"}, {"from": "gpt", "value": "the rightmost lane"}]
00000001397.jpg
1397_evjvqa
[{"from": "human", "value": "which lane can continue going straight?\n<image>"}, {"from": "gpt", "value": "two lanes in the middle"}]
00000001397.jpg
1397_evjvqa
[{"from": "human", "value": "which lane is the manhole cover on?\n<image>"}, {"from": "gpt", "value": "the second lane from left"}]
00000003903.jpg
3903_evjvqa
[{"from": "human", "value": "what is the man in gray shirt raising hand for?\n<image>"}, {"from": "gpt", "value": "he is raising hand for greeting the people in the isolation area"}]
00000003903.jpg
3903_evjvqa
[{"from": "human", "value": "what hat is the woman in pink shirt wearing?\n<image>"}, {"from": "gpt", "value": "she is wearing a purple wide-brimmed hat"}]
00000003903.jpg
3903_evjvqa
[{"from": "human", "value": "who is the one raising hand to greet the people in the isolation area?\n<image>"}, {"from": "gpt", "value": "the man wearing the gray long-sleeve shirt"}]
00000003904.jpg
3904_evjvqa
[{"from": "human", "value": "where is the woman standing?\n<image>"}, {"from": "gpt", "value": "she is standing at the door"}]
00000003904.jpg
3904_evjvqa
[{"from": "human", "value": "what is the weather like here?\n<image>"}, {"from": "gpt", "value": "it is sunny here"}]
00000003904.jpg
3904_evjvqa
[{"from": "human", "value": "what animals are on the yard?\n<image>"}, {"from": "gpt", "value": "chickens"}]
00000003905.jpg
3905_evjvqa
[{"from": "human", "value": "who is sitting next to the young man in blue coat?\n<image>"}, {"from": "gpt", "value": "sitting next to the young man in blue coat is a person in red coat"}]
00000003905.jpg
3905_evjvqa
[{"from": "human", "value": "how many people are sitting on the stone bench?\n<image>"}, {"from": "gpt", "value": "on the right stone bench, someone is sitting"}]
00000003905.jpg
3905_evjvqa
[{"from": "human", "value": "who are sitting on the right stone bench?\n<image>"}, {"from": "gpt", "value": "a person in blue shirt and a person in red shirt are sitting on the right stone bench"}]
00000003907.jpg
3907_evjvqa
[{"from": "human", "value": "what color is the face mask that the woman standing wears?\n<image>"}, {"from": "gpt", "value": "she wears a white face mask"}]
00000003909.jpg
3909_evjvqa
[{"from": "human", "value": "what is the shape of the flag?\n<image>"}, {"from": "gpt", "value": "the flag has a red background and a yellow star"}]
00000003909.jpg
3909_evjvqa
[{"from": "human", "value": "where the conical hat lies?\n<image>"}, {"from": "gpt", "value": "the conical hat lies on the cargo cart"}]
00000003909.jpg
3909_evjvqa
[{"from": "human", "value": "on which side of the road is the electric pole located?\n<image>"}, {"from": "gpt", "value": "the electric pole is located on the left side of the road"}]
00000003910.jpg
3910_evjvqa
[{"from": "human", "value": "what is the woman in blue coat wearing in her hand?\n<image>"}, {"from": "gpt", "value": "she is wearing a red bag"}]
00000003910.jpg
3910_evjvqa
[{"from": "human", "value": "what hat does the woman in blue coat wear?\n<image>"}, {"from": "gpt", "value": "a conical hat"}]
00000003911.jpg
3911_evjvqa
[{"from": "human", "value": "what color are the two balloons?\n<image>"}, {"from": "gpt", "value": "a balloon is orange, the other is yellow"}]
00000003911.jpg
3911_evjvqa
[{"from": "human", "value": "who is standing to the left of the woman in blue t-shirt?\n<image>"}, {"from": "gpt", "value": "standing to the left of the woman in the blue t-shirt is the woman in black outfit"}]
00000003911.jpg
3911_evjvqa
[{"from": "human", "value": "who is standing to the left of the woman in black shirt?\n<image>"}, {"from": "gpt", "value": "standing to the left of the woman in black shirt is the little girl in white hat"}]
00000003915.jpg
3915_evjvqa
[{"from": "human", "value": "what color is the shirt that the woman holding a red bag is wearing?\n<image>"}, {"from": "gpt", "value": "she is wearing a purple t-shirt"}]
00000003915.jpg
3915_evjvqa
[{"from": "human", "value": "what is the man in gray shirt pushing?\n<image>"}, {"from": "gpt", "value": "he is pushing a baby stroller"}]
00000003916.jpg
3916_evjvqa
[{"from": "human", "value": "who is selling directly to the woman in green shirt?\n<image>"}, {"from": "gpt", "value": "the woman in violet shirt"}]
00000003916.jpg
3916_evjvqa
[{"from": "human", "value": "what vehicle is behind the woman in green shirt?\n<image>"}, {"from": "gpt", "value": "a motorbike"}]
00000003916.jpg
3916_evjvqa
[{"from": "human", "value": "what is red hung on the scooter of the woman in black shirt?\n<image>"}, {"from": "gpt", "value": "there is a red helmet hung on the scooter of the woman in black shirt"}]
00000003919.jpg
3919_evjvqa
[{"from": "human", "value": "what color is the car parked on the yard?\n<image>"}, {"from": "gpt", "value": "the car parked on the yard is black"}]
00000003919.jpg
3919_evjvqa
[{"from": "human", "value": "what is the man in black coat doing?\n<image>"}, {"from": "gpt", "value": "he is looking at the exhibits"}]
00000003919.jpg
3919_evjvqa
[{"from": "human", "value": "what are on display on the table?\n<image>"}, {"from": "gpt", "value": "the ceramic wares"}]
00000003920.jpg
3920_evjvqa
[{"from": "human", "value": "what kinds of vehicles are moving on the road?\n<image>"}, {"from": "gpt", "value": "motorbikes and cars are moving on the road"}]
00000003920.jpg
3920_evjvqa
[{"from": "human", "value": "what is the sky like in this locality?\n<image>"}, {"from": "gpt", "value": "the sky is cloudy"}]
00000003922.jpg
3922_evjvqa
[{"from": "human", "value": "what color is the shirt that the woman wears?\n<image>"}, {"from": "gpt", "value": "the woman wears black shirt"}]
00000003922.jpg
3922_evjvqa
[{"from": "human", "value": "what kind of flower tree is the girl standing next to?\n<image>"}, {"from": "gpt", "value": "peach tree"}]
00000003922.jpg
3922_evjvqa
[{"from": "human", "value": "what shirt is the this girl wearing?\n<image>"}, {"from": "gpt", "value": "she is wearing a black halter"}]
00000003924.jpg
3924_evjvqa
[{"from": "human", "value": "how many guests are sitting in this restaurant?\n<image>"}, {"from": "gpt", "value": "there are no guests"}]
00000003924.jpg
3924_evjvqa
[{"from": "human", "value": "where is the air conditioner located in this restaurant?\n<image>"}, {"from": "gpt", "value": "the air conditioner is on the left wall with the same height level as the loft railing"}]
00000003928.jpg
3928_evjvqa
[{"from": "human", "value": "what is the person in red shirt wearing on the hand?\n<image>"}, {"from": "gpt", "value": "that person is wearing rubber gloves"}]
00000003928.jpg
3928_evjvqa
[{"from": "human", "value": "what is the woman in black shirt wearing glasses doing?\n<image>"}, {"from": "gpt", "value": "she is selecting oranges"}]
00000003928.jpg
3928_evjvqa
[{"from": "human", "value": "what type of fruit is being taken by the woman in black shirt with glasses?\n<image>"}, {"from": "gpt", "value": "oranges"}]
00000003930.jpg
3930_evjvqa
[{"from": "human", "value": "what items are hung on the shelf behind the woman wearing a white face mask?\n<image>"}, {"from": "gpt", "value": "pans"}]
00000003930.jpg
3930_evjvqa
[{"from": "human", "value": "which side of the woman wearing white face mask does the woman wearing blue face mask stand?\n<image>"}, {"from": "gpt", "value": "the woman wearing blue face mask stands on the left side of the woman wearing white face mask"}]
00000003930.jpg
3930_evjvqa
[{"from": "human", "value": "what is the woman standing next to the woman wearing a blue face mask wearing on her face?\n<image>"}, {"from": "gpt", "value": "she is wearing a medical face mask and a face shield"}]
00000003933.jpg
3933_evjvqa
[{"from": "human", "value": "which side lane of the median has more traffic?\n<image>"}, {"from": "gpt", "value": "the right side lane of the median"}]
00000003933.jpg
3933_evjvqa
[{"from": "human", "value": "what is the weather like in this area?\n<image>"}, {"from": "gpt", "value": "it is sunny in this area"}]
00000003933.jpg
3933_evjvqa
[{"from": "human", "value": "which side lane of the median has only three moving motorbikes?\n<image>"}, {"from": "gpt", "value": "the left lane of the median"}]
00000003935.jpg
3935_evjvqa
[{"from": "human", "value": "what color is the sea water?\n<image>"}, {"from": "gpt", "value": "the sea water is blue"}]
00000003935.jpg
3935_evjvqa
[{"from": "human", "value": "what vehicles are moving on the sea?\n<image>"}, {"from": "gpt", "value": "the ships"}]
00000003935.jpg
3935_evjvqa
[{"from": "human", "value": "what are on the sea surface?\n<image>"}, {"from": "gpt", "value": "the ships are on the sea surface"}]
00000003936.jpg
3936_evjvqa
[{"from": "human", "value": "how many floors does the row of shelves in the middle of the room have?\n<image>"}, {"from": "gpt", "value": "the row of shelves in the middle of the room has four floors"}]
00000003937.jpg
3937_evjvqa
[{"from": "human", "value": "what are the people standing in between the girls in red ao dai doing?\n<image>"}, {"from": "gpt", "value": "they are cutting the ribbon"}]
00000003937.jpg
3937_evjvqa
[{"from": "human", "value": "who does each person cutting the ribbon stand between?\n<image>"}, {"from": "gpt", "value": "each person cutting the ribbon stands between the two girls in red ao dai"}]
00000003937.jpg
3937_evjvqa
[{"from": "human", "value": "what are the people who do not wear red ao dai doing?\n<image>"}, {"from": "gpt", "value": "the people who do not wear red ao dai is cutting the ribbon"}]
00000003938.jpg
3938_evjvqa
[{"from": "human", "value": "what color is the sweater that the young man wears?\n<image>"}, {"from": "gpt", "value": "the young man wears brown sweater"}]
00000003938.jpg
3938_evjvqa
[{"from": "human", "value": "who does the girl in black shirt stand next to?\n<image>"}, {"from": "gpt", "value": "the girl in black shirt stands next to the young man in brown sweater"}]
00000003939.jpg
3939_evjvqa
[{"from": "human", "value": "where is the woman standing?\n<image>"}, {"from": "gpt", "value": "the woman is standing by the bridge railing"}]
00000003939.jpg
3939_evjvqa
[{"from": "human", "value": "what does the woman wear?\n<image>"}, {"from": "gpt", "value": "the woman wears a red ao dai with a yellow star on the chest"}]
00000003939.jpg
3939_evjvqa
[{"from": "human", "value": "what is the woman in ao dai wearing on her face?\n<image>"}, {"from": "gpt", "value": "she is wearing glasses"}]
00000003940.jpg
3940_evjvqa
[{"from": "human", "value": "where can the green trash can be seen?\n<image>"}, {"from": "gpt", "value": "the green trash can lies next to the head of the orange truck"}]
00000003940.jpg
3940_evjvqa
[{"from": "human", "value": "what is the color of the car in the right front side of the brown car?\n<image>"}, {"from": "gpt", "value": "that car is white"}]
00000003941.jpg
3941_evjvqa
[{"from": "human", "value": "what color are the baubles on the christmas tree?\n<image>"}, {"from": "gpt", "value": "the baubles on the christmas tree are red"}]
00000003941.jpg
3941_evjvqa
[{"from": "human", "value": "how many gift boxes are there under the christmas tree?\n<image>"}, {"from": "gpt", "value": "there are two gift boxes under the christmas tree"}]
00000003941.jpg
3941_evjvqa
[{"from": "human", "value": "where are the red baubles attached?\n<image>"}, {"from": "gpt", "value": "the red baubles attached on the christmas tree"}]