image
id
conversations
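Each record below spans three lines matching the columns above: an image filename, a record id, and a conversations field holding a single human/gpt question-answer pair as JSON, with the <image> token marking where the image is injected into the prompt. A minimal parsing sketch under those assumptions (the records.txt path and the helper name parse_records are hypothetical, not part of the dataset):

```python
import json

def parse_records(path):
    """Parse the three-line-per-record dump: image, id, conversations (JSON)."""
    with open(path, encoding="utf-8") as f:
        lines = [ln.strip() for ln in f if ln.strip()]
    records = []
    # Skip the three-line column header (image, id, conversations).
    for i in range(3, len(lines) - 2, 3):
        records.append({
            "image": lines[i],
            "id": lines[i + 1],
            # One human/gpt turn pair per row; "<image>" marks the image slot.
            "conversations": json.loads(lines[i + 2]),
        })
    return records

records = parse_records("records.txt")  # hypothetical dump file
```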
00000005343.jpg
5343_evjvqa
[{"from": "human", "value": "how many styrofoam boxes are placed in front of the house?\n<image>"}, {"from": "gpt", "value": "two styrofoam boxes are placed in front of the house"}]
00000005346.jpg
5346_evjvqa
[{"from": "human", "value": "what are the couple doing?\n<image>"}, {"from": "gpt", "value": "holding hands"}]
00000005347.jpg
5347_evjvqa
[{"from": "human", "value": "what animal is the mascot costume?\n<image>"}, {"from": "gpt", "value": "the mascot costume is chicken"}]
00000005347.jpg
5347_evjvqa
[{"from": "human", "value": "what color uniforms are the people at the stall wearing?\n<image>"}, {"from": "gpt", "value": "blue uniform shirt"}]
00000005347.jpg
5347_evjvqa
[{"from": "human", "value": "what color is the guitar in the hand of the mascot chicken?\n<image>"}, {"from": "gpt", "value": "red"}]
00000005348.jpg
5348_evjvqa
[{"from": "human", "value": "where are people gathering to buy?\n<image>"}, {"from": "gpt", "value": "at the flower shop"}]
00000005348.jpg
5348_evjvqa
[{"from": "human", "value": "where is the man in the brown shirt sitting?\n<image>"}, {"from": "gpt", "value": "on the motorbike"}]
00000005348.jpg
5348_evjvqa
[{"from": "human", "value": "how many women in striped shirts are buying flowers?\n<image>"}, {"from": "gpt", "value": "one woman in striped shirts is buying flowers"}]
00000005352.jpg
5352_evjvqa
[{"from": "human", "value": "how many monks are walking in front of the market gate?\n<image>"}, {"from": "gpt", "value": "one monk is walking in front of the market gate"}]
00000005354.jpg
5354_evjvqa
[{"from": "human", "value": "where are the two people sitting?\n<image>"}, {"from": "gpt", "value": "sitting by the lake"}]
00000005354.jpg
5354_evjvqa
[{"from": "human", "value": "what is the vehicle parked next to the two young people?\n<image>"}, {"from": "gpt", "value": "bicycle"}]
00000005354.jpg
5354_evjvqa
[{"from": "human", "value": "what time of day is it?\n<image>"}, {"from": "gpt", "value": "it is evening"}]
00000005355.jpg
5355_evjvqa
[{"from": "human", "value": "where are people trading?\n<image>"}, {"from": "gpt", "value": "at the mall"}]
00000005355.jpg
5355_evjvqa
[{"from": "human", "value": "how many men are sitting on the chair?\n<image>"}, {"from": "gpt", "value": "one man is sitting on the chair"}]
00000005355.jpg
5355_evjvqa
[{"from": "human", "value": "what is the woman in the yellow apron doing?\n<image>"}, {"from": "gpt", "value": "carrying a tray of food"}]
00000005357.jpg
5357_evjvqa
[{"from": "human", "value": "what color backpack does the girl in the white shirt have?\n<image>"}, {"from": "gpt", "value": "green backpack"}]
00000005357.jpg
5357_evjvqa
[{"from": "human", "value": "what color is the shawl of the girl with the green backpack?\n<image>"}, {"from": "gpt", "value": "red shawl"}]
00000005357.jpg
5357_evjvqa
[{"from": "human", "value": "where are people standing?\n<image>"}, {"from": "gpt", "value": "at the church"}]
00000005359.jpg
5359_evjvqa
[{"from": "human", "value": "what are people doing?\n<image>"}, {"from": "gpt", "value": "shopping at the mall"}]
00000005359.jpg
5359_evjvqa
[{"from": "human", "value": "where are people shopping?\n<image>"}, {"from": "gpt", "value": "at the mall"}]
00000005359.jpg
5359_evjvqa
[{"from": "human", "value": "what is the boy in the yellow shirt doing?\n<image>"}, {"from": "gpt", "value": "choosing fruit"}]
00000005360.jpg
5360_evjvqa
[{"from": "human", "value": "what is the woman in gray shirt and purple pants doing?\n<image>"}, {"from": "gpt", "value": "buying fish"}]
00000005361.jpg
5361_evjvqa
[{"from": "human", "value": "what is the main color of the design of the room?\n<image>"}, {"from": "gpt", "value": "white"}]
00000005361.jpg
5361_evjvqa
[{"from": "human", "value": "how many potted plants are placed in the room?\n<image>"}, {"from": "gpt", "value": "one potted plant is placed in the room"}]
00000005361.jpg
5361_evjvqa
[{"from": "human", "value": "what color is the ceiling?\n<image>"}, {"from": "gpt", "value": "the ceiling is white"}]
00000005364.jpg
5364_evjvqa
[{"from": "human", "value": "where is the girl sitting?\n<image>"}, {"from": "gpt", "value": "sitting on the boat"}]
00000005364.jpg
5364_evjvqa
[{"from": "human", "value": "what is the girl wearing?\n<image>"}, {"from": "gpt", "value": "the girl is wearing long sleeved outfit"}]
00000005364.jpg
5364_evjvqa
[{"from": "human", "value": "what color shirt is the girl wearing?\n<image>"}, {"from": "gpt", "value": "the girl is wearing a brown shirt"}]
00000005365.jpg
5365_evjvqa
[{"from": "human", "value": "what color slippers do the young people sitting at the cafe wear?\n<image>"}, {"from": "gpt", "value": "white slippers"}]
00000005365.jpg
5365_evjvqa
[{"from": "human", "value": "how many girls in white shirts are standing?\n<image>"}, {"from": "gpt", "value": "one girl in white shirt is standing"}]
00000005365.jpg
5365_evjvqa
[{"from": "human", "value": "how many young men are wearing cream yellow shirts?\n<image>"}, {"from": "gpt", "value": "one young man is wearing cream yellow shirt"}]
00000005366.jpg
5366_evjvqa
[{"from": "human", "value": "what market are people at?\n<image>"}, {"from": "gpt", "value": "food market"}]
00000005366.jpg
5366_evjvqa
[{"from": "human", "value": "what color are the lanterns hanging in the center of the road?\n<image>"}, {"from": "gpt", "value": "the lanterns hanging in the center of the road are red"}]
00000005366.jpg
5366_evjvqa
[{"from": "human", "value": "what color shirts do the food vendors wear?\n<image>"}, {"from": "gpt", "value": "white shirt"}]
00000005368.jpg
5368_evjvqa
[{"from": "human", "value": "how many men wear blue shirts?\n<image>"}, {"from": "gpt", "value": "one man wears blue shirt"}]
00000005368.jpg
5368_evjvqa
[{"from": "human", "value": "what color pants do all the men wear?\n<image>"}, {"from": "gpt", "value": "black pants"}]
00000005370.jpg
5370_evjvqa
[{"from": "human", "value": "how many motorbikes are parked in the shop?\n<image>"}, {"from": "gpt", "value": "four motorbikes are parked in the shop"}]
00000005370.jpg
5370_evjvqa
[{"from": "human", "value": "what color bag does the girl in the dress have?\n<image>"}, {"from": "gpt", "value": "pink bag"}]
00000005370.jpg
5370_evjvqa
[{"from": "human", "value": "what color helmet does the man in the blue shirt have?\n<image>"}, {"from": "gpt", "value": "blue helmet"}]
00000005373.jpg
5373_evjvqa
[{"from": "human", "value": "what is the girl in the white shirt and the black skirt doing?\n<image>"}, {"from": "gpt", "value": "presenting"}]
00000005373.jpg
5373_evjvqa
[{"from": "human", "value": "how many boys wear blue shirts?\n<image>"}, {"from": "gpt", "value": "one man wear blue shirt"}]
00000005373.jpg
5373_evjvqa
[{"from": "human", "value": "what are people doing?\n<image>"}, {"from": "gpt", "value": "listening to the girl in the white shirt presenting"}]
00000005374.jpg
5374_evjvqa
[{"from": "human", "value": "how many people are standing next to the blue motorbike?\n<image>"}, {"from": "gpt", "value": "three people are standing next to the blue motorbike"}]
00000005374.jpg
5374_evjvqa
[{"from": "human", "value": "what time of day is it?\n<image>"}, {"from": "gpt", "value": "late afternoon"}]
00000005383.jpg
5383_evjvqa
[{"from": "human", "value": "where is everyone?\n<image>"}, {"from": "gpt", "value": "at the mall"}]
00000005383.jpg
5383_evjvqa
[{"from": "human", "value": "what color are the trolleys?\n<image>"}, {"from": "gpt", "value": "black and yellow"}]
00000005386.jpg
5386_evjvqa
[{"from": "human", "value": "how many orange motorbikes are moving on the road?\n<image>"}, {"from": "gpt", "value": "one orange motorbike is moving on the road"}]
00000005388.jpg
5388_evjvqa
[{"from": "human", "value": "what color are the cups on the sink?\n<image>"}, {"from": "gpt", "value": "the cups on the sink are black"}]
00000005388.jpg
5388_evjvqa
[{"from": "human", "value": "what shape is the mirror on the wall?\n<image>"}, {"from": "gpt", "value": "the mirror on the wall is square"}]
00000005388.jpg
5388_evjvqa
[{"from": "human", "value": "how many towels are provided in the room?\n<image>"}, {"from": "gpt", "value": "one towel is provided in the room"}]
00000005389.jpg
5389_evjvqa
[{"from": "human", "value": "where did the woman just come back?\n<image>"}, {"from": "gpt", "value": "the woman just came back from buying food"}]
00000005389.jpg
5389_evjvqa
[{"from": "human", "value": "what is the militiamen doing?\n<image>"}, {"from": "gpt", "value": "the militiaman is standing at the checkpoint to control the quarantine"}]
00000005392.jpg
5392_evjvqa
[{"from": "human", "value": "how many dragon statues are sculpted?\n<image>"}, {"from": "gpt", "value": "two dragon statues are sculpted"}]
00000005392.jpg
5392_evjvqa
[{"from": "human", "value": "what color is the name of the temple painted?\n<image>"}, {"from": "gpt", "value": "red"}]
00000005398.jpg
5398_evjvqa
[{"from": "human", "value": "where are the two girls sitting?\n<image>"}, {"from": "gpt", "value": "the two girls are sitting on the bridge by the river"}]
00000005398.jpg
5398_evjvqa
[{"from": "human", "value": "what time of day is it?\n<image>"}, {"from": "gpt", "value": "it is sunset time"}]
00000005398.jpg
5398_evjvqa
[{"from": "human", "value": "how many girls are sitting on the bridge?\n<image>"}, {"from": "gpt", "value": "two girls are sitting on the bridge"}]
00000005399.jpg
5399_evjvqa
[{"from": "human", "value": "what time of day is the scene?\n<image>"}, {"from": "gpt", "value": "the scene is evening"}]
00000005399.jpg
5399_evjvqa
[{"from": "human", "value": "what color are the street lights?\n<image>"}, {"from": "gpt", "value": "the street lights are yellow"}]
00000005399.jpg
5399_evjvqa
[{"from": "human", "value": "what color light are the streets illuminated?\n<image>"}, {"from": "gpt", "value": "yellow"}]
00000007300.jpg
7300_evjvqa
[{"from": "human", "value": "what color are the umbrellas?\n<image>"}, {"from": "gpt", "value": "they are white. "}]
00000007300.jpg
7300_evjvqa
[{"from": "human", "value": "what color are the reclining chairs?\n<image>"}, {"from": "gpt", "value": "the reclining chairs are blue. "}]
00000007300.jpg
7300_evjvqa
[{"from": "human", "value": "what is the beach like?\n<image>"}, {"from": "gpt", "value": "the beach looks uncrowded."}]
00000007302.jpg
7302_evjvqa
[{"from": "human", "value": "what color is the car parked on the side of the road?\n<image>"}, {"from": "gpt", "value": "the car is white."}]
00000007302.jpg
7302_evjvqa
[{"from": "human", "value": "what is on the left of the woman in the white shirt?\n<image>"}, {"from": "gpt", "value": "on the left of the woman lies a clothing store. "}]
00000007302.jpg
7302_evjvqa
[{"from": "human", "value": "on which shoulder does the woman in the vest carry her handbag?\n<image>"}, {"from": "gpt", "value": "she carries it on her right shoulder."}]
00000007303.jpg
7303_evjvqa
[{"from": "human", "value": "how many lanterns are hung at the front of the temple?\n<image>"}, {"from": "gpt", "value": "there are two lanterns."}]
00000007303.jpg
7303_evjvqa
[{"from": "human", "value": "what are they doing in the temple?\n<image>"}, {"from": "gpt", "value": "they are praying."}]
00000007303.jpg
7303_evjvqa
[{"from": "human", "value": "where is the woman in the white pants standing?\n<image>"}, {"from": "gpt", "value": "she is standing at the temple yard. "}]
00000007304.jpg
7304_evjvqa
[{"from": "human", "value": "what color is the car?\n<image>"}, {"from": "gpt", "value": "the car is green."}]
00000007304.jpg
7304_evjvqa
[{"from": "human", "value": "how many bicycles are running ahead of the car?\n<image>"}, {"from": "gpt", "value": "there is one bicycle. "}]
00000007307.jpg
7307_evjvqa
[{"from": "human", "value": "what are on display in the museum?\n<image>"}, {"from": "gpt", "value": "the dinosaur and prehistoric animal specimens are on display in the museum."}]
00000007307.jpg
7307_evjvqa
[{"from": "human", "value": "where is the mammoth specimen situated?\n<image>"}, {"from": "gpt", "value": "it is situated to the left of the dinosaur specimen."}]
00000007307.jpg
7307_evjvqa
[{"from": "human", "value": "where is the globe situated?\n<image>"}, {"from": "gpt", "value": "it is situated in front of the dinosaur specimen. "}]
00000007308.jpg
7308_evjvqa
[{"from": "human", "value": "what is installed above each table?\n<image>"}, {"from": "gpt", "value": "above each table installed an exhaust hood. "}]
00000007308.jpg
7308_evjvqa
[{"from": "human", "value": "what is the logo of the food store?\n<image>"}, {"from": "gpt", "value": "the logo of the food store is the cow's head."}]
00000007311.jpg
7311_evjvqa
[{"from": "human", "value": "how many people are standing in the botanical garden?\n<image>"}, {"from": "gpt", "value": "there are three people standing in the botanical garden."}]
00000007311.jpg
7311_evjvqa
[{"from": "human", "value": "where is the woman in yellow standing?\n<image>"}, {"from": "gpt", "value": "she is standing behind the other two women. "}]
00000007311.jpg
7311_evjvqa
[{"from": "human", "value": "what color is the ao dai that the woman without glasses is wearing?\n<image>"}, {"from": "gpt", "value": "she is wearing a pink ao dai. "}]
00000007312.jpg
7312_evjvqa
[{"from": "human", "value": "how is the traffic on the street?\n<image>"}, {"from": "gpt", "value": "the street looks very crowded."}]
00000007314.jpg
7314_evjvqa
[{"from": "human", "value": "which hand is she using to hold the microphone with?\n<image>"}, {"from": "gpt", "value": "she is holding the microphone with her right hand."}]
00000007314.jpg
7314_evjvqa
[{"from": "human", "value": "what color is her shirt?\n<image>"}, {"from": "gpt", "value": "it is orange."}]
00000007314.jpg
7314_evjvqa
[{"from": "human", "value": "what is she holding in her left hand?\n<image>"}, {"from": "gpt", "value": "she is holding a conical hat. "}]
00000007315.jpg
7315_evjvqa
[{"from": "human", "value": "what color is her dress?\n<image>"}, {"from": "gpt", "value": "it is white. "}]
00000007315.jpg
7315_evjvqa
[{"from": "human", "value": "on which shoulder is she carrying the handbag?\n<image>"}, {"from": "gpt", "value": "she is carrying the handbag on her right shoulder."}]
00000007315.jpg
7315_evjvqa
[{"from": "human", "value": "to which direction is she facing?\n<image>"}, {"from": "gpt", "value": "she is facing west. "}]
00000007316.jpg
7316_evjvqa
[{"from": "human", "value": "what is she doing?\n<image>"}, {"from": "gpt", "value": "she is stacking the shelves with stocks. "}]
00000007316.jpg
7316_evjvqa
[{"from": "human", "value": "what color are the beverage cans that she is holding?\n<image>"}, {"from": "gpt", "value": "the beverage cans are silvery. "}]
00000007316.jpg
7316_evjvqa
[{"from": "human", "value": "what kind of shelves is she standing in front of?\n<image>"}, {"from": "gpt", "value": "she is standing in front of the soft drink shelves. "}]
00000007318.jpg
7318_evjvqa
[{"from": "human", "value": "how many people are standing by the well?\n<image>"}, {"from": "gpt", "value": "there is one person standing by the well."}]
00000007318.jpg
7318_evjvqa
[{"from": "human", "value": "what color is the shirt that the man standing by the well is wearing?\n<image>"}, {"from": "gpt", "value": "he is wearing a brown shirt."}]
00000007318.jpg
7318_evjvqa
[{"from": "human", "value": "what is the man in the brown shirt doing?\n<image>"}, {"from": "gpt", "value": "he is praying. "}]
00000007319.jpg
7319_evjvqa
[{"from": "human", "value": "how many ships are moving?\n<image>"}, {"from": "gpt", "value": "there are four ship moving."}]
00000007319.jpg
7319_evjvqa
[{"from": "human", "value": "towards which direction is the ship from the upper harbour is heading?\n<image>"}, {"from": "gpt", "value": "it is heading towards the harbour on the left. "}]
00000007322.jpg
7322_evjvqa
[{"from": "human", "value": "what color are the words on the board?\n<image>"}, {"from": "gpt", "value": "the words on the board are yellow."}]
00000007322.jpg
7322_evjvqa
[{"from": "human", "value": "what color is the locomotive?\n<image>"}, {"from": "gpt", "value": "the locomotive is red. "}]
00000007322.jpg
7322_evjvqa
[{"from": "human", "value": "how many motorbikes are running on the left of the train?\n<image>"}, {"from": "gpt", "value": "there is one motorbike running on the left of the train."}]
00000007323.jpg
7323_evjvqa
[{"from": "human", "value": "what is the girl wearing the black mask doing?\n<image>"}, {"from": "gpt", "value": "she is using her phone."}]
00000007323.jpg
7323_evjvqa
[{"from": "human", "value": "what color are her sneakers?\n<image>"}, {"from": "gpt", "value": "her sneakers are black."}]
00000007323.jpg
7323_evjvqa
[{"from": "human", "value": "what items are on sale on the rack to the right of the girl sitting?\n<image>"}, {"from": "gpt", "value": "the rack on the right of the girl sitting holds different types of hats that are on sale."}]