Schema (three string columns per record):
image: string (length 15)
id: string (length 8–12)
conversations: string (length 79–268)
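Each record below is a triple of image filename, record id, and a conversations field: a JSON list of alternating human/gpt turns, where every human turn carries an `<image>` placeholder. Below is a minimal parsing sketch in Python, assuming the records have been exported as JSON Lines with these three columns; the filename `evjvqa_records.jsonl` and the helper name `load_records` are hypothetical.

```python
import json

def load_records(path="evjvqa_records.jsonl"):
    """Yield (image, id, turns) triples from a JSON Lines export (hypothetical layout)."""
    with open(path, encoding="utf-8") as f:
        for line in f:
            record = json.loads(line)
            turns = record["conversations"]
            # The schema reports "conversations" as a string column,
            # so decode it if it arrives JSON-encoded.
            if isinstance(turns, str):
                turns = json.loads(turns)
            yield record["image"], record["id"], turns

# Usage: pair each human question with the gpt answer that follows it.
for image, rec_id, turns in load_records():
    for q, a in zip(turns[::2], turns[1::2]):
        question = q["value"].replace("\n<image>", "").strip()
        print(f"{image} [{rec_id}] {question} -> {a['value']}")
```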
00000001408.jpg
1408_evjvqa
[{"from": "human", "value": "what items are sold on the back of the person wearing a black overcoat?\n<image>"}, {"from": "gpt", "value": "cheese"}]
00000001408.jpg
1408_evjvqa
[{"from": "human", "value": "which stall is the woman standing?\n<image>"}, {"from": "gpt", "value": "cheese stall"}]
00000001409.jpg
1409_evjvqa
[{"from": "human", "value": "how many people are going into the supermarket?\n<image>"}, {"from": "gpt", "value": "just one"}]
00000001409.jpg
1409_evjvqa
[{"from": "human", "value": "how many security guards are standing outside the supermarket?\n<image>"}, {"from": "gpt", "value": "only one"}]
00000001411.jpg
1411_evjvqa
[{"from": "human", "value": "what is the woman in green doing?\n<image>"}, {"from": "gpt", "value": "buying vegetable"}]
00000001411.jpg
1411_evjvqa
[{"from": "human", "value": "what is the woman in red doing?\n<image>"}, {"from": "gpt", "value": "buying meat"}]
00000001411.jpg
1411_evjvqa
[{"from": "human", "value": "how many women are going to the market?\n<image>"}, {"from": "gpt", "value": "two"}]
00000001411.jpg
1411_evjvqa
[{"from": "human", "value": "which counter is the woman in the green shirt walking near?\n<image>"}, {"from": "gpt", "value": "vegetable"}]
00000001411.jpg
1411_evjvqa
[{"from": "human", "value": "what is the woman in the green shirt planning to buy?\n<image>"}, {"from": "gpt", "value": "vegetable"}]
00000001411.jpg
1411_evjvqa
[{"from": "human", "value": "what is the woman in red waiting to buy?\n<image>"}, {"from": "gpt", "value": "meat"}]
00000001411.jpg
1411_evjvqa
[{"from": "human", "value": "what stall is the woman in red standing at?\n<image>"}, {"from": "gpt", "value": "the meat stall"}]
00000001413.jpg
1413_evjvqa
[{"from": "human", "value": "how are the two girls dressed?\n<image>"}, {"from": "gpt", "value": "white ao dai"}]
00000001413.jpg
1413_evjvqa
[{"from": "human", "value": "how many little girls are standing outside in the yard?\n<image>"}, {"from": "gpt", "value": "two"}]
00000001413.jpg
1413_evjvqa
[{"from": "human", "value": "what are the two little girls holding in their hands?\n<image>"}, {"from": "gpt", "value": "the one is holding a flower, the one is carrying a basket"}]
00000001415.jpg
1415_evjvqa
[{"from": "human", "value": "how many people did the young man go hiking with?\n<image>"}, {"from": "gpt", "value": "nobody"}]
00000001419.jpg
1419_evjvqa
[{"from": "human", "value": "where is the girl sitting?\n<image>"}, {"from": "gpt", "value": "near the railway"}]
00000001419.jpg
1419_evjvqa
[{"from": "human", "value": "how many people are the girl sitting with?\n<image>"}, {"from": "gpt", "value": "nobody"}]
00000001420.jpg
1420_evjvqa
[{"from": "human", "value": "what is the man in green shirt doing?\n<image>"}, {"from": "gpt", "value": "he is carrying boxes into the store"}]
00000001420.jpg
1420_evjvqa
[{"from": "human", "value": "what is the woman in orange helmet doing?\n<image>"}, {"from": "gpt", "value": "buying lychee"}]
00000001420.jpg
1420_evjvqa
[{"from": "human", "value": "how many people are buying lychee?\n<image>"}, {"from": "gpt", "value": "three"}]
00000001420.jpg
1420_evjvqa
[{"from": "human", "value": "what is the woman in grey dress holding on her hand?\n<image>"}, {"from": "gpt", "value": "the bag of lychee"}]
00000001422.jpg
1422_evjvqa
[{"from": "human", "value": "what is the store on the both of side of the street selling?\n<image>"}, {"from": "gpt", "value": "food"}]
00000001425.jpg
1425_evjvqa
[{"from": "human", "value": "what products does this store sell?\n<image>"}, {"from": "gpt", "value": "clothes"}]
00000001425.jpg
1425_evjvqa
[{"from": "human", "value": "what is the store?\n<image>"}, {"from": "gpt", "value": "clothing store"}]
00000001425.jpg
1425_evjvqa
[{"from": "human", "value": "what items are being hung up for sale?\n<image>"}, {"from": "gpt", "value": "clothes"}]
00000001427.jpg
1427_evjvqa
[{"from": "human", "value": "where is the boy standing?\n<image>"}, {"from": "gpt", "value": "the toy store"}]
00000001427.jpg
1427_evjvqa
[{"from": "human", "value": "who is the boy in the toy store with?\n<image>"}, {"from": "gpt", "value": "with the girl"}]
00000001427.jpg
1427_evjvqa
[{"from": "human", "value": "where is the girl in pink standing?\n<image>"}, {"from": "gpt", "value": "in the toy store"}]
00000001427.jpg
1427_evjvqa
[{"from": "human", "value": "who is the girl in pink in the toy store with?\n<image>"}, {"from": "gpt", "value": "the boy"}]
00000001427.jpg
1427_evjvqa
[{"from": "human", "value": "how many people are standing in the toy store?\n<image>"}, {"from": "gpt", "value": "two"}]
00000001427.jpg
1427_evjvqa
[{"from": "human", "value": "where are the two children standing?\n<image>"}, {"from": "gpt", "value": "in the toy store"}]
00000001428.jpg
1428_evjvqa
[{"from": "human", "value": "what does this market mainly sell?\n<image>"}, {"from": "gpt", "value": "raw fish"}]
00000001428.jpg
1428_evjvqa
[{"from": "human", "value": "where are the women in conical hats going?\n<image>"}, {"from": "gpt", "value": "buying fish"}]
00000001428.jpg
1428_evjvqa
[{"from": "human", "value": "where do the fishmongers sit?\n<image>"}, {"from": "gpt", "value": "on the both side of the path"}]
00000001428.jpg
1428_evjvqa
[{"from": "human", "value": "what do people sitting on the both side of the path sell mainly?\n<image>"}, {"from": "gpt", "value": "raw fish"}]
00000001429.jpg
1429_evjvqa
[{"from": "human", "value": "where is the girl walking?\n<image>"}, {"from": "gpt", "value": "in a narrow alley"}]
00000001430.jpg
1430_evjvqa
[{"from": "human", "value": "how many the cyclos on the road?\n<image>"}, {"from": "gpt", "value": "four"}]
00000001433.jpg
1433_evjvqa
[{"from": "human", "value": "which stall is the women shopping at?\n<image>"}, {"from": "gpt", "value": "fruit stall"}]
00000001433.jpg
1433_evjvqa
[{"from": "human", "value": "what kinds of fruit do the woman in the pink sell mainly?\n<image>"}, {"from": "gpt", "value": "there are oranges and tangerines"}]
00000001433.jpg
1433_evjvqa
[{"from": "human", "value": "what store is the woman in the green shirt going to buy at?\n<image>"}, {"from": "gpt", "value": "oranges and tangerines"}]
00000001436.jpg
1436_evjvqa
[{"from": "human", "value": "where is the supermarket staff standing?\n<image>"}, {"from": "gpt", "value": "fruit stall"}]
00000001436.jpg
1436_evjvqa
[{"from": "human", "value": "what color shirt do the supermarket staff wear?\n<image>"}, {"from": "gpt", "value": "green"}]
00000001436.jpg
1436_evjvqa
[{"from": "human", "value": "how many people are standing near the fruit stall?\n<image>"}, {"from": "gpt", "value": "three"}]
00000001436.jpg
1436_evjvqa
[{"from": "human", "value": "what is the supermarket staff doing?\n<image>"}, {"from": "gpt", "value": "she is arranging the bitter gourd counter"}]
00000001437.jpg
1437_evjvqa
[{"from": "human", "value": "which stall is these people shopping at?\n<image>"}, {"from": "gpt", "value": "vegetable"}]
00000001437.jpg
1437_evjvqa
[{"from": "human", "value": "which product is the woman in pink choosing?\n<image>"}, {"from": "gpt", "value": "tomato"}]
00000001438.jpg
1438_evjvqa
[{"from": "human", "value": "what is the man doing?\n<image>"}, {"from": "gpt", "value": "buying milk"}]
00000001438.jpg
1438_evjvqa
[{"from": "human", "value": "how many people are coming to buy milk?\n<image>"}, {"from": "gpt", "value": "just one"}]
00000001438.jpg
1438_evjvqa
[{"from": "human", "value": "which stall is the man standing?\n<image>"}, {"from": "gpt", "value": "milk stall"}]
00000001440.jpg
1440_evjvqa
[{"from": "human", "value": "where is the girl in white standing?\n<image>"}, {"from": "gpt", "value": "in the museum"}]
00000001440.jpg
1440_evjvqa
[{"from": "human", "value": "where is the boy in black standing?\n<image>"}, {"from": "gpt", "value": "in the museum"}]
00000001440.jpg
1440_evjvqa
[{"from": "human", "value": "how many people are visiting the museum?\n<image>"}, {"from": "gpt", "value": "four"}]
00000001440.jpg
1440_evjvqa
[{"from": "human", "value": "how many statues are placed in the center of the exhibition of statues?\n<image>"}, {"from": "gpt", "value": "two"}]
00000001441.jpg
1441_evjvqa
[{"from": "human", "value": "how many people are choosing jelly ?\n<image>"}, {"from": "gpt", "value": "only one"}]
00000001442.jpg
1442_evjvqa
[{"from": "human", "value": "how many transport are riding on the road?\n<image>"}, {"from": "gpt", "value": "two"}]
00000001442.jpg
1442_evjvqa
[{"from": "human", "value": "how many motorbikes are riding on the road?\n<image>"}, {"from": "gpt", "value": "there are no motorbikes"}]
00000001444.jpg
1444_evjvqa
[{"from": "human", "value": "what is the man standing on the stairs doing?\n<image>"}, {"from": "gpt", "value": "repairing the signboard"}]
00000001444.jpg
1444_evjvqa
[{"from": "human", "value": "how many people are holding the stairs?\n<image>"}, {"from": "gpt", "value": "just one"}]
00000001444.jpg
1444_evjvqa
[{"from": "human", "value": "how many people are repairing signboard?\n<image>"}, {"from": "gpt", "value": "four"}]
00000001447.jpg
1447_evjvqa
[{"from": "human", "value": "which stall is flower vase displayed?\n<image>"}, {"from": "gpt", "value": "candy stall"}]
00000001447.jpg
1447_evjvqa
[{"from": "human", "value": "which stall is the woman with the stroller and the helmet standing?\n<image>"}, {"from": "gpt", "value": "candy stall "}]
00000001447.jpg
1447_evjvqa
[{"from": "human", "value": "what is the woman in pink hat doing?\n<image>"}, {"from": "gpt", "value": "she is choosing candy"}]
00000001449.jpg
1449_evjvqa
[{"from": "human", "value": "who is standing with the little girl?\n<image>"}, {"from": "gpt", "value": "the woman in pink shirt"}]
00000001450.jpg
1450_evjvqa
[{"from": "human", "value": "where is the children visiting?\n<image>"}, {"from": "gpt", "value": "the skeletons museum"}]
00000001450.jpg
1450_evjvqa
[{"from": "human", "value": "how many people are standing to guide the children?\n<image>"}, {"from": "gpt", "value": "just one"}]
00000001450.jpg
1450_evjvqa
[{"from": "human", "value": "what objects are displayed in the museum ?\n<image>"}, {"from": "gpt", "value": "the skeletons"}]
00000001451.jpg
1451_evjvqa
[{"from": "human", "value": "where are these people traveling?\n<image>"}, {"from": "gpt", "value": "on the ocean"}]
00000001451.jpg
1451_evjvqa
[{"from": "human", "value": "how many people are using phones on the boat?\n<image>"}, {"from": "gpt", "value": "three"}]
00000001451.jpg
1451_evjvqa
[{"from": "human", "value": "how many people are using their phones to take pictures on the boat?\n<image>"}, {"from": "gpt", "value": "just one"}]
00000001451.jpg
1451_evjvqa
[{"from": "human", "value": "how many people are on the boat wearing hats?\n<image>"}, {"from": "gpt", "value": "three"}]
00000001456.jpg
1456_evjvqa
[{"from": "human", "value": "what is the woman standing at the entrance doing?\n<image>"}, {"from": "gpt", "value": "she is cooking broth"}]
00000001457.jpg
1457_evjvqa
[{"from": "human", "value": "where do these people celebrate?\n<image>"}, {"from": "gpt", "value": "around the lake"}]
00000001457.jpg
1457_evjvqa
[{"from": "human", "value": "what events are these people participating in?\n<image>"}, {"from": "gpt", "value": "the festival"}]
00000001457.jpg
1457_evjvqa
[{"from": "human", "value": "where is this festival held?\n<image>"}, {"from": "gpt", "value": "around the lake"}]
00000001458.jpg
1458_evjvqa
[{"from": "human", "value": "what is the woman in black doing?\n<image>"}, {"from": "gpt", "value": "buying fish sauce "}]
00000001458.jpg
1458_evjvqa
[{"from": "human", "value": "which stall is the woman in black standing?\n<image>"}, {"from": "gpt", "value": "fish sauce stall"}]
00000001458.jpg
1458_evjvqa
[{"from": "human", "value": "how many people are standing to buy at fish sauce stall?\n<image>"}, {"from": "gpt", "value": "only one"}]
00000001458.jpg
1458_evjvqa
[{"from": "human", "value": "how many people are standing at fish sauce stall?\n<image>"}, {"from": "gpt", "value": "two"}]
00000001460.jpg
1460_evjvqa
[{"from": "human", "value": "how many cyclos are running at the roundabout?\n<image>"}, {"from": "gpt", "value": "three"}]
00000001460.jpg
1460_evjvqa
[{"from": "human", "value": "how many flags are on the top of the house?\n<image>"}, {"from": "gpt", "value": "only one"}]
00000001460.jpg
1460_evjvqa
[{"from": "human", "value": "what color shirt do people riding cyclo wear?\n<image>"}, {"from": "gpt", "value": "blue"}]
00000001466.jpg
1466_evjvqa
[{"from": "human", "value": "what are the students doing?\n<image>"}, {"from": "gpt", "value": "offering of incense"}]
00000001466.jpg
1466_evjvqa
[{"from": "human", "value": "where are the students going?\n<image>"}, {"from": "gpt", "value": "they are visiting the historical monument"}]
00000001470.jpg
1470_evjvqa
[{"from": "human", "value": "how many girls are playing volleyball on the beach?\n<image>"}, {"from": "gpt", "value": "four"}]
00000001470.jpg
1470_evjvqa
[{"from": "human", "value": "what colors are the two teams wearing?\n<image>"}, {"from": "gpt", "value": "violet and blue"}]
00000001470.jpg
1470_evjvqa
[{"from": "human", "value": "what sport are the four women playing?\n<image>"}, {"from": "gpt", "value": "volleyball"}]
00000001472.jpg
1472_evjvqa
[{"from": "human", "value": "how many groups of people are walking on the street?\n<image>"}, {"from": "gpt", "value": "two"}]
00000001474.jpg
1474_evjvqa
[{"from": "human", "value": "where is the girl traveling?\n<image>"}, {"from": "gpt", "value": "on the ocean"}]
00000001474.jpg
1474_evjvqa
[{"from": "human", "value": "what color dress is the girl sitting on the rock wearing?\n<image>"}, {"from": "gpt", "value": "white"}]
00000001474.jpg
1474_evjvqa
[{"from": "human", "value": "how many people are traveling on the ocean with the girl?\n<image>"}, {"from": "gpt", "value": "alone"}]
00000001475.jpg
1475_evjvqa
[{"from": "human", "value": "how many people are standing at the book stall?\n<image>"}, {"from": "gpt", "value": "three"}]
00000001475.jpg
1475_evjvqa
[{"from": "human", "value": "what are two young girls doing?\n<image>"}, {"from": "gpt", "value": "they are reading book"}]
00000001475.jpg
1475_evjvqa
[{"from": "human", "value": "how many people did the woman in the blue overcoat go to book shopping with?\n<image>"}, {"from": "gpt", "value": "two"}]
00000001476.jpg
1476_evjvqa
[{"from": "human", "value": "where is the woman in white standing?\n<image>"}, {"from": "gpt", "value": "the fruit stall"}]
00000001476.jpg
1476_evjvqa
[{"from": "human", "value": "what is the woman in white doing?\n<image>"}, {"from": "gpt", "value": "she is on the phone"}]
00000001476.jpg
1476_evjvqa
[{"from": "human", "value": "how many people is the woman standing at the fruit stall with?\n<image>"}, {"from": "gpt", "value": "nobody"}]
00000001476.jpg
1476_evjvqa
[{"from": "human", "value": "how many people are coming to buy fruit?\n<image>"}, {"from": "gpt", "value": "just one"}]
00000001477.jpg
1477_evjvqa
[{"from": "human", "value": "where is this tower located?\n<image>"}, {"from": "gpt", "value": "in the middle of the lake"}]
00000001477.jpg
1477_evjvqa
[{"from": "human", "value": "what is around this tower?\n<image>"}, {"from": "gpt", "value": "water"}]
00000001477.jpg
1477_evjvqa
[{"from": "human", "value": "where is this tower built?\n<image>"}, {"from": "gpt", "value": "in the middle of the lake"}]