
Dataset Card for broad_twitter_corpus

Dataset Summary

This is the Broad Twitter Corpus (BTC), a dataset of tweets collected over stratified times, places, and social uses. The goal is to represent a broad range of activities, giving a dataset that is more representative of the language used in this hardest-to-process of social media formats. Further, the BTC is annotated for named entities.

See the paper, Broad Twitter Corpus: A Diverse Named Entity Recognition Resource (Derczynski et al., COLING 2016), for details.

Supported Tasks and Leaderboards

[Needs More Information]

Languages

English, from the UK, US, Australia, Canada, Ireland, and New Zealand (BCP-47: en).

Dataset Structure

Data Instances

| Feature               | Count   |
|-----------------------|---------|
| Documents             | 9,551   |
| Tokens                | 165,739 |
| Person entities       | 5,271   |
| Location entities     | 3,114   |
| Organization entities | 3,732   |
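
An example instance, taken from the corpus preview, shown in the JSON format in which the data is distributed (the id value here is illustrative; ids are short strings):

{
  "id": "13",
  "tokens": ["How", "did", "Dorothy", "Gale", "come", "back", "*", "younger", "*", "in", "Return", "to", "Oz", "?", "?"],
  "ner_tags": [0, 0, 1, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
}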

Data Fields

Each tweet contains an ID, a list of tokens, and a list of NER tags:

  • id: a string feature.
  • tokens: a list of strings.
  • ner_tags: a list of class IDs (ints) representing the NER class:
    0: O
    1: B-PER
    2: I-PER
    3: B-ORG
    4: I-ORG
    5: B-LOC
    6: I-LOC
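
As a minimal sketch of working with these fields, assuming the corpus is hosted on the Hugging Face Hub under the broad_twitter_corpus identifier, the integer tags can be mapped back to their string labels like this:

from datasets import load_dataset

# Assumed Hub identifier; adjust to wherever this card is hosted.
btc = load_dataset("broad_twitter_corpus")

# Index i in this list is the string label for NER class ID i,
# matching the mapping listed above.
LABELS = ["O", "B-PER", "I-PER", "B-ORG", "I-ORG", "B-LOC", "I-LOC"]

example = btc["train"][0]
for token, tag_id in zip(example["tokens"], example["ner_tags"]):
    print(f"{token}\t{LABELS[tag_id]}")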

Data Splits

| Section | Region     | Collection period | Description               | Annotators     | Tweet count |
|---------|------------|-------------------|---------------------------|----------------|-------------|
| A       | UK         | 2012.01           | General collection        | Expert         | 1000        |
| B       | UK         | 2012.01–02        | Non-directed tweets       | Expert         | 2000        |
| E       | Global     | 2014.07           | Related to MH17 disaster  | Crowd & expert | 200         |
| F       | Stratified | 2009–2014         | Twitterati                | Crowd & expert | 2000        |
| G       | Stratified | 2011–2014         | Mainstream news           | Crowd & expert | 2351        |
| H       | Non-UK     | 2014              | General collection        | Crowd & expert | 2000        |

The most varied parts of the BTC are sections F and H; each of the remaining four sections has some specific, readily identifiable bias. We therefore propose using half of section H for evaluation and leaving the other half in the training data. Section H should be partitioned in the order of the JSON-format lines. Note that the CoNLL-format data is readily reconstructible from the JSON format, which is the authoritative format from which the others are derived. A minimal sketch of this partition follows the split listing below.

Test: Section F

Development: Section H (the paper says "the second half of Section H", but the ordering could be ambiguous, so all of it goes in here; bonne chance)

Training: everything else
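
As a sketch of the section-H partition described above, assuming section H is distributed as one JSON object per line in a file named h.json (a hypothetical filename):

import json

# Hypothetical filename for section H in the line-delimited JSON format.
with open("h.json", encoding="utf-8") as f:
    section_h = [json.loads(line) for line in f]

# Partition in line order: the first half joins the training data,
# the second half is held out for evaluation.
midpoint = len(section_h) // 2
train_extra = section_h[:midpoint]
evaluation = section_h[midpoint:]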

Dataset Creation

Curation Rationale

[Needs More Information]

Source Data

Initial Data Collection and Normalization

[Needs More Information]

Who are the source language producers?

[Needs More Information]

Annotations

Annotation process

[Needs More Information]

Who are the annotators?

[Needs More Information]

Personal and Sensitive Information

[Needs More Information]

Considerations for Using the Data

Social Impact of Dataset

[Needs More Information]

Discussion of Biases

[Needs More Information]

Other Known Limitations

[Needs More Information]

Additional Information

Dataset Curators

[Needs More Information]

Licensing Information

Creative Commons Attribution 4.0 International (CC BY 4.0)

Citation Information

@inproceedings{derczynski2016broad,
  title={Broad twitter corpus: A diverse named entity recognition resource},
  author={Derczynski, Leon and Bontcheva, Kalina and Roberts, Ian},
  booktitle={Proceedings of COLING 2016, the 26th International Conference on Computational Linguistics: Technical Papers},
  pages={1169--1179},
  year={2016}
}

Contributions

Dataset added by its author, @leondz.
