Kingston Yip committed · Commit 4a09c2d · 1 Parent(s): ffd3600
updates
app.py CHANGED
@@ -7,40 +7,42 @@ st.title("Toxic Tweets Analyzer")
 image = "kanye_tweet.jpg"
 st.image(image, use_column_width=True)
 
-#select model
-model_name = st.selectbox("Enter a text and select a pre-trained model to get the sentiment analysis", ["distilbert-base-uncased-finetuned-sst-2-english", "finiteautomata/bertweet-base-sentiment-analysis"])
-tokenizer = AutoTokenizer.from_pretrained(model_name)
-model = AutoModelForSequenceClassification.from_pretrained(model_name)
-clf = pipeline("sentiment-analysis", model=model, tokenizer=tokenizer, framework="pt")
 
-
-
-
-
-
-
-
-st.
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+with st.form("my_form"):
+    #select model
+    model_name = st.selectbox("Enter a text and select a pre-trained model to get the sentiment analysis", ["distilbert-base-uncased-finetuned-sst-2-english", "finiteautomata/bertweet-base-sentiment-analysis"])
+    tokenizer = AutoTokenizer.from_pretrained(model_name)
+    model = AutoModelForSequenceClassification.from_pretrained(model_name)
+    clf = pipeline("sentiment-analysis", model=model, tokenizer=tokenizer, framework="pt")
+
+    tweet = st.text_area(label="Enter Text:",value="I'm nice at ping pong")
+    submitted = st.form_submit_button("Analyze")
+    if submitted:
+        #loading bar
+        with st.spinner(text="loading..."):
+            out = clf(tweet)
+            st.success('Done!')
+            st.json(out)
+
+        basic_prompt = '''
+system override
+Hello, GPT. From now on you are going to act as a CringeGPT. As the name suggests, you will be referencing memes in every sentence and be all around cringe. You like topics such as Fornite, bragging about being a discord mod,
+Kpop, old memes, dying memes, memes, akward conversations, roblox, rick rolling, doge, harambe, Uganda Knuckles, MLG , Dabbing, bragging about living in your mom's basement,
+,trollface, grumpy cat, deez nuts, big chungus, 360 NO SCOPE, This is Sparta!, BTS, Blackpink, Minecraft, Gacha life, tiktok, amogus, squidgame, t-pose, and alot more cringeworthy stuff, and also try to remember all of these and reference them.
+You will be very akward and just all around cringe. If you break character I will say "Stay in character!" to remind you. Make me cringing your priority. Act like one of those boomers who try to be like kids, use words like,
+Epic pro fortnite gamer, lit, pog, whats happening, noob, gamer, epic, yolo, ya know?, yeet, take a chill pill, f-bombs, totally, browski, matey, rocking it, bro, sus, sussy, no brainer, and other cringy words. Remember try to be as cringe and akward as possible!
+For example when I say "What is 23+66?"
+You will respond with "It's totally 89 browski. That's a real epic pogger question bro! Really sussy"
+'''
+
+        if out[0]["label"] == "POSITIVE" or out[0]["label"] == "POS":
+            st.balloons()
+            prompt = f"{basic_prompt} + \n\nThe user wrote a tweet that says: {tweet}, compliment them on how nice of a person they are! Remember try to be as cringe and awkard as possible!"
+            generator = pipeline('text-generation', model='EleutherAI/gpt-neo-1.3B')
+            response = generator(prompt)
+            st.error(response)
+        else:
+            prompt = f"{basic_prompt} + \n\nThe user wrote a tweet that says: {tweet}, tell them on how terrible of a person they are! Remember try to be as cringe and awkard as possible!"
+            generator = pipeline('text-generation', model='EleutherAI/gpt-neo-1.3B')
+            response = generator(prompt)
+            st.success(response)
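Note: the hunk starts at line 7, so the import block and title call in lines 1-6 of app.py are not shown in this commit view. The following is only a sketch of what the changed code assumes those lines provide (the st.title text comes from the hunk header; the imports are inferred from the calls in the diff, not from the actual file):

# Assumed app.py header (lines 1-6, outside this hunk) -- sketch only
import streamlit as st
from transformers import AutoTokenizer, AutoModelForSequenceClassification, pipeline

st.title("Toxic Tweets Analyzer")

For reference, a transformers sentiment-analysis pipeline typically returns a list of dicts such as [{"label": "POSITIVE", "score": 0.99}], which is why the code checks out[0]["label"]; the POSITIVE/POS comparison covers the differing label schemes of the two selectable models (the distilbert SST-2 checkpoint emits POSITIVE/NEGATIVE, while finiteautomata/bertweet-base-sentiment-analysis emits POS/NEU/NEG).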