Alex Tyshka committed on
Commit e5cadbb
1 Parent(s): ba02478

UI Improvements

Files changed (2)
  1. app.py +39 -17
  2. data.pkl +3 -0
app.py CHANGED
@@ -4,7 +4,7 @@ import numpy as np
 import gradio as gr
 from nltk import word_tokenize, sent_tokenize
 import nltk
-from scipy.stats import shapiro
+from scipy.stats import shapiro, percentileofscore
 from transformers import GPT2LMHeadModel, GPT2TokenizerFast
 
 nltk.download('punkt')
@@ -14,6 +14,8 @@ tokenizer: GPT2TokenizerFast = GPT2TokenizerFast.from_pretrained('gpt2-large')
 
 with open('model.pkl', 'rb') as f:
     lr_model = pickle.load(f)
+with open('data.pkl', 'rb') as f:
+    data = pickle.load(f)
 
 def get_perplexity(text: str):
     tokens = tokenizer(text, return_tensors='pt', truncation=True, return_offsets_mapping=True)
@@ -28,17 +30,10 @@ def get_perplexity(text: str):
     perplexities = torch.nn.functional.cross_entropy(shift_logits.view(-1, shift_logits.size(-1)), shift_labels.view(-1), reduce=False)
     output = []
     targets = targets.to('cpu')[0].tolist()
-    # tokens = tokenizer.convert_ids_to_tokens(targets)
     offsets = tokens.offset_mapping[0].tolist()
-    print(perplexities.to('cpu').tolist())
     perplexities = perplexities.to('cpu').numpy()
     perplexities = perplexities / np.max(perplexities)
     perplexities = perplexities.tolist()
-    print(perplexities)
-    # output.append((text[:offsets[0][1]], 0))
-    # for offset, p in zip(offsets[1:], perplexities):
-    #     output.append((text[offset[0]:offset[1]], p))
-    #     print(type(p))
     output.append((text[:tokens.word_to_chars(0)[1]], 0))
     for word_id, p in zip(tokens.word_ids()[1:], perplexities):
         if word_id == len(output):
@@ -53,17 +48,44 @@ def score_text(text):
     lengths = []
     for sentence in sent_tokenize(text):
         lengths.append(len(word_tokenize(sentence)))
-    scores = lr_model.predict_proba([[perplexity.item(), np.mean(lengths), np.std(lengths), shapiro(lengths).pvalue if len(lengths) > 2 else 0.5]])[0]
+    pp = perplexity.item()
+    length = np.mean(lengths)
+    std_lengths = np.std(lengths)
+    predictability = shapiro(lengths).pvalue if len(lengths) > 2 else 0.5
+    scores = lr_model.predict_proba([[pp, length, std_lengths, predictability]])[0]
+    pp_percentile = percentileofscore(data[:,0], pp)
+    length_percentile = percentileofscore(data[:,1], length)
+    std_percentile = percentileofscore(data[:,2], std_lengths)
+    predictability_percentile = percentileofscore(data[:,3], predictability)
+    print(f'Perplexity: {pp_percentile}%, Length: {length_percentile}%, Std: {std_percentile}%, Predictability: {predictability_percentile}%')
+    return {'Human': scores[0], 'AI': scores[1]}, {'Perplexity': pp_percentile / 100, 'Sentence Length': length_percentile / 100, 'Length Variation': std_percentile / 100, 'Length Normality': predictability_percentile / 100}, word_perplexities
 
-    return {'Human': scores[0], 'AI': scores[1]}, word_perplexities
 
-sample_text = """
-The Saturn V is a type of rocket that was developed by NASA in the 1960s to support the Apollo program, which aimed to land humans on the Moon.
-It remains the most powerful rocket ever built, and its five F-1 engines generated more than 7.5 million pounds of thrust at liftoff.
-The Saturn V was used for all of the Apollo missions to the Moon, as well as the launch of the Skylab space station.
-Despite its impressive capabilities, the Saturn V was only used for a brief period of time before being retired in 1973.
-Nevertheless, it remains a landmark achievement in the history of space exploration and a symbol of human ingenuity and determination."""
+sample_1 = """The Saturn V is a type of rocket that was developed by NASA in the 1960s to support the Apollo program, which aimed to land humans on the Moon. It remains the most powerful rocket ever built, and its five F-1 engines generated more than 7.5 million pounds of thrust at liftoff. The Saturn V was used for all of the Apollo missions to the Moon, as well as the launch of the Skylab space station. Despite its impressive capabilities, the Saturn V was only used for a brief period of time before being retired in 1973. Nevertheless, it remains a landmark achievement in the history of space exploration and a symbol of human ingenuity and determination."""
+sample_2 = """Saturn V[a] is a retired American super heavy-lift launch vehicle developed by NASA under the Apollo program for human exploration of the Moon. The rocket was human-rated, with three stages, and powered with liquid fuel. It was flown from 1967 to 1973. It was used for nine crewed flights to the Moon, and to launch Skylab, the first American space station.
+As of 2023, the Saturn V remains the only launch vehicle to carry humans beyond low Earth orbit (LEO). Saturn V holds records for the heaviest payload launched and largest payload capacity to low Earth orbit: 310,000 lb (140,000 kg), which included the third stage and unburned propellant needed to send the Apollo command and service module and Lunar Module to the Moon.
+The largest production model of the Saturn family of rockets, the Saturn V was designed under the direction of Wernher von Braun at the Marshall Space Flight Center in Huntsville, Alabama; the lead contractors were Boeing, North American Aviation, Douglas Aircraft Company, and IBM. A total of 15 flight-capable vehicles were built, plus three for ground testing. Thirteen were launched from Kennedy Space Center with no loss of crew or payload. A total of 24 astronauts were launched to the Moon from Apollo 8 (December 1968) to Apollo 17 (December 1972)."""
+sample_3 = """“The Signora had no business to do it,” said Miss Bartlett, “no business at all. She promised us south rooms with a view close together, instead of which here are north rooms, looking into a courtyard, and a long way apart. Oh, Lucy!”
+“And a Cockney, besides!” said Lucy, who had been further saddened by the Signora’s unexpected accent. “It might be London.” She looked at the two rows of English people who were sitting at the table; at the row of white bottles of water and red bottles of wine that ran between the English people; at the portraits of the late Queen and the late Poet Laureate that hung behind the English people, heavily framed; at the notice of the English church (Rev. Cuthbert Eager, M. A. Oxon.), that was the only other decoration of the wall. “Charlotte, don’t you feel, too, that we might be in London? I can hardly believe that all kinds of other things are just outside. I suppose it is one’s being so tired.”
+“This meat has surely been used for soup,” said Miss Bartlett, laying down her fork.
+“I want so to see the Arno. The rooms the Signora promised us in her letter would have looked over the Arno. The Signora had no business to do it at all. Oh, it is a shame!”
+“Any nook does for me,” Miss Bartlett continued; “but it does seem hard that you shouldn’t have a view.”
+Lucy felt that she had been selfish. “Charlotte, you mustn’t spoil me: of course, you must look over the Arno, too. I meant that. The first vacant room in the front—” “You must have it,” said Miss Bartlett, part of whose travelling expenses were paid by Lucy’s mother—a piece of generosity to which she made many a tactful allusion."""
+sample_4 = """Miss Bartlett looked at Lucy with a mixture of disapproval and concern. She had hoped that this trip to Italy would broaden Lucy’s horizons and introduce her to a world beyond their sheltered English existence. But it seemed that Lucy was not quite ready to embrace the differences that came with travel.
+“Don’t be absurd, Lucy,” Miss Bartlett said, “how could we be in London? Look outside and see the sunshine, the olive groves, and the mountains. This is Italy, and it is a completely different experience than what we are used to.”
+Lucy sighed and looked out of the window. Miss Bartlett was right, of course. The view was stunning, and the warm Italian breeze was a welcome change from the damp English weather. But the Signora’s deception had put a damper on their arrival, and Lucy couldn’t help feeling disappointed.
+Just then, a young man walked into the dining room and greeted the English guests with a friendly smile. He was tall and handsome, with dark hair and sparkling eyes. Lucy felt a flutter in her chest as he approached their table.
+“Buongiorno,” he said, “my name is George Emerson. I couldn’t help but notice that you were disappointed with your rooms. If you’d like, I could switch with you. My mother and I are in south rooms, and we’d be happy to take the north ones.”"""
 
-demo = gr.Interface(fn=score_text, inputs=[gr.Textbox(label="Text to score", lines=5, value=sample_text)], outputs=[gr.Label(), gr.HighlightedText()] )
+description = """This Space can be used to measure the likelihood of a text being generated by an LLM like ChatGPT.
+In general, human written text has higher perplexity, sentence length, and length variation than AI generated text, with lower length normality."""
+
+
+demo = gr.Interface(fn=score_text,
+                    inputs=[gr.Textbox(label="Text to score", lines=5)],
+                    outputs=[gr.Label(label="Result"), gr.Label(label="Feature Scores (higher for humans)", show_label=False), gr.HighlightedText(label="Perplexities")],
+                    title="LLM Text Detector",
+                    description=description,
+                    examples=[[sample_1], [sample_2], [sample_3], [sample_4]])
 
 demo.launch()
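
Note on the new "Feature Scores" output: each of the four features (perplexity, mean sentence length, standard deviation of sentence lengths, and the Shapiro-Wilk p-value reported as "Length Normality") is turned into a 0-1 score by ranking it against the matching column of the reference matrix loaded from data.pkl with scipy.stats.percentileofscore. A minimal sketch of that step, using a made-up stand-in for data.pkl and hypothetical feature values (nothing below is taken from the repo's actual data):

import numpy as np
from scipy.stats import percentileofscore

# Stand-in for data.pkl: one row per reference text, columns assumed to be
# [perplexity, mean sentence length, std of sentence lengths, Shapiro-Wilk p-value].
reference = np.array([
    [3.1, 18.0,  4.0, 0.40],
    [4.2, 22.5,  9.5, 0.75],
    [2.8, 15.0,  2.1, 0.10],
    [5.0, 27.0, 12.0, 0.90],
])

# Hypothetical feature values computed for one input text.
features = [3.9, 21.0, 8.0, 0.60]
names = ['Perplexity', 'Sentence Length', 'Length Variation', 'Length Normality']

# Same idea as the new score_text code: each score is the fraction of reference
# rows whose feature value falls at or below the input's value.
feature_scores = {name: percentileofscore(reference[:, i], value) / 100
                  for i, (name, value) in enumerate(zip(names, features))}
print(feature_scores)
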
data.pkl ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8723d3435b0bd5e1042c6713948132342b1420d33bbe60633589a3ee501eaa81
+size 800162
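
data.pkl itself is stored through Git LFS, so only the pointer file appears in the diff. From the way app.py indexes it (data[:,0] through data[:,3]), it is presumably a pickled 2-D array with one row per reference text and the four feature columns described above; the column meanings are inferred from the code, not documented in the repo. A hypothetical sanity check after `git lfs pull`:

import pickle
import numpy as np

with open('data.pkl', 'rb') as f:
    data = pickle.load(f)

data = np.asarray(data)
# app.py's percentileofscore calls assume a (n_reference_texts, 4) layout.
assert data.ndim == 2 and data.shape[1] == 4, data.shape
print(f'{data.shape[0]} reference rows; columns assumed to be: perplexity, mean length, length std, normality p-value')
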