Arjav committed on
Commit
f01dcf0
1 Parent(s): 3ca5359

Upload 2 files

Browse files
Files changed (2) hide show
  1. app.py +1 -0
  2. requirements.txt +1 -0
app.py ADDED
@@ -0,0 +1 @@
 
0
  tokenizer = PegasusTokenizer.from_pretrained('nsi319/legal-pegasus')
1
  model = PegasusForConditionalGeneration.from_pretrained(
2
  "arjav/TOS-Pegasus")
3
  input_tokenized = tokenizer.encode(
4
  Terms, return_tensors='pt', max_length=1024, truncation=True)
5
  summary_ids = model.generate(input_tokenized,
6
  num_beams=9,
7
  no_repeat_ngram_size=3,
8
  length_penalty=2.0,
9
  min_length=50,
10
  max_length=150,
11
  early_stopping=True)
12
  summary = [tokenizer.decode(g, skip_special_tokens=True,
13
  clean_up_tokenization_spaces=False) for g in summary_ids][0]
14
  return summary
15
  inputs=gr.Textbox(
16
  label="Terms of Service", lines=2, placeholder="Enter Terms of Service"),
17
  outputs=gr.Textbox(label="Summary"),
18
  description=description,
19
  title=title,
20
  examples=[['account termination policy youtube will terminate a user s access to the service if under appropriate circumstances the user is determined to be a repeat infringer. youtube reserves the right to decide whether content violates these terms of service for reasons other than copyright infringement such as but not limited to pornography obscenity or excessive length. youtube may at any time without prior notice and in its sole discretion remove such content and or terminate a user s account for submitting such material in violation of these terms of service.']],
21
  allow_flagging='never'
22
  )
 
1
+ import gradio as gr
2
  tokenizer = PegasusTokenizer.from_pretrained('nsi319/legal-pegasus')
3
  model = PegasusForConditionalGeneration.from_pretrained(
4
  "arjav/TOS-Pegasus")
5
  input_tokenized = tokenizer.encode(
6
  Terms, return_tensors='pt', max_length=1024, truncation=True)
7
  summary_ids = model.generate(input_tokenized,
8
  num_beams=9,
9
  no_repeat_ngram_size=3,
10
  length_penalty=2.0,
11
  min_length=50,
12
  max_length=150,
13
  early_stopping=True)
14
  summary = [tokenizer.decode(g, skip_special_tokens=True,
15
  clean_up_tokenization_spaces=False) for g in summary_ids][0]
16
  return summary
17
  inputs=gr.Textbox(
18
  label="Terms of Service", lines=2, placeholder="Enter Terms of Service"),
19
  outputs=gr.Textbox(label="Summary"),
20
  description=description,
21
  title=title,
22
  examples=[['account termination policy youtube will terminate a user s access to the service if under appropriate circumstances the user is determined to be a repeat infringer. youtube reserves the right to decide whether content violates these terms of service for reasons other than copyright infringement such as but not limited to pornography obscenity or excessive length. youtube may at any time without prior notice and in its sole discretion remove such content and or terminate a user s account for submitting such material in violation of these terms of service.']],
23
  allow_flagging='never'
24
  )
requirements.txt ADDED
@@ -0,0 +1 @@
 
 
1
+ streamlit