import streamlit as st
from transformers import pipeline
# from PIL import Image  # only needed if the header image below is re-enabled

# T5-base checkpoint fine-tuned for grammar correction on the JFLEG dataset,
# pulled from the Hugging Face Hub on first run.
model_checkpoint = "Modfiededition/t5-base-fine-tuned-on-jfleg"

# Cache the loaded pipeline so the model is loaded once per session instead of
# on every Streamlit rerun. st.cache_resource is the current API for caching
# unserializable objects such as models (st.cache_data does not accept
# allow_output_mutation; that argument belonged to the deprecated st.cache).
@st.cache_resource
def load_model():
    return pipeline("text2text-generation", model=model_checkpoint)

model = load_model()
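# Note: the text2text-generation pipeline returns a list of dicts such as
# [{"generated_text": "..."}], which is why the code below reads
# result[0]["generated_text"].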

# UI: title and description
st.title("Writing Assistant for you 🤖")
st.markdown(
    "This writing assistant detects and corrects grammatical mistakes for you! "
    "It uses a **T5-base model ✍️** fine-tuned on the JFLEG dataset."
)
#image = Image.open('new_grammar.jpg')
#st.image(image, caption='Image Credit: https://abrc.org.au/wp-content/uploads/2020/12/Grammar-checker.jpg')
st.subheader("Some examples: ")
example_1 = st.button("I am write on AI")
example_2 = st.button("This sentence has, bads grammar mistake!")

textbox = st.text_area('Write your text in this box:', '',height=100,  max_chars=500 )

button = st.button('Detect grammar mistakes:')

# Output
st.subheader("Corrected sentence:")

def correct_and_show(text):
    """Run the grammar-correction pipeline on `text` and display the result."""
    with st.spinner("In progress..."):
        output_text = model(text)[0]["generated_text"] if text else " "
    st.markdown("## " + output_text)

if example_1:
    correct_and_show("I am write on AI")
if example_2:
    correct_and_show("This sentence has, bads grammar mistake!")
if button:
    correct_and_show(textbox)