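# Streamlit demo: show an image and generate a short distilgpt2 continuation of the user's prompt.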
import streamlit as st
import transformers

# Cache the model and tokenizer so they load once, not on every rerun
# (st.cache_resource replaces the deprecated st.cache(hash_funcs=...) workaround).
@st.cache_resource
def load_stuff():
    model = transformers.AutoModelForCausalLM.from_pretrained("distilgpt2")
    tokenizer = transformers.AutoTokenizer.from_pretrained("distilgpt2")
    return model, tokenizer

st.image("./img.jpg")
model, tokenizer = load_stuff()
user_inputed_text = st.text_input("Insert text")
if len(user_inputed_text) == 0:
    outputs_text = "no text provided. write some text, meatbag"
else:
    # Tokenize the prompt and sample up to 50 new tokens from distilgpt2.
    outputs = model.generate(
        **tokenizer([user_inputed_text], return_tensors='pt'),
        max_new_tokens=50, do_sample=True,
    )
    outputs_text = tokenizer.decode(outputs[0])

st.text_area(label='output', value=outputs_text)
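
# To try it locally (assuming this file is saved as app.py and streamlit,
# transformers, and torch are installed): streamlit run app.py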