patpizio committed on
Commit cac961a
Parent: 6deae30

Update app.py

Files changed (1)
  1. app.py +26 -3
app.py CHANGED
@@ -1,8 +1,31 @@
+from transformers import AutoTokenizer, T5Tokenizer, T5ForConditionalGeneration, AutoModelForCausalLM
 import streamlit as st
 
 st.title('How does the LLM choose its words?')
 
-text = st.text_area('Enter some text!')
+model_checkpoint = "google/flan-t5-small"
 
-if text:
-    st.write(text)
+tokenizer = AutoTokenizer.from_pretrained(model_checkpoint)
+
+model = T5ForConditionalGeneration.from_pretrained(
+    model_checkpoint,
+    load_in_8bit=False,
+    device_map="auto"
+)
+
+instruction = st.text_area('Write an instruction:')
+
+prompts = [
+    f"""Below is an instruction that describes a task. Write a response that appropriately completes the request.
+### Instruction: {instruction}
+### Response:"""
+]
+
+inputs = tokenizer(
+    prompts[0],
+    return_tensors="pt",
+)
+# input_ids = inputs["input_ids"].to("cuda")
+
+if instruction:
+    st.write(tokenizer.decode(inputs["input_ids"][0]))
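
As committed, the app only echoes the decoded prompt back to the page; nothing is generated yet. The sketch below is not part of this commit. It shows one way the app could move toward the question in its title: pass the tokenized prompt to model.generate with per-step scores enabled and display the top candidate tokens at the first generation step. It assumes the model, tokenizer, and inputs variables defined in app.py above; the max_new_tokens and top-k values are arbitrary choices for illustration.

import torch  # relies on model, tokenizer and inputs from app.py above

# Generate a response, keeping the logits produced at each decoding step.
outputs = model.generate(
    input_ids=inputs["input_ids"],
    max_new_tokens=50,              # arbitrary cap for this sketch
    output_scores=True,             # keep per-step logits
    return_dict_in_generate=True,
)

# Show the decoded response.
st.write(tokenizer.decode(outputs.sequences[0], skip_special_tokens=True))

# Turn the first step's logits into probabilities and list the top candidates:
# these are the words the model was choosing between for its first token.
first_step_probs = torch.softmax(outputs.scores[0][0], dim=-1)
top_probs, top_ids = torch.topk(first_step_probs, k=5)
for prob, token_id in zip(top_probs.tolist(), top_ids.tolist()):
    st.write(f"{tokenizer.decode([token_id])!r}: {prob:.3f}")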