BeardedMonster committed on
Commit
d16eece
·
verified ·
1 Parent(s): ca70f03

update app.py

Browse files
Files changed (1) hide show
  1. app.py +36 -14
app.py CHANGED
@@ -16,24 +16,45 @@ model = AutoModelForCausalLM.from_pretrained(repo_name, trust_remote_code=True).
16
  # Add sidebar with instructions
17
  st.sidebar.title("Instructions: How to use")
18
  st.sidebar.write("""
19
- 1. Write something in the text area (a prompt or random text) or use the dropdown menu to select predefined text.
20
- 2. Select a task from the **task dropdown menu** below. **This is very important as it ensures the model responds accordingly.**
 
21
  3. If a dropdown menu pops up for a Nigerian language, **select the target Nigerian language.**
22
- 4. Click Generate to get a response below the text area.\n
23
  **Note: Model's performance when given prompts varies due to model size and training data distribution.**\n
24
  5. Lastly, you can play with some of the generation parameters below to improve performance.
25
  """)
26
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
27
  # Define generation configuration
28
- max_length = st.sidebar.slider("Max Length", min_value=10, max_value=500, value=100)
29
- max_new_tokens = st.sidebar.slider("Max tokens", min_value=30, max_value=768, value=50)
30
- num_beams = st.sidebar.slider("Number of Beams", min_value=1, max_value=10, value=5)
31
- temperature = st.sidebar.slider("Temperature", min_value=0.1, max_value=2.0, value=0.9)
32
- top_k = st.sidebar.slider("Top-K", min_value=1, max_value=100, value=50)
33
- top_p = st.sidebar.slider("Top-P", min_value=0.1, max_value=1.0, value=0.95)
34
- repetition_penalty = st.sidebar.slider("Repetition Penalty", min_value=1.0, max_value=10.0, value=2.0)
35
- length_penalty = st.sidebar.slider("Length Penalty", min_value=0.1, max_value=10.0, value=1.7)
36
- # early_stopping = st.sidebar.selectbox("Early Stopping", [True, False], index=0)
37
 
38
  generation_config = {
39
  "max_length": max_length,
@@ -75,6 +96,7 @@ async def generate_from_api(user_input, generation_config):
75
 
76
  # Sample texts
77
  sample_texts = {
 
78
  "Tell me a story in pidgin": "Tell me a story in pidgin",
79
  "Oma Ede, Mi ji ogede...": "Oma Ede, Mi ji ogede mi a foroma orhorho edha meji ri eka. ",
80
  "who are you?": "who are you?",
@@ -139,8 +161,8 @@ def wrap_text(text, task_value):
139
  return task_value.format(text)
140
 
141
 
142
- # Text input
143
- user_input = st.text_area("Enter text below **(please, first read the instructions on how to use in the side bar for better experience)**: ", sample_texts[sample_text])
144
  user_input = instruction_wrap.get(sample_texts.get(user_input, user_input), user_input)
145
 
146
  if st.button("Generate"):
 
16
  # Add sidebar with instructions
17
  st.sidebar.title("Instructions: How to use")
18
  st.sidebar.write("""
19
+ 1. Write something in the text area (a prompt or random text) or use the dropdown menu to select predefined sample text.
20
+ 2. Select a task from the **task dropdown menu** below if you are providing your own text. **This is very important as it ensures the model responds accordingly.**
21
+ 3. If you provide your own text, please do not select any predefined sample text from the dropdown menu.
22
  4. If a dropdown menu pops up for a Nigerian language, **select the target Nigerian language.**
23
+ 5. Click the Generate button to get a response below the text area.\n
24
  **Note: Model's performance when given prompts varies due to model size and training data distribution.**\n
25
  6. Lastly, you can play with some of the generation parameters below to improve performance.
26
  """)
27
 
28
+ max_length = 100
29
+ max_new_tokens = 50
30
+ num_beams = 5
31
+ temperature = 1.1
32
+ top_k = 75
33
+ top_p = 0.95
34
+ repetition_penalty = 4.0
35
+ length_penalty = 2.0
36
+
37
+ # Create sliders in the sidebar
38
+ max_length = st.sidebar.slider("Max Length", min_value=10, max_value=500, value=max_length)
39
+ max_new_tokens = st.sidebar.slider("Max tokens", min_value=30, max_value=768, value=max_new_tokens)
40
+ num_beams = st.sidebar.slider("Number of Beams", min_value=1, max_value=10, value=num_beams)
41
+ temperature = st.sidebar.slider("Temperature", min_value=0.1, max_value=2.0, value=temperature)
42
+ top_k = st.sidebar.slider("Top-K", min_value=1, max_value=100, value=top_k)
43
+ top_p = st.sidebar.slider("Top-P", min_value=0.1, max_value=1.0, value=top_p)
44
+ repetition_penalty = st.sidebar.slider("Repetition Penalty", min_value=1.0, max_value=10.0, value=repetition_penalty)
45
+ length_penalty = st.sidebar.slider("Length Penalty", min_value=0.1, max_value=10.0, value=length_penalty)
46
+
47
+
48
  # Define generation configuration
49
+ # max_length = st.sidebar.slider("Max Length", min_value=10, max_value=500, value=100)
50
+ # max_new_tokens = st.sidebar.slider("Max tokens", min_value=30, max_value=768, value=50)
51
+ # num_beams = st.sidebar.slider("Number of Beams", min_value=1, max_value=10, value=5)
52
+ # temperature = st.sidebar.slider("Temperature", min_value=0.1, max_value=2.0, value=0.9)
53
+ # top_k = st.sidebar.slider("Top-K", min_value=1, max_value=100, value=50)
54
+ # top_p = st.sidebar.slider("Top-P", min_value=0.1, max_value=1.0, value=0.95)
55
+ # repetition_penalty = st.sidebar.slider("Repetition Penalty", min_value=1.0, max_value=10.0, value=2.0)
56
+ # length_penalty = st.sidebar.slider("Length Penalty", min_value=0.1, max_value=10.0, value=1.7)
57
+ # # early_stopping = st.sidebar.selectbox("Early Stopping", [True, False], index=0)
58
 
59
  generation_config = {
60
  "max_length": max_length,
 
96
 
97
  # Sample texts
98
  sample_texts = {
99
+ "":"",
100
  "Tell me a story in pidgin": "Tell me a story in pidgin",
101
  "Oma Ede, Mi ji ogede...": "Oma Ede, Mi ji ogede mi a foroma orhorho edha meji ri eka. ",
102
  "who are you?": "who are you?",
 
161
  return task_value.format(text)
162
 
163
 
164
+ # Text input
165
+ user_input = st.text_area("Enter text below **(PLEASE, FIRST READ THE INSTRUCTIONS ON HOW TO USE IN THE SIDE BAR FOR BETTER EXPERIENCE)**: ", sample_texts[sample_text])
166
  user_input = instruction_wrap.get(sample_texts.get(user_input, user_input), user_input)
167
 
168
  if st.button("Generate"):