Myranda committed on
Commit
df6534a
·
1 Parent(s): 57a8fee

Merge pull request #146 from vanderbilt-data-science/115-regenerate-background

Browse files
app.py CHANGED
@@ -142,7 +142,10 @@ if authentication_status:
142
 
143
 
144
  with right_col:
 
145
  background_info = st.text_area("Background information on original post (This autopopulates for your reference). The Background information generated by OpenAI's GPT-4.", height = 780, value=st.session_state.background_info if 'background_info' in st.session_state else '', key = 'background_info_key')
 
 
146
 
147
  with left_col:
148
  original_post = st.text_area("Paste Original Post Here \n", height=100)
@@ -255,10 +258,43 @@ if authentication_status:
255
  model_name='gpt-4', temperature=0.1)
256
 
257
  if chat_mdl is not None:
258
- updated_response = regenerate_custom_response(
259
- chat_mdl, regenerate_prompt, st.session_state.draft_response).content
260
- st.session_state.draft_response = updated_response
261
  st.session_state.is_regenerating = False
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
262
 
263
  st.rerun()
264
 
 
142
 
143
 
144
  with right_col:
145
+ regenerate_background = False
146
  background_info = st.text_area("Background information on original post (This autopopulates for your reference). The Background information generated by OpenAI's GPT-4.", height = 780, value=st.session_state.background_info if 'background_info' in st.session_state else '', key = 'background_info_key')
147
+ if st.button("Request New Background Information"):
148
+ regenerate_background = True
149
 
150
  with left_col:
151
  original_post = st.text_area("Paste Original Post Here \n", height=100)
 
258
  model_name='gpt-4', temperature=0.1)
259
 
260
  if chat_mdl is not None:
261
+ updated_response, background_info = regenerate_custom_response(
262
+ original_post, chat_mdl, regenerate_prompt, st.session_state.draft_response, regenerate_background, st.session_state.background_text)
263
+ st.session_state.draft_response = updated_response.content
264
  st.session_state.is_regenerating = False
265
+
266
+
267
+ st.rerun()
268
+
269
+
270
+ if regenerate_background:
271
+ if st.session_state.api_key:
272
+ os.environ['OPENAI_API_KEY'] = st.session_state.api_key
273
+ # add condition to check for passphrase to allow use of DSI api key stored in secrets
274
+ if (os.environ["OPENAI_API_KEY"] == st.secrets["secret_passphrase"]):
275
+ #umang key
276
+ os.environ["OPENAI_API_KEY"] = st.secrets["dsi_openai_key"]
277
+ elif (os.environ["OPENAI_API_KEY"] == st.secrets["secret_passphrase2"]):
278
+ #abbie key
279
+ os.environ["OPENAI_API_KEY"] = st.secrets["dsi_openai_key2"]
280
+ elif (os.environ["OPENAI_API_KEY"] == st.secrets["secret_passphrase3"]):
281
+ #myranda key
282
+ os.environ["OPENAI_API_KEY"] = st.secrets["dsi_openai_key3"]
283
+ elif (os.environ["OPENAI_API_KEY"] == st.secrets["secret_passphrase4"]):
284
+ #jasmine key
285
+ os.environ["OPENAI_API_KEY"] = st.secrets["dsi_openai_key4"]
286
+ chat_mdl = ChatOpenAI(
287
+ model_name='gpt-4', temperature=0.1)
288
+
289
+ if chat_mdl is not None:
290
+ updated_response, background_text = regenerate_custom_response(
291
+ original_post, chat_mdl, regenerate_prompt, st.session_state.draft_response, regenerate_background, st.session_state.background_text)
292
+ st.session_state.draft_response = updated_response.content
293
+ st.session_state.background_text = background_info
294
+ st.session_state.background_info = background_info # what is the difference between this and background_text?
295
+ regenerate_background = False
296
+ st.session_state.is_regenerating = False
297
+
298
 
299
  st.rerun()
300
 
free_speech_app/FreeSpeechPromptsResponses.py CHANGED
@@ -113,12 +113,18 @@ def generate_custom_response(original_post, chat_mdl, principles=None, writing_s
113
  return draft_response, background_info
114
 
115
  # %% ../nbs/free-speech-prompts.ipynb 14
116
def regenerate_custom_response(chat_mdl, regenerate_prompt, draft_response):
    """Revise an existing draft response according to a user's request.

    Builds a single revision prompt embedding both the user's instructions
    and the current draft, then sends it to the chat model.

    Args:
        chat_mdl: chat model handle forwarded to get_chat_model_response.
        regenerate_prompt: the user's revision instructions.
        draft_response: the current draft text to be revised.

    Returns:
        The chat model's response object for the revised draft.
    """
    # Fold the revision request and the existing draft into one prompt.
    revision_prompt = f"Please update the original response according to the following request. {regenerate_prompt}. Here is the original response: {draft_response}"

    # Delegate to the shared chat-model helper defined elsewhere in this module.
    return get_chat_model_response(chat_mdl, revision_prompt)
 
113
  return draft_response, background_info
114
 
115
  # %% ../nbs/free-speech-prompts.ipynb 14
116
def regenerate_custom_response(original_post, chat_mdl, regenerate_prompt, draft_response, regenerate_background=False, prev_background = None):
    """Revise an existing draft response, optionally refreshing its background info.

    When ``regenerate_background`` is True, new background information is
    derived from the original post; otherwise ``prev_background`` is reused
    as-is. The revision prompt embeds the user's request, the current draft,
    and the chosen background information.

    Args:
        original_post: the post the draft is responding to.
        chat_mdl: chat model handle forwarded to get_chat_model_response.
        regenerate_prompt: the user's revision instructions.
        draft_response: the current draft text to be revised.
        regenerate_background: when True, regenerate background info from
            the original post instead of reusing ``prev_background``.
        prev_background: previously generated background info to reuse.

    Returns:
        A tuple ``(updated_response, background_info)``: the model's revised
        response object and the background info that was used.
    """
    # Choose the background context: regenerate it from the post, or keep
    # the previous one untouched.
    # NOTE(review): generate_custom_prompt is defined elsewhere in this
    # module — confirm it returns a 2-tuple whose second item is the
    # background info.
    if regenerate_background:
        _, background_info = generate_custom_prompt(original_post)
    else:
        background_info = prev_background

    # Fold the revision request, current draft, and background into one prompt.
    revision_prompt = f"Please update the original response according to the following request. {regenerate_prompt}. Here is the original response: {draft_response}. And background information for the original post: {background_info}"

    # Delegate to the shared chat-model helper defined elsewhere in this module.
    updated_response = get_chat_model_response(chat_mdl, revision_prompt)

    return updated_response, background_info