brianjking committed on
Commit
6fc8fb7
1 Parent(s): 63f376b

Update app.py

Files changed (1)
  1. app.py +9 -10
app.py CHANGED
@@ -6,7 +6,9 @@ from llama_index import (
     VectorStoreIndex,
 )
 from llama_index.llms import OpenAI
-import openai
+from openai import OpenAI
+
+client = OpenAI(api_key=os.getenv("OPENAI_API_KEY"))
 
 # Define Streamlit layout and interaction
 st.title("Grounded Generations")
@@ -59,20 +61,17 @@ content_type = st.selectbox("Select content type:", ["Blog", "Tweet"]) # make so
 if st.button("Generate") and content_type:
     with st.spinner('Generating text...'):
         # Generate text using OpenAI API
-        openai.api_key = os.getenv("OPENAI_API_KEY")
         try:
             if content_type == "Blog":
                 prompt = f"Write a blog about 500 words in length using the {st.session_state['retrieved_text']}" #uses content from the stored state
             elif content_type == "Tweet":
                 prompt = f"Compose a tweet using the {st.session_state['retrieved_text']}"
-            response = openai.ChatCompletion.create(
-                model="gpt-3.5-turbo-16k",
-                messages=[
-                    {"role": "system", "content": "You are a helpful assistant."},
-                    {"role": "user", "content": prompt}
-                ]
-            )
-            generated_text = response['choices'][0]['message']['content']
+            response = client.chat.completions.create(model="gpt-3.5-turbo-16k",
+                messages=[
+                    {"role": "system", "content": "You are a helpful assistant."},
+                    {"role": "user", "content": prompt}
+                ])
+            generated_text = response.choices[0].message.content
             st.write(f"Generated Text: {generated_text}")
         except Exception as e:
             st.write(f"An error occurred: {e}")