Avo-k committed on
Commit c48e036
1 Parent(s): 8b11fc5

Ekimetrics Azure key

Files changed (2)
  1. app.py +21 -21
  2. utils.py +8 -0
app.py CHANGED
@@ -7,6 +7,7 @@ from utils import (
     make_pairs,
     set_openai_api_key,
     create_user_id,
+    to_completion,
 )
 import numpy as np
 from datetime import datetime
@@ -15,6 +16,11 @@ from azure.storage.fileshare import ShareServiceClient
 
 system_template = {"role": "system", "content": os.environ["content"]}
 
+openai.api_type = "azure"
+openai.api_key = os.environ["api_key"]
+openai.api_base = os.environ["ressource_endpoint"]
+openai.api_version = "2022-12-01"
+
 retrieve_all = EmbeddingRetriever(
     document_store=FAISSDocumentStore.load(
         index_path="./documents/climate_gpt.faiss",
@@ -49,7 +55,7 @@ def chat(
     query: str,
     history: list = [system_template],
     report_type: str = "All available",
-    threshold: float = 0.559,
+    threshold: float = 0.555,
 ) -> tuple:
     """retrieve relevant documents in the document store then query gpt-turbo
 
@@ -80,23 +86,24 @@ def chat(
     )
 
     if sources:
-        messages.append(
-            {"role": "system", "content": f"{os.environ['sources']}\n\n{sources}"}
-        )
+        messages.append({"role": "system", "content": f"{os.environ['sources']}\n\n{sources}"})
 
-    response = openai.ChatCompletion.create(
-        model="gpt-3.5-turbo",
-        messages=messages,
+    response = openai.Completion.create(
+        engine="climateGPT",
+        # messages=messages,
+        prompt=to_completion(messages),
         temperature=0.2,
         stream=True,
     )
 
     if sources:
-        messages.pop()
         complete_response = ""
+        messages.pop()
     else:
         sources = "No environmental report was used to provide this answer."
-        complete_response = "No relevant documents found, for a sourced answer you may want to try a more specific question.\n\n"
+        complete_response = (
+            "No relevant documents found, for a sourced answer you may want to try a more specific question.\n\n"
+        )
 
     messages.append({"role": "assistant", "content": complete_response})
     timestamp = str(datetime.now().timestamp())
@@ -113,7 +120,8 @@ def chat(
     log_on_azure(file, logs, share_client)
 
     for chunk in response:
-        if chunk_message := chunk["choices"][0]["delta"].get("content", None):
+        # if chunk_message := chunk["choices"][0]["delta"].get("content"):
+        if (chunk_message := chunk["choices"][0].get("text")) and chunk_message != "<|im_end|>":
             complete_response += chunk_message
             messages[-1]["content"] = complete_response
             gradio_format = make_pairs([a["content"] for a in messages[1:]])
@@ -144,10 +152,8 @@ def log_on_azure(file, logs, share_client):
 
 # Gradio
 css_code = ".gradio-container {background-image: url('file=background.png');background-position: top right}"
-
 with gr.Blocks(title="🌍 ClimateGPT Ekimetrics", css=css_code) as demo:
 
-    openai.api_key = os.environ["api_key"]
     user_id = create_user_id(10)
     user_id_state = gr.State([user_id])
 
@@ -174,9 +180,7 @@ with gr.Blocks(title="🌍 ClimateGPT Ekimetrics", css=css_code) as demo:
 
         with gr.Column(scale=1, variant="panel"):
             gr.Markdown("### Sources")
-            sources_textbox = gr.Textbox(
-                interactive=False, show_label=False, max_lines=50
-            )
+            sources_textbox = gr.Textbox(interactive=False, show_label=False, max_lines=50)
     ask.submit(
         fn=chat,
         inputs=[
@@ -210,12 +214,8 @@ with gr.Blocks(title="🌍 ClimateGPT Ekimetrics", css=css_code) as demo:
             lines=1,
             type="password",
         )
-        openai_api_key_textbox.change(
-            set_openai_api_key, inputs=[openai_api_key_textbox]
-        )
-        openai_api_key_textbox.submit(
-            set_openai_api_key, inputs=[openai_api_key_textbox]
-        )
+        openai_api_key_textbox.change(set_openai_api_key, inputs=[openai_api_key_textbox])
+        openai_api_key_textbox.submit(set_openai_api_key, inputs=[openai_api_key_textbox])
 
     with gr.Tab("Information"):
         gr.Markdown(
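For context, the core of this change is the switch from openai.ChatCompletion against the public OpenAI API to openai.Completion against an Azure OpenAI deployment, with the chat history flattened into a ChatML-style prompt. Below is a minimal, self-contained sketch of that path, assuming the pre-1.0 openai Python SDK this commit uses; the endpoint value, key, and example messages are placeholders, while the engine name "climateGPT", the API version, and the to_completion helper come from this commit.

import os

import openai

from utils import to_completion  # helper added in this commit (utils.py)

# Azure OpenAI configuration, mirroring the module-level setup in app.py.
# The defaults below are illustrative; the app reads real values from env vars.
openai.api_type = "azure"
openai.api_key = os.environ.get("api_key", "<azure-openai-key>")
openai.api_base = os.environ.get("ressource_endpoint", "https://<resource>.openai.azure.com/")
openai.api_version = "2022-12-01"

# Hypothetical chat history; the app builds this from the system template,
# retrieved report passages, and the user query.
messages = [
    {"role": "system", "content": "You are ClimateGPT, answering from environmental reports."},
    {"role": "user", "content": "Summarize the main drivers of biodiversity loss."},
]

# Stream a completion from the Azure deployment named "climateGPT".
response = openai.Completion.create(
    engine="climateGPT",
    prompt=to_completion(messages),
    temperature=0.2,
    stream=True,
)

answer = ""
for chunk in response:
    # Completion chunks expose "text" (not the ChatCompletion "delta"),
    # and the ChatML stop token has to be filtered out manually.
    if (token := chunk["choices"][0].get("text")) and token != "<|im_end|>":
        answer += token

print(answer)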
utils.py CHANGED
@@ -59,3 +59,11 @@ def create_user_id(length):
     letters = string.ascii_lowercase
     user_id = "".join(random.choice(letters) for i in range(length))
     return user_id
+
+
+def to_completion(messages):
+    s = []
+    for message in messages:
+        s.append(f"<|im_start|>{message['role']}\n{message['content']}<|im_end|>")
+    s.append("<|im_start|>assistant\n")
+    return "\n".join(s)
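As a quick illustration (not part of the commit), to_completion turns a chat-style message list into a ChatML prompt that ends with an open assistant turn; the messages below are hypothetical, but the role/content keys match what app.py passes in.

from utils import to_completion

messages = [
    {"role": "system", "content": "Answer using the provided climate reports."},
    {"role": "user", "content": "What is the current rate of sea level rise?"},
]

print(to_completion(messages))
# <|im_start|>system
# Answer using the provided climate reports.<|im_end|>
# <|im_start|>user
# What is the current rate of sea level rise?<|im_end|>
# <|im_start|>assistant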