Nils Durner committed on
Commit
0008662
1 Parent(s): 32ad276

OpenAI adaptation

Browse files
Files changed (3) hide show
  1. README.md +2 -2
  2. app.py +46 -43
  3. requirements.txt +1 -2
README.md CHANGED
@@ -1,10 +1,10 @@
1
  ---
2
- title: Amz Bedrock Chat
3
  emoji: 📈
4
  colorFrom: yellow
5
  colorTo: gray
6
  sdk: gradio
7
- sdk_version: 4.1.1
8
  app_file: app.py
9
  pinned: false
10
  license: mit
 
1
  ---
2
+ title: OAI Chat
3
  emoji: 📈
4
  colorFrom: yellow
5
  colorTo: gray
6
  sdk: gradio
7
+ sdk_version: 4.2.0
8
  app_file: app.py
9
  pinned: false
10
  license: mit
app.py CHANGED
@@ -1,7 +1,7 @@
1
  import gradio as gr
2
  import json
3
  import os
4
- import boto3
5
 
6
  dump_controls = False
7
  log_to_console = False
@@ -42,51 +42,51 @@ def load_settings():
42
  # Dummy Python function, actual loading is done in JS
43
  pass
44
 
45
- def save_settings(acc, sec, prompt, temp):
46
  # Dummy Python function, actual saving is done in JS
47
  pass
48
 
49
  def process_values_js():
50
  return """
51
  () => {
52
- return ["access_key", "secret_key", "token"];
53
  }
54
  """
55
 
56
- def bot(message, history, aws_access, aws_secret, aws_token, temperature, max_tokens):
57
  try:
58
- prompt = "\n\n"
 
 
 
 
 
 
 
 
59
  for human, assi in history:
60
- if prompt is not None:
61
- prompt += f"Human: {human}\n\n"
62
  if assi is not None:
63
- prompt += f"Assistant: {assi}\n\n"
64
  if message:
65
- prompt += f"Human: {message}\n\n"
66
- prompt += f"Assistant:"
 
 
 
 
 
 
 
 
 
 
67
 
68
  if log_to_console:
69
- print(f"br_prompt: {str(prompt)}")
70
-
71
- body = json.dumps({
72
- "prompt": prompt,
73
- "max_tokens_to_sample": max_tokens,
74
- "temperature": temperature,
75
- })
76
-
77
- sess = boto3.Session(
78
- aws_access_key_id=aws_access,
79
- aws_secret_access_key=aws_secret,
80
- aws_session_token=aws_token,
81
- region_name='eu-central-1')
82
- br = sess.client(service_name="bedrock-runtime")
83
-
84
- response = br.invoke_model(body=body, modelId="anthropic.claude-v2",
85
- accept="application/json", contentType="application/json")
86
- response_body = json.loads(response.get('body').read())
87
- br_result = response_body.get('completion')
88
-
89
- history[-1][1] = br_result
90
  if log_to_console:
91
  print(f"br_result: {str(history)}")
92
 
@@ -96,12 +96,14 @@ def bot(message, history, aws_access, aws_secret, aws_token, temperature, max_to
96
  return "", history
97
 
98
  with gr.Blocks() as demo:
99
- gr.Markdown("# Amazon™️ Bedrock™️ Chat™️ (Nils' Version™️) feat. Anthropic™️ Claude-2™️")
100
 
101
  with gr.Accordion("Settings"):
102
- aws_access = gr.Textbox(label="AWS Access Key", elem_id="aws_access")
103
- aws_secret = gr.Textbox(label="AWS Secret Key", elem_id="aws_secret")
104
- aws_token = gr.Textbox(label="AWS Session Token", elem_id="aws_token")
 
 
105
  temp = gr.Slider(0, 1, label="Temperature", elem_id="temp", value=1)
106
  max_tokens = gr.Slider(1, 4000, label="Max. Tokens", elem_id="max_tokens", value=4000)
107
  save_button = gr.Button("Save Settings")
@@ -109,7 +111,7 @@ with gr.Blocks() as demo:
109
 
110
  load_button.click(load_settings, js="""
111
  () => {
112
- let elems = ['#aws_access textarea', '#aws_secret textarea', '#aws_token textarea', '#temp input', '#max_tokens input'];
113
  elems.forEach(elem => {
114
  let item = document.querySelector(elem);
115
  let event = new InputEvent('input', { bubbles: true });
@@ -119,13 +121,14 @@ with gr.Blocks() as demo:
119
  }
120
  """)
121
 
122
- save_button.click(save_settings, [aws_access, aws_secret, aws_token, temp, max_tokens], js="""
123
- (acc, sec, tok, prompt, temp, ntok) => {
124
- localStorage.setItem('aws_access', acc);
125
- localStorage.setItem('aws_secret', sec);
126
- localStorage.setItem('aws_token', tok);
127
  localStorage.setItem('temp', document.querySelector('#temp input').value);
128
  localStorage.setItem('max_tokens', document.querySelector('#max_tokens input').value);
 
129
  }
130
  """)
131
 
@@ -146,7 +149,7 @@ with gr.Blocks() as demo:
146
  )
147
  submit_btn = gr.Button("🚀 Send", scale=0)
148
  submit_click = submit_btn.click(add_text, [chatbot, txt], [chatbot, txt], queue=False).then(
149
- bot, [txt, chatbot, aws_access, aws_secret, aws_token, temp, max_tokens], [txt, chatbot],
150
  )
151
  submit_click.then(lambda: gr.Textbox(interactive=True), None, [txt], queue=False)
152
 
@@ -164,7 +167,7 @@ with gr.Blocks() as demo:
164
  dmp_btn.click(dump, inputs=[chatbot], outputs=[txt_dmp])
165
 
166
  txt_msg = txt.submit(add_text, [chatbot, txt], [chatbot, txt], queue=False).then(
167
- bot, [txt, chatbot, aws_access, aws_secret, aws_token, temp, max_tokens], [txt, chatbot],
168
  )
169
  txt_msg.then(lambda: gr.Textbox(interactive=True), None, [txt], queue=False)
170
  file_msg = btn.upload(add_file, [chatbot, btn], [chatbot], queue=False, postprocess=False)
 
1
  import gradio as gr
2
  import json
3
  import os
4
+ import openai
5
 
6
  dump_controls = False
7
  log_to_console = False
 
42
  # Dummy Python function, actual loading is done in JS
43
  pass
44
 
45
def save_settings(acc, sec, prompt, temp, tokens, model):
    """No-op placeholder — settings are actually persisted client-side via the JS hook."""
    pass
48
 
49
def process_values_js():
    """Return the JS snippet listing the localStorage keys to load into the UI."""
    # NOTE(review): reconstructed from a diff rendering — the exact internal
    # indentation of the original string literal is not recoverable; the JS
    # payload (an arrow function returning the three key names) is preserved.
    return """
      () => {
        return ["oai_key", "system_prompt", "seed"];
      }
    """
55
 
56
+ def bot(message, history, oai_key, system_prompt, seed, temperature, max_tokens, model):
57
  try:
58
+ openai.api_key = oai_key
59
+
60
+ seed_i = None
61
+ if seed:
62
+ seed_i = int(seed)
63
+
64
+ history_openai_format = []
65
+ if system_prompt:
66
+ history_openai_format.append({"role": "system", "content": system_prompt})
67
  for human, assi in history:
68
+ if human is not None:
69
+ history_openai_format.append({"role": "user", "content": human})
70
  if assi is not None:
71
+ history_openai_format.append({"role": "assistant", "content": assi})
72
  if message:
73
+ history_openai_format.append({"role": "user", "content": message})
74
+
75
+ if log_to_console:
76
+ print(f"br_prompt: {str(history_openai_format)}")
77
+
78
+ response = openai.ChatCompletion.create(
79
+ model=model,
80
+ messages= history_openai_format,
81
+ temperature=temperature,
82
+ seed=seed_i,
83
+ max_tokens=max_tokens
84
+ )
85
 
86
  if log_to_console:
87
+ print(f"br_response: {str(response)}")
88
+
89
+ history[-1][1] = response.choices[0].message.content
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
90
  if log_to_console:
91
  print(f"br_result: {str(history)}")
92
 
 
96
  return "", history
97
 
98
  with gr.Blocks() as demo:
99
+ gr.Markdown("# OAI Chat (Nils' Version™️)")
100
 
101
  with gr.Accordion("Settings"):
102
+ oai_key = gr.Textbox(label="OpenAI API Key", elem_id="oai_key")
103
+ model = gr.Dropdown(label="Model", value="gpt-4-1106-preview", allow_custom_value=True, elem_id="model",
104
+ choices=["gpt-4-1106-preview", "gpt-4", "gpt-3.5-turbo", "gpt-3.5-turbo-16k", "gpt-3.5-turbo-1106"])
105
+ system_prompt = gr.TextArea("You are a helpful AI.", label="System Prompt", lines=3, max_lines=250, elem_id="system_prompt")
106
+ seed = gr.Textbox(label="Seed", elem_id="seed")
107
  temp = gr.Slider(0, 1, label="Temperature", elem_id="temp", value=1)
108
  max_tokens = gr.Slider(1, 4000, label="Max. Tokens", elem_id="max_tokens", value=4000)
109
  save_button = gr.Button("Save Settings")
 
111
 
112
  load_button.click(load_settings, js="""
113
  () => {
114
+ let elems = ['#oai_key textarea', '#system_prompt textarea', '#seed textarea', '#temp input', '#max_tokens input', '#model'];
115
  elems.forEach(elem => {
116
  let item = document.querySelector(elem);
117
  let event = new InputEvent('input', { bubbles: true });
 
121
  }
122
  """)
123
 
124
+ save_button.click(save_settings, [oai_key, system_prompt, seed, temp, max_tokens, model], js="""
125
+ (oai, sys, seed, temp, ntok, model) => {
126
+ localStorage.setItem('oai_key', oai);
127
+ localStorage.setItem('system_prompt', sys);
128
+ localStorage.setItem('seed', seed);
129
  localStorage.setItem('temp', document.querySelector('#temp input').value);
130
  localStorage.setItem('max_tokens', document.querySelector('#max_tokens input').value);
131
+ localStorage.setItem('model', model);
132
  }
133
  """)
134
 
 
149
  )
150
  submit_btn = gr.Button("🚀 Send", scale=0)
151
  submit_click = submit_btn.click(add_text, [chatbot, txt], [chatbot, txt], queue=False).then(
152
+ bot, [txt, chatbot, oai_key, system_prompt, seed, temp, max_tokens, model], [txt, chatbot],
153
  )
154
  submit_click.then(lambda: gr.Textbox(interactive=True), None, [txt], queue=False)
155
 
 
167
  dmp_btn.click(dump, inputs=[chatbot], outputs=[txt_dmp])
168
 
169
  txt_msg = txt.submit(add_text, [chatbot, txt], [chatbot, txt], queue=False).then(
170
+ bot, [txt, chatbot, oai_key, system_prompt, seed, temp, max_tokens, model], [txt, chatbot],
171
  )
172
  txt_msg.then(lambda: gr.Textbox(interactive=True), None, [txt], queue=False)
173
  file_msg = btn.upload(add_file, [chatbot, btn], [chatbot], queue=False, postprocess=False)
requirements.txt CHANGED
@@ -1,3 +1,2 @@
1
  gradio
2
- langchain
3
- boto3
 
1
  gradio
2
+ openai