ysharma (HF staff) committed
Commit 281c3bf · Parent(s): fedda13
Files changed (1): app.py (+2 −12)
app.py CHANGED
@@ -45,31 +45,22 @@ def predict_old(user_input, chatbot):
 
 def predict(inputs, chatbot):
 
-    print(f'inputs is - {inputs}')
-    print(f'chatbot is - {chatbot}')
-    #if chatbot[0][-1] is None: #[["hi there'", None]]
-    #    chatbot=[]
     messages = []
     for conv in chatbot:
         user = conv[0]
         messages.append({"role": "user", "content":user })
-        if conv[1] is None: #[["hi there'", None]]
-            #chatbot=[]
+        if conv[1] is None:
             break
         assistant = conv[1]
         messages.append({"role": "assistant", "content":assistant})
-
-    #messages.append({"role": "user", "content":inputs})
-    print(f'messages is - {messages}')
 
     # a ChatCompletion request
     response = openai.ChatCompletion.create(
         model='gpt-3.5-turbo',
-        messages= messages, # [{'role': 'user', 'content': "What is life? Answer in three words."}],
+        messages= messages, # example : [{'role': 'user', 'content': "What is life? Answer in three words."}],
         temperature=1.0,
         stream=True # for streaming the output to chatbot
     )
-    print(f'response is - {response}')
 
     partial_message = ""
     for chunk in response:
@@ -77,7 +68,6 @@ def predict(inputs, chatbot):
         print(chunk['choices'][0]['delta']['content'])
         partial_message = partial_message + chunk['choices'][0]['delta']['content']
         yield partial_message
-        #time.sleep(0.5)
 
 ChatInterface(predict, delete_last_btn="❌Delete").queue().launch(debug=True)
 
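
After this commit, predict is a plain generator: it rebuilds the OpenAI message list from the Gradio-style [user, assistant] history and streams the completion back to the chat UI. The sketch below is a minimal, self-contained version of that flow for reference, not the Space's exact file: the imports, the OPENAI_API_KEY handling, the .get('content', '') guard for empty deltas, and the __main__ driver (standing in for the ChatInterface wiring) are assumptions added here, and it targets the legacy openai 0.x SDK that the openai.ChatCompletion call implies.

import os
import openai  # legacy 0.x SDK, matching the openai.ChatCompletion call in the diff

openai.api_key = os.environ["OPENAI_API_KEY"]  # assumed: key supplied via the environment

def predict(inputs, chatbot):
    # Rebuild the OpenAI message list from the chat history.
    # chatbot is a list of [user, assistant] pairs; the most recent pair has
    # assistant=None while its reply is still pending, so we stop there.
    messages = []
    for conv in chatbot:
        messages.append({"role": "user", "content": conv[0]})
        if conv[1] is None:
            break
        messages.append({"role": "assistant", "content": conv[1]})

    # Stream the ChatCompletion request.
    response = openai.ChatCompletion.create(
        model="gpt-3.5-turbo",
        messages=messages,
        temperature=1.0,
        stream=True,  # stream the output back to the chatbot
    )

    # Accumulate deltas and yield the growing partial reply so the UI updates live.
    partial_message = ""
    for chunk in response:
        # .get() guards against chunks whose delta carries no content
        # (e.g. the role-only first chunk and the empty final chunk).
        partial_message += chunk["choices"][0]["delta"].get("content", "")
        yield partial_message

if __name__ == "__main__":
    # Hypothetical stand-in for the Space's ChatInterface wiring: print each partial update.
    history = [["What is life? Answer in three words.", None]]
    for partial in predict("", history):
        print(partial)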