ricklamers committed
Commit 60ed75d
1 parent: 4d5c750

fix: hacky sessions.

Files changed (1): app.py (+18, -8)
app.py CHANGED
@@ -2,6 +2,7 @@ import gradio as gr
 import json
 import os
 import numexpr
+import uuid
 from groq import Groq
 from groq.types.chat.chat_completion_tool_param import ChatCompletionToolParam
 
@@ -65,14 +66,22 @@ def get_model_response(messages):
         print(messages)
         return None
 
-conversation_cache = {}
+conversation_state = {}
 
 def respond(message, history, system_message):
-    history_id = id(history)
-    if history_id not in conversation_cache:
-        conversation_cache[history_id] = [{"role": "system", "content": system_message}]
+    if not history or not isinstance(history[0][0], str):
+        session_id = str(uuid.uuid4())
+        history.insert(0, (session_id, "Confirmed."))
+    else:
+        session_id = history[0][0]
 
-    conversation_cache[history_id].append({"role": "user", "content": message})
+    if session_id not in conversation_state:
+        conversation_state[session_id] = []
+
+    if len(conversation_state[session_id]) == 0:
+        conversation_state[session_id].append({"role": "system", "content": system_message})
+
+    conversation_state[session_id].append({"role": "user", "content": message})
 
     available_functions = {
         "evaluate_math_expression": evaluate_math_expression,
@@ -80,9 +89,9 @@ def respond(message, history, system_message):
 
     function_calls = []
     while True:
-        response = get_model_response(conversation_cache[history_id])
+        response = get_model_response(conversation_state[session_id])
         response_message = response.choices[0].message
-        conversation_cache[history_id].append(response_message)
+        conversation_state[session_id].append(response_message)
 
         if not response_message.tool_calls and response_message.content is not None:
            break
@@ -95,7 +104,7 @@ def respond(message, history, system_message):
         }
         function_calls.append(function_call)
         function_response = call_function(tool_call, available_functions)
-        conversation_cache[history_id].append(function_response)
+        conversation_state[session_id].append(function_response)
         function_calls.append({
             "name": function_response["name"],
             "result": json.loads(function_response["content"])
@@ -109,6 +118,7 @@ def respond(message, history, system_message):
 
     return response_message.content + function_calls_md
 
+
 demo = gr.ChatInterface(
     respond,
     additional_inputs=[
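
The change drops the `id(history)`-keyed cache (unreliable, since `id()` is just the memory address of whatever history list Gradio hands to the callback) in favor of a per-session key: a UUID planted as the first turn of the chat history, which later calls read back to look up the same message list in `conversation_state`. Below is a minimal, self-contained sketch of that trick, factored into a hypothetical `get_session_messages` helper for illustration; the actual app keeps this logic inline in `respond`.

import uuid

conversation_state = {}  # session_id -> list of chat messages

def get_session_messages(history, system_message):
    """Return the message list for this chat session, creating it on first use.

    `history` is the list of (user, assistant) tuples that gr.ChatInterface
    passes to its fn; the first tuple is hijacked to carry the session id.
    """
    if not history or not isinstance(history[0][0], str):
        # Fresh chat: mint an id and plant it as a fake first exchange.
        session_id = str(uuid.uuid4())
        history.insert(0, (session_id, "Confirmed."))
    else:
        # Returning chat: the id is whatever the first "user" turn says.
        session_id = history[0][0]

    messages = conversation_state.setdefault(session_id, [])
    if not messages:
        messages.append({"role": "system", "content": system_message})
    return messages

# Two calls sharing one history list, as successive ChatInterface turns would.
history = []
messages = get_session_messages(history, "You are a helpful assistant.")
messages.append({"role": "user", "content": "What is 2 + 2?"})
assert get_session_messages(history, "ignored") is messages  # same session recovered

The commit message calls this "hacky" with reason: it piggybacks on the visible chat history instead of a dedicated per-session mechanism such as gr.State, and entries in `conversation_state` are never evicted, so the dict grows for the lifetime of the process.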