Arcypojeb committed
Commit 80c00c7
1 Parent(s): da75ef2

Update agents.py

Files changed (1): agents.py (+10, -15)
agents.py CHANGED
@@ -36,7 +36,7 @@ used_ports = []
 server_ports = []
 client_ports = []
 
-class Llama2:
+class Llama2:
 
     def __init__(self, fireworksAPI):
 
@@ -81,18 +81,13 @@ class Llama2:
         else:
             generated_responses.append(message[2])
 
-        # Create a list of message dictionaries for the conversation history
-        conversation_history = []
-        for user_input, generated_response in zip(past_user_inputs, generated_responses):
-            conversation_history.append({"role": "user", "content": str(user_input)})
-            conversation_history.append({"role": "assistant", "content": str(generated_response)})
-
         # Prepare data to send to the chatgpt-api.shn.hk
         response = fireworks.client.ChatCompletion.create(
-            model="accounts/fireworks/models/llama-v2-7b-chat",
+            model="accounts/fireworks/models/llama-v2-70b-chat",
             messages=[
-                {"role": "system", "content": instruction},
-                conversation_history,
+                {"role": "system", "content": self.system_instruction},
+                *[{"role": "user", "content": input} for input in past_user_inputs],
+                *[{"role": "assistant", "content": response} for response in generated_responses],
                 {"role": "user", "content": question}
             ],
             stream=False,
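
For reference, the new call no longer nests a separately built conversation_history list as a single element inside messages (which is what the removed code did); it unpacks the two history lists directly into the message array. A minimal, self-contained sketch of that construction, reusing the names from the hunk above with hypothetical sample data:

# Sketch of the message-list construction from the new call above.
# The history values here are hypothetical sample data.
past_user_inputs = ["Hi there", "What can you do?"]
generated_responses = ["Hello!", "I can route questions to several models."]
system_instruction = "You are a helpful agent."
question = "Which model should handle a coding task?"

# All past user turns are unpacked first, then all assistant turns,
# so the history ends up grouped by role rather than interleaved turn by turn.
messages = [
    {"role": "system", "content": system_instruction},
    *[{"role": "user", "content": user_input} for user_input in past_user_inputs],
    *[{"role": "assistant", "content": reply} for reply in generated_responses],
    {"role": "user", "content": question},
]

for message in messages:
    print(message["role"], "->", message["content"])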
@@ -189,11 +184,11 @@ class Llama2:
                 continue
 
         except websockets.exceptions.ConnectionClosedError as e:
-            clients.remove(self.cli_name2)
+            self.clients.remove(self.cli_name2)
             print(f"Connection closed: {e}")
 
         except Exception as e:
-            clients.remove(self.cli_name2)
+            self.clients.remove(self.cli_name2)
             print(f"Error: {e}")
 
     async def start_server(self, serverPort):
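
The hunk above only changes which registry the error handlers clean up (the instance attribute self.clients instead of a bare clients name). A standalone sketch of that cleanup pattern with the websockets package, using hypothetical names (uri, clients, cli_name) rather than the class's own attributes:

import asyncio
import websockets

# Hypothetical receive loop illustrating the cleanup pattern from the hunk above:
# when the connection drops or any other error occurs, remove this client's name
# from the shared registry before logging the failure.
async def keep_receiving(uri, clients, cli_name):
    try:
        async with websockets.connect(uri) as ws:
            async for message in ws:
                print(f"Received: {message}")
    except websockets.exceptions.ConnectionClosedError as e:
        if cli_name in clients:
            clients.remove(cli_name)
        print(f"Connection closed: {e}")
    except Exception as e:
        if cli_name in clients:
            clients.remove(cli_name)
        print(f"Error: {e}")

# Example usage (assumes a WebSocket server is listening on localhost:5005):
# asyncio.run(keep_receiving("ws://localhost:5005", ["client-1"], "client-1"))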
@@ -296,14 +291,14 @@ class Llama2:
         await self.startClient(clientPort)
 
     async def ask_Forefront(self, question):
-        api = FOREFRONT_API_KEY
+        api = "sk-9nDzLqZ7Umy7hmp1kZRPun628aSpABt6"
         forefront = ForefrontAI(api)
         response = await forefront.handleInput(question)
         print(response)
         return response
 
     async def ask_Claude(self, question):
-        api = ANTHROPIC_API_KEY
+        api = "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"
         claude = Claude3(api)
         response = await claude.handleInput(question)
         print(response)
@@ -494,7 +489,7 @@ class Llama2:
 
     async def askCharacter(self, question):
         characterID = await self.pickCharacter(question)
-        token = CHARACTERAI_API_KEY
+        token = "d9016ef1aa499a1addb44049cedece57e21e8cbb"
         character = CharacterAI(token, characterID)
         answer = await character.handleInput(question)
         return answer
 