chokiproai committed on
Commit 8449d3f
1 Parent(s): af671a0

Update service/src/chatgpt/index.ts

Files changed (1)
  service/src/chatgpt/index.ts  +13  -9
service/src/chatgpt/index.ts CHANGED
@@ -62,11 +62,22 @@ let api: ChatGPTAPI | ChatGPTUnofficialProxyAPI
         options.maxModelTokens = 32768
         options.maxResponseTokens = 8192
       }
+      // if use GPT-4 Turbo
+      else if (model.toLowerCase().includes('1106-preview')) {
+        options.maxModelTokens = 128000
+        options.maxResponseTokens = 4096
+      }
       else {
         options.maxModelTokens = 8192
         options.maxResponseTokens = 2048
       }
     }
+    else if (model.toLowerCase().includes('gpt-3.5')) {
+      if (model.toLowerCase().includes('16k')) {
+        options.maxModelTokens = 16384
+        options.maxResponseTokens = 4096
+      }
+    }
 
     if (isNotEmptyString(OPENAI_API_BASE_URL))
       options.apiBaseUrl = `${OPENAI_API_BASE_URL}/v1`
@@ -77,7 +88,6 @@ let api: ChatGPTAPI | ChatGPTUnofficialProxyAPI
     apiModel = 'ChatGPTAPI'
   }
   else {
-    console.log('OPENAI_ACCESS_TOKEN',OPENAI_ACCESS_TOKEN);
     const options: ChatGPTUnofficialProxyAPIOptions = {
       accessToken: process.env.OPENAI_ACCESS_TOKEN,
       apiReverseProxyUrl: isNotEmptyString(process.env.API_REVERSE_PROXY) ? process.env.API_REVERSE_PROXY : 'https://ai.fakeopen.com/api/conversation',
@@ -129,15 +139,9 @@ async function chatReplyProcess(options: RequestOptions) {
 }
 
 async function fetchUsage() {
-  let OPENAI_API_KEY = process.env.OPENAI_API_KEY
+  const OPENAI_API_KEY = process.env.OPENAI_API_KEY
   const OPENAI_API_BASE_URL = process.env.OPENAI_API_BASE_URL
 
-  if (isNotEmptyString(process.env.OPENAI_API_KEY_ARR)){
-    const OPENAI_API_KEY_ARR = JSON.parse(process.env.OPENAI_API_KEY_ARR);
-    const randomIndex = Math.floor(Math.random() * OPENAI_API_KEY_ARR.length);
-    OPENAI_API_KEY = OPENAI_API_KEY_ARR[randomIndex];
-  }
-
   if (!isNotEmptyString(OPENAI_API_KEY))
     return Promise.resolve('-')
 
@@ -231,4 +235,4 @@ function currentModel(): ApiModel {
 
 export type { ChatContext, ChatMessage }
 
-export { chatReplyProcess, chatConfig, currentModel }
+export { chatReplyProcess, chatConfig, currentModel }
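Note: the first hunk widens the token-limit branching. In addition to the existing 32k case it now recognizes the GPT-4 Turbo preview models ('1106-preview': 128000 model tokens, 4096 response tokens) and 16k GPT-3.5 models (16384 / 4096). Below is a minimal TypeScript sketch of that selection, assuming the outer model.toLowerCase().includes('gpt-4') check that precedes this block in the file; the helper name resolveTokenLimits and the empty fallback are illustrative, and only the model-name checks and the numbers come from the diff above.

// Sketch only: the real code mutates a ChatGPTAPIOptions object in place
// during API setup; here the same branching is shown as a pure helper.
interface TokenLimits {
  maxModelTokens?: number
  maxResponseTokens?: number
}

// Hypothetical helper mirroring the branching added in this commit.
function resolveTokenLimits(model: string): TokenLimits {
  const name = model.toLowerCase()
  if (name.includes('gpt-4')) {
    if (name.includes('32k'))
      return { maxModelTokens: 32768, maxResponseTokens: 8192 }
    // GPT-4 Turbo preview
    if (name.includes('1106-preview'))
      return { maxModelTokens: 128000, maxResponseTokens: 4096 }
    return { maxModelTokens: 8192, maxResponseTokens: 2048 }
  }
  if (name.includes('gpt-3.5') && name.includes('16k'))
    return { maxModelTokens: 16384, maxResponseTokens: 4096 }
  // Any other model keeps the chatgpt library defaults.
  return {}
}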
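Note: the middle hunks are smaller cleanups. The access-token branch drops a debug console.log that printed OPENAI_ACCESS_TOKEN, and fetchUsage reverts the per-request key rotation, so usage reporting now always reads the single OPENAI_API_KEY and no longer consults OPENAI_API_KEY_ARR. For reference, the removed behaviour looked roughly like the sketch below (reconstructed from the deleted lines; pickUsageKey is a hypothetical name and this code is not present after the commit).

// Removed behaviour: OPENAI_API_KEY_ARR held a JSON array of API keys and
// fetchUsage() picked one at random per call. After this commit only
// process.env.OPENAI_API_KEY is used. isNotEmptyString is the project's
// existing helper, also used in the diff above.
function pickUsageKey(): string | undefined {
  const raw = process.env.OPENAI_API_KEY_ARR
  if (isNotEmptyString(raw)) {
    const keys: string[] = JSON.parse(raw as string)
    return keys[Math.floor(Math.random() * keys.length)]
  }
  return process.env.OPENAI_API_KEY
}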