gpt-4 token +8192
request_llm/bridge_all.py CHANGED
@@ -38,7 +38,7 @@ model_info = {
         "fn_with_ui": chatgpt_ui,
         "fn_without_ui": chatgpt_noui,
         "endpoint": "https://api.openai.com/v1/chat/completions",
-        "max_token":
+        "max_token": 8192,
         "tokenizer": tiktoken.encoding_for_model("gpt-4"),
         "token_cnt": lambda txt: len(tiktoken.encoding_for_model("gpt-4").encode(txt, disallowed_special=())),
     },
@@ -57,7 +57,7 @@ model_info = {
         "fn_with_ui": chatgpt_ui,
         "fn_without_ui": chatgpt_noui,
         "endpoint": "https://openai.api2d.net/v1/chat/completions",
-        "max_token":
+        "max_token": 8192,
         "tokenizer": tiktoken.encoding_for_model("gpt-4"),
         "token_cnt": lambda txt: len(tiktoken.encoding_for_model("gpt-4").encode(txt, disallowed_special=())),
     },
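For context (not part of the commit): a minimal sketch of how an entry shaped like the ones in this diff could be used to budget a prompt against the raised 8192-token limit. The gpt4_info dict and the fits_in_context helper below are hypothetical illustrations written for this note; only the key names and the tiktoken calls mirror the lines in the diff above.

import tiktoken

# Hypothetical stand-in for one model_info entry from the diff above.
gpt4_info = {
    "endpoint": "https://api.openai.com/v1/chat/completions",
    "max_token": 8192,
    "tokenizer": tiktoken.encoding_for_model("gpt-4"),
    "token_cnt": lambda txt: len(tiktoken.encoding_for_model("gpt-4").encode(txt, disallowed_special=())),
}

def fits_in_context(prompt: str, reserved_for_reply: int = 1024) -> bool:
    # Count prompt tokens with the entry's token_cnt, then check that the
    # prompt plus a reserved reply budget stays within max_token.
    used = gpt4_info["token_cnt"](prompt)
    return used + reserved_for_reply <= gpt4_info["max_token"]

print(fits_in_context("Explain the tokenizer entry in model_info."))

Raising max_token to 8192 matters for checks like this one: any truncation or budgeting logic keyed off model_info can now allow gpt-4 prompts up to the full 8k context window instead of a smaller cap.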