Synced repo using 'sync_with_huggingface' Github Action
- client/html/index.html +5 -0
- g4f/Provider/Providers/Chimera.py +4 -1
- g4f/models.py +6 -0
client/html/index.html
CHANGED
@@ -120,6 +120,11 @@
                     <option value="oasst-sft-6-llama-30b">LLaMa-30B-sft-6</option>
                     <option value="llama-2-70b-chat">LLaMa-2-70B-chat</option>
                 </optgroup>
+                <optgroup label="CLAUDE">
+                    <option value="claude-instant">Claude-instant</option>
+                    <option value="claude-2">Claude-2</option>
+                    <option value="claude-2-100k">Claude-2-100k</option>
+                </optgroup>
                 <optgroup label="{{_('IMAGE')}}">
                     <option value="" disabled>Kandinsky (soon)</option>
                 </optgroup>
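The option values added above are the exact strings the client hands to the backend as the model name, so they have to match the names registered in the Python files changed below. A minimal consistency-check sketch, assuming the repo's g4f package is importable and that ModelUtils keeps its name-to-class map in a dict called convert (both are assumptions, not shown in this diff):

# Illustrative check (not part of this commit): every model name exposed
# in the <select> should exist in the provider's list and the model registry.
from g4f.Provider.Providers import Chimera   # module changed in this commit
from g4f.models import ModelUtils            # 'convert' dict name is assumed

ui_values = ['claude-instant', 'claude-2', 'claude-2-100k']

for name in ui_values:
    assert name in Chimera.model, f'{name} missing from Chimera.model'
    assert name in ModelUtils.convert, f'{name} missing from ModelUtils.convert'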
g4f/Provider/Providers/Chimera.py
CHANGED
@@ -18,7 +18,10 @@ model = [
     'gpt-4-0314',
     'gpt-4-32k',
     'llama-2-70b-chat',
-    'oasst-sft-6-llama-30b'
+    'oasst-sft-6-llama-30b',
+    'claude-instant',
+    'claude-2',
+    'claude-2-100k'
 ]
 supports_stream = True
 needs_auth = False
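With the three Claude names appended to Chimera's module-level model list, they can be requested through the provider like any other entry. A hedged usage sketch; the ChatCompletion.create call shape follows the usual g4f convention rather than anything shown in this diff:

import g4f

# Hedged sketch: g4f.ChatCompletion.create(...) and g4f.Provider.Chimera are
# assumed from the surrounding repo; only the model names come from this commit.
response = g4f.ChatCompletion.create(
    model='claude-2',                # or 'claude-instant' / 'claude-2-100k'
    provider=g4f.Provider.Chimera,   # provider whose model list was extended
    messages=[{'role': 'user', 'content': 'Hello'}],
    stream=False,
)
print(response)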
g4f/models.py
CHANGED
@@ -67,6 +67,11 @@ class Model:
         base_provider: str = 'anthropic'
         best_provider: Provider.Provider = Provider.Chimera
 
+    class claude_2:
+        name: str = 'claude-2'
+        base_provider: str = 'anthropic'
+        best_provider: Provider.Provider = Provider.Chimera
+
     class claude_2_100k:
         name: str = 'claude-2-100k'
         base_provider: str = 'anthropic'
@@ -109,6 +114,7 @@ class ModelUtils:
 
         'claude-instant-100k': Model.claude_instant_100k,
         'claude-instant': Model.claude_instant,
+        'claude-2': Model.claude_2_100k,
        'claude-2-100k': Model.claude_2_100k,
 
         'llama-2-7b-chat': Model.llama_2_7b_chat,
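The ModelUtils hunk wires the new name into the string-to-class map that turns a UI model string into a Model class and its best_provider; note that, as committed, 'claude-2' resolves to Model.claude_2_100k rather than the newly added Model.claude_2. A self-contained sketch of that lookup pattern, mirroring the names in the diff (the dict name convert and the resolve() helper are illustrative assumptions):

# Stand-ins mirroring the structure shown in the diff; resolve() is hypothetical.
class Provider:
    class Chimera:                    # placeholder for the real provider module
        pass


class Model:
    class claude_2:
        name: str = 'claude-2'
        base_provider: str = 'anthropic'
        best_provider = Provider.Chimera

    class claude_2_100k:
        name: str = 'claude-2-100k'
        base_provider: str = 'anthropic'
        best_provider = Provider.Chimera


class ModelUtils:
    convert: dict = {
        # As committed, both names point at the 100k variant.
        'claude-2': Model.claude_2_100k,
        'claude-2-100k': Model.claude_2_100k,
    }


def resolve(name: str):
    """Map a UI model name to (model class, provider) via ModelUtils.convert."""
    model = ModelUtils.convert[name]
    return model, model.best_provider


print(resolve('claude-2'))            # (Model.claude_2_100k, Provider.Chimera)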