:recycle: [Refactor] Rename gemma-7b to gemma-7b-1.1

#2
Files changed (1) hide show
  1. constants/models.py +86 -2
constants/models.py CHANGED
@@ -3,7 +3,7 @@ MODEL_MAP = {
3
  "nous-mixtral-8x7b": "NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO",
4
  "mistral-7b": "mistralai/Mistral-7B-Instruct-v0.2",
5
  "openchat-3.5": "openchat/openchat-3.5-0106",
6
- "gemma-7b": "google/gemma-7b-it",
7
  "default": "mistralai/Mixtral-8x7B-Instruct-v0.1",
8
  }
9
 
@@ -69,7 +69,82 @@ AVAILABLE_MODELS_DICTS = [
69
  },
70
  {
71
  "id": "gemma-7b",
72
- "description": "[google/gemma-7b-it]: https://huggingface.co/google/gemma-7b-it",
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
73
  "object": "model",
74
  "created": 1700000000,
75
  "owned_by": "Google",
@@ -82,3 +157,12 @@ AVAILABLE_MODELS_DICTS = [
82
  "owned_by": "OpenAI",
83
  },
84
  ]
 
 
 
 
 
 
 
 
 
 
3
  "nous-mixtral-8x7b": "NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO",
4
  "mistral-7b": "mistralai/Mistral-7B-Instruct-v0.2",
5
  "openchat-3.5": "openchat/openchat-3.5-0106",
6
+ "gemma-7b": "google/gemma-1.1-7b-it",
7
  "default": "mistralai/Mixtral-8x7B-Instruct-v0.1",
8
  }
9
 
 
69
  },
70
  {
71
  "id": "gemma-7b",
72
+ "description": "[google/gemma-1.1-7b-it]: https://huggingface.co/google/gemma-1.1-7b-it",
73
+ "object": "model",
74
+ "created": 1700000000,
75
+ "owned_by": "Google",
76
+ },

MODEL_MAP = {
77
+ "mixtral-8x7b": "mistralai/Mixtral-8x7B-Instruct-v0.1", # [Recommended]
78
+ "nous-mixtral-8x7b": "NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO",
79
+ "mistral-7b": "mistralai/Mistral-7B-Instruct-v0.2",
80
+ "openchat-3.5": "openchat/openchat-3.5-0106",
81
+ "gemma-7b-1.1": "google/gemma-1.1-7b-it",
82
+ "default": "mistralai/Mixtral-8x7B-Instruct-v0.1",
83
+ }
84
+
85
+
86
+ STOP_SEQUENCES_MAP = {
87
+ "mixtral-8x7b": "</s>",
88
+ "nous-mixtral-8x7b": "<|im_end|>",
89
+ "mistral-7b": "</s>",
90
+ "openchat-3.5": "<|end_of_turn|>",
91
+ "gemma-7b-1.1": "<eos>",
92
+ }
93
+
94
+ TOKEN_LIMIT_MAP = {
95
+ "mixtral-8x7b": 32768,
96
+ "nous-mixtral-8x7b": 32768,
97
+ "mistral-7b": 32768,
98
+ "openchat-3.5": 8192,
99
+ "gemma-7b-1.1": 8192,
100
+ "gpt-3.5-turbo": 8192,
101
+ }
102
+
103
+ TOKEN_RESERVED = 20
104
+
105
+
106
+ AVAILABLE_MODELS = [
107
+ "mixtral-8x7b",
108
+ "nous-mixtral-8x7b",
109
+ "mistral-7b",
110
+ "openchat-3.5",
111
+ "gemma-7b-1.1",
112
+ "gpt-3.5-turbo",
113
+ ]
114
+
115
+ # https://platform.openai.com/docs/api-reference/models/list
116
+ AVAILABLE_MODELS_DICTS = [
117
+ {
118
+ "id": "mixtral-8x7b",
119
+ "description": "[mistralai/Mixtral-8x7B-Instruct-v0.1]: https://huggingface.co/mistralai/Mixtral-8x7B-Instruct-v0.1",
120
+ "object": "model",
121
+ "created": 1700000000,
122
+ "owned_by": "mistralai",
123
+ },
124
+ {
125
+ "id": "nous-mixtral-8x7b",
126
+ "description": "[NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO]: https://huggingface.co/NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO",
127
+ "object": "model",
128
+ "created": 1700000000,
129
+ "owned_by": "NousResearch",
130
+ },
131
+ {
132
+ "id": "mistral-7b",
133
+ "description": "[mistralai/Mistral-7B-Instruct-v0.2]: https://huggingface.co/mistralai/Mistral-7B-Instruct-v0.2",
134
+ "object": "model",
135
+ "created": 1700000000,
136
+ "owned_by": "mistralai",
137
+ },
138
+ {
139
+ "id": "openchat-3.5",
140
+ "description": "[openchat/openchat-3.5-0106]: https://huggingface.co/openchat/openchat-3.5-0106",
141
+ "object": "model",
142
+ "created": 1700000000,
143
+ "owned_by": "openchat",
144
+ },
145
+ {
146
+ "id": "gemma-7b-1.1",
147
+ "description": "[google/gemma-1.1-7b-it]: https://huggingface.co/google/gemma-1.1-7b-it",
148
  "object": "model",
149
  "created": 1700000000,
150
  "owned_by": "Google",
 
157
  "owned_by": "OpenAI",
158
  },
159
  ]
160
+
161
+ {
162
+ "id": "gpt-3.5-turbo",
163
+ "description": "[openai/gpt-3.5-turbo]: https://platform.openai.com/docs/models/gpt-3-5-turbo",
164
+ "object": "model",
165
+ "created": 1700000000,
166
+ "owned_by": "OpenAI",
167
+ },
168
+ ]