flyingfishinwater committed on
Commit
d2793ed
·
verified ·
1 Parent(s): b30a7e7

Upload models.json

Browse files
Files changed (1) hide show
  1. models.json +33 -0
models.json CHANGED
@@ -304,5 +304,38 @@
304
  "add_bos_token": true,
305
  "add_eos_token": false,
306
  "parse_special_tokens": true
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
307
  }
308
  ]
 
304
  "add_bos_token": true,
305
  "add_eos_token": false,
306
  "parse_special_tokens": true
307
+ },
308
+ {
309
+ "id": "chinese-tiny-llm-2b",
310
+ "model_title": "Chinese Tiny LLM 2B",
311
+ "model_file": "chinese-tiny-llm-2b-Q8_0.gguf",
312
+ "model_url": "https://huggingface.co/flyingfishinwater/goodmodels/resolve/main/chinese-tiny-llm-2b-Q8_0.gguf?download=true",
313
+ "model_info_url": "https://huggingface.co/m-a-p/CT-LLM-SFT-DPO",
314
+ "model_avatar": "logo_mapai",
315
+ "model_intention": "这是一个参数规模2B的中文模型,具有很好的中文理解和应答能力",
316
+ "model_license": "license_bigcode.txt",
317
+ "model_license_info": "APACHE LICENSE, VERSION 2.0",
318
+ "model_license_url": "https://www.apache.org/licenses/LICENSE-2.0",
319
+ "model_description": "Chinese Tiny LLM 2B 是首个以中文为中心的大型语言模型,主要在中文语料库上进行预训练和微调,提供了对潜在偏见、中文语言能力和多语言适应性的重要洞见。",
320
+ "developer_url": "https://m-a-p.ai/",
321
+ "file_size": 2218,
322
+ "context" : 4096,
323
+ "temp" : 0.6,
324
+ "prompt_format" : "<|im_start|>user\n{{prompt}}\n<|im_end|>\n<|im_start|>assistant\n",
325
+ "top_k" : 5,
326
+ "top_p" : 0.9,
327
+ "model_inference" : "llama",
328
+ "n_batch" : 10,
329
+ "template_name" : "chatml",
330
+ "is_ready": true,
331
+ "is_internal": false,
332
+ "use_metal": true,
333
+ "mlock": false,
334
+ "mmap": true,
335
+ "repeat_last_n": 64,
336
+ "repeat_penalty": 1.2,
337
+ "add_bos_token": true,
338
+ "add_eos_token": false,
339
+ "parse_special_tokens": true
340
  }
341
  ]