sharpenb committed on
Commit 2419959
1 Parent(s): 0005496

Upload folder using huggingface_hub (#2)


- 390ab0350e9b265e97d07bcf252c5c5688d30920907cac9196a1fa2b093df263 (e48cff9f8a26d0f13e20c01630f3649e641ed3c9)
- 73c8dd476526ca0897291a0641988ac078589a51e691ddeab0cc797576e923d1 (3fbbbe02ba888fdd850f48827fc8368a04302163)
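
The commit title refers to the folder-upload API in huggingface_hub. As a rough sketch of how such an upload is typically performed (the repo ID and local folder path below are placeholders for illustration, not values taken from this commit):

    from huggingface_hub import upload_folder

    # Placeholder repo ID and folder path; the actual values used for
    # this commit are not shown on this page.
    upload_folder(
        repo_id="your-username/your-model-repo",
        folder_path="./local_model_dir",
        commit_message="Upload folder using huggingface_hub",
    )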

Files changed (3)
  1. config.json +1 -1
  2. plots.png +0 -0
  3. smash_config.json +1 -1
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "/tmp/tmpsdxt5o0w",
+  "_name_or_path": "/tmp/tmpsstwovzf",
   "architectures": [
     "LlamaForCausalLM"
   ],
plots.png CHANGED
smash_config.json CHANGED
@@ -8,7 +8,7 @@
   "compilers": "None",
   "task": "text_text_generation",
   "device": "cuda",
-  "cache_dir": "/ceph/hdd/staff/charpent/.cache/modelskhir_7fa",
+  "cache_dir": "/ceph/hdd/staff/charpent/.cache/modelsatcjg8d3",
   "batch_size": 1,
   "model_name": "LeoLM/leo-hessianai-7b-chat",
   "pruning_ratio": 0.0,