DevilishDaoSaint committed
Commit b465214 · verified · 1 parent: 29a8d8d

Trained with Unsloth
README.md CHANGED
@@ -1,19 +1,25 @@
 ---
 license: apache-2.0
+tags:
+- light-novel
+- translation
+- Chinese
+- English
+- named-entity-recognition
+- term-extraction
+- unsloth
+- trl
+- sft
 model_type: translation
 language_bcp47:
-- zh
-- en
+- zh
+- en
 finetuned_from: unsloth/Yi-1.5-6B-bnb-4bit
-description: |
-  Mythical Translator is a fine-tuned model specifically designed to translate Chinese light novels into natural-sounding English suitable for the genre. It also extracts terms and proper nouns for consistency in future translations.
-tags:
-- light-novel
-- translation
-- Chinese
-- English
-- named-entity-recognition
-- term-extraction
+description: 'Mythical Translator is a fine-tuned model specifically designed to translate
+  Chinese light novels into natural-sounding English suitable for the genre. It also
+  extracts terms and proper nouns for consistency in future translations.
+
+  '
 ---
 
 # Model Card for Mythical Translator
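The updated card describes zh→en light-novel translation with term extraction. Below is a minimal inference sketch using `transformers`; the repo id and the prompt template are placeholders I am assuming, since neither is specified in the diff above.

```python
# Minimal usage sketch. Assumptions: repo id is hypothetical, and the prompt
# format expected by the fine-tune is a guess (the card does not document one).
from transformers import AutoModelForCausalLM, AutoTokenizer

repo_id = "DevilishDaoSaint/mythical-translator"  # placeholder, replace with the actual repo id

tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = AutoModelForCausalLM.from_pretrained(
    repo_id,
    torch_dtype="auto",   # load in the dtype stored in the checkpoint (float16)
    device_map="auto",    # requires `accelerate`
)

# Arbitrary example passage; the zh -> en instruction framing is an assumption.
prompt = (
    "Translate the following Chinese light novel passage into English:\n\n"
    "他抬头望向九霄之上的雷云。\n\nEnglish:"
)
inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
output = model.generate(**inputs, max_new_tokens=200)
# Decode only the newly generated tokens, skipping the prompt.
print(tokenizer.decode(output[0][inputs["input_ids"].shape[-1]:], skip_special_tokens=True))
```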
config.json CHANGED
@@ -1,3 +1,31 @@
 {
-  "model_type": "llama"
-}
+  "_name_or_path": "unsloth/yi-1.5-6b-bnb-4bit",
+  "architectures": [
+    "LlamaForCausalLM"
+  ],
+  "attention_bias": false,
+  "attention_dropout": 0.0,
+  "bos_token_id": 1,
+  "eos_token_id": 2,
+  "hidden_act": "silu",
+  "hidden_size": 4096,
+  "initializer_range": 0.02,
+  "intermediate_size": 11008,
+  "max_position_embeddings": 4096,
+  "mlp_bias": false,
+  "model_type": "llama",
+  "num_attention_heads": 32,
+  "num_hidden_layers": 32,
+  "num_key_value_heads": 4,
+  "pad_token_id": 0,
+  "pretraining_tp": 1,
+  "rms_norm_eps": 1e-06,
+  "rope_scaling": null,
+  "rope_theta": 5000000.0,
+  "tie_word_embeddings": false,
+  "torch_dtype": "float16",
+  "transformers_version": "4.43.2",
+  "unsloth_version": "2024.8",
+  "use_cache": true,
+  "vocab_size": 64000
+}
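The new `config.json` pins the full Llama-architecture hyperparameters inherited from the Yi-1.5-6B base (hidden size 4096, 32 layers, 4 key-value heads for grouped-query attention, 64k vocabulary). A small sketch for sanity-checking a locally downloaded copy of the file, assuming `transformers` is installed and the file sits next to the script:

```python
# Sketch: inspect the committed config.json locally; the path is illustrative.
from transformers import LlamaConfig

config = LlamaConfig.from_json_file("config.json")
print(config.model_type)           # "llama"
print(config.hidden_size)          # 4096
print(config.num_hidden_layers)    # 32
print(config.num_key_value_heads)  # 4  (grouped-query attention)
print(config.vocab_size)           # 64000
```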
generation_config.json CHANGED
@@ -3,5 +3,5 @@
   "bos_token_id": 1,
   "eos_token_id": 2,
   "pad_token_id": 0,
-  "transformers_version": "4.42.4"
+  "transformers_version": "4.43.2"
 }
pytorch_model-00001-of-00003.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:7b4cfa76c182579f7fb41c9e928aa6f01bbbd74f8c231ad36c3e7e34dbd1f933
+oid sha256:2761c9ce5cfcca1301ec6ac02fdd0a90cb1653307d5fc219254260fe9c8afa79
 size 4932739043
pytorch_model-00002-of-00003.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:e1fc6e8ce5fbecda987a8b838046c0f49797d671bd4504c66bf9cc2b0a93e065
+oid sha256:1df7387262a85b3f0d2fddde929ebcddd81d91a821383dede9bc2086af2c78c7
 size 4976834114
pytorch_model-00003-of-00003.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:fddbc94aba13c1ea8f660bad76f18128047c305e7f6a71f21c9be2ff6e3ea6b6
+oid sha256:d8833affe729f1ddcad8a657f75cc7e58f0f8976f655a8f2c494a94e2b040a96
 size 2212600834
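Each weight shard is stored as a Git LFS pointer carrying the SHA-256 oid and byte size of the actual file. A sketch for verifying a downloaded shard against the oid recorded in this commit (the expected hash below is the new oid of shard 1; the local file name is assumed to match the repo name):

```python
# Sketch: verify a downloaded weight shard against the SHA-256 oid in its LFS pointer.
import hashlib

def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    """Stream the file so multi-GB shards don't need to fit in memory."""
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

expected = "2761c9ce5cfcca1301ec6ac02fdd0a90cb1653307d5fc219254260fe9c8afa79"
actual = sha256_of("pytorch_model-00001-of-00003.bin")
print("match" if actual == expected else "MISMATCH")
```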