# Update README.md

The following models were included in the merge:

* [cognitivecomputations/dolphin-2.9.1-llama-3-8b](https://huggingface.co/cognitivecomputations/dolphin-2.9.1-llama-3-8b)
* [abacusai/Llama-3-Smaug-8B](https://huggingface.co/abacusai/Llama-3-Smaug-8B)
### Ollama

Ollama Create

```
jaylee@lees-MacBook-Pro-2 % ./ollama create joah_remix -f ./Modelfile_Q5_K_M
transferring model data
creating model layer
creating template layer
creating system layer
creating parameters layer
creating config layer
using already created layer sha256:4eadb53f0c70683aeab133c60d76b8ffc9f41ca5d49524d4b803c19e5ce7e3a5
using already created layer sha256:8ab4849b038cf0abc5b1c9b8ee1443dca6b93a045c2272180d985126eb40bf6f
writing layer sha256:ae2974c64ea5d6f488eeb1b10717a270f48fb3452432589db6f5e60472ae96ac
writing layer sha256:74ef6315972b317734fe01e7e1ad5b49fce1fa8ed3978cb66501ecb8c3a2e984
writing layer sha256:83882a5e957b8ce0d454f26bcedb2819413b49d6b967b28d60edb8ac61edfa58
writing manifest
success
```

MODELFILE

```
FROM joah-remix-llama-3-koen-8b-reborn-Q5_K_M.gguf
TEMPLATE """{{ if .System }}<|start_header_id|>system<|end_header_id|>

{{ .System }}<|eot_id|>{{ end }}{{ if .Prompt }}<|start_header_id|>user<|end_header_id|>

{{ .Prompt }}<|eot_id|>{{ end }}<|start_header_id|>assistant<|end_header_id|>

{{ .Response }}<|eot_id|>"""

SYSTEM """
친절한 챗봇으로서 상대방의 요청에 최대한 자세하고 친절하게 답하자. 모든 대답은 한국어(Korean)으로 대답해줘.
"""

PARAMETER num_keep 24
PARAMETER temperature 0.7
PARAMETER num_predict 3000
PARAMETER stop "<|start_header_id|>"
PARAMETER stop "<|end_header_id|>"
PARAMETER stop "<|eot_id|>"
```
### Configuration

The following YAML configuration was used to produce this model: