Commit 1f06a20 by Epiculous (parent: 445fc45)

Create README.md

Files changed (1): README.md (+80 lines)
---
datasets:
- lemonilia/LimaRP
- grimulkan/theory-of-mind
- Epiculous/Gnosis
tags:
- not-for-all-audiences
license: agpl-3.0
---

# Fett-uccine

This model was created by training the Mistral base model on LimaRP (ShareGPT format provided by SAO), theory-of-mind data, and Gnosis (provided by jeiku).

The resulting 8-bit LoRA was then merged into Mistral Instruct, producing the model you see here.
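
If you want to reproduce this kind of merge yourself, a minimal sketch with the PEFT library looks like the following. The base model ID and adapter path are placeholders, not the actual artifacts behind this model.

```python
# Hypothetical sketch of merging a trained LoRA adapter into Mistral Instruct.
# The model IDs and adapter path are placeholders, not the real training artifacts.
from transformers import AutoModelForCausalLM, AutoTokenizer
from peft import PeftModel

base_id = "mistralai/Mistral-7B-Instruct-v0.2"  # assumed Instruct base; version not stated in the card
base = AutoModelForCausalLM.from_pretrained(base_id, torch_dtype="auto")

# Attach the adapter, then fold its weights into the base model.
merged = PeftModel.from_pretrained(base, "path/to/lora-adapter")  # placeholder adapter path
merged = merged.merge_and_unload()

tokenizer = AutoTokenizer.from_pretrained(base_id)
merged.save_pretrained("Fett-uccine-merged")
tokenizer.save_pretrained("Fett-uccine-merged")
```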

Works best with the ChatML instruct format.
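
For reference, ChatML prompts follow this standard layout (placeholders in braces):

```
<|im_start|>system
{system prompt}<|im_end|>
<|im_start|>user
{user message}<|im_end|>
<|im_start|>assistant
```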

This model is in honor of the SillyTavern community. Keep being awesome!

Optimal settings provided by Nitral:
```json
{
    "temp": 5,
    "temperature_last": true,
    "top_p": 1,
    "top_k": 0,
    "top_a": 0,
    "tfs": 1,
    "epsilon_cutoff": 0,
    "eta_cutoff": 0,
    "typical_p": 1,
    "min_p": 0.05,
    "rep_pen": 1,
    "rep_pen_range": 0,
    "no_repeat_ngram_size": 0,
    "penalty_alpha": 0,
    "num_beams": 1,
    "length_penalty": 0,
    "min_length": 0,
    "encoder_rep_pen": 1,
    "freq_pen": 0,
    "presence_pen": 0,
    "do_sample": true,
    "early_stopping": false,
    "dynatemp": false,
    "min_temp": 1,
    "max_temp": 5,
    "dynatemp_exponent": 1,
    "smoothing_factor": 0.3,
    "add_bos_token": true,
    "truncation_length": 2048,
    "ban_eos_token": false,
    "skip_special_tokens": true,
    "streaming": false,
    "mirostat_mode": 0,
    "mirostat_tau": 5,
    "mirostat_eta": 0.1,
    "guidance_scale": 1,
    "negative_prompt": "",
    "grammar_string": "",
    "banned_tokens": "",
    "ignore_eos_token_aphrodite": false,
    "spaces_between_special_tokens_aphrodite": true,
    "sampler_order": [
        6,
        0,
        1,
        3,
        4,
        2,
        5
    ],
    "logit_bias": [],
    "n": 1,
    "rep_pen_size": 0,
    "genamt": 150,
    "max_length": 8192
}
```
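
These are SillyTavern/Aphrodite sampler settings. To try the model with plain transformers instead, a minimal sketch follows; the repository ID is a placeholder, and only the settings with direct generate() equivalents (temperature, top_p, min_p, genamt) carry over, so sampling will not match exactly (transformers has no smoothing_factor, and applies temperature before min_p rather than last).

```python
# Minimal inference sketch with transformers; the model ID is a placeholder.
# Loading a GPTQ checkpoint additionally requires the optimum/auto-gptq packages.
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "Epiculous/Fett-uccine-7B-GPTQ"  # placeholder; substitute the actual repo ID
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id, device_map="auto")

messages = [{"role": "user", "content": "Introduce yourself in one sentence."}]
inputs = tokenizer.apply_chat_template(
    messages, add_generation_prompt=True, return_tensors="pt"
).to(model.device)

outputs = model.generate(
    inputs,
    do_sample=True,
    temperature=5.0,     # from Nitral's settings; only sane here alongside min_p
    top_p=1.0,
    min_p=0.05,
    max_new_tokens=150,  # "genamt" in the settings above
)
print(tokenizer.decode(outputs[0][inputs.shape[-1]:], skip_special_tokens=True))
```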