AFischer1985 committed
Commit ec648c9 • 1 Parent(s): b1ef6e0
Update app.py
app.py CHANGED
@@ -24,7 +24,8 @@ modelPath="/home/af/gguf/models/SauerkrautLM-7b-HerO-q8_0.gguf"
 if(os.path.exists(modelPath)==False):
   #url="https://huggingface.co/TheBloke/WizardLM-13B-V1.2-GGUF/resolve/main/wizardlm-13b-v1.2.Q4_0.gguf"
   #url="https://huggingface.co/TheBloke/Mixtral-8x7B-Instruct-v0.1-GGUF/resolve/main/mixtral-8x7b-instruct-v0.1.Q4_0.gguf?download=true"
-  url="https://huggingface.co/TheBloke/Mistral-7B-Instruct-v0.2-GGUF/resolve/main/mistral-7b-instruct-v0.2.Q4_0.gguf?download=true"
+  #url="https://huggingface.co/TheBloke/Mistral-7B-Instruct-v0.2-GGUF/resolve/main/mistral-7b-instruct-v0.2.Q4_0.gguf?download=true"
+  url="https://huggingface.co/bartowski/gemma-2-9b-it-GGUF/resolve/main/gemma-2-9b-it-Q4_K_M.gguf?download=true"
   response = requests.get(url)
   with open("./model.gguf", mode="wb") as file:
     file.write(response.content)
@@ -53,17 +54,20 @@ import json
 def response(message, history):
   prompt=message
   system="Du bist ein KI-basiertes Assistenzsystem."
-  if("mixtral-8x7b-instruct" in
+  model="gemma-2"
+  if("gemma-2" in model):
+    prompt=f"<bos><start_of_turn>user\n{prompt}<end_of_turn>\n<start_of_turn>model\n"
+  if("mixtral-8x7b-instruct" in model):
     prompt=f"[INST] {prompt} [/INST]"
-  if("Mistral-7B-Instruct" in
+  if("Mistral-7B-Instruct" in model):
     prompt=f"[INST] {prompt} [/INST]"
-  if("openchat-3.5" in
+  if("openchat-3.5" in model):
     prompt=f"GPT4 Correct User: {system} {prompt}<|end_of_turn|>GPT4 Correct Assistant:"
-  if("SauerkrautLM-7b-HerO" in
+  if("SauerkrautLM-7b-HerO" in model):
     prompt=f"<|im_start|>system\n{system}<|im_end|>\n<|im_start|>user\n{prompt}<|im_end|>\n<|im_start|>assistant\n"
-  if("WizardLM-13B-V1.2" in
+  if("WizardLM-13B-V1.2" in model):
     prompt=f"{system} USER: {prompt} ASSISTANT: "
-  if("phi-2" in
+  if("phi-2" in model):
     prompt=f"Instruct: {prompt}\nOutput:"
   print(prompt)
   #url="https://afischer1985-wizardlm-13b-v1-2-q4-0-gguf.hf.space/v1/completions"
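For reference, the first hunk swaps the bootstrap download target from TheBloke's Mistral-7B-Instruct-v0.2 Q4_0 build to bartowski's gemma-2-9b-it Q4_K_M GGUF. Below is a minimal standalone sketch of that bootstrap under the diff's paths and URL; the streaming, timeout, and status check are assumptions added for illustration, not what the Space itself does (it writes response.content in one piece).

import os
import requests

modelPath = "/home/af/gguf/models/SauerkrautLM-7b-HerO-q8_0.gguf"
url = "https://huggingface.co/bartowski/gemma-2-9b-it-GGUF/resolve/main/gemma-2-9b-it-Q4_K_M.gguf?download=true"

# If no local GGUF is present, fetch the gemma-2-9b-it build from the Hub.
if not os.path.exists(modelPath):
    with requests.get(url, stream=True, timeout=60) as response:
        response.raise_for_status()
        with open("./model.gguf", mode="wb") as file:
            # Multi-gigabyte download; writing in chunks keeps memory use flat.
            for chunk in response.iter_content(chunk_size=1 << 20):
                file.write(chunk)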
|
|
24 |
if(os.path.exists(modelPath)==False):
|
25 |
#url="https://huggingface.co/TheBloke/WizardLM-13B-V1.2-GGUF/resolve/main/wizardlm-13b-v1.2.Q4_0.gguf"
|
26 |
#url="https://huggingface.co/TheBloke/Mixtral-8x7B-Instruct-v0.1-GGUF/resolve/main/mixtral-8x7b-instruct-v0.1.Q4_0.gguf?download=true"
|
27 |
+
#url="https://huggingface.co/TheBloke/Mistral-7B-Instruct-v0.2-GGUF/resolve/main/mistral-7b-instruct-v0.2.Q4_0.gguf?download=true"
|
28 |
+
url="https://huggingface.co/bartowski/gemma-2-9b-it-GGUF/resolve/main/gemma-2-9b-it-Q4_K_M.gguf?download=true"
|
29 |
response = requests.get(url)
|
30 |
with open("./model.gguf", mode="wb") as file:
|
31 |
file.write(response.content)
|
|
|
54 |
def response(message, history):
|
55 |
prompt=message
|
56 |
system="Du bist ein KI-basiertes Assistenzsystem."
|
57 |
+
model="gemma-2"
|
58 |
+
if("gemma-2" in model):
|
59 |
+
prompt=f"<bos><start_of_turn>user\n{prompt}<end_of_turn>\n<start_of_turn>model\n"
|
60 |
+
if("mixtral-8x7b-instruct" in model):
|
61 |
prompt=f"[INST] {prompt} [/INST]"
|
62 |
+
if("Mistral-7B-Instruct" in model):
|
63 |
prompt=f"[INST] {prompt} [/INST]"
|
64 |
+
if("openchat-3.5" in model):
|
65 |
prompt=f"GPT4 Correct User: {system} {prompt}<|end_of_turn|>GPT4 Correct Assistant:"
|
66 |
+
if("SauerkrautLM-7b-HerO" in model):
|
67 |
prompt=f"<|im_start|>system\n{system}<|im_end|>\n<|im_start|>user\n{prompt}<|im_end|>\n<|im_start|>assistant\n"
|
68 |
+
if("WizardLM-13B-V1.2" in model):
|
69 |
prompt=f"{system} USER: {prompt} ASSISTANT: "
|
70 |
+
if("phi-2" in model):
|
71 |
prompt=f"Instruct: {prompt}\nOutput:"
|
72 |
print(prompt)
|
73 |
#url="https://afischer1985-wizardlm-13b-v1-2-q4-0-gguf.hf.space/v1/completions"
|
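The second hunk introduces a model variable and a gemma-2 turn template (<bos><start_of_turn>user ... <start_of_turn>model) ahead of the existing per-model templates. The sketch below restates that template selection as a small standalone helper; the function name format_prompt, the early returns, and the example call are illustrative assumptions, while the template strings themselves are taken from the diff.

# Illustrative sketch of the per-model prompt templating in this commit.
def format_prompt(message: str,
                  model: str = "gemma-2",
                  system: str = "Du bist ein KI-basiertes Assistenzsystem.") -> str:
    if "gemma-2" in model:
        # gemma-2 chat turns; this template has no separate system role
        return f"<bos><start_of_turn>user\n{message}<end_of_turn>\n<start_of_turn>model\n"
    if "mixtral-8x7b-instruct" in model or "Mistral-7B-Instruct" in model:
        # both Mistral-family models use the [INST] format
        return f"[INST] {message} [/INST]"
    if "openchat-3.5" in model:
        return f"GPT4 Correct User: {system} {message}<|end_of_turn|>GPT4 Correct Assistant:"
    if "SauerkrautLM-7b-HerO" in model:
        return (f"<|im_start|>system\n{system}<|im_end|>\n"
                f"<|im_start|>user\n{message}<|im_end|>\n<|im_start|>assistant\n")
    if "WizardLM-13B-V1.2" in model:
        return f"{system} USER: {message} ASSISTANT: "
    if "phi-2" in model:
        return f"Instruct: {message}\nOutput:"
    return message  # fall through: send the message unformatted

print(format_prompt("Hello, who are you?"))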