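# Streamlit app: "Zitatgenerator" (quote generator). Loads a German GGUF quote
# model via ctransformers and generates a quote attributed to a chosen author.
# Run locally with `streamlit run app.py` (assuming this file is saved as app.py).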
import random
import streamlit as st
from ctransformers import AutoModelForCausalLM
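# German system prompt ("This is a conversation between an intelligent, helpful
# AI assistant and a user. The assistant answers in the form of quotes.").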
system_prompt = """Dies ist eine Unterhaltung zwischen \
einem intelligenten, hilfsbereitem \
KI-Assistenten und einem Nutzer.
Der Assistent gibt Antworten in Form von Zitaten."""
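# ChatML-style prompt template: system message, then the user request
# "Zitiere {prompt}" ("Quote {prompt}"), then the assistant turn to complete.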
prompt_format = "<|im_start|>system\n{system_prompt}\
<|im_end|>\n<|im_start|>user\nZitiere {prompt}\
<|im_end|>\n<|im_start|>assistant\n"
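# Sampling presets: "Authentisch" stays close to known quotes (near-greedy),
# "Ausgeglichen" is a balanced default, "Chaotisch" samples more freely.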
modes = {
"Authentisch": {"temperature": 0.05, "top_k": 10},
"Ausgeglichen": {"temperature": 0.5, "top_p": 0.9},
"Chaotisch": {"temperature": 0.9},
}
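# Authors used as a random fallback when no name is entered.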
authors = [
"Johann Wolfgang von Goethe",
"Friedrich Schiller",
"Immanuel Kant",
"Oscar Wilde",
"Lü Bu We",
"Wilhelm Busch",
"Friedrich Nietzsche",
"Karl Marx",
"William Shakespeare",
"Kurt Tucholsky",
"Georg Christoph Lichtenberg",
"Arthur Schopenhauer",
"Seneca der Jüngere",
"Martin Luther",
"Mark Twain",
"Cicero",
"Marie von Ebner-Eschenbach",
"Novalis",
"Franz Kafka",
"Jean-Jacques Rousseau",
"Heinrich Heine",
"Honoré de Balzac",
"Georg Büchner",
"Gotthold Ephraim Lessing",
"Markus M. Ronner",
"Gerhard Uhlenbruck",
"Theodor Fontane",
"Jean Paul",
"Leo Tolstoi",
"Friedrich Hebbel",
"Horaz",
"Albert Einstein",
"Jesus von Nazareth",
"Angela Merkel",
"Ambrose Bierce",
"Christian Morgenstern",
"Friedrich Hölderlin",
"Joseph Joubert",
"François de La Rochefoucauld",
"Otto von Bismarck",
"Fjodor Dostojewski",
"Ovid",
"Rudolf Steiner",
"Ludwig Börne",
"Hugo von Hofmannsthal",
"Laotse",
"Thomas von Aquin",
"Ludwig Wittgenstein",
"Friedrich Engels",
"Charles de Montesquieu",
]
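# Streamlit UI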
st.title("Zitatgenerator")
# Initialization
if "model" not in st.session_state:
    data_load_state = st.text("Lade Modell...")
    model = AutoModelForCausalLM.from_pretrained(
        "caretech-owl/leo-hessionai-7B-quotes-gguf", model_type="Llama"
    )
    st.session_state["model"] = model
    st.session_state["author"] = ""
    data_load_state.text("Modell geladen!")
else:
    data_load_state = st.text("Modell geladen!")
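# Input form: author text field (left) and generation mode selector (right).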
with st.form("user_form", clear_on_submit=False):
    col1, col2 = st.columns([2, 1])
    with col1:
        placeholder = st.empty()
        question = placeholder.text_input(
            "Zitat generieren von: ",
            placeholder="Zufallsautor",
            value=st.session_state["author"],
        )
        generate = st.form_submit_button(label="Zitat generieren")
    with col2:
        mode = st.selectbox(
            "Modus:",
            ("Authentisch", "Ausgeglichen", "Chaotisch"),
            index=1,
        )
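# On submit: fall back to a random author if the field is empty, build the
# prompt, and run the model with the selected sampling preset.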
if generate:
    if not question:
        question = placeholder.text_input(
            "Zitat generieren von: ",
            placeholder="Aristoteles",
            value=random.choice(authors),
        )
    st.session_state["author"] = question
    with st.spinner("Denke über Zitat nach (das kann etwas dauern)..."):
        query = prompt_format.format(
            system_prompt=system_prompt,
            prompt=st.session_state["author"],
        )
        print("=" * 20)
        print(query)
        output = st.session_state["model"](
            query, stop="<|im_end|>", max_new_tokens=200, **modes[mode]
        )
        print("-" * 20)
        print(output)
        print("=" * 20)
    st.success(output)