add app

Files changed:
- app.py (+123, -0)
- requirements.txt (+3, -0)
app.py
ADDED
@@ -0,0 +1,123 @@
import random

import streamlit as st
from ctransformers import AutoModelForCausalLM

system_prompt = """Dies ist eine Unterhaltung zwischen \
einem intelligenten, hilfsbereitem \
KI-Assistenten und einem Nutzer.
Der Assistent gibt Antworten in Form von Zitaten."""

# ChatML-style template; {system_prompt} and {prompt} are filled in per request.
prompt_format = "<|im_start|>system\n{system_prompt}\
<|im_end|>\n<|im_start|>user\nZitiere {prompt}\
<|im_end|>\n<|im_start|>assistant\n"

# Sampling presets selectable in the UI.
modes = {
    "Authentisch": {"temperature": 0.05, "top_k": 10},
    "Ausgeglichen": {"temperature": 0.5, "top_p": 0.9},
    "Chaotisch": {"temperature": 0.9},
}

# Fallback authors, picked at random when the user leaves the input field empty.
authors = [
    "Johann Wolfgang von Goethe",
    "Friedrich Schiller",
    "Immanuel Kant",
    "Oscar Wilde",
    "Lü Bu We",
    "Wilhelm Busch",
    "Friedrich Nietzsche",
    "Karl Marx",
    "William Shakespeare",
    "Kurt Tucholsky",
    "Georg Christoph Lichtenberg",
    "Arthur Schopenhauer",
    "Seneca der Jüngere",
    "Martin Luther",
    "Mark Twain",
    "Cicero",
    "Marie von Ebner-Eschenbach",
    "Novalis",
    "Franz Kafka",
    "Jean-Jacques Rousseau",
    "Heinrich Heine",
    "Honoré de Balzac",
    "Georg Büchner",
    "Gotthold Ephraim Lessing",
    "Markus M. Ronner",
    "Gerhard Uhlenbruck",
    "Theodor Fontane",
    "Jean Paul",
    "Leo Tolstoi",
    "Friedrich Hebbel",
    "Horaz",
    "Albert Einstein",
    "Jesus von Nazareth",
    "Angela Merkel",
    "Ambrose Bierce",
    "Christian Morgenstern",
    "Friedrich Hölderlin",
    "Joseph Joubert",
    "François de La Rochefoucauld",
    "Otto von Bismarck",
    "Fjodor Dostojewski",
    "Ovid",
    "Rudolf Steiner",
    "Ludwig Börne",
    "Hugo von Hofmannsthal",
    "Laotse",
    "Thomas von Aquin",
    "Ludwig Wittgenstein",
    "Friedrich Engels",
    "Charles de Montesquieu",
]


st.title("Zitatgenerator")

# Initialization: load the GGUF model once per session and cache it in st.session_state.
if "model" not in st.session_state:
    data_load_state = st.text("Lade Modell...")
    model = AutoModelForCausalLM.from_pretrained(
        "caretech-owl/leo-hessionai-7B-quotes-gguf", model_type="Llama"
    )
    st.session_state["model"] = model
    st.session_state["author"] = ""
    data_load_state.text("Modell geladen!")
else:
    data_load_state = st.text("Modell geladen!")

with st.form("user_form", clear_on_submit=False):
    col1, col2 = st.columns([2, 1])
    with col1:
        placeholder = st.empty()
        question = placeholder.text_input(
            "Zitat generieren von: ",
            placeholder="Zufallsautor",
            value=st.session_state["author"],
        )
        generate = st.form_submit_button(label="Zitat generieren")
    with col2:
        mode = st.selectbox(
            "Modus:",
            ("Authentisch", "Ausgeglichen", "Chaotisch"),
            index=1,
        )

if generate:
    if not question:
        # No author entered: pick one at random and show it in the input field.
        question = placeholder.text_input(
            "Zitat generieren von: ",
            placeholder="Aristoteles",
            value=random.choice(authors),
        )
    st.session_state["author"] = question

    with st.spinner("Generiere Zitat..."):
        # Fill the ChatML template; sampling parameters come from the selected mode.
        query = prompt_format.format(
            system_prompt=system_prompt,
            prompt=st.session_state["author"],
        )
        output = st.session_state["model"](
            query, stop="<|im_end|>", max_new_tokens=200, **modes[mode]
        )
        st.success(output)
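Note on the template: prompt_format above renders to a single ChatML-style string. The sketch below (not part of the committed app.py) shows what the model receives for one request; the author name is only an illustrative value, and system_prompt / prompt_format are the variables defined above.

# Sketch only, not part of app.py: render the prompt exactly as the app does.
example = prompt_format.format(
    system_prompt=system_prompt,
    prompt="Johann Wolfgang von Goethe",  # illustrative author choice
)
# example now reads:
# <|im_start|>system\nDies ist eine Unterhaltung ... Form von Zitaten.<|im_end|>
# <|im_start|>user\nZitiere Johann Wolfgang von Goethe<|im_end|>
# <|im_start|>assistant\n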
requirements.txt
ADDED
@@ -0,0 +1,3 @@
streamlit
ctransformers==0.2.27
transformers==4.33.3
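To try the Space locally, the standard Streamlit workflow should suffice: install the pinned dependencies with pip install -r requirements.txt, then start the app with streamlit run app.py. This is generic Streamlit usage rather than anything specified by this commit.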