# booru_txt2tag / app.py
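"""Gradio demo: turn a free-form image description into Danbooru tags using a
GGML-quantized LLaMA model served with llama-cpp-python."""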
import gradio as gr
from llama_cpp import Llama

# Maximum number of tokens to generate for the tag list.
MAX_TOKENS = 96

# Load the GGML-quantized model with a 2048-token context window.
llm = Llama(model_path="ggml-model.bin", n_ctx=2048)
def generate_text_instruct(input_text):
    response = ""
    # Instruction asking the model to produce Danbooru tags for the given description.
    txt2tag_prompt = f"You are a tool that helps tag danbooru images when given a textual image description. Provide me with danbooru tags that accurately fit the following description. {input_text}"
    # Wrap the instruction in a Vicuna-style chat template and stream tokens back to the UI as they arrive.
    for output in llm(f"A chat between a curious user and an artificial intelligence assistant. The assistant gives helpful, detailed, and polite answers to the user's questions. USER: {txt2tag_prompt} ASSISTANT:",
                      echo=False, stream=True, max_tokens=MAX_TOKENS, stop=["</s>", "\n", "User:", "<unk>"]):
        answer = output["choices"][0]["text"]
        response += answer
        yield response
instruct_interface = gr.Interface(
    fn=generate_text_instruct,
    inputs=gr.Textbox(lines=10, label="Enter your image description"),
    outputs=gr.Textbox(label="danbooru tags"),
)
with gr.Blocks() as demo:
    with gr.Tab("Instruct"):
        gr.Markdown("# GGML Booru Txt2Tag Demo")
        instruct_interface.render()

demo.queue(max_size=16, concurrency_count=1).launch(debug=True)
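# Running `python app.py` starts the Gradio server (by default at http://127.0.0.1:7860).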