from __future__ import annotations
from typing import Iterable
import gradio as gr
from gradio.themes.base import Base
from gradio.themes.utils import colors, fonts, sizes
from llama_cpp import Llama
from huggingface_hub import hf_hub_download
import requests
from bs4 import BeautifulSoup
headers = {
    "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.36"
}
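# Fetch the 4-bit quantized GPT4All-LoRA weights (ggjt format) from the Hub and
# load them with llama.cpp; n_ctx=640 caps the combined prompt + completion length.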
hf_hub_download(repo_id="LLukas22/gpt4all-lora-quantized-ggjt", filename="ggjt-model.bin", local_dir=".")
llm = Llama(model_path="./ggjt-model.bin", n_ctx=640)
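# Alpaca-style prompt template: the user's question and the scraped search snippets
# go under "### Instruction", and the model completes the "### Response" section.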
ins = '''### Instruction:
{question}
{data}
### Response:
'''
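# Monochrome-based theme kept for reference; the Blocks UI below uses the custom
# PurpleTheme defined further down instead.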
theme = gr.themes.Monochrome(
    primary_hue="purple",
    secondary_hue="red",
    neutral_hue="neutral",
    radius_size=gr.themes.sizes.radius_sm,
    font=[gr.themes.GoogleFont("Inter"), "ui-sans-serif", "system-ui", "sans-serif"],
)
def search_ddg(question: str):
    # Query DuckDuckGo's HTML endpoint and collect the first two result snippets.
    response = requests.get("https://duckduckgo.com/html/", headers=headers, params={"q": question})
    soup = BeautifulSoup(response.text, "html.parser")
    result_texts = soup.find_all("a", class_="result__snippet")
    results: list[str] = []
    for element in result_texts:
        if len(results) >= 2:
            break
        results.append(element.get_text())
    # Join the snippets into one newline-separated string to feed into the prompt.
    output_string = ""
    for snippet in results:
        output_string += f"{snippet} \n"
    return output_string
def generate(instruction):
    # Augment the question with fresh DuckDuckGo snippets, fill in the prompt
    # template, and let the model generate until it emits a stop sequence.
    feeding_data = "\n" + search_ddg(instruction)
    prompt = ins.format(question=instruction, data=feeding_data)
    print(prompt)
    response = llm(prompt, stop=['### Instruction:', '### End'])
    result = response['choices'][0]['text']
    return result
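# Example questions shown under the input box.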
examples = [
    "How do dogs bark?",
    "Why are apples red?",
    "How do I make a campfire?",
    "Why do cats love to chirp at something?"
]
def process_example(args):
    # generate() returns the full answer as a single string, so pass it straight through.
    return generate(args)
css = ".generating {visibility: hidden}"
class PurpleTheme(Base):
    def __init__(
        self,
        *,
        primary_hue: colors.Color | str = colors.purple,
        secondary_hue: colors.Color | str = colors.red,
        neutral_hue: colors.Color | str = colors.neutral,
        spacing_size: sizes.Size | str = sizes.spacing_md,
        radius_size: sizes.Size | str = sizes.radius_md,
        font: fonts.Font
        | str
        | Iterable[fonts.Font | str] = (
            fonts.GoogleFont("Inter"),
            "ui-sans-serif",
            "sans-serif",
        ),
        font_mono: fonts.Font
        | str
        | Iterable[fonts.Font | str] = (
            fonts.GoogleFont("Space Grotesk"),
            "ui-monospace",
            "monospace",
        ),
    ):
        super().__init__(
            primary_hue=primary_hue,
            secondary_hue=secondary_hue,
            neutral_hue=neutral_hue,
            spacing_size=spacing_size,
            radius_size=radius_size,
            font=font,
            font_mono=font_mono,
        )
        super().set(
            button_primary_background_fill="linear-gradient(90deg, *primary_300, *secondary_400)",
            button_primary_background_fill_hover="linear-gradient(90deg, *primary_200, *secondary_300)",
            button_primary_text_color="white",
            button_primary_background_fill_dark="linear-gradient(90deg, *primary_600, *secondary_800)",
            block_shadow="*shadow_drop_lg",
            button_shadow="*shadow_drop_lg",
            input_background_fill="zinc",
            input_border_color="*secondary_300",
            input_shadow="*shadow_drop",
            input_shadow_focus="*shadow_drop_lg",
        )
custom_theme = PurpleTheme()
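# Gradio UI: a question textbox, a markdown answer panel, example prompts, and a
# Generate button wired to the model.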
with gr.Blocks(theme=custom_theme, analytics_enabled=False, css=css) as demo:
    with gr.Column():
        gr.Markdown(
            """ ## GPT4ALL
            7b quantized 4bit (q4_0)
            Type in the box below and click the button to generate answers to your most pressing questions!
            """)
        with gr.Row():
            with gr.Column(scale=3):
                instruction = gr.Textbox(placeholder="Enter your question here", label="Question", elem_id="q-input")
                with gr.Box():
                    gr.Markdown("**Answer**")
                    output = gr.Markdown(elem_id="q-output")
                submit = gr.Button("Generate", variant="primary")
                gr.Examples(
                    examples=examples,
                    inputs=[instruction],
                    cache_examples=False,
                    fn=process_example,
                    outputs=[output],
                )
    submit.click(generate, inputs=[instruction], outputs=[output])
    instruction.submit(generate, inputs=[instruction], outputs=[output])
# queue() serializes requests so only one generation runs at a time.
demo.queue(concurrency_count=1).launch(debug=True)