# chatbot.py
import gradio as gr
from huggingface_hub import InferenceClient
import faiss
import json
from sentence_transformers import SentenceTransformer

# Client for the hosted Zephyr-7B-beta chat model on the Hugging Face Inference API
client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")

# Load the FAISS index and the sentence transformer model
index = faiss.read_index("apexcustoms_index.faiss")
model = SentenceTransformer('sentence_transformer_model')
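# Note: the FAISS index is assumed to have been built with embeddings from this same
# saved sentence-transformer model; otherwise the query/vector dimensions will not match.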

# Load the extracted text
with open("apexcustoms.json", "r") as f:
    documents = json.load(f)
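
# Assumption: apexcustoms.json holds a list of text chunks stored in the same order
# as the vectors added to the FAISS index, so index row i maps to documents[i].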


def retrieve_documents(query, k=5):
    # Embed the query and return the k nearest text chunks from the index
    query_embedding = model.encode([query])
    distances, indices = index.search(query_embedding, k)
    return [documents[i] for i in indices[0]]
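
# Example usage (hypothetical query, for illustration only):
#     retrieve_documents("matte black wrap for a Mustang")  # -> 5 closest chunks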


# gr.ChatInterface calls respond(message, history, *additional_inputs), passing the
# extra inputs in the order they are declared below.
def respond(message, history, system_message, max_tokens, temperature, top_p):
    # Retrieve relevant documents and join them into a single context block
    relevant_docs = retrieve_documents(message)
    context = "\n\n".join(relevant_docs)

    messages = [{"role": "system", "content": system_message}]

    # Replay earlier turns of the conversation
    for user_msg, assistant_msg in history:
        if user_msg:
            messages.append({"role": "user", "content": user_msg})
        if assistant_msg:
            messages.append({"role": "assistant", "content": assistant_msg})

    # Send the current message once, prefixed with the retrieved context
    messages.append({"role": "user", "content": f"Context: {context}\n\n{message}"})

    # Stream the completion back token by token
    response = ""
    for chunk in client.chat_completion(
        messages,
        max_tokens=max_tokens,
        stream=True,
        temperature=temperature,
        top_p=top_p,
    ):
        token = chunk.choices[0].delta.content
        if token:  # the final streamed chunk may carry no content
            response += token
        yield response


demo = gr.ChatInterface(
    respond,
    additional_inputs=[
        gr.Textbox(
            value=(
                "You are a helpful car configuration assistant; specifically, you are the "
                "assistant for Apex Customs (https://www.apexcustoms.com/). Given the user's "
                "input, provide suggestions for car models, colors, and customization options. "
                "Be creative and conversational in your responses. Remember the user's car "
                "model and tailor your answers accordingly."
            ),
            label="System message",
        ),
        gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
        gr.Slider(minimum=0.1, maximum=4.0, value=0.3, step=0.1, label="Temperature"),
        gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-p (nucleus sampling)"),
    ],
)


if __name__ == "__main__":
    demo.launch()