import gradio as gr
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

# Load the Zephyr-7B model
model_name = "HuggingFaceH4/zephyr-7b-beta"
model = AutoModelForCausalLM.from_pretrained(
    model_name,
    torch_dtype=torch.bfloat16,
    device_map="auto"
)
tokenizer = AutoTokenizer.from_pretrained(model_name)
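# Note: device_map="auto" requires the `accelerate` package, and a 7B model in
# bfloat16 needs roughly 14 GB of memory, so a GPU-backed runtime is assumed here.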

# Initial system prompt and chat history
SYSTEM_PROMPT = {
    "role": "system",
    "content": "You are an experienced fashion designer who starts the conversation with a proper greeting, "
               "gives valuable and catchy fashion advice and suggestions, and stays precise and to the point."
}
messages = [SYSTEM_PROMPT]
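# Note: `messages` is module-level state, so every visitor to a deployed app shares
# one conversation. For per-session history, Gradio's gr.State could be used instead;
# that is a suggestion, not part of the original setup.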

# Function to reset the chat
def reset_chat():
    global messages
    messages = [SYSTEM_PROMPT]  # Keep the system prompt so the assistant persona survives a reset
    return []

# Function to handle questionnaire submission
def submit_questionnaire(name, age, location, gender, ethnicity, height, weight,
                         style_preference, color_palette, everyday_style):
    # Store questionnaire responses as needed
    # Placeholder logic for storing responses
    return "Thank you for completing the questionnaire!"

# Function to handle chat
def chat(user_input):
    global messages
    if user_input:
        # Append user message to the conversation history
        messages.append({"role": "user", "content": user_input})

        # Prepare input for the model using the chat template
        chat_input = tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)

        # Generate a response using the model
        try:
            model_inputs = tokenizer(chat_input, return_tensors="pt").to(model.device)
            generated_ids = model.generate(
                **model_inputs,
                max_new_tokens=256,
                do_sample=True,
                temperature=0.7,
                top_k=50,
                top_p=0.95
            )
            # Decode only the newly generated tokens, not the prompt that was fed in
            new_tokens = generated_ids[:, model_inputs["input_ids"].shape[1]:]
            response = tokenizer.batch_decode(new_tokens, skip_special_tokens=True)[0].strip()

        except Exception as e:
            response = f"Error: {str(e)}"

        # Store assistant response in the chat history
        messages.append({"role": "assistant", "content": response})

    # The Chatbot component only accepts "user"/"assistant" roles, so hide the system
    # prompt; return an empty string to clear the input textbox
    display_messages = [m for m in messages if m["role"] != "system"]
    return display_messages, ""

# Gradio Interface
with gr.Blocks() as demo:
    gr.Markdown("## Fashion Assistant Chatbot")

    # Sidebar for user inputs
    with gr.Row():
        with gr.Column():
            name = gr.Textbox(label="Name")
            age = gr.Number(label="Age", value=25, minimum=1, maximum=100)
            location = gr.Textbox(label="Location")
            gender = gr.Radio(label="Gender", choices=["Male", "Female", "Other"])
            ethnicity = gr.Radio(label="Ethnicity", choices=["Asian", "Black", "Hispanic", "White", "Other"])
            height = gr.Number(label="Height (cm)", value=170, minimum=50, maximum=250)
            weight = gr.Number(label="Weight (kg)", value=70, minimum=20, maximum=200)

        with gr.Column():
            submit_btn = gr.Button("Submit Questionnaire")
            reset_btn = gr.Button("Reset Chat")

    # Questionnaire with fashion-related questions
    style_preference = gr.Radio(label="Which style do you prefer the most?", choices=["Casual", "Formal", "Streetwear", "Athleisure", "Baggy"])
    color_palette = gr.Radio(label="What color palette do you wear often?", choices=["Neutrals", "Bright Colors", "Pastels", "Dark Shades"])
    everyday_style = gr.Radio(label="How would you describe your everyday style?", choices=["Relaxed", "Trendy", "Elegant", "Bold"])

    # Chat functionality
    chatbox = gr.Chatbot(type='messages')
    user_input = gr.Textbox(label="Your Message", placeholder="Type your message here...")

    # Connect the buttons to their respective functions
    output_message = gr.Textbox(label="Output Message")
    submit_btn.click(submit_questionnaire, inputs=[name, age, location, gender, ethnicity, height, weight,
                                                    style_preference, color_palette, everyday_style], outputs=output_message)

    reset_btn.click(reset_chat, outputs=[chatbox])  # Reset chat
    user_input.submit(chat, inputs=user_input, outputs=[chatbox, user_input])  # Connect chat input

# Run the app
demo.launch()
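# If multiple concurrent users are expected, queuing could be enabled instead,
# e.g. `demo.queue().launch()`, and `demo.launch(share=True)` would give a temporary
# public link. Both are optional tweaks, not part of the original setup.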