"""Gradio demo: browse nearest-neighbor images in the SDBiaseval identities set.

Given a (gender, ethnicity, model, image-number) selection, the app looks up
the matching dataset row and displays that image together with its precomputed
nearest neighbors from a KNN index.
"""
import gradio as gr
import numpy as np
import pandas as pd
from datasets import load_dataset

# Vocabularies for the radio selectors.
gender_labels = ['man', 'non-binary', 'woman', 'no_gender_specified', ]
ethnicity_labels = ['African-American', 'American_Indian', 'Black', 'Caucasian',
                    'East_Asian', 'First_Nations', 'Hispanic',
                    'Indigenous_American', 'Latino', 'Latinx', 'Multiracial',
                    'Native_American', 'Pacific_Islander', 'South_Asian',
                    'Southeast_Asian', 'White', 'no_ethnicity_specified']
models = ['DallE', 'SD_14', 'SD_2']
nos = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]

# Precomputed KNN index: row i holds dataset indices — presumably the i-th
# image first, followed by its nearest neighbors (see get_nearest_64).
index = np.load("indexes/knn_10752_65.npy")
ds = load_dataset("SDBiaseval/identities", split="train")

# BUG FIX: the original code filtered an undefined name `df`, so every lookup
# raised NameError. Build the metadata frame from the dataset, dropping the
# heavy "image" column (the column name is used below, so it exists).
df = ds.remove_columns("image").to_pandas()


def get_index(gender, ethnicity, model, no):
    """Return the dataset row index matching the given selection.

    Raises IndexError if no row matches the four-way filter.
    """
    mask = (
        (df['ethnicity'] == ethnicity)
        & (df['gender'] == gender)
        & (df['no'] == no)
        & (df['model'] == model)
    )
    return df.loc[mask].index[0]


def get_nearest_64(gender, ethnicity, model, no):
    """Return (query image, nearest-neighbor images) for the selection."""
    ix = get_index(gender, ethnicity, model, no)
    # index[ix][0] is the query itself; index[ix][1:] are its neighbors.
    # NOTE(review): Dataset.select expects an iterable of indices — the scalar
    # index[ix][0] looks suspect; confirm against the datasets version in use.
    return ds.select(index[ix][0])["image"], ds.select(index[ix][1:])["image"]


with gr.Blocks() as demo:
    gender = gr.Radio(gender_labels, label="Gender label")
    ethnicity = gr.Radio(ethnicity_labels, label="Ethnicity label")
    model = gr.Radio(models, label="Model")
    no = gr.Radio(nos, label="Image number")
    button = gr.Button(value="Get nearest neighbors")
    button.click(get_nearest_64,
                 inputs=[gender, ethnicity, model, no],
                 outputs=[gr.Image(), gr.Gallery().style(grid=[8])])

demo.launch()