import gradio as gr
import torch
from PIL import Image
from transformers import CLIPProcessor, CLIPModel
# Load the CLIP model and its matching processor from the same checkpoint.
model = CLIPModel.from_pretrained("openai/clip-vit-base-patch32")
processor = CLIPProcessor.from_pretrained("openai/clip-vit-base-patch32")
def generate_answer(image):
    # Candidate text prompts for zero-shot CLIP classification of room lighting.
    classes = [
        'a picture of a room filled with abundant natural light, with many or few windows, regardless of whether it is night, without objects that prevent the light from passing through',
        'a picture of a room in the dark',
        'a picture of a room with artificial lights such as lamps or overhead lights',
    ]
    labels = ['natural_light', 'no_light', 'artificial_light']
    inputs = processor(text=classes, images=image, return_tensors="pt", padding=True)
    with torch.no_grad():  # inference only; no gradients needed
        outputs = model(**inputs)
    # Image-text similarity scores, normalized into probabilities over the prompts.
    logits_per_image = outputs.logits_per_image
    probs = logits_per_image.softmax(dim=1)
    probabilities_list = probs.squeeze().tolist()
    return {label: probability for label, probability in zip(labels, probabilities_list)}
image_input = gr.Image(type="pil", label="Upload Image")

iface = gr.Interface(
    fn=generate_answer,
    inputs=[image_input],
    # gr.Label renders the {label: probability} dict returned by generate_answer
    outputs=gr.Label(label="Lighting Probabilities"),
title="Room Lightning Score",
description="Upload an room image"
)
iface.launch() |
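
# --- Optional local sanity check (a sketch, not part of the original app) ---
# Assuming an image file "room.jpg" exists next to this script (hypothetical
# filename), the classifier can be exercised directly, without the Gradio UI:
#
#     img = Image.open("room.jpg").convert("RGB")
#     print(generate_answer(img))  # dict mapping each label to its probability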