metaformer / app.py
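"""Gradio demo for MetaFormer (MetaFG) fine-grained image classification.

Loads a trained checkpoint through the Inference wrapper and serves a simple
classification interface that returns the top predicted classes for an image.
"""
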
import argparse
import glob

import gradio as gr

from inference import Inference

def parse_option():
    parser = argparse.ArgumentParser('MetaFG Inference script', add_help=False)
    parser.add_argument('--cfg', type=str, metavar="FILE", help='path to config file', default="configs/MetaFG_2_224.yaml")
    # easy config modification
    parser.add_argument('--model-path', type=str, help='path to model checkpoint', default="./ckpt_4_mf2.pth")
    parser.add_argument('--img-size', type=int, default=384, help='input image size')
    parser.add_argument('--meta-path', default="meta.txt", type=str, help='path to meta data file')
    parser.add_argument('--names-path', default="names_mf2.txt", type=str, help='path to class names file')
    args = parser.parse_args()
    return args


if __name__ == '__main__':
    args = parse_option()

    model = Inference(config_path=args.cfg,
                      model_path=args.model_path,
                      names_path=args.names_path)

    def classify(image):
        # Run inference on the uploaded image and squeeze the batch dimension.
        preds = model.infer(img_path=image, meta_data_path=args.meta_path).squeeze()
        # Debug output: number of classes and the class names.
        print(len(model.classes))
        print(model.classes)
        # Map each class name to its confidence for gr.Label.
        confidences = {c: float(preds[i]) for i, c in enumerate(model.classes)}
        return confidences

    gr.Interface(fn=classify,
                 inputs=gr.Image(shape=(args.img_size, args.img_size), type="pil"),
                 outputs=gr.Label(num_top_classes=10),
                 examples=glob.glob("./example_images/*")).launch()
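
# Example invocation (assumes the default config, checkpoint, class-names file,
# and ./example_images/ referenced above are present in the working directory):
#   python app.py --cfg configs/MetaFG_2_224.yaml --model-path ./ckpt_4_mf2.pth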