Jyothirmai committed
Commit e5a0b2d
1 Parent(s): 30e7fb3

Update app.py

Files changed (1)
  1. app.py +3 -2
app.py CHANGED
@@ -39,7 +39,7 @@ with gr.Row():
         with gr.Column(): # Column for dropdowns and model choice
             max_tokens = gr.Dropdown(list(range(50, 101)), label="Max Tokens", value=75)
             temperature = gr.Slider(0.5, 0.9, step=0.1, label="Temperature", value=0.9)
-            imgID = gr.Dropdown(list(range(1,7)), label="Example Image Selected")
+            imgID = gr.Dropdown(["1","2","3","4"], label="Example Image Selected")
 
             model_choice = gr.Radio(["CLIP-GPT2", "ViT-GPT2", "ViT-CoAttention", "Baseline Model CNN-RNN"], label="Select Model")
             generate_button = gr.Button("Generate Caption")
@@ -67,7 +67,8 @@ def predict(img, model_name, max_tokens, temperature, imgID):
     elif model_name == "ViT-CoAttention":
         return generate_caption_vitCoAtt(img), getCaption(imgID)
     elif model_name == "Baseline Model CNN-RNN":
-        img = getImageID(str(imgID))
+        print(imgID)
+        img = getImageID(imgID)
         return generate_caption_cnnrnn(img), getCaption(imgID)
     else:
         return "select a model","select an image"