vishalkatheriya committed (verified)
Commit 65ecaf3 · 1 Parent(s): 55b2a7e

Update inference.py

Files changed (1):
  inference.py +4 -3
inference.py CHANGED
@@ -1,6 +1,7 @@
 import streamlit as st
 def demo():
-    st.write("yes its work")
+    #st.write("yes its work")
+    pass
 def run_example(image, model, processor, task_prompt, text_input=None):

     inputs = processor(text=text_input, images=image, return_tensors="pt")
@@ -12,6 +13,6 @@ def run_example(image, model, processor, task_prompt, text_input=None):
     )
     generated_text = processor.batch_decode(generated_ids, skip_special_tokens=True)[0]
     print("generated_text:",generated_text)
-    parsed_answer = processor.post_process_generation(generated_text, task=task_prompt, image_size=(image.width, image.height))
-    return parsed_answer
+    # parsed_answer = processor.post_process_generation(generated_text, task=task_prompt, image_size=(image.width, image.height))
+    return generated_text
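For context, a minimal usage sketch of run_example (not part of this commit): it stubs nothing and simply calls the function as defined in inference.py. The checkpoint name, image path, and task prompt below are illustrative assumptions, since the repo does not show how the model and processor are loaded; a Florence-2-style processor is assumed because of the post_process_generation call commented out in this commit.

# Usage sketch (illustrative only, not part of commit 65ecaf3).
# The model id, image path, and task prompt are assumptions.
from PIL import Image
from transformers import AutoModelForCausalLM, AutoProcessor

from inference import run_example

model_id = "microsoft/Florence-2-base"  # assumed checkpoint
model = AutoModelForCausalLM.from_pretrained(model_id, trust_remote_code=True)
processor = AutoProcessor.from_pretrained(model_id, trust_remote_code=True)

image = Image.open("sample.jpg")   # placeholder image path
task_prompt = "<CAPTION>"          # assumed task prompt

# After this commit, run_example returns the raw decoded text
# rather than the post-processed answer.
text = run_example(image, model, processor, task_prompt, text_input=task_prompt)
print(text)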