tensorkelechi committed on
Commit
5064dd2
1 Parent(s): 52719f3

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +12 -11
app.py CHANGED
@@ -12,7 +12,7 @@ stl.write(
12
  "An app that uses text input to search for described images, using embeddings of selected image datasets. Uses contrastive learning models(CLIP) and the sentence-transformers"
13
  )
14
  stl.link_button(
15
- label="link to github and full library code",
16
  url="https://github.com/kelechi-c/ripple_net",
17
  )
18
 
@@ -21,7 +21,7 @@ dataset = stl.selectbox(
21
  options=[
22
  "huggan/few-shot-art-painting",
23
  "huggan/wikiart",
24
- "zh-plus/tiny-imagenet(",
25
  "lambdalabs/naruto-blip-captions",
26
  "detection-datasets/fashionpedia",
27
  "fantasyfish/laion-art",
@@ -33,7 +33,7 @@ dataset = stl.selectbox(
33
  embedded_data = None
34
  embedder = None
35
  text_search = None
36
-
37
  ret_images = []
38
  scores = []
39
 
@@ -51,15 +51,16 @@ if embedded_data is not None:
51
  text_search = ripple.TextSearch(embedded_data, embedder.embed_model)
52
  stl.success("Initialized text search class")
53
 
54
- search_term = stl.text_input("Text description/search for image")
55
 
56
  if search_term:
57
  with stl.spinner("retrieving images with description.."):
58
- scores, ret_images = text_search.get_similar_images(
59
- search_term, k_images=4)
60
- stl.success(f"sucessfully retrieved {len(ret_images)}")
61
-
62
- for count, score, image in tqdm(zip(range(len(ret_images)), scores, ret_images)):
63
- stl.image(image["image"][count])
64
- stl.write(score)
65
 
 
 
 
 
 
 
 
12
  "An app that uses text input to search for described images, using embeddings of selected image datasets. Uses contrastive learning models(CLIP) and the sentence-transformers"
13
  )
14
  stl.link_button(
15
+ label="Full library code",
16
  url="https://github.com/kelechi-c/ripple_net",
17
  )
18
 
 
21
  options=[
22
  "huggan/few-shot-art-painting",
23
  "huggan/wikiart",
24
+ "zh-plus/tiny-imagenet",
25
  "lambdalabs/naruto-blip-captions",
26
  "detection-datasets/fashionpedia",
27
  "fantasyfish/laion-art",
 
33
  embedded_data = None
34
  embedder = None
35
  text_search = None
36
+ search_term = None
37
  ret_images = []
38
  scores = []
39
 
 
51
  text_search = ripple.TextSearch(embedded_data, embedder.embed_model)
52
  stl.success("Initialized text search class")
53
 
54
+ search_term = stl.text_input("Text description/search for image")
55
 
56
  if search_term:
57
  with stl.spinner("retrieving images with description.."):
58
+ scores, ret_images = text_search.get_similar_images(search_term, k_images=4)
59
+ stl.success(f"successfully retrieved {len(ret_images)} images")
 
 
 
 
 
60
 
61
+ try:
62
+ for count, score, image in tqdm(zip(range(len(ret_images)), scores, ret_images)):
63
+ stl.image(image["image"][count])
64
+ stl.write(score)
65
+ try:
66
+ stl.error(e)