Johannes committed on
Commit 268ddb9
1 parent: 27851c7

update description

Files changed (1)
  1. app.py +10 -8
app.py CHANGED
@@ -28,7 +28,7 @@ mask_generator = SamAutomaticMaskGenerator(sam)
 
 
 controlnet, controlnet_params = FlaxControlNetModel.from_pretrained(
-    "mfidabel/controlnet-segment-anything", dtype=jnp.float32
+    "SAMControlNet/sd-controlnet-sam-seg", dtype=jnp.float32
 )
 
 pipe, params = FlaxStableDiffusionControlNetPipeline.from_pretrained(
@@ -49,13 +49,15 @@ with gr.Blocks() as demo:
     gr.Markdown("# WildSynth: Synthetic Wildlife Data Generation")
     gr.Markdown(
         """
-        We have trained a JAX ControlNet model with
-        To try the demo, upload an image and select object(s) you want to inpaint.
-        Write a prompt & a negative prompt to control the inpainting.
-        Click on the "Submit" button to inpaint the selected object(s).
-        Check "Background" to inpaint the background instead of the selected object(s).
-
-        If the demo is slow, clone the space to your own HF account and run on a GPU.
+        ### About
+        We have trained a JAX ControlNet model for semantic segmentation on Wildlife Animal Images.
+
+        For the training data creation we used the [Wildlife Animals Images](https://www.kaggle.com/datasets/anshulmehtakaggl/wildlife-animals-images) dataset.
+        We created segmentation masks with the help of [Grounded SAM](https://github.com/IDEA-Research/Grounded-Segment-Anything), where we used the animal names
+        as input prompts for detection and more accurate segmentation.
+
+        ### How To Use
+
         """
     )
     with gr.Row():
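
Minimal sketch (not part of the commit) of how the updated checkpoint could be loaded with diffusers' Flax API outside the Space. The base Stable Diffusion model id and the dtype choices below are assumptions for illustration; the diff only shows the opening line of the pipeline call in app.py.

    import jax.numpy as jnp
    from diffusers import FlaxControlNetModel, FlaxStableDiffusionControlNetPipeline

    # Load the ControlNet checkpoint introduced by this commit.
    controlnet, controlnet_params = FlaxControlNetModel.from_pretrained(
        "SAMControlNet/sd-controlnet-sam-seg", dtype=jnp.float32
    )

    # Attach it to a Stable Diffusion ControlNet pipeline.
    # "runwayml/stable-diffusion-v1-5" is an assumed base model, not shown in this diff.
    pipe, params = FlaxStableDiffusionControlNetPipeline.from_pretrained(
        "runwayml/stable-diffusion-v1-5",
        controlnet=controlnet,
        dtype=jnp.float32,
    )
    params["controlnet"] = controlnet_params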