nightfury committed on
Commit 5aea391
1 Parent(s): 9c573a8

Create app.py

Files changed (1)
  1. app.py +166 -0
app.py ADDED
@@ -0,0 +1,166 @@
+ from share_btn import community_icon_html, loading_icon_html, share_js
+
+ import os, subprocess
+ import torch
+
+ # Install the Python dependencies and pull the BLIP and clip-interrogator sources at startup.
+ def setup():
+     install_cmds = [
+         ['pip', 'install', 'ftfy', 'gradio', 'regex', 'tqdm', 'transformers==4.21.2', 'timm', 'fairscale', 'requests'],
+         ['pip', 'install', 'open_clip_torch'],
+         ['pip', 'install', '-e', 'git+https://github.com/pharmapsychotic/BLIP.git@lib#egg=blip'],
+         ['git', 'clone', '-b', 'open-clip', 'https://github.com/pharmapsychotic/clip-interrogator.git']
+     ]
+     for cmd in install_cmds:
+         print(subprocess.run(cmd, stdout=subprocess.PIPE).stdout.decode('utf-8'))
+
+ setup()
+
+ # Download the preprocessed ViT-H-14 cache files (artists, flavors, mediums, movements, trendings)
+ # published in the pharma/ci-preprocess repo.
+ print("Downloading preprocessed cache files...")
+ CACHE_URLS = [
+     'https://huggingface.co/pharma/ci-preprocess/resolve/main/ViT-H-14_laion2b_s32b_b79k_artists.pkl',
+     'https://huggingface.co/pharma/ci-preprocess/resolve/main/ViT-H-14_laion2b_s32b_b79k_flavors.pkl',
+     'https://huggingface.co/pharma/ci-preprocess/resolve/main/ViT-H-14_laion2b_s32b_b79k_mediums.pkl',
+     'https://huggingface.co/pharma/ci-preprocess/resolve/main/ViT-H-14_laion2b_s32b_b79k_movements.pkl',
+     'https://huggingface.co/pharma/ci-preprocess/resolve/main/ViT-H-14_laion2b_s32b_b79k_trendings.pkl',
+ ]
+ os.makedirs('cache', exist_ok=True)
+ for url in CACHE_URLS:
+     print(subprocess.run(['wget', url, '-P', 'cache'], stdout=subprocess.PIPE).stdout.decode('utf-8'))
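+
+ # Make the vendored BLIP and clip-interrogator checkouts importable before loading the models.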
+ import sys
+ sys.path.append('src/blip')
+ sys.path.append('clip-interrogator')
+
+ import gradio as gr
+ from clip_interrogator import Config, Interrogator
+
+ config = Config()
+ config.device = 'cuda' if torch.cuda.is_available() else 'cpu'
+ config.blip_offload = not torch.cuda.is_available()
+ config.chunk_size = 2048
+ config.flavor_intermediate_count = 512
+ config.blip_num_beams = 64
+
+ ci = Interrogator(config)
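+
+ # inference() maps the selected mode to the matching Interrogator call and reveals the
+ # community-share widgets once a prompt has been produced.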
+ def inference(image, mode, best_max_flavors):
+     image = image.convert('RGB')
+     if mode == 'best':
+         prompt_result = ci.interrogate(image, max_flavors=int(best_max_flavors))
+         print("mode best: " + prompt_result)
+     elif mode == 'classic':
+         prompt_result = ci.interrogate_classic(image)
+         print("mode classic: " + prompt_result)
+     else:
+         prompt_result = ci.interrogate_fast(image)
+         print("mode fast: " + prompt_result)
+     return prompt_result, gr.update(visible=True), gr.update(visible=True), gr.update(visible=True)
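+
+ # Static HTML for the page header and footer, plus the CSS used by the Gradio layout.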
+ title = """
+ <div style="text-align: center; max-width: 500px; margin: 0 auto;">
+     <div
+         style="
+             display: inline-flex;
+             align-items: center;
+             gap: 0.8rem;
+             font-size: 1.75rem;
+             margin-bottom: 10px;
+         "
+     >
+         <h1 style="font-weight: 600; margin-bottom: 7px;">
+             CLIP Interrogator for SD 2.0 Img2Prompt Generator
+         </h1>
+     </div>
+     <p style="margin-bottom: 10px; font-size: 94%; font-weight: 100; line-height: 1.5em;">
+         Do you want to figure out what a good prompt might be for creating new images similar to an existing one?
+         <br />The CLIP Interrogator is here to give you the answer!
+         <br />This version is specialized for producing prompts for Stable Diffusion 2.0, using the ViT-H-14 OpenCLIP model.
+     </p>
+ </div>
+ """
+
+ article = """
+ <div style="text-align: center; max-width: 500px; margin: 0 auto; font-size: 94%;">
+     <p>
+         Server busy? You can also run on <a href="https://colab.research.google.com/github/pharmapsychotic/clip-interrogator/blob/open-clip/clip_interrogator.ipynb">Google Colab</a>.
+     </p>
+     <p>
+         Found this helpful? See pharmapsychotic's
+         <a href="https://pharmapsychotic.com/tools.html">AI generative art tools list</a> for more tools like this.
+     </p>
+ </div>
+ """
+
+ css = '''
+ #col-container {max-width: 700px; margin-left: auto; margin-right: auto;}
+ a {text-decoration-line: underline; font-weight: 600;}
+ .animate-spin {
+     animation: spin 1s linear infinite;
+ }
+ @keyframes spin {
+     from {
+         transform: rotate(0deg);
+     }
+     to {
+         transform: rotate(360deg);
+     }
+ }
+ #share-btn-container {
+     display: flex; padding-left: 0.5rem !important; padding-right: 0.5rem !important; background-color: #000000; justify-content: center; align-items: center; border-radius: 9999px !important; width: 13rem;
+ }
+ #share-btn {
+     all: initial; color: #ffffff; font-weight: 600; cursor: pointer; font-family: 'IBM Plex Sans', sans-serif; margin-left: 0.5rem !important; padding-top: 0.25rem !important; padding-bottom: 0.25rem !important;
+ }
+ #share-btn * {
+     all: unset;
+ }
+ #share-btn-container div:nth-child(-n+2){
+     width: auto !important;
+     min-height: 0px !important;
+ }
+ #share-btn-container .wrap {
+     display: none !important;
+ }
+ '''
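+
+ # Gradio UI: image input, mode selector, max-flavors control, output textbox, and the
+ # community-share widgets that are revealed after a prompt is generated.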
+ with gr.Blocks(css=css) as block:
+     with gr.Column(elem_id="col-container"):
+         gr.HTML(title)
+
+         input_image = gr.Image(type='pil', elem_id="input-img")
+         mode_input = gr.Radio(['best', 'classic', 'fast'], label='', value='best')
+         flavor_input = gr.Number(value=4, label='best mode max flavors')
+
+         submit_btn = gr.Button("Submit")
+
+         output_text = gr.Textbox(label="Output", elem_id="output-txt")
+
+         with gr.Group(elem_id="share-btn-container"):
+             community_icon = gr.HTML(community_icon_html, visible=False)
+             loading_icon = gr.HTML(loading_icon_html, visible=False)
+             share_button = gr.Button("Share to community", elem_id="share-btn", visible=False)
+
+         examples = [['27E894C4-9375-48A1-A95D-CB2425416B4B.png', "best", 4], ['DB362F56-BA98-4CA1-A999-A25AA94B723B.png', "fast", 4]]
+         ex = gr.Examples(examples=examples, fn=inference, inputs=[input_image, mode_input, flavor_input], outputs=[output_text, share_button, community_icon, loading_icon], cache_examples=True, run_on_click=True)
+         ex.dataset.headers = [""]
+
+         gr.HTML(article)
+
+     submit_btn.click(fn=inference, inputs=[input_image, mode_input, flavor_input], outputs=[output_text, share_button, community_icon, loading_icon], api_name="clipi2")
+     share_button.click(None, [], [], _js=share_js)
+
+ block.queue(max_size=32, concurrency_count=20).launch(show_api=False)