Salman Naqvi committed
Commit be341a6 · 1 Parent(s): a12ccf4

Re-added blog post link.

Files changed (5)
  1. .idea/FloodDetector.iml +8 -0
  2. .idea/discord.xml +7 -0
  3. .idea/modules.xml +8 -0
  4. app.ipynb +110 -101
  5. app.py +3 -2
.idea/FloodDetector.iml ADDED
@@ -0,0 +1,8 @@
+ <?xml version="1.0" encoding="UTF-8"?>
+ <module type="PYTHON_MODULE" version="4">
+ <component name="NewModuleRootManager">
+ <content url="file://$MODULE_DIR$" />
+ <orderEntry type="inheritedJdk" />
+ <orderEntry type="sourceFolder" forTests="false" />
+ </component>
+ </module>
.idea/discord.xml ADDED
@@ -0,0 +1,7 @@
+ <?xml version="1.0" encoding="UTF-8"?>
+ <project version="4">
+ <component name="DiscordProjectSettings">
+ <option name="show" value="ASK" />
+ <option name="description" value="" />
+ </component>
+ </project>
.idea/modules.xml ADDED
@@ -0,0 +1,8 @@
+ <?xml version="1.0" encoding="UTF-8"?>
+ <project version="4">
+ <component name="ProjectModuleManager">
+ <modules>
+ <module fileurl="file://$PROJECT_DIR$/.idea/FloodDetector.iml" filepath="$PROJECT_DIR$/.idea/FloodDetector.iml" />
+ </modules>
+ </component>
+ </project>
app.ipynb CHANGED
@@ -2,40 +2,54 @@
  "cells": [
  {
  "cell_type": "code",
- "execution_count": 1,
- "metadata": {
- "collapsed": true
- },
+ "execution_count": null,
  "outputs": [],
  "source": [
  "#|default_exp app"
- ]
+ ],
+ "metadata": {
+ "collapsed": false
+ }
  },
  {
  "cell_type": "markdown",
- "source": [
- "# Flood or no flood?"
- ],
  "metadata": {
  "collapsed": false
- }
+ },
+ "source": [
+ "# Flood or no flood?"
+ ]
  },
  {
  "cell_type": "code",
  "execution_count": 2,
- "outputs": [],
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "/Users/salmannaqvi/lib/python3.10/site-packages/torchvision/io/image.py:13: UserWarning: Failed to load image Python extension: dlopen(/Users/salmannaqvi/lib/python3.10/site-packages/torchvision/image.so, 0x0006): Symbol not found: (__ZN2at4_ops19empty_memory_format4callEN3c108ArrayRefINS2_6SymIntEEENS2_8optionalINS2_10ScalarTypeEEENS6_INS2_6LayoutEEENS6_INS2_6DeviceEEENS6_IbEENS6_INS2_12MemoryFormatEEE)\n",
+ " Referenced from: '/Users/salmannaqvi/lib/python3.10/site-packages/torchvision/image.so'\n",
+ " Expected in: '/Users/salmannaqvi/lib/python3.10/site-packages/torch/lib/libtorch_cpu.dylib'\n",
+ " warn(f\"Failed to load image Python extension: {e}\")\n"
+ ]
+ }
+ ],
  "source": [
  "#|export\n",
  "import gradio as gr\n",
  "from fastai.vision.all import *"
+ ]
  },
  {
  "cell_type": "code",
  "execution_count": 3,
+ "metadata": {
+ "collapsed": false
+ },
  "outputs": [
  {
  "data": {
@@ -49,36 +63,36 @@
  ],
  "source": [
  "image = PILImage.create('images/test_images/1.jpeg'); image"
- ],
- "metadata": {
- "collapsed": false
- }
+ ]
  },
  {
  "cell_type": "markdown",
- "source": [
- "## Create learner."
- ],
  "metadata": {
  "collapsed": false
- }
+ },
+ "source": [
+ "## Create learner."
+ ]
  },
  {
  "cell_type": "code",
  "execution_count": 4,
+ "metadata": {
+ "collapsed": false
+ },
  "outputs": [],
  "source": [
  "#|export\n",
  "\n",
  "learner = load_learner('model/flood_classifier.pkl')"
- ],
- "metadata": {
- "collapsed": false
- }
+ ]
  },
  {
  "cell_type": "code",
  "execution_count": 5,
+ "metadata": {
+ "collapsed": false
+ },
  "outputs": [
  {
  "data": {
@@ -107,23 +121,23 @@
  ],
  "source": [
  "learner.predict(image)"
- ],
- "metadata": {
- "collapsed": false
- }
+ ]
  },
  {
  "cell_type": "markdown",
- "source": [
- "## Create classification function."
- ],
  "metadata": {
  "collapsed": false
- }
+ },
+ "source": [
+ "## Create classification function."
+ ]
  },
  {
  "cell_type": "code",
  "execution_count": 6,
+ "metadata": {
+ "collapsed": false
+ },
  "outputs": [],
  "source": [
  "#|export\n",
@@ -133,14 +147,14 @@
  "def classify_image(image):\n",
  " prediction, index, probabilities = learner.predict(image)\n",
  " return dict(zip(categories, map(float, probabilities)))"
- ],
- "metadata": {
- "collapsed": false
- }
+ ]
  },
  {
  "cell_type": "code",
  "execution_count": 7,
+ "metadata": {
+ "collapsed": false
+ },
  "outputs": [
  {
  "data": {
@@ -169,23 +183,23 @@
  ],
  "source": [
  "classify_image(PILImage.create('images/example_images/flooded/1.jpeg'))"
- ],
- "metadata": {
- "collapsed": false
- }
+ ]
  },
  {
  "cell_type": "markdown",
- "source": [
- "## Intialize attributes for the interface."
- ],
  "metadata": {
  "collapsed": false
- }
+ },
+ "source": [
+ "## Intialize attributes for the interface."
+ ]
  },
  {
  "cell_type": "code",
- "execution_count": 32,
+ "execution_count": 8,
+ "metadata": {
+ "collapsed": false
+ },
  "outputs": [],
  "source": [
  "#|export\n",
@@ -202,18 +216,17 @@
  " \" This model was trained on the ResNet18 architecture and the \" \\\n",
  " \"fastai library.\" \\\n",
  " \" Check out the associated blog post with the link below!\"\n",
- "article = \"<p style='text-align: center; font-size: 36px'><a \" \\\n",
- " \"href='https://forbo7.github\" \\\n",
- " \".io/ForBlog/fastai/image%20classification/2022/09/12/Detecting\" \\\n",
- " \"-Floods-for-Disaster-Relief.html' targets='_blank'>Blog Post</a></p>'\""
- ],
- "metadata": {
- "collapsed": false
- }
+ "article = \"\"\"\n",
+ "<p style='text-align: center; font-size: 36px'><a href='https://forbo7.github.io/forblog/posts/5_detecting_floods_for_disaster_relief.html'>Blog Post</a></p>\n",
+ "\"\"\""
+ ]
  },
  {
  "cell_type": "code",
  "execution_count": 9,
+ "metadata": {
+ "collapsed": false
+ },
  "outputs": [
  {
  "data": {
@@ -226,40 +239,34 @@
  ],
  "source": [
  "examples"
- ],
- "metadata": {
- "collapsed": false
- }
+ ]
  },
  {
  "cell_type": "markdown",
- "source": [
- "## Create the interface."
- ],
  "metadata": {
  "collapsed": false
- }
+ },
+ "source": [
+ "## Create the interface."
+ ]
  },
  {
  "cell_type": "code",
- "execution_count": 28,
+ "execution_count": 1,
+ "metadata": {
+ "collapsed": false
+ },
  "outputs": [
  {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "Running on local URL: http://127.0.0.1:7869\n",
- "\n",
- "To create a public link, set `share=True` in `launch()`.\n"
+ "ename": "NameError",
+ "evalue": "name 'gr' is not defined",
+ "output_type": "error",
+ "traceback": [
+ "\u001B[0;31m---------------------------------------------------------------------------\u001B[0m",
+ "\u001B[0;31mNameError\u001B[0m Traceback (most recent call last)",
+ "Cell \u001B[0;32mIn [1], line 4\u001B[0m\n\u001B[1;32m 1\u001B[0m \u001B[38;5;66;03m#|export\u001B[39;00m\n\u001B[1;32m 2\u001B[0m \n\u001B[1;32m 3\u001B[0m \u001B[38;5;66;03m# Perhaps I can make the interface below with **kwargs?\u001B[39;00m\n\u001B[0;32m----> 4\u001B[0m interface \u001B[38;5;241m=\u001B[39m \u001B[43mgr\u001B[49m\u001B[38;5;241m.\u001B[39mInterface(fn\u001B[38;5;241m=\u001B[39mclassify_image, inputs\u001B[38;5;241m=\u001B[39m\u001B[38;5;124m'\u001B[39m\u001B[38;5;124mimage\u001B[39m\u001B[38;5;124m'\u001B[39m, outputs\u001B[38;5;241m=\u001B[39m\u001B[38;5;124m'\u001B[39m\u001B[38;5;124mlabel\u001B[39m\u001B[38;5;124m'\u001B[39m,\n\u001B[1;32m 5\u001B[0m examples\u001B[38;5;241m=\u001B[39mexamples, title\u001B[38;5;241m=\u001B[39mtitle,\n\u001B[1;32m 6\u001B[0m description\u001B[38;5;241m=\u001B[39mdescription, article\u001B[38;5;241m=\u001B[39marticle)\n\u001B[1;32m 7\u001B[0m interface\u001B[38;5;241m.\u001B[39mlaunch(inline\u001B[38;5;241m=\u001B[39m\u001B[38;5;28;01mFalse\u001B[39;00m, enable_queue\u001B[38;5;241m=\u001B[39m\u001B[38;5;28;01mTrue\u001B[39;00m)\n",
+ "\u001B[0;31mNameError\u001B[0m: name 'gr' is not defined"
  ]
- },
- {
- "data": {
- "text/plain": "(<gradio.routes.App at 0x29d4a8040>, 'http://127.0.0.1:7869/', None)"
- },
- "execution_count": 28,
- "metadata": {},
- "output_type": "execute_result"
  }
  ],
  "source": [
@@ -270,55 +277,52 @@
  " examples=examples, title=title,\n",
  " description=description, article=article)\n",
  "interface.launch(inline=False, enable_queue=True)"
- ],
- "metadata": {
- "collapsed": false
- }
+ ]
  },
  {
  "cell_type": "markdown",
- "source": [
- "## Export"
- ],
  "metadata": {
  "collapsed": false
- }
+ },
+ "source": [
+ "## Export"
+ ]
  },
  {
  "cell_type": "code",
- "execution_count": 30,
+ "execution_count": 2,
+ "metadata": {
+ "collapsed": false
+ },
  "outputs": [],
  "source": [
  "from nbdev.export import nb_export"
- ],
- "metadata": {
- "collapsed": false
- }
+ ]
  },
  {
  "cell_type": "code",
- "execution_count": 33,
+ "execution_count": 3,
+ "metadata": {
+ "collapsed": false
+ },
  "outputs": [],
  "source": [
  "nb_export('app.ipynb', '.')"
- ],
- "metadata": {
- "collapsed": false
- }
+ ]
  },
  {
  "cell_type": "code",
  "execution_count": 12,
- "outputs": [],
- "source": [],
  "metadata": {
  "collapsed": false
- }
+ },
+ "outputs": [],
+ "source": []
  }
  ],
  "metadata": {
  "kernelspec": {
- "display_name": "Python 3",
+ "display_name": "Python 3.10.7 ('salmannaqvi')",
  "language": "python",
  "name": "python3"
  },
@@ -332,7 +336,12 @@
  "name": "python",
  "nbconvert_exporter": "python",
  "pygments_lexer": "ipython2",
- "version": "2.7.6"
+ "version": "3.10.7"
+ },
+ "vscode": {
+ "interpreter": {
+ "hash": "e2325d4fde750cacac60ba4c06f149a07b5aad47b4e6bf1c4dca1c7b2184bba3"
+ }
  }
  },
  "nbformat": 4,
app.py CHANGED
@@ -31,8 +31,9 @@ description = "An image classifier that can tell whether an image is flooded " \
  " This model was trained on the ResNet18 architecture and the " \
  "fastai library." \
  " Check out the associated blog post with the link below!"
- article = "<p style='text-align: center; font-size: 36px'><a href='https://forbo7.github.io/forblog/posts/2_bear_classifier_model.html' " \
- "targets='_blank'><Blog Post</a></p>"
+ article = """
+ <p style='text-align: center; font-size: 36px'><a href='https://forbo7.github.io/forblog/posts/5_detecting_floods_for_disaster_relief.html'>Blog Post</a></p>
+ """

  # %% app.ipynb 14
  # Perhaps I can make the interface below with **kwargs?
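
The app.py change mirrors the notebook because app.py is generated from app.ipynb with nbdev: only cells marked #|export are written out, into the module named by #|default_exp app. A minimal sketch of that export step, as run in the notebook's Export section:

# Regenerate app.py from the notebook so both copies of the article link stay in sync.
from nbdev.export import nb_export

# Writes the "#|export" cells of app.ipynb into app.py in the current directory.
nb_export('app.ipynb', '.')

Patching only app.py would be overwritten on the next export, so the link is updated in the notebook and app.py is regenerated.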