vumichien committed
Commit 58126ce
1 Parent(s): d346b49

Update app.py

Files changed (1)
  1. app.py +114 -114
app.py CHANGED
@@ -1,114 +1,114 @@
The commit rewrites the file in place: apart from the one line below, the 114 removed lines are identical to the 114 added lines.
-EXAMPLES_PATH = './examples'
+EXAMPLES_PATH = 'examples'
The added version of app.py in full:

import math
import numpy as np
import pandas as pd

import os
import glob
import trimesh
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import layers
from matplotlib import pyplot as plt

import gradio as gr
from huggingface_hub import from_pretrained_keras


def conv_bn(x, filters):
    x = layers.Conv1D(filters, kernel_size=1, padding="valid")(x)
    x = layers.BatchNormalization(momentum=0.0)(x)
    return layers.Activation("relu")(x)


def dense_bn(x, filters):
    x = layers.Dense(filters)(x)
    x = layers.BatchNormalization(momentum=0.0)(x)
    return layers.Activation("relu")(x)


# @keras.utils.register_keras_serializable
class OrthogonalRegularizer(keras.regularizers.Regularizer):
    # Penalises deviation of the learned feature transform from an orthogonal matrix.

    def __init__(self, num_features, l2reg=0.001, **kwargs):
        super(OrthogonalRegularizer, self).__init__(**kwargs)
        self.num_features = num_features
        self.l2reg = l2reg
        self.eye = tf.eye(num_features)

    def __call__(self, x):
        x = tf.reshape(x, (-1, self.num_features, self.num_features))
        xxt = tf.tensordot(x, x, axes=(2, 2))
        xxt = tf.reshape(xxt, (-1, self.num_features, self.num_features))
        return tf.reduce_sum(self.l2reg * tf.square(xxt - self.eye))

    def get_config(self):
        return {'l2reg': float(self.l2reg)}


def tnet(inputs, num_features):
    # Initialise bias as the identity matrix
    bias = keras.initializers.Constant(np.eye(num_features).flatten())
    reg = OrthogonalRegularizer(num_features)

    x = conv_bn(inputs, 32)
    x = conv_bn(x, 64)
    x = conv_bn(x, 512)
    x = layers.GlobalMaxPooling1D()(x)
    x = dense_bn(x, 256)
    x = dense_bn(x, 128)
    x = layers.Dense(
        num_features * num_features,
        kernel_initializer="zeros",
        bias_initializer=bias,
        activity_regularizer=reg,
    )(x)
    feat_T = layers.Reshape((num_features, num_features))(x)
    # Apply affine transformation to input features
    return layers.Dot(axes=(2, 1))([inputs, feat_T])


EXAMPLES_PATH = 'examples'
model = from_pretrained_keras('keras-io/PointNet')

CLASS_MAP = {0: 'chair',
             1: 'sofa',
             2: 'desk',
             3: 'bed',
             4: 'dresser',
             5: 'night_stand',
             6: 'toilet',
             7: 'bathtub',
             8: 'monitor',
             9: 'table'}


def infer(img_path):
    # sample a fixed-size point cloud from the uploaded mesh
    mesh = trimesh.load(img_path.name)
    points = mesh.sample(2048)
    points = np.expand_dims(np.asarray(points), axis=0)

    # run the sampled points through the model
    preds = model.predict(points)
    preds = tf.math.argmax(preds, -1)

    # plot the points with the predicted class as the title
    fig = plt.figure(figsize=(4, 6))
    ax = fig.add_subplot(2, 1, 1, projection="3d")
    ax.scatter(points[0, :, 0], points[0, :, 1], points[0, :, 2])
    ax.set_title(f"This is {CLASS_MAP[preds[0].numpy()]}")
    ax.set_axis_off()
    return plt.gcf()


# the app takes an uploaded mesh file as input
inputs = gr.File()

# and outputs a single matplotlib plot
output = gr.Plot()

# it's good practice to pass examples, a description and a title to guide users
title = 'PointNet Classification and Segmentation'
description = 'Classify 3D point clouds with PointNet'
article = "Author: <a href=\"https://huggingface.co/geninhu\">Nhu Hoang</a>. "
examples = [f'{EXAMPLES_PATH}/{f}' for f in os.listdir(EXAMPLES_PATH)]

gr.Interface(infer, inputs, output, examples=examples, allow_flagging='never',
             title=title, description=description, article=article,
             live=False).launch(enable_queue=True, debug=False, inbrowser=False)
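
For orientation, the helper functions above (conv_bn, dense_bn, tnet, OrthogonalRegularizer) match the keras.io "Point cloud classification with PointNet" example, where they are assembled into the full classifier roughly as sketched below. This is illustrative only: the Space never builds the network itself but loads ready-trained weights with from_pretrained_keras('keras-io/PointNet'), and NUM_POINTS/NUM_CLASSES are assumptions (2048 sampled points and the 10 ModelNet10 classes, matching the rest of app.py).

# Sketch of how the upstream keras.io example assembles the classifier.
# NUM_POINTS and NUM_CLASSES are assumed values, not defined in app.py.
NUM_POINTS, NUM_CLASSES = 2048, 10

pc_inputs = keras.Input(shape=(NUM_POINTS, 3))
x = tnet(pc_inputs, 3)                    # align the raw xyz coordinates
x = conv_bn(x, 32)
x = conv_bn(x, 32)
x = tnet(x, 32)                           # align the learned 32-d features
x = conv_bn(x, 32)
x = conv_bn(x, 64)
x = conv_bn(x, 512)
x = layers.GlobalMaxPooling1D()(x)        # order-invariant pooling over points
x = dense_bn(x, 256)
x = layers.Dropout(0.3)(x)
x = dense_bn(x, 128)
x = layers.Dropout(0.3)(x)
outputs = layers.Dense(NUM_CLASSES, activation="softmax")(x)
pointnet = keras.Model(inputs=pc_inputs, outputs=outputs, name="pointnet")

Since the Space only runs inference, app.py skips this assembly entirely and relies on the serialized model pulled from the Hub.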