import datetime
import glob
import os

import matplotlib.pyplot as plt
import numpy as np
import tensorflow as tf
import trimesh
from tensorflow import keras
from tensorflow.keras import layers
# Fix the global seed so point sampling and weight init are reproducible.
tf.random.set_seed(1)

# Download and unpack the ModelNet10 dataset.
# BUG FIX: `get_file` requires an `origin` URL; the original call omitted it
# and would raise a ValueError before downloading anything.
DATA_DIR = tf.keras.utils.get_file(
    "modelnet.zip",
    origin="http://3dvision.princeton.edu/projects/2014/3DShapeNets/ModelNet10.zip",
    extract=True,
)
DATA_DIR = os.path.join(os.path.dirname(DATA_DIR), "ModelNet10")

# Sanity-check the data: load one mesh and visualize a sampled point cloud.
mesh = trimesh.load(os.path.join(DATA_DIR, "chair/train/chair_0001.off"))
points = mesh.sample(2048)  # uniformly sample 2048 points from the surface

fig = plt.figure(figsize=(5, 5))
ax = fig.add_subplot(111, projection="3d")
ax.scatter(points[:, 0], points[:, 1], points[:, 2], color="red")
ax.set_axis_off()
plt.show()
def parse_dataset(num_points=2048):
    """Load the ModelNet meshes under DATA_DIR as sampled point clouds.

    Every mesh in ``<class>/train`` and ``<class>/test`` is loaded with
    trimesh and converted to a point cloud by uniformly sampling
    ``num_points`` points from its surface.

    Args:
        num_points: number of surface points sampled from each mesh.

    Returns:
        Tuple ``(train_points, test_points, train_labels, test_labels,
        class_map)`` — numpy arrays plus a dict mapping the integer label
        to its class folder name.
    """
    train_points = []
    train_labels = []
    test_points = []
    test_labels = []
    class_map = {}
    # Skip the dataset's README; every other entry is a class folder.
    folders = glob.glob(os.path.join(DATA_DIR, "[!README]*"))

    for i, folder in enumerate(folders):
        print("processing class: {}".format(os.path.basename(folder)))
        # BUG FIX: the original used folder.split("/")[-1], which breaks on
        # Windows path separators; os.path.basename is portable and is what
        # the print() above already uses.
        class_map[i] = os.path.basename(folder)
        train_files = glob.glob(os.path.join(folder, "train/*"))
        test_files = glob.glob(os.path.join(folder, "test/*"))

        for f in train_files:
            train_points.append(trimesh.load(f).sample(num_points))
            train_labels.append(i)

        for f in test_files:
            test_points.append(trimesh.load(f).sample(num_points))
            test_labels.append(i)

    return (
        np.array(train_points),
        np.array(test_points),
        np.array(train_labels),
        np.array(test_labels),
        class_map,
    )
class OrthogonalRegularizer(keras.regularizers.Regularizer):
    """Activity regularizer nudging a learned feature transform toward
    orthogonality.

    Penalizes ``sum(l2reg * (X @ X^T - I)**2)`` over the batch, where X is
    the per-sample (num_features, num_features) transform matrix.
    """

    def __init__(self, num_features, l2reg=0.001):
        # num_features: side length of the square transform matrix.
        self.num_features = num_features
        # l2reg: weight of the orthogonality penalty.
        self.l2reg = l2reg
        self.eye = tf.eye(num_features)

    def __call__(self, x):
        # The Dense layer emits the transform flattened as
        # (batch, num_features * num_features); restore the matrix shape.
        x = tf.reshape(x, (-1, self.num_features, self.num_features))
        # Batched X @ X^T: contract the last axis of each sample with itself.
        xxt = tf.tensordot(x, x, axes=(2, 2))
        xxt = tf.reshape(xxt, (-1, self.num_features, self.num_features))
        return tf.reduce_sum(self.l2reg * tf.square(xxt - self.eye))

    def get_config(self):
        # FIX: custom Keras regularizers must implement get_config so a
        # model that uses them can be saved and reloaded; the original
        # class omitted it.
        return {"num_features": self.num_features, "l2reg": self.l2reg}
def t_net(inputs, num_features):
    """T-Net transformer block: predict a (num_features, num_features)
    matrix per sample and apply it to ``inputs``.

    The Dense head is zero-initialized with an identity bias so the block
    starts as a no-op, and the orthogonal regularizer keeps the learned
    transform close to a rotation.
    """
    identity_bias = keras.initializers.Constant(np.eye(num_features).flatten())
    ortho_reg = OrthogonalRegularizer(num_features)

    # Per-point feature extraction followed by a symmetric max pool.
    net = inputs
    for filters in (32, 64, 512):
        net = conv_bn(net, filters)
    net = layers.GlobalMaxPooling1D()(net)
    for units in (256, 128):
        net = dense_bn(net, units)

    # Regress the flattened transform, then reshape into a matrix.
    transform = layers.Dense(
        num_features * num_features,
        kernel_initializer="zeros",
        bias_initializer=identity_bias,
        activity_regularizer=ortho_reg,
    )(net)
    transform = layers.Reshape((num_features, num_features))(transform)
    # Apply the learned transform to every input point/feature vector.
    return layers.Dot(axes=(2, 1))([inputs, transform])
# FIX: NUM_POINTS and NUM_CLASSES were referenced below but never defined
# anywhere in the file (NameError). 2048 matches parse_dataset's default
# sampling size; ModelNet10 has 10 object categories.
NUM_POINTS = 2048
NUM_CLASSES = 10

# PointNet classifier: input transform, per-point convolutions, feature
# transform, global max pool (order-invariant), then an MLP head.
inputs = keras.Input(shape=(NUM_POINTS, 3))

x = t_net(inputs, 3)  # spatial (input) transform
x = conv_bn(x, 32)
x = conv_bn(x, 32)
x = t_net(x, 32)  # feature transform
x = conv_bn(x, 32)
x = conv_bn(x, 64)
x = conv_bn(x, 512)
x = layers.GlobalMaxPooling1D()(x)  # symmetric fn: invariant to point order
x = dense_bn(x, 256)
x = layers.Dropout(0.3)(x)
x = dense_bn(x, 128)
x = layers.Dropout(0.3)(x)

outputs = layers.Dense(NUM_CLASSES, activation="softmax")(x)

model = keras.Model(inputs=inputs, outputs=outputs, name="pointnet")
model.summary()
# FIX: "%load_ext tensorboard" is an IPython magic and a syntax error in a
# plain Python script; run it in a notebook cell instead if needed.

model.compile(
    loss="sparse_categorical_crossentropy",
    optimizer=keras.optimizers.Adam(learning_rate=0.001),
    metrics=["sparse_categorical_accuracy"],
)

# FIX: train_dataset/test_dataset were consumed by fit() below but never
# created (NameError). Build them from the parsed ModelNet point clouds.
# NOTE(review): the upstream tutorial also jitters/shuffles points as
# augmentation on the training split — add if desired.
train_points, test_points, train_labels, test_labels, CLASS_MAP = parse_dataset()
train_dataset = (
    tf.data.Dataset.from_tensor_slices((train_points, train_labels))
    .shuffle(len(train_points))
    .batch(32)
)
test_dataset = (
    tf.data.Dataset.from_tensor_slices((test_points, test_labels))
    .shuffle(len(test_points))
    .batch(32)
)

# One timestamped run directory per training run so TensorBoard can
# compare runs side by side.
log_dir = "logs/fit/" + datetime.datetime.now().strftime("%Y%m%d-%H%M%S")
tensorboard_callback = tf.keras.callbacks.TensorBoard(
    log_dir=log_dir, histogram_freq=1
)
print(log_dir)

model.fit(
    train_dataset,
    epochs=30,
    validation_data=test_dataset,
    callbacks=[tensorboard_callback],
)