diff --git a/onnx2keras/activation_layers.py b/onnx2keras/activation_layers.py
index 78a585a6..55be903b 100644
--- a/onnx2keras/activation_layers.py
+++ b/onnx2keras/activation_layers.py
@@ -141,14 +141,9 @@ def convert_softmax(node, params, layers, lambda_func, node_name, keras_name):
 
     input_0 = ensure_tf_type(layers[node.input[0]], name="%s_const" % keras_name)
 
-    def target_layer(x, axis=params['axis']):
-        import tensorflow as tf
-        return tf.nn.softmax(x, axis=axis)
-
-    lambda_layer = keras.layers.Lambda(target_layer, name=keras_name)
-    layers[node_name] = lambda_layer(input_0)
+    softmax_layer = keras.layers.Softmax(axis=params['axis'], name=keras_name)
+    layers[node_name] = softmax_layer(input_0)
     layers[node_name].set_shape(layers[node_name].shape)
-    lambda_func[keras_name] = target_layer
 
 
 def convert_prelu(node, params, layers, lambda_func, node_name, keras_name):
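
For reviewers, a minimal sketch (not part of the patch) checking that `keras.layers.Softmax(axis=...)` reproduces the `tf.nn.softmax` Lambda it replaces; the tensor shape and axis below are arbitrary examples and assume TF 2.x eager execution:

```python
# Illustrative equivalence check only; not part of the patch.
import numpy as np
import tensorflow as tf
from tensorflow import keras

x = tf.constant(np.random.rand(2, 3, 4).astype(np.float32))
axis = 1  # in the converter this comes from params['axis']

old_out = tf.nn.softmax(x, axis=axis)           # behaviour of the removed Lambda
new_out = keras.layers.Softmax(axis=axis)(x)    # behaviour of the built-in layer

np.testing.assert_allclose(old_out.numpy(), new_out.numpy(), rtol=1e-6, atol=1e-7)
print("keras.layers.Softmax matches tf.nn.softmax along axis", axis)
```

A side effect of dropping the Lambda closure is that there is no longer a `target_layer` function to register, which is why the final removed line also drops the `lambda_func[keras_name]` bookkeeping.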