Offline-Signature-Verification-using-Siamese-Network
TypeError: ('Keyword argument not understood:', 'init')
I am using the file SigNet-BHSig260.ipynb on Google Colab, but when I ran this code:
# network definition
base_network = create_base_network_signet(input_shape)

input_a = Input(shape=(input_shape))
input_b = Input(shape=(input_shape))

# because we re-use the same instance `base_network`,
# the weights of the network will be shared across the two branches
processed_a = base_network(input_a)
processed_b = base_network(input_b)

# Compute the Euclidean distance between the two vectors in the latent space
distance = Lambda(euclidean_distance, output_shape=eucl_dist_output_shape)([processed_a, processed_b])

model = Model(input=[input_a, input_b], output=distance)
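(For reference, euclidean_distance and eucl_dist_output_shape here are the usual SigNet-style helpers, roughly:)

from tensorflow.keras import backend as K

def euclidean_distance(vects):
    # L2 distance between the two embedding vectors, kept away from zero for a stable sqrt
    x, y = vects
    return K.sqrt(K.maximum(K.sum(K.square(x - y), axis=1, keepdims=True), K.epsilon()))

def eucl_dist_output_shape(shapes):
    # the Lambda layer outputs one distance value per pair
    shape1, shape2 = shapes
    return (shape1[0], 1)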
I get the following error:

TypeError                                 Traceback (most recent call last)
5 frames
/usr/local/lib/python3.7/dist-packages/tensorflow/python/keras/utils/generic_utils.py in validate_kwargs(kwargs, allowed_kwargs, error_message)
    806   for kwarg in kwargs:
    807     if kwarg not in allowed_kwargs:
--> 808       raise TypeError(error_message, kwarg)
    809
    810

TypeError: ('Keyword argument not understood:', 'init')

###############################################################################
And this is the create_base_network_signet function:
def create_base_network_signet(input_shape):
    '''Base Siamese Network'''
    seq = Sequential()
    seq.add(Conv2D(96, kernel_size=(11, 11), activation='relu', name='conv1_1', strides=4, input_shape=input_shape,
                   init='glorot_uniform', dim_ordering='tf'))
    seq.add(BatchNormalization(epsilon=1e-06, mode=0, axis=1, momentum=0.9))
    seq.add(MaxPooling2D((3, 3), strides=(2, 2)))
    seq.add(ZeroPadding2D((2, 2), dim_ordering='tf'))
    seq.add(Conv2D(256, kernel_size=(5, 5), activation='relu', name='conv2_1', strides=1, init='glorot_uniform', dim_ordering='tf'))
    seq.add(BatchNormalization(epsilon=1e-06, mode=0, axis=1, momentum=0.9))
    seq.add(MaxPooling2D((3, 3), strides=(2, 2)))
    seq.add(Dropout(0.3))  # added extra
    seq.add(ZeroPadding2D((1, 1), dim_ordering='tf'))
    seq.add(Conv2D(384, kernel_size=(3, 3), activation='relu', name='conv3_1', strides=1, init='glorot_uniform', dim_ordering='tf'))
    seq.add(ZeroPadding2D((1, 1), dim_ordering='tf'))
    seq.add(Conv2D(256, kernel_size=(3, 3), activation='relu', name='conv3_2', strides=1, init='glorot_uniform', dim_ordering='tf'))
    seq.add(MaxPooling2D((3, 3), strides=(2, 2)))
    seq.add(Dropout(0.3))  # added extra
    seq.add(Flatten(name='flatten'))
    seq.add(Dense(1024, W_regularizer=l2(0.0005), activation='relu', init='glorot_uniform'))
    seq.add(Dropout(0.5))
    seq.add(Dense(128, W_regularizer=l2(0.0005), activation='relu', init='glorot_uniform'))  # softmax changed to relu
    return seq
###########################################################################
How can I fix this error? Thank you so much!
Change init to kernel_initializer. The init keyword is from Keras 1; in Keras 2 / tf.keras it was renamed to kernel_initializer, so the layer constructor rejects it. The other Keras 1 keywords in this function will raise the same kind of error once init is fixed: W_regularizer is now kernel_regularizer, dim_ordering was replaced by data_format (and can simply be dropped, since channels-last is TensorFlow's default), and BatchNormalization no longer takes a mode argument. Likewise, Model(input=..., output=...) needs to become Model(inputs=..., outputs=...).
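A minimal sketch of the updated function, assuming tf.keras (TF 2.x) and keeping the hyperparameters from the snippet above; axis=1 in BatchNormalization is left as in the original, although axis=-1 is the usual choice for channels-last input:

from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import (Conv2D, MaxPooling2D, ZeroPadding2D,
                                     BatchNormalization, Dropout, Flatten, Dense)
from tensorflow.keras.regularizers import l2

def create_base_network_signet(input_shape):
    '''Base Siamese Network'''
    seq = Sequential()
    # init -> kernel_initializer; dim_ordering dropped (channels-last is the default)
    seq.add(Conv2D(96, kernel_size=(11, 11), activation='relu', name='conv1_1', strides=4,
                   input_shape=input_shape, kernel_initializer='glorot_uniform'))
    # mode=0 removed; the argument no longer exists in Keras 2
    seq.add(BatchNormalization(epsilon=1e-06, axis=1, momentum=0.9))
    seq.add(MaxPooling2D((3, 3), strides=(2, 2)))
    seq.add(ZeroPadding2D((2, 2)))
    seq.add(Conv2D(256, kernel_size=(5, 5), activation='relu', name='conv2_1', strides=1,
                   kernel_initializer='glorot_uniform'))
    seq.add(BatchNormalization(epsilon=1e-06, axis=1, momentum=0.9))
    seq.add(MaxPooling2D((3, 3), strides=(2, 2)))
    seq.add(Dropout(0.3))  # added extra
    seq.add(ZeroPadding2D((1, 1)))
    seq.add(Conv2D(384, kernel_size=(3, 3), activation='relu', name='conv3_1', strides=1,
                   kernel_initializer='glorot_uniform'))
    seq.add(ZeroPadding2D((1, 1)))
    seq.add(Conv2D(256, kernel_size=(3, 3), activation='relu', name='conv3_2', strides=1,
                   kernel_initializer='glorot_uniform'))
    seq.add(MaxPooling2D((3, 3), strides=(2, 2)))
    seq.add(Dropout(0.3))  # added extra
    seq.add(Flatten(name='flatten'))
    # W_regularizer -> kernel_regularizer
    seq.add(Dense(1024, kernel_regularizer=l2(0.0005), activation='relu',
                  kernel_initializer='glorot_uniform'))
    seq.add(Dropout(0.5))
    seq.add(Dense(128, kernel_regularizer=l2(0.0005), activation='relu',
                  kernel_initializer='glorot_uniform'))  # softmax changed to relu
    return seq

With that in place, build the Siamese model with the plural keyword names, i.e. Model(inputs=[input_a, input_b], outputs=distance).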