Hello everybody,
I'm trying to build a new GRU layer that takes the input together with a code of zero-one sequences. The code has the same size as the input, and every digit of the code encodes one input value as follows: if the digit is 1, the cell uses that input value in its calculations; if the digit is 0, the cell instead uses part of the hidden state h(t-1). The new cell should be usable in a functional API model.
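In other words, the intended substitution at each timestep looks like this toy example (the names here are only illustrative, not the real layer code):

import numpy as np

# Toy illustration of the substitution rule (illustrative names only):
# where the code m is 1 we keep the input value; where it is 0 we fall
# back to a projection of the previous hidden state h(t-1).
features, units = 4, 3
x = np.random.randn(features)              # input values at the current step
m = np.array([1., 0., 1., 0.])             # zero-one code (1 = value present)
h_prev = np.random.randn(units)            # previous hidden state h(t-1)
W_x = np.random.randn(units, features)     # projects h(t-1) to input space
x_eff = m * x + (1 - m) * h_prev.dot(W_x)  # effective input fed to the GRU math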
I defined the cell and the layer classes as follows:
class MyGRUCell(Layer):
    """This cell is able to deal with missing values in the input.

    1- Adding another input matrix is not possible; GRU needs only one input matrix.
    2- Include the mask of missing values together with the values in the same input.
       The mask of the missing values looks like extra samples: it has the same shape
       as the values, so the input dimension should be twice the feature size.
    """
    def __init__(self, units,
                 Input_activation='relu',
                 activation='tanh',
                 recurrent_activation='hard_sigmoid',
                 use_bias=True,
                 kernel_initializer='glorot_uniform',
                 recurrent_initializer='orthogonal',
                 bias_initializer='zeros',
                 kernel_regularizer=None,
                 recurrent_regularizer=None,
                 bias_regularizer=None,
                 kernel_constraint=None,
                 recurrent_constraint=None,
                 bias_constraint=None,
                 dropout=0.,
                 recurrent_dropout=0.,
                 implementation=0,
                 **kwargs):
        super(MyGRUCell, self).__init__(**kwargs)
        self.units = units
        self.Input_activation = activations.get(Input_activation)
        self.activation = activations.get(activation)
        self.recurrent_activation = activations.get(recurrent_activation)
        self.use_bias = use_bias
        # the attributes below are referenced by build() and get_config(),
        # so they have to be stored here as well:
        self.kernel_initializer = initializers.get(kernel_initializer)
        self.recurrent_initializer = initializers.get(recurrent_initializer)
        self.bias_initializer = initializers.get(bias_initializer)
        self.kernel_regularizer = regularizers.get(kernel_regularizer)
        self.recurrent_regularizer = regularizers.get(recurrent_regularizer)
        self.bias_regularizer = regularizers.get(bias_regularizer)
        self.kernel_constraint = constraints.get(kernel_constraint)
        self.recurrent_constraint = constraints.get(recurrent_constraint)
        self.bias_constraint = constraints.get(bias_constraint)
        self.dropout = min(1., max(0., dropout))
        self.recurrent_dropout = min(1., max(0., recurrent_dropout))
        self.implementation = implementation
        self.state_size = self.units  # required so the RNN wrapper knows the state shape
    def build(self, input_shape):
        input_dim = input_shape[-1]
        print("Building: in_d = ", input_dim)
        # add the input weights (W), to be multiplied by x
        self.kernel = self.add_weight(shape=(input_dim, self.units * 3),
                                      name='kernel',
                                      initializer=self.kernel_initializer,
                                      regularizer=self.kernel_regularizer,
                                      constraint=self.kernel_constraint)
        # add the recurrent weights (U), to be multiplied by h(t-1)
        self.recurrent_kernel = self.add_weight(
            shape=(self.units, self.units * 4),
            name='recurrent_kernel',
            initializer=self.recurrent_initializer,
            regularizer=self.recurrent_regularizer,
            constraint=self.recurrent_constraint)
    def call(self, inputs, states, training=None):
        print("Call of the cell . . .")
        h_tm1 = states[0]  # previous memory state
        print("Cell input shape: ", inputs.shape)
        # split the single input into the values and the zero-one code
        missing = inputs[:, int(inputs.shape[1] / 2):, :]
        Values = inputs[:, :int(inputs.shape[1] / 2), :]
        print("Values and missing code is separated")
        if self.implementation == 1:
            # my input: project h(t-1) into input space
            h_as_x = K.dot(h_tm1, self.recurrent_kernel_x)
            if self.use_bias:
                # w * h(t-1) + b
                h_as_x = K.bias_add(h_as_x, self.bias_x)
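        # NOTE (sketch): self.recurrent_kernel_x and self.bias_x are used here
        # but never created in build(). Presumably recurrent_kernel_x is meant
        # to be the extra fourth block of recurrent_kernel (units * 4 columns
        # vs. units * 3 in kernel), i.e. something like:
        #     self.recurrent_kernel_x = self.recurrent_kernel[:, self.units * 3:]
        # and bias_x would need a matching add_weight() in build().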
    def get_config(self):
        config = {'units': self.units,
                  'activation': activations.serialize(self.activation),
                  'recurrent_activation': activations.serialize(self.recurrent_activation),
                  'use_bias': self.use_bias,
                  'kernel_initializer': initializers.serialize(self.kernel_initializer),
                  'recurrent_initializer': initializers.serialize(self.recurrent_initializer),
                  'bias_initializer': initializers.serialize(self.bias_initializer),
                  'kernel_regularizer': regularizers.serialize(self.kernel_regularizer),
                  'recurrent_regularizer': regularizers.serialize(self.recurrent_regularizer),
                  'bias_regularizer': regularizers.serialize(self.bias_regularizer),
                  'kernel_constraint': constraints.serialize(self.kernel_constraint),
                  'recurrent_constraint': constraints.serialize(self.recurrent_constraint),
                  'bias_constraint': constraints.serialize(self.bias_constraint),
                  'recurrent_dropout': self.recurrent_dropout,
                  'implementation': self.implementation}
        base_config = super(MyGRUCell, self).get_config()
        return dict(list(base_config.items()) + list(config.items()))
print("Cell is Built")
##################
######## Build the class of the GRU
class CMyGRU(RNN):
    # @interfaces.legacy_recurrent_support
    def __init__(self, units,
                 Input_activation='relu',
                 activation='tanh',
                 recurrent_activation='hard_sigmoid',
                 use_bias=True,
                 kernel_initializer='glorot_uniform',
                 recurrent_initializer='orthogonal',
                 bias_initializer='zeros',
                 kernel_regularizer=None,
                 recurrent_regularizer=None,
                 bias_regularizer=None,
                 activity_regularizer=None,
                 kernel_constraint=None,
                 recurrent_constraint=None,
                 bias_constraint=None,
                 dropout=0.,
                 recurrent_dropout=0.,
                 implementation=1,
                 return_sequences=False,
                 return_state=False,
                 go_backwards=False,
                 stateful=False,
                 unroll=False,
                 **kwargs):
        if implementation == 0:
            warnings.warn('`implementation=0` has been deprecated, '
                          'and now defaults to `implementation=1`. '
                          'Please update your layer call.')
print("Layer is Built")
#######################
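For comparison, the built-in GRU layer constructs its cell inside __init__ and hands it to RNN.__init__; my __init__ above never does this, so the RNN wrapper is never initialized. A minimal sketch of the standard pattern (argument list abbreviated):

class CMyGRU(RNN):
    def __init__(self, units,
                 Input_activation='relu',
                 return_sequences=False,
                 **kwargs):
        # build the inner cell and pass it to the RNN wrapper; without this
        # call, RNN.__init__ never runs and the layer cannot be used
        cell = MyGRUCell(units, Input_activation=Input_activation)
        super(CMyGRU, self).__init__(cell,
                                     return_sequences=return_sequences,
                                     **kwargs)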
I'm trying to build a neural network model with the new GRU layer as follows:
Input_Features_size = 10
Enter = Input((Input_Features_size * 2, 1))
print(Enter.shape)
L1 = CMyGRU(units=20, return_sequences=True)(Enter)
L2 = CMyGRU(20, return_sequences=True)(L1, miss=mis_values)
L3 = CMyGRU(10, return_sequences=True)(L2, mis_values)
L4 = CMyGRU(1, return_sequences=False)(L3, mis_values)
Mdl = Model(inputs=Enter, outputs=L4)
opt = optimizers.Adam(lr=0.001)
Mdl.compile(optimizer=opt, loss=losses.MeanSquaredError(), metrics=['accuracy'])
Mdl.summary()
Unfortunately, it gives the following error:
ValueError: in user code:
Can anyone find the reason? I think it is related to the shape of the input and how it is passed to the inner cell, but I don't know how to fix it.
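One shape detail that might matter: with Input((Input_Features_size * 2, 1)), the RNN treats the first axis as time, so the cell's call() only receives a 2-D tensor of shape (batch, 1) per timestep, and the three-index slicing inside call() cannot work. If the values and the zero-one code were stacked along the feature axis instead, each step would carry both halves (timesteps and features below are illustrative sizes):

from tensorflow.keras.layers import Input

timesteps, features = 50, 10
# values and their zero-one code stacked along the feature axis, so each
# timestep gives the cell a (batch, 2 * features) tensor it can split in half:
# values = inputs[:, :features] and code = inputs[:, features:]
Enter = Input((timesteps, features * 2))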