I want to manually loop over the varying sequence lengths of the input sequences, but TensorFlow automatically sets the time axis to None after noticing the varying sequence lengths. Is there any workaround for this?
Minimal example:
import tensorflow as tf
import numpy as np
class MyExample(tf.keras.Model):
    """Unrolls an LSTMCell manually over the time axis, then projects each step.

    NOTE(review): manual unrolling via ``range(inputs.shape[1])`` requires a
    statically known time dimension; with variable-length sequences the time
    axis is None in graph mode, which is the problem the question describes.
    """

    def __init__(self, int_dim, **kwargs):
        super().__init__(**kwargs)
        self.int_dim = int_dim  # hidden size of the LSTM cell and the dense layer
        self.lstm = tf.keras.layers.LSTMCell(self.int_dim)
        self.d2 = tf.keras.layers.Dense(self.int_dim)

    def call(self, inputs):
        # inputs: assumed (batch, time, features) — TODO confirm with caller.
        # Derive the batch size dynamically instead of hard-coding 1, so the
        # model also works for batches larger than one.
        batch_size = tf.shape(inputs)[0]
        states = (tf.zeros((batch_size, self.int_dim)),
                  tf.zeros((batch_size, self.int_dim)))
        outputs = []
        # Requires inputs.shape[1] to be a concrete int (fails when it is None).
        for t in range(inputs.shape[1]):
            lstm_out, states = self.lstm(inputs[:, t, :], states)
            outputs.append(self.d2(lstm_out))
        # Re-assemble per-step outputs along the time axis: (batch, time, int_dim).
        return tf.stack(outputs, 1)
def generator():
    """Yield an endless stream of (X, Y) pairs with a random sequence length.

    Each pair has shape (1, seq_len, 5) where seq_len is drawn uniformly
    from [2, 10) on every yield.
    """
    while True:
        length = np.random.randint(2, 10)
        shape = (1, length, 5)
        yield tf.random.uniform(shape), tf.random.uniform(shape)
model = MyExample(5)
model.compile('adam', 'BinaryCrossentropy')
# batch_size must not be passed when fitting from a generator (Keras ignores
# it and warns — batching is the generator's job), and an infinite generator
# needs steps_per_epoch, otherwise the epoch never terminates.
model.fit(generator(), steps_per_epoch=10, epochs=1)
Here is a fix for Eager Execution mode. A Graph mode solution could look like this: