I need help in code debugging,
given X1_train_scaled_.shape=(2960642, 17, 1), X2_train_padded.shape=(414, 17, 1), X1_test_.shape=(1214889, 17, 1), X2_test_padded.shape=(190, 17, 1),
- two modalities have different sizes,
- in training I need to create batches for first modality(X1) making samples in each batch is same as X2_train_padded, and in the last batch, randomly select samples from X2_train_padded making it same size as the remainder of X1_train_scaled_,
- in validation, I need to create batches for first modality(X1) making samples in each batch is same as X2_test_padded, and in the last batch, randomly select samples from X2_test_padded making it same size as the remainder of X1_test_,
but I got
IndexError: index 190 is out of bounds for axis 0 with size 190error, can anyone help me solve it? Below is my code:
# Concatenate the features from both modalities
merged_features = concatenate([facial_model.output, mouse_.output])
# Joint Learning
joint_layer = Dense(2, activation='relu', name='dense_hidden_final1')(merged_features)
joint_layer = Dropout(0.5)(joint_layer)
joint_layer = Dense(32, activation='relu', name='dense_hidden_final2')(joint_layer)
joint_layer = Dropout(0.5)(joint_layer)
# Output Layer
output_layer = Dense(1, activation='sigmoid', name='output')(joint_layer)
# Combined Model
model = Model(inputs=[facial_model.input, mouse_.input], outputs=output_layer)
# Compile the model
model.compile(
optimizer='adam',
loss='binary_crossentropy',
metrics=['accuracy', Precision(), Recall(), RootMeanSquaredError()]
)
# Print summary
# model.summary()
# Define custom generator
def custom_generator(X1, X2, y, batch_size):
total_samples = min(len(X1), len(X2)) # Ensure we use the minimum length
batches_per_epoch = total_samples // batch_size
while True:
for i in range(batches_per_epoch):
start_idx = i * batch_size
end_idx = (i + 1) * batch_size
yield ([X1[start_idx:end_idx], X2[start_idx:end_idx]], y[start_idx:end_idx])
# Handle the last batch
if total_samples % batch_size != 0:
start_idx = batches_per_epoch * batch_size
end_idx = total_samples
# print(f"Debug: last batch - start_idx: {start_idx}, end_idx: {end_idx}")
# Randomly select samples from the second modality to match the size of the first modality
selected_indices = np.random.choice(len(X2), size=batch_size - (end_idx - start_idx), replace=True)
# print(f"Debug: selected_indices: {selected_indices}")
yield ([X1[start_idx:end_idx], X2[selected_indices]], y[start_idx:end_idx])
# Training
batch_size = X2_train_padded.shape[0]
history_joint = model.fit(
custom_generator(X1_train_scaled_, X2_train_padded, y1_trainE, batch_size),
steps_per_epoch=len(X1_train_scaled_) // batch_size + 1, # +1 to include the last batch
epochs=10,
callbacks=[stop, best],
validation_data=([X1_test_, X2_test_padded], y1_testE),
verbose=1
)