Can someone look at this issue? The problem lies in the last two lines: `x_val = x_train[:10000]` and `partial_x_train = x_train[10000:]`. Although I'm not including the other library setup here, assume it works fine — as I said, the real problem occurs at the end.
"""Binary sentiment classification on the IMDB dataset (Keras).

Root cause of the reported error: `vectorize_sequences` ended with a bare
`return`, so `x_train` and `x_test` were `None`, and the final slices
(`x_train[:10000]`, `x_train[10000:]`) raised
`TypeError: 'NoneType' object is not subscriptable`.

Also fixed: `models` and `layers` were used but never imported.
"""
from keras.datasets import imdb
from keras import models
from keras import layers
from keras import optimizers
from keras import losses
from keras import metrics
import numpy as np

# Keep only the 10,000 most frequent words in the vocabulary.
# NOTE: original spelling `lables` kept so any downstream code still works.
(train_data, train_lables), (test_data, test_lables) = imdb.load_data(num_words=10000)


def vectorize_sequences(sequences, dimension=10000):
    """Multi-hot encode `sequences` into a (len(sequences), dimension) float array.

    Each row i gets 1.0 at every word index present in sequences[i], 0.0 elsewhere.
    """
    results = np.zeros((len(sequences), dimension))
    for i, sequence in enumerate(sequences):
        # Fancy indexing: set all word indices of this review to 1 at once.
        results[i, sequence] = 1.
    # FIX: was a bare `return` (returned None); also removed the debug
    # print(results) that dumped a 25000x10000 zero array to stdout.
    return results


x_train = vectorize_sequences(train_data)
x_test = vectorize_sequences(test_data)
y_train = np.asarray(train_lables).astype('float32')
y_test = np.asarray(test_lables).astype('float32')

# Two hidden ReLU layers, sigmoid output for binary classification.
model = models.Sequential()
model.add(layers.Dense(16, activation='relu', input_shape=(10000,)))
model.add(layers.Dense(16, activation='relu'))
model.add(layers.Dense(1, activation='sigmoid'))
model.compile(optimizer='rmsprop',
              loss='binary_crossentropy',
              metrics=['accuracy'])

# Hold out the first 10,000 samples for validation; train on the rest.
# This now works because x_train is a real ndarray, not None.
x_val = x_train[:10000]
partial_x_train = x_train[10000:]