import numpy as np

from keras.preprocessing import sequence
from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation
from keras.layers import Embedding
from keras.layers import Conv1D, GlobalMaxPooling1D
from keras.callbacks import EarlyStopping
from keras.datasets import imdb

# Train a small 1D-CNN sentiment classifier on the IMDB review dataset.
# Reviews arrive as sequences of integer word indices (already tokenized).

# Keep only the n_words most frequent words; rarer words are dropped.
n_words = 1000
(X_train, y_train), (X_test, y_test) = imdb.load_data(num_words=n_words)
print('Train seq: {}'.format(len(X_train)))
print('Test seq: {}'.format(len(X_test)))  # fix: originally printed len(X_train)

print('Train example: \n{}'.format(X_train[0]))
print('\nTest example: \n{}'.format(X_test[0]))

# Note: the data is already preprocessed (words are mapped to integer indices)

# Pad/truncate every review to a fixed length so they can be batched.
max_len = 200
X_train = sequence.pad_sequences(X_train, maxlen=max_len)
X_test = sequence.pad_sequences(X_test, maxlen=max_len)

# Define network architecture and compile
model = Sequential()
model.add(Embedding(n_words, 50, input_length=max_len))
model.add(Conv1D(128, 3, padding='valid', activation='relu', strides=1))
# Collapse the time dimension to a single 128-d feature vector per review.
# Without this pooling step (the import was unused in the original), the
# Dense layers act per-timestep and the final output shape (batch, time, 1)
# no longer matches the scalar binary labels.
model.add(GlobalMaxPooling1D())
model.add(Dense(250, activation='relu'))
model.add(Dense(1, activation='sigmoid'))

model.compile(loss='binary_crossentropy', optimizer='adam',
              metrics=['accuracy'])

# Stop training when a monitored quantity has stopped improving.
# patience: number of epochs with no improvement after which training will be stopped.
# NOTE(review): 'val_acc' is the metric name in Keras <= 2.2; newer releases
# use 'val_accuracy' — confirm against the installed Keras version.
callbacks = [EarlyStopping(monitor='val_acc', patience=3)]

batch_size = 64
n_epochs = 100
# Fix: the original source line was truncated and lost the model.fit(...) call;
# reconstructed from the surviving keyword arguments.
model.fit(X_train, y_train, batch_size=batch_size, epochs=n_epochs,
          validation_split=0.2, callbacks=callbacks)

print('\nAccuracy on test set: {}'.format(model.evaluate(X_test, y_test)[1]))

# Accuracy on test set: 0.873

# Example console output when run (the h5py FutureWarning is harmless):
#   /Users/swa/conda/lib/python3.6/site-packages/h5py/ FutureWarning: Conversion of the second argument of issubdtype from `float` to `np.floating` is deprecated. In future, it will be treated as `np.float64 == np.dtype(float).type`.
#     from ._conv import register_converters as _register_converters
#   Using TensorFlow backend.