from keras import backend as K
from keras.callbacks import EarlyStopping
from keras.initializers import Constant
from keras.layers import Dense, Dropout, Flatten, MaxPooling2D
from keras.layers.convolutional import Conv2D
from keras.models import Sequential, load_model
from keras.regularizers import l2


class NNInv():
    """Inverse-projection network (NNInv).

    Learns a mapping from a 2-D projection back to the original
    n-dimensional space:

    * ``X``: n-dimensional dataset of samples; must be scaled so all
      dimensions are in [0, 1].
    * ``X_2d``: 2-dimensional projections of those samples; also scaled
      to [0, 1] (independently of the scaling of ``X``).
    * ``fit()``: trains NNInv to predict nD samples from a 2D sample set.
    * ``inverse()``: uses the trained model to predict an nD point for a
      given 2D location.
    """

    def __init__(self, loss='mean_squared_error', epochs=300):
        """Configure the model.

        loss   -- Keras loss name used when compiling (default MSE).
        epochs -- maximum number of training epochs; early stopping may
                  end training sooner.
        """
        # Reset the Keras/TF session so repeated instantiation in the
        # same process does not accumulate graph state.
        K.clear_session()
        self.loss = loss
        self.epochs = epochs
        # Stop when validation loss plateaus (min_delta 1e-5) for 20
        # epochs, restoring the best weights seen so far.
        self.stop = EarlyStopping(verbose=1, min_delta=0.00001, mode='min',
                                  patience=20, restore_best_weights=True)

    def fit(self, X, X_2d):
        """Train the inverse mapping X_2d -> X.

        X    -- array of shape (n_samples, n_dims), values in [0, 1].
        X_2d -- array of shape (n_samples, 2), values in [0, 1].

        Returns the Keras ``History`` object from training.
        """
        self.m = Sequential()
        # Four wide hidden layers; L2 on the first layer only, small
        # positive bias init on the rest to keep ReLUs active early.
        self.m.add(Dense(2048, activation='relu',
                         kernel_initializer='he_uniform',
                         kernel_regularizer=l2(0.0002),
                         input_shape=(X_2d.shape[1],)))
        self.m.add(Dense(2048, activation='relu',
                         kernel_initializer='he_uniform',
                         bias_initializer=Constant(0.01)))
        self.m.add(Dense(2048, activation='relu',
                         kernel_initializer='he_uniform',
                         bias_initializer=Constant(0.01)))
        self.m.add(Dense(2048, activation='relu',
                         kernel_initializer='he_uniform',
                         bias_initializer=Constant(0.01)))
        # Sigmoid output matches the [0, 1] scaling of X.
        self.m.add(Dense(X.shape[1], activation='sigmoid',
                         kernel_initializer='he_uniform'))
        self.m.compile(loss=self.loss, optimizer='adam')
        hist = self.m.fit(X_2d, X,
                          batch_size=32,
                          epochs=self.epochs,
                          verbose=0,
                          validation_split=0.05,
                          callbacks=[self.stop])
        return hist

    def inverse(self, X_2d):
        """Predict nD points for the given 2D locations.

        Raises RuntimeError if ``fit`` has not been called yet (clearer
        than the bare AttributeError that would otherwise surface).
        """
        if not hasattr(self, 'm'):
            raise RuntimeError('NNInv.inverse called before fit()')
        return self.m.predict(X_2d)