
I want to use Keras-tuner to tune an autoencoder's hyperparameters. It is a symmetric AE with two layers, and I want the number of units in the first layer to always be greater than or equal to the number of units in the second layer. But I don't know how to implement this with keras-tuner. If someone can help, that would be great. Thank you in advance.

import tensorflow as tf
from tensorflow.keras import layers


class DAE(tf.keras.Model):
    '''
    A DAE model
    '''

    def __init__(self, hp, **kwargs):
        '''
        DAE instantiation
        args :
            hp  : Tuner
            input_dim  : input dimension
        return:
            None
        '''
        super(DAE, self).__init__(**kwargs)
        input_dim = 15
        latent_dim = hp.Choice("latent_space", [2,4,8])
        units_0 = hp.Choice("units_0", [8, 16, 32, 64])
        units_1 = hp.Choice("units_1", [8, 16, 32, 64])
        
        for i in [8, 16, 32, 64]:
            with hp.conditional_scope("units_0", [i]):
                if units_0 == i:
                    ......? # units_1 should be <= i
                    
        dropout = hp.Choice("dropout_rate", [0.1, 0.2, 0.3, 0.4, 0.5])

        inputs    = tf.keras.Input(shape = (input_dim,))
        x         = layers.Dense(units_0, activation="relu")(inputs)
        x         = layers.Dropout(dropout)(x)
        x         = layers.Dense(units_1, activation="relu")(x)
        x         = layers.Dropout(dropout)(x)
        z         = layers.Dense(latent_dim)(x)
        self.encoder = tf.keras.Model(inputs, z, name="encoder")

        inputs  = tf.keras.Input(shape=(latent_dim,))
        x       = layers.Dense(units_1, activation="relu")(inputs)
        x       = layers.Dropout(dropout)(x)
        x       = layers.Dense(units_0, activation="relu")(x)
        x       = layers.Dropout(dropout)(x)
        outputs = layers.Dense(input_dim, activation="linear")(x)
        self.decoder = tf.keras.Model(inputs, outputs, name="decoder")


My code is shown above; it's a denoising autoencoder class.

1 Answer


I found the solution. We need to create a different units_1 hyperparameter for each units_0 value:

class DAE(tf.keras.Model):
    '''
    A DAE model
    '''

    def __init__(self, hp, training=None, **kwargs):
        '''
        DAE instantiation
        args :
            hp  : Tuner
            input_dim  : input dimension
        return:
            None
        '''
        super(DAE, self).__init__(**kwargs)
        self.input_dim = 15
        l_units = [16, 32, 64, 128]
        latent_dim = hp.Choice("latent_space", [2, 4, 8])
        units_0 = hp.Choice("units_0", l_units)
        dropout_0 = hp.Choice("dropout_rate_0", [0.1, 0.2, 0.3, 0.4, 0.5])
        dropout_1 = hp.Choice("dropout_rate_1", [0.1, 0.2, 0.3, 0.4, 0.5])

        for i in l_units:
            name = "units_1_%d" % i  # a unique hyperparameter name for each units_0 value
            with hp.conditional_scope("units_0", [i]):
                if units_0 == i:
                    # units_1 is bounded above by the chosen units_0 value;
                    # a plain local variable is enough, only the hp name must be unique
                    units_1 = hp.Int(name, min_value=8, max_value=i, step=2, sampling="log")

                    inputs    = tf.keras.Input(shape=(self.input_dim,))
                    x         = layers.Dense(units_0, activation="relu")(inputs)
                    x         = layers.Dropout(dropout_0)(x, training=training)
                    x         = layers.Dense(units_1, activation="relu")(x)
                    x         = layers.Dropout(dropout_1)(x, training=training)
                    z         = layers.Dense(latent_dim)(x)
                    self.encoder = tf.keras.Model(inputs, z, name="encoder")

                    inputs  = tf.keras.Input(shape=(latent_dim,))
                    x       = layers.Dense(units_1, activation="relu")(inputs)
                    x       = layers.Dropout(dropout_1)(x, training=training)
                    x       = layers.Dense(units_0, activation="relu")(x)
                    x       = layers.Dropout(dropout_0)(x, training=training)
                    outputs = layers.Dense(self.input_dim, activation="linear")(x)
                    self.decoder = tf.keras.Model(inputs, outputs, name="decoder")
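
For completeness, here is a minimal sketch of how this hypermodel could be plugged into a tuner. The build_model function, optimizer, loss, and search settings below are assumptions for illustration, not part of the original answer; it also assumes DAE defines a call() method chaining self.encoder and self.decoder, which the class above omits.

import keras_tuner as kt
import tensorflow as tf

def build_model(hp):
    # Instantiate the hypermodel and compile with a reconstruction loss (assumed MSE)
    model = DAE(hp)
    model.compile(optimizer=tf.keras.optimizers.Adam(), loss="mse")
    return model

tuner = kt.RandomSearch(
    build_model,
    objective="val_loss",
    max_trials=20,
    overwrite=True,
    directory="dae_tuning",   # hypothetical output directory
    project_name="dae",
)

# x_noisy / x_clean pairs are assumed to be prepared elsewhere:
# tuner.search(x_noisy, x_clean, validation_data=(x_val_noisy, x_val_clean), epochs=50)

With the conditional scope, only the units_1_<i> hyperparameter matching the sampled units_0 is active in a given trial, and its max_value equals that units_0, so the tuner never proposes a second layer wider than the first.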