import tensorflow as tf


def ResNET_block(inp):
    # Project the input to 1024 units; this activation is kept as the skip branch.
    inp_skip = tf.keras.layers.Dense(1024, activation='relu', kernel_initializer='random_normal')(inp)
    inp = tf.keras.layers.Dropout(0.1)(inp_skip)
    # Bottleneck down to 512 units, then back up to 1024 so the shapes match for the add.
    inp = tf.keras.layers.Dense(512, activation='relu', kernel_initializer='random_normal')(inp)
    inp = tf.keras.layers.Dense(1024, activation='relu', kernel_initializer='random_normal')(inp)
    # Residual connection: add the skip branch back in, then apply a final non-linearity.
    inp = tf.keras.layers.Add()([inp, inp_skip])
    return tf.keras.layers.LeakyReLU()(inp)
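For context, here is a minimal sketch of how the block might be wired into a Keras functional-API model. The 64-feature input width, the two stacked blocks, and the single-unit output head are illustrative assumptions, not part of the original code.

# Usage sketch (assumed input shape and head, for illustration only).
inputs = tf.keras.Input(shape=(64,))      # hypothetical feature width
x = ResNET_block(inputs)                  # first residual block
x = ResNET_block(x)                       # blocks can be stacked freely
outputs = tf.keras.layers.Dense(1)(x)     # hypothetical regression head
model = tf.keras.Model(inputs, outputs)
model.summary()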