Deep Learning - Model Subclassing (class)

엔지니어 큐브 · September 10, 2023

1. Example

import tensorflow as tf

x = tf.keras.layers.Input(shape=[1])
h = tf.keras.layers.Dense(2, activation='swish')(x)
y = tf.keras.layers.Dense(1)(h)
model = tf.keras.models.Model(x, y)


The class below defines the same model as the functional code above (the lemonade example).

class MyModel(tf.keras.Model):
    def __init__(self, **kwargs):
        super(MyModel, self).__init__(**kwargs)   # __init__ creates and initializes the layers
        self.dense1 = tf.keras.layers.Dense(2, activation='swish')
        self.dense2 = tf.keras.layers.Dense(1)

    # tf.keras.Model already provides __call__, which routes to call() below

    def call(self, x):
        h = self.dense1(x)
        y = self.dense2(h)
        return y
    
model = MyModel()
model.compile(loss='mse')
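
Either definition trains the same way once compiled. A minimal usage sketch, assuming a made-up lemonade-style dataset where the target is simply twice the input:

import numpy as np

x_data = np.arange(1, 7, dtype=np.float32).reshape(-1, 1)    # hypothetical inputs
y_data = 2 * x_data                                          # y = 2x, as in the lemonade example

model.fit(x_data, y_data, epochs=1000, verbose=0)
print(model.predict(np.array([[15.0]], dtype=np.float32)))   # expect roughly 30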

2. Practice

x = tf.keras.layers.Input(shape=[28, 28])
h = tf.keras.layers.Flatten()(x)

h = tf.keras.layers.Dropout(0.5)(h)
h = tf.keras.layers.Dense(64)(h)
h = tf.keras.layers.BatchNormalization()(h)
h = tf.keras.layers.Activation('swish')(h)

h = tf.keras.layers.Dropout(0.5)(h)
h = tf.keras.layers.Dense(20)(h)
h = tf.keras.layers.BatchNormalization()(h)
h = tf.keras.layers.Activation('swish')(h)

y = tf.keras.layers.Dense(10, activation='softmax')(h)

model = tf.keras.Model(x, y)
model.compile(loss='sparse_categorical_crossentropy', metrics='accuracy')

model.summary()
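
To actually run this, the Fashion-MNIST data can be loaded from tf.keras.datasets; a minimal sketch (the epoch count is arbitrary):

(x_train, y_train), (x_test, y_test) = tf.keras.datasets.fashion_mnist.load_data()
x_train, x_test = x_train / 255.0, x_test / 255.0   # scale pixels to [0, 1]

model.fit(x_train, y_train, epochs=5, validation_data=(x_test, y_test))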

-------
The same training written by hand with tf.GradientTape (the snippet sat inside an epoch loop, restored here):

for e in range(epochs):
    with tf.GradientTape() as tape:
        pred = model(x_train.values, training=True)
        cost = loss(y_train.values, pred)
    grad = tape.gradient(cost, model.trainable_weights)
    optim.apply_gradients(zip(grad, model.trainable_weights))
    print(e, cost)
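
This loop assumes loss, optim, and epochs are already defined; the .values accessors suggest x_train and y_train were pandas DataFrames here (with the NumPy arrays from fashion_mnist.load_data() they would be dropped). A minimal sketch of that assumed setup:

loss = tf.keras.losses.SparseCategoricalCrossentropy()
optim = tf.keras.optimizers.Adam()
epochs = 10   # arbitrary choice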

The hand-written loop above and the train_step in the class below do the same thing.

class MyFashionMNISTModel(tf.keras.Model):
    def __init__(self, **kwargs):
        super(MyFashionMNISTModel, self).__init__(**kwargs)
        self.flatten = tf.keras.layers.Flatten()
        self.dense1 = tf.keras.layers.Dense(64, activation="swish")
        self.bn1 = tf.keras.layers.BatchNormalization()
        self.dense2 = tf.keras.layers.Dense(20, activation="swish")
        self.bn2 = tf.keras.layers.BatchNormalization()
        self.dense3 = tf.keras.layers.Dense(10, activation="softmax")

        # for-loop variant: create 32 repeated blocks up front
        self.skip_connection_layers = []
        for _ in range(32):
            dropout = tf.keras.layers.Dropout(0.5)
            dense = tf.keras.layers.Dense(128)
            bn = tf.keras.layers.BatchNormalization()
            activation = tf.keras.layers.Activation('swish')
            self.skip_connection_layers.append([dropout, dense, bn, activation])

    def call(self, X):
        H = self.flatten(X)
        H = self.dense1(H)
        H = self.bn1(H)
        H = self.dense2(H)
        H = self.bn2(H)

        # for-loop variant: apply the repeated blocks before the output layer
        # (in the original draft this loop ran after Y was computed, so it
        # never affected the output)
        for drop, dense, bn, activation in self.skip_connection_layers:
            H1 = drop(H)
            H1 = dense(H1)
            H1 = bn(H1)
            H = activation(H1)

        Y = self.dense3(H)
        return Y
    
    
    def train_step(self, batch):
        x_batch, y_batch = batch

        with tf.GradientTape() as tape:
            y_pred = self(x_batch, training=True)
            loss = self.compiled_loss(y_batch, y_pred)

        grad = tape.gradient(loss, self.trainable_weights)
        self.optimizer.apply_gradients(zip(grad, self.trainable_weights))

        self.compiled_metrics.update_state(y_batch, y_pred)
        return {m.name: m.result() for m in self.metrics}
    
    
    
    def test_step(self, batch):
        x_batch, y_batch = batch
        y_pred = self(x_batch, training=False)

        self.compiled_metrics.update_state(y_batch, y_pred)
        return {m.name: m.result() for m in self.metrics}
    
model = MyFashionMNISTModel()
model.compile(optimizer='adam', loss='sparse_categorical_crossentropy', metrics='accuracy')
model.build(input_shape=[None, 28, 28])
model.summary()
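
Because fit() and evaluate() route each batch through the custom train_step and test_step, the subclassed model is used like any other; a minimal sketch, again assuming Fashion-MNIST:

(x_train, y_train), (x_test, y_test) = tf.keras.datasets.fashion_mnist.load_data()
x_train, x_test = x_train / 255.0, x_test / 255.0

model.fit(x_train, y_train, epochs=5)   # each batch goes through train_step
model.evaluate(x_test, y_test)          # each batch goes through test_step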

<Output> (summary printed with the for-loop layers excluded)
Model: "my_fashion_mnist_model_2"
_________________________________________________________________
 Layer (type)                Output Shape              Param #   
=================================================================
 flatten_9 (Flatten)         multiple                  0         
                                                                 
 dense_29 (Dense)            multiple                  50240     
                                                                 
 batch_normalization_17 (Ba  multiple                  256       
 tchNormalization)                                               
                                                                 
 dense_30 (Dense)            multiple                  1300      
                                                                 
 batch_normalization_18 (Ba  multiple                  80        
 tchNormalization)                                               
                                                                 
 dense_31 (Dense)            multiple                  210       
                                                                 
=================================================================
Total params: 52,086
Trainable params: 51,918
Non-trainable params: 168
_________________________________________________________________
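
(Output Shape reads "multiple" because Keras cannot statically trace per-layer output shapes for a subclassed model the way it can for a functional one.)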