Demo

Forward propagation for a 3-layer network (LINEAR -> RELU -> LINEAR -> RELU -> LINEAR -> SIGMOID), with inverted dropout applied to the two hidden layers: each hidden unit is kept with probability keep_prob, and the surviving activations are rescaled by 1/keep_prob.

import numpy as np

# Standard activations, defined here so the snippet is self-contained
def relu(Z):
    return np.maximum(0, Z)

def sigmoid(Z):
    return 1 / (1 + np.exp(-Z))

def forward_propagation_with_dropout(X, parameters, keep_prob=0.5):
    np.random.seed(1)

    # retrieve parameters
    W1 = parameters["W1"]
    b1 = parameters["b1"]
    W2 = parameters["W2"]
    b2 = parameters["b2"]
    W3 = parameters["W3"]
    b3 = parameters["b3"]

    # LINEAR -> RELU -> LINEAR -> RELU -> LINEAR -> SIGMOID
    Z1 = np.dot(W1, X) + b1
    A1 = relu(Z1)

    D1 = np.random.rand(A1.shape[0], A1.shape[1])    # Step 1: random matrix with values in [0, 1)
    D1 = D1 < keep_prob                              # Step 2: 0/1 mask, each unit kept with probability keep_prob
    A1 = np.multiply(D1, A1)                         # Step 3: shut down the dropped-out units of A1
    A1 = A1 / keep_prob                              # Step 4: scale the kept units (inverted dropout)

    Z2 = np.dot(W2, A1) + b2
    A2 = relu(Z2)

    D2 = np.random.rand(A2.shape[0], A2.shape[1])    # same four dropout steps for the second hidden layer
    D2 = D2 < keep_prob
    A2 = np.multiply(D2, A2)
    A2 = A2 / keep_prob

    Z3 = np.dot(W3, A2) + b3
    A3 = sigmoid(Z3)                                 # no dropout on the output layer

    # cache everything needed for backward propagation, including the dropout masks D1 and D2
    cache = (Z1, D1, A1, W1, b1, Z2, D2, A2, W2, b2, Z3, A3, W3, b3)
    return A3, cache
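
As a quick check, here is a minimal usage sketch that calls forward_propagation_with_dropout on a tiny randomly initialized 3-layer network. The layer sizes (2 -> 4 -> 3 -> 1) and the init_params helper are illustrative assumptions, not part of the original code; they only mirror the "W1"/"b1" ... "W3"/"b3" keys the function expects.

# Hypothetical usage sketch (layer sizes are assumptions, not from the original post)
import numpy as np

def init_params(layer_dims, seed=3):
    # Illustrative helper: small random weights, zero biases, keyed "W1"/"b1", ...
    np.random.seed(seed)
    params = {}
    for l in range(1, len(layer_dims)):
        params["W" + str(l)] = np.random.randn(layer_dims[l], layer_dims[l - 1]) * 0.01
        params["b" + str(l)] = np.zeros((layer_dims[l], 1))
    return params

X = np.random.randn(2, 5)                  # 5 examples with 2 features each (columns are examples)
parameters = init_params([2, 4, 3, 1])
A3, cache = forward_propagation_with_dropout(X, parameters, keep_prob=0.8)
print(A3.shape)                            # (1, 5): one sigmoid output per example

Note that dropout is only applied during training; at test time you run the plain forward pass with no masking (equivalently keep_prob = 1). The division by keep_prob is what keeps the expected value of the activations unchanged, which is why this variant is called inverted dropout.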