Andrew Ng's Deep Learning Specialization - Course 1, Week 3 Programming Assignment
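
For reference, the cost that the docstring below refers to as equation (13) is the standard binary cross-entropy averaged over the m training examples:

J = -\frac{1}{m}\sum_{i=1}^{m}\left( y^{(i)}\log a^{[2](i)} + \left(1 - y^{(i)}\right)\log\left(1 - a^{[2](i)}\right) \right)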

import numpy as np

# GRADED FUNCTION: compute_cost

def compute_cost(A2, Y, parameters):
    """
    Computes the cross-entropy cost given in equation (13)
    
    Arguments:
    A2 -- The sigmoid output of the second activation, of shape (1, number of examples)
    Y -- "true" labels vector of shape (1, number of examples)
    parameters -- python dictionary containing your parameters W1, b1, W2 and b2
    
    Returns:
    cost -- cross-entropy cost given equation (13)
    """
    
    m = Y.shape[1] # number of examples

    # Compute the cross-entropy cost
    ### START CODE HERE ### (≈ 2 lines of code)
    logprobs = np.dot(Y, np.log(A2.T)) + np.dot(1 - Y, np.log(1 - A2.T))
    cost = -np.sum(logprobs) / m    # np.sum collapses the (1, 1) result of the dot products to a scalar
    print(cost.dtype)               # debug: check the type of the result
    ### END CODE HERE ###
    
    cost = np.squeeze(cost)     # makes sure cost is the dimension we expect. 
                                # E.g., turns [[17]] into 17 
                
    assert(isinstance(cost, float))
    
    return cost
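
As a quick sanity check, here is a minimal usage sketch with made-up A2 and Y values (the parameters argument is not used inside this function, so an empty dict is enough):

import numpy as np

A2 = np.array([[0.8, 0.1, 0.6]])    # predicted probabilities, shape (1, 3)
Y  = np.array([[1, 0, 1]])          # true labels, shape (1, 3)

cost = compute_cost(A2, Y, parameters={})
print(cost, type(cost))             # roughly 0.28, a np.float64, which passes isinstance(cost, float)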
           

Question: why does assert(isinstance(cost, float)) raise an AssertionError when the cost line is changed to cost = logprobs / m (i.e., without np.sum)?
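
The difference comes down to the type that ends up in cost. A minimal sketch of the two variants (assuming NumPy, where np.float64 subclasses Python float but a 0-d ndarray does not):

import numpy as np

Y  = np.array([[1, 0, 1]])
A2 = np.array([[0.8, 0.1, 0.6]])
m  = Y.shape[1]

logprobs = np.dot(Y, np.log(A2.T)) + np.dot(1 - Y, np.log(1 - A2.T))
print(logprobs.shape)                               # (1, 1) -- a 2-D array, not a number

with_sum    = -np.sum(logprobs) / m                 # np.sum collapses (1, 1) to a np.float64 scalar
without_sum = logprobs / m                          # stays a (1, 1) ndarray

print(isinstance(np.squeeze(with_sum), float))      # True: np.float64 is a subclass of float
print(isinstance(np.squeeze(without_sum), float))   # False: squeeze only yields a 0-d ndarray

So with cost = logprobs / m, np.squeeze reduces the (1, 1) array to a 0-d array, which is still an ndarray rather than a Python float, and the assert fails. np.sum is what produces a true scalar (the logprobs / m version also drops the minus sign, but that affects the value, not the assert).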