Andrew Ng, Coursera Machine Learning Specialization, Machine Learning: Advanced Learning Algorithms, Week 3 programming assignment

All Week 3 Jupyter notebook files for Andrew Ng's Coursera Machine Learning Specialization, Machine Learning: Advanced Learning Algorithms:

All Week 3 Python files for Andrew Ng's Machine Learning Specialization, Advanced Learning Algorithms

This week's assignment

Exercise 1

# UNQ_C1
# GRADED CELL: eval_mse
def eval_mse(y, yhat):
    """ 
    Calculate the mean squared error on a data set.
    Args:
      y    : (ndarray  Shape (m,) or (m,1))  target value of each example
      yhat : (ndarray  Shape (m,) or (m,1))  predicted value of each example
    Returns:
      err: (scalar)             
    """
    m = len(y)
    err = 0.0
    for i in range(m):
    ### START CODE HERE ### 
        err += (y[i] - yhat[i])**2
    err = err / (2 * m)
    ### END CODE HERE ### 
    
    return(err)
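
As a quick, ungraded sanity check, eval_mse can be run on a pair of small made-up arrays; with the 1/(2m) convention used above, the expected value is (0.25 + 0 + 1) / 6 ≈ 0.2083.

import numpy as np

# Made-up example arrays, not data from the assignment
y_check    = np.array([1.0, 2.0, 3.0])
yhat_check = np.array([1.5, 2.0, 2.0])
print(eval_mse(y_check, yhat_check))   # expected ≈ 0.2083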

Exercise 2

# UNQ_C2
# GRADED CELL: eval_cat_err
def eval_cat_err(y, yhat):
    """ 
    Calculate the categorization error
    Args:
      y    : (ndarray  Shape (m,) or (m,1))  target value of each example
      yhat : (ndarray  Shape (m,) or (m,1))  predicted value of each example
    Returns:
      cerr: (scalar)             
    """
    m = len(y)
    incorrect = 0
    for i in range(m):
    ### START CODE HERE ### 
        if y[i] != yhat[i]:
            incorrect += 1
    cerr = incorrect / m
        
    ### END CODE HERE ### 
    
    return(cerr)
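
A similar ungraded check for eval_cat_err: with one of four made-up labels predicted incorrectly, the expected error is 1/4 = 0.25.

import numpy as np

# Made-up example labels, not data from the assignment
y_check    = np.array([1, 2, 0, 3])
yhat_check = np.array([1, 2, 1, 3])
print(eval_cat_err(y_check, yhat_check))   # expected 0.25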

Exercise 3

# UNQ_C3
# GRADED CELL: model
import logging
logging.getLogger("tensorflow").setLevel(logging.ERROR)

# These imports are assumed to be in the notebook's setup cell; repeated here
# so the cell is self-contained
import tensorflow as tf
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense

tf.random.set_seed(1234)
model = Sequential(
    [
        ### START CODE HERE ### 
#         tf.keras.Input(shape=(2,)),
        Dense(120,activation='relu',name='layer1'),
        Dense(40,activation='relu',name='layer2'),
        Dense(6,activation='linear',name='layer3')
  
        ### END CODE HERE ### 

    ], name="Complex"
)
model.compile(
    ### START CODE HERE ### 
    loss= tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True),
    optimizer=tf.keras.optimizers.Adam(0.01),
    ### END CODE HERE ### 
)
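
Because the output layer is linear and the loss uses from_logits=True, the model produces logits rather than probabilities; class predictions are the argmax of the logits. Below is an illustrative, non-graded training and evaluation sketch, assuming X_train and y_train are the arrays prepared earlier in the notebook; the epoch count is only an example.

import numpy as np

# Illustrative sketch, not a graded cell. X_train / y_train are assumed to be
# the training arrays prepared earlier in the notebook.
model.fit(X_train, y_train, epochs=1000)

# The model outputs logits; take the argmax for class predictions
# (apply tf.nn.softmax to the logits only if probabilities are needed).
yhat_train = np.argmax(model.predict(X_train), axis=1)
print("training categorization error:", eval_cat_err(y_train, yhat_train))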

Exercise 4

# UNQ_C4
# GRADED CELL: model_s

tf.random.set_seed(1234)
model_s = Sequential(
    [
        ### START CODE HERE ### 
        Dense(6,activation='relu',name='layer1'),
        Dense(6,activation='linear',name='layer2')   
        ### END CODE HERE ### 
    ], name = "Simple"
)
model_s.compile(
    ### START CODE HERE ### 
    loss = tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True),
    optimizer = tf.keras.optimizers.Adam(0.01),
    ### END CODE HERE ### 
)
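
Training the simple model the same way makes it easy to compare the two networks on the training and cross-validation splits. The sketch below is not a graded cell; X_train, y_train, X_cv and y_cv are assumed to be the split names used earlier in the notebook.

import numpy as np

# Illustrative comparison, not a graded cell; split names are assumed.
model_s.fit(X_train, y_train, epochs=1000)

for label, mdl in [("Complex", model), ("Simple", model_s)]:
    train_err = eval_cat_err(y_train, np.argmax(mdl.predict(X_train), axis=1))
    cv_err    = eval_cat_err(y_cv,    np.argmax(mdl.predict(X_cv),    axis=1))
    print(f"{label}: train error {train_err:.3f}, cv error {cv_err:.3f}")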

Exercise 5

# UNQ_C5
# GRADED CELL: model_r

tf.random.set_seed(1234)
model_r = Sequential(
    [
        ### START CODE HERE ### 
        Dense(120,activation='relu',kernel_regularizer=tf.keras.regularizers.l2(0.1),name='layer1'),
        Dense(40,activation='relu',kernel_regularizer=tf.keras.regularizers.l2(0.1),name='layer2'), 
        Dense(6,activation='linear',name='layer3')
        ### END CODE HERE ### 
    ], name="ComplexRegularized"
)
model_r.compile(
    ### START CODE HERE ### 
    loss = tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True),
    optimizer = tf.keras.optimizers.Adam(0.01),
    ### END CODE HERE ### 
)
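
The L2 penalty (λ = 0.1 on both hidden layers) shrinks the hidden-layer weights, which typically narrows the gap between training and cross-validation error relative to the unregularized complex model. An illustrative evaluation, with the same assumed split names as above:

import numpy as np

# Illustrative sketch, not a graded cell; split names are assumed as above.
model_r.fit(X_train, y_train, epochs=1000)
train_err_r = eval_cat_err(y_train, np.argmax(model_r.predict(X_train), axis=1))
cv_err_r    = eval_cat_err(y_cv,    np.argmax(model_r.predict(X_cv),    axis=1))
print(f"Regularized: train error {train_err_r:.3f}, cv error {cv_err_r:.3f}")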