Neural Networks: A Simple Logistic Regression Implementation

Import the required libraries

import pandas as pd
import numpy as np
import matplotlib.pyplot as plt

from sklearn.metrics import mean_squared_error
from sklearn.model_selection import train_test_split

Initialize the parameters

def init_(dim):
    # zero-initialize the weight vector (dim, 1) and the scalar bias
    w = np.zeros((dim, 1))
    b = 0
    return w, b
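Because the logistic-regression cost is convex, zero initialization is safe here: there is no symmetry to break, unlike in a multi-layer network. A quick sanity check of the shapes:

w, b = init_(5)
print(w.shape, b)  # (5, 1) 0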

The activation function

def sigmoid(z):
    # squash any real value into (0, 1)
    return 1 / (1 + np.exp(-z))
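For large negative z, np.exp(-z) can overflow and trigger a RuntimeWarning. A minimal sketch of a numerically stable variant (my own addition, with the hypothetical name stable_sigmoid), assuming plain NumPy array inputs:

def stable_sigmoid(z):
    # evaluate the sigmoid without ever exponentiating a large positive number
    z = np.asarray(z, dtype=float)
    out = np.empty_like(z)
    pos = z >= 0
    out[pos] = 1 / (1 + np.exp(-z[pos]))   # exp(-z) <= 1 here
    ez = np.exp(z[~pos])                   # exp(z) < 1 here
    out[~pos] = ez / (1 + ez)
    return out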

Forward and backward propagation

def propagate(x, y, w, b):

    m = x.shape[1]  # number of samples (one column per sample)

    A = sigmoid(np.dot(w.T, x) + b)  # forward pass: activations, shape (1, m)
    cost = (-1 / m) * np.sum(y * np.log(A) + (1 - y) * np.log(1 - A))  # cross-entropy cost

    dw = (1 / m) * np.dot(x, (A - y).T)  # gradient of the cost w.r.t. w
    db = (1 / m) * np.sum(A - y)         # gradient of the cost w.r.t. b

    cost = np.squeeze(cost)  # make sure the cost is a scalar

    grads = {'dw': dw, 'db': db}

    return grads, cost
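The original comments pointed at formulas from a course video; written out, the quantities computed above are (with X the (n, m) input matrix and Y the (1, m) label row):

A = σ(wᵀX + b)                                                (forward pass)
J = -(1/m) · Σᵢ [ y⁽ⁱ⁾ · log a⁽ⁱ⁾ + (1 − y⁽ⁱ⁾) · log(1 − a⁽ⁱ⁾) ]   (cross-entropy cost)
∂J/∂w = (1/m) · X (A − Y)ᵀ
∂J/∂b = (1/m) · Σᵢ (a⁽ⁱ⁾ − y⁽ⁱ⁾)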

The optimizer (gradient descent)

def optimizer(x, y, w, b, num_iterations, learning_rate, print_cost=False):
    costs = []

    for i in range(num_iterations):
        grads, cost = propagate(x, y, w, b)

        dw = grads['dw']
        db = grads['db']

        # gradient-descent update
        w = w - learning_rate * dw
        b = b - learning_rate * db

        # record (and optionally report) the cost every 100 iterations
        if i % 100 == 0:
            costs.append(cost)
            if print_cost:
                print("Iteration: %i, cost: %f" % (i, cost))

    params = {"w": w, "b": b}

    return params, costs
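matplotlib is imported above but never used. A small sketch (my own addition, assuming x, y, w and b have been prepared as in the test run further down) that plots the recorded costs against the iteration count:

params, costs = optimizer(x, y, w, b, num_iterations=2000, learning_rate=0.005)
plt.plot(np.arange(len(costs)) * 100, costs)  # costs were sampled every 100 iterations
plt.xlabel('iteration')
plt.ylabel('cost')
plt.title('Learning curve')
plt.show()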

The prediction function

def predict(x, w, b):
    # predicted probability P(y=1|x) for each sample, returned as a flat array
    A = sigmoid(np.dot(w.T, x) + b)
    return A.ravel()
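predict returns raw probabilities rather than hard labels. To recover 0/1 class predictions, threshold at 0.5 (a one-line addition, assuming x, w and b are already defined):

y_prob = predict(x, w, b)            # probabilities in (0, 1)
y_hat = (y_prob > 0.5).astype(int)   # class 1 wherever P(y=1|x) exceeds 0.5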

Putting it all together

def model(x_train, x_test, y_train, y_test, num_iterations, learning_rate, print_cost=False):
    m = x_train.shape[0]  # number of features
    w, b = init_(m)
    params, costs = optimizer(x_train, y_train, w, b, num_iterations=num_iterations,
                              learning_rate=learning_rate, print_cost=print_cost)
    w = params['w']
    b = params['b']
    y_pred_train = predict(x_train, w, b)
    y_pred_test = predict(x_test, w, b)

    # mean squared error between labels and predicted probabilities (lower is better)
    print("Training-set MSE: {}".format(mean_squared_error(y_train.ravel(), y_pred_train)))
    print("Test-set MSE: {}".format(mean_squared_error(y_test.ravel(), y_pred_test)))

    return y_pred_train, y_pred_test
Test run

x = np.random.randint(0, 1, size=(1000, 5))  # note: randint(0, 1) can only return 0, so every feature is zero
y = np.random.rand(1000)                     # continuous values in [0, 1), not 0/1 class labels
x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=0.2, random_state=0)
x_train = x_train.T  # transpose to (features, samples); reshape(5, -1) would interleave different samples
x_test = x_test.T
y_train = y_train.reshape(1, -1)
y_test = y_test.reshape(1, -1)
print(x_train.shape)
print(x_test.shape)
print(y_train.shape)
print(y_test.shape)
(5, 800)
(5, 200)
(1, 800)
(1, 200)
y_pred_train, y_pred_test = model(x_train, x_test, y_train, y_test, num_iterations=20000, learning_rate=0.005, print_cost=True)
Iteration: 0, cost: 0.693147
Iteration: 100, cost: 0.693143
Iteration: 200, cost: 0.693139
Iteration: 300, cost: 0.693136
Iteration: 400, cost: 0.693134
Iteration: 500, cost: 0.693132
...
Iteration: 19900, cost: 0.693126
(the cost plateaus at 0.693126 from roughly iteration 1600 onward)
Training-set MSE: 0.08720114488576995
Test-set MSE: 0.08409814522529904
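These are mean-squared errors, not accuracies, and they mostly reflect the data: because every feature is zero, w receives no gradient and only the bias b is learned. Gradient descent drives sigmoid(b) toward the mean of the training labels (≈ 0.4968), so the cost plateaus near ln 2 ≈ 0.693 and the MSE is essentially the variance of the uniform labels (1/12 ≈ 0.083). Pairing the test labels with the predictions makes the constant output visible: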
list(zip(y_test.ravel(), y_pred_test))
[(0.8992453324076225, 0.4967528028231265),
 (0.3851013295766984, 0.4967528028231265),
 (0.40643121428573037, 0.4967528028231265),
 (0.38027498582439323, 0.4967528028231265),
 (0.8104738960767541, 0.4967528028231265),
 ...
 (0.9729752434680449, 0.4967528028231265)]
(all 200 test predictions are the same constant ≈ 0.4968; the full list is elided)
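For the model to actually learn something, the features must carry information about genuinely binary labels. A sketch of a more meaningful smoke test (my own variation on the run above; true_w is an arbitrary hypothetical weight vector, not part of the original post):

x = np.random.randint(0, 2, size=(1000, 5)).astype(float)  # binary features
true_w = np.array([1.5, -2.0, 0.5, 3.0, -1.0])             # hypothetical ground-truth weights
y = (sigmoid(x.dot(true_w)) > 0.5).astype(float)           # binary labels derived from the features
x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=0.2, random_state=0)
y_pred_train, y_pred_test = model(x_train.T, x_test.T, y_train.reshape(1, -1), y_test.reshape(1, -1),
                                  num_iterations=20000, learning_rate=0.1, print_cost=False)

With linearly separable labels like these, the cost should fall well below ln 2 and the test MSE should drop far below the ≈ 0.08 seen above.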
posted @ 2021-04-24 20:22 魏宝航