Loading

逻辑回归的参数学习 (Parameter learning for logistic regression)

from sklearn.datasets import load_breast_cancer
from sklearn.feature_selection import SelectFromModel
from sklearn.linear_model import LogisticRegression as LR
from sklearn.model_selection import cross_val_score
import numpy as np
import matplotlib.pyplot as plt
# Load the breast-cancer dataset (569 samples, 30 features) and get a
# baseline 5-fold CV accuracy for an L2-regularised logistic regression
# trained on all features.
cancer = load_breast_cancer()
x = cancer.data
y = cancer.target
print(x.shape)
# FIX: the mean CV score was computed but discarded when run as a plain
# script (it only auto-displays in a notebook cell) — print it, matching
# the print() style used elsewhere in this file.
print(
    cross_val_score(
        LR(penalty="l2", solver="liblinear", random_state=420), x, y, cv=5
    ).mean()
)
(569, 30)





0.9508150908244062
# Embedded feature selection: SelectFromModel keeps features whose
# per-feature coefficient importance (norm_order=1, i.e. |coef|) exceeds
# its default threshold (the mean importance), then re-score with CV.
lr_ = LR(penalty="l2", solver="liblinear", random_state=420)
x_embeded = SelectFromModel(lr_, norm_order=1).fit_transform(x, y)
print(x_embeded.shape)
# FIX: print the CV score so it is visible outside a notebook instead of
# being silently discarded.
print(cross_val_score(lr_, x_embeded, y, cv=5).mean())
(569, 9)





0.9367644775655954
# Sweep SelectFromModel's threshold from 0 up to the largest |coefficient|
# and record the CV score of the selected feature subset at each step.
threshold = np.linspace(0, abs(lr_.fit(x, y).coef_).max(), 20)
# FIX: the full-feature CV score does not depend on the threshold, so the
# original re-ran the same 5-fold CV 20 times — compute it once. The
# result is identical because cross_val_score with a fixed cv and
# random_state is deterministic.
full_score = cross_val_score(lr_, x, y, cv=5).mean()
full = [full_score] * len(threshold)
fsx = []
# FIX: drop the redundant manual counter `k` — the loop variable `i` is
# already the current threshold value (threshold[k] == i).
for i in threshold:
    x_embeded = SelectFromModel(lr_, threshold=i, norm_order=1).fit_transform(x, y)
    fsx.append(cross_val_score(lr_, x_embeded, y, cv=5).mean())
    print(i, x_embeded.shape[1])
0.0 30
0.10962887162000101 17
0.21925774324000202 12
0.32888661486000303 11
0.43851548648000405 8
0.5481443581000051 8
0.6577732297200061 6
0.7674021013400071 5
0.8770309729600081 5
0.9866598445800091 5
1.0962887162000101 5
1.2059175878200112 4
1.3155464594400121 2
1.425175331060013 2
1.5348042026800142 2
1.6444330743000153 1
1.7540619459200162 1
1.863690817540017 1
1.9733196891600182 1
2.0829485607800193 1
# Plot full-feature vs. selected-feature CV scores across the thresholds.
plt.figure(figsize=[20, 5])
for scores, tag in ((full, "full"), (fsx, "fsx")):
    plt.plot(threshold, scores, label=tag)
plt.legend()
plt.xticks(threshold, rotation=20)
plt.show()


# Coarse grid search over the regularisation strength C: at each value,
# compare the full feature set against the embedded-selection subset.
C = np.arange(0.01, 10.01, 0.5)
full, fsx = [], []
for c in C:
    lr_ = LR(penalty="l2", solver="liblinear", C=c, random_state=420)
    full.append(cross_val_score(lr_, x, y, cv=5).mean())
    x_embeded = SelectFromModel(lr_, norm_order=1).fit_transform(x, y)
    fsx.append(cross_val_score(lr_, x_embeded, y, cv=5).mean())
# Report the best selected-subset score and the C that produced it.
best = max(fsx)
print(best, C[fsx.index(best)])
0.9543393882937432 4.01
# Visualise both CV-score curves over the coarse C grid.
plt.figure(figsize=[20, 5])
f_list = [full, fsx]
f_name = ["full", "fsx"]
for scores, tag in zip(f_list, f_name):
    plt.plot(C, scores, label=tag)
plt.xticks(C)
plt.legend()
plt.show()


# Fine grid search over C around the coarse optimum (3.51 to 4.51).
C = np.arange(3.51, 4.51, 0.05)

full, fsx = [], []
for c in C:
    lr_ = LR(penalty="l2", solver="liblinear", C=c, random_state=420)
    full.append(cross_val_score(lr_, x, y, cv=5).mean())
    x_embeded = SelectFromModel(lr_, norm_order=1).fit_transform(x, y)
    fsx.append(cross_val_score(lr_, x_embeded, y, cv=5).mean())
# Report the best selected-subset score and the C that produced it.
best = max(fsx)
print(best, C[fsx.index(best)])

# Visualise both CV-score curves over the fine C grid.
plt.figure(figsize=[20, 5])
f_list = [full, fsx]
f_name = ["full", "fsx"]
for scores, tag in zip(f_list, f_name):
    plt.plot(C, scores, label=tag)
plt.xticks(C)
plt.legend()
plt.show()
0.9543393882937432 3.8599999999999985

# Final check at the tuned C=4.01: CV accuracy on all 30 features.
lr_ = LR(penalty="l2", solver="liblinear", C=4.01, random_state=420)
score_full = cross_val_score(lr_, x, y, cv=5).mean()
print(score_full, x.shape[1])
0.9543238627542309 30
# Final check at the tuned C=4.01: CV accuracy on the embedded-selection
# subset (same score as the full model with far fewer features).
lr_ = LR(penalty="l2", solver="liblinear", C=4.01, random_state=420)
x_embeded = SelectFromModel(lr_, norm_order=1).fit_transform(x, y)
score_fsx = cross_val_score(lr_, x_embeded, y, cv=5).mean()
print(score_fsx, x_embeded.shape[1])
0.9543393882937432 9

max_iter 学习曲线 (learning curve for max_iter)

from sklearn.model_selection import train_test_split
from sklearn.metrics import accuracy_score

# Hold out 30 samples and trace train/test accuracy as max_iter grows,
# to see how many liblinear iterations the model actually needs.
# NOTE(review): train_test_split has no random_state, so the split (and
# all downstream numbers, including the ConvergenceWarnings seen) differ
# on every run — consider random_state=420 for reproducibility, matching
# the rest of this file. Left unchanged here to preserve behavior.
x_train, x_test, y_train, y_test = train_test_split(
    cancer.data, cancer.target, test_size=30
)
train_list = []
test_list = []
for i in range(1, 201, 10):
    lr_ = LR(penalty="l2", solver="liblinear", random_state=420, max_iter=i)
    lr_.fit(x_train, y_train)
    # FIX: accuracy_score's signature is (y_true, y_pred); the original
    # passed them reversed. Accuracy is symmetric so the numbers are
    # unchanged, but the corrected order matches the sklearn API.
    train_list.append(accuracy_score(y_train, lr_.predict(x_train)))
    test_list.append(accuracy_score(y_test, lr_.predict(x_test)))
/Users/chenhao/anaconda3/envs/machine_learn/lib/python3.10/site-packages/sklearn/svm/_base.py:1244: ConvergenceWarning: Liblinear failed to converge, increase the number of iterations.
  warnings.warn(
/Users/chenhao/anaconda3/envs/machine_learn/lib/python3.10/site-packages/sklearn/svm/_base.py:1244: ConvergenceWarning: Liblinear failed to converge, increase the number of iterations.
  warnings.warn(
/Users/chenhao/anaconda3/envs/machine_learn/lib/python3.10/site-packages/sklearn/svm/_base.py:1244: ConvergenceWarning: Liblinear failed to converge, increase the number of iterations.
  warnings.warn(
# Learning curve: train/test accuracy vs. max_iter.
iters = range(1, 201, 10)
plt.figure(figsize=[20, 5])
for scores, tag in ((train_list, "train"), (test_list, "test")):
    plt.plot(iters, scores, label=tag)
plt.legend()
plt.xticks(iters)
plt.show()


# Refit with max_iter=30 and inspect how many iterations liblinear
# actually used before stopping (n_iter_ is per-class for liblinear).
lr_ = LR(penalty="l2", solver="liblinear", random_state=420, max_iter=30).fit(
    x_train, y_train
)
# FIX: the bare expression only auto-displays in a notebook; print it so
# the iteration count is also visible when run as a script.
print(lr_.n_iter_)
array([24], dtype=int32)

multi_class

from sklearn.datasets import load_iris

# Compare multinomial (softmax) vs. one-vs-rest handling of the 3-class
# iris problem with the SAG solver (SAG may warn about non-convergence
# at max_iter=100, as seen in the captured output below).
iris = load_iris()
for i in ["multinomial", "ovr"]:
    lr_ = LR(
        penalty="l2", solver="sag", multi_class=i, random_state=42, max_iter=100
    ).fit(iris.data, iris.target)
    # FIX: corrected the typo "Traing" -> "Training" in the report string.
    print("Training score: %.3f (%s)" % (lr_.score(iris.data, iris.target), i))
Traing score: 0.987 (multinomial)
Traing score: 0.960 (ovr)


/Users/chenhao/anaconda3/envs/machine_learn/lib/python3.10/site-packages/sklearn/linear_model/_sag.py:350: ConvergenceWarning: The max_iter was reached which means the coef_ did not converge
  warnings.warn(
/Users/chenhao/anaconda3/envs/machine_learn/lib/python3.10/site-packages/sklearn/linear_model/_sag.py:350: ConvergenceWarning: The max_iter was reached which means the coef_ did not converge
  warnings.warn(
/Users/chenhao/anaconda3/envs/machine_learn/lib/python3.10/site-packages/sklearn/linear_model/_sag.py:350: ConvergenceWarning: The max_iter was reached which means the coef_ did not converge
  warnings.warn(
/Users/chenhao/anaconda3/envs/machine_learn/lib/python3.10/site-packages/sklearn/linear_model/_sag.py:350: ConvergenceWarning: The max_iter was reached which means the coef_ did not converge
  warnings.warn(
posted @ 2023-04-12 16:59  ThankCAT  阅读(51)  评论(0编辑  收藏  举报