Matlab regression

 

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
% linear regression
% least squares fitting a straight line
% Load sample data (provides vectors x and y) and show a scatter plot.
load LinearRegressData.mat
plot(x,y,'.')
% Design matrix [1 x]: column of ones for the intercept, x for the slope.
X=[ones(size(x)),x];
% Backslash solves the overdetermined system X*a = y in the least-squares sense.
a=X\y
 
% Console output: a(1) is the fitted intercept, a(2) the fitted slope.
a =
 
    5.2745
    0.9950
% Fresh workspace; fit the 'jump' data set (provides vectors x and y).
clear
load jump.mat
plot(x,y,'.')
% Straight-line least-squares fit via design matrix [1 x].
X=[ones(size(x)),x];
a=X\y
 
a =
 
   -0.0095
    0.7465
% Evaluate the fitted line (coefficients typed in by hand) and overlay it.
Y=-0.0095 + 0.7465*x;
plot(x,Y,'.')
% Cubic polynomial fit: design-matrix columns 1, x, x^2, x^3.
x1=x;
x2=x.^2;
x3=x.^3;
X=[ones(size(x)),x1,x2,x3];
a=X\y
 
a =
 
   -0.0226
    1.3906
    0.0389
   -1.0536
% Predicted values from the cubic fit, plotted over the raw data.
yp=X*a;
plot(x,y,'o',x,yp,'-')
% weighted factors in linear regression
hold on
% Down-weight the points with x>0 by a factor of 10: scale the matching
% rows of X and the matching entries of y identically before solving.
indx=find(x>0);
X(indx,:)=X(indx,:)/10;
yw=y;
yw(indx)=yw(indx)/10;
aw=X\yw;
% NOTE(review): X here has 4 columns (cubic fit) but only aw(1)+aw(2)*x is
% plotted, dropping the x^2 and x^3 terms -- confirm this is intentional.
ypw=aw(1)+aw(2)*x;
plot(x,ypw,'g-')
% Fresh workspace; regress O2 on DIC, then DIC on O2, to compare the two lines.
clear
load DIC_O2.mat
x=DIC;
y=O2;
plot(x,y,'.')
xlabel('DIC')
ylabel('O_2')
% Type-I (ordinary least squares) regression of y on x.
X=[ones(size(x)),x];
a=X\y;
hold on
yp=X*a;
plot(x,yp,'-')
% Reverse regression: x on y. In general this gives a different line
% than the forward fit, which motivates type-II regression later on.
X=[ones(size(x)),x];
Y=[ones(size(x)),y];
b=Y\x;
xp=Y*b;
plot(xp,y,'m-')

  

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
>> % type 2 regression
% Type-II (geometric-mean) slope: sqrt of the ratio of the forward slope
% a(2) (y on x) to the reverse slope b(2) (x on y), computed above.
>> c(2)=sqrt(a(2)/b(2))
 
c =
 
         0    0.3559
 
% sqrt returns the positive root; flip the sign to match the negative
% slope of the forward regression.
>> c(2)=-c(2)
 
c =
 
         0   -0.3559
 
% Intercept chosen so the line passes through the centroid (mean(x), mean(y)).
>> c(1)=mean(y)-c(2)*mean(x)
 
c =
 
  931.6370   -0.3559
>> yII=c(1)+c(2)*x;
>> plot(x,yII,'g-')
 
% multidimensional regression

  

  

 

 

1
2
function y=modfunc(a,x)
% Gaussian model for curve fitting (e.g. with nlinfit):
%   y = a(1) + a(2) * exp( -(x - a(3))^2 / (2*a(4)^2) )
% a(1): vertical offset, a(2): amplitude, a(3): center, a(4): width.
% x may be a scalar or an array; the result has the same shape as x.
z=(x-a(3))./a(4);          % standardized distance from the center
y=a(1)+a(2).*exp(-z.^2./2);

  

1
2
3
4
5
6
7
8
9
10
11
12
% Nonlinear least squares: fit the Gaussian model (modfunc) to gauss_data.mat.
>> load gauss_data.mat
>> plot(x,y,'o')
% Initial guess [offset, amplitude, center, width]; plot its curve for reference.
>> a0=[1,10,4,1];
>> hold on
>> ezplot(@(x)modfunc(a0,x),[0,10])
% Fit the model parameters with nlinfit (Statistics Toolbox).
>> a=nlinfit(x,y,@modfunc,a0)
 
a =
 
    0.6731   10.0709    4.4948    0.7142
 
>> ezplot(@(x)modfunc(a,x),[0,10])
% Rerun nlinfit capturing residuals R, Jacobian J, and covariance matrix CovB.
>> [a,R,J,CovB]=nlinfit(x,y,@modfunc,a0);

% Standard errors of the fitted parameters: sqrt of the covariance diagonal.
>> err=sqrt(diag(CovB))

err =

0.0828
0.2318
0.0216
0.0202

  

 

posted @   海边儿写代码的孩子  阅读(95)  评论(0编辑  收藏  举报
相关博文:
阅读排行:
· 地球OL攻略 —— 某应届生求职总结
· 周边上新:园子的第一款马克杯温暖上架
· Open-Sora 2.0 重磅开源!
· 提示词工程——AI应用必不可少的技术
· .NET周刊【3月第1期 2025-03-02】
点击右上角即可分享
微信分享提示