CurveFitting

Contents

1 n-th degree polynomial curve fitting
2 log-normal curve fitting
3 exponential curve fitting
4 gamma curve fitting
5 smooth curve
6 Adding two random variables via convolution in R
7 propagate::fitDistr
8 References


1 n-th degree polynomial curve fitting #

x <- c(32,64,96,118,126,144,152.5,158)
y <- c(99.5,104.8,108.5,100,86,64,35.3,15)
plot(x,y,pch=19)

# 4th-degree polynomial fit with raw (non-orthogonal) coefficients
xx <- seq(30,160, length=50)
fit <- lm(y~poly(x,4,raw=TRUE))
summary(fit)
lines(xx, predict(fit, data.frame(x=xx)), col="purple")

Result
> summary(fit)

Call:
lm(formula = y ~ poly(x, 4, raw = TRUE))

Residuals:
      1       2       3       4       5       6       7       8 
 0.1242 -0.6912  1.6355  1.4491 -5.1240  4.0360 -0.4692 -0.9604 

Coefficients:
                          Estimate Std. Error t value Pr(>|t|)
(Intercept)              7.474e+01  5.473e+01   1.366    0.265
poly(x, 4, raw = TRUE)1  1.426e+00  3.095e+00   0.461    0.676
poly(x, 4, raw = TRUE)2 -2.854e-02  5.729e-02  -0.498    0.653
poly(x, 4, raw = TRUE)3  2.878e-04  4.278e-04   0.673    0.549
poly(x, 4, raw = TRUE)4 -1.134e-06  1.113e-06  -1.018    0.384

Residual standard error: 4.04 on 3 degrees of freedom
Multiple R-squared:  0.9943,	Adjusted R-squared:  0.9868 
F-statistic: 131.5 on 4 and 3 DF,  p-value: 0.001064

  • y = -1.134e-06 * x^4 + 2.878e-04 * x^3 - 2.854e-02 * x^2 + 1.426e+00 * x + 7.474e+01
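
The same expression can be generated from the fitted coefficients instead of copying them by hand; a minimal sketch (poly_eval is an illustrative helper, not part of the original code):

co <- coef(fit)                      # intercept first, then x^1 .. x^4
cat("y =", sprintf("%.4g", co[1]),
    paste(sprintf("%+.4g*x^%d", co[-1], seq_along(co[-1])), collapse=" "), "\n")

# evaluate the polynomial directly; should agree with predict(fit, ...)
poly_eval <- function(x) colSums(co * t(outer(x, 0:(length(co)-1), `^`)))
max(abs(poly_eval(xx) - predict(fit, data.frame(x=xx))))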

2 log-normal curve fitting #

#data
val <- c(462604.2114,724545.9975,677961.1773,556621.1925,572068.4823,550408.0245,442748.9577,463050.285,485241.4509,406399.9335,394768.1661,474606.3792,408121.4988,254290.8273,234755.1933,201690.9834,
175243.2,171106.0665,228024.2613,199481.5251,141505.8969,148199.988,138756.7692,140078.0631,146512.2765,183688.7274,169084.7955,103705.1421,107350.3998,106554.8355,99301.161,105367.9611,148672.9455,
135343.5096,73087.3671,75604.4967,69973.8132,66367.3878,72191.2371,98143.1619,88506.7773,59722.086,59705.1591,54111.3165,49504.2126,50762.7774,68940.2766,67763.3592,50401.3383,50509.8696,51749.5161,
56774.814,56875.3797,65898.4131,58036.3659,45687.6945,44862.2592,41548.5696,37896.342,39405.8232,54220.8435,50035.9164,36221.5746,35067.5583,33099.0594,41550.561,33312.1392,42200.7531,36982.2894,26394.0156,
25939.9764,24163.6476,23641.9008,25495.8942,35859.1398,31292.8596,20783.2461,21233.3025,20770.302,20744.4138,19822.3956,31262.9886,28102.6368,17650.7739,17666.7051,17097.1647,16079.5593,17644.7997,26329.2951,
22879.1946,14800.0848,15000.2205,15227.2401,19307.6187,20345.1381,27219.4509,22791.573,14828.9601,26459.7318,65060.0337)
seq <- 1:length(val)
tmp <- data.frame(seq, val)

#log-normal density; the free parameter a in the fit below rescales the height,
#so only the shape parameters (mu, sd) matter here
fun <- function(x, mu, sd){
    a <- 1/(x * sd * sqrt(2*pi))
    b <- -(log(x) - mu)^2
    c <- 2 * sd^2
    return (a * exp(b/c))
}

#fitting: a scales the density height, d adds a constant baseline
library("minpack.lm")
fit <- nlsLM(val ~ a * fun(seq, b, c) + d,
             start = list(a=1, b=0.01, c=100, d=1), 
             control = nls.lm.control(maxiter = 1000),
             trace = TRUE,
             data=tmp)

#plotting
r <- sort(unique(tmp$seq))
xlim <- c(1,max(r))
ylim <- c(0, max(tmp$val) + max(tmp$val) * 0.3) 
plot(tmp$seq, tmp$val, cex=0.1, pch=19, xlim=xlim, ylim=ylim)
lines(r, predict(fit, data.frame(seq=r)), col="red")
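
The hand-written density can also be swapped for R's built-in dlnorm(); since the free parameter a already rescales the height, only the shape changes. A sketch under that assumption (the start values are rough guesses and may need tuning for convergence):

fit2 <- nlsLM(val ~ a * dlnorm(seq, meanlog=b, sdlog=c) + d,
              start = list(a=max(tmp$val), b=1, c=1, d=min(tmp$val)),
              control = nls.lm.control(maxiter = 1000),
              data = tmp)
lines(r, predict(fit2, data.frame(seq=r)), col="blue", lty=2)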

3 exponential curve fitting #

tmp <- textConnection( 
"lv    ex 
2   10
3   11
4     12
5 	13
6 	15
7 	17
8 	19
9 	21
10 	24
11 	26
12 	29
13 	33
14 	37
15 	41
16 	46
17 	51
18 	57
19 	64
20 	71
21 	79
22 	88
23 	99
24 	110
25 	123
26 	137
27 	153
28 	170
29 	190
30 	212
31 	237
32 	264
33 	294
34 	328
35 	366
36 	409
37 	456
38 	508
39 	567
40 	632
41 	705
42 	787
43 	878
44 	979
45 	1092
46 	1218
47 	1358
48 	1515
49 	1689
50 	1884") 
x <- read.table(tmp, header=TRUE) 
close.connection(tmp)
head(x)

xx <- x$lv
plot(x$lv,x$ex,pch=19)

# a log-linear regression supplies starting values for the exponential model
cc <- coef(lm(log(ex) ~ lv, data=x))
fit <- nls(ex ~ exp(a + b * lv), data=x, start = list(a = cc[1], b = cc[2]))
lines(xx, predict(fit, data.frame(lv=xx)), col="red")
summary(fit)
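
The fitted curve can also be reported in closed form from the coefficients; a short sketch:

# express the model as ex = exp(a) * exp(b*lv) and report the per-level growth factor
ab <- coef(fit)                     # ab[1] = a (log-scale intercept), ab[2] = b (rate)
cat(sprintf("ex ~= %.4f * exp(%.4f * lv)\n", exp(ab[1]), ab[2]))
cat(sprintf("growth factor per level: %.4f\n", exp(ab[2])))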

4 gamma curve fitting #

y <- c(0.0941,0.2372,0.2923,0.1750,0.0863,0.0419,0.0207,0.0128,0.0142,0.0071,0.0041,0.0031,0.0032,0.0057,0.0022,0.0001) 
x <- 1:length(y)


library(MASS) 
# MLE fit of a gamma distribution to the y values; used here only as starting values
est <- fitdistr(y,"gamma")$estimate
# scale=0.4495207, shape=7.1923309 

fit <- nls(y ~ dgamma(x, scale=a, shape=b), start=list(a=est[1], b=est[2]), trace=TRUE)
plot(x,y)
lines(x, predict(fit, data.frame(x=x)), col="red")
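
The y values here sum to 1, so they behave like a discretized density over x; a quick sanity check of the curve-fit estimates (a sketch):

co <- coef(fit)                             # co[1] = scale (a), co[2] = shape (b)
co
sum(dgamma(x, scale=co[1], shape=co[2]))    # should be close to sum(y) = 1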

5 smooth curve #

x <- 1:10
y <- c(2,4,6,8,7,12,14,16,18,20)
lo <- loess(y~x)
plot(x,y)
lines(predict(lo), col='red', lwd=2)
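
loess() uses a default span of 0.75; a smaller span follows the points more closely, and smooth.spline() is a common alternative. A sketch:

lo2 <- loess(y ~ x, span=0.6)             # tighter local fit
lines(x, predict(lo2), col='blue', lty=2)
sp <- smooth.spline(x, y)                 # cubic smoothing spline alternative
lines(sp, col='darkgreen', lty=3)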

6 Adding two random variables via convolution in R #

http://stackoverflow.com/questions/23569133/adding-two-random-variables-via-convolution-in-r
#install.packages("distr")
library(distr)
X <- rnorm(1000,1,0.5)
Y <- rlnorm(1000,1.5,0.75)
Z <- X + Y

N <- Norm(mean=1, sd=0.5)          # N is signature for normal dist
L <- Lnorm(meanlog=1.5,sdlog=0.75) # same for log-normal
conv <- convpow(L+N,1)             # object of class AbscontDistribution
f.Z  <- d(conv)                    # density function of the sum


hist(Z,freq=F,breaks=50, xlim=c(0,30))
z <- seq(0,50,0.01)
lines(z,f.Z(z),lty=2,col="red")
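
As a cross-check, a kernel density estimate of the simulated sum should roughly agree with the convolved density (a sketch):

lines(density(Z), col="blue", lty=3)
legend("topright", c("convolution", "kernel density of Z"),
       col=c("red", "blue"), lty=c(2, 3))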

7 propagate::fitDistr #

#install.packages("propagate")
library(propagate)
y <- c(0.0941,0.2372,0.2923,0.1750,0.0863,0.0419,0.0207,0.0128,0.0142,0.0071,0.0041,0.0031,0.0032,0.0057,0.0022,0.0001) 
fit.result <- fitDistr(y)
fit.result
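
The returned object (see the output below) carries the AIC ranking, the individual fits, and the best fit, so the winner can be pulled out directly; a sketch:

head(fit.result$aic)     # candidate distributions ranked by AIC (lowest first)
fit.result$bestfit       # Levenberg-Marquardt fit of the best distribution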

Result
> fit.result <- fitDistr(y)
Fitting Normal distribution...Done.
Fitting Skewed-normal distribution...Done.
Fitting Generalized normal distribution...........10.........20.......Done.
Fitting Log-normal distribution...Done.
Fitting Scaled/shifted t- distribution...Done.
Fitting Logistic distribution...Done.
Fitting Uniform distribution...Done.
Fitting Triangular distribution...Done.
Fitting Trapezoidal distribution...Done.
Fitting Curvilinear Trapezoidal distribution...Done.
Fitting Generalized Trapezoidal distribution...Done.
Fitting Gamma distribution...Done.
Fitting Cauchy distribution...Done.
Fitting Laplace distribution...Done.
Fitting Gumbel distribution...Done.
Fitting Johnson SU distribution...........10.........20.........30.........40.........50
.........60.........70.........80.Done.
Fitting Johnson SB distribution...........10.........20.........30.........40.........50
.........60.........70.........80.Done.
Fitting 3P Weibull distribution...........10.........20.......Done.
Fitting 4P Beta distribution...Done.
Fitting Arcsine distribution...Done.
Fitting von Mises distribution...Done.
> fit.result
$aic
              Distribution      AIC
17              Johnson SB 613.7082
18              3P Weibull 624.7589
3       Generalized normal 624.7920
16              Johnson SU 626.7920
4               Log-normal 627.1804
12                   Gamma 634.9023
14                 Laplace 639.8102
13                  Cauchy 640.2875
5        Scaled/shifted t- 642.1013
15                  Gumbel 642.8420
2            Skewed-normal 646.8412
6                 Logistic 647.8476
1                   Normal 652.1788
9              Trapezoidal 654.7066
20                 Arcsine 670.4624
19                 4P Beta 705.6771
11 Generalized Trapezoidal 836.8772
21               von Mises 882.3921
7                  Uniform 897.6248
8               Triangular 905.4962
10 Curvilinear Trapezoidal 911.1446

$fit
$fit$Normal
Nonlinear regression via the Levenberg-Marquardt algorithm
parameter estimates: 0.000717984960447366, 0.00725890392042258 
residual sum-of-squares: 197174
reason terminated: Relative error in the sum of squares is at most `ftol'.

$fit$`Skewed-normal`
Nonlinear regression via the Levenberg-Marquardt algorithm
parameter estimates: -0.00583038510142194, 0.0106443425236555, 8.50327745471171 
residual sum-of-squares: 174116
reason terminated: Relative error in the sum of squares is at most `ftol'.

$fit$`Generalized normal`
Nonlinear regression via the Levenberg-Marquardt algorithm
parameter estimates: 0.0818148807363509, 0.0385147298611958, -2.26477841096039 
residual sum-of-squares: 119822
reason terminated: Relative error in the sum of squares is at most `ftol'.

$fit$`Log-normal`
Nonlinear regression via the Levenberg-Marquardt algorithm
parameter estimates: -4.42473398922409, 2.05820723941306 
residual sum-of-squares: 129074
reason terminated: Relative error in the sum of squares is at most `ftol'.

$fit$`Scaled/shifted t-`
Nonlinear regression via the Levenberg-Marquardt algorithm
parameter estimates: 0.002960556811455, 0.00543082394110837, 0.847275538361585 
residual sum-of-squares: 160675
reason terminated: Relative error in the sum of squares is at most `ftol'.

$fit$Logistic
Nonlinear regression via the Levenberg-Marquardt algorithm
parameter estimates: 0.00106198716159507, 0.00448320432790849 
residual sum-of-squares: 183218
reason terminated: Relative error in the sum of squares is at most `ftol'.

$fit$Uniform
Nonlinear regression via the Levenberg-Marquardt algorithm
parameter estimates: 0.00249994429141502, 0.289899967178183 
residual sum-of-squares: 12634542
reason terminated: Relative error in the sum of squares is at most `ftol'.

$fit$Triangular
Nonlinear regression via the Levenberg-Marquardt algorithm
parameter estimates: -0.58442884603643, 0.276474920439136, 0.00750016053696837 
residual sum-of-squares: 13956564
reason terminated: Relative error between `par' and the solution is at most `ptol'.

$fit$Trapezoidal
Nonlinear regression via the Levenberg-Marquardt algorithm
parameter estimates: -0.267697695351802, 0.162169769399358, -0.118546092194162, 0.0169541854752057 
residual sum-of-squares: 192315
reason terminated: Relative error in the sum of squares is at most `ftol'.

$fit$`Curvilinear Trapezoidal`
Nonlinear regression via the Levenberg-Marquardt algorithm
parameter estimates: -3.51380272100041, 0.781019642275179, 0.778518731596065 
residual sum-of-squares: 15358740
reason terminated: Relative error in the sum of squares is at most `ftol'.

$fit$`Generalized Trapezoidal`
Nonlinear regression via the Levenberg-Marquardt algorithm
parameter estimates: -0.316014522465303, -0.0122579620659835, 0.00250001098860818, -0.0569154565231172, 16.9883350504257, -13.687268319418, 1.38700131982915 
residual sum-of-squares: 3808834
reason terminated: Relative error between `par' and the solution is at most `ptol'.

$fit$Gamma
Nonlinear regression via the Levenberg-Marquardt algorithm
parameter estimates: 0.365709818937871, 25.9401735353146 
residual sum-of-squares: 147123
reason terminated: Relative error in the sum of squares is at most `ftol'.

$fit$Cauchy
Nonlinear regression via the Levenberg-Marquardt algorithm
parameter estimates: 0.0027034462361882, 0.00566077284092268 
residual sum-of-squares: 161183
reason terminated: Relative error in the sum of squares is at most `ftol'.

$fit$Laplace
Nonlinear regression via the Levenberg-Marquardt algorithm
parameter estimates: 0.00208489337704117, 0.0115719008947262 
residual sum-of-squares: 159884
reason terminated: Relative error in the sum of squares is at most `ftol'.

$fit$Gumbel
Nonlinear regression via the Levenberg-Marquardt algorithm
parameter estimates: -0.000564496234569831, 0.00585195632924723 
residual sum-of-squares: 168315
reason terminated: Relative error in the sum of squares is at most `ftol'.

$fit$`Johnson SU`
Nonlinear regression via the Levenberg-Marquardt algorithm
parameter estimates: 0.00238945145777224, 2.41428652017113e-08, -6.58408257452447, 0.441518979946343 
residual sum-of-squares: 119822
reason terminated: Relative error in the sum of squares is at most `ftol'.

$fit$`Johnson SB`
Nonlinear regression via the Levenberg-Marquardt algorithm
parameter estimates: 0.00249961631424682, 0.290585603944236, 0.633747903543896, 0.35472053982219 
residual sum-of-squares: 95990
reason terminated: Relative error in the sum of squares is at most `ftol'.

$fit$`3P Weibull`
Nonlinear regression via the Levenberg-Marquardt algorithm
parameter estimates: 0.00206926024634554, 0.60287651598625, 0.0650136696113521 
residual sum-of-squares: 119755
reason terminated: Relative error in the sum of squares is at most `ftol'.

$fit$`4P Beta`
Nonlinear regression via the Levenberg-Marquardt algorithm
parameter estimates: 243.96530522719, 244.003863012748, -0.219805373171359, 0.221019706262809 
residual sum-of-squares: 456251
reason terminated: Relative error between `par' and the solution is at most `ptol'.

$fit$Arcsine
Nonlinear regression via the Levenberg-Marquardt algorithm
parameter estimates: 0.00241801362975452, 0.295326975917706 
residual sum-of-squares: 268803
reason terminated: Relative error in the sum of squares is at most `ftol'.

$fit$`von Mises`
Nonlinear regression via the Levenberg-Marquardt algorithm
parameter estimates: 0.00235128799831663, 314.160915522474 
residual sum-of-squares: 9759620
reason terminated: Number of iterations has reached `maxiter' == 1024.


$bestfit
Nonlinear regression via the Levenberg-Marquardt algorithm
parameter estimates: 0.00249961631424682, 0.290585603944236, 0.633747903543896, 0.35472053982219 
residual sum-of-squares: 95990
reason terminated: Relative error in the sum of squares is at most `ftol'.

$fitted
 [1] 62.219673 20.891649 12.605071  9.187138  7.285103  6.065145  5.213740  4.585130  4.101920  3.719054  3.408463
[12]  3.151718  2.936210  2.753014  2.595632  2.459222  2.340100  2.235416  2.142929  2.060858  1.987768  1.922492
[23]  1.864074  1.811724  1.764788  1.722718  1.685056  1.651422  1.621494  1.595006  1.571740  1.551516  1.534193
[34]  1.519663  1.507852  1.498715  1.492237  1.488439  1.487372  1.489126  1.493832  1.501671  1.512883  1.527777
[45]  1.546754  1.570328  1.599166  1.634138  1.676391  1.727475  1.789524  1.865572  1.960094  2.080031  2.236881
[56]  2.451540  2.767639  3.300214  4.355798

$residuals
 [1]  0.2803273  4.1083505 12.3949289 -9.1871379  5.2148973 -6.0651453 -5.2137405 -4.5851299  8.3980803 -3.7190542
[11] -3.4084626 -3.1517175 -2.9362096 -2.7530136 -2.5956321 -2.4592222 -2.3401002 10.2645843 10.3570709 -2.0608580
[21] -1.9877677 -1.9224916 -1.8640737 -1.8117243 -1.7647878 -1.7227176 -1.6850565 -1.6514218 -1.6214937 -1.5950062
[31] -1.5717396 -1.5515155 -1.5341925 -1.5196632 10.9921481 -1.4987145 -1.4922374 -1.4884391 -1.4873721 -1.4891258
[41] -1.4938321 -1.5016714 -1.5128828 -1.5277768 -1.5467535 -1.5703281 -1.5991664 10.8658625 -1.6763910 -1.7274750
[51] -1.7895244 -1.8655720 -1.9600941 -2.0800311 -2.2368812 -2.4515397 -2.7676387 -3.3002136  8.1442017

