library(ggplot2)

## Saturating-exponential model for the first data set
g <- function(x, a, b, c) a * (1 - exp(-(x - c) / abs(b)))

X1 <- c(129.08, 109.92, 85.83, 37.72)
Y1 <- c(0.7, 0.5, 0.39, -1.36)
dt1 <- data.frame(x1 = X1, y1 = Y1)

model1 <- nls(Y1 ~ g(X1, a, b, c),
              start = list(a = 0.5, b = 60, c = 50),
              control = nls.control(maxiter = 200))

ggplot(data = dt1, aes(x = x1, y = y1)) +
  theme_bw() +
  geom_point() +
  ## in current ggplot2, fitting arguments such as the nls starting values
  ## are passed through 'method.args' rather than directly to geom_smooth()
  geom_smooth(method = "nls", formula = y ~ g(x, a, b, c), se = FALSE,
              method.args = list(start = list(a = 0.5, b = 60, c = 50)))
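
## Illustrative check (not in the original paste): inspect the converged
## parameter estimates and evaluate the fitted curve on a finer grid of X1;
## 'grid1' is just an illustrative name.
summary(model1)
grid1 <- data.frame(X1 = seq(min(X1), max(X1), length.out = 100))
head(predict(model1, newdata = grid1))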

## Quadratic model for the second data set
f <- function(x, a, b, c) a * x^2 + b * x + c

X2 <- c(589.62, 457.92, 370.16, 295.98, 243.99, 199.07, 159.91, 142.63,
        124.15, 101.98, 87.93, 83.16, 82.2, 74.48, 47.68, 37.51, 31,
        27.9, 21.24, 18.28)
Y2 <- c(0.22, 0.37, 0.49, 0.65, 0.81, 0.83, 1, 0.81, 0.65, 0.44, 0.55, 0.63,
        0.65, 0.55, 0.37, 0.32, 0.27, 0.22, 0.17, 0.14)
dt2 <- data.frame(x2 = X2, y2 = Y2)

model2 <- nls(Y2 ~ f(X2, a, b, c),
              start = list(a = -1, b = 3, c = 0),
              control = nls.control(maxiter = 200))

ggplot(data = dt2, aes(x = x2, y = y2)) +
  theme_bw() +
  geom_point() +
  geom_smooth(method = "nls", formula = y ~ f(x, a, b, c), se = FALSE,
              method.args = list(start = list(a = -1, b = 3, c = 0)))

## nls2 and proto are used below: as.lm.nls() relies on proto::as.proto.list();
## the summary of the lm representation is taken after as.lm.nls() is defined.
library(nls2)
library(proto)

## Represent a fitted nls model as a linear model on its parameter gradient,
## so that predict.lm() can produce confidence intervals for the fitted curve.
as.lm.nls <- function(object, ...) {
  if (!inherits(object, "nls")) {
    w <- paste("expected object of class nls but got object of class:",
               paste(class(object), collapse = " "))
    warning(w)
  }

  gradient <- object$m$gradient()
  if (is.null(colnames(gradient))) {
    colnames(gradient) <- names(object$m$getPars())
  }

  response.name <- if (length(formula(object)) == 2) "0" else
    as.character(formula(object)[[2]])

  lhs <- object$m$lhs()
  L <- data.frame(lhs, gradient)
  names(L)[1] <- response.name

  fo <- sprintf("%s ~ %s - 1", response.name,
                paste(colnames(gradient), collapse = "+"))
  fo <- as.formula(fo, env = as.proto.list(L))

  ## fit lm on the gradient columns, offset by the nls fitted values
  do.call("lm", list(fo, offset = substitute(fitted(object))))
}

## Confidence band for model1 at the observed points, via the lm representation
summary(as.lm.nls(model1))
predCI <- predict(as.lm.nls(model1), interval = "confidence", level = 0.95)
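
## Illustrative sketch (not in the original paste): attach the confidence band
## from predCI to the first data set and draw it with geom_ribbon(); assumes
## model1, dt1 and predCI from above, and 'ciDF' is just an illustrative name.
ciDF <- cbind(dt1, as.data.frame(predCI))
ggplot(ciDF, aes(x = x1, y = y1)) +
  theme_bw() +
  geom_point() +
  geom_line(aes(y = fit)) +
  geom_ribbon(aes(ymin = lwr, ymax = upr), alpha = 0.2)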

## Monte Carlo prediction intervals for an nls fit: simulate parameter sets
## from the fit's variance-covariance matrix with mvrnorm() and propagate
## them through the right-hand side of the model formula.
predictNLS <- function(object, newdata, level = 0.95, nsim = 10000, ...)
{
  require(MASS, quietly = TRUE)

  ## get right-hand side of formula
  RHS <- as.list(object$call$formula)[[3]]
  EXPR <- as.expression(RHS)

  ## all variables in model
  VARS <- all.vars(EXPR)

  ## coefficients
  COEF <- coef(object)

  ## extract predictor variable
  predNAME <- setdiff(VARS, names(COEF))

  ## take fitted values, if 'newdata' is missing
  if (missing(newdata)) {
    newdata <- eval(object$data)[predNAME]
    colnames(newdata) <- predNAME
  }

  ## check that 'newdata' has same name as predVAR
  if (names(newdata)[1] != predNAME) stop("newdata should have name '", predNAME, "'!")

  ## get parameter coefficients
  COEF <- coef(object)

  ## get variance-covariance matrix
  VCOV <- vcov(object)

  ## augment variance-covariance matrix for 'mvrnorm'
  ## by adding a column/row for 'error in x'
  NCOL <- ncol(VCOV)
  ADD1 <- c(rep(0, NCOL))
  ADD1 <- matrix(ADD1, ncol = 1)
  colnames(ADD1) <- predNAME
  VCOV <- cbind(VCOV, ADD1)
  ADD2 <- c(rep(0, NCOL + 1))
  ADD2 <- matrix(ADD2, nrow = 1)
  rownames(ADD2) <- predNAME
  VCOV <- rbind(VCOV, ADD2)

  ## iterate over all entries in 'newdata' as in usual 'predict.' functions
  NR <- nrow(newdata)
  respVEC <- numeric(NR)
  seVEC <- numeric(NR)
  varPLACE <- ncol(VCOV)

  ## define counter function
  counter <- function(i) {
    if (i %% 10 == 0) cat(i) else cat(".")
    if (i %% 50 == 0) cat("\n")
    flush.console()
  }

  outMAT <- NULL

  for (i in 1:NR) {
    counter(i)

    ## get predictor values and optional errors
    predVAL <- newdata[i, 1]
    if (ncol(newdata) == 2) predERROR <- newdata[i, 2] else predERROR <- 0
    names(predVAL) <- predNAME
    names(predERROR) <- predNAME

    ## create mean vector for 'mvrnorm'
    MU <- c(COEF, predVAL)

    ## create variance-covariance matrix for 'mvrnorm'
    ## by putting error^2 in lower-right position of VCOV
    newVCOV <- VCOV
    newVCOV[varPLACE, varPLACE] <- predERROR^2

    ## create MC simulation matrix
    simMAT <- mvrnorm(n = nsim, mu = MU, Sigma = newVCOV, empirical = TRUE)

    ## evaluate expression on rows of simMAT
    EVAL <- try(eval(EXPR, envir = as.data.frame(simMAT)), silent = TRUE)
    if (inherits(EVAL, "try-error")) stop("There was an error evaluating the simulations!")

    ## collect statistics
    PRED <- data.frame(predVAL)
    colnames(PRED) <- predNAME
    FITTED <- predict(object, newdata = data.frame(PRED))
    MEAN.sim <- mean(EVAL, na.rm = TRUE)
    SD.sim <- sd(EVAL, na.rm = TRUE)
    MEDIAN.sim <- median(EVAL, na.rm = TRUE)
    MAD.sim <- mad(EVAL, na.rm = TRUE)
    QUANT <- quantile(EVAL, c((1 - level)/2, level + (1 - level)/2))
    RES <- c(FITTED, MEAN.sim, SD.sim, MEDIAN.sim, MAD.sim, QUANT[1], QUANT[2])
    outMAT <- rbind(outMAT, RES)
  }

  colnames(outMAT) <- c("fit", "mean", "sd", "median", "mad", names(QUANT[1]), names(QUANT[2]))
  rownames(outMAT) <- NULL

  cat("\n")

  return(outMAT)
}
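
## Illustrative usage (not in the original paste): Monte Carlo confidence
## band for model1 at a few new predictor values; the column name must match
## the predictor used in the nls call (here "X1"), and 'pred1' is just an
## illustrative name.
pred1 <- predictNLS(model1, newdata = data.frame(X1 = c(50, 80, 120)))
pred1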