CRAN Package Check Results for Package nls2

Last updated on 2024-07-17 08:55:18 CEST.

Flavor                              Version  Tinstall  Tcheck  Ttotal  Status  Flags
r-devel-linux-x86_64-debian-clang   0.3-4        2.74   34.11   36.85  OK
r-devel-linux-x86_64-debian-gcc     0.3-4        1.89   25.34   27.23  OK
r-devel-linux-x86_64-fedora-clang   0.3-4                       45.59  OK
r-devel-linux-x86_64-fedora-gcc     0.3-4                       42.65  OK
r-devel-windows-x86_64              0.3-4        5.00   55.00   60.00  OK
r-patched-linux-x86_64              0.3-3        2.51   27.37   29.88  OK
r-release-linux-x86_64              0.3-3        2.23   27.17   29.40  OK
r-release-macos-arm64               0.3-3                       16.00  OK
r-release-macos-x86_64              0.3-4                       36.00  ERROR
r-release-windows-x86_64            0.3-4        4.00   46.00   50.00  OK
r-oldrel-macos-arm64                0.3-3                       17.00  OK
r-oldrel-macos-x86_64               0.3-4                       35.00  ERROR
r-oldrel-windows-x86_64             0.3-4        5.00   50.00   55.00  OK

Check Details

Version: 0.3-4
Check: package dependencies
Result: NOTE
  Package suggested but not available for checking: ‘CPoptim’
Flavors: r-release-macos-x86_64, r-oldrel-macos-x86_64

Version: 0.3-4
Check: examples
Result: ERROR
  Running examples in ‘nls2-Ex.R’ failed
  The error most likely occurred in:

  > ### Name: nls2
  > ### Title: Nonlinear Least Squares with Brute Force
  > ### Aliases: nls2
  > ### Keywords: nonlinear regression models
  >
  > ### ** Examples
  >
  >
  > y <- c(44,36,31,39,38,26,37,33,34,48,25,22,44,5,9,13,17,15,21,10,16,22,
  + 13,20,9,15,14,21,23,23,32,29,20,26,31,4,20,25,24,32,23,33,34,23,28,30,10,29,
  + 40,10,8,12,13,14,56,47,44,37,27,17,32,31,26,23,31,34,37,32,26,37,28,38,35,27,
  + 34,35,32,27,22,23,13,28,13,22,45,33,46,37,21,28,38,21,18,21,18,24,18,23,22,
  + 38,40,52,31,38,15,21)
  >
  > x <- c(26.22,20.45,128.68,117.24,19.61,295.21,31.83,30.36,13.57,60.47,
  + 205.30,40.21,7.99,1.18,5.40,13.37,4.51,36.61,7.56,10.30,7.29,9.54,6.93,12.60,
  + 2.43,18.89,15.03,14.49,28.46,36.03,38.52,45.16,58.27,67.13,92.33,1.17,
  + 29.52,84.38,87.57,109.08,72.28,66.15,142.27,76.41,105.76,73.47,1.71,305.75,
  + 325.78,3.71,6.48,19.26,3.69,6.27,1689.67,95.23,13.47,8.60,96.00,436.97,
  + 472.78,441.01,467.24,1169.11,1309.10,1905.16,135.92,438.25,526.68,88.88,31.43,
  + 21.22,640.88,14.09,28.91,103.38,178.99,120.76,161.15,137.38,158.31,179.36,
  + 214.36,187.05,140.92,258.42,85.86,47.70,44.09,18.04,127.84,1694.32,34.27,
  + 75.19,54.39,79.88,63.84,82.24,88.23,202.66,148.93,641.76,20.45,145.31,
  + 27.52,30.70)
  >
  > ## Example 1
  > ## brute force followed by nls optimization
  >
  > fo <- y ~ Const + B * (x ^ A)
  >
  > # pass our own set of starting values
  > # returning result of brute force search as nls object
  > st1 <- expand.grid(Const = seq(-100, 100, len = 4),
  +   B = seq(-100, 100, len = 4), A = seq(-1, 1, len = 4))
  > mod1 <- nls2(fo, start = st1, algorithm = "brute-force")
  > mod1
  Nonlinear regression model
    model: y ~ Const + B * (x^A)
     data: parent.frame()
     Const        B        A
   33.3333 -33.3333  -0.3333
   residual sum-of-squares: 10078

  Number of iterations to convergence: 64
  Achieved convergence tolerance: NA
  > # use nls object mod1 just calculated as starting value for
  > # nls optimization. Same as: nls(fo, start = coef(mod1))
  > nls2(fo, start = mod1)
  Nonlinear regression model
    model: y ~ Const + B * (x^A)
     data: parent.frame()
     Const        B        A
   33.9291 -33.4595  -0.4464
   residual sum-of-squares: 8751

  Number of iterations to convergence: 3
  Achieved convergence tolerance: 3.025e-06
  >
  > ## Example 2
  >
  > # pass a 2-row data frame and let nls2 calculate grid
  > st2 <- data.frame(Const = c(-100, 100), B = c(-100, 100), A = c(-1, 1))
  > mod2 <- nls2(fo, start = st2, algorithm = "brute-force")
  > mod2
  Nonlinear regression model
    model: y ~ Const + B * (x^A)
     data: parent.frame()
     Const        B        A
   33.3333 -33.3333  -0.3333
   residual sum-of-squares: 10078

  Number of iterations to convergence: 64
  Achieved convergence tolerance: NA
  > # use nls object mod1 just calculated as starting value for
  > # nls optimization. Same as: nls(fo, start = coef(mod2))
  > nls2(fo, start = mod2)
  Nonlinear regression model
    model: y ~ Const + B * (x^A)
     data: parent.frame()
     Const        B        A
   33.9291 -33.4595  -0.4464
   residual sum-of-squares: 8751

  Number of iterations to convergence: 3
  Achieved convergence tolerance: 3.025e-06
  >
  > ## Example 3
  >
  > # Create same starting values as in Example 2
  > # running an nls optimization from each one and picking best.
  > # This one does an nls optimization for every random point
  > # generated whereas Example 2 only does a single nls optimization
  > nls2(fo, start = st2, control = nls.control(warnOnly = TRUE))
  Warning in (function (formula, data = parent.frame(), start, control = nls.control(), : number of iterations exceeded maximum of 50
  Warning in (function (formula, data = parent.frame(), start, control = nls.control(), : number of iterations exceeded maximum of 50
  Warning in (function (formula, data = parent.frame(), start, control = nls.control(), : number of iterations exceeded maximum of 50
  Warning in (function (formula, data = parent.frame(), start, control = nls.control(), : number of iterations exceeded maximum of 50
  Warning in (function (formula, data = parent.frame(), start, control = nls.control(), : step factor 0.000488281 reduced below 'minFactor' of 0.000976562
  Warning in (function (formula, data = parent.frame(), start, control = nls.control(), : step factor 0.000488281 reduced below 'minFactor' of 0.000976562
  Warning in (function (formula, data = parent.frame(), start, control = nls.control(), : step factor 0.000488281 reduced below 'minFactor' of 0.000976562
  Warning in (function (formula, data = parent.frame(), start, control = nls.control(), : step factor 0.000488281 reduced below 'minFactor' of 0.000976562
  Warning in (function (formula, data = parent.frame(), start, control = nls.control(), : number of iterations exceeded maximum of 50
  Warning in (function (formula, data = parent.frame(), start, control = nls.control(), : number of iterations exceeded maximum of 50
  Warning in (function (formula, data = parent.frame(), start, control = nls.control(), : step factor 0.000488281 reduced below 'minFactor' of 0.000976562
  Warning in (function (formula, data = parent.frame(), start, control = nls.control(), : step factor 0.000488281 reduced below 'minFactor' of 0.000976562
  Warning in (function (formula, data = parent.frame(), start, control = nls.control(), : number of iterations exceeded maximum of 50
  Warning in (function (formula, data = parent.frame(), start, control = nls.control(), : number of iterations exceeded maximum of 50
  Warning in (function (formula, data = parent.frame(), start, control = nls.control(), : step factor 0.000488281 reduced below 'minFactor' of 0.000976562
  Warning in (function (formula, data = parent.frame(), start, control = nls.control(), : step factor 0.000488281 reduced below 'minFactor' of 0.000976562
  Warning in (function (formula, data = parent.frame(), start, control = nls.control(), : step factor 0.000488281 reduced below 'minFactor' of 0.000976562
  Warning in (function (formula, data = parent.frame(), start, control = nls.control(), : number of iterations exceeded maximum of 50
  Warning in (function (formula, data = parent.frame(), start, control = nls.control(), : number of iterations exceeded maximum of 50
  Warning in (function (formula, data = parent.frame(), start, control = nls.control(), : number of iterations exceeded maximum of 50
  Warning in (function (formula, data = parent.frame(), start, control = nls.control(), : step factor 0.000488281 reduced below 'minFactor' of 0.000976562
  Warning in (function (formula, data = parent.frame(), start, control = nls.control(), : step factor 0.000488281 reduced below 'minFactor' of 0.000976562
  Warning in (function (formula, data = parent.frame(), start, control = nls.control(), : step factor 0.000488281 reduced below 'minFactor' of 0.000976562
  Warning in (function (formula, data = parent.frame(), start, control = nls.control(), : step factor 0.000488281 reduced below 'minFactor' of 0.000976562
  Warning in (function (formula, data = parent.frame(), start, control = nls.control(), : number of iterations exceeded maximum of 50
  Warning in (function (formula, data = parent.frame(), start, control = nls.control(), : step factor 0.000488281 reduced below 'minFactor' of 0.000976562
  Warning in (function (formula, data = parent.frame(), start, control = nls.control(), : number of iterations exceeded maximum of 50
  Warning in (function (formula, data = parent.frame(), start, control = nls.control(), : number of iterations exceeded maximum of 50
  Warning in (function (formula, data = parent.frame(), start, control = nls.control(), : step factor 0.000488281 reduced below 'minFactor' of 0.000976562
  Nonlinear regression model
    model: y ~ Const + B * (x^A)
     data: parent.frame()
     Const        B        A
   33.9290 -33.4595  -0.4464
   residual sum-of-squares: 8751

  Number of iterations to convergence: 4
  Achieved convergence tolerance: 4.773e-07
  >
  > ## Example 4
  >
  > # Investigate singular jacobian at the start value
  > # Note that this cannot be done with nls since the singular jacobian at
  > # the initial conditions would stop it with an error.
  >
  > DF1 <- data.frame(y=1:9, one=rep(1,9))
  > xx <- nls2(y~(a+2*b)*one, DF1, start = c(a=1, b=1), algorithm = "brute-force")
  > svd(xx$m$Rmat())[-2]
  $d
  [1] 6.708204 0.000000

  $v
             [,1]       [,2]
  [1,] -0.4472136 -0.8944272
  [2,] -0.8944272  0.4472136

  >
  > ## Example 5
  >
  > # plinear-lhs example
  > # Thanks to John Nash for suggesting this truncation of the
  > # Ratkowsky2 dataset. Full dataset: data(Ratkowsky2, package = "NISTnls")
  > # Use plinear-lhs to get starting values and then run nls via nls2 for
  > # final answer.
  >
  > pastured <- data.frame(
  +   time=c(9, 14, 21, 28, 42, 57, 63, 70, 79),
  +   yield= c(8.93, 10.8, 18.59, 22.33, 39.35, 56.11, 61.73, 64.62, 67.08))
  > fo <- yield ~ cbind(1, - exp(-exp(t3+t4*log(time))))
  >
  > gstart <- data.frame(t3 = c(-10, 10), t4 = c(1, 8))
  > set.seed(123)
  > junk <- capture.output(fm0 <- nls2(fo, data = pastured, start = gstart, alg = "plinear-lhs",
  +   control = nls.control(maxiter = 1000)), type = "message")
  > nls2(fo, pastured, start = fm0, alg = "plinear")
  Nonlinear regression model
    model: yield ~ cbind(1, -exp(-exp(t3 + t4 * log(time))))
     data: pastured
      t3     t4  .lin1  .lin2
  -9.209  2.378 69.955 61.681
   residual sum-of-squares: 8.376

  Number of iterations to convergence: 4
  Achieved convergence tolerance: 4.485e-06
  >
  > ## Example 6
  >
  > # CPoptim example
  > nls2(demand ~ a + b * Time, data = BOD, start =
  +   data.frame(a = c(-10, 10), b = c(-10, 10)), alg = "CPoptim")
  Error in loadNamespace(x) : there is no package called ‘CPoptim’
  Calls: nls2 ... loadNamespace -> withRestarts -> withOneRestart -> doWithOneRestart
  Execution halted
Flavors: r-release-macos-x86_64, r-oldrel-macos-x86_64
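
Editor's note: the ERROR on the two macOS flavors is triggered by Example 6, which calls the suggested package ‘CPoptim’ unconditionally; ‘CPoptim’ is not available on those flavors (see the NOTE above). Below is a minimal sketch, not part of the CRAN output, of the usual way to guard an example that depends on a package listed only in Suggests, so the example is skipped rather than halting the check when the package is missing. The guard itself is an assumed fix, not the maintainer's code; the nls2 call inside it is copied from the example above.

  library(nls2)

  # Run the CPoptim-based search only if the suggested package is installed;
  # otherwise skip it gracefully instead of failing with loadNamespace().
  if (requireNamespace("CPoptim", quietly = TRUE)) {
    nls2(demand ~ a + b * Time, data = BOD,
         start = data.frame(a = c(-10, 10), b = c(-10, 10)),
         alg = "CPoptim")
  } else {
    message("CPoptim not installed; skipping the CPoptim example")
  }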