The airGR package implements semi-distributed model capabilities using a lag model between subcatchments. It allows chaining together several lumped models as well as integrating anthropogenic influences such as reservoirs or withdrawals.
The RunModel_Lag documentation gives an example of simulating the influence of a reservoir on a lumped model. Try example(RunModel_Lag) to run it.
In this vignette, we show how to calibrate 2 subcatchments in series with a semi-distributed model consisting of 2 GR4J models. To do this, we compare two strategies for calibrating the downstream subcatchment: using the flow observed at the upstream outlet, or using the flow simulated by the calibrated upstream model.
We finally compare these calibrations with a theoretical set of parameters.
We use an example data set from the package that unfortunately contains data for only one catchment.
## loading catchment data
data(L0123001)
Let’s imagine that this catchment of 360 km² is divided into 2 subcatchments: an upstream subcatchment of 180 km² and a downstream subcatchment of 180 km².
We consider that meteorological data are homogeneous over the whole catchment, so we use the same precipitation BasinObs$P
and the same potential evapotranspiration BasinObs$E
for the 2 subcatchments.
We generate the observed flow at the downstream outlet under the assumption that the upstream flow reaches the downstream outlet with a constant delay of 2 days.
QObsDown <- (BasinObs$Qmm + c(0, 0, BasinObs$Qmm[1:(length(BasinObs$Qmm)-2)])) / 2
options(digits = 5)
summary(cbind(QObsUp = BasinObs$Qmm, QObsDown))
## QObsUp QObsDown
## Min. : 0.02 Min. : 0.02
## 1st Qu.: 0.39 1st Qu.: 0.41
## Median : 0.98 Median : 1.00
## Mean : 1.47 Mean : 1.47
## 3rd Qu.: 1.88 3rd Qu.: 1.91
## Max. :23.88 Max. :19.80
## NA's :802 NA's :820
options(digits = 3)
The operations for the upstream subcatchment are exactly the same as for a lumped GR4J model, so we follow the same steps as in the Get Started vignette.
InputsModelUp <- CreateInputsModel(FUN_MOD = RunModel_GR4J, DatesR = BasinObs$DatesR,
                                   Precip = BasinObs$P, PotEvap = BasinObs$E)
Ind_Run <- seq(which(format(BasinObs$DatesR, format = "%Y-%m-%d") == "1990-01-01"),
               which(format(BasinObs$DatesR, format = "%Y-%m-%d") == "1999-12-31"))
RunOptionsUp <- CreateRunOptions(FUN_MOD = RunModel_GR4J,
                                 InputsModel = InputsModelUp,
                                 IndPeriod_WarmUp = NULL, IndPeriod_Run = Ind_Run,
                                 IniStates = NULL, IniResLevels = NULL)
## Warning in CreateRunOptions(FUN_MOD = RunModel_GR4J, InputsModel = InputsModelUp, : model warm up period not defined: default configuration used
## the year preceding the run period is used
InputsCritUp <- CreateInputsCrit(FUN_CRIT = ErrorCrit_NSE, InputsModel = InputsModelUp,
                                 RunOptions = RunOptionsUp,
                                 VarObs = "Q", Obs = BasinObs$Qmm[Ind_Run])
CalibOptionsUp <- CreateCalibOptions(FUN_MOD = RunModel_GR4J, FUN_CALIB = Calibration_Michel)
OutputsCalibUp <- Calibration_Michel(InputsModel = InputsModelUp, RunOptions = RunOptionsUp,
                                     InputsCrit = InputsCritUp, CalibOptions = CalibOptionsUp,
                                     FUN_MOD = RunModel_GR4J)
## Grid-Screening in progress (0% 20% 40% 60% 80% 100%)
## Screening completed (81 runs)
## Param = 247.151, -0.020, 83.096, 2.384
## Crit. NSE[Q] = 0.7688
## Steepest-descent local search in progress
## Calibration completed (21 iterations, 234 runs)
## Param = 257.238, 1.012, 88.235, 2.208
## Crit. NSE[Q] = 0.7988
And run the model to see the result of the simulation:
OutputsModelUp <- RunModel_GR4J(InputsModel = InputsModelUp, RunOptions = RunOptionsUp,
                                Param = OutputsCalibUp$ParamFinalR)
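To actually visualise this upstream simulation against the observations, one possibility (a small addition to the original example) is the plot method that airGR provides for OutputsModel objects:
# optional check: plot the upstream simulation against the observed upstream flow
plot(OutputsModelUp, Qobs = BasinObs$Qmm[Ind_Run])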
For the downstream subcatchment, we need to create an InputsModel object completed with the upstream information:
InputsModelDown1 <- CreateInputsModel(
  FUN_MOD = RunModel_GR4J, DatesR = BasinObs$DatesR,
  Precip = BasinObs$P, PotEvap = BasinObs$E,
  Qupstream = matrix(BasinObs$Qmm, ncol = 1), # upstream observed flow
  LengthHydro = 100, # distance between upstream catchment outlet & the downstream one [km]
  BasinAreas = c(180, 180) # upstream and downstream areas [km²]
)
## Warning in CreateInputsModel(FUN_MOD = RunModel_GR4J, DatesR =
## BasinObs$DatesR, : 'Qupstream' contains NA values: model outputs will contain
## NAs
We then calibrate the combination of the Lag model (for the transfer of the upstream flow) and the GR4J model (for the runoff of the downstream subcatchment):
RunOptionsDown <- CreateRunOptions(FUN_MOD = RunModel_GR4J,
                                   InputsModel = InputsModelDown1,
                                   IndPeriod_WarmUp = NULL, IndPeriod_Run = Ind_Run,
                                   IniStates = NULL, IniResLevels = NULL)
## Warning in CreateRunOptions(FUN_MOD = RunModel_GR4J, InputsModel = InputsModelDown1, : model warm up period not defined: default configuration used
## the year preceding the run period is used
InputsCritDown <- CreateInputsCrit(FUN_CRIT = ErrorCrit_NSE, InputsModel = InputsModelDown1,
                                   RunOptions = RunOptionsDown,
                                   VarObs = "Q", Obs = QObsDown[Ind_Run])
CalibOptionsDown <- CreateCalibOptions(FUN_MOD = RunModel_GR4J,
                                       FUN_CALIB = Calibration_Michel,
                                       IsSD = TRUE) # specify that it's a SD model
OutputsCalibDown1 <- Calibration_Michel(InputsModel = InputsModelDown1,
                                        RunOptions = RunOptionsDown,
                                        InputsCrit = InputsCritDown,
                                        CalibOptions = CalibOptionsDown,
                                        FUN_MOD = RunModel_GR4J)
## Grid-Screening in progress (0% 20% 40% 60% 80% 100%)
## Screening completed (243 runs)
## Param = 11.250, 247.151, -0.020, 83.096, 2.384
## Crit. NSE[Q] = 0.8861
## Steepest-descent local search in progress
## Calibration completed (45 iterations, 675 runs)
## Param = 2.560, 265.072, 0.970, 83.931, 4.648
## Crit. NSE[Q] = 0.9489
To run the complete model, we should substitute the observed upstream flow with the simulated one:
InputsModelDown2 <- InputsModelDown1
InputsModelDown2$Qupstream[Ind_Run] <- OutputsModelUp$Qsim
RunModel is run in order to automatically combine the GR4J and Lag models.
OutputsModelDown1 <- RunModel(InputsModel = InputsModelDown2,
                              RunOptions = RunOptionsDown,
                              Param = OutputsCalibDown1$ParamFinalR,
                              FUN_MOD = RunModel_GR4J)
The performance of the model in validation is then:
CritDown1 <- ErrorCrit_NSE(InputsCritDown, OutputsModelDown1)
## Crit. NSE[Q] = 0.3569
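The sharp drop in NSE compared with Calibration 1 can also be inspected visually; as a quick check (an addition to the original example), the same plot method can be used with the synthetic downstream observations:
# optional check: compare the downstream simulation with the synthetic observations
plot(OutputsModelDown1, Qobs = QObsDown[Ind_Run])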
We now calibrate the model with the InputsModel object previously created, in which the observed upstream flow has been substituted with the simulated one:
OutputsCalibDown2 <- Calibration_Michel(InputsModel = InputsModelDown2,
                                        RunOptions = RunOptionsDown,
                                        InputsCrit = InputsCritDown,
                                        CalibOptions = CalibOptionsDown,
                                        FUN_MOD = RunModel_GR4J)
## Grid-Screening in progress (0% 20% 40% 60% 80% 100%)
## Screening completed (243 runs)
## Param = 11.250, 169.017, -0.020, 83.096, 2.384
## Crit. NSE[Q] = 0.2520
## Steepest-descent local search in progress
## Calibration completed (61 iterations, 797 runs)
## Param = 19.990, 176.811, 5.715, 91.887, 3.890
## Crit. NSE[Q] = 0.7461
ParamDown2 <- OutputsCalibDown2$ParamFinalR
The theoretical Velocity parameter corresponds to the 100 km between the two outlets travelled in 2 days:
Velocity <- InputsModelDown1$LengthHydro * 1e3 / (2 * 86400)
paste(format(Velocity), "m/s")
## [1] "0.579 m/s"
Both calibrations overestimate this parameter:
mVelocity <- matrix(c(Velocity,
                      OutputsCalibDown1$ParamFinalR[1],
                      OutputsCalibDown2$ParamFinalR[1]),
                    ncol = 1,
                    dimnames = list(c("theoretical",
                                      "calibrated with observed upstream flow",
                                      "calibrated with simulated upstream flow"),
                                    c("Velocity parameter")))
knitr::kable(mVelocity)
|                                         | Velocity parameter |
|-----------------------------------------|--------------------|
| theoretical                             | 0.579              |
| calibrated with observed upstream flow  | 2.560              |
| calibrated with simulated upstream flow | 19.990             |
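As a quick sanity check (not part of the original script), these calibrated velocities can be converted back into the travel time they imply over the 100 km reach, to be compared with the 2 days used to build QObsDown:
# implied travel times [days] for the two calibrated Velocity values
InputsModelDown1$LengthHydro * 1e3 /
  (c(OutputsCalibDown1$ParamFinalR[1], OutputsCalibDown2$ParamFinalR[1]) * 86400)
# roughly 0.45 days (Calibration 1) and 0.06 days (Calibration 2) instead of 2 days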
Theoretically, the parameters of the downstream GR4J model should be the same as the upstream ones, and we know the lag time. So this set of parameters should give a better performance criterion:
ParamDownTheo <- c(Velocity, OutputsCalibUp$ParamFinalR)
OutputsModelDownTheo <- RunModel(InputsModel = InputsModelDown2,
                                 RunOptions = RunOptionsDown,
                                 Param = ParamDownTheo,
                                 FUN_MOD = RunModel_GR4J)
CritDownTheo <- ErrorCrit_NSE(InputsCritDown, OutputsModelDownTheo)
## Crit. NSE[Q] = 0.3354
comp <- matrix(c(0, OutputsCalibUp$ParamFinalR,
                 rep(OutputsCalibDown1$ParamFinalR, 2),
                 OutputsCalibDown2$ParamFinalR,
                 ParamDownTheo),
               ncol = 5, byrow = TRUE)
comp <- cbind(comp, c(OutputsCalibUp$CritFinal,
                      OutputsCalibDown1$CritFinal,
                      CritDown1$CritValue,
                      OutputsCalibDown2$CritFinal,
                      CritDownTheo$CritValue))
colnames(comp) <- c("Velocity", paste0("X", 1:4), "NSE")
rownames(comp) <- c("Calibration of the upstream subcatchment",
                    "Calibration 1 with observed upstream flow",
                    "Validation 1 with simulated upstream flow",
                    "Calibration 2 with simulated upstream flow",
                    "Validation theoretical set of parameters")
knitr::kable(comp)
|                                            | Velocity | X1  | X2   | X3   | X4   | NSE   |
|--------------------------------------------|----------|-----|------|------|------|-------|
| Calibration of the upstream subcatchment   | 0.000    | 257 | 1.01 | 88.2 | 2.21 | 0.799 |
| Calibration 1 with observed upstream flow  | 2.560    | 265 | 0.97 | 83.9 | 4.65 | 0.949 |
| Validation 1 with simulated upstream flow  | 2.560    | 265 | 0.97 | 83.9 | 4.65 | 0.357 |
| Calibration 2 with simulated upstream flow | 19.990   | 177 | 5.71 | 91.9 | 3.89 | 0.746 |
| Validation theoretical set of parameters   | 0.579    | 257 | 1.01 | 88.2 | 2.21 | 0.335 |
Even if calibration with the observed upstream flow gives the best performance criterion (Calibration 1), in validation with the simulated upstream flow this parameter set performs markedly worse than the one obtained by calibrating directly with the simulated upstream flow (Calibration 2). The theoretical set of parameters gives a performance equivalent to that of Validation 1, but it also underperforms Calibration 2.