Created
July 28, 2023 15:36
-
-
Save spinkney/bfe05d633a0a0454fa2b6da420d2cc11 to your computer and use it in GitHub Desktop.
AMOC
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
functions {
  // Strang-splitting pseudo log-likelihood for the tipping model
  //   dX_t = -(a * (X_t - m)^2 + lambda(t)) dt + sigma dW_t
  // where the control parameter lambda(t) = lambda0 * (1 - t / tau) is
  // ramped linearly towards the bifurcation at lambda = 0.
  //
  //   x        : observed series during the ramping period
  //   tau      : ramping time until the tipping point
  //   a        : curvature of the quadratic drift (a > 0)
  //   delta    : time step between consecutive observations
  //   alpha0   : mean-reversion rate of the baseline OU process
  //   mu0      : mean of the baseline OU process
  //   sigma2_0 : infinitesimal variance of the baseline process
  //
  // NOTE(review): this mirrors the companion R code, which works with
  // 2 x the log-likelihood (no -1/2 factors on the Gaussian terms), so as
  // an _lpdf it contributes twice the splitting-scheme log-density up to a
  // constant -- confirm whether a factor of 0.5 is intended before reuse.
  real ornstein_uhlenbeck_lpdf(vector x, real tau, real a, data real delta,
                               real alpha0, real mu0, real sigma2_0) {
    int n = num_elements(x);
    real loglik = 0;
    real m = mu0 - alpha0 / (2 * a);        // constant mean shift
    real lambda0 = -alpha0 ^ 2 / (4 * a);   // stationary level of lambda
    real sigma2 = sigma2_0;
    for (i in 1 : n - 1) {
      real x_upp = x[i + 1];
      real x_low = x[i];
      real time = delta * i;
      // Local (time-dependent) linearization of the quadratic drift
      real lam_seq = lambda0 * (1 - time / tau);
      real alpha_seq = 2 * sqrt(-a * lam_seq);    // local mean-reversion rate
      real gamma2_seq = sigma2 / (2 * alpha_seq); // local asymptotic variance
      real rho_seq = exp(-alpha_seq * delta);     // local autocorrelation
      real mu_seq = m + sqrt(-lam_seq / a);       // local stable fixed point
      // Half-step of the deterministic flow (fh_half) and its inverse
      real fh_half_tmp = a * delta * (x_low - mu_seq) * 0.5;
      real fh_half = (mu_seq * fh_half_tmp + x_low) / (fh_half_tmp + 1);
      real fh_half_inv = (mu_seq * fh_half_tmp - x_upp) / (fh_half_tmp - 1);
      // Conditional mean and residual of the OU transition between half-steps
      real mu_h = fh_half * rho_seq + mu_seq * (1 - rho_seq);
      real m_part = fh_half_inv - mu_h;
      real var_part = gamma2_seq * (1 - rho_seq ^ 2);
      // Jacobian of the inverse half-flow (change-of-variables term)
      real det = 1 / (a * delta * (x_upp - mu_seq) * 0.5 - 1) ^ 2;
      loglik += -log(var_part) - m_part ^ 2 / var_part + 2 * log(det);
    }
    return loglik;
  }
}
data {
  int<lower=0> N0; // number of baseline (stationary period) observations
  vector[N0] x_0;  // baseline series, modeled as a stationary OU process
  int<lower=0> N;  // number of ramping-period observations
  vector[N] x;     // ramping-period series
  real delta;      // time step between observations (1/12 in the driver script)
  real t0;         // calendar time at which the ramping starts (1924 in the driver)
}
parameters {
  real<lower=0> tau_raw; // we know that it hasn't happened yet
                         // so this needs to be greater than 0
  real<lower=0> a;       // curvature of the quadratic drift
  real<lower=0> alpha;   // mean-reversion rate of the baseline OU process
  real<lower=0> mu;      // mean of the baseline OU process
  real<lower=0> sigma;   // diffusion scale of the baseline OU process
}
transformed parameters {
  // Ramping time on its natural scale; the offset/scale presumably keep
  // tau >= 96, i.e. tipping after the observed ramping window -- confirm.
  real tau = tau_raw * 100 + 96;
  real rho = exp(-alpha * delta);       // lag-delta autocorrelation of the OU
  real gamma = sigma / sqrt(2 * alpha); // stationary sd of the OU
}
model {
  // Priors
  mu ~ std_normal();
  tau_raw ~ std_normal();
  a ~ normal(0.5, 0.5);
  sigma ~ gamma(1.5, 1);
  // Ramping period: Strang-splitting pseudo-likelihood (see functions block)
  x ~ ornstein_uhlenbeck(tau, a, delta, alpha, mu, sigma ^ 2);
  alpha ~ student_t(6, 2, 2);
  // Baseline period: exact AR(1) transitions of the stationary OU process
  x_0[2 : N0] ~ normal(x_0[1 : N0 - 1] * rho + mu * (1 - rho),
                       gamma * sqrt(1 - rho ^ 2));
}
generated quantities {
  real m = mu - alpha / (2 * a);    // constant mean shift
  real lambda = -alpha^2 / (4 * a); // stationary level of the control parameter
  real tc = tau + t0;               // estimated tipping time (calendar years)
}
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
library(data.table)
library(cmdstanr)

## Column guide for AMOCdata.txt:
## time:  calendar time in years
## AMOC0: SST (sea surface temperature) in subpolar gyre, subtracted the monthly mean
## AMOC1: AMOC0 subtracted the global mean SST
## AMOC2: AMOC0 subtracted two times the global mean SST (Arctic amplification)
## GM:    global mean SST
dat <- fread("AMOCdata.txt")
dat[, AMOC3 := AMOC0 - 3 * GM]  # extra fingerprint, kept for robustness checks

## Split the AMOC2 fingerprint at t0:
## baseline years 1870-1924 vs ramping years 1925-2020
t0 <- 1924
data.0 <- dat[time <= t0, AMOC2]  # baseline (stationary) period
data.2 <- dat[time > t0, AMOC2]   # ramping period

## Compile and sample the joint Stan model
mod_joint <- cmdstan_model("joint.stan")
mod_joint_out <- mod_joint$sample(
  data = list(
    N0 = length(data.0),
    x_0 = data.0,
    N = length(data.2),
    x = data.2,
    delta = 1 / 12,  # monthly observations on a yearly time scale
    t0 = t0
  ),
  adapt_delta = 0.8,
  parallel_chains = 4
)
mod_joint_out$summary()

library(bayesplot)
draws <- mod_joint_out$draws()
mcmc_pairs(draws, pars = c("a", "alpha", "mu", "sigma", "tau_raw"))
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
## Function returning 2 x the negative log-likelihood of a trace, up to a
## constant. The trace is assumed to be an Ornstein-Uhlenbeck process with
## constant mean mu and rate parameter alpha:
##   dXt = -alpha * (Xt - mu) dt + sigma * dWt
loglikOU.fun <- function(pars, data, delta) {
  ## pars  = (alpha, mu, sigma2): parameters to be estimated
  ## data  = observed trace (numeric vector, length >= 2)
  ## delta = time step between observations
  alpha0 <- max(pars[1], 0.001) # ensure the rate is positive
  mu0    <- pars[2]             # mean level
  sigma2 <- max(0, pars[3])     # infinitesimal variance, must be non-negative
  n    <- length(data)
  Xupp <- data[2:n]
  Xlow <- data[1:(n - 1)]
  gamma2 <- sigma2 / (2 * alpha0) # asymptotic (stationary) variance
  rho0   <- exp(-alpha0 * delta)  # lag-delta autocorrelation
  ## Observation minus the mean of the Gaussian transition distribution
  m.part <- Xupp - Xlow * rho0 - mu0 * (1 - rho0)
  ## Variance of the transition distribution (identical for every step)
  v.part <- gamma2 * (1 - rho0^2)
  ## There are n - 1 transitions, hence n - 1 log-variance terms.
  ## Fix: the original used n, adding one spurious parameter-dependent
  ## log(v.part) term that slightly biases the maximum likelihood estimate.
  loglik <- -(n - 1) * log(v.part) - sum(m.part^2 / v.part)
  -loglik
}
## Function returning the estimated OU parameters via stats4::mle.
## Input: a data trace and the time step delta between observations.
## initial.values = optional c(alpha, mu, sigma2) starting point; when NULL,
## moment-based starting values are computed from the trace.
estimate.OU <- function(data, delta, initial.values = NULL) {
  if (is.null(initial.values)) {
    n <- length(data)
    Xupp <- data[2:n]
    Xlow <- data[1:(n - 1)]
    mu <- mean(data) # starting value for mu0
    ## Starting value for alpha0 from the empirical lag-1 autocorrelation
    alpha <- -log(sum((Xupp - mu) * (Xlow - mu)) / sum((Xlow - mu)^2)) / delta
    ## Quadratic variation: starting value for sigma^2
    s2 <- mean(diff(data)^2) / delta
    par.init <- c(alpha, mu, s2)
  } else {
    par.init <- initial.values
  }
  minuslogl <- function(alpha, mu, sigma2) {
    ## Fix: the original tryCatch had no error handler, so it was a no-op and
    ## any error aborted the optimization. Map failures to NA so they are
    ## penalized below instead.
    logLik <- tryCatch(
      loglikOU.fun(pars = c(alpha, mu, sigma2), data = data, delta = delta),
      error = function(e) NA_real_
    )
    if (!is.finite(logLik)) {
      return(10000) # penalize NA/NaN/Inf evaluations
    }
    logLik
  }
  stats4::mle(
    minuslogl = minuslogl,
    start = list(alpha = par.init[1], mu = par.init[2], sigma2 = par.init[3])
  )
}
## 2 x the negative pseudo log-likelihood (up to a constant) of a trace from
##   dXt = -(a * (Xt - m)^2 + lambda(t)) dt + sigma * dWt
## with lambda ramped linearly, assuming alpha0, mu0, sigma20 are known from
## the stationary (baseline) OU fit.
## Strang-splitting estimator based on a Taylor expansion around mu(lambda).
loglik.fun <- function(pars, data, delta, alpha0, mu0, sigma20, pen = 0) {
  ## pars = (tau, a): ramping time and quadratic drift factor
  ## data: trace observed under the linear ramping of lambda
  ## delta: time step between observations
  ## pen: optional penalty pushing a away from values below 1
  ramp.time <- pars[1]
  a.drift <- max(0.1, pars[2])            # keep the drift curvature positive
  m.shift <- mu0 - alpha0 / (2 * a.drift) # constant mean shift
  lam0 <- -alpha0^2 / (4 * a.drift)       # stationary level of the control parameter
  n.obs <- length(data)
  x.next <- data[-1]
  x.curr <- data[-n.obs]
  t.grid <- delta * seq_len(n.obs - 1)
  ## Time-dependent linearization of the drift along the ramp
  lam.t <- lam0 * (1 - t.grid / ramp.time)
  rate.t <- 2 * sqrt(-a.drift * lam.t)       # local mean-reversion rate
  stat.var.t <- sigma20 / (2 * rate.t)       # local asymptotic variance
  ac.t <- exp(-rate.t * delta)               # local autocorrelation
  mean.t <- m.shift + sqrt(-lam.t / a.drift) # local stable fixed point
  ## Strang splitting: half-step of the deterministic flow and its inverse
  half <- a.drift * delta * (x.curr - mean.t) / 2
  flow.fwd <- (mean.t * half + x.curr) / (half + 1)
  flow.inv <- (mean.t * half - x.next) / (half - 1)
  ## Gaussian transition between the two half-steps
  cond.mean <- flow.fwd * ac.t + mean.t * (1 - ac.t)
  resid <- flow.inv - cond.mean
  cond.var <- stat.var.t * (1 - ac.t^2)
  ## Jacobian of the inverse half-flow (change-of-variables term)
  jac <- 1 / (a.drift * delta * (x.next - mean.t) / 2 - 1)^2
  ll <- -sum(log(cond.var)) - sum(resid^2 / cond.var) +
    2 * sum(log(jac)) - pen * n.obs * (1 / a.drift - 1) * (a.drift < 1)
  -ll
}
## Function returning the estimated ramping parameters (tau, a) via optim.
## Input: a data trace, the time step delta between observations, and the
## baseline OU parameters alpha0, mu0, sigma20 estimated from the
## stationary part of the record.
## Fix: the default was `pen = pen` (self-referential), which errors with
## "promise already under evaluation" whenever pen is not supplied; the
## default is now 0 (no penalty), matching loglik.fun's own default.
estimate.tipping <- function(data, delta, initial.values = c(100, 1),
                             alpha0, mu0, sigma20, pen = 0) {
  par.init <- initial.values
  minuslogl <- function(pars) {
    ## Fix: tryCatch without an error handler is a no-op; map failures to NA
    ## so they are penalized rather than aborting the optimization.
    logLik <- tryCatch(
      loglik.fun(pars = pars, data = data, delta = delta,
                 alpha0 = alpha0, mu0 = mu0, sigma20 = sigma20, pen = pen),
      error = function(e) NA_real_
    )
    if (!is.finite(logLik)) {
      return(10000) # penalize NA/NaN/Inf evaluations
    }
    logLik
  }
  optim(par = c(tau = par.init[1], a = par.init[2]),
        fn = minuslogl, method = "Nelder-Mead", hessian = FALSE)
}
###############################
### Estimation on AMOC data ###
###############################

## Read data
AMOC.data <- read.table("AMOCdata.txt", header = TRUE)
## time:  calendar time in years
## AMOC0: SST (sea surface temperature) in subpolar gyre, subtracted the monthly mean
## AMOC1: AMOC0 subtracted the global mean SST
## AMOC2: AMOC0 subtracted two times the global mean SST (Arctic amplification)
## GM:    global mean SST

## Fingerprint with 3 times the global warming subtracted, for robustness analysis
AMOC.data$AMOC3 <- AMOC.data$AMOC0 - 3 * AMOC.data$GM

## Baseline data: years 1870-1924, before the linear ramping of the control
## parameter lambda starts
t0 <- 1924
data.0 <- AMOC.data[AMOC.data$time <= t0, "AMOC2"]

## Estimate the Ornstein-Uhlenbeck parameters on the baseline period
ou.fit <- estimate.OU(data = data.0, delta = 1 / 12)
alpha0 <- unname(coef(ou.fit)["alpha"])
mu0 <- unname(coef(ou.fit)["mu"])
sigma2 <- unname(coef(ou.fit)["sigma2"])

## Ramping data: years 1925-2020, two times the global mean subtracted
data.2 <- AMOC.data[AMOC.data$time > t0, "AMOC2"]

## Estimate the ramping parameters (tau, a)
ramp.fit <- estimate.tipping(data = data.2, delta = 1 / 12,
                             initial.values = c(100, 1),
                             alpha0 = alpha0, mu0 = mu0, sigma20 = sigma2,
                             pen = 0.004)
tau <- unname(ramp.fit$par[1])
a <- unname(ramp.fit$par[2])
m <- mu0 - alpha0 / (2 * a)    # constant mean shift
lambda0 <- -alpha0^2 / (4 * a) # stationary level of the control parameter
tc <- tau + t0                 # estimated tipping year

## Collect estimates
round(c(t0 = t0, alpha0 = alpha0, mu0 = mu0, sigma2 = sigma2, tau = tau,
        a = a, m = m, lambda0 = lambda0, tc = tc), 2)
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment