import numpy as np
import matplotlib.pyplot as plt
def Lorenz63(state, *args):
    """Right-hand side of the Lorenz-63 model.

    Parameters
    ----------
    state : array_like, shape (3,)
        Current state (x, y, z).
    *args : (sigma, beta, rho)
        Model parameters, in this order.

    Returns
    -------
    f : ndarray, shape (3,)
        Time derivatives (dx/dt, dy/dt, dz/dt).
    """
    sigma = args[0]
    beta = args[1]
    rho = args[2]
    x, y, z = state          # unpack the state vector
    f = np.zeros(3)          # derivatives
    f[0] = sigma * (y - x)
    f[1] = x * (rho - z) - y
    f[2] = x * y - beta * z
    return f
def RK4(rhs, state, dt, *args):
    """Advance `state` by one step of size `dt` using the classical
    fourth-order Runge-Kutta scheme.

    Parameters
    ----------
    rhs : callable
        Right-hand side, called as ``rhs(state, *args)``.
    state : ndarray
        Current state vector.
    dt : float
        Time-step size.
    *args
        Extra parameters forwarded to `rhs`.

    Returns
    -------
    ndarray
        State advanced to t + dt.
    """
    k1 = rhs(state, *args)
    k2 = rhs(state + k1*dt/2, *args)
    k3 = rhs(state + k2*dt/2, *args)
    k4 = rhs(state + k3*dt, *args)
    new_state = state + (dt/6)*(k1 + 2*k2 + 2*k3 + k4)
    return new_state
def EnKF(ubi, w, ObsOp, JObsOp, R, B):
    """Analysis step of the (stochastic) ensemble Kalman filter with
    perturbed (virtual) observations.

    Parameters
    ----------
    ubi : ndarray, shape (n, N)
        Forecast (background) ensemble; n = state dimension, N = ensemble size.
    w : ndarray, shape (m,)
        Observation vector.
    ObsOp : callable
        Observation operator h(u) mapping state space to observation space.
    JObsOp : callable
        Jacobian of the observation operator, evaluated at a state vector.
    R : ndarray, shape (m, m)
        Observation-error covariance matrix.
    B : ndarray, shape (n, n)
        Forecast-error covariance matrix.

    Returns
    -------
    uai : ndarray, shape (n, N)
        Analysis ensemble.
    P : ndarray, shape (n, n)
        Sample analysis-error covariance matrix.
    """
    n, N = ubi.shape   # n: state dimension, N: ensemble size
    m = w.shape[0]     # m: size of measurement vector

    # compute the mean of the forecast ensemble
    ub = np.mean(ubi, 1)
    # evaluate Jacobian of observation operator at ub
    Dh = JObsOp(ub)
    # innovation covariance and Kalman gain
    D = Dh @ B @ Dh.T + R
    K = B @ Dh.T @ np.linalg.inv(D)

    wi = np.zeros([m, N])
    uai = np.zeros([n, N])
    for i in range(N):
        # create virtual observations (observation + simulated noise)
        wi[:, i] = w + np.random.multivariate_normal(np.zeros(m), R)
        # compute analysis ensemble member
        uai[:, i] = ubi[:, i] + K @ (wi[:, i] - ObsOp(ubi[:, i]))

    # compute the mean of the analysis ensemble
    ua = np.mean(uai, 1)
    # compute the sample analysis error covariance matrix
    P = (1/(N-1)) * (uai - ua.reshape(-1, 1)) @ (uai - ua.reshape(-1, 1)).T
    return uai, P
Ensemble Kalman Filter for the Lorenz-63 System
We analyze the Lorenz63 system of nonlinear ODEs
\[ \begin{align} \frac{\mathrm{d} x}{\mathrm{d} t} &= -\sigma(x-y), \nonumber \\ \frac{\mathrm{d} y}{\mathrm{d} t} &= \rho x-y-xz, \label{eq:lorenz} \\ \frac{\mathrm{d} z}{\mathrm{d} t} &= xy -\beta z, \nonumber \end{align} \]
where \(x=x(t),\) \(y=y(t)\), \(z=z(t)\) and \(\sigma\) (ratio of kinematic viscosity divided by thermal diffusivity), \(\rho\) (measure of stability) and \(\beta\) (related to the wave number) are parameters. Chaotic behavior is obtained when the parameters are chosen as
\[ \sigma = 10,\quad \rho=28,\quad \beta = 8/3. \]
This system is a simplified model for atmospheric convection and a classic example of limited predictability: although the initial-value problem is well-posed, its solutions exhibit sensitive dependence on initial conditions (chaos), so small errors in the initial state grow rapidly.
In [8]:
Parameters
In [9]:
# Lorenz-63 parameters in the classical chaotic regime
sigma = 10.0
beta = 8.0/3.0
rho = 28.0

# time-integration setup
dt = 0.01                       # integration time step
tm = 10                         # final time
nt = int(tm/dt)                 # number of time steps
t = np.linspace(0, tm, nt+1)    # time grid (nt+1 points incl. t=0)
Twin experiment
In [10]:
def h(u):
    """Observation operator: identity (the full state is observed)."""
    w = u
    return w
def Dh(u):
    """Jacobian of the identity observation operator: the n x n identity."""
    n = len(u)
    D = np.eye(n)
    return D
# --- Twin experiment: generate the "true" trajectory and synthetic observations ---
u0True = np.array([1, 1, 1])        # true initial conditions
np.random.seed(seed=1)
sig_m = 0.15                        # standard deviation of measurement noise
R = sig_m**2 * np.eye(3)            # covariance matrix for measurement noise

dt_m = 0.2                          # time period between observations
tm_m = 2                            # maximum time for observations
nt_m = int(tm_m/dt_m)               # number of observation instants

# indices of the time grid at which observations are taken
ind_m = (np.linspace(int(dt_m/dt), int(tm_m/dt), nt_m)).astype(int)
t_m = t[ind_m]                      # observation times

# time integration of the true trajectory, recording noisy observations
uTrue = np.zeros([3, nt+1])
uTrue[:, 0] = u0True
km = 0
w = np.zeros([3, nt_m])
for k in range(nt):
    uTrue[:, k+1] = RK4(Lorenz63, uTrue[:, k], dt, sigma, beta, rho)
    if (km < nt_m) and (k+1 == ind_m[km]):
        # observation = true state + Gaussian measurement noise
        w[:, km] = h(uTrue[:, k+1]) + np.random.normal(0, sig_m, [3,])
        km = km + 1
In [4]:
# Plot the true trajectory together with the noisy observations;
# the shaded band marks the observation window [0, tm_m].
fig, ax = plt.subplots(nrows=3, ncols=1, figsize=(10, 8))
ax = ax.flat

for k in range(3):
    ax[k].plot(t, uTrue[k, :], label='True', linewidth=3)
    ax[k].plot(t[ind_m], w[k, :], 'o', fillstyle='none',
               label='Observation', markersize=8, markeredgewidth=2)
    ax[k].set_xlabel('t')
    ax[k].axvspan(0, tm_m, color='lightgray', alpha=0.4, lw=0)

ax[0].legend(loc="center", bbox_to_anchor=(0.5, 1.25), ncol=4, fontsize=15)
ax[0].set_ylabel('x(t)', labelpad=5)
ax[1].set_ylabel('y(t)', labelpad=-12)
ax[2].set_ylabel('z(t)')
fig.subplots_adjust(hspace=0.5)
Data Assimilation
In [11]:
# --- Data assimilation with the ensemble Kalman filter ---
u0b = np.array([2.0, 3.0, 4.0])     # (wrong) background initial condition

sig_b = 0.1                         # std of background error
B = sig_b**2 * np.eye(3)            # initial background-error covariance
Q = 0.0 * np.eye(3)                 # model-error covariance (perfect model here)

# storage for background (no correction) and analysis trajectories
ub = np.zeros([3, nt+1])
ub[:, 0] = u0b
ua = np.zeros([3, nt+1])
ua[:, 0] = u0b

n = 3       # state dimension
m = 3       # measurement dimension

# ensemble size
N = 10
# initialize ensemble by perturbing the background initial condition
uai = np.zeros([3, N])
for i in range(N):
    uai[:, i] = u0b + np.random.multivariate_normal(np.zeros(n), B)

km = 0
for k in range(nt):
    # Forecast Step
    # background trajectory [without correction]
    ub[:, k+1] = RK4(Lorenz63, ub[:, k], dt, sigma, beta, rho)
    # EnKF trajectory [with correction at observation times]
    for i in range(N):  # forecast ensemble (with additive model noise Q)
        uai[:, i] = RK4(Lorenz63, uai[:, i], dt, sigma, beta, rho) \
                  + np.random.multivariate_normal(np.zeros(n), Q)
    # compute the mean of the forecast ensemble
    ua[:, k+1] = np.mean(uai, 1)
    # compute forecast error covariance matrix from the ensemble spread
    B = (1/(N-1)) * (uai - ua[:, k+1].reshape(-1, 1)) @ (uai - ua[:, k+1].reshape(-1, 1)).T

    if (km < nt_m) and (k+1 == ind_m[km]):
        # Analysis Step: correct the ensemble with the current observation
        uai, B = EnKF(uai, w[:, km], h, Dh, R, B)
        # compute the mean of the analysis ensemble
        ua[:, k+1] = np.mean(uai, 1)
        km = km + 1
Output
We plot the trajectories of \(x,\) \(y\) and \(z\) as a function of time \(t.\)
In [7]:
# Plot true, background (no correction), observed, and analysis trajectories;
# the shaded band marks the observation window [0, tm_m].
fig, ax = plt.subplots(nrows=3, ncols=1, figsize=(10, 8))
ax = ax.flat

for k in range(3):
    ax[k].plot(t, uTrue[k, :], label='True', linewidth=3)
    ax[k].plot(t, ub[k, :], ':', label='Background', linewidth=3)
    ax[k].plot(t[ind_m], w[k, :], 'o', fillstyle='none',
               label='Observation', markersize=8, markeredgewidth=2)
    ax[k].plot(t, ua[k, :], '--', label='Analysis', linewidth=3)
    ax[k].set_xlabel('t')
    ax[k].axvspan(0, tm_m, color='lightgray', alpha=0.4, lw=0)

ax[0].legend(loc="center", bbox_to_anchor=(0.5, 1.25), ncol=4, fontsize=15)
ax[0].set_ylabel('x(t)', labelpad=5)
ax[1].set_ylabel('y(t)', labelpad=-12)
ax[2].set_ylabel('z(t)')
fig.subplots_adjust(hspace=0.5)
plt.savefig('L63_EnKF.eps', dpi=500, bbox_inches='tight')