# Exercise 04 — Radonov Ivan (5967988)
$$x_k = \begin{pmatrix} u_k \\ v_k \end{pmatrix} \in \mathbb{R}^2, \qquad
y_k = \begin{pmatrix} \hat{u}_k \\ \hat{v}_k \end{pmatrix} \in \mathbb{R}^2, \qquad
k = 1, \dots, T$$
In [1]:
import numpy as np
# NOTE(review): the imports for `plt` and `bundles` were lost in the document
# export — presumably `import matplotlib.pyplot as plt` and
# `from tueplots import bundles`. Restore them before running.
plt.rcParams.update(bundles.beamer_moml())
plt.rcParams.update({"figure.dpi":200})
# Fixed seed for reproducibility of the simulation below.
rng = np.random.default_rng(seed=124)
# Number of time steps k = 1, ..., T.
T = 200
Initial distribution
$$x_0 \sim \mathcal{N}(\mu_0, \Sigma_0)$$

with

$$\mu_0 = \begin{bmatrix} -1 & 1 \end{bmatrix}^\top \tag{1}$$

$$\Sigma_0 = \begin{pmatrix} 1.0 & 0.0 \\ 0.0 & 1.0 \end{pmatrix} \tag{2}$$
In [2]:
# Prior distribution x0 ~ N(mu_0, Sigma_0), equations (1)-(2).
mu_0 = np.asarray([-1, 1])     # prior mean [-1, 1]^T
Sigma_0 = np.diag([1.0, 1.0])  # prior covariance: 2x2 identity
Dynamics
$$x_k \mid x_{k-1} \sim \mathcal{N}(f(x_{k-1}), Q_{k-1})$$

with

$$f(x_k) = f\!\left(\begin{pmatrix} u_k \\ v_k \end{pmatrix}\right)
        = \begin{pmatrix} u_k + 0.1 \cdot \left(c \cdot \left(u_k - u_k^3/3 + v_k\right)\right) \\
                          v_k + 0.1 \cdot \left(-(1/c) \cdot (u_k - a - b \cdot v_k)\right) \end{pmatrix} \tag{3}$$

$$Q_k = Q = \begin{pmatrix} 0.01 & 0.0 \\ 0.0 & 0.01 \end{pmatrix} \tag{4}$$

and

$$a = 0.2 \tag{5}$$
$$b = 0.2 \tag{6}$$
$$c = 3.0 \tag{7}$$
In [3]:
# Model parameters of the discretised FitzHugh–Nagumo dynamics, eqs. (5)-(7).
a, b, c = .2, .2, 3.
# Process-noise covariance Q = 0.01 * I_2, eq. (4).
Q = 0.01 * np.eye(2)
# Transition function f of eq. (3).
# NOTE(review): the function bodies below were truncated by the document
# export — only the unpacking line survives; the computation and the
# `return` statements are missing and must be restored from eq. (3).
def f(x):
u, v = x
# Jacobian of f evaluated at x = (u, v).
# NOTE(review): body truncated by the export as well.
def Jf(x):
u, v = x
Measurement model
$$y_k \mid x_k \sim \mathcal{N}(h(x_k), R_k)$$

with

$$h(x_k) = x_k \tag{8}$$

$$R_k = \begin{pmatrix} 1 & 0 \\ 0 & 1 \end{pmatrix} \tag{9}$$
In [4]:
# Measurement function h(x) = x (identity), eq. (8).
h = lambda x: x
# Jacobian of h — for the identity map this is I_n.
# NOTE(review): body truncated by the export; presumably `return np.eye(n)`.
def Jh(x):
n = x.shape[0]
# Measurement-noise covariance R_k = I_2, eq. (9).
Rk = np.eye(2)
# Simulate the noise-free trajectory, the latent path, and the observations.
# NOTE(review): `randvars` presumably comes from `probnum` — its import was
# lost in the export. The enclosing `def simulate():` header (cell In[6]
# below calls `simulate()`) and the original indentation also appear to have
# been dropped by the export.
ground_truth = []   # noise-free trajectory, driven only by f
path = []           # latent states x_k sampled with process noise Q
observations = []   # measurements y_k sampled with noise R_k
gk = mu_0
# Draw the initial latent state x_0 ~ N(mu_0, Sigma_0).
xk = randvars.Normal(mean=mu_0, cov=Sigma_0).sample(rng=rng)
for k in range(T):
gk = f(gk)
ground_truth.append(gk)
# x_k ~ N(f(x_{k-1}), Q) — dynamics model, eq. (3)-(4).
xk = randvars.Normal(mean=f(xk), cov=Q).sample(rng=rng)
path.append(xk)
# y_k ~ N(h(x_k), R_k) — measurement model, eq. (8)-(9).
yk = randvars.Normal(mean=h(xk), cov=Rk).sample(rng=rng)
observations.append(yk)
In [6]:
ground_truth, path, observations = simulate()
# NOTE(review): the figure/axes creation (e.g. `fig, ax = plt.subplots(1, 2)`)
# and the plot call for the observations on ax[1] were lost in the export.
ax[0].set_title('Ground truth')
ax[0].plot(ground_truth)
ax[1].set_title('Data')
fig.legend()
<matplotlib.legend.Legend at 0x1635667df28>
Out[8]:
$$\mu_k^- = f(\mu_{k-1}) \tag{10}$$

$$\Sigma_k^- = J_f(\mu_{k-1})\, \Sigma_{k-1}\, J_f(\mu_{k-1})^\top + Q_{k-1} \tag{11}$$
In [9]:
def ekf_predict(mu_prev, Sigma_prev):
    """EKF prediction step, equations (10)-(11).

    Parameters
    ----------
    mu_prev : filtered mean mu_{k-1}.
    Sigma_prev : filtered covariance Sigma_{k-1}.

    Returns
    -------
    (mu_minus, Sigma_minus) : predicted mean and covariance at step k.
    """
    mu_minus = f(mu_prev)  # eq. (10): mu_k^- = f(mu_{k-1})
    J_prev = Jf(mu_prev)
    # NOTE(review): the export truncated this cell after `J_prev`; the two
    # lines below reconstruct eq. (11) from the markdown directly above.
    Sigma_minus = J_prev @ Sigma_prev @ J_prev.T + Q  # eq. (11)
    return mu_minus, Sigma_minus
Correction
$$\hat{y}_k = h(\mu_k^-) \tag{12}$$

$$S_k = J_h(\mu_k^-)\, \Sigma_k^-\, J_h(\mu_k^-)^\top + R_k \tag{13}$$

$$K_k = \Sigma_k^-\, J_h(\mu_k^-)^\top S_k^{-1} \tag{14}$$

$$\mu_k = \mu_k^- + K_k\,(y_k - \hat{y}_k) \tag{15}$$

$$\Sigma_k = \Sigma_k^- - K_k\, S_k\, K_k^\top \tag{16}$$
In [10]:
# EKF correction step and filter loop, equations (12)-(16).
# NOTE(review): this cell was heavily truncated by the export — function
# bodies, the correction computation of S_k, K_k, mu_k, Sigma_k, the
# predict/correct calls inside the loop, and the original indentation are
# all missing.
def compute_S_inv(J_minus, Sigma_minus, Rk):
# NOTE(review): `R_inv = Rk` only holds because R_k is the identity (eq. 9).
R_inv = Rk
# NOTE(review): `b` here is the scalar 0.2 defined above, so `b @ J_minus`
# would raise at runtime — a matrix operand was presumably lost in the
# export; verify against the original notebook.
A = np.eye(J_minus.shape[0]) + b @ J_minus
# Fragment of the correction step, eq. (12): y_hat = h(mu_k^-).
yhat = h(mu_minus)
J_minus = Jh(mu_minus)
# Filter loop: collect predicted and filtered moments for every step.
prediction_means = []
prediction_covs = []
filtered_means = []
filtered_covs = []
for k in range(T):
prediction_means.append(mu_minus)
prediction_covs.append(Sigma_minus)
filtered_means.append(mu_k)
filtered_covs.append(Sigma_k)
# NOTE(review): the figure/axes creation and the observation plot on ax[0]
# were lost in the export (compare the analogous plotting cell above).
ax[0].set_title('Data')
ax[1].set_title('Filtering estimate')
ax[1].plot(filtered_means)
fig.legend()
<matplotlib.legend.Legend at 0x163567bcb38>
Out[12]:
$$\xi_k = \mu_k + G_k\,(\xi_{k+1} - \mu_{k+1}^-) \tag{18}$$

$$\Lambda_k = \Sigma_k + G_k\,(\Lambda_{k+1} - \Sigma_{k+1}^-)\, G_k^\top \tag{19}$$
In [13]:
# Extended RTS smoother, equations (17)-(19).
# NOTE(review): heavily truncated by the export — the body of `compute_Gk`
# (presumably the smoother gain G_k = Sigma_k Jf(mu_k)^T (Sigma_{k+1}^-)^{-1},
# eq. 17 — confirm against the original), the backward-step function that
# returns `(ek, Lk)`, and the backward loop header are all missing; the
# stray `return ek, Lk` below belongs to that lost function.
def compute_Gk(Sigma_k, mu_k, Sigma_minus_next):
return ek, Lk
# Backward pass: seed with the last filtered moments, then append per step.
smoothed_means = [e_next]
smoothed_covs = [L_next]
smoothed_means.append(e_next)
smoothed_covs.append(L_next)
# Reverse so the lists run forward in time again.
smoothed_means = smoothed_means[::-1]
smoothed_covs = smoothed_covs[::-1]
plt.plot(smoothed_means)
[<matplotlib.lines.Line2D at 0x163584beda0>,
Out[15]:
<matplotlib.lines.Line2D at 0x16356b159b0>]
In [16]:
# Final figure: smoothed u and v components with uncertainty bands.
# NOTE(review): truncated by the export — the definitions of `sm_v` (and a
# matching `sm_u`), the plot of the u component on axes[0], and the argument
# lists of both `fill_between` calls (presumably mean +/- 2 std bands —
# confirm) are missing; the `fill_between(` lines are left unclosed.
fig, axes = plt.subplots(1, 2, sharey=True)
time_grid = np.arange(200)
axes[0].set_title('u')
axes[1].set_title('v')
axes[1].plot(time_grid, sm_v)
axes[0].fill_between(
time_grid,
alpha=0.2,
axes[1].fill_between(
fig.legend()
<matplotlib.legend.Legend at 0x16356c23320>
Out[16]: