Added likelihood and orthogonal projections

Added the likelihood equations/form from the discrete Bayes
chapter to better tie in that form of reasoning. Then I converted
the 1D equations to the orthogonal projection form to show how
the Kalman gain is computed and where the residual comes from
computationally. This should make the full KF equations much more
approachable.
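
For orientation, here is a minimal sketch of the 1D update written in the gain/residual form the message refers to; the function and variable names are illustrative, not the notebook's:

def update_1d(prior_mean, prior_var, z, z_var):
    # residual: how far the measurement is from the prediction
    y = z - prior_mean
    # Kalman gain: fraction of the residual to accept, set by the
    # relative uncertainty of the prediction and the measurement
    K = prior_var / (prior_var + z_var)
    # posterior (likelihood times prior, normalized), in update form:
    # the prediction pulled toward the measurement by K*y
    x = prior_mean + K * y
    P = (1 - K) * prior_var
    return x, P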
Roger Labbe 2016-01-17 20:16:27 -08:00
parent 005fe0618c
commit 0a41e78aeb
3 changed files with 690 additions and 805 deletions

File diff suppressed because one or more lines are too long


@@ -19,9 +19,9 @@ from __future__ import (absolute_import, division, print_function,
 import book_plots as bp
 import matplotlib.pyplot as plt
 
-def plot_dog_track(xs, measurement_var, process_var):
+def plot_dog_track(xs, dog, measurement_var, process_var):
     N = len(xs)
-    bp.plot_track([0, N-1], [1, N])
+    bp.plot_track(dog)
     bp.plot_measurements(xs, label='Sensor')
     bp.set_labels('variance = {}, process variance = {}'.format(
         measurement_var, process_var), 'time', 'pos')
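
With the new signature the true track is passed in alongside the measurements, so a call now looks roughly like the following; the data here is improvised and plot_dog_track is assumed to be imported from the edited module (its filename is not shown in this view):

import numpy as np

# improvised data: dog is the true position at each step,
# xs the corresponding noisy sensor readings (std 3 -> variance 9)
dog = np.arange(0., 30.)
xs = dog + np.random.randn(len(dog)) * 3.

plot_dog_track(xs, dog, measurement_var=9., process_var=1.)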


@@ -104,9 +104,11 @@ def show_residual_chart():
     plt.text (0.5, 159.6, "prediction", ha='center',va='top',fontsize=18,color='red')
     plt.text (1.0, 164.4, r"measurement ($z$)",ha='center',va='bottom',fontsize=18,color='blue')
     plt.text (0, 157.8, r"posterior ($x_{t-1}$)", ha='center', va='top',fontsize=18)
-    plt.text (1.02, est_y-1.5, "residual", ha='left', va='center',fontsize=18)
+    plt.text (1.02, est_y-1.5, "residual($y$)", ha='left', va='center',fontsize=18)
+    plt.text (1.02, est_y-2.2, r"$y=z-\bar x_t$", ha='left', va='center',fontsize=18)
     plt.text (0.9, est_y, "new estimate ($x_t$)", ha='right', va='center',fontsize=18)
+    plt.text (0.8, est_y-0.5, "(posterior)", ha='right', va='center',fontsize=18)
+    plt.text (0.75, est_y-1.2, r"$\bar{x}_t + Ky$", ha='right', va='center',fontsize=18)
     plt.xlabel('time')
     ax.yaxis.set_label_position("right")
     plt.ylabel('state')
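
The added labels name quantities computed elsewhere in the chapter; as a rough numeric illustration of how they relate (values picked to resemble the chart's coordinates, and K is an arbitrary gain, not taken from the chart code):

x_prior = 159.6           # prediction, bar{x}_t
z = 164.4                 # measurement
K = 0.6                   # illustrative Kalman gain

y = z - x_prior           # residual (y), the first added label
x_post = x_prior + K * y  # new estimate bar{x}_t + K*y, i.e. the posterior
print(y, x_post)          # 4.8 162.48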