initial commit

Roger Labbe 2014-04-28 16:14:43 -05:00
commit ff9c20d7c1
5 changed files with 754 additions and 0 deletions

114
Untitled0.ipynb Normal file

@@ -0,0 +1,114 @@
{
"metadata": {
"name": ""
},
"nbformat": 3,
"nbformat_minor": 0,
"worksheets": [
{
"cells": [
{
"cell_type": "code",
"collapsed": false,
"input": [
"def to_array(x):\n",
" try:\n",
" x.shape\n",
" try:\n",
" if type(x) != numpy.ndarray:\n",
" x=asarray(x)[0]\n",
" return x\n",
" except:\n",
" pass\n",
"\n",
" except:\n",
" return array(mat(x)).reshape(1)\n",
"\n",
"def to_cov(x,n):\n",
" try:\n",
" x.shape\n",
" return x\n",
" except:\n",
" return eye(n) * x\n",
" \n",
"def multivariate_gaussian (x, mu, cov):\n",
" \"\"\" This is designed to work the same as scipy.stats.multivariate_normal\n",
" which is available before version 0.14. You may either pass in a \n",
" multivariate set of data:\n",
" multivariate_gaussian (array([1,1]), array([3,4]), eye(2)*1.4)\n",
" or unidimensional data:\n",
" multivariate_gaussian(1, 3, 1.4)\n",
" \n",
" In the multivariate case if cov is a scalar it is interpreted as eye(n)*cov\n",
" \"\"\"\n",
" \n",
" # force all to numpy.array type\n",
" x = to_array(x)\n",
" mu = to_array(mu)\n",
" n = mu.size\n",
" cov = to_cov (cov, n)\n",
"\n",
" det = numpy.sqrt(numpy.prod(numpy.diag(cov)))\n",
" frac = (2*numpy.pi)**(-n/2.0) * (1.0/det)\n",
" fprime = x - mu\n",
" fprime **= 2\n",
" m = frac * numpy.exp(-0.5*numpy.dot(fprime, 1/numpy.diag(cov)))\n",
" return m\n",
"\n"
],
"language": "python",
"metadata": {},
"outputs": [],
"prompt_number": 24
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"print mvg (array([1,1]), array([1,1]), eye(2))\n",
"print mvg (mat([1,1]), mat([1,1]), eye(2))\n",
"print mvg (2,3,1)\n",
"\n",
"\n",
"\n",
"\n"
],
"language": "python",
"metadata": {},
"outputs": [
{
"output_type": "stream",
"stream": "stdout",
"text": [
"0.159154943092\n",
"0.159154943092\n",
"0.241970724519\n"
]
}
],
"prompt_number": 30
},
{
"cell_type": "code",
"collapsed": false,
"input": [
"print numpy.array(3)"
],
"language": "python",
"metadata": {},
"outputs": [
{
"output_type": "stream",
"stream": "stdout",
"text": [
"3\n"
]
}
],
"prompt_number": 17
}
],
"metadata": {}
}
]
}
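Note (not part of the commit): the numbers the cells above print can be checked by hand. With x equal to the mean and an identity covariance, the 2-D density reduces to (2*pi)**(-n/2) = 1/(2*pi), and the scalar call multivariate_gaussian(2, 3, 1) is a unit-variance normal evaluated one standard deviation from its mean. A minimal sketch:

import math

# x == mu, identity covariance, n == 2: density is (2*pi)**(-1)
print(1.0 / (2 * math.pi))                      # 0.159154943092 -> matches the first two outputs
# unit-variance normal one sigma from its mean
print(math.exp(-0.5) / math.sqrt(2 * math.pi))  # 0.241970724519 -> matches the third output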

117
gauss.py Normal file

@@ -0,0 +1,117 @@
# -*- coding: utf-8 -*-
"""
Created on Tue Apr 22 11:38:49 2014

@author: rlabbe
"""

from __future__ import division, print_function
import math
import matplotlib.pyplot as plt
import numpy.random as random


class gaussian(object):
    """ 1D Gaussian. Note that sigma stores the variance, not the standard deviation."""

    def __init__(self, m, s):
        self.mu = float(m)
        self.sigma = float(s)

    def __add__(a, b):
        # sum of independent Gaussians: means and variances add (the predict step)
        return gaussian(a.mu + b.mu, a.sigma + b.sigma)

    def __mul__(a, b):
        # product of Gaussians, renormalized: the Bayesian measurement update
        m = (a.sigma*b.mu + b.sigma*a.mu) / (a.sigma + b.sigma)
        s = 1. / (1./a.sigma + 1./b.sigma)
        return gaussian(m, s)

    def __call__(self, x):
        # evaluate the probability density at x
        return math.exp(-0.5 * (x-self.mu)**2 / self.sigma) / \
               math.sqrt(2.*math.pi*self.sigma)

    def __str__(self):
        return "(%f, %f)" % (self.mu, self.sigma)

    def __getitem__(self, index):
        """ maybe silly, allows you to access the object as a tuple:
        a = gaussian(3, 4)
        print (tuple(a))
        """
        if index == 0:
            return self.mu
        elif index == 1:
            return self.sigma
        else:
            raise StopIteration


class KF1D(object):
    """ one dimensional Kalman filter built from the gaussian class above."""

    def __init__(self, pos, sigma):
        self.estimate = gaussian(pos, sigma)

    def update(self, Z, var):
        # incorporate a measurement Z with variance var
        self.estimate = self.estimate * gaussian(Z, var)

    def predict(self, U, var):
        # move by U with motion variance var
        self.estimate = self.estimate + gaussian(U, var)


measurements = [x+5 for x in range(100)]


def fixed_error_kf(measurement_error, noise_factor=1.0):
    motion_sig = 2.
    mu = 0
    sig = 1000
    f = KF1D(mu, sig)

    ys = []
    errs = []
    xs = []

    for i in range(len(measurements)):
        r = random.randn() * noise_factor
        m = measurements[i] + r

        f.update(m, measurement_error)
        xs.append(m)

        ys.append(f.estimate.mu)
        errs.append(f.estimate.sigma)

        f.predict(1.0, motion_sig)

    plt.clf()
    plt.plot(measurements, 'r')
    plt.plot(xs, 'g')
    plt.errorbar(x=range(len(ys)), color='b', y=ys, yerr=errs)
    plt.show()


def varying_error_kf(noise_factor=1.0):
    motion_sig = 2.
    mu = 0
    sig = 1000
    f = KF1D(mu, sig)

    ys = []
    us = []
    errs = []
    xs = []

    for i in range(len(measurements)):
        r = random.randn() * noise_factor
        m = measurements[i] + r
        print(r)

        f.update(m, abs(r*10))
        xs.append(m)
        #print("measure:" + str(f.estimate))

        ys.append(f.estimate.mu)
        errs.append(f.estimate.sigma)

        f.predict(1.0, motion_sig)
        #print("predict:" + str(f.estimate))

    plt.clf()
    plt.plot(measurements, 'r')
    plt.plot(xs, 'g')
    plt.errorbar(x=range(len(ys)), color='b', y=ys, yerr=errs)
    plt.show()


if __name__ == '__main__':
    # run the demo only when executed as a script, not on import
    varying_error_kf(noise_factor=100)
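Note (not part of the commit): a minimal usage sketch of the KF1D class defined in gauss.py above, assuming gauss.py is importable on the path; the measurement value, variances, and motion step are illustrative only. Because the prior variance (1000) dwarfs the measurement variance (2), the first update essentially adopts the measurement; the predict step then shifts the mean and inflates the variance by the motion noise.

from gauss import KF1D

f = KF1D(pos=0., sigma=1000.)   # very wide prior: we know almost nothing about the position
f.update(Z=5.1, var=2.)         # a measurement of 5.1 with variance 2 dominates the wide prior
print(f.estimate)               # mean close to 5.1, variance close to 2
f.predict(U=1.0, var=2.)        # move one unit; motion noise adds variance
print(f.estimate)               # mean close to 6.1, variance grows by the motion variance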

83
gaussian.py Normal file

@@ -0,0 +1,83 @@
import numpy as np
import math


def _to_array(x):
    """ returns any of a scalar, matrix, or array as a 1D numpy array
    Example:
    _to_array(3) == array([3])
    """
    try:
        x.shape
        if type(x) != np.ndarray:
            x = np.asarray(x)[0]
        return x
    except:
        return np.array(np.mat(x)).reshape(1)


def _to_cov(x, n):
    """ If x is a scalar, returns a covariance matrix generated from it
    as the identity matrix multiplied by x. The dimension will be nxn.
    If x is already a numpy array then it is returned unchanged.
    """
    try:
        x.shape
        if type(x) != np.ndarray:
            x = np.asarray(x)[0]
        return x
    except:
        return np.eye(n) * x


_two_pi = 2*math.pi


def gaussian(x, mean, var):
    """returns normal distribution for x given a gaussian with the specified
    mean and variance. All must be scalars.
    """
    return math.exp((-0.5*(x-mean)**2)/var) / math.sqrt(_two_pi*var)


def multivariate_gaussian(x, mu, cov):
    """ This is designed to work the same as scipy.stats.multivariate_normal
    which is not available before version 0.14. You may either pass in a
    multivariate set of data:
        multivariate_gaussian (array([1,1]), array([3,4]), eye(2)*1.4)
        multivariate_gaussian (array([1,1,1]), array([3,4,5]), 1.4)
    or unidimensional data:
        multivariate_gaussian(1, 3, 1.4)

    In the multivariate case if cov is a scalar it is interpreted as eye(n)*cov.

    The function gaussian() implements the 1D (univariate) case, and is much
    faster than this function.
    """
    # force all to numpy.array type
    x = _to_array(x)
    mu = _to_array(mu)
    n = mu.size
    cov = _to_cov(cov, n)

    det = np.sqrt(np.prod(np.diag(cov)))
    frac = _two_pi**(-n/2.) * (1./det)
    fprime = (x - mu)**2
    return frac * np.exp(-0.5*np.dot(fprime, 1./np.diag(cov)))


if __name__ == '__main__':
    from scipy.stats import norm

    # test conversion of scalar to covariance matrix
    x = multivariate_gaussian(np.array([1, 1]), np.array([3, 4]), np.eye(2)*1.4)
    x2 = multivariate_gaussian(np.array([1, 1]), np.array([3, 4]), 1.4)
    assert x == x2

    # test univariate case
    rv = norm(loc=1., scale=np.sqrt(2.3))
    x2 = multivariate_gaussian(1.2, 1., 2.3)
    x3 = gaussian(1.2, 1., 2.3)
    assert rv.pdf(1.2) == x2
    assert abs(x2 - x3) < 0.00000001

    print("all tests passed")

57
histogram.py Normal file

@@ -0,0 +1,57 @@
# -*- coding: utf-8 -*-
"""
Created on Tue Apr 22 10:43:38 2014

@author: rlabbe
"""

p = [.2, .2, .2, .2, .2]
world = ['green', 'red', 'red', 'green', 'green']
measurements = ['red', 'green']

pHit = 0.6
pMiss = 0.2
pOvershoot = 0.1
pUndershoot = 0.1
pExact = 0.8


def normalize(p):
    # scale p in place so that it sums to 1
    s = sum(p)
    for i in range(len(p)):
        p[i] = p[i] / s


def sense(p, Z):
    # weight each cell by how well it matches the measurement Z, then renormalize
    q = []
    for i in range(len(p)):
        hit = (world[i] == Z)
        q.append(p[i] * (pHit*hit + pMiss*(1-hit)))
    normalize(q)
    return q


def move(p, U):
    # shift the belief U cells to the right, allowing for over/undershoot
    q = []
    for i in range(len(p)):
        pexact = p[(i-U) % len(p)] * pExact
        pover = p[(i-U-1) % len(p)] * pOvershoot
        punder = p[(i-U+1) % len(p)] * pUndershoot
        q.append(pexact + pover + punder)
    normalize(q)
    return q


if __name__ == "__main__":
    p = sense(p, 'red')
    print(p)

    for z in measurements:
        p = sense(p, z)
        p = move(p, 1)

    print(p)
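Note (not part of the commit): a small sketch of how the sense/move cycle above concentrates the belief, assuming histogram.py is importable; the measurement sequence is illustrative. sense() and move() read the module-level world and probability constants, so importing the two functions is enough.

from histogram import sense, move

p = [.2, .2, .2, .2, .2]            # uniform prior over the five cells
for z in ['red', 'red', 'green']:   # illustrative measurement sequence
    p = sense(p, z)                 # weight each cell by agreement with the measurement
    p = move(p, 1)                  # shift belief one cell right, blurring for over/undershoot
print(p)                            # belief is now concentrated, no longer uniform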

383
kalman filter 1D.ipynb Normal file

File diff suppressed because one or more lines are too long