Let's see how well the maximum likelihood estimate predicts the next value of a toy binary signal. In Python:
from __future__ import division
signal = [1, 0, 1, 1, 0, 0, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 0]

def maximum_likelihood(s, last=None):
    """
    The maximum likelihood estimator selects the parameter value which gives
    the observed data the largest possible probability.
    http://mathworld.wolfram.com/MaximumLikelihood.html
    If `last` is given, only the last `last` values of `s` are used.
    """
    if not last:
        return sum(s) / len(s)
    # otherwise estimate from a sliding window of the most recent values
    window = s[-last:]
    return sum(window) / len(window)

if __name__ == '__main__':
    hits = []
    print('p\tpredicted\tcorrect\tsignal')
    print('-\t---------\t-------\t------')
    for i in range(1, len(signal) - 1):
        # estimate p from everything seen so far, then predict the next value
        p = maximum_likelihood(signal[:i])
        prediction = int(p >= 0.5)
        hits.append(prediction == signal[i])
        print('%0.3f\t%s\t\t%s\t%s' % (
            p, prediction, prediction == signal[i], signal[:i]))
    print('accuracy: %0.3f' % (sum(hits) / len(hits)))
The output:
# p predicted correct signal
# - --------- ------- ------
# 1.000 1 False [1]
# 0.500 1 True [1, 0]
# 0.667 1 True [1, 0, 1]
# 0.750 1 False [1, 0, 1, 1]
# 0.600 1 False [1, 0, 1, 1, 0]
# 0.500 1 True [1, 0, 1, 1, 0, 0]
# 0.571 1 False [1, 0, 1, 1, 0, 0, 1]
# 0.500 1 True [1, 0, 1, 1, 0, 0, 1, 0]
# 0.556 1 True [1, 0, 1, 1, 0, 0, 1, 0, 1]
# 0.600 1 False [1, 0, 1, 1, 0, 0, 1, 0, 1, 1]
# 0.545 1 True [1, 0, 1, 1, 0, 0, 1, 0, 1, 1, 0]
# 0.583 1 True [1, 0, 1, 1, 0, 0, 1, 0, 1, 1, 0, 1]
# 0.615 1 True [1, 0, 1, 1, 0, 0, 1, 0, 1, 1, 0, 1, 1]
# 0.643 1 True [1, 0, 1, 1, 0, 0, 1, 0, 1, 1, 0, 1, 1, 1]
# 0.667 1 True [1, 0, 1, 1, 0, 0, 1, 0, 1, 1, 0, 1, 1, 1, 1]
# 0.688 1 False [1, 0, 1, 1, 0, 0, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1]
# 0.647 1 True [1, 0, 1, 1, 0, 0, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 0]
# 0.667 1 False [1, 0, 1, 1, 0, 0, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1]
# 0.632 1 True [1, 0, 1, 1, 0, 0, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0]
# 0.650 1 True [1, 0, 1, 1, 0, 0, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1]
# accuracy: 0.650
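
To see where a single row comes from, here is the third data row reproduced by hand (a quick sanity check, not part of the original script; it reuses the `maximum_likelihood` function and `signal` list defined above):

# after observing [1, 0, 1] the estimate is 2/3 ~= 0.667, so we predict 1;
# the actual next value signal[3] is also 1, hence the row reads "True"
p = maximum_likelihood(signal[:3])
prediction = int(p >= 0.5)
print('%0.3f %s %s' % (p, prediction, prediction == signal[3]))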
So even this naive estimator gets the next value right in roughly 2 cases out of 3, an accuracy of about 0.7.
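
Note that the `last` parameter is never exercised by the script above. As a rough sketch of how it could be used, the same evaluation loop can be re-run with a sliding window; the window size of 3 below is an arbitrary choice for illustration, not something taken from the original:

# illustration only: repeat the evaluation using just the 3 most recent
# values instead of the full history observed so far
window_hits = []
for i in range(1, len(signal) - 1):
    p = maximum_likelihood(signal[:i], last=3)
    prediction = int(p >= 0.5)
    window_hits.append(prediction == signal[i])
print('windowed accuracy: %0.3f' % (sum(window_hits) / len(window_hits)))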
The full code is available as a gist.