import math
from collections import Counter


def Entropy(text):
    """Return the Shannon entropy of *text* in bits per character.

    Computes -sum(p * log2(p)) over the relative frequency p of each
    distinct character in *text*.

    Args:
        text: Any sequence of hashable symbols (typically a string).

    Returns:
        float: Entropy in bits per symbol; 0.0 for empty input
        (the original raised ZeroDivisionError on "").
    """
    if not text:
        return 0.0
    length = len(text)
    # Counter replaces the original try/except-based frequency tally;
    # math.log2 replaces the hand-rolled log(x)/log(2) lambda.
    return -sum(
        (count / length) * math.log2(count / length)
        for count in Counter(text).values()
    )


if __name__ == "__main__":
    # Interactive loop: print the entropy of each line typed at the prompt.
    # Guarded so importing this module no longer blocks on stdin.
    while True:
        print(Entropy(input(">>>")))