# Saved from https://towardsdatascience.com/information-entropy-c037a90de58f
import numpy as np


def entropy(dist):
    """Return the Shannon entropy of *dist* in bits.

    Parameters
    ----------
    dist : sequence of non-negative numbers
        Unnormalized weights or counts; each element is divided by
        ``sum(dist)`` to form a probability.

    Returns
    -------
    float
        Entropy in bits (natural-log sum divided by ``log(2)``, as in
        the original). Returns 0.0 for an empty or all-zero *dist*.
    """
    # Hoisted out of the loop: the original recomputed sum(dist) for
    # every element, making the function accidentally O(n^2).
    total = sum(dist)
    if total == 0:
        # Empty/all-zero input: the original raised ZeroDivisionError
        # for e.g. [0, 0]; an empty distribution carries no information.
        return 0.0
    acc = 0.0
    for p in dist:
        r = p / total
        # By convention 0 * log(0) == 0, so zero-probability terms are
        # simply skipped (the original added a literal 0 instead).
        if r > 0:
            acc -= r * np.log(r)
    return acc / np.log(2)