Open
Description
import numpy as np
# Calculate entropy of random variable x
def entropy(x):
    """Return the Shannon entropy (in bits) of the discrete sample ``x``.

    Parameters
    ----------
    x : array-like
        Non-empty 1-D sequence of observations of a discrete variable.

    Returns
    -------
    float
        H(X) = -sum_v p(v) * log2(p(v)), estimated from the empirical
        frequency of each distinct value.

    Raises
    ------
    ValueError
        If ``x`` is empty.
    """
    # Validate with an explicit check: the original used ``assert`` (which is
    # stripped under ``python -O``) and even repeated the same assertion twice.
    if len(x) == 0:
        raise ValueError("Input array must not be empty.")
    _, counts = np.unique(x, return_counts=True)
    p = counts / len(x)
    return -np.sum(p * np.log2(p))
# Calculate conditional entropy of x given y
def conditional_entropy(x, y):
    """Return ``(H(X|Y), H(Y))`` in bits for paired discrete samples.

    Parameters
    ----------
    x, y : array-like
        Non-empty 1-D sequences of equal length; ``x[i]`` is observed
        jointly with ``y[i]``.

    Returns
    -------
    tuple of float
        ``(hy_x, hy)`` where ``hy_x = H(X|Y) = sum_v p(Y=v) * H(X|Y=v)``
        and ``hy`` is the marginal entropy H(Y).

    Raises
    ------
    ValueError
        If the inputs differ in length or are empty.
    """
    # Explicit validation instead of ``assert`` (asserts vanish under -O).
    if len(x) != len(y) or len(x) == 0:
        raise ValueError("Input arrays must have the same length, and must not be empty!")
    # Boolean-mask indexing below needs ndarrays; accept any array-like.
    x = np.asarray(x)
    y = np.asarray(y)
    n = len(x)

    def _h(sample):
        # Shannon entropy (bits) of one discrete sample.
        _, c = np.unique(sample, return_counts=True)
        p = c / len(sample)
        return -np.sum(p * np.log2(p))

    # Single pass over y gives both the marginal entropy and the group
    # weights (the original recomputed np.unique and the counts in the loop).
    y_vals, y_counts = np.unique(y, return_counts=True)
    hy = -np.sum((y_counts / n) * np.log2(y_counts / n))
    hy_x = sum((cnt / n) * _h(x[y == val]) for val, cnt in zip(y_vals, y_counts))
    return hy_x, hy
# Calculate mutual information between x and y given condition z
def information_mutual_conditional(x, y, z):
    """Return the conditional mutual information I(X;Y|Z) in bits.

    Uses the identity I(X;Y|Z) = H(X|Z) + H(Y|Z) - H(X,Y|Z), evaluated
    stratum by stratum:  sum_v p(Z=v) * [H(X|v) + H(Y|v) - H(X,Y|v)].
    (The original code subtracted H(Z) instead of the conditional joint
    entropy H(X,Y|Z), which is not a mutual-information quantity.)

    Parameters
    ----------
    x, y, z : array-like
        Non-empty 1-D sequences of equal length; element ``i`` of each is
        one joint observation.

    Returns
    -------
    float
        The estimated I(X;Y|Z) in bits (non-negative up to float error).

    Raises
    ------
    ValueError
        If the inputs differ in length or are empty.
    """
    # Explicit validation instead of ``assert`` (asserts vanish under -O).
    if not (len(x) == len(y) == len(z)) or len(x) == 0:
        raise ValueError("Input arrays must have the same length, and must not be empty!")
    x = np.asarray(x)
    y = np.asarray(y)
    z = np.asarray(z)
    n = len(x)

    def _h(*cols):
        # Joint Shannon entropy (bits) of one or more aligned columns:
        # unique columns of the stacked 2-D array are the joint outcomes.
        _, c = np.unique(np.stack(cols), axis=1, return_counts=True)
        p = c / c.sum()
        return -np.sum(p * np.log2(p))

    mi = 0.0
    for z_val in np.unique(z):
        mask = z == z_val
        weight = np.count_nonzero(mask) / n
        # Per-stratum mutual information, weighted by p(Z = z_val).
        mi += weight * (_h(x[mask]) + _h(y[mask]) - _h(x[mask], y[mask]))
    return mi
# Calculate mutual information between x and y
def information_mutual(x, y):
    """Return the mutual information I(X;Y) in bits for paired samples.

    Uses the identity I(X;Y) = H(X) + H(Y) - H(X,Y) on the empirical
    marginal and joint distributions.  (The original code had two defects:
    ``h_y`` was computed from ``x``, and the joint term was obtained by
    calling information_mutual_conditional with an empty ``z``, which
    always raised a length-mismatch error.)

    Parameters
    ----------
    x, y : array-like
        Non-empty 1-D sequences of equal length; ``x[i]`` is observed
        jointly with ``y[i]``.

    Returns
    -------
    float
        The estimated I(X;Y) in bits (non-negative up to float error).

    Raises
    ------
    ValueError
        If the inputs differ in length or are empty.
    """
    # Explicit validation instead of ``assert`` (asserts vanish under -O).
    if len(x) != len(y) or len(x) == 0:
        raise ValueError("Input arrays must have the same length and must not be empty!")
    x = np.asarray(x)
    y = np.asarray(y)
    n = len(x)

    def _h(counts):
        # Entropy (bits) from a vector of outcome counts summing to n.
        p = counts / n
        return -np.sum(p * np.log2(p))

    _, cx = np.unique(x, return_counts=True)
    _, cy = np.unique(y, return_counts=True)
    # Joint outcomes are the unique columns of the stacked 2xN array.
    _, cxy = np.unique(np.stack((x, y)), axis=1, return_counts=True)
    return _h(cx) + _h(cy) - _h(cxy)
Metadata
Assignees
Labels
No labels