Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
- # %%
- import numpy as np
- from sklearn.model_selection import train_test_split
- from sklearn.linear_model import LogisticRegression
D = 20  # intrinsic dimension of the underlying data
repeats = 20  # how many times each coordinate is duplicated
d = D * repeats  # ambient data dimension seen by the network
layers = 10  # depth of the random residual network
norm = 1  # overall scale applied to the random weights

# One (W_in, W_out) pair per residual layer. The 1/sqrt(d) factor keeps the
# typical singular values of each Gaussian matrix O(1) as d grows.
weight_matrix_in = [norm * np.random.randn(d, d) / np.sqrt(d) for _ in range(layers)]
weight_matrix_out = [norm * np.random.randn(d, d) / np.sqrt(d) for _ in range(layers)]


def activation(x):
    """Elementwise ReLU: max(x, 0)."""
    return np.maximum(x, 0)
def forward(x):
    """Run the columns of ``x`` through the random residual network.

    Each layer adds W_out @ relu(W_in @ h) onto the running state, so the
    identity path is preserved at every depth.
    """
    h = x
    for w_in, w_out in zip(weight_matrix_in, weight_matrix_out):
        h = h + w_out @ activation(w_in @ h)
    return h
N = 10000  # number of sample points

# Sample random corners of the hypercube {-1, 1}^D.
X = np.random.choice([-1, 1], size=(N, D))

# %%
# Duplicate each of the D coordinates `repeats` times to reach dimension d,
# then push every point through the random residual network (points as columns).
x_tiled = np.tile(X, (1, repeats))
Z = forward(x_tiled.T).T
- # %%
from matplotlib import pyplot as plt

# Probe whether a linear readout on the network's output features can still
# recover individual base coordinates.
n = 30  # number of random probes
accs = []
for _ in range(n):
    coord = np.random.randint(D)
    y = np.sign(X[:, coord])
    y_train, y_val, z_train, z_val = train_test_split(y, Z, test_size=0.2)
    clf = LogisticRegression(max_iter=1000)
    clf.fit(z_train, y_train)
    accs.append(clf.score(z_val, y_val))
plt.hist(accs, bins=20, range=(0, 1))
plt.axvline(0.5, color="black")
plt.title("base features")
# %%
# See if a linear readout can classify XORs of pairs of base coordinates.
# For +/-1 features the XOR is their product, which is NOT linearly
# separable in the raw inputs — success here means the network created it.
plt.figure()  # fresh figure: otherwise this histogram draws over the previous one when run as a script
accs = []
for _ in range(n):
    # Pick two distinct base coordinates for the XOR label.
    c1, c2 = np.random.choice(D, size=2, replace=False)
    labels = np.sign(X[:, c1] * X[:, c2])
    yt, yv, zt, zv = train_test_split(labels, Z, test_size=0.2)
    model = LogisticRegression(max_iter=1000)
    model.fit(zt, yt)
    accs.append(model.score(zv, yv))
plt.hist(accs, bins=20, range=(0, 1))
plt.axvline(0.5, color="black")
plt.title("xor features")
# %%
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement