Initial commit

This commit is contained in:
Aladdin Persson
2021-01-30 21:49:15 +01:00
commit 65b8c80495
432 changed files with 1290844 additions and 0 deletions

View File

@@ -0,0 +1,90 @@
1.1107, -2.1079
-0.5498, 0.0943
-0.0382, 1.8829
0.0555, -0.6139
0.5870, -1.2067
0.5453, 0.2509
-0.3927, -0.6220
-1.1905, -1.8785
-0.4240, 0.7772
-0.7139, 1.5846
-0.8883, 2.1408
-0.6922, 0.0993
1.4350, 1.2334
-0.7576, 0.7386
-1.1144, -1.7059
0.6612, -1.7296
-2.1381, -0.0600
1.3857, 1.2178
-1.4951, 0.0373
0.8029, 0.9739
1.5607, 1.5862
0.8563, -1.4245
0.0397, -1.3799
1.2331, 1.7421
-2.0015, 0.8355
-0.3428, -0.4780
-0.8891, 1.2634
0.3832, -0.1189
0.4172, 1.0132
-0.8695, -0.7947
2.9737, 3.6438
3.7680, 1.8649
0.1166, 0.9435
0.6896, 3.9160
1.2234, 2.9899
2.3009, 0.4150
3.7693, 3.8027
1.9450, 3.4208
0.9290, 3.3611
5.0027, 2.7870
1.0101, 1.8737
2.0751, 2.2628
1.9113, 3.6777
2.3127, 3.9130
1.9392, 2.3976
3.1218, 2.5495
1.7032, 1.1509
0.4212, 3.5322
2.7686, 0.9402
2.1696, 2.9285
0.3380, 2.0947
3.6886, 0.4054
2.6315, 3.1962
-0.5332, 3.1421
0.3380, 3.0801
1.4030, 1.1841
2.8739, 2.7777
1.1254, 3.2404
0.0988, 1.9522
0.3688, 2.8904
1.4758, -1.6387
1.9289, -1.8191
2.5741, -1.3213
2.1917, -1.2852
0.8358, -2.3349
2.6863, -1.8834
3.1102, -0.4854
3.7073, -0.6466
3.6394, -0.4097
0.5365, -3.6555
2.9295, -0.3819
0.8168, -3.1133
1.3432, -1.7717
1.1039, -2.2261
1.3754, -2.2236
0.6757, -2.5379
-0.2029, -3.8420
2.4210, -1.9788
1.0335, -2.6042
0.9638, -2.9449
-0.8198, -5.4449
1.9552, -1.5530
0.3505, -3.1887
2.4943, -1.8116
1.9761, -1.0664
0.5994, -3.0513
2.2076, -1.6728
1.9941, -1.8826
1.7487, -2.9644
1.4160, -2.4234

View File

@@ -0,0 +1,90 @@
1
1
1
1
1
1
1
1
1
1
1
1
1
1
1
1
1
1
1
1
1
1
1
1
1
1
1
1
1
1
2
2
2
2
2
2
2
2
2
2
2
2
2
2
2
2
2
2
2
2
2
2
2
2
2
2
2
2
2
2
3
3
3
3
3
3
3
3
3
3
3
3
3
3
3
3
3
3
3
3
3
3
3
3
3
3
3
3
3
3

View File

@@ -0,0 +1,67 @@
"""
Naive Bayes Classifier Implementation from scratch
To run the code structure the code in the following way:
X be size: (num_training_examples, num_features)
y be size: (num_examples, )
Where the classes are 0, 1, 2, etc. Then an example run looks like:
NB = NaiveBayes(X, y)
NB.fit(X)
predictions = NB.predict(X)
Programmed by Aladdin Persson <aladdin.persson at hotmail dot com>
* 2020-04-21 Initial coding
"""
import numpy as np
class NaiveBayes:
    """Gaussian Naive Bayes classifier implemented from scratch.

    One univariate Gaussian is fit per feature per class (features are
    treated as conditionally independent). Class labels must be the
    integers 0, 1, ..., num_classes - 1.

    Typical use:
        NB = NaiveBayes(X, y)
        NB.fit(X)
        predictions = NB.predict(X)
    """

    def __init__(self, X, y):
        """Record dataset dimensions and training labels.

        Args:
            X: array of shape (num_examples, num_features).
            y: array of shape (num_examples,) with integer class labels.
        """
        self.num_examples, self.num_features = X.shape
        self.num_classes = len(np.unique(y))
        # Bug fix: keep the labels on the instance so fit() no longer
        # depends on a module-level variable named `y`.
        self.y = y
        self.eps = 1e-6  # variance smoothing: avoids log(0) / divide-by-zero

    def fit(self, X, y=None):
        """Estimate per-class mean, variance and prior from training data.

        Args:
            X: training data, shape (num_examples, num_features).
            y: labels, shape (num_examples,). Defaults to the labels given
               to __init__ (the original code read a global `y` instead).
        """
        if y is None:
            y = self.y
        self.classes_mean = {}
        self.classes_variance = {}
        self.classes_prior = {}
        for c in range(self.num_classes):
            X_c = X[y == c]  # rows belonging to class c
            self.classes_mean[str(c)] = np.mean(X_c, axis=0)
            self.classes_variance[str(c)] = np.var(X_c, axis=0)
            # Prior = fraction of training examples in class c.
            self.classes_prior[str(c)] = X_c.shape[0] / X.shape[0]

    def predict(self, X):
        """Return the predicted class index for each row of X.

        Args:
            X: array of shape (n, num_features); n may differ from the
               training-set size.

        Returns:
            Integer array of shape (n,) with the argmax class per row.
        """
        # Bug fix: size by X.shape[0], not self.num_examples, so that
        # prediction works on inputs of any length.
        probs = np.zeros((X.shape[0], self.num_classes))
        for c in range(self.num_classes):
            prior = self.classes_prior[str(c)]
            probs_c = self.density_function(
                X, self.classes_mean[str(c)], self.classes_variance[str(c)]
            )
            # Unnormalized log-posterior: log p(x|c) + log p(c).
            probs[:, c] = probs_c + np.log(prior)
        return np.argmax(probs, 1)

    def density_function(self, x, mean, sigma):
        """Return the log of the Gaussian density for each row of x.

        Args:
            x: array of shape (n, num_features).
            mean: per-feature means for one class, shape (num_features,).
            sigma: per-feature variances for one class, shape (num_features,).

        Returns:
            Array of shape (n,) with log-density values (despite the
            method's name, these are logs, not raw densities).
        """
        # Log-normalization constant of the diagonal Gaussian.
        const = -self.num_features / 2 * np.log(2 * np.pi) - 0.5 * np.sum(
            np.log(sigma + self.eps)
        )
        # Quadratic term (Mahalanobis distance with diagonal covariance).
        probs = 0.5 * np.sum(np.power(x - mean, 2) / (sigma + self.eps), 1)
        return const - probs
if __name__ == "__main__":
    # Demo: data.txt holds comma-separated 2-D points; targets.txt holds
    # class labels which are shifted down by 1 so classes start at 0.
    X = np.loadtxt("example_data/data.txt", delimiter=",")
    y = np.loadtxt("example_data/targets.txt") - 1
    # NOTE(review): fit(X) takes no labels argument — it appears to rely on
    # this module-level `y` being in scope; renaming `y` here would break
    # fit(). Verify before refactoring.
    NB = NaiveBayes(X, y)
    NB.fit(X)
    y_pred = NB.predict(X)
    # Training-set accuracy: fraction of examples predicted correctly.
    print(f"Accuracy: {sum(y_pred==y)/X.shape[0]}")