# Linear SVM classifier trained by stochastic subgradient descent on the hinge loss.
import numpy as np
class SimpleSVM:
    """Linear SVM trained with stochastic subgradient descent on the hinge loss.

    Decision function: sign(w . x - b). Training labels are internally mapped
    to {-1, +1} (any label <= 0 becomes -1).
    """

    def __init__(self, learning_rate=0.001, lambda_param=0.01, n_iters=1000):
        """
        Args:
            learning_rate: step size for each subgradient update.
            lambda_param: L2 regularization strength.
            n_iters: number of full passes over the training set.
        """
        self.lr = learning_rate
        self.lambda_param = lambda_param
        self.n_iters = n_iters
        self.weights = None  # set by fit(); shape (n_features,)
        self.bias = None     # set by fit(); scalar

    def fit(self, X, y):
        """Fit the SVM on training data.

        Args:
            X: array-like of shape (n_samples, n_features).
            y: array-like of shape (n_samples,); labels <= 0 are treated as
               the negative class, anything else as positive.
        """
        # Coerce so that plain Python lists also work (X.shape below
        # requires an ndarray); no-op for inputs that are already arrays.
        X = np.asarray(X, dtype=float)
        y = np.asarray(y)

        n_samples, n_features = X.shape
        # Map labels to {-1, +1} as required by the hinge-loss formulation.
        y_ = np.where(y <= 0, -1, 1)

        self.weights = np.zeros(n_features)
        self.bias = 0

        # Per-sample subgradient step on:
        #   lambda * ||w||^2 + max(0, 1 - y_i * (w . x_i - b))
        # If the sample lies on the correct side of the margin, only the
        # regularization term contributes to the gradient; otherwise the
        # hinge term contributes to both the weights and the bias.
        for _ in range(self.n_iters):
            for idx, x_i in enumerate(X):
                condition = y_[idx] * (np.dot(x_i, self.weights) - self.bias) >= 1
                if condition:
                    self.weights -= self.lr * (2 * self.lambda_param * self.weights)
                else:
                    self.weights -= self.lr * (
                        2 * self.lambda_param * self.weights - y_[idx] * x_i
                    )
                    self.bias -= self.lr * y_[idx]

    def predict(self, X):
        """Return class predictions for each row of X.

        Args:
            X: array-like of shape (n_samples, n_features).

        Returns:
            ndarray of shape (n_samples,) with values in {-1.0, 0.0, +1.0}.
            Note: np.sign yields 0.0 for points exactly on the boundary.
        """
        X = np.asarray(X, dtype=float)
        linear_output = np.dot(X, self.weights) - self.bias
        return np.sign(linear_output)
# Last update: March 21, 2024