## 2017年2月22日 Support Vector Machine Classifier 原

airxiechao

Intuitively, to classify which class a point belongs to, the SVM classifier compares the point against the characteristic points (support vectors) of each class after the points have been transformed into a high-dimensional space. The decision boundary is a maximum-margin hyperplane.

```python
from __future__ import division
import numpy as np
import cvxopt
from sklearn.datasets import make_gaussian_quantiles
from sklearn.model_selection import train_test_split

# Two-class toy dataset: 1000 samples, 5 features, classes split by
# Gaussian quantiles; random_state fixed for reproducible data.
X, y = make_gaussian_quantiles(n_samples=1000, n_features=5,n_classes=2, random_state=1)
# The dual QP below needs labels in {-1, +1} as floats (cvxopt requires
# float matrices for the equality constraint A).
y[y==0] = -1
y = y.astype(float)
# 50/50 train/test split (no random_state here, so the split varies per run).
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.5)

# adapted from https://github.com/ajtulloch/svmpy
class Kernel(object):
    """Factory of kernel functions k(x, y) for SVMs.

    Each method returns a callable taking two feature vectors and
    returning a scalar similarity.
    See http://en.wikipedia.org/wiki/Support_vector_machine
    """

    @staticmethod
    def linear():
        # k(x, y) = <x, y>
        return lambda x, y: np.inner(x, y)

    @staticmethod
    def gaussian(sigma):
        # Gaussian (RBF) kernel: k(x, y) = exp(-||x - y||^2 / (2 sigma^2)).
        # NOTE(review): the svmpy original wrapped the exponent in an extra
        # np.sqrt(), which is not the standard Gaussian kernel; removed here.
        return lambda x, y: np.exp(
            -(np.linalg.norm(x - y) ** 2) / (2 * sigma ** 2))

    @staticmethod
    def _polykernel(dimension, offset):
        # Shared implementation for both polynomial kernels.
        return lambda x, y: (offset + np.inner(x, y)) ** dimension

    @classmethod
    def inhomogenous_polynomial(cls, dimension):
        # (1 + <x, y>)^d
        return cls._polykernel(dimension=dimension, offset=1.0)

    @classmethod
    def homogenous_polynomial(cls, dimension):
        # <x, y>^d
        return cls._polykernel(dimension=dimension, offset=0.0)

    @staticmethod
    def hyperbolic_tangent(kappa, c):
        # tanh(kappa * <x, y> + c)
        return lambda x, y: np.tanh(kappa * np.dot(x, y) + c)

    @staticmethod
    def radial_basis(gamma):
        # Laplacian RBF: exp(-gamma * ||x - y||).
        # The `def` line for this method was lost in the original paste
        # (a bare `return` sat under @staticmethod); restored here.
        return lambda x, y: np.exp(-gamma * np.linalg.norm(np.subtract(x, y)))

class SVMTrainer(object):
    """Trains a soft-margin kernel SVM by solving the dual QP with cvxopt.

    The paste of this listing lost all indentation; the structure is
    restored here with the logic kept verbatim.
    """

    def __init__(self, kernel, c):
        # kernel: callable k(x, y) -> scalar similarity.
        # c: soft-margin penalty (upper bound on each multiplier).
        self._kernel = kernel
        self._c = c

    def train(self, X, y):
        """
        Given the training features X with labels y (floats in {-1, +1}),
        returns an SVMPredictor representing the trained SVM.
        """
        lagrange_multipliers = self._compute_multipliers(X, y)
        return self._construct_predictor(X, y, lagrange_multipliers)

    def _gram_matrix(self, X):
        # K[i, j] = k(x_i, x_j); O(n^2) kernel evaluations.
        n_samples, n_features = X.shape
        K = np.zeros((n_samples, n_samples))

        for i, x_i in enumerate(X):
            for j, x_j in enumerate(X):
                K[i, j] = self._kernel(x_i, x_j)
        return K

    def _construct_predictor(self, X, y, lagrange_multipliers):
        """Build an SVMPredictor from the QP solution.

        Only samples whose multiplier exceeds a small threshold are kept
        (the support vectors); the rest are numerically zero.
        """
        MIN_SUPPORT_VECTOR_MULTIPLIER = 1e-5
        support_vector_indices = \
            lagrange_multipliers > MIN_SUPPORT_VECTOR_MULTIPLIER

        support_multipliers = lagrange_multipliers[support_vector_indices]
        support_vectors = X[support_vector_indices]
        support_vector_labels = y[support_vector_indices]

        # http://www.cs.cmu.edu/~guestrin/Class/10701-S07/Slides/kernels.pdf
        # bias = y_k - sum z_i y_i K(x_k, x_i)
        # Predict each support vector with a zero-bias model and average
        # the residuals y_k - f(x_k) to estimate the bias.
        bias = np.mean(
            [y_k - SVMPredictor(
                kernel=self._kernel,
                bias=0.0,
                weights=support_multipliers,
                support_vectors=support_vectors,
                support_vector_labels=support_vector_labels).predict(x_k)
             for (y_k, x_k) in zip(support_vector_labels, support_vectors)])

        return SVMPredictor(
            kernel=self._kernel,
            bias=bias,
            weights=support_multipliers,
            support_vectors=support_vectors,
            support_vector_labels=support_vector_labels)

    def _compute_multipliers(self, X, y):
        """Solve the dual QP and return the Lagrange multipliers as 1-D array.

        cvxopt solves
            min 1/2 x^T P x + q^T x
            s.t.  Gx <= h
                  Ax = b
        with P_ij = y_i y_j K(x_i, x_j).
        """
        n_samples, n_features = X.shape

        K = self._gram_matrix(X)

        P = cvxopt.matrix(np.outer(y, y) * K)
        q = cvxopt.matrix(-1 * np.ones(n_samples))

        # -a_i <= 0
        G_std = cvxopt.matrix(np.diag(np.ones(n_samples) * -1))
        h_std = cvxopt.matrix(np.zeros(n_samples))

        # a_i <= c  (soft-margin box constraint)
        G_slack = cvxopt.matrix(np.diag(np.ones(n_samples)))
        h_slack = cvxopt.matrix(np.ones(n_samples) * self._c)

        G = cvxopt.matrix(np.vstack((G_std, G_slack)))
        h = cvxopt.matrix(np.vstack((h_std, h_slack)))

        # Equality constraint sum_i a_i y_i = 0; y must be float for cvxopt.
        A = cvxopt.matrix(y, (1, n_samples))
        b = cvxopt.matrix(0.0)

        solution = cvxopt.solvers.qp(P, q, G, h, A, b)

        # Flatten the cvxopt column matrix into a 1-D numpy array.
        return np.ravel(solution['x'])

class SVMPredictor(object):
    """Kernel SVM decision function built from support vectors.

    Evaluates sign(bias + sum_i z_i y_i k(x_i, x)) where z_i are the
    Lagrange multipliers (weights), x_i the support vectors and y_i their
    labels.  The paste of this listing lost all indentation; restored here.
    """

    def __init__(self,
                 kernel,
                 bias,
                 weights,
                 support_vectors,
                 support_vector_labels):
        self._kernel = kernel
        self._bias = bias
        self._weights = weights
        self._support_vectors = support_vectors
        self._support_vector_labels = support_vector_labels

    def predict(self, x):
        """
        Computes the SVM prediction on the given features x.

        Returns the class label (+1.0 / -1.0) as a Python float
        (np.sign(...).item(); 0.0 only if the point lies exactly on the
        decision boundary).
        """
        result = self._bias
        for z_i, x_i, y_i in zip(self._weights,
                                 self._support_vectors,
                                 self._support_vector_labels):
            result += z_i * y_i * self._kernel(x_i, x)
        return np.sign(result).item()

# BUG in the original post: `trainer` was used without ever being
# constructed (NameError).  Build one here.  NOTE(review): the post does
# not say which kernel/C produced the reported 0.832 score; the Gaussian
# kernel with sigma=1, C=1 is a guess -- confirm against the original run.
trainer = SVMTrainer(kernel=Kernel.gaussian(1.0), c=1.0)
predictor = trainer.train(X_train, y_train)

# Accuracy on the held-out half (element-wise comparison needs an array).
ps = np.array([predictor.predict(x) for x in X_test])
print('score:', np.count_nonzero(ps == y_test) / y_test.shape[0])
#     pcost       dcost       gap    pres   dres
# 0: -1.4943e+02 -1.1549e+02  2e+03  2e+01  1e-15
# 1: -4.9844e+01 -9.0198e+01  6e+01  2e-01  1e-15
# 2: -4.6168e+01 -4.9145e+01  3e+00  8e-16  1e-15
# 3: -4.7298e+01 -4.7616e+01  3e-01  2e-16  8e-16
# 4: -4.7302e+01 -4.7325e+01  2e-02  2e-16  7e-16
# 5: -4.7308e+01 -4.7315e+01  7e-03  1e-15  8e-16
# 6: -4.7310e+01 -4.7312e+01  2e-03  8e-16  8e-16
# 7: -4.7310e+01 -4.7310e+01  3e-04  1e-15  8e-16
# 8: -4.7310e+01 -4.7310e+01  3e-05  1e-15  8e-16
#Optimal solution found.
#score: 0.832
```

### airxiechao

xinzhangyanxiang
2013/08/05
0
0

xinzhangyanxiang
2013/08/06
0
0

2017年进九时间 2017进九的日子是12月22日，其实也就是“冬至日”。天文专家介绍说，“九九”是我国北方特别是黄河中下游地区更为适用的一种杂节气。 2017~2018年数九时间表 一九 2017年12月...

anlve
2018/01/27
0
0

There are many Python machine learning resources freely available online. Where to begin? How to proceed? Go from zero to Python machine learning hero in 7 steps! By Matthew Mayo.

openthings
2016/03/09
265
0
Day186 | 遇见GCS（七）

7月份说超级节点计划马上开启，不过一个多月过去，还没有消息。 新闻：http://blockchain.game/news.html 开发者社区：http://developer.blockchain.game/ 新版官网上线：http://blockchain...

2018/08/21
0
0

shzwork

6
0
object 类中有哪些方法？

getClass(): 获取运行时类的对象 equals()：判断其他对象是否与此对象相等 hashcode()：返回该对象的哈希码值 toString()：返回该对象的字符串表示 clone()： 创建并返此对象的一个副本 wait...

happywe

6
0
Docker容器实战(七) - 容器中进程视野下的文件系统

JavaEdge

8
0

7
0

MtrS

5
0