Monday 9 September 2019

python machine learning 5 SVM

support vector machine: a method to predict which group an unknown sample belongs to
by finding a plane (hyperplane) that has the largest margin to both groups

if the groups can't be separated, transform the data to a higher dimension

2D -> 3D

import sklearn
from sklearn import datasets
from sklearn import svm
from sklearn import metrics
# Explicit submodule import: `import sklearn` alone does not guarantee that
# `sklearn.model_selection` is available as an attribute.
from sklearn.model_selection import train_test_split
from sklearn.neighbors import KNeighborsClassifier

# Load the Wisconsin breast cancer dataset: 569 samples, 30 numeric
# features each, with a binary target (0 = malignant, 1 = benign).
cancer = datasets.load_breast_cancer()

print(cancer.feature_names)
print(cancer.target_names)

x = cancer.data    # feature matrix, shape (n_samples, 30)
y = cancer.target  # class labels, 0 or 1

# Hold out 20% of the samples as an unseen test set.
x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=0.2)

print(x_train, y_train)

# Human-readable names for the numeric labels (index 0 and 1).
classes = ['malignant', 'benign']

# kernel: the function used to construct the separating hyperplane.
# C is the regularization strength: a SMALLER C gives a softer (wider) margin
# that tolerates more misclassified points; a LARGER C gives a tighter margin.
# (C must be strictly positive; C = 0 is invalid.)
clf = svm.SVC(kernel='linear', C=1)

# Alternative classifier for comparison (see the KNN log below):
# clf = KNeighborsClassifier(n_neighbors=9)

clf.fit(x_train, y_train)

y_pred = clf.predict(x_test)

# Fraction of test samples predicted correctly.
acc = metrics.accuracy_score(y_test, y_pred)

print(acc)

----------------------------------------
#logs
#using SVM
#data header | sample data value | sample target value | accuracy

['mean radius' 'mean texture' 'mean perimeter' 'mean area'
 'mean smoothness' 'mean compactness' 'mean concavity'
 'mean concave points' 'mean symmetry' 'mean fractal dimension'
 'radius error' 'texture error' 'perimeter error' 'area error'
 'smoothness error' 'compactness error' 'concavity error'
 'concave points error' 'symmetry error' 'fractal dimension error'
 'worst radius' 'worst texture' 'worst perimeter' 'worst area'
 'worst smoothness' 'worst compactness' 'worst concavity'
 'worst concave points' 'worst symmetry' 'worst fractal dimension']
['malignant' 'benign']
[[1.082e+01 2.421e+01 6.889e+01 ... 3.264e-02 3.059e-01 7.626e-02]
 [1.881e+01 1.998e+01 1.209e+02 ... 1.294e-01 2.567e-01 5.737e-02]
 [1.900e+01 1.891e+01 1.234e+02 ... 1.218e-01 2.841e-01 6.541e-02]
 ...
 [1.522e+01 3.062e+01 1.034e+02 ... 2.356e-01 4.089e-01 1.409e-01]
 [1.427e+01 2.255e+01 9.377e+01 ... 1.362e-01 2.698e-01 8.351e-02]
 [1.537e+01 2.276e+01 1.002e+02 ... 1.476e-01 2.556e-01 6.828e-02]] [1 0 0 1 1 0 0 1 1 0 1 0 1 1 0 0 0 0 1 0 1 1 1 1 1 1 1 1 0 1 0 1 0 0 1 1 1
 0 1 1 0 0 1 0 0 0 1 1 1 1 1 1 1 0 1 1 0 1 1 1 1 0 0 0 1 1 1 0 0 0 1 1 0 0
 0 0 1 1 1 0 1 0 0 1 1 0 1 1 0 1 0 0 0 1 1 1 1 1 0 1 1 0 1 1 1 1 1 1 1 1 0
 1 1 0 1 1 0 1 1 1 0 0 0 0 1 0 0 0 0 1 1 0 1 1 1 1 0 0 0 1 1 0 1 1 0 1 1 1
 1 1 1 0 0 0 0 1 1 1 1 1 0 1 1 1 0 0 1 0 1 0 1 0 1 0 1 0 0 0 1 1 1 1 1 1 1
 1 0 1 1 1 0 1 0 1 1 0 1 1 1 0 0 1 0 1 1 1 0 0 0 1 1 1 1 1 1 1 0 0 1 1 1 1
 1 0 0 1 1 0 1 0 0 1 0 0 0 0 1 1 0 0 0 1 0 0 0 1 1 1 0 1 1 1 0 1 1 0 1 1 1
 1 0 1 1 0 0 0 1 1 1 1 1 0 1 0 1 1 0 1 0 1 1 0 0 1 0 0 1 0 1 1 1 0 1 1 1 1
 1 1 0 1 0 0 1 0 0 1 1 1 0 1 1 1 1 1 1 1 1 1 0 0 1 1 1 1 0 1 0 1 0 1 1 0 0
 1 0 1 0 1 1 1 1 0 0 1 0 1 0 1 0 1 1 1 0 1 1 1 0 1 1 1 1 1 0 0 1 1 1 1 1 0
 1 1 1 0 0 1 1 1 0 0 1 1 1 1 1 0 0 1 1 0 1 1 0 0 0 0 0 0 1 0 0 1 1 0 1 1 0
 1 0 0 1 1 0 1 0 1 0 1 1 1 1 1 0 0 0 1 1 0 1 1 1 1 1 1 1 1 0 0 0 1 1 1 1 1
 1 0 0 1 0 1 1 1 0 0 0]
0.9736842105263158

--------------------------------------
#using KNN
...
0.9210526315789473

for high-dimensional datasets, SVM usually performs better than KNN

No comments:

Post a Comment