support_vector_machines.py increase error tolerance to suppress convergence warnings (#1929)

* Update support_vector_machines.py

* Update support_vector_machines.py

Co-authored-by: Christian Clauss <cclauss@me.com>
QuantumNovice 2020-05-03 00:19:45 +05:00 committed by GitHub
parent a859934105
commit 9bb57fbbfe

@@ -1,7 +1,6 @@
 from sklearn.datasets import load_iris
 from sklearn import svm
 from sklearn.model_selection import train_test_split
-import doctest
 # different functions implementing different types of SVM's
@@ -12,7 +11,7 @@ def NuSVC(train_x, train_y):
 def Linearsvc(train_x, train_y):
-    svc_linear = svm.LinearSVC()
+    svc_linear = svm.LinearSVC(tol=10e-2)
     svc_linear.fit(train_x, train_y)
     return svc_linear
@@ -20,7 +19,7 @@ def Linearsvc(train_x, train_y):
 def SVC(train_x, train_y):
     # svm.SVC(C=1.0, kernel='rbf', degree=3, gamma=0.0, coef0=0.0, shrinking=True,
     # probability=False,tol=0.001, cache_size=200, class_weight=None, verbose=False,
-    # max_iter=-1, random_state=None)
+    # max_iter=1000, random_state=None)
     # various parameters like "kernel","gamma","C" can effectively tuned for a given
     # machine learning model.
     SVC = svm.SVC(gamma="auto")
@@ -39,7 +38,6 @@ def test(X_new):
     'versicolor'
     >>> test([6,3,4,1])
     'versicolor'
     """
     iris = load_iris()
     # splitting the dataset to test and train
@@ -55,4 +53,6 @@ def test(X_new):
 if __name__ == "__main__":
+    import doctest
+    doctest.testmod()
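
For context, here is a minimal standalone sketch of the behaviour the tol change targets; it is not part of the commit, it assumes scikit-learn is installed, and whether the warning actually fires depends on the scikit-learn version and the data. tol sets liblinear's stopping tolerance, so with the defaults (tol=1e-4, max_iter=1000) the solver can hit the iteration cap first and emit a ConvergenceWarning, while the looser 10e-2 (i.e. 0.1) lets it declare convergence earlier.

import warnings

from sklearn.datasets import load_iris
from sklearn.exceptions import ConvergenceWarning
from sklearn.svm import LinearSVC

iris = load_iris()

with warnings.catch_warnings():
    # Promote ConvergenceWarning to an exception so the difference is visible.
    warnings.simplefilter("error", category=ConvergenceWarning)

    try:
        # Defaults (tol=1e-4, max_iter=1000): liblinear may not converge on this data.
        LinearSVC().fit(iris.data, iris.target)
        print("default tol: converged without warning")
    except ConvergenceWarning:
        print("default tol: raised ConvergenceWarning")

    # The commit's looser tolerance (10e-2 == 0.1) lets liblinear stop earlier,
    # which is what suppresses the warning in support_vector_machines.py.
    LinearSVC(tol=10e-2).fit(iris.data, iris.target)
    print("tol=10e-2: fitted without ConvergenceWarning")

Note that 10e-2 is three orders of magnitude looser than the default 1e-4, so the fit is silent at the cost of a less precise optimum; raising max_iter or scaling the features are the usual alternative ways to clear the warning.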