You cannot select more than 25 topics
Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.
50 lines
1.8 KiB
50 lines
1.8 KiB
from sklearn.ensemble import RandomForestClassifier
|
|
from sklearn.neighbors import KNeighborsClassifier
|
|
from sklearn.linear_model import LogisticRegression
|
|
from sklearn.tree import DecisionTreeClassifier
|
|
from sklearn.linear_model import SGDClassifier
|
|
from sklearn import svm
|
|
from sklearn.svm import LinearSVC
|
|
from sklearn.naive_bayes import GaussianNB
|
|
|
|
|
|
def RandomForest(X_train, X_test, y_train):
    """Train a random-forest classifier and score the held-out set.

    Args:
        X_train: training feature matrix.
        X_test: feature matrix to predict.
        y_train: training labels.

    Returns:
        Tuple of (predicted labels for ``X_test``, fitted estimator).
    """
    # Fixed hyperparameters; random_state pinned for reproducible runs.
    forest = RandomForestClassifier(
        n_estimators=100,
        criterion='entropy',
        max_depth=10,
        min_samples_split=2,
        min_samples_leaf=1,
        random_state=0,
    )
    forest.fit(X_train, y_train)
    predictions = forest.predict(X_test)
    return predictions, forest
|
|
|
|
def KNN(X_train, X_test, y_train):
    """Fit a 5-nearest-neighbors classifier and predict the test set.

    Returns:
        Tuple of (predicted labels for ``X_test``, fitted estimator).
    """
    neighbors = KNeighborsClassifier(n_neighbors=5)
    neighbors.fit(X_train, y_train)
    predictions = neighbors.predict(X_test)
    return predictions, neighbors
|
|
|
|
|
|
def SVM(X_train, X_test, y_train):
    """Fit an RBF-kernel SVC (gamma=0.001) and predict the test set.

    Returns:
        Tuple of (predicted labels for ``X_test``, fitted estimator).
    """
    classifier = svm.SVC(gamma=0.001)
    classifier.fit(X_train, y_train)
    predictions = classifier.predict(X_test)
    return predictions, classifier
|
|
|
|
def DecisionTree(X_train, X_test, y_train):
    """Fit a decision-tree classifier (library defaults) and predict.

    Returns:
        Tuple of (predicted labels for ``X_test``, fitted estimator).
    """
    tree = DecisionTreeClassifier().fit(X_train, y_train)
    predictions = tree.predict(X_test)
    return predictions, tree
|
|
|
|
def LogisticRegress(X_train, X_test, y_train):
    """Fit a logistic-regression classifier (library defaults) and predict.

    Returns:
        Tuple of (predicted labels for ``X_test``, fitted estimator).
    """
    model = LogisticRegression()
    model.fit(X_train, y_train)
    predictions = model.predict(X_test)
    return predictions, model
|
|
|
|
def Linearsvc(X_train, X_test, y_train):
    """Fit a linear SVM via LinearSVC and predict the test set.

    NOTE(review): per sklearn docs, with multi_class="crammer_singer"
    the ``loss`` and ``dual`` arguments are ignored — confirm intent.

    Returns:
        Tuple of (predicted labels for ``X_test``, fitted estimator).
    """
    linear_model = LinearSVC(
        C=1.0,
        dual=False,
        verbose=True,
        loss="squared_hinge",
        multi_class="crammer_singer",
    )
    linear_model.fit(X_train, y_train)
    predictions = linear_model.predict(X_test)
    return predictions, linear_model
|
|
|
|
def GaussianNaiveBayes(X_train, X_test, y_train):
    """Fit a Gaussian naive-Bayes classifier and predict the test set.

    Returns:
        Tuple of (predicted labels for ``X_test``, fitted estimator).
    """
    bayes = GaussianNB()
    bayes.fit(X_train, y_train)
    predictions = bayes.predict(X_test)
    return predictions, bayes