parent 20909f995d
commit c9ab9f9c53
5 binary files not shown.
@@ -1,39 +0,0 @@
from sklearn.metrics import accuracy_score, confusion_matrix, classification_report, ConfusionMatrixDisplay, roc_curve, auc, RocCurveDisplay
import matplotlib.pyplot as plt
from sklearn.model_selection import learning_curve
import numpy as np
from sklearn import metrics


def calculateMatrix(y_true, y_pred):
    accuracy = accuracy_score(y_true, y_pred)
    confusion_matrix_result = confusion_matrix(y_true, y_pred)
    classification_report_result = classification_report(y_true, y_pred, zero_division=1)
    return accuracy, confusion_matrix_result, classification_report_result


def seeMatrix(matrix, classes):
    cmap = plt.cm.Blues
    disp = ConfusionMatrixDisplay(confusion_matrix=matrix, display_labels=classes)
    disp.plot(cmap=cmap)
    plt.show()


def rocCurve(y_test, y_pred):
    fpr, tpr, thresholds = metrics.roc_curve(y_test, y_pred)
    roc_auc = metrics.auc(fpr, tpr)
    display = metrics.RocCurveDisplay(fpr=fpr, tpr=tpr, roc_auc=roc_auc,
                                      estimator_name='example estimator')
    display.plot()


def seeRocCurve(model, X_train, y_train, learning_reps):
    train_sizes, train_scores, test_scores = learning_curve(model, X_train, y_train, cv=learning_reps, scoring='accuracy', n_jobs=-1)

    train_scores_mean = np.mean(train_scores, axis=1)
    test_scores_mean = np.mean(test_scores, axis=1)

    plt.plot(train_sizes, train_scores_mean, label='Entrainement')
    plt.plot(train_sizes, test_scores_mean, label='Test')
    plt.xlabel("lol")
    plt.ylabel('Score')
    plt.title('Courbe d apprentissage')
    plt.legend()
    plt.show()
@@ -0,0 +1,69 @@
from sklearn.metrics import accuracy_score, confusion_matrix, classification_report, ConfusionMatrixDisplay, roc_curve, auc, RocCurveDisplay
import matplotlib.pyplot as plt
from sklearn.model_selection import learning_curve
import numpy as np
import pandas as pd
from sklearn import metrics
from sklearn.preprocessing import LabelEncoder
from cleanData import *
import sys


# Returns the average of a fighter's statistics over their previous fights
def getFighterStats(df, label_encoder, fighter_name):
    # Columns over which the average will be computed
    columns = ['B_avg_BODY_landed', 'B_avg_HEAD_landed', 'B_avg_TD_att', 'B_avg_TOTAL_STR_landed',
               'B_avg_opp_BODY_att', 'B_avg_opp_HEAD_landed', 'B_avg_opp_LEG_landed',
               'B_avg_opp_SIG_STR_att', 'B_avg_opp_TOTAL_STR_att']
    # Keep only the fights in which the requested fighter took part
    df_temp = df[(df['R_fighter'] == fighter_name) | (df['B_fighter'] == fighter_name)]
    # Error handling if the resulting dataframe is empty
    if df_temp.empty:
        print(f"{fighter_name} introuvable. Abandon de l'application")
        sys.exit(1)

    # Helper that swaps the fighter's corner (red/blue) when needed
    def swap_values_if_needed(row):
        if row['R_fighter'] == fighter_name:
            return swap_values_withoutran(row)
        return row

    # Apply swap_values_if_needed to every row of the dataframe
    df_temp = df_temp.apply(swap_values_if_needed, axis=1)
    # Average the columns listed above
    return df_temp[columns].mean()


def predict(fighterStatsR, fighterStatsB, titlebout, model, weight):
    # Columns expected by the model for the prediction
    columns = ['B_fighter', 'R_fighter', 'title_bout',
               'B_avg_BODY_landed', 'B_avg_HEAD_landed', 'B_avg_TD_att', 'B_avg_TOTAL_STR_landed',
               'B_avg_opp_BODY_att', 'B_avg_opp_HEAD_landed', 'B_avg_opp_LEG_landed',
               'B_avg_opp_SIG_STR_att', 'B_avg_opp_TOTAL_STR_att',

               'R_avg_BODY_landed', 'R_avg_HEAD_landed', 'R_avg_TD_att', 'R_avg_TOTAL_STR_landed',
               'R_avg_opp_BODY_att', 'R_avg_opp_HEAD_landed', 'R_avg_opp_LEG_landed',
               'R_avg_opp_SIG_STR_att', 'R_avg_opp_TOTAL_STR_att', 'weight_class']

    # Empty dataframe built from the columns listed above
    df = pd.DataFrame(columns=columns)
    # Combine both fighters' averaged statistics into a single row for the prediction
    # (note: the titlebout and weight parameters are not used yet; title_bout and weight_class are hard-coded to 1)
    fight = {'B_fighter': 0, 'R_fighter': 0, 'title_bout': 1,
             'B_avg_BODY_landed': fighterStatsB['B_avg_BODY_landed'],
             'B_avg_HEAD_landed': fighterStatsB['B_avg_HEAD_landed'], 'B_avg_TD_att': fighterStatsB['B_avg_TD_att'],
             'B_avg_TOTAL_STR_landed': fighterStatsB['B_avg_TOTAL_STR_landed'],
             'B_avg_opp_BODY_att': fighterStatsB['B_avg_opp_BODY_att'],
             'B_avg_opp_HEAD_landed': fighterStatsB['B_avg_opp_HEAD_landed'],
             'B_avg_opp_LEG_landed': fighterStatsB['B_avg_opp_LEG_landed'],
             'B_avg_opp_SIG_STR_att': fighterStatsB['B_avg_opp_SIG_STR_att'],
             'B_avg_opp_TOTAL_STR_att': fighterStatsB['B_avg_opp_TOTAL_STR_att'],

             'R_avg_BODY_landed': fighterStatsR['B_avg_BODY_landed'],
             'R_avg_HEAD_landed': fighterStatsR['B_avg_HEAD_landed'], 'R_avg_TD_att': fighterStatsR['B_avg_TD_att'],
             'R_avg_TOTAL_STR_landed': fighterStatsR['B_avg_TOTAL_STR_landed'],
             'R_avg_opp_BODY_att': fighterStatsR['B_avg_opp_BODY_att'],
             'R_avg_opp_HEAD_landed': fighterStatsR['B_avg_opp_HEAD_landed'],
             'R_avg_opp_LEG_landed': fighterStatsR['B_avg_opp_LEG_landed'],
             'R_avg_opp_SIG_STR_att': fighterStatsR['B_avg_opp_SIG_STR_att'],
             'R_avg_opp_TOTAL_STR_att': fighterStatsR['B_avg_opp_TOTAL_STR_att'],

             'weight_class': 1
             }
    # Add the row to the dataframe
    df = df._append(fight, ignore_index=True)
    # Return the predicted 'Winner' value from the chosen model
    return model.predict(df)
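getFighterStats relies on swap_values_withoutran, which reaches this file through the wildcard import from cleanData but is not shown in this commit. Purely as a hedged illustration — assuming the helper simply mirrors the B_- and R_-prefixed statistic columns of a row so that the requested fighter always ends up on the blue side — a minimal sketch could look like the following; the committed implementation in cleanData may differ.

import pandas as pd

# Hypothetical sketch: swap the B_/R_ prefixed columns of a single row.
# The real swap_values_withoutran lives in cleanData and is not part of this diff.
def swap_values_withoutran(row: pd.Series) -> pd.Series:
    swapped = row.copy()
    for col in row.index:
        if col.startswith('B_'):
            counterpart = 'R_' + col[len('B_'):]
        elif col.startswith('R_'):
            counterpart = 'B_' + col[len('R_'):]
        else:
            continue
        if counterpart in row.index:
            swapped[col] = row[counterpart]
    return swapped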
@@ -1,17 +1,38 @@
-from runModel import *
+from startModel import *
 from cleanData import *
 from sklearn.model_selection import train_test_split
+from analyse import *
+import sys
 
-X,y=getData()
-X_train,X_test,y_train,y_test=train_test_split(X,y,test_size=0.30, random_state=50)
+# Label encoder used to turn strings into integer indices
+label_encoder = LabelEncoder()
+# Retrieve X, y and an unmodified copy of the dataframe
+X, y, df = getData(label_encoder)
+# Split the data into the training and test parts expected by the selected model
+X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.30, random_state=50)
 
-startRandomForest(X_train,X_test,y_train,y_test)
-#startKNN(X_train,X_test,y_train,y_test)
-#startSVM(X_train,X_test,y_train,y_test)
-#startDecisionTree(X_train,X_test,y_train,y_test)
-#startLogisticRegression(X_train,X_test,y_train,y_test)
-#startLinearSVC(X_train,X_test,y_train,y_test)
-#startNaiveBayes(X_train,X_test,y_train,y_test)
-
-# https://scikit-learn.org/stable/_static/ml_map.png
+model = startRandomForest(X_train, X_test, y_train, y_test)
+#model=startKNN(X_train,X_test,y_train,y_test)
+#model=startSVM(X_train,X_test,y_train,y_test)
+#model=startDecisionTree(X_train,X_test,y_train,y_test)
+#model=startLogisticRegression(X_train,X_test,y_train,y_test)
+#model=startLinearSVC(X_train,X_test,y_train,y_test)
+#model=startNaiveBayes(X_train,X_test,y_train,y_test)
+
+# Prompt for the values needed for the prediction
+combattantR = input("Combattant Rouge ?: ")
+combattantB = input("Combattant Bleu ?: ")
+poids = input("Poids ?: ")
+belt = input("Ceinture ?: ")
+
+# Gather the per-fighter statistics required for the prediction
+fr = getFighterStats(df, label_encoder, combattantR)
+fb = getFighterStats(df, label_encoder, combattantB)
+
+# Run the prediction with the selected model
+winner = predict(fr, fb, belt, model, poids)
+
+# Display the predicted winner
+if winner == 0:
+    print(combattantR)
+else:
+    print(combattantB)
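The final if winner == 0 branch assumes that class 0 corresponds to the red corner, which depends entirely on how the Winner column is encoded inside getData (not shown in this diff). A small, hedged sanity check that could be appended to the script above — it only uses the model and label_encoder already defined there, plus the standard scikit-learn classes_ attribute:

# Sanity check (sketch; assumes the variables from the script above):
# a fitted scikit-learn classifier exposes its class order in model.classes_,
# and a fitted LabelEncoder lists the original labels in label_encoder.classes_.
print("Model class order:", model.classes_)
if hasattr(label_encoder, 'classes_'):
    print("Encoded labels:", list(label_encoder.classes_))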
@@ -1,64 +0,0 @@
from models import *
from analise import *


def report(accuracy, confMatrix, classReport):
    print(f'Accuracy: {accuracy}')
    print(f'Confusion Matrix:\n{confMatrix}')
    print(f'Classification Report:\n{classReport}')


def startRandomForest(X_train, X_test, y_train, y_test):
    y_pred, rf = RandomForest(X_train, X_test, y_train)
    rf_ac, rf_matrix, rf_class_report = calculateMatrix(y_test, y_pred)
    report(rf_ac, rf_matrix, rf_class_report)
    seeMatrix(rf_matrix, rf.classes_)
    #rocCurve(y_test, y_pred)
    #seeRocCurve(rf, X_train, y_train, 10)


def startKNN(X_train, X_test, y_train, y_test):
    y_pred, knn = KNN(X_train, X_test, y_train)
    knn_ac, knn_matrix, knn_class_report = calculateMatrix(y_test, y_pred)
    report(knn_ac, knn_matrix, knn_class_report)
    seeMatrix(knn_matrix, knn.classes_)
    #rocCurve(y_test, y_pred)
    #seeRocCurve(rf, X_train, y_train, 10)


def startSVM(X_train, X_test, y_train, y_test):
    y_pred, svm = SVM(X_train, X_test, y_train)
    svm_ac, svm_matrix, svm_class_report = calculateMatrix(y_test, y_pred)
    report(svm_ac, svm_matrix, svm_class_report)
    seeMatrix(svm_matrix, svm.classes_)
    #rocCurve(y_test, y_pred)
    #seeRocCurve(rf, X_train, y_train, 10)


def startDecisionTree(X_train, X_test, y_train, y_test):
    y_pred, dt = DecisionTree(X_train, X_test, y_train)
    dt_ac, dt_matrix, dt_class_report = calculateMatrix(y_test, y_pred)
    report(dt_ac, dt_matrix, dt_class_report)
    seeMatrix(dt_matrix, dt.classes_)
    #rocCurve(y_test, y_pred)
    #seeRocCurve(rf, X_train, y_train, 10)


def startLogisticRegression(X_train, X_test, y_train, y_test):
    y_pred, lr = LogisticRegress(X_train, X_test, y_train)
    lr_ac, lr_matrix, lr_class_report = calculateMatrix(y_test, y_pred)
    report(lr_ac, lr_matrix, lr_class_report)
    seeMatrix(lr_matrix, lr.classes_)
    #rocCurve(y_test, y_pred)
    #seeRocCurve(rf, X_train, y_train, 10)


def startLinearSVC(X_train, X_test, y_train, y_test):
    y_pred, svc = Linearsvc(X_train, X_test, y_train)
    svc_ac, svc_matrix, svc_class_report = calculateMatrix(y_test, y_pred)
    report(svc_ac, svc_matrix, svc_class_report)
    seeMatrix(svc_matrix, svc.classes_)
    #rocCurve(y_test, y_pred)
    #seeRocCurve(rf, X_train, y_train, 10)


def startNaiveBayes(X_train, X_test, y_train, y_test):
    y_pred, gnb = GaussianNaiveBayes(X_train, X_test, y_train)
    gnb_ac, gnb_matrix, gnb_class_report = calculateMatrix(y_test, y_pred)
    report(gnb_ac, gnb_matrix, gnb_class_report)
    seeMatrix(gnb_matrix, gnb.classes_)
    #rocCurve(y_test, y_pred)
    #seeRocCurve(rf, X_train, y_train, 10)
@@ -0,0 +1,79 @@
from models import *
from analyse import *


# Computes the confusion matrix and the metrics tied to training and testing a model
def calculateMatrix(y_true, y_pred):
    accuracy = accuracy_score(y_true, y_pred)
    confusion_matrix_result = confusion_matrix(y_true, y_pred)
    classification_report_result = classification_report(y_true, y_pred, zero_division=1)
    return accuracy, confusion_matrix_result, classification_report_result


# Displays the confusion matrix
def seeMatrix(matrix, classes):
    cmap = plt.cm.Blues
    disp = ConfusionMatrixDisplay(confusion_matrix=matrix, display_labels=classes)
    disp.plot(cmap=cmap)
    plt.show()


# Prints the key metrics of a model
def report(accuracy, confMatrix, classReport):
    print(f'Accuracy: {accuracy}')
    print(f'Confusion Matrix:\n{confMatrix}')
    print(f'Classification Report:\n{classReport}')


# Trains the RandomForest, reports the key training metrics and returns the fitted model
def startRandomForest(X_train, X_test, y_train, y_test):
    y_pred, rf = RandomForest(X_train, X_test, y_train)
    rf_ac, rf_matrix, rf_class_report = calculateMatrix(y_test, y_pred)
    report(rf_ac, rf_matrix, rf_class_report)
    #seeMatrix(rf_matrix, rf.classes_)
    return rf


# Trains the KNN, reports the key training metrics and returns the fitted model
def startKNN(X_train, X_test, y_train, y_test):
    y_pred, knn = KNN(X_train, X_test, y_train)
    knn_ac, knn_matrix, knn_class_report = calculateMatrix(y_test, y_pred)
    report(knn_ac, knn_matrix, knn_class_report)
    #seeMatrix(knn_matrix, knn.classes_)
    return knn


# Trains the SVM, reports the key training metrics and returns the fitted model
def startSVM(X_train, X_test, y_train, y_test):
    y_pred, svm = SVM(X_train, X_test, y_train)
    svm_ac, svm_matrix, svm_class_report = calculateMatrix(y_test, y_pred)
    report(svm_ac, svm_matrix, svm_class_report)
    #seeMatrix(svm_matrix, svm.classes_)
    return svm


# Trains the DecisionTree, reports the key training metrics and returns the fitted model
def startDecisionTree(X_train, X_test, y_train, y_test):
    y_pred, dt = DecisionTree(X_train, X_test, y_train)
    dt_ac, dt_matrix, dt_class_report = calculateMatrix(y_test, y_pred)
    report(dt_ac, dt_matrix, dt_class_report)
    #seeMatrix(dt_matrix, dt.classes_)
    return dt


# Trains the LogisticRegression, reports the key training metrics and returns the fitted model
def startLogisticRegression(X_train, X_test, y_train, y_test):
    y_pred, lr = LogisticRegress(X_train, X_test, y_train)
    lr_ac, lr_matrix, lr_class_report = calculateMatrix(y_test, y_pred)
    report(lr_ac, lr_matrix, lr_class_report)
    #seeMatrix(lr_matrix, lr.classes_)
    return lr


# Trains the linear SVC, reports the key training metrics and returns the fitted model
def startLinearSVC(X_train, X_test, y_train, y_test):
    y_pred, svc = Linearsvc(X_train, X_test, y_train)
    svc_ac, svc_matrix, svc_class_report = calculateMatrix(y_test, y_pred)
    report(svc_ac, svc_matrix, svc_class_report)
    #seeMatrix(svc_matrix, svc.classes_)
    return svc


# Trains the Gaussian Naive Bayes, reports the key training metrics and returns the fitted model
def startNaiveBayes(X_train, X_test, y_train, y_test):
    y_pred, gnb = GaussianNaiveBayes(X_train, X_test, y_train)
    gnb_ac, gnb_matrix, gnb_class_report = calculateMatrix(y_test, y_pred)
    report(gnb_ac, gnb_matrix, gnb_class_report)
    #seeMatrix(gnb_matrix, gnb.classes_)
    return gnb
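Every start* wrapper above assumes that models.py (not part of this diff) exposes trainers such as RandomForest(X_train, X_test, y_train) that fit an estimator and return (y_pred, fitted_model). Purely as a sketch of that assumed contract — the estimator choice and hyperparameters are placeholders, not the committed code — such a trainer could be written as:

from sklearn.ensemble import RandomForestClassifier

def RandomForest(X_train, X_test, y_train):
    # Fit a random forest on the training split (placeholder hyperparameters)
    rf = RandomForestClassifier(n_estimators=100, random_state=50)
    rf.fit(X_train, y_train)
    # Predict on the held-out test split and return both the predictions and the
    # fitted model, matching the (y_pred, model) shape expected by startRandomForest
    y_pred = rf.predict(X_test)
    return y_pred, rf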