Compare commits

No commits in common. 'master' and 'managing_missing_values' have entirely different histories.

@@ -1,31 +0,0 @@
kind: pipeline
type: docker
name: Pow
trigger:
event:
- push
steps:
- name: build-pow
image: plugins/docker
settings:
dockerfile: ./src/Dockerfile
context: ./src
registry: hub.codefirst.iut.uca.fr
repo: hub.codefirst.iut.uca.fr/dorian.hodin/pow
username:
from_secret: SECRET_USERNAME
password:
from_secret: SECRET_PASSWD
- name: deploy-pow
image: hub.codefirst.iut.uca.fr/thomas.bellembois/codefirst-dockerproxy-clientdrone:latest
environment:
IMAGENAME: hub.codefirst.iut.uca.fr/dorian.hodin/pow:latest
CONTAINERNAME: pow
COMMAND: create
OVERWRITE: true
ADMINS: dorianhodin,aurianjault,remiarnal
depends_on: [ build-pow ]

@@ -1,2 +0,0 @@
[client]
showSidebarNavigation = false

@@ -12,13 +12,6 @@ Projet de data mining, qui permet d'ouvrir un CSV, de pré-traiter et nettoyer l
 Dataset : https://catalog.data.gov/dataset/crash-reporting-drivers-data
-Link to the deployed site: https://codefirst.iut.uca.fr/containers/Picksteel-pow/ (do not forget the trailing /)
-To run the project locally, run the following from the src folder:
-```
-streamlit run home.py
-```
-WARNING: do not run home.py from the project root, but only from inside the src folder.
 # Developers 🧑‍💻

@@ -20,4 +20,4 @@ l.csv_value(df)
 # s.plotBoxWhisker(df)
-c.launch_cluster(df,['Speed Limit','Vehicle Year','Longitude'])
+c.launch_cluster(df,['Speed Limit','Vehicle Year'])

@@ -1,11 +0,0 @@
FROM python:3.9
WORKDIR /app
COPY . .
RUN pip install --upgrade pip
RUN pip install streamlit matplotlib pandas scikit-learn ydata-profiling
EXPOSE 8080
ENTRYPOINT ["streamlit", "run", "home.py", "--server.address=0.0.0.0", "--server.port=8080"]

@@ -3,7 +3,6 @@ import matplotlib.pyplot as plt
 from sklearn.cluster import KMeans, DBSCAN
 from sklearn.datasets import make_blobs, make_moons
 from mpl_toolkits.mplot3d import Axes3D
-from sklearn.decomposition import PCA

 def visualize_clusters_2d(X, labels, centers=None, title="Clusters"):
     plt.figure(figsize=(10, 7))
@@ -13,7 +12,7 @@ def visualize_clusters_2d(X, labels, centers=None, title="Clusters"):
     plt.title(title)
     plt.xlabel("Feature 1")
     plt.ylabel("Feature 2")
-    return plt.gcf()
+    plt.show()

 def visualize_clusters_3d(X, labels, centers=None, title="Clusters"):
     fig = plt.figure(figsize=(10, 7))
@@ -25,7 +24,7 @@ def visualize_clusters_3d(X, labels, centers=None, title="Clusters"):
     ax.set_xlabel("Feature 1")
     ax.set_ylabel("Feature 2")
     ax.set_zlabel("Feature 3")
-    return plt.gcf()
+    plt.show()

 def calculate_cluster_statistics_kmeans(X, labels, centers):
     unique_labels = np.unique(labels)
@@ -57,41 +56,7 @@ def calculate_cluster_statistics_dbscan(X, labels):
         })
     return stats

-def launch_cluster_knn(df, array_columns, n=3, dimensions=2):
-    X = df[array_columns].values
-    if len(array_columns) > 3:
-        pca = PCA(dimensions)
-        X = pca.fit_transform(df)
-    kmeans = KMeans(n_clusters=n, random_state=42)
-    labels_kmeans = kmeans.fit_predict(X)
-    centers_kmeans = kmeans.cluster_centers_
-    # for stat in stats_kmeans:
-    # print(f"Cluster {stat['cluster']}: {stat['num_points']} points, Center: {stat['center']}")
-    stats_kmeans = calculate_cluster_statistics_kmeans(X, labels_kmeans, centers_kmeans)
-    if dimensions == 3:
-        return visualize_clusters_3d(X, labels_kmeans, centers_kmeans, title="K-Means Clustering 3D")
-    else:
-        return visualize_clusters_2d(X, labels_kmeans, centers_kmeans, title="K-Means Clustering")
-
-def launch_cluster_dbscan(df, array_columns, dimensions=2):
-    X = df[array_columns].values
-    if len(array_columns) > 3:
-        pca = PCA(dimensions)
-        X = pca.fit_transform(df)
-    dbscan = DBSCAN(eps=0.2, min_samples=5)
-    labels_dbscan = dbscan.fit_predict(X)
-    stats_dbscan = calculate_cluster_statistics_dbscan(X, labels_dbscan)
-    # for stat in stats_dbscan:
-    # print(f"Cluster {stat['cluster']}: {stat['num_points']} points, Density: {stat['density']}")
-    if dimensions == 3:
-        return visualize_clusters_3d(X, labels_dbscan, title="DBSCAN Clustering 3D")
-    else:
-        return visualize_clusters_2d(X, labels_dbscan, title="DBSCAN Clustering")
-
-def launch_cluster(df, array_columns):
+def launch_cluster(df,array_columns):
     X = df[array_columns].values
     kmeans = KMeans(n_clusters=4, random_state=42)
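
For reference while reading this hunk: on the master side, launch_cluster_knn and launch_cluster_dbscan take the selected columns, optionally reduce them with PCA when more than three columns are passed, and return the Matplotlib figure that pages/prediction.py displays with tab.pyplot. A minimal sketch of that master-side API (the CSV path and column choice are hypothetical):

```python
# Sketch only: assumes the master-side API of back/clustering_csv.py
# (launch_cluster_knn / launch_cluster_dbscan returning a Matplotlib figure).
import sys
sys.path.append('./back/')            # the Streamlit pages add ./back to the path
import pandas as pd
import clustering_csv as cc

df = pd.read_csv("data.csv")          # hypothetical, already-cleaned numeric dataset
cols = ["Speed Limit", "Vehicle Year"]  # columns used elsewhere in this repo

fig_kmeans = cc.launch_cluster_knn(df, cols, n=3, dimensions=2)
fig_dbscan = cc.launch_cluster_dbscan(df, cols, dimensions=2)
fig_kmeans.savefig("kmeans_clusters.png")   # or st.pyplot(fig_kmeans) inside Streamlit
```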

@@ -1,7 +1,6 @@
 import pandas as pd
 import numpy as np
 import matplotlib.pyplot as plt
-from sklearn.preprocessing import RobustScaler

 def return_csv(path):
     df = pd.read_csv(path)
@@ -14,6 +13,7 @@ def csv_value(df):
     print(df.isna().sum())

 # Useless values
 def csv_check(df):
     for col in df:
         print("-"*12)
@@ -21,37 +21,28 @@ def csv_check(df):
         print("-"*12)
         print(df[col].unique())

-def csv_norm_min_max(df, col):
-    max = df[col].max()
-    min = df[col].min()
-    df[col] = (df[col] - min)/ (max - min)
-    return df[col]
-
-def csv_standardisation_Z(df, col):
+def csv_norm_min_max(df,col):
+    maValue = df[col].max
+    miValue = df[col].min
+    df[col] = (df[col] - df[col].min()) / (df[col].max() - df[col].min())
+    return df
+
+def csv_standardisation_Z(df,col):
     mean_col1 = df[col].mean()
     std_col1 = df[col].std()
     df[col] = (df[col] - mean_col1) / std_col1
     return df[col]

-def robust_normalize_column(df, column_name):
-    # Extract the column data
-    column_data = df[column_name].values.reshape(-1, 1)
-
-    # Fit and transform the column data
-    scaler = RobustScaler()
-    normalized_data = scaler.fit_transform(column_data)
-    df[column_name] = normalized_data
-
-    return normalized_data
-
-def handle_normalization(df, norm_method):
-    for col_name in df:
-        if norm_method == "min-max":
-            df[col_name] = csv_norm_min_max(df, col_name)
-        elif norm_method == "z-score":
-            df[col_name] = csv_standardisation_Z(df, col_name)
-        elif norm_method == "robust":
-            df[col_name] = robust_normalize_column(df, col_name)
-        else:
-            raise ValueError("Unknown method")
-    return df
+def csv_robust_normalize(df, column):
+    # Compute the median and the IQR
+    median = df[column].median()
+    q1 = df[column].quantile(0.25)
+    q3 = df[column].quantile(0.75)
+    iqr = q3 - q1
+
+    # Apply the robust normalization
+    normalized_column = (df[column] - median) / iqr
+    df[column] = normalized_column
+    print (normalized_column)
+    return normalized_column
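
For reference: the master-side helpers removed here are what pages/clean.py drives through handle_normalization, which dispatches each column to the min-max, z-score, or RobustScaler-based variant. A minimal sketch of that master-side API (the toy DataFrame is made up):

```python
# Sketch only: exercises the master-side normalization helpers shown above
# (csv_norm_min_max, csv_standardisation_Z, handle_normalization).
import sys
sys.path.append('./back/')   # the Streamlit pages add ./back to the path
import pandas as pd
import load_csv as lc

df = pd.DataFrame({"Speed Limit": [25, 35, 55], "Vehicle Year": [2004, 2012, 2020]})

df["Speed Limit"] = lc.csv_norm_min_max(df, "Speed Limit")         # rescaled to [0, 1]
df["Vehicle Year"] = lc.csv_standardisation_Z(df, "Vehicle Year")  # zero mean, unit std
# or apply one method to every column at once:
df = lc.handle_normalization(df, "z-score")
print(df)
```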

@@ -6,9 +6,7 @@ import load_csv as l
 def convert_categorical_to_numeric(data):
     for column in data.columns:
-        if pd.api.types.is_numeric_dtype(data[column]):
-            continue
-        elif data[column].nunique() <= 15:
+        if data[column].nunique() <= 15:
             data[column] = data[column].astype('category')
             data[column] = data[column].cat.codes.replace(-1, np.nan) + 1
         else:
@@ -20,6 +18,7 @@ def drop_high_null_percentage(data, threshold=0.5):
     data = data.loc[:, missing_percentage <= threshold]
     return data

 def replace_with_mean(data):
     return data.apply(lambda col: col.fillna(col.mean()) if col.dtype.kind in 'biufc' else col)
@@ -34,25 +33,17 @@ def impute_with_knn(data, n_neighbors=5):
     return pd.DataFrame(imputer.fit_transform(data), columns=data.columns)

 def impute_with_regression(data):
-    missing_columns = data.columns[data.isnull().any()].tolist()
-
-    for col in missing_columns:
-        missing_data = data[data[col].isnull()]
-        complete_data = data[~data[col].isnull()]
-        if missing_data.empty or complete_data.empty:
-            continue
-        X_complete = complete_data.drop(columns=missing_columns)
-        y_complete = complete_data[col]
-        X_missing = missing_data.drop(columns=missing_columns)
-        if X_missing.shape[0] > 0.5 * data.shape[0]:
-            continue
-        model = LinearRegression()
-        model.fit(X_complete, y_complete)
-        y_pred = model.predict(X_missing)
-        data.loc[data[col].isnull(), col] = y_pred
+    for column in data.columns:
+        if data[column].isnull().sum() > 0:
+            train_data = data[data[column].notna()]
+            test_data = data[data[column].isna()]
+            if not train_data.empty and not test_data.empty:
+                regressor = LinearRegression()
+                regressor.fit(train_data.drop(column, axis=1), train_data[column])
+                data.loc[data[column].isna(), column] = regressor.predict(test_data.drop(column, axis=1))
     return data

 """
 Parameters:
 - data: Pandas DataFrame with the data
@@ -60,6 +51,8 @@ def impute_with_regression(data):
 - n_neighbors: Number of neighbors to use for KNN imputation (only used if method='knn')
 """
 def handle_missing_values(data, method, n_neighbors=5):
+    data = drop_high_null_percentage(data)
     data = convert_categorical_to_numeric(data)
     if method == 'mean':
         return replace_with_mean(data)
@@ -73,3 +66,4 @@ def handle_missing_values(data, method, n_neighbors=5):
         return impute_with_regression(data)
     else:
         raise ValueError("Unknown method")
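
The docstring kept above documents handle_missing_values(data, method, n_neighbors=5), the cleaning entry point that pages/clean.py calls. A minimal usage sketch (the CSV file name is hypothetical):

```python
# Sketch only: drives back/managing_missing_values.py the way pages/clean.py does.
import sys
sys.path.append('./back/')   # the Streamlit pages add ./back to the path
import pandas as pd
import managing_missing_values as mmv

df = pd.read_csv("crash_reporting_drivers_data.csv")   # hypothetical local copy of the dataset
df = mmv.drop_high_null_percentage(df)                 # on master, clean.py calls this separately
df = mmv.handle_missing_values(df, "knn", n_neighbors=5)
print(df.isna().sum())
```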

@@ -1,37 +0,0 @@
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LinearRegression
from sklearn.ensemble import RandomForestRegressor
from sklearn.metrics import f1_score
from sklearn.metrics import accuracy_score
import numpy as np
import matplotlib.pyplot as plt
def getColumnsForPredictionAndPredict(df,columns, columnGoal, algoOfPrediction):
predictors = df[columns]
target = df[columnGoal]
if algoOfPrediction == "Linear Regression":
model = LinearRegression()
elif algoOfPrediction == "Random Forest":
model = RandomForestRegressor(n_estimators=100)
else:
raise NameError("No method name : \"" + algoOfPrediction + "\"")
model.fit(predictors, target)
prediction = model.predict(predictors)
return prediction
def correlation_matrix(df, columns):
new_df = df[columns]
correlations = new_df.corr()
print(correlations)
fig = plt.figure()
ax = fig.add_subplot(111)
cax = ax.matshow(correlations, vmin=-1, vmax=1)
fig.colorbar(cax)
ticks = np.arange(0,new_df.shape[1],1)
ax.set_xticks(ticks)
ax.set_yticks(ticks)
ax.set_xticklabels(list(new_df))
ax.set_yticklabels(list(new_df))
return fig
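
For reference, prediction.py (deleted in this compare) exposes the two helpers used by the Predict page: getColumnsForPredictionAndPredict fits either a LinearRegression or a RandomForestRegressor on the selected columns and returns in-sample predictions, and correlation_matrix returns a Matplotlib figure of the correlation heatmap. A minimal sketch of that API (the toy DataFrame and column names are made up):

```python
# Sketch only: mirrors how pages/prediction.py calls these helpers.
import sys
sys.path.append('./back/')   # the Streamlit pages add ./back to the path
import pandas as pd
import prediction as p

df = pd.DataFrame({
    "Speed Limit": [25, 35, 45, 55],
    "Vehicle Year": [2004, 2010, 2015, 2020],
    "Longitude": [-77.1, -77.0, -76.9, -76.8],
})
pred = p.getColumnsForPredictionAndPredict(df, ["Speed Limit", "Vehicle Year"], "Longitude", "Random Forest")
fig = p.correlation_matrix(df, ["Speed Limit", "Vehicle Year", "Longitude"])
fig.savefig("correlations.png")   # or st.pyplot(fig) inside Streamlit
print(pred)
```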

@@ -2,15 +2,15 @@ import pandas as pd
 import numpy as np
 import matplotlib.pyplot as plt

-def histo_col(df, col):
+def histo_col(df,colonne):
     plt.figure()
-    plt.hist(df[col], bins=4, alpha=0.7, color='blue', edgecolor='black')
-    plt.title(f"Histogramme de la colonne '{col}'")
-    plt.xlabel(col)
+    plt.hist(df[colonne], bins=int(df[colonne].nunique()/4), alpha=0.7, color='blue', edgecolor='black')
+    plt.title(f"Histogramme de la colonne '{colonne}'")
+    plt.xlabel(colonne)
     plt.ylabel("Fréquence")
     plt.grid(True)
-    return plt.gcf()
+    plt.show()

-def plotBoxWhisker(df, col):
-    df[col].plot(kind='box', subplots=True, sharex=False, sharey=False)
-    return plt.gcf()
+def plotBoxWhisker(df):
+    df.plot(kind='box', subplots=True, sharex=False, sharey=False)
+    plt.show()

@@ -1,52 +0,0 @@
import streamlit as st
from io import StringIO
# from ydata_profiling import ProfileReport
import pandas as pd
def statistics(df):
nan_counts = df.isnull().sum(axis=1).sum()
st.write("*Number of columns*:", len(df.columns))
st.write("*Number of rows*:", len(df.index))
st.write("*Nan Counts*: ", nan_counts)
st.write(df.isna().sum())
def display_df_first_and_lasts_lines(df):
fl = df.head(10)
ll = df.tail(10)
concat = pd.concat([fl, ll])
st.dataframe(concat)
def nav_bar():
st.page_link("./home.py", label="Import", icon="⬆️", help=None)
st.page_link("pages/clean.py", label="Clean", icon="🧼", help=None)
st.page_link("pages/visualize.py", label="Visualize", icon="👁️", help=None)
st.page_link("pages/prediction.py", label="Predict", icon="🔮", help=None)
def clean_dataframe(line):
# Call to function to clean data
line.empty()
line.write("Dataframe has been cleaned")
def main():
nav_bar()
st.write("# Pow: Your data analyser")
uploaded_file = st.file_uploader("Choose a file")
if uploaded_file is not None:
df = pd.read_csv(uploaded_file)
st.session_state.original_df = df
st.write("## Dataframe (10 first/last lines)")
display_df_first_and_lasts_lines(df)
st.write("## Statistics")
statistics(df)
# profile = ProfileReport(df, title='Pandas Profiling Report', explorative=True)
# profile.to_widgets()
if st.button("Next"):
st.switch_page("pages/clean.py")
main()

@@ -1,43 +0,0 @@
import streamlit as st
import sys
sys.path.append('./back/')
import managing_missing_values as mmv
import load_csv as lc
if 'original_df' in st.session_state:
df = st.session_state.original_df
st.write("# 🧼 Data cleaning")
st.write("## Missing data")
rm_empty_rows_or_cols = st.checkbox("Remove empty rows or columns", True)
st.write("#### Replace missing values")
replace_methods = ["mean","median","mode","knn","regression"]
replace_method = st.radio('Choose an option:', replace_methods)
st.write("## Normalize data")
normalize_methods = ["min-max","z-score","robust"]
normalize_method = st.radio('Choose an option:', normalize_methods)
is_cleaned = st.button("Clean dataset")
if is_cleaned:
if rm_empty_rows_or_cols:
st.write("- Removing hight null percentage values")
df = mmv.drop_high_null_percentage(df)
st.dataframe(df)
st.write("- Handle missing values with method:", replace_method)
df = mmv.handle_missing_values(df, replace_method)
st.session_state.df = df
st.dataframe(df)
st.write("- Normalize with method:", normalize_method)
df = lc.handle_normalization(df, normalize_method)
st.session_state.df = df
st.dataframe(df)
st.switch_page("pages/visualize.py")
else:
st.write("Please upload you dataset.")

@@ -1,74 +0,0 @@
import streamlit as st
import pandas as pd
import sys
import matplotlib.pyplot as plt
import numpy as np
sys.path.append('./back/')
import clustering_csv as cc
import prediction as p
def handle_column_multiselect(df, method_name):
selected_columns = st.multiselect(f"Select the columns you want for {method_name}:", df.columns.tolist(), placeholder="Select dataset columns")
return selected_columns
def df_prediction_results(df, targetCol, sourceColumns, method):
original_col = df[targetCol]
predicted_col = p.getColumnsForPredictionAndPredict(df, sourceColumns, targetCol, method)
new_df = pd.DataFrame()
new_df['Original'] = original_col
new_df['Predicted'] = predicted_col
return new_df
if 'df' in st.session_state:
df = st.session_state.df
st.write("# 🔮 Prediction")
tab1, tab2 = st.tabs(["Clustering", "Predictions"])
with tab1:
st.header("Clustering")
selected_columns = handle_column_multiselect(df, "clustering")
if len(selected_columns) >= 3:
dimensions = st.radio("Reduce to dimensions X with PCA:",[2,3],index=0)
else:
dimensions = 2
tab_names = ["K-means", "DBSCAN"]
cluster_tabs = st.tabs(tab_names)
for idx, tab in enumerate(cluster_tabs):
if tab.button(f"Start {tab_names[idx]}"):
if tab_names[idx] == "K-means":
fig = cc.launch_cluster_knn(df, selected_columns, dimensions=dimensions)
else:
fig = cc.launch_cluster_dbscan(df, selected_columns, dimensions)
tab.pyplot(fig)
with tab2:
st.header("Predictions")
target_column = st.selectbox(
"Target column:",
df.columns.tolist(),
index=None,
placeholder="Select target column"
)
if target_column != None:
selected_columns_p = handle_column_multiselect(df, "predictions")
tab_names = ["Linear Regression", "Random Forest"]
prediction_tabs = st.tabs(tab_names)
for idx, tab in enumerate(prediction_tabs):
if tab.button(f"Start {tab_names[idx]}"):
tab.pyplot(p.correlation_matrix(df, selected_columns_p+[target_column]))
tmp_df = df_prediction_results(df, target_column, selected_columns_p, tab_names[idx])
tab.dataframe(tmp_df)
else:
st.write("Please clean your dataset.")

@@ -1,32 +0,0 @@
import streamlit as st
import matplotlib.pyplot as plt
import sys
sys.path.append('./back/')
import show_csv as sc
if 'df' in st.session_state:
df = st.session_state.df
df_columns = df.columns.tolist()
st.write("# 📊 Visualization")
st.write("## Histograms")
hist_tabs = st.tabs(df_columns)
for idx, tab in enumerate(hist_tabs):
tab.write("##### "+df_columns[idx])
tab.pyplot(sc.histo_col(df, df_columns[idx]))
st.write("## Box & Whisker")
baw_tabs = st.tabs(df_columns)
for idx, tab in enumerate(baw_tabs):
tab.write("##### "+df_columns[idx])
fig, ax = plt.subplots()
df[df_columns[idx]].plot(kind='box')
tab.pyplot(fig)
else:
st.write('Please clean your dataset.')

@@ -1,749 +0,0 @@
Recency (months),Frequency (times),Monetary (c.c. blood),Time (months),"whether he/she donated blood in March 2007"
2 ,50,12500,98 ,1
0 ,13,3250,28 ,1
1 ,16,4000,35 ,1
2 ,20,5000,45 ,1
1 ,24,6000,77 ,0
4 ,4,1000,4 ,0
2 ,7,1750,14 ,1
1 ,12,3000,35 ,0
2 ,9,2250,22 ,1
5 ,46,11500,98 ,1
4 ,23,5750,58 ,0
0 ,3,750,4 ,0
2 ,10,2500,28 ,1
1 ,13,3250,47 ,0
2 ,6,1500,15 ,1
2 ,5,1250,11 ,1
2 ,14,3500,48 ,1
2 ,15,3750,49 ,1
2 ,6,1500,15 ,1
2 ,3,750,4 ,1
2 ,3,750,4 ,1
4 ,11,2750,28 ,0
2 ,6,1500,16 ,1
2 ,6,1500,16 ,1
9 ,9,2250,16 ,0
4 ,14,3500,40 ,0
4 ,6,1500,14 ,0
4 ,12,3000,34 ,1
4 ,5,1250,11 ,1
4 ,8,2000,21 ,0
1 ,14,3500,58 ,0
4 ,10,2500,28 ,1
4 ,10,2500,28 ,1
4 ,9,2250,26 ,1
2 ,16,4000,64 ,0
2 ,8,2000,28 ,1
2 ,12,3000,47 ,1
4 ,6,1500,16 ,1
2 ,14,3500,57 ,1
4 ,7,1750,22 ,1
2 ,13,3250,53 ,1
2 ,5,1250,16 ,0
2 ,5,1250,16 ,1
2 ,5,1250,16 ,0
4 ,20,5000,69 ,1
4 ,9,2250,28 ,1
2 ,9,2250,36 ,0
2 ,2,500,2 ,0
2 ,2,500,2 ,0
2 ,2,500,2 ,0
2 ,11,2750,46 ,0
2 ,11,2750,46 ,1
2 ,6,1500,22 ,0
2 ,12,3000,52 ,0
4 ,5,1250,14 ,1
4 ,19,4750,69 ,1
4 ,8,2000,26 ,1
2 ,7,1750,28 ,1
2 ,16,4000,81 ,0
3 ,6,1500,21 ,0
2 ,7,1750,29 ,0
2 ,8,2000,35 ,1
2 ,10,2500,49 ,0
4 ,5,1250,16 ,1
2 ,3,750,9 ,1
3 ,16,4000,74 ,0
2 ,4,1000,14 ,1
0 ,2,500,4 ,0
4 ,7,1750,25 ,0
1 ,9,2250,51 ,0
2 ,4,1000,16 ,0
2 ,4,1000,16 ,0
4 ,17,4250,71 ,1
2 ,2,500,4 ,0
2 ,2,500,4 ,1
2 ,2,500,4 ,1
2 ,4,1000,16 ,1
2 ,2,500,4 ,0
2 ,2,500,4 ,0
2 ,2,500,4 ,0
4 ,6,1500,23 ,1
2 ,4,1000,16 ,0
2 ,4,1000,16 ,0
2 ,4,1000,16 ,0
2 ,6,1500,28 ,1
2 ,6,1500,28 ,0
4 ,2,500,4 ,0
4 ,2,500,4 ,0
4 ,2,500,4 ,0
2 ,7,1750,35 ,1
4 ,2,500,4 ,1
4 ,2,500,4 ,0
4 ,2,500,4 ,0
4 ,2,500,4 ,0
12 ,11,2750,23 ,0
4 ,7,1750,28 ,0
3 ,17,4250,86 ,0
4 ,9,2250,38 ,1
4 ,4,1000,14 ,1
5 ,7,1750,26 ,1
4 ,8,2000,34 ,1
2 ,13,3250,76 ,1
4 ,9,2250,40 ,0
2 ,5,1250,26 ,0
2 ,5,1250,26 ,0
6 ,17,4250,70 ,0
0 ,8,2000,59 ,0
3 ,5,1250,26 ,0
2 ,3,750,14 ,0
2 ,10,2500,64 ,0
4 ,5,1250,23 ,1
4 ,9,2250,46 ,0
4 ,5,1250,23 ,0
4 ,8,2000,40 ,1
2 ,12,3000,82 ,0
11 ,24,6000,64 ,0
2 ,7,1750,46 ,1
4 ,11,2750,61 ,0
1 ,7,1750,57 ,0
2 ,11,2750,79 ,1
2 ,3,750,16 ,1
4 ,5,1250,26 ,1
2 ,6,1500,41 ,1
2 ,5,1250,33 ,1
2 ,4,1000,26 ,0
2 ,5,1250,34 ,0
4 ,8,2000,46 ,1
2 ,4,1000,26 ,0
4 ,8,2000,48 ,1
2 ,2,500,10 ,1
4 ,5,1250,28 ,0
2 ,12,3000,95 ,0
2 ,2,500,10 ,0
4 ,6,1500,35 ,0
2 ,11,2750,88 ,0
2 ,3,750,19 ,0
2 ,5,1250,37 ,0
2 ,12,3000,98 ,0
9 ,5,1250,19 ,0
2 ,2,500,11 ,0
2 ,9,2250,74 ,0
5 ,14,3500,86 ,0
4 ,3,750,16 ,0
4 ,3,750,16 ,0
4 ,2,500,9 ,1
4 ,3,750,16 ,1
6 ,3,750,14 ,0
2 ,2,500,11 ,0
2 ,2,500,11 ,1
2 ,2,500,11 ,0
2 ,7,1750,58 ,1
4 ,6,1500,39 ,0
4 ,11,2750,78 ,0
2 ,1,250,2 ,1
2 ,1,250,2 ,0
2 ,1,250,2 ,0
2 ,1,250,2 ,0
2 ,1,250,2 ,0
2 ,1,250,2 ,0
2 ,1,250,2 ,0
2 ,1,250,2 ,0
2 ,1,250,2 ,0
2 ,1,250,2 ,0
2 ,1,250,2 ,1
2 ,1,250,2 ,1
2 ,1,250,2 ,1
2 ,1,250,2 ,0
2 ,1,250,2 ,0
2 ,1,250,2 ,0
2 ,1,250,2 ,0
2 ,1,250,2 ,0
2 ,1,250,2 ,0
2 ,1,250,2 ,0
2 ,1,250,2 ,0
2 ,1,250,2 ,0
11 ,10,2500,35 ,0
11 ,4,1000,16 ,1
4 ,5,1250,33 ,1
4 ,6,1500,41 ,1
2 ,3,750,22 ,0
4 ,4,1000,26 ,1
10 ,4,1000,16 ,0
2 ,4,1000,35 ,0
4 ,12,3000,88 ,0
13 ,8,2000,26 ,0
11 ,9,2250,33 ,0
4 ,5,1250,34 ,0
4 ,4,1000,26 ,0
8 ,15,3750,77 ,0
4 ,5,1250,35 ,1
4 ,7,1750,52 ,0
4 ,7,1750,52 ,0
2 ,4,1000,35 ,0
11 ,11,2750,42 ,0
2 ,2,500,14 ,0
2 ,5,1250,47 ,1
9 ,8,2000,38 ,1
4 ,6,1500,47 ,0
11 ,7,1750,29 ,0
9 ,9,2250,45 ,0
4 ,6,1500,52 ,0
4 ,7,1750,58 ,0
6 ,2,500,11 ,1
4 ,7,1750,58 ,0
11 ,9,2250,38 ,0
11 ,6,1500,26 ,0
2 ,2,500,16 ,0
2 ,7,1750,76 ,0
11 ,6,1500,27 ,0
11 ,3,750,14 ,0
4 ,1,250,4 ,0
4 ,1,250,4 ,0
4 ,1,250,4 ,0
4 ,1,250,4 ,0
4 ,1,250,4 ,0
4 ,1,250,4 ,1
4 ,1,250,4 ,0
4 ,1,250,4 ,0
4 ,1,250,4 ,0
4 ,1,250,4 ,0
4 ,1,250,4 ,0
4 ,1,250,4 ,1
4 ,1,250,4 ,1
4 ,1,250,4 ,0
4 ,1,250,4 ,1
4 ,1,250,4 ,1
4 ,1,250,4 ,0
4 ,3,750,24 ,0
4 ,1,250,4 ,0
4 ,1,250,4 ,0
4 ,1,250,4 ,0
4 ,1,250,4 ,1
4 ,1,250,4 ,0
10 ,8,2000,39 ,0
14 ,7,1750,26 ,0
8 ,10,2500,63 ,0
11 ,3,750,15 ,0
4 ,2,500,14 ,0
2 ,4,1000,43 ,0
8 ,9,2250,58 ,0
8 ,8,2000,52 ,1
11 ,22,5500,98 ,0
4 ,3,750,25 ,1
11 ,17,4250,79 ,1
9 ,2,500,11 ,0
4 ,5,1250,46 ,0
11 ,12,3000,58 ,0
7 ,12,3000,86 ,0
11 ,2,500,11 ,0
11 ,2,500,11 ,0
11 ,2,500,11 ,0
2 ,6,1500,75 ,0
11 ,8,2000,41 ,1
11 ,3,750,16 ,1
12 ,13,3250,59 ,0
2 ,3,750,35 ,0
16 ,8,2000,28 ,0
11 ,7,1750,37 ,0
4 ,3,750,28 ,0
12 ,12,3000,58 ,0
4 ,4,1000,41 ,0
11 ,14,3500,73 ,1
2 ,2,500,23 ,0
2 ,3,750,38 ,1
4 ,5,1250,58 ,0
4 ,4,1000,43 ,1
3 ,2,500,23 ,0
11 ,8,2000,46 ,0
4 ,7,1750,82 ,0
13 ,4,1000,21 ,0
16 ,11,2750,40 ,0
16 ,7,1750,28 ,0
7 ,2,500,16 ,0
4 ,5,1250,58 ,0
4 ,5,1250,58 ,0
4 ,4,1000,46 ,0
14 ,13,3250,57 ,0
4 ,3,750,34 ,0
14 ,18,4500,78 ,0
11 ,8,2000,48 ,0
14 ,16,4000,70 ,0
14 ,4,1000,22 ,1
14 ,5,1250,26 ,0
8 ,2,500,16 ,0
11 ,5,1250,33 ,0
11 ,2,500,14 ,0
4 ,2,500,23 ,0
9 ,2,500,16 ,1
14 ,5,1250,28 ,1
14 ,3,750,19 ,1
14 ,4,1000,23 ,1
16 ,12,3000,50 ,0
11 ,4,1000,28 ,0
11 ,5,1250,35 ,0
11 ,5,1250,35 ,0
2 ,4,1000,70 ,0
14 ,5,1250,28 ,0
14 ,2,500,14 ,0
14 ,2,500,14 ,0
14 ,2,500,14 ,0
14 ,2,500,14 ,0
14 ,2,500,14 ,0
14 ,2,500,14 ,0
2 ,3,750,52 ,0
14 ,6,1500,34 ,0
11 ,5,1250,37 ,1
4 ,5,1250,74 ,0
11 ,3,750,23 ,0
16 ,4,1000,23 ,0
16 ,3,750,19 ,0
11 ,5,1250,38 ,0
11 ,2,500,16 ,0
12 ,9,2250,60 ,0
9 ,1,250,9 ,0
9 ,1,250,9 ,0
4 ,2,500,29 ,0
11 ,2,500,17 ,0
14 ,4,1000,26 ,0
11 ,9,2250,72 ,1
11 ,5,1250,41 ,0
15 ,16,4000,82 ,0
9 ,5,1250,51 ,1
11 ,4,1000,34 ,0
14 ,8,2000,50 ,1
16 ,7,1750,38 ,0
14 ,2,500,16 ,0
2 ,2,500,41 ,0
14 ,16,4000,98 ,0
14 ,4,1000,28 ,1
16 ,7,1750,39 ,0
14 ,7,1750,47 ,0
16 ,6,1500,35 ,0
16 ,6,1500,35 ,1
11 ,7,1750,62 ,1
16 ,2,500,16 ,0
16 ,3,750,21 ,1
11 ,3,750,28 ,0
11 ,7,1750,64 ,0
11 ,1,250,11 ,1
9 ,3,750,34 ,0
14 ,4,1000,30 ,0
23 ,38,9500,98 ,0
11 ,6,1500,58 ,0
11 ,1,250,11 ,0
11 ,1,250,11 ,0
11 ,1,250,11 ,0
11 ,1,250,11 ,0
11 ,1,250,11 ,0
11 ,1,250,11 ,0
11 ,1,250,11 ,0
11 ,1,250,11 ,0
11 ,2,500,21 ,0
11 ,5,1250,50 ,0
11 ,2,500,21 ,0
16 ,4,1000,28 ,0
4 ,2,500,41 ,0
16 ,6,1500,40 ,0
14 ,3,750,26 ,0
9 ,2,500,26 ,0
21 ,16,4000,64 ,0
14 ,6,1500,51 ,0
11 ,2,500,24 ,0
4 ,3,750,71 ,0
21 ,13,3250,57 ,0
11 ,6,1500,71 ,0
14 ,2,500,21 ,1
23 ,15,3750,57 ,0
14 ,4,1000,38 ,0
11 ,2,500,26 ,0
16 ,5,1250,40 ,1
4 ,2,500,51 ,1
14 ,3,750,31 ,0
4 ,2,500,52 ,0
9 ,4,1000,65 ,0
14 ,4,1000,40 ,0
11 ,3,750,40 ,1
14 ,5,1250,50 ,0
14 ,1,250,14 ,0
14 ,1,250,14 ,0
14 ,1,250,14 ,0
14 ,1,250,14 ,0
14 ,1,250,14 ,0
14 ,1,250,14 ,0
14 ,1,250,14 ,0
14 ,1,250,14 ,0
14 ,7,1750,72 ,0
14 ,1,250,14 ,0
14 ,1,250,14 ,0
9 ,3,750,52 ,0
14 ,7,1750,73 ,0
11 ,4,1000,58 ,0
11 ,4,1000,59 ,0
4 ,2,500,59 ,0
11 ,4,1000,61 ,0
16 ,4,1000,40 ,0
16 ,10,2500,89 ,0
21 ,2,500,21 ,1
21 ,3,750,26 ,0
16 ,8,2000,76 ,0
21 ,3,750,26 ,1
18 ,2,500,23 ,0
23 ,5,1250,33 ,0
23 ,8,2000,46 ,0
16 ,3,750,34 ,0
14 ,5,1250,64 ,0
14 ,3,750,41 ,0
16 ,1,250,16 ,0
16 ,1,250,16 ,0
16 ,1,250,16 ,0
16 ,1,250,16 ,0
16 ,1,250,16 ,0
16 ,1,250,16 ,0
16 ,1,250,16 ,0
16 ,4,1000,45 ,0
16 ,1,250,16 ,0
16 ,1,250,16 ,0
16 ,1,250,16 ,0
16 ,1,250,16 ,0
16 ,1,250,16 ,0
16 ,2,500,26 ,0
21 ,2,500,23 ,0
16 ,2,500,27 ,0
21 ,2,500,23 ,0
21 ,2,500,23 ,0
14 ,4,1000,57 ,0
16 ,5,1250,60 ,0
23 ,2,500,23 ,0
14 ,5,1250,74 ,0
23 ,3,750,28 ,0
16 ,3,750,40 ,0
9 ,2,500,52 ,0
9 ,2,500,52 ,0
16 ,7,1750,87 ,1
14 ,4,1000,64 ,0
14 ,2,500,35 ,0
16 ,7,1750,93 ,0
21 ,2,500,25 ,0
14 ,3,750,52 ,0
23 ,14,3500,93 ,0
18 ,8,2000,95 ,0
16 ,3,750,46 ,0
11 ,3,750,76 ,0
11 ,2,500,52 ,0
11 ,3,750,76 ,0
23 ,12,3000,86 ,0
21 ,3,750,35 ,0
23 ,2,500,26 ,0
23 ,2,500,26 ,0
23 ,8,2000,64 ,0
16 ,3,750,50 ,0
23 ,3,750,33 ,0
21 ,3,750,38 ,0
23 ,2,500,28 ,0
21 ,1,250,21 ,0
21 ,1,250,21 ,0
21 ,1,250,21 ,0
21 ,1,250,21 ,0
21 ,1,250,21 ,0
21 ,1,250,21 ,0
21 ,1,250,21 ,0
21 ,1,250,21 ,0
21 ,1,250,21 ,0
21 ,1,250,21 ,1
21 ,1,250,21 ,0
21 ,1,250,21 ,0
21 ,5,1250,60 ,0
23 ,4,1000,45 ,0
21 ,4,1000,52 ,0
22 ,1,250,22 ,1
11 ,2,500,70 ,0
23 ,5,1250,58 ,0
23 ,3,750,40 ,0
23 ,3,750,41 ,0
14 ,3,750,83 ,0
21 ,2,500,35 ,0
26 ,5,1250,49 ,1
23 ,6,1500,70 ,0
23 ,1,250,23 ,0
23 ,1,250,23 ,0
23 ,1,250,23 ,0
23 ,1,250,23 ,0
23 ,1,250,23 ,0
23 ,1,250,23 ,0
23 ,1,250,23 ,0
23 ,1,250,23 ,0
23 ,4,1000,53 ,0
21 ,6,1500,86 ,0
23 ,3,750,48 ,0
21 ,2,500,41 ,0
21 ,3,750,64 ,0
16 ,2,500,70 ,0
21 ,3,750,70 ,0
23 ,4,1000,87 ,0
23 ,3,750,89 ,0
23 ,2,500,87 ,0
35 ,3,750,64 ,0
38 ,1,250,38 ,0
38 ,1,250,38 ,0
40 ,1,250,40 ,0
74 ,1,250,74 ,0
2 ,43,10750,86 ,1
6 ,22,5500,28 ,1
2 ,34,8500,77 ,1
2 ,44,11000,98 ,0
0 ,26,6500,76 ,1
2 ,41,10250,98 ,1
3 ,21,5250,42 ,1
2 ,11,2750,23 ,0
2 ,21,5250,52 ,1
2 ,13,3250,32 ,1
4 ,4,1000,4 ,1
2 ,11,2750,26 ,0
2 ,11,2750,28 ,0
3 ,14,3500,35 ,0
4 ,16,4000,38 ,1
4 ,6,1500,14 ,0
3 ,5,1250,12 ,1
4 ,33,8250,98 ,1
3 ,10,2500,33 ,1
4 ,10,2500,28 ,1
2 ,11,2750,40 ,1
2 ,11,2750,41 ,1
4 ,13,3250,39 ,1
1 ,10,2500,43 ,1
4 ,9,2250,28 ,0
2 ,4,1000,11 ,0
2 ,5,1250,16 ,1
2 ,15,3750,64 ,0
5 ,24,6000,79 ,0
2 ,6,1500,22 ,1
4 ,5,1250,16 ,1
2 ,4,1000,14 ,1
4 ,8,2000,28 ,0
2 ,4,1000,14 ,0
2 ,6,1500,26 ,0
4 ,5,1250,16 ,1
2 ,7,1750,32 ,1
2 ,6,1500,26 ,1
2 ,8,2000,38 ,1
2 ,2,500,4 ,1
2 ,6,1500,28 ,1
2 ,10,2500,52 ,0
4 ,16,4000,70 ,1
4 ,2,500,4 ,1
1 ,14,3500,95 ,0
4 ,2,500,4 ,1
7 ,14,3500,48 ,0
2 ,3,750,11 ,0
2 ,12,3000,70 ,1
4 ,7,1750,32 ,1
4 ,4,1000,16 ,0
2 ,6,1500,35 ,1
4 ,6,1500,28 ,1
2 ,3,750,14 ,0
2 ,4,1000,23 ,0
4 ,4,1000,18 ,0
5 ,6,1500,28 ,0
4 ,6,1500,30 ,0
14 ,5,1250,14 ,0
3 ,8,2000,50 ,0
4 ,11,2750,64 ,1
4 ,9,2250,52 ,0
4 ,16,4000,98 ,1
7 ,10,2500,47 ,0
4 ,14,3500,86 ,0
2 ,9,2250,75 ,0
4 ,6,1500,35 ,0
4 ,9,2250,55 ,0
4 ,6,1500,35 ,1
2 ,6,1500,45 ,0
2 ,6,1500,47 ,0
4 ,2,500,9 ,0
2 ,2,500,11 ,1
2 ,2,500,11 ,0
2 ,2,500,11 ,1
4 ,6,1500,38 ,1
3 ,4,1000,29 ,1
9 ,9,2250,38 ,0
11 ,5,1250,18 ,0
2 ,3,750,21 ,0
2 ,1,250,2 ,0
2 ,1,250,2 ,1
2 ,1,250,2 ,0
2 ,1,250,2 ,0
2 ,1,250,2 ,0
2 ,1,250,2 ,0
2 ,1,250,2 ,1
2 ,1,250,2 ,0
2 ,1,250,2 ,0
2 ,1,250,2 ,0
2 ,1,250,2 ,0
11 ,11,2750,38 ,0
2 ,3,750,22 ,0
9 ,11,2750,49 ,1
5 ,11,2750,75 ,0
3 ,5,1250,38 ,0
3 ,1,250,3 ,1
4 ,6,1500,43 ,0
2 ,3,750,24 ,0
12 ,11,2750,39 ,0
2 ,2,500,14 ,0
4 ,6,1500,46 ,0
9 ,3,750,14 ,0
14 ,8,2000,26 ,0
4 ,2,500,13 ,0
4 ,11,2750,95 ,0
2 ,7,1750,77 ,0
2 ,7,1750,77 ,0
4 ,1,250,4 ,0
4 ,1,250,4 ,0
4 ,1,250,4 ,0
4 ,1,250,4 ,0
4 ,1,250,4 ,1
4 ,1,250,4 ,0
4 ,1,250,4 ,0
4 ,1,250,4 ,0
4 ,1,250,4 ,0
4 ,1,250,4 ,0
4 ,1,250,4 ,1
4 ,1,250,4 ,0
4 ,7,1750,62 ,0
4 ,1,250,4 ,0
4 ,4,1000,34 ,1
11 ,6,1500,28 ,0
13 ,3,750,14 ,1
7 ,5,1250,35 ,0
9 ,9,2250,54 ,0
11 ,2,500,11 ,0
2 ,5,1250,63 ,0
7 ,11,2750,89 ,0
8 ,9,2250,64 ,0
2 ,2,500,22 ,0
6 ,3,750,26 ,0
12 ,15,3750,71 ,0
13 ,3,750,16 ,0
11 ,16,4000,89 ,0
4 ,5,1250,58 ,0
14 ,7,1750,35 ,0
11 ,4,1000,27 ,0
7 ,9,2250,89 ,1
11 ,8,2000,52 ,1
7 ,5,1250,52 ,0
11 ,6,1500,41 ,0
10 ,5,1250,38 ,0
14 ,2,500,14 ,1
14 ,2,500,14 ,0
14 ,2,500,14 ,0
2 ,2,500,33 ,0
11 ,3,750,23 ,0
14 ,8,2000,46 ,0
9 ,1,250,9 ,0
16 ,5,1250,27 ,0
14 ,4,1000,26 ,0
4 ,2,500,30 ,0
14 ,3,750,21 ,0
16 ,16,4000,77 ,0
4 ,2,500,31 ,0
14 ,8,2000,50 ,0
11 ,3,750,26 ,0
14 ,7,1750,45 ,0
15 ,5,1250,33 ,0
16 ,2,500,16 ,0
16 ,3,750,21 ,0
11 ,8,2000,72 ,0
11 ,1,250,11 ,0
11 ,1,250,11 ,0
11 ,1,250,11 ,0
11 ,1,250,11 ,1
11 ,1,250,11 ,0
2 ,3,750,75 ,1
2 ,3,750,77 ,0
16 ,4,1000,28 ,0
16 ,15,3750,87 ,0
16 ,14,3500,83 ,0
16 ,10,2500,62 ,0
16 ,3,750,23 ,0
14 ,3,750,26 ,0
23 ,19,4750,62 ,0
11 ,7,1750,75 ,0
14 ,3,750,28 ,0
20 ,14,3500,69 ,1
4 ,2,500,46 ,0
11 ,2,500,25 ,0
11 ,3,750,37 ,0
16 ,4,1000,33 ,0
21 ,7,1750,38 ,0
13 ,7,1750,76 ,0
16 ,6,1500,50 ,0
14 ,3,750,33 ,0
14 ,1,250,14 ,0
14 ,1,250,14 ,0
14 ,1,250,14 ,0
14 ,1,250,14 ,0
14 ,1,250,14 ,0
14 ,1,250,14 ,0
17 ,7,1750,58 ,1
14 ,3,750,35 ,0
14 ,3,750,35 ,0
16 ,7,1750,64 ,0
21 ,2,500,21 ,0
16 ,3,750,35 ,0
16 ,1,250,16 ,0
16 ,1,250,16 ,0
16 ,1,250,16 ,0
16 ,1,250,16 ,0
16 ,1,250,16 ,0
14 ,2,500,29 ,0
11 ,4,1000,74 ,0
11 ,2,500,38 ,1
21 ,6,1500,48 ,0
23 ,2,500,23 ,0
23 ,6,1500,45 ,0
14 ,2,500,35 ,1
16 ,6,1500,81 ,0
16 ,4,1000,58 ,0
16 ,5,1250,71 ,0
21 ,2,500,26 ,0
21 ,3,750,35 ,0
21 ,3,750,35 ,0
23 ,8,2000,69 ,0
21 ,3,750,38 ,0
23 ,3,750,35 ,0
21 ,3,750,40 ,0
23 ,2,500,28 ,0
21 ,1,250,21 ,0
21 ,1,250,21 ,0
25 ,6,1500,50 ,0
21 ,1,250,21 ,0
21 ,1,250,21 ,0
23 ,3,750,39 ,0
21 ,2,500,33 ,0
14 ,3,750,79 ,0
23 ,1,250,23 ,1
23 ,1,250,23 ,0
23 ,1,250,23 ,0
23 ,1,250,23 ,0
23 ,1,250,23 ,0
23 ,1,250,23 ,0
23 ,1,250,23 ,0
23 ,4,1000,52 ,0
23 ,1,250,23 ,0
23 ,7,1750,88 ,0
16 ,3,750,86 ,0
23 ,2,500,38 ,0
21 ,2,500,52 ,0
23 ,3,750,62 ,0
39 ,1,250,39 ,0
72 ,1,250,72 ,0