@@ -30,7 +30,7 @@ class ProjectAnalyzer:
            'dotnet': ['.csproj', '.sln']
        }

        # Frameworks and libraries to detect inside project files
        self.framework_patterns = {
            'django': ['django'],
            'flask': ['flask'],
@@ -53,27 +53,27 @@ class ProjectAnalyzer:
        }

    def analyze_technologies(self, project_path):
        """Analyze the technologies used in the project."""
        technologies = defaultdict(int)
        frameworks = defaultdict(int)

        # Walk every file in the project tree
        for root, _, files in os.walk(project_path):
            if '.git' in root:  # Skip the .git directory
                continue

            for file in files:
                file_path = os.path.join(root, file)

                # Detect technologies by file extension or name
                for tech, patterns in self.tech_patterns.items():
                    for pattern in patterns:
                        if file.endswith(pattern) or file == pattern:
                            technologies[tech] += 1

                # Scan file contents to detect frameworks
                try:
                    if os.path.getsize(file_path) < 1000000:  # Only files under 1 MB
                        with open(file_path, 'r', encoding='utf-8') as f:
                            content = f.read().lower()
                            for framework, patterns in self.framework_patterns.items():
@@ -83,22 +83,22 @@ class ProjectAnalyzer:
                except (UnicodeDecodeError, IOError):
                    continue

        # Keep only the technologies and frameworks that were actually detected
        significant_technologies = {k: v for k, v in technologies.items() if v > 0}
        significant_frameworks = {k: v for k, v in frameworks.items() if v > 0}

        # Format the results
        tech_list = []

        # Add the main languages
        for tech, count in significant_technologies.items():
            tech_list.append(f"{tech.capitalize()} - Langage/Technologie principale")

        # Add the frameworks
        for framework, count in significant_frameworks.items():
            tech_list.append(f"{framework.capitalize()} - Framework/Bibliothèque")

        # Add the build and dependency-management tools
        build_tools = {'maven', 'gradle', 'npm', 'pip', 'dotnet'}
        for tool in build_tools:
            if tool in significant_technologies:
@@ -107,13 +107,13 @@ class ProjectAnalyzer:
        return tech_list

    def get_git_authors(self, project_path):
        """Fetch the authors from the Git history along with their contributions."""
        try:
            # Check whether the project is a Git repository
            if not os.path.exists(os.path.join(project_path, '.git')):
                return []

            # Collect the authors and their commit counts
            cmd_log = ['git', 'shortlog', '-sne', '--all']
            process = subprocess.Popen(cmd_log,
                                       stdout=subprocess.PIPE,
@@ -125,7 +125,7 @@ class ProjectAnalyzer:
            authors = []
            for line in output.strip().split('\n'):
                if line.strip():
                    # Format: "123\tAuthor Name <email@example.com>"
                    parts = line.strip().split('\t')
                    if len(parts) == 2:
                        commits = parts[0].strip()
@@ -133,7 +133,7 @@ class ProjectAnalyzer:
                        name = author_info[0].strip()
                        email = author_info[1].rstrip('>')

                        # Gather this author's contribution statistics
                        cmd_stat = ['git', 'log', '--author=' + email, '--pretty=tformat:', '--numstat']
                        process = subprocess.Popen(cmd_stat,
                                                   stdout=subprocess.PIPE,
@@ -142,7 +142,7 @@ class ProjectAnalyzer:
                                                   text=True)
                        stat_output, _ = process.communicate()

                        # Tally the lines added and deleted
                        added = 0
                        deleted = 0
                        for stat_line in stat_output.strip().split('\n'):
@@ -156,7 +156,7 @@ class ProjectAnalyzer:
                            except ValueError:
                                continue

                        # Get the date of the last contribution
                        cmd_last = ['git', 'log', '-1', '--format=%ai', f'--author={email}']
                        process = subprocess.Popen(cmd_last,
                                                   stdout=subprocess.PIPE,
@@ -166,7 +166,7 @@ class ProjectAnalyzer:
                        last_date, _ = process.communicate()
                        last_date = last_date.strip()

                        # Format the date if one is available
                        if last_date:
                            from datetime import datetime
                            date_obj = datetime.strptime(last_date.split()[0], '%Y-%m-%d')
@@ -187,32 +187,32 @@ class ProjectAnalyzer:
            return []

    def get_prerequisites(self, project_path):
        """Analyze the project to determine its prerequisites."""
        prerequisites = []

        # Check the various dependency files
        req_file = os.path.join(project_path, 'requirements.txt')
        if os.path.exists(req_file):
            # Read every dependency
            with open(req_file, 'r', encoding='utf-8') as f:
                dependencies = [line.strip() for line in f if line.strip() and not line.startswith('#')]

            # Add Python, with a specific version if one is declared
            python_version = "3.x"  # Default version
            for dep in dependencies:
                if dep.lower().startswith("python"):
                    python_version = dep.split("==")[-1] if "==" in dep else "3.x"
                    break
            prerequisites.append(f"- Python {python_version}")

            # Group the dependencies by category
            ui_deps = []
            parsing_deps = []
            diagram_deps = []
            other_deps = []

            for dep in dependencies:
                # Extract the name and the version
                if "==" in dep:
                    name, version = dep.split("==")
                    name = name.strip().lower()
@@ -221,7 +221,7 @@ class ProjectAnalyzer:
                    name = dep.strip().lower()
                    version = "(dernière version)"

                # Classify the dependency
                if name in ['customtkinter', 'tkinter', 'pillow']:
                    ui_deps.append(f"{dep} - {version}")
                elif name in ['antlr4-python3-runtime', 'javalang', 'pyparsing']:
@@ -231,7 +231,7 @@ class ProjectAnalyzer:
                else:
                    other_deps.append(f"{dep} - {version}")

            # Add the grouped dependencies
            if ui_deps:
                prerequisites.append("\n### Interface graphique")
                for dep in ui_deps:
@@ -252,11 +252,11 @@ class ProjectAnalyzer:
                for dep in other_deps:
                    prerequisites.append(f"- {dep}")

        # Check for Java, which PlantUML requires
        if os.path.exists(req_file) and any('plantuml' in line.lower() for line in open(req_file)):
            prerequisites.insert(1, "- Java Runtime Environment (JRE) - Requis pour PlantUML")

        # Add the recommended development tools
        prerequisites.append("\n### Outils de développement recommandés")
        prerequisites.append("- Un IDE Python (PyCharm, VSCode, etc.)")
        prerequisites.append("- Git pour le contrôle de version")
@@ -264,20 +264,20 @@ class ProjectAnalyzer:
        return prerequisites

    def get_installation_steps(self, project_path):
        """Generate the installation steps for the project."""
        steps = []

        # Step 1: clone the project
        if os.path.exists(os.path.join(project_path, '.git')):
            try:
                # Get the remote repository URL
                cmd = ['git', 'config', '--get', 'remote.origin.url']
                process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                                           cwd=project_path, text=True)
                output, _ = process.communicate()
                repo_url = output.strip() if output.strip() else '[URL_DU_PROJET]'

                # Get the project name
                project_name = os.path.basename(project_path)

                steps.append(
@@ -292,7 +292,7 @@ class ProjectAnalyzer:
                "**Télécharger le projet**\n Téléchargez et décompressez le projet dans un dossier de votre choix"
            )

        # Step 2: install the dependencies
        if os.path.exists(os.path.join(project_path, 'requirements.txt')):
            steps.append(
                "**Installer les dépendances Python**\n ```bash\n pip install -r requirements.txt\n ```"
@@ -318,7 +318,7 @@ class ProjectAnalyzer:
                "**Restaurer et compiler le projet .NET**\n ```bash\n dotnet restore\n dotnet build\n ```"
            )

        # Step 3: launch the application
        main_files = {
            'main.py': "**Lancer l'application**\n ```bash\n python main.py\n ```",
            'app.py': "**Lancer l'application**\n ```bash\n python app.py\n ```",
@@ -339,6 +339,6 @@ class ProjectAnalyzer:
            except json.JSONDecodeError:
                pass

        # If no launch step was detected, do not add a default one

        return steps
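
A minimal usage sketch of the methods touched in this diff; the module name and the no-argument constructor are assumptions, since neither appears here:

    # Assumed import path and constructor signature (not shown in this diff).
    from project_analyzer import ProjectAnalyzer

    analyzer = ProjectAnalyzer()
    project_path = "/path/to/a/project"

    print(analyzer.analyze_technologies(project_path))     # detected languages and frameworks
    print(analyzer.get_git_authors(project_path))          # authors pulled from the Git history
    print(analyzer.get_prerequisites(project_path))        # prerequisites section lines
    print(analyzer.get_installation_steps(project_path))   # installation steps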