"""Project analysis utilities: detect technologies, Git authors, prerequisites and installation steps."""
import json
import os
import subprocess
from collections import defaultdict
from datetime import datetime
|
|
|
|
class ProjectAnalyzer:
    """Analyze a project directory to produce report fragments.

    Detects technologies and frameworks, collects Git author statistics,
    and infers prerequisites and installation steps. All user-facing
    report strings are in French (the report language of this project).
    """

    def __init__(self):
        # File extensions / exact file names that identify a technology.
        # A name pattern (e.g. 'Dockerfile') matches exactly; an extension
        # pattern (e.g. '.py') matches via str.endswith.
        self.tech_patterns = {
            'python': ['.py', 'requirements.txt', 'setup.py', 'Pipfile'],
            'java': ['.java', 'pom.xml', 'build.gradle', '.jar'],
            'csharp': ['.cs', '.csproj', '.sln'],
            'javascript': ['.js', 'package.json', '.jsx'],
            'typescript': ['.ts', '.tsx', 'tsconfig.json'],
            'html': ['.html', '.htm'],
            'css': ['.css', '.scss', '.sass', '.less'],
            'react': ['react', '.jsx', '.tsx'],
            'angular': ['angular', '.ts'],
            'vue': ['.vue'],
            'docker': ['Dockerfile', 'docker-compose.yml'],
            'sql': ['.sql'],
            'markdown': ['.md'],
            'yaml': ['.yml', '.yaml'],
            'json': ['.json'],
            'xml': ['.xml'],
            'git': ['.git'],
            'maven': ['pom.xml'],
            'gradle': ['build.gradle'],
            'npm': ['package.json'],
            'pip': ['requirements.txt'],
            'dotnet': ['.csproj', '.sln'],
        }

        # Substrings searched (case-insensitively) inside file contents to
        # spot specific frameworks and libraries.
        self.framework_patterns = {
            'django': ['django'],
            'flask': ['flask'],
            'spring': ['@SpringBootApplication', '@Controller', '@Service'],
            'aspnet': ['Microsoft.AspNetCore'],
            'react': ['react'],
            'angular': ['@angular'],
            'vue': ['Vue'],
            'bootstrap': ['bootstrap'],
            'jquery': ['jquery'],
            'tailwind': ['tailwindcss'],
            'entity_framework': ['Microsoft.EntityFrameworkCore'],
            'hibernate': ['@Entity', '@Table'],
            'pytest': ['pytest'],
            'junit': ['@Test', 'junit'],
            'numpy': ['numpy'],
            'pandas': ['pandas'],
            'tensorflow': ['tensorflow'],
            'pytorch': ['torch'],
        }

    def analyze_technologies(self, project_path):
        """Detect the technologies and frameworks used in the project.

        Walks the whole tree under ``project_path``, matching file names
        against ``tech_patterns`` and (small) file contents against
        ``framework_patterns``.

        Returns a list of formatted French description strings, one per
        detected technology, framework, or dependency-management tool.
        """
        technologies = defaultdict(int)
        frameworks = defaultdict(int)

        for root, _, files in os.walk(project_path):
            # Skip anything inside a .git directory. Compare whole path
            # components, not substrings, so e.g. "foo.github" is kept.
            if '.git' in root.split(os.sep):
                continue

            for file in files:
                file_path = os.path.join(root, file)

                # Detect technologies by extension / exact file name;
                # count each file at most once per technology.
                for tech, patterns in self.tech_patterns.items():
                    for pattern in patterns:
                        if file.endswith(pattern) or file == pattern:
                            technologies[tech] += 1
                            break

                # Scan file contents for framework markers.
                try:
                    if os.path.getsize(file_path) < 1000000:  # only files < 1MB
                        with open(file_path, 'r', encoding='utf-8') as f:
                            content = f.read().lower()
                        for framework, patterns in self.framework_patterns.items():
                            for pattern in patterns:
                                if pattern.lower() in content:
                                    frameworks[framework] += 1
                                    break
                except (UnicodeDecodeError, OSError):
                    # Binary or unreadable file: ignore it.
                    continue

        # Keep only entries actually seen at least once.
        significant_technologies = {k: v for k, v in technologies.items() if v > 0}
        significant_frameworks = {k: v for k, v in frameworks.items() if v > 0}

        tech_list = []

        # Main languages / technologies.
        for tech in significant_technologies:
            tech_list.append(f"{tech.capitalize()} - Langage/Technologie principale")

        # Frameworks / libraries.
        for framework in significant_frameworks:
            tech_list.append(f"{framework.capitalize()} - Framework/Bibliothèque")

        # Build / dependency-management tools. A tuple (not a set) keeps
        # the output order deterministic across runs.
        build_tools = ('maven', 'gradle', 'npm', 'pip', 'dotnet')
        for tool in build_tools:
            if tool in significant_technologies:
                tech_list.append(f"{tool.capitalize()} - Gestion de dépendances")

        return tech_list

    def get_git_authors(self, project_path):
        """Collect Git authors with their contribution statistics.

        For each author found by ``git shortlog``, gathers commit count,
        lines added/deleted and last contribution date, formatted as a
        Markdown string (in French).

        Returns an empty list when ``project_path`` is not a Git
        repository or when a git invocation fails.
        """
        try:
            # Check the project is a git repository.
            if not os.path.exists(os.path.join(project_path, '.git')):
                return []

            # One line per author: "<count>\tName <email>"
            result = subprocess.run(['git', 'shortlog', '-sne', '--all'],
                                    capture_output=True, text=True,
                                    cwd=project_path)
            output = result.stdout

            authors = []
            for line in output.strip().split('\n'):
                line = line.strip()
                if not line:
                    continue
                parts = line.split('\t')
                if len(parts) != 2:
                    continue
                commits = parts[0].strip()
                author_info = parts[1].split('<')
                if len(author_info) < 2:
                    # Malformed shortlog line with no "<email>" part:
                    # skip just this author instead of aborting.
                    continue
                name = author_info[0].strip()
                email = author_info[1].rstrip('>')

                # Lines added/deleted by this author over all commits.
                # NOTE(review): git treats --author as a regex, so emails
                # containing regex metacharacters may over-match.
                stat = subprocess.run(
                    ['git', 'log', '--author=' + email,
                     '--pretty=tformat:', '--numstat'],
                    capture_output=True, text=True, cwd=project_path)

                added = 0
                deleted = 0
                for stat_line in stat.stdout.strip().split('\n'):
                    if not stat_line.strip():
                        continue
                    try:
                        add, delete, _ = stat_line.split('\t')
                        if add != '-':  # '-' marks binary files
                            added += int(add)
                        if delete != '-':
                            deleted += int(delete)
                    except ValueError:
                        continue

                # Date of the author's most recent commit ("%ai" gives
                # "YYYY-MM-DD HH:MM:SS +ZZZZ").
                last = subprocess.run(
                    ['git', 'log', '-1', '--format=%ai', f'--author={email}'],
                    capture_output=True, text=True, cwd=project_path)
                last_date = last.stdout.strip()

                if last_date:
                    date_obj = datetime.strptime(last_date.split()[0], '%Y-%m-%d')
                    last_date = date_obj.strftime('%d/%m/%Y')
                    author_line = f"**{name}**"
                    author_line += f"\n - {commits} commits"
                    author_line += f"\n - {added:,} lignes ajoutées, {deleted:,} lignes supprimées"
                    author_line += f"\n - Dernière contribution : {last_date}"
                else:
                    author_line = f"**{name}** ({commits} commits)"

                authors.append(author_line)

            return authors

        except Exception as e:
            # Best effort: report the problem and fall back to no authors.
            print(f"Erreur lors de la récupération des auteurs Git: {str(e)}")
            return []

    def get_prerequisites(self, project_path):
        """Infer the project's prerequisites from requirements.txt.

        Returns a list of French Markdown lines: the Python version,
        dependencies grouped by category (UI, code analysis, diagrams,
        other), a JRE entry when PlantUML is used, and recommended
        development tools.
        """
        prerequisites = []
        dependencies = []

        req_file = os.path.join(project_path, 'requirements.txt')
        if os.path.exists(req_file):
            # Read all dependencies once (skipping blanks and comments);
            # the list is reused below for the PlantUML check, fixing the
            # original's leaked second open() of the same file.
            with open(req_file, 'r', encoding='utf-8') as f:
                dependencies = [line.strip() for line in f
                                if line.strip() and not line.startswith('#')]

            # Python itself, with a pinned version when one is declared.
            python_version = "3.x"  # default version
            for dep in dependencies:
                if dep.lower().startswith("python"):
                    python_version = dep.split("==")[-1] if "==" in dep else "3.x"
                    break
            prerequisites.append(f"- Python {python_version}")

            # Group dependencies by category.
            ui_deps = []
            parsing_deps = []
            diagram_deps = []
            other_deps = []

            for dep in dependencies:
                # Extract name and version ("==" at most once, so a
                # malformed "a==b==c" line cannot crash the unpacking).
                if "==" in dep:
                    name, version = dep.split("==", 1)
                    name = name.strip().lower()
                    version = f"v{version}"
                else:
                    name = dep.strip().lower()
                    version = "(dernière version)"

                # Classify the dependency.
                if name in ['customtkinter', 'tkinter', 'pillow']:
                    ui_deps.append(f"{dep} - {version}")
                elif name in ['antlr4-python3-runtime', 'javalang', 'pyparsing']:
                    parsing_deps.append(f"{dep} - {version}")
                elif name in ['plantuml']:
                    diagram_deps.append(f"{dep} - {version}")
                else:
                    other_deps.append(f"{dep} - {version}")

            # Emit the grouped dependencies, one section per category.
            for title, deps in (("\n### Interface graphique", ui_deps),
                                ("\n### Analyse de code", parsing_deps),
                                ("\n### Génération de diagrammes", diagram_deps),
                                ("\n### Autres dépendances", other_deps)):
                if deps:
                    prerequisites.append(title)
                    for dep in deps:
                        prerequisites.append(f"- {dep}")

        # PlantUML renders through Java, so a JRE is required; checked on
        # the already-parsed dependency list (no second file open).
        if any('plantuml' in dep.lower() for dep in dependencies):
            prerequisites.insert(1, "- Java Runtime Environment (JRE) - Requis pour PlantUML")

        # Recommended development tooling.
        prerequisites.append("\n### Outils de développement recommandés")
        prerequisites.append("- Un IDE Python (PyCharm, VSCode, etc.)")
        prerequisites.append("- Git pour le contrôle de version")

        return prerequisites

    def get_installation_steps(self, project_path):
        """Build the installation steps inferred from the project's files.

        Returns a list of French Markdown step strings: how to obtain the
        sources (git clone or download), install/build dependencies, and
        launch the application. No default launch step is added when no
        known entry point is found.
        """
        steps = []

        # Step 1: obtaining the sources.
        if os.path.exists(os.path.join(project_path, '.git')):
            try:
                # Ask git for the remote URL so the clone command is exact.
                result = subprocess.run(
                    ['git', 'config', '--get', 'remote.origin.url'],
                    capture_output=True, text=True, cwd=project_path)
                repo_url = result.stdout.strip() or '[URL_DU_PROJET]'
                project_name = os.path.basename(project_path)
                steps.append(
                    f"**Cloner le projet**\n ```bash\n git clone {repo_url}\n cd {project_name}\n ```"
                )
            except Exception:
                # git unavailable: fall back to a placeholder URL.
                steps.append(
                    "**Cloner le projet**\n ```bash\n git clone [URL_DU_PROJET]\n cd [NOM_DU_PROJET]\n ```"
                )
        else:
            steps.append(
                "**Télécharger le projet**\n Téléchargez et décompressez le projet dans un dossier de votre choix"
            )

        # Step 2: installing dependencies / building, one step per
        # recognised marker file found at the project root.
        build_steps = (
            ('requirements.txt',
             "**Installer les dépendances Python**\n ```bash\n pip install -r requirements.txt\n ```"),
            ('package.json',
             "**Installer les dépendances Node.js**\n ```bash\n npm install\n ```"),
            ('pom.xml',
             "**Compiler le projet avec Maven**\n ```bash\n mvn clean install\n ```"),
            ('build.gradle',
             "**Compiler le projet avec Gradle**\n ```bash\n ./gradlew build\n ```"),
        )
        for marker, step in build_steps:
            if os.path.exists(os.path.join(project_path, marker)):
                steps.append(step)

        if any(f.endswith('.csproj') for f in os.listdir(project_path)):
            steps.append(
                "**Restaurer et compiler le projet .NET**\n ```bash\n dotnet restore\n dotnet build\n ```"
            )

        # Step 3: launching the application (first matching entry point
        # wins, in this priority order).
        main_files = {
            'main.py': "**Lancer l'application**\n ```bash\n python main.py\n ```",
            'app.py': "**Lancer l'application**\n ```bash\n python app.py\n ```",
            'manage.py': "**Lancer le serveur Django**\n ```bash\n python manage.py runserver\n ```",
        }
        for file, command in main_files.items():
            if os.path.exists(os.path.join(project_path, file)):
                steps.append(command)
                break

        if os.path.exists(os.path.join(project_path, 'package.json')):
            try:
                with open(os.path.join(project_path, 'package.json'), 'r') as f:
                    package_data = json.load(f)
                if 'scripts' in package_data and 'start' in package_data['scripts']:
                    steps.append("**Lancer l'application**\n ```bash\n npm start\n ```")
            except (OSError, json.JSONDecodeError):
                # Unreadable or invalid package.json: no npm launch step.
                pass

        # No default launch step when nothing recognisable was detected.
        return steps
|