#!/usr/bin/env python3
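"""Visualizes the Composer package dependencies of this project.

Reads composer.lock, the project's composer.json, and the composer.json of
every local package under packages/, then renders the dependency graph as
dependencies.dot, dependencies.svg (via Graphviz), dependencies.puml, and
dependencies.md next to this script.
"""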
import glob
import json
import os
import re
import subprocess
import sys
from collections import defaultdict, deque
from datetime import datetime


# Configuration: set to an integer to limit traversal depth, or None for unlimited.
MAX_LEVELS = 3
# If True, only show dependencies that are directly required by our own packages
# (OWN_PREFIXES or local); third-party packages are not explored further.
ONLY_DIRECT_DEPS_OF_OWN = True

# Package name prefixes that mark packages as "our own".
OWN_PREFIXES = ('btv/', 'saitho/')
# Prefix for site packages that are merged into one node if they have identical dependencies.
SITE_PACKAGE_PREFIX = 'btv/btv-ws-'


class DependencyVisualizer:
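    """Builds the Composer dependency graph of the project and renders it in several formats."""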
    def __init__(self, base_path, max_levels=None, only_direct_own=False):
        self.base_path = base_path
        self.max_levels = max_levels
        self.only_direct_own = only_direct_own
        self.packages_dir = os.path.join(base_path, 'packages')
        self.lock_data = self._load_json('composer.lock')

        if not self.lock_data:
            print("Error: composer.lock not found. Please run 'composer install' first.")
            sys.exit(1)

        # Map package name to its version and dependency requirements from the lock file
        all_locked_packages = self.lock_data.get('packages', []) + self.lock_data.get('packages-dev', [])
        self.package_map = {pkg['name']: pkg for pkg in all_locked_packages}
        self.version_map = {pkg['name']: pkg['version'] for pkg in all_locked_packages}

        # Track missing dependencies, their dependents and the required version constraints
        self.missing_report = defaultdict(dict)

        # Identify local packages in the packages/ directory
        self.root_packages = self._find_local_packages()

        # Include the main project composer.json as a root
        main_composer = self._load_json('composer.json')
        root_name = main_composer.get('name', 'root')
        self.root_packages[root_name] = main_composer.get('require', {})
        self.version_map[root_name] = 'local'
        self.main_root = root_name

        self._group_site_packages()

    def _group_site_packages(self):
        """Combines packages with the same SITE_PACKAGE_PREFIX and identical dependencies into one node."""
        if not SITE_PACKAGE_PREFIX:
            return

        # Identify all packages matching the prefix
        matching_packages = {}
        all_package_names = set(list(self.root_packages.keys()) + list(self.package_map.keys()))

        for name in all_package_names:
            if name.startswith(SITE_PACKAGE_PREFIX):
                # Get dependencies (excluding php/ext-*, as they usually don't affect site package structure)
                deps = {}
                if name in self.root_packages:
                    deps = self.root_packages[name]
                elif name in self.package_map:
                    deps = self.package_map[name].get('require', {})

                # Filter out PHP and extensions for comparison
                filtered_deps = {k: v for k, v in deps.items() if k != 'php' and not k.startswith('ext-')}
                # Create a stable key from sorted dependencies
                dep_key = json.dumps(filtered_deps, sort_keys=True)
                matching_packages.setdefault(dep_key, []).append(name)

        if not matching_packages:
            return

        # Find the majority dependency set (the one that most packages share).
        # We only want to create ONE merged group for the "standard" case.
        majority_dep_key = max(matching_packages, key=lambda k: len(matching_packages[k]))
        names = matching_packages[majority_dep_key]

        if len(names) <= 1:
            return

        name_map = {}  # Maps old package name to new aggregated name
        # Create an aggregated name (the prefix already ends with '-')
        new_name = f"{SITE_PACKAGE_PREFIX}* ({len(names)} packages)"
        is_root = False

        # Merge dependencies (taken from the first package, as they are identical)
        first_name = names[0]
        if first_name in self.root_packages:
            merged_deps = self.root_packages[first_name].copy()
        else:
            merged_deps = self.package_map[first_name].get('require', {}).copy()

        for name in names:
            name_map[name] = new_name
            if name in self.root_packages:
                is_root = True

            # Clean up the original entries
            if name in self.root_packages:
                del self.root_packages[name]
            if name in self.package_map:
                del self.package_map[name]
            if name in self.version_map:
                del self.version_map[name]

            # Update the main root reference if it was one of these packages
            if name == self.main_root:
                self.main_root = new_name

        # Register the new combined package
        self.version_map[new_name] = 'merged'
        if is_root:
            self.root_packages[new_name] = merged_deps
        else:
            self.package_map[new_name] = {'name': new_name, 'version': 'merged', 'require': merged_deps}

        # Update all dependency references to point to the new aggregated name
        self._update_all_dependency_references(name_map)

    def _update_all_dependency_references(self, name_map):
        """Updates all 'require' dictionaries to use the new mapped names."""
        if not name_map:
            return

        def update_deps(deps):
            new_deps = {}
            for dep_name, constraint in deps.items():
                target_name = name_map.get(dep_name, dep_name)
                new_deps[target_name] = constraint
            return new_deps

        # Update root packages
        for name in list(self.root_packages.keys()):
            self.root_packages[name] = update_deps(self.root_packages[name])

        # Update packages in package_map
        for name in list(self.package_map.keys()):
            if 'require' in self.package_map[name]:
                self.package_map[name]['require'] = update_deps(self.package_map[name]['require'])

    def _load_json(self, filename):
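        """Loads a JSON file relative to base_path; returns {} if the file is missing or invalid."""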
        path = os.path.join(self.base_path, filename)
        if not os.path.exists(path):
            return {}
        with open(path, 'r', encoding='utf-8') as f:
            try:
                return json.load(f)
            except json.JSONDecodeError:
                return {}

    def _find_local_packages(self):
        """Finds all composer.json files in the packages directory and extracts their info."""
        local_roots = {}
        pattern = os.path.join(self.packages_dir, '*', 'composer.json')
        for composer_path in glob.glob(pattern):
            try:
                with open(composer_path, 'r', encoding='utf-8') as f:
                    data = json.load(f)
                    name = data.get('name')
                    if name:
                        local_roots[name] = data.get('require', {})
                        self.version_map[name] = 'local'
            except Exception as e:
                print(f"Warning: Could not read {composer_path}: {e}")
        return local_roots

    def is_relevant(self, name):
        """Filters for TYPO3 or project-specific packages to keep the graphs clean."""
        return name.startswith('typo3/') or name.startswith(OWN_PREFIXES) or name in self.root_packages

    def is_own_package(self, name):
        """Checks whether a package is considered 'ours' (local or matching OWN_PREFIXES)."""
        return name.startswith(OWN_PREFIXES) or name in self.root_packages

    def get_safe_id(self, name):
        """Generates a safe identifier for a package name across different diagram types."""
        # Collapse every run of non-word characters (/, -, ., spaces, *, parentheses
        # from merged group names, ...) into a single underscore.
        return re.sub(r'\W+', '_', name)

    def traverse_dependencies(self):
        """Standard BFS traversal starting from the root packages."""
        levels = {0: list(self.root_packages.keys())}
        visited = set(self.root_packages.keys())
        queue = deque()
        edges = []  # List of tuples: (parent, child, is_missing, constraint)

        # Initialize the queue with the root packages
        for name, deps in self.root_packages.items():
            queue.append((name, deps, 1))

        while queue:
            parent_name, deps, level = queue.popleft()
            if self.max_levels is not None and level > self.max_levels:
                continue

            for dep_name, constraint in deps.items():
                if dep_name == 'php' or dep_name.startswith('ext-'):
                    continue
                if not self.is_relevant(dep_name) and not self.is_relevant(parent_name):
                    continue

                is_missing = dep_name not in self.version_map
                if is_missing:
                    self.missing_report[dep_name][parent_name] = constraint

                edge = (parent_name, dep_name, is_missing, constraint)
                if edge not in edges:
                    edges.append(edge)

                if dep_name not in visited:
                    # Determine whether we should explore this dependency's own dependencies
                    can_explore = True
                    if self.only_direct_own and not self.is_own_package(dep_name):
                        can_explore = False

                    visited.add(dep_name)
                    levels.setdefault(level, []).append(dep_name)
                    if not is_missing and can_explore:
                        pkg_data = self.package_map.get(dep_name)
                        if pkg_data and 'require' in pkg_data:
                            queue.append((dep_name, pkg_data['require'], level + 1))

        return levels, edges, visited

    def generate_dot(self, levels, edges):
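        """Generates a Graphviz DOT digraph with packages clustered by dependency level."""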
        lines = ["digraph dependencies {", "  rankdir=LR;", "  node [shape=box, fontname=\"Arial\"];"]
        for lv in sorted(levels.keys()):
            lines.append(f'  subgraph cluster_level_{lv} {{')
            lines.append(f'    label = "Level {lv}"; style = dashed; color = grey;')
            for name in levels[lv]:
                version = self.version_map.get(name)
                attrs = []
                if name in self.root_packages:
                    attrs.append('style=filled, fillcolor=yellow')
                elif version is None:
                    attrs.append('style=filled, fillcolor="#ffcccc", color="#cc0000"')

                label_text = f"{name}\\n({version if version else 'MISSING'})"
                if version is None:
                    for parent, constraint in sorted(self.missing_report[name].items()):
                        label_text += f"\\nReq by {parent}: {constraint}"

                attrs.append(f'label="{label_text}"')
                lines.append(f'    "{name}" [{", ".join(attrs)}];')
            lines.append('  }')

        for parent, child, is_missing, constraint in edges:
            if is_missing:
                attr = ' [color=red]'
            else:
                attr = f' [label="{constraint}"]'
            lines.append(f'  "{parent}" -> "{child}"{attr};')
        lines.append("}")
        return "\n".join(lines)

    def generate_puml(self, levels, edges, visited):
        """Generates a PlantUML component diagram."""
        lines = ["@startuml", "skinparam componentStyle uml2", ""]

        # Style definitions
        lines.append("skinparam component {")
        lines.append("  BackgroundColor<<ROOT>> Yellow")
        lines.append("  BackgroundColor<<MISSING>> #ffcccc")
        lines.append("  BorderColor<<MISSING>> Red")
        lines.append("}")
        lines.append("")

        # Components
        for name in sorted(visited):
            safe_id = self.get_safe_id(name)
            version = self.version_map.get(name)

            stereotype = ""
            if name in self.root_packages:
                stereotype = "<<ROOT>>"
            elif version is None:
                stereotype = "<<MISSING>>"

            label = f"{name}"
            if version:
                label += f" ({version})"
            elif name in self.missing_report:
                label += " (MISSING)"

            lines.append(f'[{label}] as {safe_id} {stereotype}')

        lines.append("")
        # Relations
        for parent, child, is_missing, constraint in edges:
            p_id = self.get_safe_id(parent)
            c_id = self.get_safe_id(child)
            arrow = "-[#red]->" if is_missing else "-->"
            lines.append(f'{p_id} {arrow} {c_id} : "{constraint}"')

        lines.append("")
        lines.append("@enduml")
        return "\n".join(lines)

    def generate_markdown(self, levels, edges):
        """Generates a Markdown report of the dependencies."""
        now = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        lines = [
            "# Dependency Report",
            "",
            "> [!NOTE]",
            f"> This file was automatically generated on {now}.",
            "",
            "## Explanation",
            "This report visualizes the dependency tree of the project's composer packages.",
            "",
            "- **Levels**: Level 0 represents the root project and local packages. Higher levels represent deeper dependencies.",
            "- **Filtering**: The report is filtered to show only relevant packages (TYPO3 core or project-specific namespaces).",
            f"- **Grouping**: Packages starting with `{SITE_PACKAGE_PREFIX}` that share identical dependencies are merged into single nodes to keep the report concise. Outliers with unique dependencies are kept separate.",
            "- **Direct Dependencies**: If configured, only direct dependencies of our own packages are explored.",
            ""
        ]

        lines.append("## Packages by Level")
        for lv in sorted(levels.keys()):
            lines.append(f"### Level {lv}")
            for name in sorted(levels[lv]):
                version = self.version_map.get(name, "MISSING").replace('|', '\\|')
                lines.append(f"- **{name}** (`{version}`)")
            lines.append("")

        lines.append("## Reverse Dependencies (Dependents)")
        lines.append("| Package | Required By | Constraint |")
        lines.append("| --- | --- | --- |")
        # Sort by child (the package being depended upon)
        for parent, child, is_missing, constraint in sorted(edges, key=lambda x: (x[1], x[0])):
            safe_constraint = constraint.replace('|', '\\|')
            lines.append(f"| {child} | {parent} | `{safe_constraint}` |")

        return "\n".join(lines)

    def run(self):
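        """Traverses the dependency graph and writes all output files to base_path."""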
        levels, edges, visited = self.traverse_dependencies()

        # DOT
        dot_path = os.path.join(self.base_path, 'dependencies.dot')
        with open(dot_path, 'w', encoding='utf-8') as f:
            f.write(self.generate_dot(levels, edges))

        # SVG (via DOT)
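        # Requires the Graphviz 'dot' binary on the PATH; a failure is reported but non-fatal.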
        svg_path = os.path.join(self.base_path, 'dependencies.svg')
        try:
            subprocess.run(['dot', '-Tsvg', dot_path, '-o', svg_path], check=True)
        except Exception as e:
            print(f"Warning: Could not generate SVG via 'dot' command: {e}")

        # PlantUML
        with open(os.path.join(self.base_path, 'dependencies.puml'), 'w', encoding='utf-8') as f:
            f.write(self.generate_puml(levels, edges, visited))

        # Markdown
        with open(os.path.join(self.base_path, 'dependencies.md'), 'w', encoding='utf-8') as f:
            f.write(self.generate_markdown(levels, edges))

        print(f"\nSuccess! Analyzed {len(self.root_packages)} root package(s).")
        if self.missing_report:
            print("\n!!! MISSING DEPENDENCIES DETECTED !!!")
            for pkg, dependents in sorted(self.missing_report.items()):
                print(f"  - {pkg}")
                for dependent, constraint in sorted(dependents.items()):
                    print(f"    Required by: {dependent} ({constraint})")

        print(f"\nFiles generated in {self.base_path}:")
        print("1. dependencies.dot (Source for Graphviz)")
        print("2. dependencies.svg (Vector graphic)")
        print("3. dependencies.puml (View with PlantUML)")
        print("4. dependencies.md (Markdown report)")


if __name__ == "__main__":
    visualizer = DependencyVisualizer(
        os.path.dirname(os.path.abspath(__file__)),
        MAX_LEVELS,
        ONLY_DIRECT_DEPS_OF_OWN,
    )
    visualizer.run()