diff --git a/.github/workflows/examples.yaml b/.github/workflows/examples.yaml
index 24c318ad..423f8dcf 100644
--- a/.github/workflows/examples.yaml
+++ b/.github/workflows/examples.yaml
@@ -443,150 +443,401 @@ jobs:
with:
name: macos-arm-size-report
path: macos_arm_size_report.json
+
combine-reports:
+ name: Generate Combined Size Reports
needs: [windows, android, ios, linux, macos, macos-arm]
runs-on: ubuntu-latest
+
steps:
+ - name: Set up Python
+ uses: actions/setup-python@v5
+ with:
+ python-version: '3.11'
+ cache: 'pip'
+
+ - name: Install dependencies
+ run: |
+ python -m pip install --upgrade pip
+          pip install numpy
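+          # numpy backs the trend averages computed in the combine script below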
+
- name: Download all artifacts
uses: actions/download-artifact@v4
with:
path: size-reports
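+          # Each artifact is extracted into its own subdirectory
+          # (e.g. size-reports/linux-size-report/), which the combine
+          # script uses to recover the platform name.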
+          pattern: '*-size-report'
- - name: Generate Combined JSON Report
+ - name: Generate Combined Report
+ id: combine-reports
run: |
- cat << 'EOF' > generate_combined_json.py
+ cat << 'EOF' > generate_combined_report.py
import json
import os
import glob
from datetime import datetime
+ from typing import Dict, Any, Union
+ import numpy as np
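+
+          # Each per-platform report is assumed to map package names to sizes,
+          # e.g. {"my_pkg": {"arm64": {"total_size": 12.3}}} (hypothetical values,
+          # in MB), since that is the shape the helpers below consume.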
- def convert_to_mb(value):
+ def format_size(size: Union[float, str, dict, None]) -> tuple[str, float]:
+ """Format size values with appropriate units and return both display and raw values."""
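+              # Numeric inputs are treated as MB; the raw value returned alongside
+              # the display string stays in MB so downstream averages are unit-consistent.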
try:
- return round(float(value) / 1024, 2)
- except (ValueError, TypeError):
- return value
-
- def process_dict(data):
- if isinstance(data, dict):
- return {k: process_dict(v) for k, v in data.items()}
- elif isinstance(data, list):
- return [process_dict(item) for item in data]
- else:
- return convert_to_mb(data)
-
- def consolidate_data(reports):
- combined = {"timestamp": datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ")}
- packages_data = {}
-
+ if isinstance(size, (int, float)):
+ raw_value = float(size)
+ if size > 1024:
+ return f"{size/1024:.2f} GB", raw_value
+ return f"{size:.2f} MB", raw_value
+ elif isinstance(size, str):
+ try:
+ float_val = float(size)
+ return format_size(float_val)
+ except ValueError:
+ return size, 0.0
+ elif isinstance(size, dict):
+ return "N/A (nested)", 0.0
+ return "N/A", 0.0
+ except Exception:
+ return "Error", 0.0
+
+ def process_platform_data(data: Dict[str, Any]) -> Dict[str, Any]:
+ """Process and validate platform-specific data."""
+ processed = {}
+ for key, value in data.items():
+ if isinstance(value, dict):
+ processed[key] = {
+ k: format_size(v)[0] if not isinstance(v, dict) else process_platform_data(v)
+ for k, v in value.items()
+ }
+ else:
+ processed[key] = format_size(value)[0]
+ return processed
+
+ def generate_trend_data(data: Dict[str, Any]) -> Dict[str, Any]:
+              """Aggregate raw per-package, per-platform sizes (MB) for analytics."""
+ trends = {}
+ for package, platforms in data.items():
+ if package == "platform":
+ continue
+ trends[package] = {}
+ for platform, sizes in platforms.items():
+ if isinstance(sizes, dict):
+ if "total_size" in sizes:
+ _, raw_size = format_size(sizes["total_size"])
+ trends[package][platform] = raw_size
+ else:
+                          arch_sizes = [
+                              format_size(s.get("total_size", 0))[1]
+                              for s in sizes.values()
+                              if isinstance(s, dict)
+                          ]
+                          # np.mean of an empty list is NaN; default to 0.0 instead
+                          trends[package][platform] = float(np.mean(arch_sizes)) if arch_sizes else 0.0
+ else:
+ _, raw_size = format_size(sizes)
+ trends[package][platform] = raw_size
+ return trends
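+
+          # generate_trend_data() returns e.g. {"my_pkg": {"linux": 12.3, "macos": 14.1}}
+          # (MB; values hypothetical), which feeds the summary stats in consolidate_data().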
+
+ def consolidate_data(reports: list) -> Dict[str, Any]:
+ """Consolidate all report data with additional analytics."""
+ combined = {
+ "metadata": {
+ "timestamp": datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ"),
+ "report_count": len(reports),
+ "generated_at": datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S UTC")
+ },
+                  "packages": {}
+              }
+              # Keep an unformatted copy of the sizes for trend math; the display copy
+              # in combined["packages"] holds pre-formatted strings that can't be averaged.
+              raw_packages = {}
+
for report_path in reports:
platform = os.path.basename(os.path.dirname(report_path)).replace('-size-report', '')
- with open(report_path) as f:
- data = json.load(f)
- processed_data = process_dict(data)
- for package, sizes in processed_data.items():
- if package not in packages_data:
- packages_data[package] = {}
- packages_data[package][platform] = sizes
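+                  # A malformed report should not fail the whole job; log it and move on.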
+ try:
+ with open(report_path) as f:
+ data = json.load(f)
+                      for package, sizes in data.items():
+                          raw_packages.setdefault(package, {})[platform] = sizes
+                          combined["packages"].setdefault(package, {})[platform] = process_platform_data(sizes)
+ except Exception as e:
+                      print(f"::warning::Skipping {report_path}: {e}")
+ continue
+
+              # Generate trends and statistics from the raw (numeric) sizes
+              trends = generate_trend_data(raw_packages)
+ combined["analytics"] = {
+ "trends": trends,
+ "summary": {
+ package: {
+ "average_size": f"{np.mean(list(platforms.values())):.2f} MB",
+ "size_range": f"{min(platforms.values()):.2f} - {max(platforms.values()):.2f} MB"
+ }
+                      for package, platforms in trends.items()
+                      if platforms  # skip packages with no usable size data
+ }
+ }
- combined["packages"] = packages_data
return combined
- reports = glob.glob('size-reports/*/*.json')
- combined_data = consolidate_data(reports)
-
- with open("combined_size_report.json", "w") as f:
- json.dump(combined_data, f, indent=2)
+ def main():
+ try:
+ reports = glob.glob('size-reports/*/*.json')
+ if not reports:
+ raise Exception("No report files found")
+
+ combined_data = consolidate_data(reports)
+
+ # Save combined report
+ with open("combined_size_report.json", "w") as f:
+ json.dump(combined_data, f, indent=2)
+
+                  # ::set-output is deprecated and disabled on current runners;
+                  # write step outputs to the GITHUB_OUTPUT file instead.
+                  with open(os.environ["GITHUB_OUTPUT"], "a") as out:
+                      out.write("status=success\n")
+                      out.write(f"report_count={len(reports)}\n")
+
+ except Exception as e:
+ print(f"::error::Error generating combined report: {str(e)}")
+                  with open(os.environ["GITHUB_OUTPUT"], "a") as out:
+                      out.write("status=failure\n")
+ raise
+
+ if __name__ == "__main__":
+ main()
EOF
- python3 generate_combined_json.py
-
- - name: Display Combined JSON Report
- run: |
- echo "Combined JSON Report:"
- cat combined_size_report.json
-
- - name: Upload Combined JSON Report
- uses: actions/upload-artifact@v4
- with:
- name: combined-size-report
- path: combined_size_report.json
+ python generate_combined_report.py
- - name: Generate SVGs for Each Package
+ - name: Generate SVG Reports
+ if: steps.combine-reports.outputs.status == 'success'
+ id: generate-svgs
run: |
cat << 'EOF' > generate_svgs.py
import json
from pathlib import Path
+          from typing import Dict, Any
+          import os  # needed to write step outputs to GITHUB_OUTPUT
+
+ def create_gradient_definitions() -> str:
+ """Create gradient definitions for various visual elements."""
+              return '''
+              <defs>
+                  <linearGradient id="header-gradient" x1="0%" y1="0%" x2="100%" y2="0%">
+                      <stop offset="0%" stop-color="#4a90d9"/>
+                      <stop offset="100%" stop-color="#2c5aa0"/>
+                  </linearGradient>
+                  <linearGradient id="row-gradient" x1="0%" y1="0%" x2="0%" y2="100%">
+                      <stop offset="0%" stop-color="#f8f9fa"/>
+                      <stop offset="100%" stop-color="#e9ecef"/>
+                  </linearGradient>
+              </defs>
+              '''
- def generate_svg_for_package(package_name, data):
- platforms = list(data.keys())
- max_archs = 0
- for platform_data in data.values():
- if isinstance(platform_data, dict):
- max_archs = max(max_archs, len(platform_data))
+ def create_platform_icon(platform: str, x: int, y: int) -> str:
+ """Create platform-specific icon."""
+              # Simple placeholder glyphs (assumed styling, not official brand marks).
+              icons = {
+                  'windows': '''
+                  <rect x="0" y="0" width="9" height="9" fill="#00adef"/>
+                  <rect x="11" y="0" width="9" height="9" fill="#00adef"/>
+                  <rect x="0" y="11" width="9" height="9" fill="#00adef"/>
+                  <rect x="11" y="11" width="9" height="9" fill="#00adef"/>
+                  ''',
+                  'apple': '''
+                  <circle cx="10" cy="10" r="10" fill="#555555"/>
+                  ''',
+                  'linux': '''
+                  <circle cx="10" cy="10" r="10" fill="#f5bf2b"/>
+                  ''',
+                  'android': '''
+                  <circle cx="10" cy="10" r="10" fill="#3ddc84"/>
+                  '''
+              }
+
+              # Map workflow platform names onto the icon set; macos/ios share the apple glyph.
+              aliases = {'macos': 'apple', 'ios': 'apple'}
+              key = platform.lower().replace('-arm', '')
+              base_icon = icons.get(aliases.get(key, key), '')
+ if not base_icon:
+ return ''
+
+              return f'''
+              <g transform="translate({x}, {y})">
+                  {base_icon}
+              </g>
+              '''
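+
+          # Usage: create_platform_icon("macos-arm", 30, 160) yields the apple glyph
+          # in a <g> translated to (30, 160); unrecognised platforms return "".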
- svg_height = len(platforms) * (40 + max_archs * 20) + 100
- svg_width = 800
+ def generate_svg_for_package(package_name: str, data: Dict[str, Any], analytics: Dict[str, Any]) -> None:
+ """Generate enhanced SVG report for a package with analytics."""
+ platforms = list(data.keys())
+
+ # Calculate dimensions
+ row_height = 40
+ header_height = 150
+ platform_spacing = 20
+ max_rows_per_platform = max(
+ len(sizes) if isinstance(sizes, dict) else 1
+ for sizes in data.values()
+ )
+
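+              # Header band, one row block per platform, plus fixed bottom padding.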
+ svg_height = header_height + (len(platforms) * (row_height * max_rows_per_platform + platform_spacing)) + 100
+ svg_width = 1200
+
svg_content = f'''
- '
- svg_path = Path("svg-reports") / f"{package_name}_consolidated_size_report.svg"
+          <svg width="{svg_width}" height="{svg_height}" xmlns="http://www.w3.org/2000/svg">
+              {create_gradient_definitions()}
+              <rect width="{svg_width}" height="80" fill="url(#header-gradient)"/>
+              <text x="30" y="40" font-size="28" fill="#ffffff">{package_name} size report</text>
+              <text x="30" y="68" font-size="14" fill="#ffffff">average: {analytics["summary"].get(package_name, {}).get("average_size", "N/A")}</text>
+          '''
+              y_offset = header_height
+              for platform in platforms:
+                  svg_content += create_platform_icon(platform, 30, y_offset + 8)
+                  sizes = data[platform]
+                  rows = sizes.items() if isinstance(sizes, dict) else [("size", sizes)]
+                  for label, platform_data in rows:
+                      svg_content += f'''
+              <rect x="70" y="{y_offset}" width="{svg_width - 100}" height="{row_height - 6}" fill="url(#row-gradient)"/>
+              <text x="80" y="{y_offset + 24}" font-size="16">{platform} / {label} Size: {platform_data}</text>
+          '''
+                      y_offset += row_height
+                  y_offset += platform_spacing
+              svg_content += '</svg>'
+
+ # Save SVG
+ svg_path = Path("svg-reports") / f"{package_name}_size_report.svg"
svg_path.parent.mkdir(exist_ok=True)
with svg_path.open("w") as f:
f.write(svg_content)
- with open("combined_size_report.json") as f:
- combined_data = json.load(f)
+ def main():
+ try:
+ with open("combined_size_report.json") as f:
+ data = json.load(f)
+
+ Path("svg-reports").mkdir(exist_ok=True)
+
+ for package, package_data in data["packages"].items():
+ if package != "platform": # Skip platform mapping
+ generate_svg_for_package(
+ package,
+ package_data,
+ data["analytics"]
+ )
+
+                  with open(os.environ["GITHUB_OUTPUT"], "a") as out:
+                      out.write("status=success\n")
+
+ except Exception as e:
+ print(f"::error::Error generating SVG reports: {str(e)}")
+                  with open(os.environ["GITHUB_OUTPUT"], "a") as out:
+                      out.write("status=failure\n")
+ raise
+
+ if __name__ == "__main__":
+ main()
+ EOF
- for package, data in combined_data["packages"].items():
- generate_svg_for_package(package, data)
+ python generate_svgs.py
- EOF
- python3 generate_svgs.py
+ - name: Create Report Summary
+ if: always()
+ run: |
+ echo "## Size Report Generation Summary" >> $GITHUB_STEP_SUMMARY
+ echo "### Status" >> $GITHUB_STEP_SUMMARY
+ if [[ "${{ steps.generate-svgs.outputs.status }}" == "success" ]]; then
+ echo "✅ Report generation completed successfully" >> $GITHUB_STEP_SUMMARY
+ else
+ echo "❌ Report generation failed" >> $GITHUB_STEP_SUMMARY
+ fi
+ echo "### Details" >> $GITHUB_STEP_SUMMARY
+ echo "- Reports processed: ${{ steps.combine-reports.outputs.report_count }}" >> $GITHUB_STEP_SUMMARY
+ echo "- Generated at: $(date -u '+%Y-%m-%d %H:%M:%S UTC')" >> $GITHUB_STEP_SUMMARY
+
+ - name: Upload Combined JSON Report
+ if: steps.combine-reports.outputs.status == 'success'
+ uses: actions/upload-artifact@v4
+ with:
+ name: combined-size-report
+ path: combined_size_report.json
+ if-no-files-found: error
- - name: Upload SVG reports
+ - name: Upload SVG Reports
+ if: steps.generate-svgs.outputs.status == 'success'
uses: actions/upload-artifact@v4
with:
name: svg-size-reports
- path: svg-reports
+ path: svg-reports/*.svg
+ if-no-files-found: error