diff --git a/.builders/build.py b/.builders/build.py
index 328441670a76d..ba38ec66479cb 100644
--- a/.builders/build.py
+++ b/.builders/build.py
@@ -1,6 +1,7 @@
from __future__ import annotations
import argparse
import os
import shutil
import subprocess
@@ -153,6 +154,10 @@ def build_macos():
final_requirements = mount_dir / 'frozen.txt'
shutil.move(final_requirements, output_dir)
+ # Move the dependency sizes to the output directory
+    dependency_sizes = mount_dir / 'sizes.json'
+    shutil.move(dependency_sizes, output_dir)
+
def build_image():
parser = argparse.ArgumentParser(prog='builder', allow_abbrev=False)
@@ -219,6 +224,7 @@ def build_image():
external_wheels_dir.mkdir()
final_requirements = mount_dir / 'frozen.txt'
final_requirements.touch()
+ dependency_sizes = mount_dir / 'sizes.json'
script_args = ['--python', args.python]
@@ -246,6 +252,9 @@ def build_image():
# Move the final requirements file to the output directory
shutil.move(final_requirements, output_dir)
+ # Move the dependency sizes to the output directory
+ shutil.move(dependency_sizes, output_dir)
+
def main():
if sys.platform == 'darwin':
diff --git a/.builders/scripts/build_wheels.py b/.builders/scripts/build_wheels.py
index 7bbe62ce56d88..b5b77639c2018 100644
--- a/.builders/scripts/build_wheels.py
+++ b/.builders/scripts/build_wheels.py
@@ -1,11 +1,14 @@
from __future__ import annotations
import argparse
+import json
import os
import subprocess
import sys
from pathlib import Path
from tempfile import TemporaryDirectory
+from typing import TypedDict
+from zipfile import ZipFile
from dotenv import dotenv_values
from utils import extract_metadata, normalize_project_name
@@ -14,6 +17,10 @@
CUSTOM_EXTERNAL_INDEX = f'{INDEX_BASE_URL}/external'
CUSTOM_BUILT_INDEX = f'{INDEX_BASE_URL}/built'
+class WheelSizes(TypedDict):
+    compressed: int
+    uncompressed: int
+
+
+class WheelEntry(WheelSizes):
+    # Entry written to sizes.json: the wheel's sizes plus its resolved version.
+    version: str
+
if sys.platform == 'win32':
PY3_PATH = Path('C:\\py3\\Scripts\\python.exe')
PY2_PATH = Path('C:\\py2\\Scripts\\python.exe')
@@ -55,6 +62,13 @@ def check_process(*args, **kwargs) -> subprocess.CompletedProcess:
return process
+def calculate_wheel_sizes(wheel_path: Path) -> WheelSizes:
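+    # Compressed size is the wheel archive itself on disk; uncompressed size is the
+    # sum of the archive members' sizes, i.e. what the wheel expands to when installed.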
+ compressed_size = wheel_path.stat().st_size
+ with ZipFile(wheel_path) as zf:
+ uncompressed_size = sum(zinfo.file_size for zinfo in zf.infolist())
+ return {'compressed': compressed_size, 'uncompressed': uncompressed_size}
+
+
def main():
parser = argparse.ArgumentParser(prog='wheel-builder', allow_abbrev=False)
parser.add_argument('--python', required=True)
@@ -109,9 +123,14 @@ def main():
# Fetch or build wheels
command_args = [
- str(python_path), '-m', 'pip', 'wheel',
- '-r', str(MOUNT_DIR / 'requirements.in'),
- '--wheel-dir', str(staged_wheel_dir),
+ str(python_path),
+ '-m',
+ 'pip',
+ 'wheel',
+ '-r',
+ str(MOUNT_DIR / 'requirements.in'),
+ '--wheel-dir',
+ str(staged_wheel_dir),
# Temporarily removing extra index urls. See below.
# '--extra-index-url', CUSTOM_EXTERNAL_INDEX,
]
@@ -124,21 +143,38 @@ def main():
check_process(command_args, env=env_vars)
# Repair wheels
- check_process([
- sys.executable, '-u', str(MOUNT_DIR / 'scripts' / 'repair_wheels.py'),
- '--source-dir', str(staged_wheel_dir),
- '--built-dir', str(built_wheels_dir),
- '--external-dir', str(external_wheels_dir),
- ])
+ check_process(
+ [
+ sys.executable,
+ '-u',
+ str(MOUNT_DIR / 'scripts' / 'repair_wheels.py'),
+ '--source-dir',
+ str(staged_wheel_dir),
+ '--built-dir',
+ str(built_wheels_dir),
+ '--external-dir',
+ str(external_wheels_dir),
+ ]
+ )
dependencies: dict[str, tuple[str, str]] = {}
+    sizes: dict[str, WheelEntry] = {}
+
for wheel_dir in wheels_dir.iterdir():
- for entry in wheel_dir.iterdir():
- project_metadata = extract_metadata(entry)
+ for wheel in wheel_dir.iterdir():
+ project_metadata = extract_metadata(wheel)
project_name = normalize_project_name(project_metadata['Name'])
project_version = project_metadata['Version']
dependencies[project_name] = project_version
+
+ project_sizes = calculate_wheel_sizes(wheel)
+ sizes[project_name] = {'version': project_version, **project_sizes}
+
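+    # The resulting sizes.json maps each dependency to its version and sizes, e.g.
+    # {"requests": {"version": "2.32.3", "compressed": 64000, "uncompressed": 250000}}
+    # (illustrative values).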
+ output_path = MOUNT_DIR / 'sizes.json'
+ with output_path.open('w', encoding='utf-8') as fp:
+ json.dump(sizes, fp, indent=2, sort_keys=True)
+
final_requirements = MOUNT_DIR / 'frozen.txt'
with final_requirements.open('w', encoding='utf-8') as f:
for project_name, project_version in sorted(dependencies.items()):
diff --git a/.github/workflows/measure-disk-usage.yml b/.github/workflows/measure-disk-usage.yml
index 112886f68dbee..e6233e1e348f8 100644
--- a/.github/workflows/measure-disk-usage.yml
+++ b/.github/workflows/measure-disk-usage.yml
@@ -1,23 +1,48 @@
name: Measure Disk Usage
on:
- push:
- branches:
- - master
+ # workflow_run:
+ # workflows: ['Resolve Dependencies and Build Wheels']
+ # types:
+ # - completed
+ workflow_call:
env:
PYTHON_VERSION: "3.12"
jobs:
+
+ define-current-commit:
+ runs-on: ubuntu-22.04
+ outputs:
+ commit: ${{ steps.define.outputs.commit }}
+ steps:
+ - id: define
+ run: |
+ if [ "${{ github.event_name }}" = "pull_request" ]; then
+ echo "commit=${{ github.event.pull_request.head.sha }}" >> "$GITHUB_OUTPUT"
+ else
+ echo "commit=${{ github.sha }}" >> "$GITHUB_OUTPUT"
+ fi
+
measure-disk-usage:
runs-on: ubuntu-22.04
+ needs:
+ - define-current-commit
+ permissions:
+ contents: read
+ actions: read
+ id-token: write
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
fetch-depth: 0
+ ref: ${{ needs.define-current-commit.outputs.commit }}
+
- name: Set up Python ${{ env.PYTHON_VERSION }}
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
with:
python-version: ${{ env.PYTHON_VERSION }}
+
- name: Install ddev
run: |
pip install -e ./datadog_checks_dev[cli]
@@ -25,89 +50,149 @@ jobs:
- name: Configure ddev
run: |
- ddev config set repos.core .
- ddev config set repo core
- - name: Measure disk usage (uncompressed)
- run: |
- ddev size status --to-dd-key ${{secrets.DD_API_KEY}} > size-uncompressed.txt
- ddev size status --format png,csv,markdown
- cat size-uncompressed.txt
- echo "# Size (uncompressed)" >> $GITHUB_STEP_SUMMARY
- cat uncompressed_status.md >> $GITHUB_STEP_SUMMARY
-
- - name: Measure disk usage (compressed)
+ ddev config override
+
+ - name: Measure disk usage
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
- ddev size status --compressed --to-dd-key ${{secrets.DD_API_KEY}} > size-compressed.txt
- ddev size status --compressed --format png,csv,markdown
- cat size-compressed.txt
- echo "# Size (compressed)" >> $GITHUB_STEP_SUMMARY
- cat compressed_status.md >> $GITHUB_STEP_SUMMARY
-
-
-
- - name: Measure disk usage differences from last commit (uncompressed)
- run: |
- BEFORE=$(git rev-parse HEAD^)
- AFTER=$(git rev-parse HEAD)
- ddev size diff $BEFORE $AFTER > diff-uncompressed.txt
- ddev size diff $BEFORE $AFTER --format png,csv,markdown
- cat diff-uncompressed.txt
- echo "# Size diff (uncompressed)" >> $GITHUB_STEP_SUMMARY
- if [ -f uncompressed_diff.md ]; then
- cat uncompressed_diff.md >> $GITHUB_STEP_SUMMARY
- fi
+ cmd="ddev size status \
+ --dependency-commit ${{needs.define-current-commit.outputs.commit}} \
+ --format json"
- - name: Measure disk usage differences from last commit (compressed)
- run: |
- BEFORE=$(git rev-parse HEAD^)
- AFTER=$(git rev-parse HEAD)
- ddev size diff $BEFORE $AFTER --compressed > diff-compressed.txt
- ddev size diff $BEFORE $AFTER --compressed --format png,csv,markdown
- cat diff-compressed.txt
- echo "# Size diff (compressed)" >> $GITHUB_STEP_SUMMARY
- if [ -f compressed_diff.md ]; then
- cat compressed_diff.md >> $GITHUB_STEP_SUMMARY
- fi
+
+ # TODO: change to ${{ github.event.workflow_run.head_sha }}
+ # Send metrics to Datadog only on push to master
+
+ # if [ "${{ github.event.workflow_run.event }}" = "push" ] && [ "${{ github.ref_name }}" = "master" ]; then
+ # cmd="$cmd --to-dd-key $DD_API_KEY"
+ # fi
+
+ # $cmd --to-dd-key ${{ secrets.DD_API_KEY}}
+ $cmd
+ $cmd --compressed
- - name: Upload file sizes (uncompressed)
+ - name: Upload JSON uncompressed sizes artifact
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
- name: uncompressed_status.csv
- path: uncompressed_status.csv
+ name: status_uncompressed.json
+ path: status_uncompressed.json
if-no-files-found: error
- - name: Upload file sizes (compressed)
+ - name: Upload JSON compressed sizes artifact
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
- name: compressed_status.csv
- path: compressed_status.csv
+ name: status_compressed.json
+ path: status_compressed.json
if-no-files-found: error
-
- - name: Upload file sizes diff (uncompressed)
- uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
+
+ calculate-diff:
+ runs-on: ubuntu-22.04
+ needs:
+ - measure-disk-usage
+ - define-current-commit
+ permissions:
+ contents: read
+ pull-requests: write
+ outputs:
+ passed: ${{ steps.check_diff.outputs.passed }}
+ steps:
+ - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
- name: uncompressed_diff.csv
- path: uncompressed_diff.csv
- if-no-files-found: warn
+ fetch-depth: 0
- - name: Upload file sizes diff (compressed)
- uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
+ - name: Set up Python ${{ env.PYTHON_VERSION }}
+ uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
with:
- name: compressed_diff.csv
- path: compressed_diff.csv
- if-no-files-found: warn
+ python-version: ${{ env.PYTHON_VERSION }}
- - name: Upload status PNGs
+ - name: Install ddev
+ run: |
+ pip install -e ./datadog_checks_dev[cli]
+ pip install -e ./ddev
+
+ - name: Configure ddev
+ run: |
+ ddev config override
+
+ - name: Calculate diff
+ run: |
+ cmd="ddev size diff ${{needs.define-current-commit.outputs.commit}} --format json --use-artifacts" # TODO: change to ${{ github.event.workflow_run.head_sha }}
+ $cmd --compressed --quality-gate-threshold ${{ env.SIZE_THRESHOLD }}
+ $cmd
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+          SIZE_THRESHOLD: 1000000000000  # TODO: placeholder value; set the real quality-gate threshold
+
+ - name: Upload JSON compressed diff sizes artifact
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
- name: size-visuals
- path: size_status_visualizations/
+ name: diff_compressed.json
+ path: diff_compressed.json
if-no-files-found: error
- - name: Upload diff PNGs
+ - name: Upload JSON uncompressed diff sizes artifact
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
- name: diff-visuals
- path: size_diff_visualizations/
- if-no-files-found: warn
-
+ name: diff_uncompressed.json
+ path: diff_uncompressed.json
+ if-no-files-found: error
+
+ - name: Check diff
+ id: check_diff
+ if: github.event_name == 'pull_request'
+ run: |
+ if [ -s diff.html ]; then
+ echo "Quality gates not passed"
+ echo "passed=false" >> "$GITHUB_OUTPUT"
+ else
+ echo "Quality gates passed"
+ echo "passed=true" >> "$GITHUB_OUTPUT"
+ fi
+
+ - name: Find last comment
+ if: github.event_name == 'pull_request' && steps.check_diff.outputs.passed == 'false'
+ uses: peter-evans/find-comment@3eae4d37986fb5a8592848f6a574fdf654e61f9e # v3.1.0
+ id: find_comment
+ with:
+ issue-number: ${{ github.event.pull_request.number }}
+ body-includes: "
Compressed Size Changes
"
+ direction: last
+
+ - name: Delete last comment
+ if: github.event_name == 'pull_request' && steps.check_diff.outputs.passed == 'false' && steps.find_comment.outputs.comment-id != ''
+ uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
+ with:
+ script: |
+ const commentId = ${{ steps.find_comment.outputs.comment-id }};
+ await github.rest.issues.deleteComment({
+ owner: context.repo.owner,
+ repo: context.repo.repo,
+ comment_id: commentId,
+ });
+
+ - name: Create comment
+ if: github.event_name == 'pull_request' && steps.check_diff.outputs.passed == 'false'
+ run: |
+ PR_NUMBER=$(gh pr list \
+ --search ${{ needs.define-current-commit.outputs.commit }} \
+ --state open \
+ --json number \
+ --jq '.[0].number')
+ GITHUB_RUN_URL="https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}"
+ echo "
📋 View detailed breakdown in GitHub Step Summary" >> diff.html
+ gh pr comment $PR_NUMBER --body-file diff.html
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+
+ - name: Check for new dependencies
+ id: check_new_deps
+ run: |
+          if jq -e '.[] | select(.Type == "Dependency" and .Delta_Type == "New")' diff_compressed.json > /dev/null; then
+ echo "New dependencies were added."
+ echo "has_new_dependencies=true" >> "$GITHUB_OUTPUT"
+ else
+ echo "No new dependencies were added."
+ echo "has_new_dependencies=false" >> "$GITHUB_OUTPUT"
+ fi
diff --git a/.github/workflows/resolve-build-deps.yaml b/.github/workflows/resolve-build-deps.yaml
index 200da4aa8e8aa..82ce59443b882 100644
--- a/.github/workflows/resolve-build-deps.yaml
+++ b/.github/workflows/resolve-build-deps.yaml
@@ -6,18 +6,11 @@ on:
branches:
- master
- 7.*.*
- paths:
- - .github/workflows/resolve-build-deps.yml
- - .builders/**
- - agent_requirements.in
+
push:
branches:
- master
- 7.*.*
- paths:
- - .github/workflows/resolve-build-deps.yml
- - .builders/**
- - agent_requirements.in
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
@@ -35,8 +28,53 @@ env:
SOURCE_DATE_EPOCH: "1580601600"
jobs:
+  # measure-disk-usage.yml is called from this workflow so that it runs only after
+  # the builds have produced the dependency size artifacts.
+  # The 'on' triggers make this workflow run on every push and PR; this job cancels
+  # the run early when no dependency files have changed.
+
+ check-dependency-changes:
+ name: Check dependency changes
+ runs-on: ubuntu-22.04
+ permissions:
+ actions: write
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+ with:
+ fetch-depth: 0
+ - name: Set Base SHA Output
+ id: set_base_sha
+ run: |
+ if [ "${{ github.event_name }}" == "pull_request" ]; then
+ echo "base_sha=${{ github.event.pull_request.base.sha }}" >> $GITHUB_OUTPUT
+ else
+ echo "base_sha=${{ github.event.before }}" >> $GITHUB_OUTPUT
+ fi
+ - name: Check for dependency changes
+ id: dependency-check
+ run: |
+          if git diff --name-only ${{ steps.set_base_sha.outputs.base_sha }} ${{ github.sha }} | grep -qE "(\.github/workflows/resolve-build-deps\.yaml|\.builders/|agent_requirements\.in)"; then
+ echo "changed=true" >> $GITHUB_OUTPUT
+ echo "Dependency files changed."
+ else
+ echo "changed=false" >> $GITHUB_OUTPUT
+ echo "No dependency files changed."
+ fi
+ - name: Cancel workflow if no dependency changes
+ if: steps.dependency-check.outputs.changed == 'false'
+ uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
+ with:
+ script: |
+ await github.rest.actions.cancelWorkflowRun({
+ owner: context.repo.owner,
+ repo: context.repo.repo,
+ run_id: context.runId
+ });
+
test:
name: Run tests
+ needs:
+ - check-dependency-changes
runs-on: ubuntu-22.04
steps:
- name: Checkout code
@@ -56,6 +94,8 @@ jobs:
build:
name: Target ${{ matrix.job.image }} on ${{ matrix.job.os }}
+ needs:
+ - check-dependency-changes
runs-on: ${{ matrix.job.os }}
strategy:
fail-fast: false
@@ -173,6 +213,8 @@ jobs:
build-macos:
name: Target macOS/${{ matrix.job.arch }} on ${{ matrix.job.os }}
+ needs:
+ - check-dependency-changes
runs-on: ${{ matrix.job.os }}
strategy:
fail-fast: false
@@ -244,12 +286,26 @@ jobs:
name: target-macos-${{ matrix.job.arch }}
path: output
+ measure-disk-usage:
+ name: Measure disk usage
+ permissions:
+ contents: read
+ actions: read
+ pull-requests: write
+ id-token: write
+ needs:
+ # - build
+ - build-macos
+ uses: ./.github/workflows/measure-disk-usage.yml
+ secrets: inherit
+
publish:
name: Publish artifacts and update lockfiles via PR
if: github.event_name == 'push' || (github.event_name == 'workflow_dispatch' && (github.ref_name == github.event.repository.default_branch || startsWith(github.ref_name, '7.')))
needs:
- build
- build-macos
+ - check-dependency-changes
runs-on: ubuntu-latest
permissions:
diff --git a/ddev/changelog.d/21331.added b/ddev/changelog.d/21331.added
new file mode 100644
index 0000000000000..d5e38ca4f80df
--- /dev/null
+++ b/ddev/changelog.d/21331.added
@@ -0,0 +1 @@
+Add option to `ddev size status` to compute dependency sizes from JSON or a commit’s GitHub Actions artifacts
\ No newline at end of file
diff --git a/ddev/src/ddev/cli/size/diff.py b/ddev/src/ddev/cli/size/diff.py
index 20b8d9e75f034..7ca04ffa10bdc 100644
--- a/ddev/src/ddev/cli/size/diff.py
+++ b/ddev/src/ddev/cli/size/diff.py
@@ -1,275 +1,417 @@
# (C) Datadog, Inc. 2022-present
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
+from __future__ import annotations
+import json
import os
from datetime import datetime
-from typing import Optional
+from typing import TYPE_CHECKING, Literal
import click
-from rich.console import Console
-from rich.progress import BarColumn, Progress, SpinnerColumn, TextColumn, TimeElapsedColumn
from ddev.cli.application import Application
+from ddev.cli.size.utils.common_funcs import GitRepo
from ddev.cli.size.utils.common_params import common_params
-from .utils.common_funcs import (
- CLIParameters,
- FileDataEntry,
- FileDataEntryPlatformVersion,
- GitRepo,
- convert_to_human_readable_size,
- export_format,
- format_modules,
- get_dependencies,
- get_files,
- get_valid_platforms,
- get_valid_versions,
- plot_treemap,
- print_table,
-)
+if TYPE_CHECKING:
+ from ddev.cli.size.utils.common_funcs import CLIParameters, FileDataEntry
-console = Console(stderr=True)
MINIMUM_DATE = datetime.strptime("Sep 17 2024", "%b %d %Y").date()
MINIMUM_LENGTH_COMMIT = 7
@click.command()
-@click.argument("first_commit")
-@click.argument("second_commit")
+@click.argument("new_commit")
+@click.option(
+ "--compare-to",
+ "old_commit",
+ help="Commit to compare to. If not specified, will compare to the previous commit on master",
+)
@click.option("--python", "version", help="Python version (e.g 3.12). If not specified, all versions will be analyzed")
+@click.option("--use-artifacts", is_flag=True, help="Fetch sizes from gha artifacts instead of the repo")
+@click.option(
+ "--quality-gate-threshold",
+ type=int,
+ help="Threshold for the size difference. Outputs the html only if the size"
+ " difference is greater than the quality gate threshold",
+)
+@click.option("--to-dd-org", type=str, help="Send metrics to Datadog using the specified organization name.")
+@click.option("--to-dd-key", type=str, help="Send metrics to Datadog using the specified API key.")
+@click.option(
+ "--to-dd-site",
+ type=str,
+ help="Send metrics to Datadog using the specified site. If not provided datadoghq.com will be used.",
+)
@common_params # platform, compressed, format, show_gui
@click.pass_obj
def diff(
app: Application,
- first_commit: str,
- second_commit: str,
- platform: Optional[str],
- version: Optional[str],
+ new_commit: str,
+ old_commit: str | None,
+ platform: str | None,
+ version: str | None,
compressed: bool,
format: list[str],
show_gui: bool,
+ use_artifacts: bool,
+ quality_gate_threshold: int | None,
+ to_dd_org: str | None,
+ to_dd_key: str | None,
+ to_dd_site: str | None,
) -> None:
"""
- Compare the size of integrations and dependencies between two commits.
+ Compares the size of integrations and dependencies between two commits.
+
+ - If only one commit is given on a feature branch, it's compared to the branch's merge base with master.
+
+ - If only one commit is given while on master, it's compared to the previous commit on master.
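+
+    Example: ddev size diff <new_commit> --compare-to <old_commit> --compressed --format json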
"""
- with Progress(
- SpinnerColumn(),
- TextColumn("[progress.description]{task.description}"),
- BarColumn(),
- TimeElapsedColumn(),
- transient=True,
- console=console,
- ) as progress:
- task = progress.add_task("[cyan]Calculating differences...", total=None)
- if len(first_commit) < MINIMUM_LENGTH_COMMIT and len(second_commit) < MINIMUM_LENGTH_COMMIT:
- raise click.BadParameter(f"Commit hashes must be at least {MINIMUM_LENGTH_COMMIT} characters long")
- elif len(first_commit) < MINIMUM_LENGTH_COMMIT:
- raise click.BadParameter(
- f"First commit hash must be at least {MINIMUM_LENGTH_COMMIT} characters long.",
- param_hint="first_commit",
- )
- elif len(second_commit) < MINIMUM_LENGTH_COMMIT:
- raise click.BadParameter(
- f"Second commit hash must be at least {MINIMUM_LENGTH_COMMIT} characters long.",
- param_hint="second_commit",
- )
- if first_commit == second_commit:
- raise click.BadParameter("Commit hashes must be different")
- if format:
- for fmt in format:
- if fmt not in ["png", "csv", "markdown", "json"]:
- raise ValueError(f"Invalid format: {fmt}. Only png, csv, markdown, and json are supported.")
+
+ from .utils.common_funcs import (
+ get_valid_platforms,
+ get_valid_versions,
+ )
+
+ with app.status("Calculating differences..."):
repo_url = app.repo.path
+ modules: list[FileDataEntry] = []
+ passes_quality_gate = True
+
+ valid_versions = get_valid_versions(app.repo.path)
+ valid_platforms = get_valid_platforms(app.repo.path, valid_versions)
+ validate_parameters(
+ app,
+ old_commit,
+ new_commit,
+ format,
+ valid_platforms,
+ valid_versions,
+ platform,
+ version,
+ to_dd_org,
+ to_dd_key,
+ to_dd_site,
+ )
- with GitRepo(repo_url) as gitRepo:
- try:
- date_str, _, _ = gitRepo.get_commit_metadata(first_commit)
- date = datetime.strptime(date_str, "%b %d %Y").date()
- if date < MINIMUM_DATE:
- raise ValueError(f"First commit must be after {MINIMUM_DATE.strftime('%b %d %Y')} ")
- valid_versions = get_valid_versions(gitRepo.repo_dir)
- valid_platforms = get_valid_platforms(gitRepo.repo_dir, valid_versions)
- if platform and platform not in valid_platforms:
- raise ValueError(f"Invalid platform: {platform}")
- elif version and version not in valid_versions:
- raise ValueError(f"Invalid version: {version}")
- modules_plat_ver: list[FileDataEntryPlatformVersion] = []
- platforms = valid_platforms if platform is None else [platform]
- versions = valid_versions if version is None else [version]
- progress.remove_task(task)
- combinations = [(p, v) for p in platforms for v in versions]
- for plat, ver in combinations:
- parameters: CLIParameters = {
- "app": app,
- "platform": plat,
- "version": ver,
- "compressed": compressed,
- "format": format,
- "show_gui": show_gui,
- }
- modules_plat_ver.extend(
- diff_mode(
+ platforms = valid_platforms if platform is None else [platform]
+ versions = valid_versions if version is None else [version]
+ combinations = [(p, v) for p in platforms for v in versions]
+
+ if not old_commit:
+ old_commit = app.repo.git.merge_base(new_commit, "origin/master")
+ print("Comparing to commit: ", old_commit)
+ if use_artifacts:
+ for plat, ver in combinations:
+ parameters_artifacts: CLIParameters = {
+ "app": app,
+ "platform": plat,
+ "version": ver,
+ "compressed": compressed,
+ "format": format,
+ "show_gui": show_gui,
+ }
+
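+                # Runs from before this change uploaded sizes as CSV artifacts, while the
+                # current workflow uploads JSON, so the old commit is read as CSV and the
+                # new one as JSON.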
+ try:
+ old_commit_sizes = get_sizes_from_artifacts(app, old_commit, plat, compressed, "csv")
+ new_commit_sizes = get_sizes_from_artifacts(app, new_commit, plat, compressed, "json")
+ except Exception as e:
+ app.abort(str(e))
+
+                diff_modules = calculate_diff(old_commit_sizes, new_commit_sizes)
+ output_diff(parameters_artifacts, diff_modules)
+ modules.extend(diff_modules)
+ total_diff = sum(int(x.get("Size_Bytes", 0)) for x in diff_modules)
+ if quality_gate_threshold and total_diff > quality_gate_threshold:
+ passes_quality_gate = False
+
+ else:
+ with GitRepo(repo_url) as gitRepo:
+ try:
+ date_str, _, _ = gitRepo.get_commit_metadata(old_commit)
+ date = datetime.strptime(date_str, "%b %d %Y").date()
+ if date < MINIMUM_DATE:
+ raise ValueError(f"First commit must be after {MINIMUM_DATE.strftime('%b %d %Y')} ")
+
+ for plat, ver in combinations:
+ parameters_repo: CLIParameters = {
+ "app": app,
+ "platform": plat,
+ "version": ver,
+ "compressed": compressed,
+ "format": format,
+ "show_gui": show_gui,
+ }
+ diff_modules = get_diff(
gitRepo,
- first_commit,
- second_commit,
- parameters,
- progress,
+ old_commit,
+ new_commit,
+ parameters_repo,
)
- )
- if format:
- export_format(app, format, modules_plat_ver, "diff", platform, version, compressed)
- except Exception as e:
- progress.stop()
- app.abort(str(e))
+ output_diff(parameters_repo, diff_modules)
+ modules.extend(diff_modules)
+ total_diff = sum(int(x.get("Size_Bytes", 0)) for x in diff_modules)
+ if quality_gate_threshold and total_diff > quality_gate_threshold:
+ passes_quality_gate = False
+ except Exception as e:
+ app.abort(str(e))
+
+ if to_dd_org or to_dd_key:
+ from .utils.common_funcs import send_metrics_to_dd
+
+ mode: Literal["diff"] = "diff"
+ send_metrics_to_dd(app, modules, to_dd_org, to_dd_key, to_dd_site, compressed, mode)
+
+ if format or not passes_quality_gate:
+ modules = [module for module in modules if module["Size_Bytes"] != 0]
+ if format:
+ from .utils.common_funcs import export_format
+
+ export_format(app, format, modules, "diff", platform, version, compressed)
+ if not passes_quality_gate:
+ from .utils.common_funcs import save_html
+
+ save_html(app, "Diff", modules, "diff.html", old_commit)
return None
-def diff_mode(
+def validate_parameters(
+ app: Application,
+ old_commit: str | None,
+ new_commit: str,
+ format: list[str],
+ valid_platforms: set[str],
+ valid_versions: set[str],
+ platform: str | None,
+ version: str | None,
+ to_dd_org: str | None,
+ to_dd_key: str | None,
+ to_dd_site: str | None,
+):
+ errors = []
+    if platform and platform not in valid_platforms:
+        errors.append(f"Invalid platform: {platform}")
+
+    if version and version not in valid_versions:
+        errors.append(f"Invalid version: {version}")
+
+    if len(new_commit) < MINIMUM_LENGTH_COMMIT and old_commit and len(old_commit) < MINIMUM_LENGTH_COMMIT:
+        errors.append(f"Commit hashes must be at least {MINIMUM_LENGTH_COMMIT} characters long")
+
+    elif len(new_commit) < MINIMUM_LENGTH_COMMIT:
+        errors.append(
+            f"New commit hash must be at least {MINIMUM_LENGTH_COMMIT} characters long.",
+        )
+
+    elif old_commit and len(old_commit) < MINIMUM_LENGTH_COMMIT:
+        errors.append(
+            f"Old commit hash must be at least {MINIMUM_LENGTH_COMMIT} characters long.",
+        )
+
+ if new_commit and old_commit == new_commit:
+ errors.append("Commit hashes must be different")
+
+    if format:
+        for fmt in format:
+            if fmt not in ["png", "csv", "markdown", "json", "html"]:
+                errors.append(f"Invalid format: {fmt}. Only png, csv, markdown, json, and html are supported.")
+
+ if to_dd_site and not to_dd_key:
+ errors.append("If --to-dd-site is provided, --to-dd-key must also be provided.")
+
+ if to_dd_site and to_dd_org:
+ errors.append("If --to-dd-org is provided, --to-dd-site must not be provided.")
+
+ if to_dd_key and to_dd_org:
+ errors.append("If --to-dd-org is provided, --to-dd-key must not be provided.")
+
+ if errors:
+ app.abort("\n".join(errors))
+
+
+def get_sizes_from_artifacts(
+    app: Application, commit: str, platform: str, compressed: bool, extension: Literal["json", "csv"] = "json"
+) -> list[FileDataEntry]:
+ import tempfile
+
+ from .utils.common_funcs import get_sizes_json_from_artifacts
+
+ with tempfile.TemporaryDirectory() as temp_dir:
+ print(f"Temporary directory: {temp_dir}")
+ sizes_json = get_sizes_json_from_artifacts(commit, temp_dir, compressed, extension)
+ compression = "compressed" if compressed else "uncompressed"
+ if not sizes_json[compression]:
+ app.abort(f"Sizes not found for {commit=}, {platform=}, {compressed=}")
+ return []
+ if extension == "json" and sizes_json[compression]:
+ modules_json: list[FileDataEntry] = list(json.loads(sizes_json[compression].read_text()))
+ filtered_modules_json = [module for module in modules_json if module.get("Platform") == platform]
+ return filtered_modules_json
+ elif extension == "csv" and sizes_json[compression]:
+ # Assume CSV
+ import csv
+
+ modules_csv: list[FileDataEntry] = []
+ with open(sizes_json[compression], newline="", encoding="utf-8") as csvfile:
+ modules_csv = list(csv.DictReader(csvfile))
+ filtered_modules_csv = [module for module in modules_csv if module.get("Platform") == platform]
+ return filtered_modules_csv
+ return []
+
+
+def get_diff(
gitRepo: GitRepo,
- first_commit: str,
- second_commit: str,
+ old_commit: str,
+ new_commit: str,
params: CLIParameters,
- progress: Progress,
-) -> list[FileDataEntryPlatformVersion]:
+) -> list[FileDataEntry]:
files_b, dependencies_b, files_a, dependencies_a = get_repo_info(
- gitRepo, params["platform"], params["version"], first_commit, second_commit, params["compressed"], progress
+ gitRepo, params["platform"], params["version"], old_commit, new_commit, params["compressed"]
)
- integrations = get_diff(files_b, files_a, "Integration")
- dependencies = get_diff(dependencies_b, dependencies_a, "Dependency")
+    integrations = calculate_diff(files_b, files_a)
+    dependencies = calculate_diff(dependencies_b, dependencies_a)
- if integrations + dependencies == []:
+ return integrations + dependencies
+
+
+def output_diff(params: CLIParameters, modules: list[FileDataEntry]):
+ if modules == []:
params["app"].display(
f"No size differences were detected between the selected commits for {params['platform']}"
)
return []
else:
- formatted_modules = format_modules(integrations + dependencies, params["platform"], params["version"])
- formatted_modules.sort(key=lambda x: x["Size_Bytes"], reverse=True)
- for module in formatted_modules:
+ modules.sort(key=lambda x: abs(x["Size_Bytes"]), reverse=True)
+ for module in modules:
if module["Size_Bytes"] > 0:
module["Size"] = f"+{module['Size']}"
if not params["format"] or params["format"] == ["png"]: # if no format is provided for the data print the table
- print_table(params["app"], "Diff", formatted_modules)
+ from .utils.common_funcs import print_table
+
+ print_table(params["app"], "Diff", modules)
treemap_path = None
if params["format"] and "png" in params["format"]:
treemap_path = os.path.join("size_diff_visualizations", f"treemap_{params['platform']}_{params['version']}.png")
if params["show_gui"] or treemap_path:
+ from .utils.common_funcs import plot_treemap
+
plot_treemap(
params["app"],
- formatted_modules,
+ modules,
f"Disk Usage Differences for {params['platform']} and Python version {params['version']}",
params["show_gui"],
"diff",
treemap_path,
)
- return formatted_modules
-
def get_repo_info(
gitRepo: GitRepo,
platform: str,
version: str,
- first_commit: str,
- second_commit: str,
+ old_commit: str,
+ new_commit: str,
compressed: bool,
- progress: Progress,
) -> tuple[list[FileDataEntry], list[FileDataEntry], list[FileDataEntry], list[FileDataEntry]]:
- with progress:
- """
- Retrieves integration and dependency sizes for two commits in the repo.
-
- Args:
- gitRepo: An instance of GitRepo for accessing the repository.
- platform: Target platform for dependency resolution.
- version: Python version for dependency resolution.
- first_commit: The earlier commit SHA to compare.
- second_commit: The later commit SHA to compare.
- compressed: Whether to measure compressed sizes.
- progress: Rich Progress bar.
-
- Returns:
- A tuple of four lists:
- - files_b: Integration sizes at first_commit
- - dependencies_b: Dependency sizes at first_commit
- - files_a: Integration sizes at second_commit
- - dependencies_a: Dependency sizes at second_commit
- """
-
- repo = gitRepo.repo_dir
- task = progress.add_task("[cyan]Calculating sizes for the first commit...", total=None)
- gitRepo.checkout_commit(first_commit)
- files_b = get_files(repo, compressed, version)
- dependencies_b = get_dependencies(repo, platform, version, compressed)
- progress.remove_task(task)
-
- task = progress.add_task("[cyan]Calculating sizes for the second commit...", total=None)
- gitRepo.checkout_commit(second_commit)
- files_a = get_files(repo, compressed, version)
- dependencies_a = get_dependencies(repo, platform, version, compressed)
- progress.remove_task(task)
+ """
+ Retrieves integration and dependency sizes for two commits in the repo.
+
+ Args:
+ gitRepo: An instance of GitRepo for accessing the repository.
+ platform: Target platform for dependency resolution.
+ version: Python version for dependency resolution.
+ old_commit: The earlier commit SHA to compare.
+ new_commit: The later commit SHA to compare.
+ compressed: Whether to measure compressed sizes.
+
+ Returns:
+ A tuple of four lists:
+ - files_b: Integration sizes at old_commit
+ - dependencies_b: Dependency sizes at old_commit
+ - files_a: Integration sizes at new_commit
+ - dependencies_a: Dependency sizes at new_commit
+ """
+ from .utils.common_funcs import get_dependencies, get_files
+
+ repo = gitRepo.repo_dir
+ gitRepo.checkout_commit(old_commit)
+ files_b = get_files(repo, compressed, version, platform)
+ dependencies_b = get_dependencies(repo, platform, version, compressed)
+
+ gitRepo.checkout_commit(new_commit)
+ files_a = get_files(repo, compressed, version, platform)
+ dependencies_a = get_dependencies(repo, platform, version, compressed)
return files_b, dependencies_b, files_a, dependencies_a
-def get_diff(
- size_first_commit: list[FileDataEntry], size_second_commit: list[FileDataEntry], type: str
+def calculate_diff(
+    size_old_commit: list[FileDataEntry], size_new_commit: list[FileDataEntry]
) -> list[FileDataEntry]:
"""
Computes size differences between two sets of integrations or dependencies.
Args:
- size_first_commit: Entries from the first (earlier) commit.
- size_second_commit: Entries from the second (later) commit.
- type: Integration/Dependency
+ size_old_commit: Entries from the first (earlier) commit.
+ size_new_commit: Entries from the second (later) commit.
Returns:
A list of FileDataEntry items representing only the entries with a size difference.
Entries include new, deleted, or changed modules, with delta size in bytes and human-readable format.
"""
+ from .utils.common_funcs import convert_to_human_readable_size
- first_commit = {entry["Name"]: entry for entry in size_first_commit}
- second_commit = {entry["Name"]: entry for entry in size_second_commit}
+ old_commit = {
+ (entry["Name"], entry["Type"], entry["Platform"], entry["Python_Version"]): entry for entry in size_old_commit
+ }
+ new_commit = {
+ (entry["Name"], entry["Type"], entry["Platform"], entry["Python_Version"]): entry for entry in size_new_commit
+ }
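+
+    # Entries are keyed by (Name, Type, Platform, Python_Version) so the same module
+    # measured for different platforms or Python versions never collides.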
- all_names = set(first_commit) | set(second_commit)
+ all_names = set(old_commit) | set(new_commit)
diffs: list[FileDataEntry] = []
- for name in all_names:
- b = first_commit.get(name)
- a = second_commit.get(name)
-
- size_b = b["Size_Bytes"] if b else 0
- size_a = a["Size_Bytes"] if a else 0
- delta = size_a - size_b
-
- if delta == 0:
- continue
-
- ver_b = b["Version"] if b else ""
- ver_a = a["Version"] if a else ""
-
- if size_b == 0:
- name_str = f"{name} (NEW)"
- version_str = ver_a
- elif size_a == 0:
- name_str = f"{name} (DELETED)"
- version_str = ver_b
+ for name, _type, platform, py_version in all_names:
+ old = old_commit.get((name, _type, platform, py_version))
+ new = new_commit.get((name, _type, platform, py_version))
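+        # int() cast: entries loaded from CSV artifacts carry Size_Bytes as strings.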
+ size_old = int(old["Size_Bytes"]) if old else 0
+ size_new = int(new["Size_Bytes"]) if new else 0
+ delta = size_new - size_old
+ percentage = (delta / size_old) * 100 if size_old != 0 else 0
+
+ ver_old = old["Version"] if old else ""
+ ver_new = new["Version"] if new else ""
+
+        if size_old == 0:
+            change_type = "New"
+            name_str = name
+            version_str = ver_new
+        elif size_new == 0:
+            change_type = "Removed"
+            name_str = name
+            version_str = ver_old
else:
+ change_type = "Modified"
name_str = name
- version_str = f"{ver_b} -> {ver_a}" if ver_a != ver_b else ver_a
+ version_str = f"{ver_old} -> {ver_new}" if ver_new != ver_old else ver_new
diffs.append(
{
"Name": name_str,
"Version": version_str,
- "Type": type,
+ "Type": _type,
+ "Platform": platform,
+ "Python_Version": py_version,
"Size_Bytes": delta,
"Size": convert_to_human_readable_size(delta),
+ "Percentage": round(percentage, 2),
+ "Delta_Type": change_type,
}
)
diff --git a/ddev/src/ddev/cli/size/status.py b/ddev/src/ddev/cli/size/status.py
index f839966d76217..59a9d6533ad83 100644
--- a/ddev/src/ddev/cli/size/status.py
+++ b/ddev/src/ddev/cli/size/status.py
@@ -2,70 +2,93 @@
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
+from __future__ import annotations
+
import os
-from pathlib import Path
-from typing import Optional
+from typing import TYPE_CHECKING, Literal
import click
-from rich.console import Console
from ddev.cli.application import Application
-from ddev.cli.size.utils.common_funcs import (
- CLIParameters,
- FileDataEntryPlatformVersion,
- export_format,
- format_modules,
- get_dependencies,
- get_files,
- get_valid_platforms,
- get_valid_versions,
- plot_treemap,
- print_table,
- send_metrics_to_dd,
-)
from ddev.cli.size.utils.common_params import common_params
+from ddev.utils.fs import Path
-console = Console(stderr=True)
+if TYPE_CHECKING:
+ from ddev.cli.size.utils.common_funcs import (
+ CLIParameters,
+ FileDataEntry,
+ )
@click.command()
@click.option("--to-dd-org", type=str, help="Send metrics to Datadog using the specified organization name.")
-@click.option("--to-dd-key", type=str, help="Send metrics to datadoghq.com using the specified API key.")
+@click.option("--to-dd-key", type=str, help="Send metrics to Datadog using the specified API key.")
+@click.option(
+ "--to-dd-site",
+ type=str,
+ help="Send metrics to Datadog using the specified site. If not provided datadoghq.com will be used.",
+)
@click.option("--python", "version", help="Python version (e.g 3.12). If not specified, all versions will be analyzed")
+@click.option("--dependency-sizes", type=click.Path(exists=True), help="Path to the dependency sizes json file.")
+@click.option(
+ "--dependency-commit",
+ help="Commit hash to check the dependency status of. It takes the commit's dependency sizes file.",
+)
@common_params # platform, compressed, format, show_gui
@click.pass_obj
def status(
app: Application,
- platform: Optional[str],
- version: Optional[str],
+ platform: str | None,
+ version: str | None,
compressed: bool,
format: list[str],
show_gui: bool,
- to_dd_org: str,
- to_dd_key: str,
+ to_dd_org: str | None,
+ to_dd_key: str | None,
+ to_dd_site: str | None,
+ dependency_sizes: Path | None,
+ dependency_commit: str | None,
) -> None:
"""
- Show the current size of all integrations and dependencies.
+ Show the current size of all integrations and dependencies in your local repo.
+ By default, it analyzes every platform and Python version using your local lockfiles
+ and prints the results to the terminal.
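+
+    Example: ddev size status --platform macos-x86_64 --python 3.12 --compressed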
+
"""
+ from ddev.cli.size.utils.common_funcs import (
+ get_valid_platforms,
+ get_valid_versions,
+ )
+
try:
repo_path = app.repo.path
valid_versions = get_valid_versions(repo_path)
valid_platforms = get_valid_platforms(repo_path, valid_versions)
- if platform and platform not in valid_platforms:
- raise ValueError(f"Invalid platform: {platform}")
- elif version and version not in valid_versions:
- raise ValueError(f"Invalid version: {version}")
- elif format:
- for fmt in format:
- if fmt not in ["png", "csv", "markdown", "json"]:
- raise ValueError(f"Invalid format: {fmt}. Only png, csv, markdown, and json are supported.")
- elif to_dd_org and to_dd_key:
- raise ValueError("Specify either --to-dd-org or --to-dd-key, not both")
- modules_plat_ver: list[FileDataEntryPlatformVersion] = []
+
+ validate_parameters(
+ valid_platforms,
+ valid_versions,
+ platform,
+ version,
+ format,
+ to_dd_org,
+ to_dd_key,
+ to_dd_site,
+ dependency_commit,
+ dependency_sizes,
+ app,
+ )
+
+ modules_plat_ver: list[FileDataEntry] = []
platforms = valid_platforms if platform is None else [platform]
versions = valid_versions if version is None else [version]
combinations = [(p, v) for p in platforms for v in versions]
+
for plat, ver in combinations:
+ if dependency_commit:
+ from ddev.cli.size.utils.common_funcs import get_last_dependency_sizes_artifact
+
+ dependency_sizes = get_last_dependency_sizes_artifact(app, dependency_commit, plat)
parameters: CLIParameters = {
"app": app,
"platform": plat,
@@ -78,31 +101,101 @@ def status(
status_mode(
repo_path,
parameters,
+ dependency_sizes,
)
)
-
if format:
+ from ddev.cli.size.utils.common_funcs import export_format
+
export_format(app, format, modules_plat_ver, "status", platform, version, compressed)
+
if to_dd_org or to_dd_key:
- send_metrics_to_dd(app, modules_plat_ver, to_dd_org, to_dd_key, compressed)
+ from ddev.cli.size.utils.common_funcs import send_metrics_to_dd
+
+ print("Sending metrics to Datadog ")
+ mode: Literal["status"] = "status"
+ commits = [dependency_commit] if dependency_commit else None
+ print(f"Sending metrics to Datadog for commits: {commits}")
+ send_metrics_to_dd(app, modules_plat_ver, to_dd_org, to_dd_key, to_dd_site, compressed, mode, commits)
except Exception as e:
app.abort(str(e))
+def validate_parameters(
+ valid_platforms: set[str],
+ valid_versions: set[str],
+ platform: str | None,
+ version: str | None,
+ format: list[str],
+ to_dd_org: str | None,
+ to_dd_key: str | None,
+ to_dd_site: str | None,
+ dependency_commit: str | None,
+ dependency_sizes: Path | None,
+ app: Application,
+) -> None:
+ errors = []
+ if platform and platform not in valid_platforms:
+ errors.append(f"Invalid platform: {platform!r}")
+
+ if version and version not in valid_versions:
+ errors.append(f"Invalid version: {version!r}")
+
+ if dependency_commit and dependency_sizes:
+ errors.append("Pass either 'dependency-commit' or 'dependency-sizes'. Both options cannot be supplied.")
+
+ if format:
+ for fmt in format:
+ if fmt not in ["png", "csv", "markdown", "json"]:
+ errors.append(f"Invalid format: {fmt!r}. Only png, csv, markdown, and json are supported.")
+
+ if dependency_sizes and not dependency_sizes.is_file():
+ errors.append(f"Dependency sizes file does not exist: {dependency_sizes!r}")
+
+ if to_dd_site and not to_dd_key:
+ errors.append("If --to-dd-site is provided, --to-dd-key must also be provided.")
+
+ if to_dd_site and to_dd_org:
+ errors.append("If --to-dd-org is provided, --to-dd-site must not be provided.")
+
+ if to_dd_key and to_dd_org:
+ errors.append("If --to-dd-org is provided, --to-dd-key must not be provided.")
+
+ if errors:
+ app.abort("\n".join(errors))
+
+
def status_mode(
repo_path: Path,
params: CLIParameters,
-) -> list[FileDataEntryPlatformVersion]:
- with console.status("[cyan]Calculating sizes...", spinner="dots"):
- modules = get_files(repo_path, params["compressed"], params["version"]) + get_dependencies(
- repo_path, params["platform"], params["version"], params["compressed"]
- )
+ dependency_sizes: Path | None,
+) -> list[FileDataEntry]:
+ from ddev.cli.size.utils.common_funcs import (
+ get_dependencies,
+ get_files,
+ print_table,
+ )
- formatted_modules = format_modules(modules, params["platform"], params["version"])
- formatted_modules.sort(key=lambda x: x["Size_Bytes"], reverse=True)
+ with params["app"].status("Calculating sizes..."):
+ if dependency_sizes:
+            from ddev.cli.size.utils.common_funcs import get_dependencies_from_json
+
+ modules = get_files(
+ repo_path, params["compressed"], params["version"], params["platform"]
+ ) + get_dependencies_from_json(
+ dependency_sizes, params["platform"], params["version"], params["compressed"]
+ )
+
+ else:
+ modules = get_files(
+ repo_path, params["compressed"], params["version"], params["platform"]
+ ) + get_dependencies(repo_path, params["platform"], params["version"], params["compressed"])
+
+ modules.sort(key=lambda x: x["Size_Bytes"], reverse=True)
if not params["format"] or params["format"] == ["png"]: # if no format is provided for the data print the table
- print_table(params["app"], "Status", formatted_modules)
+ print_table(params["app"], "Status", modules)
treemap_path = None
if params["format"] and "png" in params["format"]:
@@ -111,13 +204,15 @@ def status_mode(
)
if params["show_gui"] or treemap_path:
+ from ddev.cli.size.utils.common_funcs import plot_treemap
+
plot_treemap(
params["app"],
- formatted_modules,
+ modules,
f"Disk Usage Status for {params['platform']} and Python version {params['version']}",
params["show_gui"],
"status",
treemap_path,
)
- return formatted_modules
+ return modules
diff --git a/ddev/src/ddev/cli/size/utils/common_funcs.py b/ddev/src/ddev/cli/size/utils/common_funcs.py
index 0a6d66fc5de94..470f28d6faf3d 100644
--- a/ddev/src/ddev/cli/size/utils/common_funcs.py
+++ b/ddev/src/ddev/cli/size/utils/common_funcs.py
@@ -12,18 +12,22 @@
import zipfile
import zlib
from datetime import date
-from pathlib import Path
+from functools import cache
from types import TracebackType
from typing import TYPE_CHECKING, Literal, Optional, Type, TypedDict
import requests
import squarify
-from datadog import api, initialize
+from typing_extensions import NotRequired
from ddev.cli.application import Application
+from ddev.utils.fs import Path
from ddev.utils.toml import load_toml_file
-METRIC_VERSION = 2
+METRIC_VERSION = -1 # TODO CHANGE THIS TO 2
+
+RESOLVE_BUILD_DEPS_WORKFLOW = '.github/workflows/resolve-build-deps.yaml'
+MEASURE_DISK_USAGE_WORKFLOW = '.github/workflows/measure-disk-usage.yml'
if TYPE_CHECKING:
from matplotlib.axes import Axes
@@ -36,11 +40,15 @@ class FileDataEntry(TypedDict):
Size_Bytes: int # Size in bytes
Size: str # Human-readable size
Type: str # Integration/Dependency
-
-
-class FileDataEntryPlatformVersion(FileDataEntry):
Platform: str # Target platform (e.g. linux-aarch64)
Python_Version: str # Target Python version (e.g. 3.12)
+ Delta_Type: NotRequired[str] # Change type (New, Removed, Modified)
+ Percentage: NotRequired[float] # Percentage of the size change
+
+
class CommitEntry(TypedDict):
@@ -95,11 +103,12 @@ def get_valid_platforms(repo_path: Path | str, versions: set[str]) -> set[str]:
"""
Extracts the platforms we support from the .deps/resolved file names.
"""
- resolved_path = os.path.join(repo_path, ".deps", "resolved")
- platforms = []
- for file in os.listdir(resolved_path):
- if any(version in file for version in versions):
- platforms.append("_".join(file.split("_")[:-1]))
+    # TODO: temporarily hardcoded to the macOS platforms while only the macOS builds
+    # upload dependency size artifacts; restore the lookup below once all platforms do.
+    # resolved_path = os.path.join(repo_path, ".deps", "resolved")
+    # platforms = []
+    # for file in os.listdir(resolved_path):
+    #     if any(version in file for version in versions):
+    #         platforms.append("_".join(file.split("_")[:-1]))
+    platforms = ["macos-x86_64", "macos-aarch64"]
return set(platforms)
@@ -118,7 +127,9 @@ def get_valid_versions(repo_path: Path | str) -> set[str]:
def is_correct_dependency(platform: str, version: str, name: str) -> bool:
- return platform in name and version in name
+    # Dependency file names have the format {platform}_{version}.txt, e.g. linux-aarch64_3.12.txt
+ _platform, _version = name.rsplit(".", 1)[0].rsplit("_", 1)
+ return platform == _platform and version == _version
def is_valid_integration_file(
@@ -182,7 +193,7 @@ def get_gitignore_files(repo_path: str | Path) -> list[str]:
def convert_to_human_readable_size(size_bytes: float) -> str:
- for unit in [" B", " KB", " MB", " GB"]:
+ for unit in [" B", " KiB", " MiB", " GiB"]:
if abs(size_bytes) < 1024:
return str(round(size_bytes, 2)) + unit
size_bytes /= 1024
@@ -201,7 +212,7 @@ def compress(file_path: str) -> int:
return compressed_size
-def get_files(repo_path: str | Path, compressed: bool, py_version: str) -> list[FileDataEntry]:
+def get_files(repo_path: str | Path, compressed: bool, py_version: str, platform: str) -> list[FileDataEntry]:
"""
Calculates integration file sizes and versions from a repository.
Only takes into account integrations with a valid version looking at the pyproject.toml file
@@ -226,6 +237,8 @@ def get_files(repo_path: str | Path, compressed: bool, py_version: str) -> list[
relative_path = os.path.relpath(file_path, repo_path)
if not is_valid_integration_file(relative_path, str(repo_path)):
continue
+ integration_name = Path(relative_path).parts[0]
+
size = compress(file_path) if compressed else os.path.getsize(file_path)
integration_sizes[integration_name] = integration_sizes.get(integration_name, 0) + size
@@ -240,6 +253,8 @@ def get_files(repo_path: str | Path, compressed: bool, py_version: str) -> list[
"Size_Bytes": size,
"Size": convert_to_human_readable_size(size),
"Type": "Integration",
+ "Platform": platform,
+ "Python_Version": py_version,
}
for name, size in integration_sizes.items()
]
@@ -289,7 +304,7 @@ def get_dependencies(repo_path: str | Path, platform: str, version: str, compres
if os.path.isfile(file_path) and is_correct_dependency(platform, version, filename):
deps, download_urls, versions = get_dependencies_list(file_path)
- return get_dependencies_sizes(deps, download_urls, versions, compressed)
+ return get_dependencies_sizes(deps, download_urls, versions, compressed, platform, version)
return []
@@ -322,7 +337,7 @@ def get_dependencies_list(file_path: str) -> tuple[list[str], list[str], list[st
def get_dependencies_sizes(
- deps: list[str], download_urls: list[str], versions: list[str], compressed: bool
+ deps: list[str], download_urls: list[str], versions: list[str], compressed: bool, platform: str, py_version: str
) -> list[FileDataEntry]:
"""
Calculates the sizes of dependencies, either compressed or uncompressed.
@@ -375,12 +390,33 @@ def get_dependencies_sizes(
"Size_Bytes": int(size),
"Size": convert_to_human_readable_size(size),
"Type": "Dependency",
+ "Platform": platform,
+ "Python_Version": py_version,
}
)
return file_data
+def get_dependencies_from_json(
+ dependency_sizes: Path, platform: str, version: str, compressed: bool
+) -> list[FileDataEntry]:
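+    # Expected input: the sizes.json produced by the wheel builder, e.g.
+    # {"requests": {"version": "2.32.3", "compressed": 64000, "uncompressed": 250000}}
+    # (illustrative values).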
+ data = json.loads(dependency_sizes.read_text())
+ size_key = "compressed" if compressed else "uncompressed"
+ return [
+ {
+ "Name": name,
+ "Version": sizes.get("version", ""),
+ "Size_Bytes": int(sizes.get(size_key, 0)),
+ "Size": convert_to_human_readable_size(sizes.get(size_key, 0)),
+ "Type": "Dependency",
+ "Platform": platform,
+ "Python_Version": version,
+ }
+ for name, sizes in data.items()
+ ]
+
+
def is_excluded_from_wheel(path: str) -> bool:
"""
These files are excluded from the wheel in the agent build:
@@ -438,29 +474,10 @@ def is_excluded_from_wheel(path: str) -> bool:
return False
-def format_modules(
- modules: list[FileDataEntry],
- platform: str,
- py_version: str,
-) -> list[FileDataEntryPlatformVersion]:
- """
- Formats the modules list, adding platform and Python version information.
- """
- new_modules: list[FileDataEntryPlatformVersion] = [
- {**entry, "Platform": platform, "Python_Version": py_version} for entry in modules
- ]
- return new_modules
-
-
def save_json(
app: Application,
file_path: str,
- modules: (
- list[FileDataEntry]
- | list[FileDataEntryPlatformVersion]
- | list[CommitEntryWithDelta]
- | list[CommitEntryPlatformWithDelta]
- ),
+ modules: (list[FileDataEntry] | list[CommitEntryWithDelta] | list[CommitEntryPlatformWithDelta]),
) -> None:
if modules == []:
return
@@ -472,7 +489,7 @@ def save_json(
def save_csv(
app: Application,
- modules: list[FileDataEntryPlatformVersion] | list[CommitEntryWithDelta] | list[CommitEntryPlatformWithDelta],
+ modules: list[FileDataEntry] | list[CommitEntryWithDelta] | list[CommitEntryPlatformWithDelta],
file_path: str,
) -> None:
if modules == []:
@@ -499,7 +516,7 @@ def format(s: str) -> str:
def save_markdown(
app: Application,
title: str,
- modules: list[FileDataEntryPlatformVersion] | list[CommitEntryWithDelta] | list[CommitEntryPlatformWithDelta],
+ modules: list[FileDataEntry] | list[CommitEntryWithDelta] | list[CommitEntryPlatformWithDelta],
file_path: str,
) -> None:
if modules == []:
@@ -546,10 +563,110 @@ def save_markdown(
app.display(f"Markdown table saved to {file_path}")
+def save_html(
+ app: Application,
+ title: str,
+ modules: list[FileDataEntry],
+ file_path: str,
+    old_commit: str = "",
+) -> None:
+ """
+    Saves the modules list as HTML. If the output is larger than the PR comment size limit,
+    the short version is written instead.
+ """
+ if modules == []:
+ return
+
+ MAX_HTML_SIZE = 65536 # PR comment size max
+    html = ""
+
+ groups = group_modules(modules)
+
+ html_headers = "Compressed Size Changes
"
+ html_headers += f''
+ for (platform, py_version), group in groups.items():
+        html_subheaders = ""
+ total_diff_bytes = sum(int(item.get("Size_Bytes", 0)) for item in group)
+ sign_total = "+" if total_diff_bytes > 0 else ""
+ total_diff = convert_to_human_readable_size(total_diff_bytes)
+
+ added = [g for g in group if g.get("Delta_Type") == "New"]
+ removed = [g for g in group if g.get("Delta_Type") == "Removed"]
+ modified = [g for g in group if g.get("Delta_Type") == "Modified"]
+
+ total_added = sum(int(x.get("Size_Bytes", 0)) for x in added)
+ total_removed = sum(int(x.get("Size_Bytes", 0)) for x in removed)
+ total_modified = sum(int(x.get("Size_Bytes", 0)) for x in modified)
+
+ html_subheaders += f"Size Delta for {platform} and Python {py_version}:\n"
+ html_subheaders += f"{sign_total}{total_diff}
\n\n"
+
+        tables = ""
+
+        # Added / removed / modified summaries
+        tables += append_html_entry(total_added, "Added", added)
+        tables += append_html_entry(total_removed, "Removed", removed)
+        tables += append_html_entry(total_modified, "Modified", modified)
+
+ close_details = " \n\n"
+ if len(html_headers) + len(html_subheaders) + len(tables) + len(close_details) > MAX_HTML_SIZE:
+            tables = ""
+ tables += append_html_short_entry(total_added, "Added", added)
+ tables += append_html_short_entry(total_removed, "Removed", removed)
+ tables += append_html_short_entry(total_modified, "Modified", modified)
+
+ html += f"{html_subheaders}\n{tables}\n{close_details}"
+
+ html = f"{html_headers}\n{html}"
+
+ with open(file_path, "a", encoding="utf-8") as f:
+ f.write(html)
+ app.display(f"HTML file saved to {file_path}")
+
+
+def group_modules(
+ modules: list[FileDataEntry],
+) -> dict[tuple[str, str], list[FileDataEntry]]:
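+    # Buckets entries by (Platform, Python_Version), e.g. ("macos-x86_64", "3.12").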
+ groups: dict[tuple[str, str], list[FileDataEntry]] = {}
+ for m in modules:
+ key = (m.get("Platform", ""), m.get("Python_Version", ""))
+ if key not in groups:
+ groups[key] = []
+ groups[key].append(m)
+ return groups
+
+
+def append_html_entry(total: int, type: str, entries: list[FileDataEntry]) -> str:
+    html = ""
+ if total != 0:
+ sign = "+" if total > 0 else ""
+ html += f"{type}: {len(entries)} item(s), {sign}{convert_to_human_readable_size(total)}\n"
+ html += "Type | Name | Version | Size Delta | Percentage |
\n"
+ for e in entries:
+ html += f"{e.get('Type', '')} | {e.get('Name', '')} | {e.get('Version', '')} | "
+ html += f"{e.get('Size', '')} | {e.get('Percentage', '')}% |
\n"
+ html += "
\n"
+ else:
+ html += f"No {type.lower()} dependencies/integrations\n"
+
+ return html
+
+
+def append_html_short_entry(total: int, type: str, entries: list[FileDataEntry]) -> str:
+    html = ""
+ if total != 0:
+ sign = "+" if total > 0 else ""
+ html += f"{type}: {len(entries)} item(s), {sign}{convert_to_human_readable_size(total)}\n"
+ else:
+ html += f"No {type.lower()} dependencies/integrations\n"
+
+ return html
+
+
def print_table(
app: Application,
mode: str,
- modules: list[FileDataEntryPlatformVersion] | list[CommitEntryWithDelta] | list[CommitEntryPlatformWithDelta],
+ modules: list[FileDataEntry] | list[CommitEntryWithDelta] | list[CommitEntryPlatformWithDelta],
) -> None:
if modules == []:
return
@@ -557,6 +674,8 @@ def print_table(
columns = [col for col in modules[0].keys() if "Bytes" not in col]
modules_table: dict[str, dict[int, str]] = {col: {} for col in columns}
for i, row in enumerate(modules):
+ if row.get("Size_Bytes") == 0:
+ continue
for key in columns:
modules_table[key][i] = str(row.get(key, ""))
@@ -566,60 +685,51 @@ def print_table(
def export_format(
app: Application,
format: list[str],
- modules: list[FileDataEntryPlatformVersion],
+ modules: list[FileDataEntry],
mode: Literal["status", "diff"],
platform: Optional[str],
version: Optional[str],
compressed: bool,
) -> None:
size_type = "compressed" if compressed else "uncompressed"
+ name = f"{mode}_{size_type}"
+ if platform:
+ name += f"_{platform}"
+ if version:
+ name += f"_{version}"
for output_format in format:
if output_format == "csv":
- csv_filename = (
- f"{platform}_{version}_{size_type}_{mode}.csv"
- if platform and version
- else (
- f"{version}_{size_type}_{mode}.csv"
- if version
- else f"{platform}_{size_type}_{mode}.csv"
- if platform
- else f"{size_type}_{mode}.csv"
- )
- )
+ csv_filename = f"{name}.csv"
save_csv(app, modules, csv_filename)
elif output_format == "json":
- json_filename = (
- f"{platform}_{version}_{size_type}_{mode}.json"
- if platform and version
- else (
- f"{version}_{size_type}_{mode}.json"
- if version
- else f"{platform}_{size_type}_{mode}.json"
- if platform
- else f"{size_type}_{mode}.json"
- )
- )
+ json_filename = f"{name}.json"
save_json(app, json_filename, modules)
elif output_format == "markdown":
- markdown_filename = (
- f"{platform}_{version}_{size_type}_{mode}.md"
+ markdown_filename = f"{name}.md"
+ save_markdown(app, "Status", modules, markdown_filename)
+
+ elif output_format == "html":
+ html_filename = (
+ f"{platform}_{version}_{size_type}_{mode}.html"
if platform and version
else (
- f"{version}_{size_type}_{mode}.md"
+ f"{version}_{size_type}_{mode}.html"
if version
- else f"{platform}_{size_type}_{mode}.md"
+ else f"{platform}_{size_type}_{mode}.html"
if platform
- else f"{size_type}_{mode}.md"
+ else f"{size_type}_{mode}.html"
)
)
- save_markdown(app, "Status", modules, markdown_filename)
+ title = "Status" if mode == "status" else "Diff"
+ # mypy: modules narrowed to FileDataEntry when mode == diff
+ save_html(app, title, modules, html_filename) # type: ignore[arg-type]
def plot_treemap(
app: Application,
- modules: list[FileDataEntryPlatformVersion],
+ modules: list[FileDataEntry],
title: str,
show: bool,
mode: Literal["status", "diff"],
@@ -663,7 +773,7 @@ def plot_treemap(
def plot_status_treemap(
- modules: list[FileDataEntry] | list[FileDataEntryPlatformVersion],
+ modules: list[FileDataEntry],
) -> tuple[list[dict[str, float]], list[tuple[float, float, float, float]], list[Patch]]:
import matplotlib.pyplot as plt
from matplotlib.patches import Patch
@@ -697,7 +807,7 @@ def plot_status_treemap(
def plot_diff_treemap(
- modules: list[FileDataEntry] | list[FileDataEntryPlatformVersion],
+ modules: list[FileDataEntry],
) -> tuple[list[dict[str, float]], list[tuple[float, float, float, float]], list[Patch]]:
import matplotlib.pyplot as plt
from matplotlib.patches import Patch
@@ -764,7 +874,7 @@ def scale_colors_treemap(area: float, max_area: float) -> float:
def draw_treemap_rects_with_labels(
ax: Axes,
rects: list[dict],
- modules: list[FileDataEntry] | list[FileDataEntryPlatformVersion],
+ modules: list[FileDataEntry],
colors: list[tuple[float, float, float, float]],
) -> None:
from matplotlib.patches import Rectangle
@@ -837,24 +947,34 @@ def draw_treemap_rects_with_labels(
def send_metrics_to_dd(
app: Application,
- modules: list[FileDataEntryPlatformVersion],
- org: str,
- key: str,
+ modules: list[FileDataEntry],
+ org: str | None,
+ key: str | None,
+ site: str | None,
compressed: bool,
+ mode: Literal["status", "diff"],
+ commits: list[str] | None = None,
) -> None:
+ from datadog_api_client import ApiClient, Configuration
+ from datadog_api_client.v2.api.metrics_api import MetricsApi
+ from datadog_api_client.v2.model.metric_intake_type import MetricIntakeType
+ from datadog_api_client.v2.model.metric_payload import MetricPayload
+ from datadog_api_client.v2.model.metric_point import MetricPoint
+ from datadog_api_client.v2.model.metric_series import MetricSeries
+
metric_name = "datadog.agent_integrations"
size_type = "compressed" if compressed else "uncompressed"
+ dd_site = site if site else "datadoghq.com"
+ config_file_info = app.config.orgs.get(org, {}) if org else {"api_key": key, "site": dd_site}
- config_file_info = get_org(app, org) if org else {"api_key": key, "site": "datadoghq.com"}
- if not is_everything_committed():
- raise RuntimeError("All files have to be committed in order to send the metrics to Datadog")
- if "api_key" not in config_file_info:
+ # if not commits and not is_everything_committed():
+ # raise RuntimeError("All files have to be committed in order to send the metrics to Datadog")
+ if "api_key" not in config_file_info or config_file_info["api_key"] is None or config_file_info["api_key"] == "":
raise RuntimeError("No API key found in config file")
- if "site" not in config_file_info:
+ if "site" not in config_file_info or config_file_info["site"] is None or config_file_info["site"] == "":
raise RuntimeError("No site found in config file")
- message, tickets, prs = get_last_commit_data()
- timestamp = get_last_commit_timestamp()
+ timestamp, message, tickets, prs = get_commit_data(commits[-1]) if commits else get_commit_data()
metrics = []
n_integrations_metrics = []
@@ -863,15 +983,18 @@ def send_metrics_to_dd(
n_integrations: dict[tuple[str, str], int] = {}
n_dependencies: dict[tuple[str, str], int] = {}
+ gauge_type = MetricIntakeType.GAUGE
+
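+ # Build one gauge series per module, tagged with its metadata and the commit info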
for item in modules:
+ delta_type = item.get('Delta_Type', '')
metrics.append(
- {
- "metric": f"{metric_name}.size",
- "type": "gauge",
- "points": [(timestamp, item["Size_Bytes"])],
- "tags": [
- f"name:{item['Name']}",
- f"type:{item['Type']}",
+ MetricSeries(
+ metric=f"{metric_name}.size_{mode}",
+ type=gauge_type,
+ points=[MetricPoint(timestamp=timestamp, value=item["Size_Bytes"])],
+ tags=[
+ f"module_name:{item['Name']}",
+ f"module_type:{item['Type']}",
f"name_type:{item['Type']}({item['Name']})",
f"python_version:{item['Python_Version']}",
f"module_version:{item['Version']}",
@@ -882,110 +1005,304 @@ def send_metrics_to_dd(
f"jira_ticket:{tickets[0]}",
f"pr_number:{prs[-1]}",
f"commit_message:{message}",
+ f"delta_Type:{delta_type}",
],
- }
+ )
)
- key_count = (item["Platform"], item["Python_Version"])
+ key_count = (item['Platform'], item['Python_Version'])
if key_count not in n_integrations:
n_integrations[key_count] = 0
if key_count not in n_dependencies:
n_dependencies[key_count] = 0
- if item["Type"] == "Integration":
+ if item['Type'] == 'Integration':
n_integrations[key_count] += 1
- elif item["Type"] == "Dependency":
+ elif item['Type'] == 'Dependency':
n_dependencies[key_count] += 1
- for (platform, py_version), count in n_integrations.items():
- n_integrations_metrics.append(
- {
- "metric": f"{metric_name}.integration_count",
- "type": "gauge",
- "points": [(timestamp, count)],
- "tags": [
- f"platform:{platform}",
- f"python_version:{py_version}",
- "team:agent-integrations",
- f"metrics_version:{METRIC_VERSION}",
- ],
- }
- )
- for (platform, py_version), count in n_dependencies.items():
- n_dependencies_metrics.append(
- {
- "metric": f"{metric_name}.dependency_count",
- "type": "gauge",
- "points": [(timestamp, count)],
- "tags": [
- f"platform:{platform}",
- f"python_version:{py_version}",
- "team:agent-integrations",
- f"metrics_version:{METRIC_VERSION}",
- ],
- }
- )
+ if mode == "status":
+ for (platform, py_version), count in n_integrations.items():
+ n_integrations_metrics.append(
+ MetricSeries(
+ metric=f"{metric_name}.integration_count",
+ type=gauge_type,
+ points=[MetricPoint(timestamp=timestamp, value=count)],
+ tags=[
+ f"platform:{platform}",
+ f"python_version:{py_version}",
+ "team:agent-integrations",
+ f"metrics_version:{METRIC_VERSION}",
+ ],
+ )
+ )
+ for (platform, py_version), count in n_dependencies.items():
+ n_dependencies_metrics.append(
+ MetricSeries(
+ metric=f"{metric_name}.dependency_count",
+ type=gauge_type,
+ points=[MetricPoint(timestamp=timestamp, value=count)],
+ tags=[
+ f"platform:{platform}",
+ f"python_version:{py_version}",
+ "team:agent-integrations",
+ f"metrics_version:{METRIC_VERSION}",
+ ],
+ )
+ )
+
+ configuration = Configuration()
+ configuration.request_timeout = (5, 5)
+
+ configuration.api_key = {
+ "apiKeyAuth": config_file_info["api_key"],
+ }
+ configuration.server_variables["site"] = config_file_info["site"]
+
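+ # Submit the size series always; the integration/dependency counts are only meaningful for status runs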
+ with ApiClient(configuration) as api_client:
+ api_instance = MetricsApi(api_client)
+ api_instance.submit_metrics(body=MetricPayload(series=metrics))
+ if mode == "status":
+ api_instance.submit_metrics(body=MetricPayload(series=n_integrations_metrics))
+ api_instance.submit_metrics(body=MetricPayload(series=n_dependencies_metrics))
+ print("Metrics sent to Datadog")
+
+
+def is_everything_committed() -> bool:
+ result = subprocess.run(["git", "status", "--porcelain"], capture_output=True, text=True)
+ return result.stdout.strip() == ""
- initialize(
- api_key=config_file_info["api_key"],
- api_host=f"https://api.{config_file_info['site']}",
+
+def get_commit_data(commit: str | None = None) -> tuple[int, str, list[str], list[str]]:
+ """
+ Get the commit data for a given commit. If no commit is provided, get the last commit's data.
+ """
+ cmd = ["git", "log", "-1", "--format=%s%n%ct"]
+ if commit:
+ cmd.append(commit)
+ result = subprocess.run(cmd, capture_output=True, text=True, check=True)
+
+ cmd_branch = ["git", "branch", "--remote", "--contains", commit if commit else "HEAD"]
+ branch_name = subprocess.check_output(cmd_branch).decode("utf-8")
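+ # Jira ticket keys (e.g. AGENT-1234) come from the message and branch name; PR numbers (e.g. #123) from the message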
+ ticket_pattern = r'\b(?:DBMON|SAASINT|AGENT|AI)-\d+\b'
+ pr_pattern = r'#(\d+)'
+
+ message, timestamp = result.stdout.strip().split('\n')
+ tickets = sorted(set(re.findall(ticket_pattern, message) + re.findall(ticket_pattern, branch_name)))
+ prs = re.findall(pr_pattern, message)
+ if not tickets:
+ tickets = [""]
+ if not prs:
+ prs = [""]
+ return int(timestamp), message, tickets, prs
+
+
+def check_commits(commits: list[str]) -> bool:
+ # Check if commits are from master branch
+ for commit in commits:
+ result = subprocess.run(["git", "branch", "--contains", commit], capture_output=True, text=True, check=True)
+ if "master" not in result.stdout:
+ return False
+
+ # Check the commits are consecutive: the first commit must be the direct parent of the second
+ result = subprocess.run(
+ ["git", "rev-parse", f"{commits[1]}^"],
+ capture_output=True,
+ text=True,
+ check=True,
)
+ parent_commit = result.stdout.strip()
- api.Metric.send(metrics=metrics)
- api.Metric.send(metrics=n_integrations_metrics)
- api.Metric.send(metrics=n_dependencies_metrics)
+ if parent_commit != commits[0]:
+ raise ValueError("Second commit must be the direct parent of first commit. Metrics cannot be uploaded.")
+ return True
-def get_org(app: Application, org: str) -> dict[str, str]:
- config_path: Path = app.config_file.path
- current_section = None
- org_data = {}
+@cache
+def get_last_dependency_sizes_artifact(app: Application, commit: str, platform: str) -> Path | None:
+ dep_sizes_json = get_dep_sizes_json(commit, platform)
+ if not dep_sizes_json:
+ dep_sizes_json = get_previous_dep_sizes_json(app.repo.git.merge_base(commit, "master"), platform)
+ return Path(dep_sizes_json) if dep_sizes_json else None
- with open(config_path, "r", encoding="utf-8") as f:
- for line in f:
- line = line.strip()
- if not line or line.startswith("#"):
- continue
- # Detect section header
- if line.startswith("[") and line.endswith("]"):
- current_section = line[1:-1]
- continue
+@cache
+def get_dep_sizes_json(current_commit: str, platform: str) -> Path | None:
+ print(f"Getting dependency sizes json for commit: {current_commit}, platform: {platform}")
+ run_id = get_run_id(current_commit, RESOLVE_BUILD_DEPS_WORKFLOW)
+ if run_id:
+ dep_sizes_json = get_current_dep_sizes_json(run_id, platform)
+ print(f"Dependency sizes json path: {dep_sizes_json}")
+ return dep_sizes_json
+ else:
+ return None
+
+
+@cache
+def get_run_id(commit: str, workflow: str) -> str | None:
+ print(f"Getting run id for commit: {commit}, workflow: {workflow}")
+ result = subprocess.run(
+ [
+ 'gh',
+ 'run',
+ 'list',
+ '--workflow',
+ workflow,
+ '-c',
+ commit,
+ '--json',
+ 'databaseId',
+ '--jq',
+ '.[-1].databaseId',
+ ],
+ capture_output=True,
+ text=True,
+ )
+ if result.stderr and "Bad credentials" in result.stderr:
+ raise RuntimeError("Bad credentials, please check your GitHub token")
- if current_section == f"orgs.{org}":
- if "=" in line:
- key, value = line.split("=", 1)
- key = key.strip()
- value = value.strip().strip('"')
- org_data[key] = value
- if not org_data:
- raise ValueError(f"Organization '{org}' not found in config")
- return org_data
+ run_id = result.stdout.strip() if result.stdout else None
+ if run_id:
+ print(f"Run id: {run_id}")
+ else:
+ print(f"No run id found for commit: {commit}, workflow: {workflow}")
+ return run_id
-def is_everything_committed() -> bool:
- result = subprocess.run(["git", "status", "--porcelain"], capture_output=True, text=True)
- return result.stdout.strip() == ""
+@cache
+def get_current_dep_sizes_json(run_id: str, platform: str) -> Path | None:
+ print(f"Getting current sizes json for run_id={run_id}, platform={platform}")
+ with tempfile.TemporaryDirectory() as tmpdir:
+ print(f"Downloading artifacts to {tmpdir}")
+ try:
+ subprocess.run(
+ [
+ 'gh',
+ 'run',
+ 'download',
+ run_id,
+ '--name',
+ f'target-{platform}',
+ '--dir',
+ tmpdir,
+ ],
+ check=True,
+ capture_output=True,
+ text=True,
+ )
+ except subprocess.CalledProcessError as e:
+ if e.stderr and "no artifact matches any of the names or patterns provided" in e.stderr:
+ print(f"No artifact found for run_id={run_id}, platform={platform}")
+ else:
+ print(f"Failed to download current sizes json: {e}")
-def get_last_commit_timestamp() -> int:
- result = subprocess.run(["git", "log", "-1", "--format=%ct"], capture_output=True, text=True, check=True)
- return int(result.stdout.strip())
+ print("Comparing to merge base commit")
+ return None
+ print(f"Downloaded artifacts to {tmpdir}")
+ sizes_file = Path(tmpdir) / platform / 'py3' / 'sizes.json'
-def get_last_commit_data() -> tuple[str, list[str], list[str]]:
- result = subprocess.run(["git", "log", "-1", "--format=%s"], capture_output=True, text=True, check=True)
- ticket_pattern = r"\b(?:DBMON|SAASINT|AGENT|AI)-\d+\b"
- pr_pattern = r"#(\d+)"
+ if not sizes_file.is_file():
+ print(f"Sizes artifact not found at {sizes_file}")
+ return None
- message = result.stdout.strip()
- tickets = re.findall(ticket_pattern, message)
- prs = re.findall(pr_pattern, message)
+ print(f"Found sizes artifact at {sizes_file}")
+ dest_path = sizes_file.rename(f"{platform}.json")
+ return dest_path
- if not tickets:
- tickets = [""]
- if not prs:
- prs = [""]
- return message, tickets, prs
+
+@cache
+def get_artifact(
+ run_id: str,
+ artifact_name: str,
+ dir: str | None = None,
+) -> Path | None:
+ print(f"Downloading artifact: {artifact_name} from run_id={run_id}")
+ try:
+ cmd = [
+ 'gh',
+ 'run',
+ 'download',
+ run_id,
+ '--name',
+ artifact_name,
+ ]
+ if dir is not None:
+ cmd.extend(['--dir', dir])
+ subprocess.run(
+ cmd,
+ check=True,
+ text=True,
+ )
+ except subprocess.CalledProcessError as e:
+ print(f"Failed to download artifact: {artifact_name} from run_id={run_id}:{e}")
+ return None
+
+ print(f"Artifact downloaded to: {artifact_name}")
+ return Path(dir, artifact_name) if dir else Path(artifact_name)
+
+
+@cache
+def get_sizes_json_from_artifacts(
+ commit: str,
+ dir: str | None = None,
+ compressed: bool | None = None,
+ extension: str = "json",
+) -> dict[str, Path | None]:
+ run_id = (
+ get_run_id(commit, RESOLVE_BUILD_DEPS_WORKFLOW)
+ if extension == "json"
+ else get_run_id(commit, MEASURE_DISK_USAGE_WORKFLOW)
+ ) # changed to RESOLVE_BUILD_DEPS_WORKFLOW from MEASURE_DISK_USAGE_WORKFLOW
+ compressed_json = None
+ uncompressed_json = None
+ if run_id and (compressed is None or compressed):
+ artifact_name = f'status_compressed.{extension}' if extension == "json" else f'compressed_status.{extension}'
+ print(f"Artifact name: {artifact_name}")
+ compressed_json = get_artifact(run_id, artifact_name, dir)
+ if run_id and (compressed is None or not compressed):
+ artifact_name = f'status_uncompressed.{extension}' if extension == "json" else f'uncompressed_status.{extension}'
+ print(f"Artifact name: {artifact_name}")
+ uncompressed_json = get_artifact(run_id, artifact_name, dir)
+ return {"compressed": compressed_json, "uncompressed": uncompressed_json}
+
+
+@cache
+def get_previous_dep_sizes_json(commit: str, platform: str, compressed: bool | None = None) -> Path | None:
+ with tempfile.TemporaryDirectory() as tmpdir:
+ sizes_json = get_sizes_json_from_artifacts(commit, dir=tmpdir, compressed=compressed)
+ if not sizes_json["compressed"] or not sizes_json["uncompressed"]:
+ return None
+ sizes = parse_dep_sizes_json(sizes_json["compressed"], sizes_json["uncompressed"], platform)
+
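+ # Write the merged per-dependency mapping to <platform>.json in the current directory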
+ output_path = Path(f'{platform}.json')
+ output_path.write_text(json.dumps(sizes, indent=2))
+ print(f"Wrote merged sizes json to {output_path}")
+ return output_path
+
+
+@cache
+def parse_dep_sizes_json(compressed_json_path: Path, uncompressed_json_path: Path, platform: str) -> dict[str, dict[str, int | str | None]]:
+ compressed_list = list(json.loads(compressed_json_path.read_text()))
+ uncompressed_list = list(json.loads(uncompressed_json_path.read_text()))
+
+ sizes_json = {
+ dep["Name"]: {
+ "compressed": int(dep["Size_Bytes"]),
+ "version": dep.get("Version"),
+ }
+ for dep in compressed_list
+ if dep.get("Type") == "Dependency" and dep.get("Platform") == platform
+ }
+
+ for dep in uncompressed_list:
+ if dep.get("Type") == "Dependency" and dep.get("Platform") == platform:
+ name = dep["Name"]
+ entry = sizes_json.setdefault(name, {"version": dep.get("Version")})
+ entry["uncompressed"] = int(dep["Size_Bytes"])
+
+ return sizes_json
class WrongDependencyFormat(Exception):
diff --git a/ddev/src/ddev/cli/size/utils/common_params.py b/ddev/src/ddev/cli/size/utils/common_params.py
index cc77784a7f1c8..76edb29ecbbfb 100644
--- a/ddev/src/ddev/cli/size/utils/common_params.py
+++ b/ddev/src/ddev/cli/size/utils/common_params.py
@@ -12,6 +12,7 @@ def common_params(func: Callable) -> Callable:
@click.option("--compressed", is_flag=True, help="Measure compressed size")
@click.option(
"--format",
+ show_default=True,
help="Format of the output (comma-separated values: png, csv, markdown, json)",
callback=lambda _, __, v: v.split(",") if v else [],
)
diff --git a/ddev/tests/cli/size/test_diff.py b/ddev/tests/cli/size/test_diff.py
index 9ab40a5451c14..1ddea62801385 100644
--- a/ddev/tests/cli/size/test_diff.py
+++ b/ddev/tests/cli/size/test_diff.py
@@ -7,6 +7,8 @@
import pytest
+from ddev.cli.size.diff import validate_parameters
+
def to_native_path(path: str) -> str:
return path.replace("/", os.sep)
@@ -17,210 +19,306 @@ def mock_size_diff_dependencies():
mock_git_repo = MagicMock()
mock_git_repo.repo_dir = "fake_repo"
mock_git_repo.get_commit_metadata.return_value = ("Feb 1 2025", "", "")
+ mock_git_repo.__enter__.return_value = mock_git_repo
+ mock_git_repo.__exit__.return_value = None
def get_compressed_files_side_effect(*args, **kwargs):
get_compressed_files_side_effect.counter += 1
+ py_version = args[2]
+ platform = args[3]
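+ # Odd-numbered calls return the "before" snapshot, even-numbered calls the "after" snapshot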
if get_compressed_files_side_effect.counter % 2 == 1:
- return [{"Name": "path1.py", "Version": "1.1.1", "Size_Bytes": 1000, "Type": "Integration"}] # before
+ return [
+ {
+ "Name": "path1.py",
+ "Version": "1.1.1",
+ "Size_Bytes": 1000,
+ "Type": "Integration",
+ "Platform": platform,
+ "Python_Version": py_version,
+ }
+ ] # before
else:
return [
- {"Name": "path1.py", "Version": "1.1.2", "Size_Bytes": 1200, "Type": "Integration"},
- {"Name": "path2.py", "Version": "1.1.1", "Size_Bytes": 500, "Type": "Integration"},
+ {
+ "Name": "path1.py",
+ "Version": "1.1.2",
+ "Size_Bytes": 1200,
+ "Type": "Integration",
+ "Platform": platform,
+ "Python_Version": py_version,
+ },
+ {
+ "Name": "path2.py",
+ "Version": "1.1.1",
+ "Size_Bytes": 500,
+ "Type": "Integration",
+ "Platform": platform,
+ "Python_Version": py_version,
+ },
] # after
get_compressed_files_side_effect.counter = 0
def get_compressed_dependencies_side_effect(*args, **kwargs):
get_compressed_dependencies_side_effect.counter += 1
+ platform = args[1]
+ py_version = args[2]
if get_compressed_dependencies_side_effect.counter % 2 == 1:
- return [{"Name": "dep1", "Version": "1.0.0", "Size_Bytes": 2000, "Type": "Dependency"}] # before
+ return [
+ {
+ "Name": "dep1",
+ "Version": "1.0.0",
+ "Size_Bytes": 2000,
+ "Type": "Dependency",
+ "Platform": platform,
+ "Python_Version": py_version,
+ }
+ ] # before
else:
return [
- {"Name": "dep1", "Version": "1.1.0", "Size_Bytes": 2500, "Type": "Dependency"},
- {"Name": "dep2", "Version": "1.0.0", "Size_Bytes": 1000, "Type": "Dependency"},
+ {
+ "Name": "dep1",
+ "Version": "1.1.0",
+ "Size_Bytes": 2500,
+ "Type": "Dependency",
+ "Platform": platform,
+ "Python_Version": py_version,
+ },
+ {
+ "Name": "dep2",
+ "Version": "1.0.0",
+ "Size_Bytes": 1000,
+ "Type": "Dependency",
+ "Platform": platform,
+ "Python_Version": py_version,
+ },
] # after
get_compressed_dependencies_side_effect.counter = 0
with (
patch(
- "ddev.cli.size.diff.get_valid_platforms",
+ "ddev.cli.size.utils.common_funcs.get_valid_platforms",
return_value=({"linux-x86_64", "macos-x86_64", "linux-aarch64", "macos-aarch64", "windows-x86_64"}),
),
patch(
- "ddev.cli.size.diff.get_valid_versions",
+ "ddev.cli.size.utils.common_funcs.get_valid_versions",
return_value=({"3.12"}),
),
- patch("ddev.cli.size.diff.GitRepo.__enter__", return_value=mock_git_repo),
- patch("ddev.cli.size.diff.GitRepo.__exit__", return_value=None),
- patch("ddev.cli.size.diff.GitRepo.checkout_commit"),
+ patch("ddev.cli.size.utils.common_funcs.GitRepo", return_value=mock_git_repo),
patch("ddev.cli.size.utils.common_funcs.tempfile.mkdtemp", return_value="fake_repo"),
- patch("ddev.cli.size.diff.get_files", side_effect=get_compressed_files_side_effect),
- patch("ddev.cli.size.diff.get_dependencies", side_effect=get_compressed_dependencies_side_effect),
- patch("ddev.cli.size.diff.format_modules", side_effect=lambda m, *_: m),
+ patch("ddev.cli.size.utils.common_funcs.get_files", side_effect=get_compressed_files_side_effect),
+ patch("ddev.cli.size.utils.common_funcs.get_dependencies", side_effect=get_compressed_dependencies_side_effect),
patch("ddev.cli.size.utils.common_funcs.open", MagicMock()),
):
yield
-def test_diff_no_args(ddev, mock_size_diff_dependencies):
- assert ddev("size", "diff", "commit1", "commit2").exit_code == 0
- assert ddev("size", "diff", "commit1", "commit2", "--compressed").exit_code == 0
- assert ddev("size", "diff", "commit1", "commit2", "--format", "csv,markdown,json,png").exit_code == 0
- assert ddev("size", "diff", "commit1", "commit2", "--show-gui").exit_code == 0
-
-
-def test_diff_with_platform_and_version(ddev, mock_size_diff_dependencies):
- assert ddev("size", "diff", "commit1", "commit2", "--platform", "linux-aarch64", "--python", "3.12").exit_code == 0
- assert (
- ddev(
- "size", "diff", "commit1", "commit2", "--platform", "linux-aarch64", "--python", "3.12", "--compressed"
- ).exit_code
- == 0
- )
- assert (
- ddev(
- "size",
- "diff",
- "commit1",
- "commit2",
- "--platform",
- "linux-aarch64",
- "--python",
- "3.12",
- "--format",
- "csv,markdown,json,png",
- ).exit_code
- == 0
- )
- assert (
- ddev(
- "size", "diff", "commit1", "commit2", "--platform", "linux-aarch64", "--python", "3.12", "--show-gui"
- ).exit_code
- == 0
- )
-
-
-def test_diff_no_differences(ddev):
+@pytest.fixture
+def mock_size_diff_no_diff_dependencies():
fake_repo = MagicMock()
fake_repo.repo_dir = "fake_repo"
fake_repo.get_commit_metadata.return_value = ("Feb 1 2025", "", "")
+ fake_repo.__enter__.return_value = fake_repo
+ fake_repo.__exit__.return_value = None
+
+ def get_files_side_effect(*args, **kwargs):
+ py_version = args[2]
+ platform = args[3]
+ return [
+ {
+ "Name": "path1.py",
+ "Version": "1.0.0",
+ "Size_Bytes": 1000,
+ "Type": "Integration",
+ "Platform": platform,
+ "Python_Version": py_version,
+ },
+ {
+ "Name": "path2.py",
+ "Version": "1.0.0",
+ "Size_Bytes": 500,
+ "Type": "Integration",
+ "Platform": platform,
+ "Python_Version": py_version,
+ },
+ ]
+
+ def get_dependencies_side_effect(*args, **kwargs):
+ platform = args[1]
+ py_version = args[2]
+ return [
+ {
+ "Name": "dep1.whl",
+ "Version": "2.0.0",
+ "Size_Bytes": 2000,
+ "Type": "Dependency",
+ "Platform": platform,
+ "Python_Version": py_version,
+ },
+ {
+ "Name": "dep2.whl",
+ "Version": "2.0.0",
+ "Size_Bytes": 1000,
+ "Type": "Dependency",
+ "Platform": platform,
+ "Python_Version": py_version,
+ },
+ ]
with (
- patch("ddev.cli.size.diff.GitRepo.__enter__", return_value=fake_repo),
- patch("ddev.cli.size.diff.GitRepo.__exit__", return_value=None),
+ patch("ddev.cli.size.utils.common_funcs.GitRepo", return_value=fake_repo),
patch(
- "ddev.cli.size.diff.get_valid_platforms",
+ "ddev.cli.size.utils.common_funcs.get_valid_platforms",
return_value=({"linux-x86_64", "macos-x86_64", "linux-aarch64", "macos-aarch64", "windows-x86_64"}),
),
patch(
- "ddev.cli.size.diff.get_valid_versions",
+ "ddev.cli.size.utils.common_funcs.get_valid_versions",
return_value=({"3.12"}),
),
- patch.object(fake_repo, "checkout_commit"),
patch("ddev.cli.size.utils.common_funcs.tempfile.mkdtemp", return_value="fake_repo"),
patch("ddev.cli.size.utils.common_funcs.os.path.exists", return_value=True),
patch("ddev.cli.size.utils.common_funcs.os.path.isdir", return_value=True),
patch("ddev.cli.size.utils.common_funcs.os.path.isfile", return_value=True),
patch("ddev.cli.size.utils.common_funcs.os.listdir", return_value=["linux-aarch64_3.12"]),
patch(
- "ddev.cli.size.diff.get_files",
- return_value=[
- {"Name": "path1.py", "Version": "1.0.0", "Size_Bytes": 1000},
- {"Name": "path2.py", "Version": "1.0.0", "Size_Bytes": 500},
- ],
+ "ddev.cli.size.utils.common_funcs.get_files",
+ side_effect=get_files_side_effect,
),
patch(
- "ddev.cli.size.diff.get_dependencies",
- return_value=[
- {"Name": "dep1.whl", "Version": "2.0.0", "Size_Bytes": 2000},
- {"Name": "dep2.whl", "Version": "2.0.0", "Size_Bytes": 1000},
- ],
+ "ddev.cli.size.utils.common_funcs.get_dependencies",
+ side_effect=get_dependencies_side_effect,
),
patch("ddev.cli.size.utils.common_funcs.open", MagicMock()),
):
- result = ddev(
- "size", "diff", "commit1", "commit2", "--platform", "linux-aarch64", "--python", "3.12", "--compressed"
- )
+ yield
- assert result.exit_code == 0, result.output
- assert "No size differences were detected" in result.output
- assert ddev("size", "diff", "commit1", "commit2").exit_code == 0
- assert ddev("size", "diff", "commit1", "commit2", "--compressed").exit_code == 0
- assert ddev("size", "diff", "commit1", "commit2", "--format", "csv,markdown,json,png").exit_code == 0
- assert ddev("size", "diff", "commit1", "commit2", "--show-gui").exit_code == 0
+@pytest.mark.parametrize(
+ "diff_args",
+ [
+ ["commit1", "--compare-to", "commit2"],
+ ["commit1", "--compare-to", "commit2", "--compressed"],
+ ["commit1", "--compare-to", "commit2", "--format", "csv,markdown,json,png"],
+ ["commit1", "--compare-to", "commit2", "--show-gui"],
+ ["commit1", "--compare-to", "commit2", "--platform", "linux-aarch64", "--python", "3.12"],
+ ["commit1", "--compare-to", "commit2", "--platform", "linux-aarch64", "--python", "3.12", "--compressed"],
+ ],
+ ids=[
+ "no options",
+ "compressed",
+ "all formats",
+ "show gui",
+ "with platform and version",
+ "with platform, version and compressed",
+ ],
+)
+def test_diff_options(ddev, mock_size_diff_dependencies, diff_args):
+ result = ddev("size", "diff", *diff_args)
+ assert result.exit_code == 0
-def test_diff_invalid_platform(ddev):
- mock_git_repo = MagicMock()
- mock_git_repo.repo_dir = "fake_repo"
- mock_git_repo.get_module_commits.return_value = ["commit1", "commit2"]
- mock_git_repo.get_commit_metadata.side_effect = lambda c: ("Apr 4 2025", "Fix dep", c)
- mock_git_repo.__enter__.return_value = mock_git_repo
- with (
- patch("ddev.cli.size.diff.GitRepo", return_value=mock_git_repo),
- patch(
- "ddev.cli.size.diff.get_valid_platforms",
- return_value=({"linux-x86_64", "macos-x86_64", "linux-aarch64", "macos-aarch64", "windows-x86_64"}),
- ),
- patch(
- "ddev.cli.size.diff.get_valid_versions",
- return_value=({"3.12"}),
- ),
- ):
- result = ddev("size", "diff", "commit1", "commit2", "--platform", "linux", "--python", "3.12", "--compressed")
- assert result.exit_code != 0
+@pytest.mark.parametrize(
+ "diff_args",
+ [
+ ["commit1", "--compare-to", "commit2", "--platform", "linux-aarch64", "--python", "3.12", "--compressed"],
+ ["commit1", "--compare-to", "commit2"],
+ ["commit1", "--compare-to", "commit2", "--compressed"],
+ ["commit1", "--compare-to", "commit2", "--format", "csv,markdown,json,png"],
+ ["commit1", "--compare-to", "commit2", "--show-gui"],
+ ],
+ ids=[
+ "platform, python and compressed",
+ "no options",
+ "compressed",
+ "all formats",
+ "show gui",
+ ],
+)
+def test_diff_no_differences(ddev, mock_size_diff_no_diff_dependencies, diff_args):
+ result = ddev("size", "diff", *diff_args)
+ assert result.exit_code == 0, result.output
+ assert "No size differences were detected" in result.output
-def test_diff_invalid_version(ddev):
- mock_git_repo = MagicMock()
- mock_git_repo.repo_dir = "fake_repo"
- mock_git_repo.get_module_commits.return_value = ["commit1", "commit2"]
- mock_git_repo.get_commit_metadata.side_effect = lambda c: ("Apr 4 2025", "Fix dep", c)
- mock_git_repo.__enter__.return_value = mock_git_repo
- with (
- patch("ddev.cli.size.diff.GitRepo", return_value=mock_git_repo),
- patch(
- "ddev.cli.size.diff.get_valid_platforms",
- return_value=({"linux-x86_64", "macos-x86_64", "linux-aarch64", "macos-aarch64", "windows-x86_64"}),
+@pytest.mark.parametrize(
+ "first_commit, second_commit, format_list, platform, version, error_expected",
+ [
+ ("abcdefg", "bcdefgh", [], "invalid-platform", "3.9", "Invalid platform: invalid-platform"),
+ ("abcdefg", "bcdefgh", [], "linux-x86_64", "invalid-version", "Invalid version: invalid-version"),
+ (
+ "abc",
+ "bcd",
+ [],
+ "linux-x86_64",
+ "3.9",
+ True,
),
- patch(
- "ddev.cli.size.diff.get_valid_versions",
- return_value=({"3.12"}),
+ (
+ "abc",
+ "bcdefgh",
+ [],
+ "linux-x86_64",
+ "3.9",
+ True,
),
- ):
- result = ddev(
- "size",
- "diff",
- "commit1",
- "commit2",
- "--platform",
- "linux-aarch64",
- "--python",
- "2.10", # invalid
- "--compressed",
- )
- assert result.exit_code != 0
-
-
-def test_diff_invalid_platform_and_version(ddev):
- mock_git_repo = MagicMock()
- mock_git_repo.repo_dir = "fake_repo"
- mock_git_repo.get_module_commits.return_value = ["commit1", "commit2"]
- mock_git_repo.get_commit_metadata.side_effect = lambda c: ("Apr 4 2025", "Fix dep", c)
- mock_git_repo.__enter__.return_value = mock_git_repo
- with (
- patch("ddev.cli.size.diff.GitRepo", return_value=mock_git_repo),
- patch(
- "ddev.cli.size.diff.get_valid_platforms",
- return_value=({"linux-x86_64", "macos-x86_64", "linux-aarch64", "macos-aarch64", "windows-x86_64"}),
+ (
+ "abcdefg",
+ "bcd",
+ [],
+ "linux-x86_64",
+ "3.9",
+ True,
),
- patch(
- "ddev.cli.size.diff.get_valid_versions",
- return_value=({"3.12"}),
+ ("abcdefg", "abcdefg", [], "linux-x86_64", "3.9", True),
+ (
+ "abcdefg",
+ "bcdefgh",
+ ["invalid-format"],
+ "linux-x86_64",
+ "3.9",
+ True,
),
- ):
- result = ddev("size", "diff", "commit1", "commit2", "--platform", "linux", "--python", "2.10", "--compressed")
- assert result.exit_code != 0
+ (
+ "abc",
+ "abcdefg",
+ ["invalid-format"],
+ "invalid-platform",
+ "3.9",
+ True,
+ ),
+ ("abcdefg", "bcdefgh", ["png"], "linux-x86_64", "3.9", False),
+ ("abcdefg", "bcdefgh", [], None, None, False),
+ ],
+ ids=[
+ "invalid platform",
+ "invalid version",
+ "both commits too short",
+ "first commit too short",
+ "second commit too short",
+ "same commits",
+ "invalid format",
+ "multiple errors",
+ "valid parameters",
+ "valid parameters without optional values",
+ ],
+)
+def test_validate_parameters(first_commit, second_commit, format_list, platform, version, error_expected):
+ valid_platforms = {"linux-x86_64", "windows-x86_64"}
+ valid_versions = {"3.9", "3.11"}
+
+ app = MagicMock()
+ app.abort.side_effect = SystemExit
+
+ if error_expected:
+ with pytest.raises(SystemExit):
+ validate_parameters(
+ app, first_commit, second_commit, format_list, valid_platforms, valid_versions, platform, version
+ )
+ app.abort.assert_called_once()
+ else:
+ validate_parameters(
+ app, first_commit, second_commit, format_list, valid_platforms, valid_versions, platform, version
+ )
+ app.abort.assert_not_called()
diff --git a/ddev/tests/cli/size/test_status.py b/ddev/tests/cli/size/test_status.py
index 893a64917816c..d4a92ca333913 100644
--- a/ddev/tests/cli/size/test_status.py
+++ b/ddev/tests/cli/size/test_status.py
@@ -8,6 +8,8 @@
import pytest
+from ddev.cli.size.status import validate_parameters
+
def to_native_path(path: str) -> str:
return path.replace("/", os.sep)
@@ -45,15 +47,15 @@ def mock_size_status():
with (
patch("ddev.cli.size.utils.common_funcs.get_gitignore_files", return_value=set()),
patch(
- "ddev.cli.size.status.get_valid_platforms",
+ "ddev.cli.size.utils.common_funcs.get_valid_platforms",
return_value=({'linux-x86_64', 'macos-x86_64', 'linux-aarch64', 'macos-aarch64', 'windows-x86_64'}),
),
patch(
- "ddev.cli.size.status.get_valid_versions",
+ "ddev.cli.size.utils.common_funcs.get_valid_versions",
return_value=({'3.12'}),
),
- patch("ddev.cli.size.status.get_files", return_value=fake_files),
- patch("ddev.cli.size.status.get_dependencies", return_value=fake_deps),
+ patch("ddev.cli.size.utils.common_funcs.get_files", return_value=fake_files),
+ patch("ddev.cli.size.utils.common_funcs.get_dependencies", return_value=fake_deps),
patch(
"ddev.cli.size.utils.common_funcs.os.path.relpath",
side_effect=lambda path, _: path.replace(f"fake_root{os.sep}", ""),
@@ -67,62 +69,178 @@ def mock_size_status():
yield mock_app
-def test_status_no_args(ddev, mock_size_status):
- assert ddev("size", "status").exit_code == 0
- assert ddev("size", "status", "--compressed").exit_code == 0
- assert ddev("size", "status", "--format", "csv,markdown,json,png").exit_code == 0
- assert ddev("size", "status", "--show-gui").exit_code == 0
-
-
-def test_status(ddev, mock_size_status):
- assert (ddev("size", "status", "--platform", "linux-aarch64", "--python", "3.12")).exit_code == 0
- assert (ddev("size", "status", "--platform", "linux-aarch64", "--python", "3.12", "--compressed")).exit_code == 0
- assert (
- ddev("size", "status", "--platform", "linux-aarch64", "--python", "3.12", "--format", "csv,markdown,json,png")
- ).exit_code == 0
- assert (ddev("size", "status", "--platform", "linux-aarch64", "--python", "3.12", "--show-gui")).exit_code == 0
-
-
-def test_status_wrong_platform(ddev):
- with (
- patch(
- "ddev.cli.size.status.get_valid_platforms",
- return_value=({'linux-x86_64', 'macos-x86_64', 'linux-aarch64', 'macos-aarch64', 'windows-x86_64'}),
- ),
- patch(
- "ddev.cli.size.status.get_valid_versions",
- return_value=({'3.12'}),
- ),
- ):
- result = ddev("size", "status", "--platform", "linux", "--python", "3.12", "--compressed")
- assert result.exit_code != 0
-
-
-def test_status_wrong_version(ddev):
- with (
- patch(
- "ddev.cli.size.status.get_valid_platforms",
- return_value=({'linux-x86_64', 'macos-x86_64', 'linux-aarch64', 'macos-aarch64', 'windows-x86_64'}),
+@pytest.mark.parametrize(
+ "args, use_dependency_sizes",
+ [
+ ([], False),
+ (["--compressed"], False),
+ (["--format", "csv,markdown,json,png"], False),
+ (["--show-gui"], False),
+ (["--platform", "linux-aarch64", "--python", "3.12"], False),
+ (["--platform", "linux-aarch64", "--python", "3.12", "--compressed"], False),
+ (["--platform", "linux-aarch64", "--python", "3.12", "--format", "csv,markdown,json,png"], False),
+ (["--platform", "linux-aarch64", "--python", "3.12", "--show-gui"], False),
+ ],
+ ids=[
+ "no_args",
+ "compressed",
+ "format",
+ "show_gui",
+ "platform_and_version",
+ "platform_version_compressed",
+ "platform_version_format",
+ "platform_version_show_gui",
+ ],
+)
+def test_status(ddev, mock_size_status, tmp_path, args, use_dependency_sizes):
+ command = ["size", "status"] + args
+
+ if use_dependency_sizes:
+ fake_deps = [
+ {
+ "Name": "dep1",
+ "Version": "1.1.1",
+ "Size_Bytes": 5678,
+ "Size": 123,
+ "Type": "Dependency",
+ }
+ ]
+ dependency_sizes_file = tmp_path / "sizes"
+ dependency_sizes_file.write_text("{}")
+ command.extend(["--dependency-sizes", str(dependency_sizes_file)])
+
+ with patch("ddev.cli.size.utils.common_funcs.get_dependencies_from_json", return_value=fake_deps):
+ result = ddev(*command)
+ assert result.exit_code == 0
+ else:
+ result = ddev(*command)
+ assert result.exit_code == 0
+
+
+@pytest.mark.parametrize(
+ (
+ "platform",
+ "version",
+ "format",
+ "to_dd_org",
+ "to_dd_key",
+ "commit",
+ "dependency_sizes_path",
+ "create_dependency_sizes_file",
+ "should_abort",
+ ),
+ [
+ # Valid cases
+ ("linux-x86_64", "3.12", ["csv"], None, None, None, None, False, False),
+ ("macos-x86_64", "3.12", [], None, None, "1234567890", None, False, False),
+ ("linux-aarch64", "3.12", [], None, None, None, Path("sizes"), True, False),
+ # Invalid platform
+ ("invalid-platform", "3.12", [], None, None, None, None, False, True),
+ # Invalid version
+ ("linux-x86_64", "2.7", [], None, None, None, None, False, True),
+ # Invalid dependency sizes file
+ ("linux-x86_64", "3.12", [], None, None, None, Path("sizes"), False, True),
+ # Both commit and dependency_sizes
+ (
+ "linux-x86_64",
+ "3.12",
+ [],
+ None,
+ None,
+ "1234567890",
+ Path("sizes"),
+ True,
+ True,
),
- patch(
- "ddev.cli.size.status.get_valid_versions",
- return_value=({'3.12'}),
+ # Invalid format
+ ("linux-x86_64", "3.12", ["invalid-format"], None, None, None, None, False, True),
+ # Both to_dd_org and to_dd_key
+ (
+ "linux-x86_64",
+ "3.12",
+ [],
+ "test-org",
+ "test-key",
+ None,
+ None,
+ False,
+ True,
),
- ):
- result = ddev("size", "status", "--platform", "linux-aarch64", "--python", "2.10", "--compressed")
- assert result.exit_code != 0
-
-
-def test_status_wrong_plat_and_version(ddev):
- with (
- patch(
- "ddev.cli.size.status.get_valid_platforms",
- return_value=({'linux-x86_64', 'macos-x86_64', 'linux-aarch64', 'macos-aarch64', 'windows-x86_64'}),
+ # Multiple errors
+ (
+ "invalid-platform",
+ "2.7",
+ ["invalid-format"],
+ "test-org",
+ "test-key",
+ "1234567890",
+ Path("sizes"),
+ True,
+ True,
),
- patch(
- "ddev.cli.size.status.get_valid_versions",
- return_value=({'3.12'}),
- ),
- ):
- result = ddev("size", "status", "--platform", "linux", "--python", "2.10", "--compressed")
- assert result.exit_code != 0
+ ],
+ ids=[
+ "valid_simple",
+ "valid_with_commit",
+ "valid_with_dependency_sizes",
+ "invalid_platform",
+ "invalid_version",
+ "invalid_dependency_sizes_file",
+ "commit_and_dependency_sizes",
+ "invalid_format",
+ "to_dd_org_and_to_dd_key",
+ "multiple_errors",
+ ],
+)
+def test_validate_parameters(
+ platform,
+ version,
+ format,
+ to_dd_org,
+ to_dd_key,
+ commit,
+ dependency_sizes_path,
+ create_dependency_sizes_file,
+ should_abort,
+ tmp_path,
+):
+ valid_platforms = ["linux-x86_64", "macos-x86_64", "linux-aarch64", "macos-aarch64", "windows-x86_64"]
+ valid_versions = ["3.12"]
+
+ dependency_sizes = None
+ if dependency_sizes_path:
+ dependency_sizes = tmp_path / dependency_sizes_path
+ if create_dependency_sizes_file:
+ dependency_sizes.touch()
+ app = MagicMock()
+ app.abort.side_effect = SystemExit
+
+ if should_abort:
+ with pytest.raises(SystemExit):
+ validate_parameters(
+ valid_platforms,
+ valid_versions,
+ platform,
+ version,
+ format,
+ to_dd_org,
+ commit,
+ dependency_sizes,
+ to_dd_key,
+ app,
+ )
+ app.abort.assert_called_once()
+ else:
+ validate_parameters(
+ valid_platforms,
+ valid_versions,
+ platform,
+ version,
+ format,
+ to_dd_org,
+ commit,
+ dependency_sizes,
+ to_dd_key,
+ app,
+ )
+ app.abort.assert_not_called()
diff --git a/ddev/tests/size/test_common.py b/ddev/tests/size/test_common.py
index a171ce37729c7..8c6d790e73728 100644
--- a/ddev/tests/size/test_common.py
+++ b/ddev/tests/size/test_common.py
@@ -2,28 +2,30 @@
import json
import os
import zipfile
-from pathlib import Path
-from unittest.mock import MagicMock, Mock, mock_open, patch
+from unittest.mock import MagicMock, mock_open, patch
+
+import pytest
from ddev.cli.size.utils.common_funcs import (
check_python_version,
compress,
convert_to_human_readable_size,
extract_version_from_about_py,
- format_modules,
+ get_dependencies_from_json,
get_dependencies_list,
get_dependencies_sizes,
get_files,
get_gitignore_files,
- get_org,
get_valid_platforms,
get_valid_versions,
is_correct_dependency,
is_valid_integration_file,
+ parse_dep_sizes_json,
save_csv,
save_json,
save_markdown,
)
+from ddev.utils.fs import Path
def to_native_path(path: str) -> str:
@@ -80,26 +82,44 @@ def test_get_valid_versions():
assert versions == expected_versions
-def test_is_correct_dependency():
- assert is_correct_dependency("windows-x86_64", "3.12", "windows-x86_64-3.12")
- assert not is_correct_dependency("windows-x86_64", "3.12", "linux-x86_64-3.12")
- assert not is_correct_dependency("windows-x86_64", "3.13", "windows-x86_64-3.12")
-
-
-def test_convert_to_human_readable_size():
- assert convert_to_human_readable_size(500) == "500 B"
- assert convert_to_human_readable_size(1024) == "1.0 KB"
- assert convert_to_human_readable_size(1048576) == "1.0 MB"
- assert convert_to_human_readable_size(1073741824) == "1.0 GB"
-
-
-def test_is_valid_integration_file():
+@pytest.mark.parametrize(
+ "platform, version, dependency_file_name, expected",
+ [
+ pytest.param("windows-x86_64", "3.12", "windows-x86_64_3.12.txt", True, id="correct"),
+ pytest.param("windows-x86_64", "3.12", "linux-x86_64_3.12.txt", False, id="incorrect_platform"),
+ pytest.param("windows-x86_64", "3.13", "windows-x86_64_3.12.txt", False, id="incorrect_version"),
+ ],
+)
+def test_is_correct_dependency(platform, version, dependency_file_name, expected):
+ assert is_correct_dependency(platform, version, dependency_file_name) is expected
+
+
+@pytest.mark.parametrize(
+ "size_bytes, expected_string",
+ [
+ pytest.param(500, "500 B", id="Bytes"),
+ pytest.param(1024, "1.0 KiB", id="KiB"),
+ pytest.param(1048576, "1.0 MiB", id="MiB"),
+ pytest.param(1073741824, "1.0 GiB", id="GiB"),
+ ],
+)
+def test_convert_to_human_readable_size(size_bytes, expected_string):
+ assert convert_to_human_readable_size(size_bytes) == expected_string
+
+
+@pytest.mark.parametrize(
+ "file_path, expected",
+ [
+ pytest.param("datadog_checks/example.py", True, id="valid"),
+ pytest.param("__pycache__/file.py", False, id="pycache"),
+ pytest.param("datadog_checks_dev/example.py", False, id="checks_dev"),
+ pytest.param(".git/config", False, id="git"),
+ ],
+)
+def test_is_valid_integration_file(file_path, expected):
repo_path = "fake_repo"
with patch("ddev.cli.size.utils.common_funcs.get_gitignore_files", return_value=set()):
- assert is_valid_integration_file(to_native_path("datadog_checks/example.py"), repo_path)
- assert not is_valid_integration_file(to_native_path("__pycache__/file.py"), repo_path)
- assert not is_valid_integration_file(to_native_path("datadog_checks_dev/example.py"), repo_path)
- assert not is_valid_integration_file(to_native_path(".git/config"), repo_path)
+ assert is_valid_integration_file(to_native_path(file_path), repo_path) is expected
def test_get_dependencies_list():
@@ -130,7 +150,12 @@ def test_get_dependencies_sizes():
mock_response.__exit__.return_value = None
with patch("requests.get", return_value=mock_response):
file_data = get_dependencies_sizes(
- ["dependency1"], ["https://example.com/dependency1/dependency1-1.1.1-.whl"], ["1.1.1"], True
+ ["dependency1"],
+ ["https://example.com/dependency1/dependency1-1.1.1-.whl"],
+ ["1.1.1"],
+ True,
+ "linux-x86_64",
+ "3.12",
)
assert file_data == [
@@ -140,37 +165,11 @@ def test_get_dependencies_sizes():
"Size_Bytes": 11,
"Size": convert_to_human_readable_size(11),
"Type": "Dependency",
- }
- ]
-
-
-def test_format_modules():
- modules = [
- {"Name": "module1", "Type": "A", "Size_Bytes": 1500},
- {"Name": "module2", "Type": "B", "Size_Bytes": 3000},
- ]
- platform = "linux-aarch64"
- version = "3.12"
-
- expected_output = [
- {
- "Name": "module1",
- "Type": "A",
- "Size_Bytes": 1500,
- "Platform": "linux-aarch64",
+ "Platform": "linux-x86_64",
"Python_Version": "3.12",
- },
- {
- "Name": "module2",
- "Type": "B",
- "Size_Bytes": 3000,
- "Platform": "linux-aarch64",
- "Python_Version": "3.12",
- },
+ }
]
- assert format_modules(modules, platform, version) == expected_output
-
def test_get_files_grouped_and_with_versions():
repo_path = Path("fake_repo")
@@ -206,7 +205,7 @@ def mock_getsize(path):
),
patch("ddev.cli.size.utils.common_funcs.check_python_version", return_value=True),
):
- result = get_files(repo_path, compressed=False, py_version="3.12")
+ result = get_files(repo_path, compressed=False, py_version="3.12", platform="linux-x86_64")
expected = [
{
@@ -215,6 +214,8 @@ def mock_getsize(path):
"Size_Bytes": 3000,
"Size": "2.93 KB",
"Type": "Integration",
+ "Platform": "linux-x86_64",
+ "Python_Version": "3.12",
},
{
"Name": "integration2",
@@ -222,13 +223,22 @@ def mock_getsize(path):
"Size_Bytes": 3000,
"Size": "2.93 KB",
"Type": "Integration",
+ "Platform": "linux-x86_64",
+ "Python_Version": "3.12",
},
]
assert result == expected
-def test_check_version():
+@pytest.mark.parametrize(
+ "py_version, expected",
+ [
+ pytest.param("3", True, id="py3"),
+ pytest.param("2", False, id="py2"),
+ ],
+)
+def test_check_version(py_version, expected):
with (
patch(
"ddev.cli.size.utils.common_funcs.load_toml_file",
@@ -236,8 +246,7 @@ def test_check_version():
),
patch("ddev.cli.size.utils.common_funcs.os.path.exists", return_value=True),
):
- assert check_python_version("fake_repo", "integration1", "3")
- assert not check_python_version("fake_repo", "integration1", "2")
+ assert check_python_version("fake_repo", "integration1", py_version) is expected
def test_get_gitignore_files():
@@ -334,49 +343,105 @@ def test_save_markdown():
assert written_content == expected_writes
-def test_extract_version_from_about_py_pathlib():
+@pytest.mark.parametrize(
+ "file_content, expected_version",
+ [
+ pytest.param("__version__ = '1.2.3'", "1.2.3", id="version_present"),
+ pytest.param("not_version = 'not_defined'", "", id="version_not_present"),
+ ],
+)
+def test_extract_version_from_about_py(file_content, expected_version):
fake_path = Path("some") / "module" / "__about__.py"
- fake_content = "__version__ = '1.2.3'\n"
-
- with patch("ddev.cli.size.utils.common_funcs.open", mock_open(read_data=fake_content)):
+ with patch("ddev.cli.size.utils.common_funcs.open", mock_open(read_data=file_content)):
version = extract_version_from_about_py(str(fake_path))
+ assert version == expected_version
+
+
+def test_dep_parse_sizes_json(tmp_path):
+ compressed_data = json.dumps(
+ [
+ {
+ "Name": "dep1",
+ "Size_Bytes": 123,
+ "Size": "2 B",
+ "Type": "Dependency",
+ "Platform": "linux-x86_64",
+ "Python_Version": "3.12",
+ },
+ {
+ "Name": "module1",
+ "Size_Bytes": 123,
+ "Size": "2 B",
+ "Type": "Integration",
+ "Platform": "linux-x86_64",
+ "Python_Version": "3.12",
+ },
+ ]
+ )
+ uncompressed_data = json.dumps(
+ [
+ {
+ "Name": "dep1",
+ "Size_Bytes": 456,
+ "Size": "4 B",
+ "Type": "Dependency",
+ "Platform": "linux-x86_64",
+ "Python_Version": "3.12",
+ },
+ {
+ "Name": "module1",
+ "Size_Bytes": 456,
+ "Size": "4 B",
+ "Type": "Integration",
+ "Platform": "linux-x86_64",
+ "Python_Version": "3.12",
+ },
+ ]
+ )
- assert version == "1.2.3"
-
-
-def test_extract_version_from_about_py_no_version_pathlib():
- fake_path = Path("another") / "module" / "__about__.py"
- fake_content = "version = 'not_defined'\n"
-
- with patch("ddev.cli.size.utils.common_funcs.open", mock_open(read_data=fake_content)):
- version = extract_version_from_about_py(str(fake_path))
-
- assert version == ""
-
-
-def test_get_org():
- mock_app = Mock()
- mock_path = Mock()
-
- toml_data = """
- [orgs.default]
- api_key = "test_api_key"
- app_key = "test_app_key"
- site = "datadoghq.com"
- """
+ expected_output = {
+ "dep1": {
+ "compressed": 123,
+ "uncompressed": 456,
+ "version": None,
+ },
+ }
+ compressed_json_path = tmp_path / "compressed.json"
+ compressed_json_path.write_text(compressed_data)
+ uncompressed_json_path = tmp_path / "uncompressed.json"
+ uncompressed_json_path.write_text(uncompressed_data)
+ result = parse_dep_sizes_json(compressed_json_path, uncompressed_json_path)
- mock_app.config_file.path = mock_path
+ assert result == expected_output
- with (
- patch("ddev.cli.size.utils.common_funcs.open", mock_open(read_data=toml_data)),
- patch.object(mock_path, "open", mock_open(read_data=toml_data)),
- ):
- result = get_org(mock_app, "default")
- expected = {
- "api_key": "test_api_key",
- "app_key": "test_app_key",
- "site": "datadoghq.com",
- }
+def test_get_dependencies_from_json():
+ dep_size_dict = (
+ '{"dep1": {"compressed": 1, "uncompressed": 2, "version": "1.1.1"},\n'
+ '"dep2": {"compressed": 10, "uncompressed": 20, "version": "1.1.1"}}'
+ )
+ expected = [
+ {
+ "Name": "dep1",
+ "Version": "1.1.1",
+ "Size_Bytes": 1,
+ "Size": "1 B",
+ "Type": "Dependency",
+ "Platform": "linux-x86_64",
+ "Python_Version": "3.12",
+ },
+ {
+ "Name": "dep2",
+ "Version": "1.1.1",
+ "Size_Bytes": 10,
+ "Size": "10 B",
+ "Type": "Dependency",
+ "Platform": "linux-x86_64",
+ "Python_Version": "3.12",
+ },
+ ]
+ with patch('ddev.utils.fs.Path') as mock_path:
+ mock_path.read_text.return_value = dep_size_dict
+ result = get_dependencies_from_json(mock_path, "linux-x86_64", "3.12", True)
assert result == expected
diff --git a/ddev/tests/size/test_diff.py b/ddev/tests/size/test_diff.py
index 34c0b3406f420..1285f0f67bf84 100644
--- a/ddev/tests/size/test_diff.py
+++ b/ddev/tests/size/test_diff.py
@@ -4,7 +4,7 @@
import os
-from ddev.cli.size.diff import get_diff
+from ddev.cli.size.diff import calculate_diff
from ddev.cli.size.utils.common_funcs import convert_to_human_readable_size
@@ -12,42 +12,93 @@ def to_native_path(path: str) -> str:
return path.replace("/", os.sep)
-def test_get_diff():
+def test_calculate_diff():
size_before = [
- {"Name": "foo", "Version": "1.0.0", "Size_Bytes": 1000, "Type": "Integration"},
- {"Name": "bar", "Version": "2.0.0", "Size_Bytes": 2000, "Type": "Integration"},
- {"Name": "deleted", "Version": "3.0.0", "Size_Bytes": 1500, "Type": "Integration"},
+ {
+ "Name": "foo",
+ "Version": "1.0.0",
+ "Size_Bytes": 1000,
+ "Type": "Integration",
+ "Platform": "linux-aarch64",
+ "Python_Version": "3.12",
+ },
+ {
+ "Name": "bar",
+ "Version": "2.0.0",
+ "Size_Bytes": 2000,
+ "Type": "Integration",
+ "Platform": "linux-aarch64",
+ "Python_Version": "3.12",
+ },
+ {
+ "Name": "deleted",
+ "Version": "3.0.0",
+ "Size_Bytes": 1500,
+ "Type": "Integration",
+ "Platform": "linux-aarch64",
+ "Python_Version": "3.12",
+ },
]
size_after = [
- {"Name": "foo", "Version": "1.1.0", "Size_Bytes": 1200, "Type": "Integration"},
- {"Name": "bar", "Version": "2.0.0", "Size_Bytes": 2000, "Type": "Integration"},
- {"Name": "new", "Version": "0.1.0", "Size_Bytes": 800, "Type": "Integration"},
+ {
+ "Name": "foo",
+ "Version": "1.1.0",
+ "Size_Bytes": 1200,
+ "Type": "Integration",
+ "Platform": "linux-aarch64",
+ "Python_Version": "3.12",
+ },
+ {
+ "Name": "bar",
+ "Version": "2.0.0",
+ "Size_Bytes": 2000,
+ "Type": "Integration",
+ "Platform": "linux-aarch64",
+ "Python_Version": "3.12",
+ },
+ {
+ "Name": "new",
+ "Version": "0.1.0",
+ "Size_Bytes": 800,
+ "Type": "Integration",
+ "Platform": "linux-aarch64",
+ "Python_Version": "3.12",
+ },
]
- result = get_diff(size_before, size_after, "Integration")
+ result = calculate_diff(size_before, size_after, "linux-aarch64", "3.12")
expected = [
{
- "Name": "deleted (DELETED)",
+ "Name": "deleted",
"Version": "3.0.0",
"Type": "Integration",
+ "Platform": "linux-aarch64",
+ "Python_Version": "3.12",
"Size_Bytes": -1500,
"Size": convert_to_human_readable_size(-1500),
+ "Change_Type": "Removed",
},
{
"Name": "foo",
"Version": "1.0.0 -> 1.1.0",
"Type": "Integration",
+ "Platform": "linux-aarch64",
+ "Python_Version": "3.12",
"Size_Bytes": 200,
"Size": convert_to_human_readable_size(200),
+ "Change_Type": "Modified",
},
{
- "Name": "new (NEW)",
+ "Name": "new",
"Version": "0.1.0",
"Type": "Integration",
+ "Platform": "linux-aarch64",
+ "Python_Version": "3.12",
"Size_Bytes": 800,
"Size": convert_to_human_readable_size(800),
+ "Change_Type": "New",
},
]