From c39ed1a134132c3792a801349edb73d9753e7d66 Mon Sep 17 00:00:00 2001
From: Lucia Sanchez Bella <lucia.sanchezbella@datadoghq.com>
Date: Wed, 12 Mar 2025 12:52:12 +0100
Subject: [PATCH 01/40] basic status reporting

---
 package_size_analyzer/cli.py   |  38 +++++++
 package_size_analyzer/modes.py | 184 +++++++++++++++++++++++++++++++++
 package_size_analyzer/test.py  |  58 +++++++++++
 3 files changed, 280 insertions(+)
 create mode 100644 package_size_analyzer/cli.py
 create mode 100644 package_size_analyzer/modes.py
 create mode 100644 package_size_analyzer/test.py

diff --git a/package_size_analyzer/cli.py b/package_size_analyzer/cli.py
new file mode 100644
index 0000000000000..23c9a15d0bc0c
--- /dev/null
+++ b/package_size_analyzer/cli.py
@@ -0,0 +1,38 @@
+import argparse
+from modes import status_mode
+
+
+def main():
+    parser = argparse.ArgumentParser(description="Package Size Analyzer CLI")
+
+    # Define allowed choices
+    valid_modes = ["status", "diff", "timeline"]
+    valid_platforms = ["linux-aarch64", "linux-x86_64", "macos-x86_64", "windows-x86_64"]
+    valid_python_versions = ["3.12"]
+
+    # Arguments
+    parser.add_argument("mode", choices=valid_modes, help="Mode of operation")
+    parser.add_argument("--platform", choices=valid_platforms, required=False, help="Target platform")
+    parser.add_argument("--python", choices=valid_python_versions, required=False, help="Python version (MAJOR.MINOR)")
+    parser.add_argument("--compressed", action="store_true", help="Measure compressed size")
+
+    args = parser.parse_args()
+
+    # Execute the corresponding function based on the selected mode
+    if args.mode == "status":
+        # if an argument is not specified, all possibilities are executed
+        if args.platform is None and args.python is None:
+            for platform in valid_platforms:
+                for version in valid_python_versions:
+                    status_mode(platform, version, args.compressed)
+        elif args.platform is None:
+            for platform in valid_platforms:
+                status_mode(platform, args.python, args.compressed)
+        elif args.python is None:
+            for version in valid_python_versions:
+                status_mode(args.platform, version, args.compressed)
+        else:
+            status_mode(args.platform, args.python, args.compressed)
+
+if __name__ == "__main__":
+    main()
diff --git a/package_size_analyzer/modes.py b/package_size_analyzer/modes.py
new file mode 100644
index 0000000000000..b9e0f2f05b4ab
--- /dev/null
+++ b/package_size_analyzer/modes.py
@@ -0,0 +1,184 @@
+import requests
+import pandas as pd
+import re
+import os
+from tabulate import tabulate
+import zlib
+import io
+
+
+def status_mode(platform, version, compressed):
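+    # Build a per-module table of compressed sizes: walk the repo for integration files,
+    # fetch dependency sizes from the resolved lockfiles, group by module, and export CSVs.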
+    if compressed:
+        df1 = pd.DataFrame(get_compressed_files())
+        print("Compressed integrations done")
+
+        df2 = pd.DataFrame(get_compressed_dependencies(platform,version))
+        print("Compressed dependencies done")
+        
+
+        df = pd.concat([df1, df2], ignore_index=True)
+        
+        # Calculate the size for the whole module
+        df_grouped = df.groupby(["Name", 'Type'], as_index=False).agg({"Size (Bytes)": "sum"})
+        df_grouped = df_grouped.sort_values(by="Size (Bytes)", ascending=False).reset_index(drop=True)
+        
+
+        df_grouped["Size"] = df_grouped["Size (Bytes)"].apply(convert_size)
+        df_grouped.to_csv("compressed_status_" + platform + "_" + version + ".csv", index=False)
+        df.to_csv("compressed_status_all_" + platform + "_" + version + ".csv", index=False)
+        df_grouped = df_grouped.drop(columns=['Size (Bytes)'])
+        print('--------------', platform,version,'--------------')
+        print(tabulate(df_grouped, headers='keys', tablefmt='grid'))
+        print("CSV exported")
+
+
+
+
+def get_compressed_files():
+    print("Getting compressed integrations")
+
+    ignored_files = {"datadog_checks_dev", "datadog_checks_tests_helper"}
+    git_ignore = get_gitignore_files()
+    included_folder = "datadog_checks/"
+
+    script_path = os.path.abspath(__file__)
+    parent_dir = os.path.dirname(script_path)
+    repo_path = os.path.dirname(parent_dir)
+
+    file_data = []
+    for root, _, files in os.walk(repo_path):
+        for file in files:
+            file_path = os.path.join(root, file)
+
+            # Convert the path to a relative format within the repo
+            relative_path = os.path.relpath(file_path, repo_path)
+
+            # Filter files 
+            if is_valid_integration(relative_path, included_folder, ignored_files, git_ignore):
+                try:
+                    # Compress the file
+                    compressor = zlib.compressobj()
+                    compressed_size = 0
+
+                    # original_size = os.path.getsize(file_path)
+                    with open(file_path, "rb") as f:
+                        while chunk := f.read(8192):  # Read in 8KB chunks
+                            compressed_chunk = compressor.compress(chunk)
+                            compressed_size += len(compressed_chunk)
+
+                        compressed_size += len(compressor.flush())  # Flush the buffer
+                    integration = relative_path.split("/")[0]
+                    file_data.append({
+                        "File Path": relative_path,
+                        "Type": "Integration",
+                        "Name": integration,
+                        "Size (Bytes)": compressed_size
+                    })
+
+                except Exception as e:
+                    print(f"Error processing {relative_path}: {e}")  
+
+    return file_data
+    
+
+def get_compressed_dependencies(platform=None, version=None):
+    print("Getting compressed dependencies")
+
+    script_path = os.path.abspath(__file__)
+    parent_dir = os.path.dirname(script_path)
+    repo_path = os.path.dirname(parent_dir)
+    resolved_path = os.path.join(repo_path, ".deps/resolved")
+
+    if not os.path.exists(resolved_path) or not os.path.isdir(resolved_path):
+        print(f"Error: Directory not found {resolved_path}")
+        return []
+
+    file_data = []
+    
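+    # Each file in .deps/resolved is named for a platform/Python pair (e.g. "linux-x86_64-3.12"),
+    # so only the matching lockfile is parsed.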
+    for filename in os.listdir(resolved_path):
+        file_path = os.path.join(resolved_path, filename)
+        
+        if os.path.isfile(file_path) and is_correct_dependency(platform, version, filename):
+            deps, download_urls = get_dependencies(file_path)
+            return get_dependencies_sizes(deps, download_urls)
+    return file_data
+
+
+def is_correct_dependency(platform, version, name):
+    return platform in name and version in name
+        
+def get_dependencies_sizes(deps, download_urls):
+    file_data = []
+    for dep, url in zip(deps, download_urls):
+        dep_response = requests.head(url)
+        if dep_response.status_code != 200:
+            print(f"Error {response.status_code}: Unable to fetch the dependencies file")
+        else:
+            size = dep_response.headers.get("Content-Length", None)
+            file_data.append({"File Path": dep, "Type": "Dependency", "Name": dep, "Size (Bytes)": int(size)})
+        
+    return file_data 
+
+
+def get_dependencies(file_path):
+    download_urls = []
+    deps = []
+    try:
+        with open(file_path, "r", encoding="utf-8") as file:
+            file_content = file.read()
+            for line in file_content.splitlines():
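+                # Resolved lockfile lines look like "<name> @ <download URL>"; capture both parts.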
+                match = re.search(r"([\w\-\d\.]+) @ (https?://[^\s#]+)", line)
+                if match:
+                    deps.append(match.group(1))
+                    download_urls.append(match.group(2))
+    except Exception as e:
+        print(f"Error reading file {file_path}: {e}")
+    
+    return deps, download_urls
+
+def is_valid_integration(path, included_folder, ignored_files, git_ignore):
+    # It is not an integration
+    if path.startswith('.'):
+        return False
+    # It is part of an integration and it is not in the datadog_checks folder 
+    elif not (included_folder in path):
+        return False
+    # It is an irrelevant file
+    elif any(ignore in path for ignore in ignored_files):
+        return False
+    # This file is contained in .gitignore
+    elif any(ignore in path for ignore in git_ignore):
+        return False
+    else:
+        return True
+
+
+def get_gitignore_files():
+    script_path = os.path.abspath(__file__)
+    parent_dir = os.path.dirname(script_path)
+    repo_path = os.path.dirname(parent_dir)
+    gitignore_path = os.path.join(repo_path, ".gitignore")
+    if not os.path.exists(gitignore_path):
+        print(f"Error: .gitignore file not found at {gitignore_path}")
+        return []
+    
+    try:
+        with open(gitignore_path, "r", encoding="utf-8") as file:
+            gitignore_content = file.read()
+            ignored_patterns = [line.strip() for line in gitignore_content.splitlines() if line.strip() and not line.startswith("#")]
+            return ignored_patterns
+    except Exception as e:
+        print(f"Error reading .gitignore file: {e}")
+        return []
+
+def convert_size(size_bytes):
+    """Transforms bytes into a human-friendly format (KB, MB, GB) with 3 decimal places."""
+    for unit in ['B', 'KB', 'MB', 'GB']:
+        if size_bytes < 1024:
+            return (str(round(size_bytes, 2)) + unit)
+        size_bytes /= 1024
+    return (str(round(size_bytes, 2)) + "TB")
+
+
diff --git a/package_size_analyzer/test.py b/package_size_analyzer/test.py
new file mode 100644
index 0000000000000..89bc7c64a3c11
--- /dev/null
+++ b/package_size_analyzer/test.py
@@ -0,0 +1,58 @@
+
+import pytest
+import requests
+from unittest.mock import patch, mock_open, MagicMock
+from modes import (
+    get_compressed_dependencies,
+    get_gitignore_files,
+    convert_size,
+    is_valid_integration,
+    is_correct_dependency,
+    get_dependencies,
+    get_dependencies_sizes
+)
+
+def test_is_correct_dependency():
+    assert is_correct_dependency("windows-x86_64", "3.12", "windows-x86_64-3.12") == True
+    assert is_correct_dependency("windows-x86_64", "3.12", "linux-x86_64-3.12") == False
+    assert is_correct_dependency("windows-x86_64", "3.13", "windows-x86_64-3.12") == False
+
+   
+def test_convert_size():
+    assert convert_size(500) == "500B"
+    assert convert_size(1024) == "1.0KB"
+    assert convert_size(1048576) == "1.0MB"
+    assert convert_size(1073741824) == "1.0GB"
+
+def test_is_valid_integration():
+    included_folder = "datadog_checks/"
+    ignored_files = {"datadog_checks_dev", "datadog_checks_tests_helper"}
+    git_ignore = [".git", "__pycache__"]
+    
+    assert is_valid_integration("datadog_checks/example.py", included_folder, ignored_files, git_ignore) == True
+    assert is_valid_integration("__pycache__/file.py", included_folder, ignored_files, git_ignore) == False
+    assert is_valid_integration("datadog_checks_dev/example.py", included_folder, ignored_files, git_ignore) == False
+    assert is_valid_integration(".git/config", included_folder, ignored_files, git_ignore) == False
+
+def test_get_dependencies():
+    file_content = "dependency1 @ https://example.com/dependency1.whl\ndependency2 @ https://example.com/dependency2.whl"
+    mock_open_obj = mock_open(read_data=file_content)
+    with patch("builtins.open", mock_open_obj):
+        deps, urls = get_dependencies("fake_path")
+    assert deps == ["dependency1", "dependency2"]
+    assert urls == ["https://example.com/dependency1.whl", "https://example.com/dependency2.whl"]
+
+def test_get_gitignore_files():
+    mock_gitignore = "__pycache__/\n*.log\n"  # Sample .gitignore file
+    with patch("builtins.open", mock_open(read_data=mock_gitignore)):
+        with patch("os.path.exists", return_value=True):
+            ignored_patterns = get_gitignore_files()
+    assert ignored_patterns == ["__pycache__/", "*.log"]
+
+def test_get_dependencies_sizes():
+    mock_response = MagicMock()
+    mock_response.status_code = 200
+    mock_response.headers = {"Content-Length": "12345"}
+    with patch("requests.head", return_value=mock_response):
+        file_data = get_dependencies_sizes(["dependency1"], ["https://example.com/dependency1.whl"])
+    assert file_data == [{"File Path": "dependency1", "Type": "Dependency", "Name": "dependency1", "Size (Bytes)": 12345}]

From a9c824dd7996328c8fece9553f4033a5d5d7d46a Mon Sep 17 00:00:00 2001
From: Lucia Sanchez Bella <lucia.sanchezbella@datadoghq.com>
Date: Thu, 20 Mar 2025 12:51:05 +0100
Subject: [PATCH 02/40] integration into ddev

---
 ddev/pyproject.toml                |   1 +
 ddev/src/ddev/cli/__init__.py      |   2 +
 ddev/src/ddev/cli/size/__init__.py |  19 +++
 ddev/src/ddev/cli/size/status.py   | 216 +++++++++++++++++++++++++++++
 ddev/tests/cli/size/__init__.py    |   3 +
 ddev/tests/cli/size/test_status.py | 130 +++++++++++++++++
 6 files changed, 371 insertions(+)
 create mode 100644 ddev/src/ddev/cli/size/__init__.py
 create mode 100644 ddev/src/ddev/cli/size/status.py
 create mode 100644 ddev/tests/cli/size/__init__.py
 create mode 100644 ddev/tests/cli/size/test_status.py

diff --git a/ddev/pyproject.toml b/ddev/pyproject.toml
index 7218fa7a26a98..8a1ab15edbb2c 100644
--- a/ddev/pyproject.toml
+++ b/ddev/pyproject.toml
@@ -40,6 +40,7 @@ dependencies = [
     "tomli-w",
     "tomlkit",
     "tqdm",
+    "requests"
 ]
 dynamic = ["version"]
 
diff --git a/ddev/src/ddev/cli/__init__.py b/ddev/src/ddev/cli/__init__.py
index 302f859cd9f54..a5924607a880c 100644
--- a/ddev/src/ddev/cli/__init__.py
+++ b/ddev/src/ddev/cli/__init__.py
@@ -25,6 +25,7 @@
 from ddev.plugin import specs
 from ddev.utils.ci import running_in_ci
 from ddev.utils.fs import Path
+from ddev.cli.size import size
 
 
 @click.group(context_settings={'help_option_names': ['-h', '--help']}, invoke_without_command=True)
@@ -149,6 +150,7 @@ def ddev(
 ddev.add_command(status)
 ddev.add_command(test)
 ddev.add_command(validate)
+ddev.add_command(size)
 
 __management_command = os.environ.get('PYAPP_COMMAND_NAME', '')
 if __management_command:
diff --git a/ddev/src/ddev/cli/size/__init__.py b/ddev/src/ddev/cli/size/__init__.py
new file mode 100644
index 0000000000000..25863ae1dbc5e
--- /dev/null
+++ b/ddev/src/ddev/cli/size/__init__.py
@@ -0,0 +1,19 @@
+# (C) Datadog, Inc. 2022-present
+# All rights reserved
+# Licensed under a 3-clause BSD style license (see LICENSE)
+
+import click
+
+from ddev.cli.size.status import status
+
+
+@click.group(short_help='Get the size of integrations and dependencies by platform and python version')
+def size():
+    """Package Size Analyzer"""
+    pass
+
+
+size.add_command(status)
+
+if __name__ == "__main__":
+    size()
\ No newline at end of file
diff --git a/ddev/src/ddev/cli/size/status.py b/ddev/src/ddev/cli/size/status.py
new file mode 100644
index 0000000000000..147e1dd34f216
--- /dev/null
+++ b/ddev/src/ddev/cli/size/status.py
@@ -0,0 +1,216 @@
+# (C) Datadog, Inc. 2022-present
+# All rights reserved
+# Licensed under a 3-clause BSD style license (see LICENSE)
+
+import click
+import requests
+import re
+import os
+import zlib
+import io
+from pathlib import Path
+import sys
+import csv as csv_lib
+
+
+VALID_PLATFORMS = ["linux-aarch64", "linux-x86_64", "macos-x86_64", "windows-x86_64"]
+VALID_PYTHON_VERSIONS = ["3.12"]
+REPO_PATH = Path(__file__).resolve().parents[5]
+
+
+
+@click.command()
+@click.option('--platform', type=click.Choice(VALID_PLATFORMS), help="Target platform")
+@click.option('--python', 'version', type=click.Choice(VALID_PYTHON_VERSIONS), help="Python version (MAJOR.MINOR)")
+@click.option('--compressed', is_flag=True, help="Measure compressed size")
+@click.option('--csv', is_flag=True, help="Output in CSV format")
+@click.pass_obj
+def status(app, platform, version, compressed, csv):
+    platforms = VALID_PLATFORMS if platform is None else [platform]
+    versions = VALID_PYTHON_VERSIONS if version is None else [version]
+
+    for i, (plat, ver) in enumerate([(p, v) for p in platforms for v in versions]):
+        status_mode(app, plat, ver, compressed, csv, i)
+
+
+
+def status_mode(app,platform, version, compressed,csv,i):
+    if compressed:
+        modules = get_compressed_files(app) + get_compressed_dependencies(app, platform,version)
+        
+        grouped_modules = group_modules(modules,platform, version)
+        grouped_modules.sort(key=lambda x: x['Size (Bytes)'], reverse=True)
+        
+        if csv:
+            headers = grouped_modules[0].keys()
+            if i == 0:
+                app.display(",".join(headers)) # comas alrededor
+
+            for row in grouped_modules:
+                app.display(",".join(str(row[h]) for h in headers))
+        else:
+            modules_table = {col: {} for col in grouped_modules[0].keys()}
+            for i,row in enumerate(grouped_modules):
+                for key,value in row.items():
+                    modules_table[key][i] = str(value)
+            app.display_table(platform + " " + version, modules_table)
+
+    
+
+def group_modules(modules, platform, version):
+    grouped_aux = {}
+
+    for file in modules:
+        key = (file['Name'], file['Type'])
+        grouped_aux[key] = grouped_aux.get(key, 0) + file["Size (Bytes)"]
+
+    return [{'Name': name, 'Type': type, 'Size (Bytes)': size, 'Size': convert_size(size), 'Platform': platform, 'Version': version} for (name,type), size in grouped_aux.items()]
+
+
+def get_compressed_files(app):
+    #print("Getting compressed integrations")
+
+    ignored_files = {"datadog_checks_dev", "datadog_checks_tests_helper"}
+    git_ignore = get_gitignore_files(app)
+    included_folder = "datadog_checks/"
+
+    # script_path = 
+    #REPO_PATH = os.path.abspath(os.path.join(os.path.abspath(__file__), "../../../../../../")) 
+
+    file_data = []
+    for root, _, files in os.walk(REPO_PATH):
+        for file in files:
+            file_path = os.path.join(root, file)
+
+            # Convert the path to a relative format within the repo
+            relative_path = os.path.relpath(file_path, REPO_PATH)
+
+            # Filter files 
+            if is_valid_integration(relative_path, included_folder, ignored_files, git_ignore):
+                try:
+                    # Compress the file
+                    compressor = zlib.compressobj()
+                    compressed_size = 0
+
+                    # original_size = os.path.getsize(file_path)
+                    with open(file_path, "rb") as f:
+                        while chunk := f.read(8192):  # Read in 8KB chunks
+                            compressed_chunk = compressor.compress(chunk)
+                            compressed_size += len(compressed_chunk)
+
+                        compressed_size += len(compressor.flush())  # Flush the buffer
+                    integration = relative_path.split("/")[0]
+                    file_data.append({
+                        "File Path": relative_path,
+                        "Type": "Integration",
+                        "Name": integration,
+                        "Size (Bytes)": compressed_size
+                    })
+
+                except Exception as e:
+                    app.display_error(f"Error processing {relative_path}: {e}") 
+                    sys.exit(1) 
+
+    return file_data
+    
+
+def get_compressed_dependencies(app,platform, version):
+    #print("Getting compressed dependencies")
+
+    #script_path = os.path.abspath(__file__)
+    #REPO_PATH = os.path.abspath(os.path.join(script_path, "../../../../../../"))
+    resolved_path = os.path.join(REPO_PATH, ".deps/resolved")
+
+    if not os.path.exists(resolved_path) or not os.path.isdir(resolved_path):
+        app.display_error(f"Error: Directory not found {resolved_path}")
+        sys.exit(1)
+
+    
+    for filename in os.listdir(resolved_path):
+        file_path = os.path.join(resolved_path, filename)
+        
+        if os.path.isfile(file_path) and is_correct_dependency(platform, version, filename):
+            deps, download_urls = get_dependencies(app, file_path)
+            return get_dependencies_sizes(app, deps, download_urls)
+    
+    
+    
+
+
+def is_correct_dependency(platform, version, name):
+    return platform in name and version in name
+        
+def get_dependencies_sizes(app, deps, download_urls):
+    file_data = []
+    for dep, url in zip(deps, download_urls):
+        dep_response = requests.head(url)
+        if dep_response.status_code != 200:
+            app.display_error(f"Error {dep_response.status_code}: Unable to fetch the dependencies file")
+            sys.exit(1)
+        else:
+            size = dep_response.headers.get("Content-Length", None)
+            file_data.append({"File Path": dep, "Type": "Dependency", "Name": dep, "Size (Bytes)": int(size)})
+        
+    return file_data 
+
+
+def get_dependencies(app,file_path):
+    download_urls = []
+    deps = []
+    try:
+        with open(file_path, "r", encoding="utf-8") as file:
+            file_content = file.read()
+            for line in file_content.splitlines():
+                match = re.search(r"([\w\-\d\.]+) @ (https?://[^\s#]+)", line)
+                if match:
+                    deps.append(match.group(1))
+                    download_urls.append(match.group(2))
+    except Exception as e:
+        app.display_error(f"Error reading file {file_path}: {e}")
+        sys.exit(1)
+    
+    return deps, download_urls
+
+def is_valid_integration(path, included_folder, ignored_files, git_ignore):
+    # It is not an integration
+    if path.startswith('.'):
+        return False
+    # It is part of an integration and it is not in the datadog_checks folder 
+    elif not (included_folder in path):
+        return False
+    # It is an irrelevant file
+    elif any(ignore in path for ignore in ignored_files):
+        return False
+    # This file is contained in .gitignore
+    elif any(ignore in path for ignore in git_ignore):
+        return False
+    else:
+        return True
+
+
+def get_gitignore_files(app):
+    #script_path = os.path.abspath(__file__)
+    #repo_root = os.path.abspath(os.path.join(script_path, "../../../../../../")) 
+    gitignore_path = os.path.join(REPO_PATH, ".gitignore")
+    if not os.path.exists(gitignore_path):
+        app.display_error(f"Error: .gitignore file not found at {gitignore_path}")
+        sys.exit(1)
+    
+    try:
+        with open(gitignore_path, "r", encoding="utf-8") as file:
+            gitignore_content = file.read()
+            ignored_patterns = [line.strip() for line in gitignore_content.splitlines() if line.strip() and not line.startswith("#")]
+            return ignored_patterns
+    except Exception as e:
+        app.display_error(f"Error reading .gitignore file: {e}")
+        sys.exit(1)
+
+def convert_size(size_bytes):
+    #Transforms bytes into a human-friendly format (KB, MB, GB)
+    for unit in [' B', ' KB', ' MB', ' GB']:
+        if size_bytes < 1024:
+            return (str(round(size_bytes, 2)) + unit)
+        size_bytes /= 1024
+    return (str(round(size_bytes, 2)) + " TB")
+
+
diff --git a/ddev/tests/cli/size/__init__.py b/ddev/tests/cli/size/__init__.py
new file mode 100644
index 0000000000000..3eff9712cbcf5
--- /dev/null
+++ b/ddev/tests/cli/size/__init__.py
@@ -0,0 +1,3 @@
+# (C) Datadog, Inc. 2022-present
+# All rights reserved
+# Licensed under a 3-clause BSD style license (see LICENSE)
diff --git a/ddev/tests/cli/size/test_status.py b/ddev/tests/cli/size/test_status.py
new file mode 100644
index 0000000000000..a7625797ef763
--- /dev/null
+++ b/ddev/tests/cli/size/test_status.py
@@ -0,0 +1,130 @@
+# (C) Datadog, Inc. 2022-present
+# All rights reserved
+# Licensed under a 3-clause BSD style license (see LICENSE)
+
+from unittest.mock import patch, mock_open, MagicMock
+import os
+from ddev.cli.size.status import (
+    get_compressed_dependencies,
+    get_gitignore_files,
+    convert_size,
+    is_valid_integration,
+    is_correct_dependency,
+    get_dependencies,
+    get_dependencies_sizes,
+    group_modules
+)
+from ddev.cli.application import Application
+
+
+def test_is_correct_dependency():
+    assert is_correct_dependency("windows-x86_64", "3.12", "windows-x86_64-3.12")
+    assert not is_correct_dependency("windows-x86_64", "3.12", "linux-x86_64-3.12") 
+    assert not is_correct_dependency("windows-x86_64", "3.13", "windows-x86_64-3.12")
+
+   
+def test_convert_size():
+    assert convert_size(500) == "500 B"
+    assert convert_size(1024) == "1.0 KB"
+    assert convert_size(1048576) == "1.0 MB"
+    assert convert_size(1073741824) == "1.0 GB"
+
+def test_is_valid_integration():
+    included_folder = "datadog_checks/"
+    ignored_files = {"datadog_checks_dev", "datadog_checks_tests_helper"}
+    git_ignore = [".git", "__pycache__"]
+    
+    assert is_valid_integration("datadog_checks/example.py", included_folder, ignored_files, git_ignore)
+    assert not is_valid_integration("__pycache__/file.py", included_folder, ignored_files, git_ignore) 
+    assert not is_valid_integration("datadog_checks_dev/example.py", included_folder, ignored_files, git_ignore)
+    assert not is_valid_integration(".git/config", included_folder, ignored_files, git_ignore)
+
+def test_get_dependencies(terminal):
+    file_content = "dependency1 @ https://example.com/dependency1.whl\ndependency2 @ https://example.com/dependency2.whl"
+    mock_open_obj = mock_open(read_data=file_content)
+    with patch("builtins.open", mock_open_obj):
+        deps, urls = get_dependencies(terminal, "fake_path")
+    assert deps == ["dependency1", "dependency2"]
+    assert urls == ["https://example.com/dependency1.whl", "https://example.com/dependency2.whl"]
+
+def test_get_gitignore_files(terminal):
+    mock_gitignore = "__pycache__/\n*.log\n"  # Sample .gitignore file
+    with patch("builtins.open", mock_open(read_data=mock_gitignore)):
+        with patch("os.path.exists", return_value=True):
+            ignored_patterns = get_gitignore_files(terminal)
+    assert ignored_patterns == ["__pycache__/", "*.log"]
+
+def test_get_dependencies_sizes(terminal):
+    mock_response = MagicMock()
+    mock_response.status_code = 200
+    mock_response.headers = {"Content-Length": "12345"}
+    with patch("requests.head", return_value=mock_response):
+        file_data = get_dependencies_sizes(terminal, ["dependency1"], ["https://example.com/dependency1.whl"])
+    assert file_data == [{"File Path": "dependency1", "Type": "Dependency", "Name": "dependency1", "Size (Bytes)": 12345}]
+
+def test_get_compressed_dependencies(terminal):
+    platform = "windows-x86_64"
+    version = "3.12"
+    
+    fake_file_content = "dependency1 @ https://example.com/dependency1.whl\ndependency2 @ https://example.com/dependency2.whl"
+    
+    mock_response = MagicMock()
+    mock_response.status_code = 200
+    mock_response.headers = {"Content-Length": "12345"}
+    
+    with patch("os.path.exists", return_value=True), \
+         patch("os.path.isdir", return_value=True), \
+         patch("os.listdir", return_value=[f"{platform}-{version}"]), \
+         patch("os.path.isfile", return_value=True), \
+         patch("builtins.open", mock_open(read_data=fake_file_content)), \
+         patch("requests.head", return_value=mock_response):
+        
+        file_data = get_compressed_dependencies(terminal, platform, version)
+    
+    assert file_data == [
+        {"File Path": "dependency1", "Type": "Dependency", "Name": "dependency1", "Size (Bytes)": 12345},
+        {"File Path": "dependency2", "Type": "Dependency", "Name": "dependency2", "Size (Bytes)": 12345},
+    ]
+def test_group_modules():
+    modules = [
+        {"Name": "module1", "Type": "A", "Size (Bytes)": 1500},
+        {"Name": "module2", "Type": "B", "Size (Bytes)": 3000},
+        {"Name": "module1", "Type": "A", "Size (Bytes)": 2500},  
+        {"Name": "module3", "Type": "A", "Size (Bytes)": 4000},
+    ]
+
+    platform = "linux-aarch64"
+    version = "3.12"
+
+    expected_output = [
+        {"Name": "module1", "Type": "A", "Size (Bytes)": 4000, "Size": "3.91 KB", "Platform": "linux-aarch64", "Version": "3.12"},
+        {"Name": "module2", "Type": "B", "Size (Bytes)": 3000, "Size": "2.93 KB", "Platform": "linux-aarch64", "Version": "3.12"},
+        {"Name": "module3", "Type": "A", "Size (Bytes)": 4000, "Size": "3.91 KB", "Platform": "linux-aarch64", "Version": "3.12"},
+    ]
+
+    assert group_modules(modules, platform, version) == expected_output
+
+def test_status_no_args(ddev):
+    result = ddev('size', 'status', '--compressed')
+    assert result.exit_code == 0
+
+def test_status(ddev):
+    result = ddev('size', 'status', '--platform', 'linux-aarch64', '--python', '3.12', '--compressed')
+    assert result.exit_code == 0
+
+def test_status_csv(ddev):
+    result = ddev('size', 'status', '--platform', 'linux-aarch64', '--python', '3.12', '--compressed', '--csv')
+    assert result.exit_code == 0
+
+def test_status_fail(ddev):
+    result = ddev('size', 'status', '--platform', 'linux', '--python', '3.12', '--compressed')
+    assert result.exit_code != 0
+
+def test_status_fail2(ddev):
+    result = ddev('size', 'status', '--platform', 'linux-aarch64', '--python', '2.10', '--compressed')
+    assert result.exit_code != 0
+
+def test_status_fail3(ddev):
+    result = ddev('size', 'status', '--platform', 'linux', '--python' ,'2.10', '--compressed')
+    assert result.exit_code != 0
+

From b4d0f5fb4c015f6970d80e66d462e8b19c20fa55 Mon Sep 17 00:00:00 2001
From: Lucia Sanchez Bella <lucia.sanchezbella@datadoghq.com>
Date: Thu, 20 Mar 2025 12:54:36 +0100
Subject: [PATCH 03/40] clean commented code

---
 ddev/src/ddev/cli/size/status.py | 14 ++------------
 1 file changed, 2 insertions(+), 12 deletions(-)

diff --git a/ddev/src/ddev/cli/size/status.py b/ddev/src/ddev/cli/size/status.py
index 147e1dd34f216..97f52300ae562 100644
--- a/ddev/src/ddev/cli/size/status.py
+++ b/ddev/src/ddev/cli/size/status.py
@@ -68,15 +68,11 @@ def group_modules(modules, platform, version):
 
 
 def get_compressed_files(app):
-    #print("Getting compressed integrations")
 
     ignored_files = {"datadog_checks_dev", "datadog_checks_tests_helper"}
     git_ignore = get_gitignore_files(app)
     included_folder = "datadog_checks/"
 
-    # script_path = 
-    #REPO_PATH = os.path.abspath(os.path.join(os.path.abspath(__file__), "../../../../../../")) 
-
     file_data = []
     for root, _, files in os.walk(REPO_PATH):
         for file in files:
@@ -88,7 +84,6 @@ def get_compressed_files(app):
             # Filter files 
             if is_valid_integration(relative_path, included_folder, ignored_files, git_ignore):
                 try:
-                    # Compress the file
                     compressor = zlib.compressobj()
                     compressed_size = 0
 
@@ -115,10 +110,7 @@ def get_compressed_files(app):
     
 
 def get_compressed_dependencies(app,platform, version):
-    #print("Getting compressed dependencies")
-
-    #script_path = os.path.abspath(__file__)
-    #REPO_PATH = os.path.abspath(os.path.join(script_path, "../../../../../../"))
+    
     resolved_path = os.path.join(REPO_PATH, ".deps/resolved")
 
     if not os.path.exists(resolved_path) or not os.path.isdir(resolved_path):
@@ -188,9 +180,7 @@ def is_valid_integration(path, included_folder, ignored_files, git_ignore):
         return True
 
 
-def get_gitignore_files(app):
-    #script_path = os.path.abspath(__file__)
-    #repo_root = os.path.abspath(os.path.join(script_path, "../../../../../../")) 
+def get_gitignore_files(app): 
     gitignore_path = os.path.join(REPO_PATH, ".gitignore")
     if not os.path.exists(gitignore_path):
         app.display_error(f"Error: .gitignore file not found at {gitignore_path}")

From 79b0fa80f22f7031df9adc1a52ecf013220e462b Mon Sep 17 00:00:00 2001
From: Lucia Sanchez Bella <lucia.sanchezbella@datadoghq.com>
Date: Tue, 25 Mar 2025 12:47:43 +0100
Subject: [PATCH 04/40] add diff mode

---
 ddev/src/ddev/cli/__init__.py      |   2 +-
 ddev/src/ddev/cli/size/GitRepo.py  |  27 +++++
 ddev/src/ddev/cli/size/__init__.py |   5 +-
 ddev/src/ddev/cli/size/common.py   | 144 ++++++++++++++++++++++++++
 ddev/src/ddev/cli/size/diff.py     | 151 +++++++++++++++++++++++++++
 ddev/src/ddev/cli/size/status.py   | 159 +++++-----------------------
 ddev/tests/cli/size/test_common.py | 141 +++++++++++++++++++++++++
 ddev/tests/cli/size/test_diff.py   | 112 ++++++++++++++++++++
 ddev/tests/cli/size/test_status.py | 160 +++++++++++++----------------
 9 files changed, 676 insertions(+), 225 deletions(-)
 create mode 100644 ddev/src/ddev/cli/size/GitRepo.py
 create mode 100644 ddev/src/ddev/cli/size/common.py
 create mode 100644 ddev/src/ddev/cli/size/diff.py
 create mode 100644 ddev/tests/cli/size/test_common.py
 create mode 100644 ddev/tests/cli/size/test_diff.py

diff --git a/ddev/src/ddev/cli/__init__.py b/ddev/src/ddev/cli/__init__.py
index a5924607a880c..e16dc80db4146 100644
--- a/ddev/src/ddev/cli/__init__.py
+++ b/ddev/src/ddev/cli/__init__.py
@@ -18,6 +18,7 @@
 from ddev.cli.env import env
 from ddev.cli.meta import meta
 from ddev.cli.release import release
+from ddev.cli.size import size
 from ddev.cli.status import status
 from ddev.cli.test import test
 from ddev.cli.validate import validate
@@ -25,7 +26,6 @@
 from ddev.plugin import specs
 from ddev.utils.ci import running_in_ci
 from ddev.utils.fs import Path
-from ddev.cli.size import size
 
 
 @click.group(context_settings={'help_option_names': ['-h', '--help']}, invoke_without_command=True)
diff --git a/ddev/src/ddev/cli/size/GitRepo.py b/ddev/src/ddev/cli/size/GitRepo.py
new file mode 100644
index 0000000000000..04e736b1058b5
--- /dev/null
+++ b/ddev/src/ddev/cli/size/GitRepo.py
@@ -0,0 +1,27 @@
+import os
+import shutil
+import tempfile
+import subprocess
+
+class GitRepo:
+    def __init__(self, url):
+        self.url = url
+        self.repo_dir = None
+
+    def __enter__(self):
+        self.repo_dir = tempfile.mkdtemp()
+        self._run("git init")
+        self._run(f"git remote add origin {self.url}")
+        return self
+
+    def _run(self, cmd):
+        subprocess.run(cmd, shell=True, cwd=self.repo_dir, check=True)
+
+    def checkout_commit(self, commit):
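+        # Shallow-fetch only the requested commit so the temporary clone stays small.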
+        self._run(f"git fetch --depth 1 origin {commit}")
+        self._run(f"git checkout {commit}")
+
+
+    def __exit__(self, exception_type, exception_value, exception_traceback):
+        if self.repo_dir and os.path.exists(self.repo_dir):
+            shutil.rmtree(self.repo_dir)
\ No newline at end of file
diff --git a/ddev/src/ddev/cli/size/__init__.py b/ddev/src/ddev/cli/size/__init__.py
index 25863ae1dbc5e..1cb5140b75d23 100644
--- a/ddev/src/ddev/cli/size/__init__.py
+++ b/ddev/src/ddev/cli/size/__init__.py
@@ -5,7 +5,7 @@
 import click
 
 from ddev.cli.size.status import status
-
+from ddev.cli.size.diff import diff
 
 @click.group(short_help='Get the size of integrations and dependencies by platform and python version')
 def size():
@@ -14,6 +14,7 @@ def size():
 
 
 size.add_command(status)
+size.add_command(diff)
 
 if __name__ == "__main__":
-    size()
\ No newline at end of file
+    size()
diff --git a/ddev/src/ddev/cli/size/common.py b/ddev/src/ddev/cli/size/common.py
new file mode 100644
index 0000000000000..922b5ca748a4f
--- /dev/null
+++ b/ddev/src/ddev/cli/size/common.py
@@ -0,0 +1,144 @@
+# (C) Datadog, Inc. 2022-present
+# All rights reserved
+# Licensed under a 3-clause BSD style license (see LICENSE)
+import sys
+import re
+import requests
+import os
+import zlib
+
+# Utilities
+
+# TODO: check whether an equivalent helper already exists
+def convert_size(size_bytes):
+    # Transforms bytes into a human-friendly format (KB, MB, GB)
+    for unit in [' B', ' KB', ' MB', ' GB']:
+        if size_bytes < 1024:
+            return str(round(size_bytes, 2)) + unit
+        size_bytes /= 1024
+    return str(round(size_bytes, 2)) + " TB"
+
+
+def is_valid_integration(path, included_folder, ignored_files, git_ignore):
+    # It is not an integration
+    if path.startswith('.'):
+        return False
+    # It is part of an integration and it is not in the datadog_checks folder
+    elif included_folder not in path:
+        return False
+    # It is an irrelevant file
+    elif any(ignore in path for ignore in ignored_files):
+        return False
+    # This file is contained in .gitignore
+    elif any(ignore in path for ignore in git_ignore):
+        return False
+    else:
+        return True
+
+
+def is_correct_dependency(platform, version, name):
+    return platform in name and version in name
+
+def print_csv(app, i, modules):
+    headers = modules[0].keys()
+    if i == 0:
+        app.display(",".join(headers))  
+
+    for row in modules:
+        app.display(",".join(format(str(row[h])) for h in headers))
+
+def format(s):
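+    # CSV escaping: wrap values that contain commas in double quotes so they stay one field.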
+    if "," in s:
+        return '"' + s + '"'
+    else:
+        return s
+    
+def print_table(app, modules, platform, version):
+    modules_table = {col: {} for col in modules[0].keys()}
+    for i, row in enumerate(modules):
+        for key, value in row.items():
+            modules_table[key][i] = str(value)
+    app.display_table(platform + " " + version, modules_table)
+    
+def get_dependencies_sizes(app, deps, download_urls):
+    file_data = []
+    for dep, url in zip(deps, download_urls, strict=False):
+        dep_response = requests.head(url)
+        if dep_response.status_code != 200:
+            app.display_error(f"Error {dep_response.status_code}: Unable to fetch the dependencies file")
+            sys.exit(1)
+        else:
+            size = dep_response.headers.get("Content-Length", None)
+            file_data.append({"File Path": dep, "Type": "Dependency", "Name": dep, "Size (Bytes)": int(size)})
+
+    return file_data
+
+def get_dependencies(app, file_path):
+    download_urls = []
+    deps = []
+    try:
+        with open(file_path, "r", encoding="utf-8") as file:
+            file_content = file.read()
+            for line in file_content.splitlines():
+                match = re.search(r"([\w\-\d\.]+) @ (https?://[^\s#]+)", line)
+                if match:
+                    deps.append(match.group(1))
+                    download_urls.append(match.group(2))
+    except Exception as e:
+        app.display_error(f"Error reading file {file_path}: {e}")
+        sys.exit(1)
+
+    return deps, download_urls
+
+
+def group_modules(modules, platform, version):
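+    # Sum per-file sizes into a single row per (Name, Type) pair and attach the platform/version.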
+    grouped_aux = {}
+
+    for file in modules:
+        key = (file['Name'], file['Type'])
+        grouped_aux[key] = grouped_aux.get(key, 0) + file["Size (Bytes)"]
+
+    return [
+        {
+            'Name': name,
+            'Type': type,
+            'Size (Bytes)': size,
+            'Size': convert_size(size),
+            'Platform': platform,
+            'Version': version,
+        }
+        for (name, type), size in grouped_aux.items()
+    ]
+
+def get_gitignore_files(app, repo_path):
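+    # Read the repo's .gitignore and return its non-comment patterns so ignored files
+    # are excluded from the size measurements.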
+    gitignore_path = os.path.join(repo_path, ".gitignore")
+    if not os.path.exists(gitignore_path):
+        app.display_error(f"Error: .gitignore file not found at {gitignore_path}")
+        sys.exit(1)
+
+    try:
+        with open(gitignore_path, "r", encoding="utf-8") as file:
+            gitignore_content = file.read()
+            ignored_patterns = [
+                line.strip() for line in gitignore_content.splitlines() if line.strip() and not line.startswith("#")
+            ]
+            return ignored_patterns
+    except Exception as e:
+        app.display_error(f"Error reading .gitignore file: {e}")
+        sys.exit(1)
+
+def compress(app, file_path, relative_path):
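+    # Stream the file through a zlib compressor in 8 KB chunks and add up the output,
+    # which estimates the compressed size without keeping the compressed data around.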
+    compressor = zlib.compressobj()
+    compressed_size = 0
+    try:
+        # original_size = os.path.getsize(file_path)
+        with open(file_path, "rb") as f:
+            while chunk := f.read(8192):  # Read in 8KB chunks
+                compressed_chunk = compressor.compress(chunk)
+                compressed_size += len(compressed_chunk)
+
+            compressed_size += len(compressor.flush()) 
+        return compressed_size
+    except Exception as e:
+        app.display_error(f"Error processing {relative_path}: {e}")
+        sys.exit(1)
\ No newline at end of file
diff --git a/ddev/src/ddev/cli/size/diff.py b/ddev/src/ddev/cli/size/diff.py
new file mode 100644
index 0000000000000..0739dbe4dd482
--- /dev/null
+++ b/ddev/src/ddev/cli/size/diff.py
@@ -0,0 +1,151 @@
+# (C) Datadog, Inc. 2022-present
+# All rights reserved
+# Licensed under a 3-clause BSD style license (see LICENSE)
+
+import os
+import sys
+from pathlib import Path
+import click
+import requests
+from .common import is_correct_dependency, is_valid_integration, group_modules, print_csv, print_table, get_gitignore_files, get_dependencies, compress
+from .GitRepo import GitRepo
+
+VALID_PLATFORMS = ["linux-aarch64", "linux-x86_64", "macos-x86_64", "windows-x86_64"]
+VALID_PYTHON_VERSIONS = ["3.12"]
+
+
+@click.command()
+@click.argument("before")
+@click.argument("after")
+@click.option('--platform', type=click.Choice(VALID_PLATFORMS), help="Target platform")
+@click.option('--python', 'version', type=click.Choice(VALID_PYTHON_VERSIONS), help="Python version (MAJOR.MINOR)")
+@click.option('--compressed', is_flag=True, help="Measure compressed size")
+@click.option('--csv', is_flag=True, help="Output in CSV format")
+@click.pass_obj
+def diff(app, before, after, platform, version, compressed, csv):
+    platforms = VALID_PLATFORMS if platform is None else [platform]
+    versions = VALID_PYTHON_VERSIONS if version is None else [version]
+
+    for i, (plat, ver) in enumerate([(p, v) for p in platforms for v in versions]):
+        diff_mode(app, before, after, plat, ver, compressed, csv, i)
+
+
+def diff_mode(app, before, after, platform, version, compressed, csv, i):
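+    # Clone integrations-core into a temporary directory, measure compressed sizes at the
+    # "before" commit and again at the "after" commit, then report the per-module deltas.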
+    if compressed:
+        with GitRepo("https://github.com/DataDog/integrations-core.git") as gitRepo:
+            repo = gitRepo.repo_dir
+            gitRepo.checkout_commit(before)
+            files_b = get_compressed_files(app, repo)
+            dependencies_b = get_compressed_dependencies(app, repo, platform, version)
+            gitRepo.checkout_commit(after)
+            files_a = get_compressed_files(app, repo)
+            dependencies_a = get_compressed_dependencies(app, repo, platform, version)
+            
+        integrations = get_diff(files_b, files_a, 'Integration') 
+        dependencies = get_diff(dependencies_b, dependencies_a, 'Dependency') 
+        
+        grouped_modules = group_modules(integrations + dependencies, platform, version)
+        grouped_modules.sort(key=lambda x: x['Size (Bytes)'], reverse=True)
+        for module in grouped_modules:
+            if module['Size (Bytes)'] > 0:
+                module['Size'] = f"+{module['Size']}"
+
+        if csv:
+            print_csv(app, i, grouped_modules)
+        else:
+            print_table(app, grouped_modules, platform, version)
+
+
+
+        
+def get_diff(size_before, size_after, type):
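+    # Compare per-file sizes between the two commits: files only present after are marked
+    # "(NEW)", files only present before are marked "(DELETED)", unchanged files are skipped.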
+    all_paths = set(size_before.keys()) | set(size_after.keys())
+    diff_files = []
+
+    for path in all_paths:
+        size_b = size_before.get(path, 0)
+        size_a = size_after.get(path, 0)
+        size_delta = size_a - size_b
+        module = Path(path).parts[0] 
+        if size_delta != 0:
+            if size_b == 0:
+                diff_files.append(
+                    {
+                        'File Path': path,
+                        'Type': type,
+                        'Name': module + " (NEW)",
+                        'Size (Bytes)': size_delta,
+                    }
+                )
+            elif size_a == 0:
+                diff_files.append(
+                    {
+                        'File Path': path,
+                        'Type': type,
+                        'Name': module + " (DELETED)",
+                        'Size (Bytes)': size_delta, 
+                    }
+                )
+            else:
+                diff_files.append(
+                    {
+                        'File Path': path,
+                        'Type': type,
+                        'Name': module,
+                        'Size (Bytes)': size_delta,
+                    }
+                )
+
+    
+    return diff_files
+
+def get_compressed_files(app, repo_path):
+
+    ignored_files = {"datadog_checks_dev", "datadog_checks_tests_helper"}
+    git_ignore = get_gitignore_files(app, repo_path)
+    included_folder = "datadog_checks/"
+
+    file_data = {}
+    for root, _, files in os.walk(repo_path):
+        for file in files:
+            file_path = os.path.join(root, file)
+
+            # Convert the path to a relative format within the repo
+            relative_path = os.path.relpath(file_path, repo_path)
+
+            # Filter files
+            if is_valid_integration(relative_path, included_folder, ignored_files, git_ignore):
+                compressed_size = compress(app, file_path, relative_path)
+                file_data[relative_path] = compressed_size
+    return file_data
+
+def get_compressed_dependencies(app, repo_path, platform, version):
+
+    resolved_path = os.path.join(repo_path, ".deps/resolved")
+
+    if not os.path.exists(resolved_path) or not os.path.isdir(resolved_path):
+        app.display_error(f"Error: Directory not found {resolved_path}")
+        sys.exit(1)
+
+    for filename in os.listdir(resolved_path):
+        file_path = os.path.join(resolved_path, filename)
+
+        if os.path.isfile(file_path) and is_correct_dependency(platform, version, filename):
+            deps, download_urls = get_dependencies(app, file_path)
+            return get_dependencies_sizes(app, deps, download_urls)
+    return {}
+
+
+def get_dependencies_sizes(app, deps, download_urls):
+    file_data = {}
+    for dep, url in zip(deps, download_urls, strict=False):
+        dep_response = requests.head(url)
+        if dep_response.status_code != 200:
+            app.display_error(f"Error {dep_response.status_code}: Unable to fetch the dependencies file")
+            sys.exit(1)
+        else:
+            size = dep_response.headers.get("Content-Length", None)
+            file_data[dep] = int(size)
+
+    return file_data
+
diff --git a/ddev/src/ddev/cli/size/status.py b/ddev/src/ddev/cli/size/status.py
index 97f52300ae562..f52f41015a109 100644
--- a/ddev/src/ddev/cli/size/status.py
+++ b/ddev/src/ddev/cli/size/status.py
@@ -2,23 +2,21 @@
 # All rights reserved
 # Licensed under a 3-clause BSD style license (see LICENSE)
 
-import click
-import requests
-import re
 import os
+import re
+import sys
 import zlib
-import io
 from pathlib import Path
-import sys
-import csv as csv_lib
 
+import click
+
+from .common import convert_size, get_dependencies_sizes, is_correct_dependency, is_valid_integration, group_modules, print_csv, print_table, get_gitignore_files, get_dependencies, compress
 
 VALID_PLATFORMS = ["linux-aarch64", "linux-x86_64", "macos-x86_64", "windows-x86_64"]
 VALID_PYTHON_VERSIONS = ["3.12"]
 REPO_PATH = Path(__file__).resolve().parents[5]
 
 
-
 @click.command()
 @click.option('--platform', type=click.Choice(VALID_PLATFORMS), help="Target platform")
 @click.option('--python', 'version', type=click.Choice(VALID_PYTHON_VERSIONS), help="Python version (MAJOR.MINOR)")
@@ -33,44 +31,25 @@ def status(app, platform, version, compressed, csv):
         status_mode(app, plat, ver, compressed, csv, i)
 
 
-
-def status_mode(app,platform, version, compressed,csv,i):
+def status_mode(app, platform, version, compressed, csv, i):
     if compressed:
-        modules = get_compressed_files(app) + get_compressed_dependencies(app, platform,version)
-        
-        grouped_modules = group_modules(modules,platform, version)
+        modules = get_compressed_files(app) + get_compressed_dependencies(app, platform, version)
+
+        grouped_modules = group_modules(modules, platform, version)
         grouped_modules.sort(key=lambda x: x['Size (Bytes)'], reverse=True)
-        
-        if csv:
-            headers = grouped_modules[0].keys()
-            if i == 0:
-                app.display(",".join(headers)) # comas alrededor
 
-            for row in grouped_modules:
-                app.display(",".join(str(row[h]) for h in headers))
+        if csv:
+            print_csv(app, i, grouped_modules)
         else:
-            modules_table = {col: {} for col in grouped_modules[0].keys()}
-            for i,row in enumerate(grouped_modules):
-                for key,value in row.items():
-                    modules_table[key][i] = str(value)
-            app.display_table(platform + " " + version, modules_table)
+            print_table(app, grouped_modules, platform, version)
 
-    
 
-def group_modules(modules, platform, version):
-    grouped_aux = {}
-
-    for file in modules:
-        key = (file['Name'], file['Type'])
-        grouped_aux[key] = grouped_aux.get(key, 0) + file["Size (Bytes)"]
-
-    return [{'Name': name, 'Type': type, 'Size (Bytes)': size, 'Size': convert_size(size), 'Platform': platform, 'Version': version} for (name,type), size in grouped_aux.items()]
 
 
 def get_compressed_files(app):
 
     ignored_files = {"datadog_checks_dev", "datadog_checks_tests_helper"}
-    git_ignore = get_gitignore_files(app)
+    git_ignore = get_gitignore_files(app, REPO_PATH)
     included_folder = "datadog_checks/"
 
     file_data = []
@@ -81,126 +60,38 @@ def get_compressed_files(app):
             # Convert the path to a relative format within the repo
             relative_path = os.path.relpath(file_path, REPO_PATH)
 
-            # Filter files 
+            # Filter files
             if is_valid_integration(relative_path, included_folder, ignored_files, git_ignore):
-                try:
-                    compressor = zlib.compressobj()
-                    compressed_size = 0
-
-                    # original_size = os.path.getsize(file_path)
-                    with open(file_path, "rb") as f:
-                        while chunk := f.read(8192):  # Read in 8KB chunks
-                            compressed_chunk = compressor.compress(chunk)
-                            compressed_size += len(compressed_chunk)
-
-                        compressed_size += len(compressor.flush())  # Flush the buffer
-                    integration = relative_path.split("/")[0]
-                    file_data.append({
+                compressed_size = compress(app, file_path, relative_path)
+                integration = relative_path.split(os.sep)[0]
+                file_data.append(
+                    {
                         "File Path": relative_path,
                         "Type": "Integration",
                         "Name": integration,
-                        "Size (Bytes)": compressed_size
-                    })
+                        "Size (Bytes)": compressed_size,
+                    }
+                )
+    return file_data
 
-                except Exception as e:
-                    app.display_error(f"Error processing {relative_path}: {e}") 
-                    sys.exit(1) 
 
-    return file_data
-    
 
-def get_compressed_dependencies(app,platform, version):
-    
+def get_compressed_dependencies(app, platform, version):
+
     resolved_path = os.path.join(REPO_PATH, ".deps/resolved")
 
     if not os.path.exists(resolved_path) or not os.path.isdir(resolved_path):
         app.display_error(f"Error: Directory not found {resolved_path}")
         sys.exit(1)
 
-    
     for filename in os.listdir(resolved_path):
         file_path = os.path.join(resolved_path, filename)
-        
+
         if os.path.isfile(file_path) and is_correct_dependency(platform, version, filename):
             deps, download_urls = get_dependencies(app, file_path)
             return get_dependencies_sizes(app, deps, download_urls)
-    
-    
-    
 
 
-def is_correct_dependency(platform, version, name):
-    return platform in name and version in name
-        
-def get_dependencies_sizes(app, deps, download_urls):
-    file_data = []
-    for dep, url in zip(deps, download_urls):
-        dep_response = requests.head(url)
-        if dep_response.status_code != 200:
-            app.display_error(f"Error {dep_response.status_code}: Unable to fetch the dependencies file")
-            sys.exit(1)
-        else:
-            size = dep_response.headers.get("Content-Length", None)
-            file_data.append({"File Path": dep, "Type": "Dependency", "Name": dep, "Size (Bytes)": int(size)})
-        
-    return file_data 
-
-
-def get_dependencies(app,file_path):
-    download_urls = []
-    deps = []
-    try:
-        with open(file_path, "r", encoding="utf-8") as file:
-            file_content = file.read()
-            for line in file_content.splitlines():
-                match = re.search(r"([\w\-\d\.]+) @ (https?://[^\s#]+)", line)
-                if match:
-                    deps.append(match.group(1))
-                    download_urls.append(match.group(2))
-    except Exception as e:
-        app.display_error(f"Error reading file {file_path}: {e}")
-        sys.exit(1)
-    
-    return deps, download_urls
-
-def is_valid_integration(path, included_folder, ignored_files, git_ignore):
-    # It is not an integration
-    if path.startswith('.'):
-        return False
-    # It is part of an integration and it is not in the datadog_checks folder 
-    elif not (included_folder in path):
-        return False
-    # It is an irrelevant file
-    elif any(ignore in path for ignore in ignored_files):
-        return False
-    # This file is contained in .gitignore
-    elif any(ignore in path for ignore in git_ignore):
-        return False
-    else:
-        return True
-
-
-def get_gitignore_files(app): 
-    gitignore_path = os.path.join(REPO_PATH, ".gitignore")
-    if not os.path.exists(gitignore_path):
-        app.display_error(f"Error: .gitignore file not found at {gitignore_path}")
-        sys.exit(1)
-    
-    try:
-        with open(gitignore_path, "r", encoding="utf-8") as file:
-            gitignore_content = file.read()
-            ignored_patterns = [line.strip() for line in gitignore_content.splitlines() if line.strip() and not line.startswith("#")]
-            return ignored_patterns
-    except Exception as e:
-        app.display_error(f"Error reading .gitignore file: {e}")
-        sys.exit(1)
 
-def convert_size(size_bytes):
-    #Transforms bytes into a human-friendly format (KB, MB, GB)
-    for unit in [' B', ' KB', ' MB', ' GB']:
-        if size_bytes < 1024:
-            return (str(round(size_bytes, 2)) + unit)
-        size_bytes /= 1024
-    return (str(round(size_bytes, 2)) + " TB")
 
 
diff --git a/ddev/tests/cli/size/test_common.py b/ddev/tests/cli/size/test_common.py
new file mode 100644
index 0000000000000..e7dbd96d0672d
--- /dev/null
+++ b/ddev/tests/cli/size/test_common.py
@@ -0,0 +1,141 @@
+import os
+from unittest.mock import MagicMock, mock_open, patch
+
+from ddev.cli.application import Application
+from ddev.cli.size.status import (
+    convert_size,
+    print_csv,
+    compress,
+    get_dependencies,
+    get_dependencies_sizes,
+    get_gitignore_files,
+    group_modules,
+    is_correct_dependency,
+    is_valid_integration,
+)
+
+
+def test_is_correct_dependency():
+    assert is_correct_dependency("windows-x86_64", "3.12", "windows-x86_64-3.12")
+    assert not is_correct_dependency("windows-x86_64", "3.12", "linux-x86_64-3.12")
+    assert not is_correct_dependency("windows-x86_64", "3.13", "windows-x86_64-3.12")
+
+
+def test_convert_size():
+    assert convert_size(500) == "500 B"
+    assert convert_size(1024) == "1.0 KB"
+    assert convert_size(1048576) == "1.0 MB"
+    assert convert_size(1073741824) == "1.0 GB"
+
+
+def test_is_valid_integration():
+    included_folder = "datadog_checks/"
+    ignored_files = {"datadog_checks_dev", "datadog_checks_tests_helper"}
+    git_ignore = [".git", "__pycache__"]
+
+    assert is_valid_integration("datadog_checks/example.py", included_folder, ignored_files, git_ignore)
+    assert not is_valid_integration("__pycache__/file.py", included_folder, ignored_files, git_ignore)
+    assert not is_valid_integration("datadog_checks_dev/example.py", included_folder, ignored_files, git_ignore)
+    assert not is_valid_integration(".git/config", included_folder, ignored_files, git_ignore)
+
+
+def test_get_dependencies(terminal):
+    file_content = (
+        "dependency1 @ https://example.com/dependency1.whl\ndependency2 @ https://example.com/dependency2.whl"
+    )
+    mock_open_obj = mock_open(read_data=file_content)
+    with patch("builtins.open", mock_open_obj):
+        deps, urls = get_dependencies(terminal, "fake_path")
+    assert deps == ["dependency1", "dependency2"]
+    assert urls == ["https://example.com/dependency1.whl", "https://example.com/dependency2.whl"]
+
+
+def test_get_dependencies_sizes(terminal):
+    mock_response = MagicMock()
+    mock_response.status_code = 200
+    mock_response.headers = {"Content-Length": "12345"}
+    with patch("requests.head", return_value=mock_response):
+        file_data = get_dependencies_sizes(terminal, ["dependency1"], ["https://example.com/dependency1.whl"])
+    assert file_data == [
+        {"File Path": "dependency1", "Type": "Dependency", "Name": "dependency1", "Size (Bytes)": 12345}
+    ]
+
+
+def test_group_modules():
+    modules = [
+        {"Name": "module1", "Type": "A", "Size (Bytes)": 1500},
+        {"Name": "module2", "Type": "B", "Size (Bytes)": 3000},
+        {"Name": "module1", "Type": "A", "Size (Bytes)": 2500},
+        {"Name": "module3", "Type": "A", "Size (Bytes)": 4000},
+    ]
+
+    platform = "linux-aarch64"
+    version = "3.12"
+
+    expected_output = [
+        {
+            "Name": "module1",
+            "Type": "A",
+            "Size (Bytes)": 4000,
+            "Size": "3.91 KB",
+            "Platform": "linux-aarch64",
+            "Version": "3.12",
+        },
+        {
+            "Name": "module2",
+            "Type": "B",
+            "Size (Bytes)": 3000,
+            "Size": "2.93 KB",
+            "Platform": "linux-aarch64",
+            "Version": "3.12",
+        },
+        {
+            "Name": "module3",
+            "Type": "A",
+            "Size (Bytes)": 4000,
+            "Size": "3.91 KB",
+            "Platform": "linux-aarch64",
+            "Version": "3.12",
+        },
+    ]
+
+    assert group_modules(modules, platform, version) == expected_output
+
+def test_get_gitignore_files(terminal):
+    mock_gitignore = "__pycache__/\n*.log\n"  # Sample .gitignore file
+    repo_path = "/fake/repo"
+    with patch("builtins.open", mock_open(read_data=mock_gitignore)):
+        with patch("os.path.exists", return_value=True):
+            ignored_patterns = get_gitignore_files(terminal, repo_path)
+    assert ignored_patterns == ["__pycache__/", "*.log"]
+
+def test_compress():
+    mock_app = MagicMock()
+    fake_content = b'a' * 16384
+    original_size = len(fake_content)
+
+    m = mock_open(read_data=fake_content)
+    with patch("builtins.open", m):
+        compressed_size = compress(mock_app, "fake/path/file.py", "relative/path/file.py")
+
+    assert isinstance(compressed_size, int)
+    assert compressed_size > 0
+    assert compressed_size < original_size
+
+def test_print_csv():
+    mock_app = MagicMock()
+    modules = [
+        {"Name": "module1", "Size": 123},
+        {"Name": "module,with,comma", "Size": 456},
+    ]
+
+    print_csv(mock_app, i=0, modules=modules)
+
+    expected_calls = [
+        (("Name,Size",),),
+        (('module1,123',),),
+        (('"module,with,comma",456',),),
+    ]
+
+    actual_calls = mock_app.display.call_args_list
+    assert actual_calls == expected_calls
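The expected CSV calls above hinge on one rule: a value containing a comma must be wrapped in double quotes so it stays a single field. A minimal sketch of that quoting step (the real helper in common.py is called format; the name below is illustrative):

    def quote_if_needed(value):
        # Values containing a comma are wrapped in double quotes so they remain one CSV field.
        return '"' + value + '"' if "," in value else value

    row = {"Name": "module,with,comma", "Size": 456}
    line = ",".join(quote_if_needed(str(v)) for v in row.values())
    assert line == '"module,with,comma",456'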
diff --git a/ddev/tests/cli/size/test_diff.py b/ddev/tests/cli/size/test_diff.py
new file mode 100644
index 0000000000000..288067aa37bfc
--- /dev/null
+++ b/ddev/tests/cli/size/test_diff.py
@@ -0,0 +1,112 @@
+# (C) Datadog, Inc. 2022-present
+# All rights reserved
+# Licensed under a 3-clause BSD style license (see LICENSE)
+
+from unittest.mock import MagicMock, mock_open, patch
+
+from ddev.cli.size.diff import (
+    get_compressed_files,
+    get_compressed_dependencies,
+    get_diff
+   
+)
+
+def test_get_compressed_files():
+    mock_app = MagicMock()
+    mock_repo_path = "root"
+
+    mock_files = [
+        ("root/integration/datadog_checks", [], ["file1.py", "file2.py"]),
+        ("root/integration_b/datadog_checks", [], ["file3.py"]),
+        ("root", [], ["ignored.py"]),
+    ]
+
+    def fake_compress(app, file_path, relative_path):
+        return 1000
+
+    fake_gitignore = {"ignored.py"}
+
+    with patch("os.walk", return_value=mock_files), \
+         patch("os.path.relpath", side_effect=lambda path, _: path.replace("root/", "")), \
+         patch("os.path.exists", return_value=True), \
+         patch("builtins.open", mock_open(read_data="__pycache__/\n*.log\n")),\
+         patch("ddev.cli.size.diff.get_gitignore_files", return_value=fake_gitignore), \
+         patch("ddev.cli.size.diff.is_valid_integration", side_effect=lambda path, folder, ignored, git_ignore: path.startswith("integration")), \
+         patch("ddev.cli.size.diff.compress", side_effect=fake_compress):
+
+        result = get_compressed_files(mock_app, mock_repo_path)
+
+    expected = {
+        "integration/datadog_checks/file1.py": 1000,
+        "integration/datadog_checks/file2.py": 1000,
+        "integration_b/datadog_checks/file3.py": 1000
+    }
+
+
+    assert result == expected
+
+def test_get_compressed_dependencies(terminal):
+    platform = "windows-x86_64"
+    version = "3.12"
+
+    fake_file_content = (
+        "dependency1 @ https://example.com/dependency1.whl\ndependency2 @ https://example.com/dependency2.whl"
+    )
+
+    mock_response = MagicMock()
+    mock_response.status_code = 200
+    mock_response.headers = {"Content-Length": "12345"}
+    mock_repo_path = "root"
+
+    with (
+        patch("os.path.exists", return_value=True),
+        patch("os.path.isdir", return_value=True),
+        patch("os.listdir", return_value=[f"{platform}-{version}"]),
+        patch("os.path.isfile", return_value=True),
+        patch("builtins.open", mock_open(read_data=fake_file_content)),
+        patch("requests.head", return_value=mock_response),
+    ):
+
+        file_data = get_compressed_dependencies(terminal,mock_repo_path, platform, version)
+
+    assert file_data == {
+        "dependency1": 12345,
+        "dependency2": 12345,
+    } 
+
+    def test_get_diff():
+        size_before = {
+            "integration/foo.py": 1000,
+            "integration/bar.py": 2000,
+            "integration/deleted.py": 1500,
+        }
+        size_after = {
+            "integration/foo.py": 1200,  # modified
+            "integration/bar.py": 2000,  # unchanged
+            "integration/new.py": 800,   # new
+        }
+
+        expected = [
+            {
+                "File Path": "integration/foo.py",
+                "Type": "Integration",
+                "Name": "integration",
+                "Size (Bytes)": 200,
+            },
+            {
+                "File Path": "integration/deleted.py",
+                "Type": "Integration",
+                "Name": "integration (DELETED)",
+                "Size (Bytes)": -1500,
+            },
+            {
+                "File Path": "integration/new.py",
+                "Type": "Integration",
+                "Name": "integration (NEW)",
+                "Size (Bytes)": 800,
+            }
+        ]
+
+        result = get_diff(size_before, size_after, "Integration")
+        assert sorted(result, key=lambda x: x["File Path"]) == sorted(expected, key=lambda x: x["File Path"])
+
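The expectations above spell out the get_diff contract: paths present only after the second commit are tagged (NEW), paths present only before are tagged (DELETED) with a negative delta, and unchanged paths are dropped. A condensed sketch of that classification (the tag is attached to the path here for brevity; the real helper tags the integration name):

    def size_deltas(before, after):
        # Keep only paths whose size changed; label additions and removals.
        deltas = {}
        for path in set(before) | set(after):
            delta = after.get(path, 0) - before.get(path, 0)
            if delta == 0:
                continue
            tag = " (NEW)" if path not in before else " (DELETED)" if path not in after else ""
            deltas[path + tag] = delta
        return deltas

    assert size_deltas({"a.py": 1000}, {"a.py": 1200, "b.py": 800}) == {"a.py": 200, "b.py (NEW)": 800}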
diff --git a/ddev/tests/cli/size/test_status.py b/ddev/tests/cli/size/test_status.py
index a7625797ef763..1792802b44bca 100644
--- a/ddev/tests/cli/size/test_status.py
+++ b/ddev/tests/cli/size/test_status.py
@@ -2,129 +2,113 @@
 # All rights reserved
 # Licensed under a 3-clause BSD style license (see LICENSE)
 
-from unittest.mock import patch, mock_open, MagicMock
-import os
+from unittest.mock import MagicMock, mock_open, patch
+
 from ddev.cli.size.status import (
+    get_compressed_files,
     get_compressed_dependencies,
-    get_gitignore_files,
-    convert_size,
-    is_valid_integration,
-    is_correct_dependency,
-    get_dependencies,
-    get_dependencies_sizes,
-    group_modules
+   
 )
-from ddev.cli.application import Application
 
+def test_get_compressed_files():
+    mock_app = MagicMock()
 
-def test_is_correct_dependency():
-    assert is_correct_dependency("windows-x86_64", "3.12", "windows-x86_64-3.12")
-    assert not is_correct_dependency("windows-x86_64", "3.12", "linux-x86_64-3.12") 
-    assert not is_correct_dependency("windows-x86_64", "3.13", "windows-x86_64-3.12")
+    mock_files = [
+        ("root/integration/datadog_checks", [], ["file1.py", "file2.py"]),
+        ("root/integration_b/datadog_checks", [], ["file3.py"]),
+        ("root", [], ["ignored.py"]),
+    ]
+
+    def fake_compress(app, file_path, relative_path):
+        return 1000  
+
+    fake_gitignore = {"ignored.py"}
+
+    with patch("os.walk", return_value=mock_files), \
+         patch("os.path.relpath", side_effect=lambda path, _: path.replace("root/", "")), \
+         patch("ddev.cli.size.status.get_gitignore_files", return_value=fake_gitignore), \
+         patch("ddev.cli.size.status.is_valid_integration", side_effect=lambda path, folder, ignored, git_ignore: path.startswith("integration")), \
+         patch("ddev.cli.size.status.compress", side_effect=fake_compress):
+
+        result = get_compressed_files(mock_app)
+
+    expected = [
+        {
+            "File Path": "integration/datadog_checks/file1.py",
+            "Type": "Integration",
+            "Name": "integration",
+            "Size (Bytes)": 1000,
+        },
+        {
+            "File Path": "integration/datadog_checks/file2.py",
+            "Type": "Integration",
+            "Name": "integration",
+            "Size (Bytes)": 1000,
+        },
+        {
+            "File Path": "integration_b/datadog_checks/file3.py",
+            "Type": "Integration",
+            "Name": "integration_b",
+            "Size (Bytes)": 1000,
+        },
+    ]
+
+    assert result == expected
 
-   
-def test_convert_size():
-    assert convert_size(500) == "500 B"
-    assert convert_size(1024) == "1.0 KB"
-    assert convert_size(1048576) == "1.0 MB"
-    assert convert_size(1073741824) == "1.0 GB"
-
-def test_is_valid_integration():
-    included_folder = "datadog_checks/"
-    ignored_files = {"datadog_checks_dev", "datadog_checks_tests_helper"}
-    git_ignore = [".git", "__pycache__"]
-    
-    assert is_valid_integration("datadog_checks/example.py", included_folder, ignored_files, git_ignore)
-    assert not is_valid_integration("__pycache__/file.py", included_folder, ignored_files, git_ignore) 
-    assert not is_valid_integration("datadog_checks_dev/example.py", included_folder, ignored_files, git_ignore)
-    assert not is_valid_integration(".git/config", included_folder, ignored_files, git_ignore)
-
-def test_get_dependencies(terminal):
-    file_content = "dependency1 @ https://example.com/dependency1.whl\ndependency2 @ https://example.com/dependency2.whl"
-    mock_open_obj = mock_open(read_data=file_content)
-    with patch("builtins.open", mock_open_obj):
-        deps, urls = get_dependencies(terminal, "fake_path")
-    assert deps == ["dependency1", "dependency2"]
-    assert urls == ["https://example.com/dependency1.whl", "https://example.com/dependency2.whl"]
-
-def test_get_gitignore_files(terminal):
-    mock_gitignore = "__pycache__/\n*.log\n"  # Sample .gitignore file
-    with patch("builtins.open", mock_open(read_data=mock_gitignore)):
-        with patch("os.path.exists", return_value=True):
-            ignored_patterns = get_gitignore_files(terminal)
-    assert ignored_patterns == ["__pycache__/", "*.log"]
-
-def test_get_dependencies_sizes(terminal):
-    mock_response = MagicMock()
-    mock_response.status_code = 200
-    mock_response.headers = {"Content-Length": "12345"}
-    with patch("requests.head", return_value=mock_response):
-        file_data = get_dependencies_sizes(terminal, ["dependency1"], ["https://example.com/dependency1.whl"])
-    assert file_data == [{"File Path": "dependency1", "Type": "Dependency", "Name": "dependency1", "Size (Bytes)": 12345}]
 
 def test_get_compressed_dependencies(terminal):
     platform = "windows-x86_64"
     version = "3.12"
-    
-    fake_file_content = "dependency1 @ https://example.com/dependency1.whl\ndependency2 @ https://example.com/dependency2.whl"
-    
+
+    fake_file_content = (
+        "dependency1 @ https://example.com/dependency1.whl\ndependency2 @ https://example.com/dependency2.whl"
+    )
+
     mock_response = MagicMock()
     mock_response.status_code = 200
     mock_response.headers = {"Content-Length": "12345"}
-    
-    with patch("os.path.exists", return_value=True), \
-         patch("os.path.isdir", return_value=True), \
-         patch("os.listdir", return_value=[f"{platform}-{version}"]), \
-         patch("os.path.isfile", return_value=True), \
-         patch("builtins.open", mock_open(read_data=fake_file_content)), \
-         patch("requests.head", return_value=mock_response):
-        
+
+    with (
+        patch("os.path.exists", return_value=True),
+        patch("os.path.isdir", return_value=True),
+        patch("os.listdir", return_value=[f"{platform}-{version}"]),
+        patch("os.path.isfile", return_value=True),
+        patch("builtins.open", mock_open(read_data=fake_file_content)),
+        patch("requests.head", return_value=mock_response),
+    ):
+
         file_data = get_compressed_dependencies(terminal, platform, version)
-    
+
     assert file_data == [
         {"File Path": "dependency1", "Type": "Dependency", "Name": "dependency1", "Size (Bytes)": 12345},
         {"File Path": "dependency2", "Type": "Dependency", "Name": "dependency2", "Size (Bytes)": 12345},
     ]
-def test_group_modules():
-    modules = [
-        {"Name": "module1", "Type": "A", "Size (Bytes)": 1500},
-        {"Name": "module2", "Type": "B", "Size (Bytes)": 3000},
-        {"Name": "module1", "Type": "A", "Size (Bytes)": 2500},  
-        {"Name": "module3", "Type": "A", "Size (Bytes)": 4000},
-    ]
-
-    platform = "linux-aarch64"
-    version = "3.12"
-
-    expected_output = [
-        {"Name": "module1", "Type": "A", "Size (Bytes)": 4000, "Size": "3.91 KB", "Platform": "linux-aarch64", "Version": "3.12"},
-        {"Name": "module2", "Type": "B", "Size (Bytes)": 3000, "Size": "2.93 KB", "Platform": "linux-aarch64", "Version": "3.12"},
-        {"Name": "module3", "Type": "A", "Size (Bytes)": 4000, "Size": "3.91 KB", "Platform": "linux-aarch64", "Version": "3.12"},
-    ]
 
-    assert group_modules(modules, platform, version) == expected_output
 
-def test_statu_no_args(ddev):
+def test_status_no_args(ddev):
     result = ddev('size', 'status', '--compressed')
     assert result.exit_code == 0
 
+
 def test_status(ddev):
     result = ddev('size', 'status', '--platform', 'linux-aarch64', '--python', '3.12', '--compressed')
     assert result.exit_code == 0
 
+
 def test_status_csv(ddev):
     result = ddev('size', 'status', '--platform', 'linux-aarch64', '--python', '3.12', '--compressed', '--csv')
     assert result.exit_code == 0
 
-def test_status_fail(ddev):
+
+def test_status_wrong_platform(ddev):
     result = ddev('size', 'status', '--platform', 'linux', '--python', '3.12', '--compressed')
     assert result.exit_code != 0
 
-def test_status_fail2(ddev):
+
+def test_status_wrong_version(ddev):
     result = ddev('size', 'status', '--platform', 'linux-aarch64', '--python', '2.10', '--compressed')
     assert result.exit_code != 0
 
-def test_status_fail2(ddev):
-    result = ddev('size', 'status', '--platform', 'linux', '--python' ,'2.10', '--compressed')
+def test_status_wrong_plat_and_version(ddev):
+    result = ddev('size', 'status', '--platform', 'linux', '--python', '2.10', '--compressed')
     assert result.exit_code != 0
-
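The group_modules test that moved into test_common.py exercises an aggregation that sums byte sizes per (Name, Type) pair before the human-readable size and the platform/version columns are attached. A compact sketch of that first step alone (function name is illustrative):

    from collections import defaultdict

    def sum_by_name_and_type(modules):
        # One running total per (Name, Type) pair, mirroring the grouping in group_modules.
        totals = defaultdict(int)
        for module in modules:
            totals[(module["Name"], module["Type"])] += module["Size (Bytes)"]
        return [{"Name": n, "Type": t, "Size (Bytes)": s} for (n, t), s in totals.items()]

    rows = sum_by_name_and_type(
        [
            {"Name": "module1", "Type": "A", "Size (Bytes)": 1500},
            {"Name": "module1", "Type": "A", "Size (Bytes)": 2500},
        ]
    )
    assert rows == [{"Name": "module1", "Type": "A", "Size (Bytes)": 4000}]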

From fefd6a3f7d9911815741f7e1d169b309d3b692fe Mon Sep 17 00:00:00 2001
From: Lucia Sanchez Bella <lucia.sanchezbella@datadoghq.com>
Date: Tue, 25 Mar 2025 12:58:45 +0100
Subject: [PATCH 05/40] add diff mode

---
 ddev/src/ddev/cli/size/GitRepo.py  |  6 ++---
 ddev/src/ddev/cli/size/__init__.py |  3 ++-
 ddev/src/ddev/cli/size/common.py   | 39 ++++++++++++++++++-----------
 ddev/src/ddev/cli/size/diff.py     | 35 ++++++++++++++++----------
 ddev/src/ddev/cli/size/status.py   | 22 ++++++++--------
 ddev/tests/cli/size/test_common.py |  9 ++++---
 ddev/tests/cli/size/test_diff.py   | 40 +++++++++++++++---------------
 ddev/tests/cli/size/test_status.py | 22 ++++++++++------
 8 files changed, 101 insertions(+), 75 deletions(-)

diff --git a/ddev/src/ddev/cli/size/GitRepo.py b/ddev/src/ddev/cli/size/GitRepo.py
index 04e736b1058b5..112ee0c15d418 100644
--- a/ddev/src/ddev/cli/size/GitRepo.py
+++ b/ddev/src/ddev/cli/size/GitRepo.py
@@ -1,7 +1,8 @@
 import os
 import shutil
-import tempfile
 import subprocess
+import tempfile
+
 
 class GitRepo:
     def __init__(self, url):
@@ -21,7 +22,6 @@ def checkout_commit(self, commit):
         self._run(f"git fetch --depth 1 origin {commit}")
         self._run(f"git checkout {commit}")
 
-
     def __exit__(self, exception_type, exception_value, exception_traceback):
         if self.repo_dir and os.path.exists(self.repo_dir):
-            shutil.rmtree(self.repo_dir)
\ No newline at end of file
+            shutil.rmtree(self.repo_dir)
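The class above is meant to act as a disposable shallow checkout: the temporary clone only ever fetches the single commit it needs and is wiped on exit. A usage sketch under the import path that exists as of this patch (the commit SHA is a placeholder):

    from ddev.cli.size.GitRepo import GitRepo

    with GitRepo("https://github.com/DataDog/integrations-core.git") as git_repo:
        git_repo.checkout_commit("abc1234")  # placeholder SHA: shallow fetch + checkout of one commit
        print(git_repo.repo_dir)             # temporary working tree holding that commit
    # The temporary directory is removed when the with-block exits.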
diff --git a/ddev/src/ddev/cli/size/__init__.py b/ddev/src/ddev/cli/size/__init__.py
index 1cb5140b75d23..6f3878b2fda35 100644
--- a/ddev/src/ddev/cli/size/__init__.py
+++ b/ddev/src/ddev/cli/size/__init__.py
@@ -4,8 +4,9 @@
 
 import click
 
-from ddev.cli.size.status import status
 from ddev.cli.size.diff import diff
+from ddev.cli.size.status import status
+
 
 @click.group(short_help='Get the size of integrations and dependencies by platform and python version')
 def size():
diff --git a/ddev/src/ddev/cli/size/common.py b/ddev/src/ddev/cli/size/common.py
index 922b5ca748a4f..6a2c83f7a7c38 100644
--- a/ddev/src/ddev/cli/size/common.py
+++ b/ddev/src/ddev/cli/size/common.py
@@ -1,15 +1,17 @@
 # (C) Datadog, Inc. 2022-present
 # All rights reserved
 # Licensed under a 3-clause BSD style license (see LICENSE)
-import sys
-import re
-import requests
 import os
+import re
+import sys
 import zlib
 
-#utilities
+import requests
+
+# utilities
 
-#check if it exists
+
+# check if it exists
 def convert_size(size_bytes):
     # Transforms bytes into a human-friendly format (KB, MB, GB)
     for unit in [' B', ' KB', ' MB', ' GB']:
@@ -39,27 +41,31 @@ def is_valid_integration(path, included_folder, ignored_files, git_ignore):
 def is_correct_dependency(platform, version, name):
     return platform in name and version in name
 
+
 def print_csv(app, i, modules):
     headers = modules[0].keys()
     if i == 0:
-        app.display(",".join(headers))  
+        app.display(",".join(headers))
 
     for row in modules:
         app.display(",".join(format(str(row[h])) for h in headers))
 
+
 def format(s):
     if "," in s:
         return '"' + s + '"'
     else:
         return s
-    
+
+
 def print_table(app, modules, platform, version):
     modules_table = {col: {} for col in modules[0].keys()}
     for i, row in enumerate(modules):
         for key, value in row.items():
             modules_table[key][i] = str(value)
     app.display_table(platform + " " + version, modules_table)
-    
+
+
 def get_dependencies_sizes(app, deps, download_urls):
     file_data = []
     for dep, url in zip(deps, download_urls, strict=False):
@@ -73,6 +79,7 @@ def get_dependencies_sizes(app, deps, download_urls):
 
     return file_data
 
+
 def get_dependencies(app, file_path):
     download_urls = []
     deps = []
@@ -100,16 +107,17 @@ def group_modules(modules, platform, version):
 
     return [
         {
-            'Name': name ,
-            'Type': type ,
-            'Size (Bytes)': size ,
+            'Name': name,
+            'Type': type,
+            'Size (Bytes)': size,
             'Size': convert_size(size),
-            'Platform': platform ,
-            'Version': version ,
+            'Platform': platform,
+            'Version': version,
         }
         for (name, type), size in grouped_aux.items()
     ]
 
+
 def get_gitignore_files(app, repo_path):
     gitignore_path = os.path.join(repo_path, ".gitignore")
     if not os.path.exists(gitignore_path):
@@ -127,6 +135,7 @@ def get_gitignore_files(app, repo_path):
         app.display_error(f"Error reading .gitignore file: {e}")
         sys.exit(1)
 
+
 def compress(app, file_path, relative_path):
     compressor = zlib.compressobj()
     compressed_size = 0
@@ -137,8 +146,8 @@ def compress(app, file_path, relative_path):
                 compressed_chunk = compressor.compress(chunk)
                 compressed_size += len(compressed_chunk)
 
-            compressed_size += len(compressor.flush()) 
+            compressed_size += len(compressor.flush())
         return compressed_size
     except Exception as e:
         app.display_error(f"Error processing {relative_path}: {e}")
-        sys.exit(1)
\ No newline at end of file
+        sys.exit(1)
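The compress helper above measures the deflate-compressed size of a file without ever writing the compressed bytes anywhere. The same streaming pattern over an in-memory buffer, as an isolated sketch:

    import io
    import zlib

    def compressed_size_of(data):
        # Feed 8 KB chunks through a zlib compressor and add up the output lengths.
        compressor = zlib.compressobj()
        total = 0
        stream = io.BytesIO(data)
        while chunk := stream.read(8192):
            total += len(compressor.compress(chunk))
        total += len(compressor.flush())
        return total

    assert compressed_size_of(b"a" * 16384) < 16384  # highly repetitive input shrinks a lot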
diff --git a/ddev/src/ddev/cli/size/diff.py b/ddev/src/ddev/cli/size/diff.py
index 0739dbe4dd482..a18dc313ec531 100644
--- a/ddev/src/ddev/cli/size/diff.py
+++ b/ddev/src/ddev/cli/size/diff.py
@@ -5,9 +5,20 @@
 import os
 import sys
 from pathlib import Path
+
 import click
 import requests
-from .common import is_correct_dependency, is_valid_integration, group_modules, print_csv, print_table, get_gitignore_files, get_dependencies, compress
+
+from .common import (
+    compress,
+    get_dependencies,
+    get_gitignore_files,
+    group_modules,
+    is_correct_dependency,
+    is_valid_integration,
+    print_csv,
+    print_table,
+)
 from .GitRepo import GitRepo
 
 VALID_PLATFORMS = ["linux-aarch64", "linux-x86_64", "macos-x86_64", "windows-x86_64"]
@@ -40,10 +51,10 @@ def diff_mode(app, before, after, platform, version, compressed, csv, i):
             gitRepo.checkout_commit(after)
             files_a = get_compressed_files(app, repo)
             dependencies_a = get_compressed_dependencies(app, repo, platform, version)
-            
-        integrations = get_diff(files_b, files_a, 'Integration') 
-        dependencies = get_diff(dependencies_b, dependencies_a, 'Dependency') 
-        
+
+        integrations = get_diff(files_b, files_a, 'Integration')
+        dependencies = get_diff(dependencies_b, dependencies_a, 'Dependency')
+
         grouped_modules = group_modules(integrations + dependencies, platform, version)
         grouped_modules.sort(key=lambda x: x['Size (Bytes)'], reverse=True)
         for module in grouped_modules:
@@ -56,8 +67,6 @@ def diff_mode(app, before, after, platform, version, compressed, csv, i):
             print_table(app, grouped_modules, platform, version)
 
 
-
-        
 def get_diff(size_before, size_after, type):
     all_paths = set(size_before.keys()) | set(size_after.keys())
     diff_files = []
@@ -66,7 +75,7 @@ def get_diff(size_before, size_after, type):
         size_b = size_before.get(path, 0)
         size_a = size_after.get(path, 0)
         size_delta = size_a - size_b
-        module = Path(path).parts[0] 
+        module = Path(path).parts[0]
         if size_delta != 0:
             if size_b == 0:
                 diff_files.append(
@@ -74,16 +83,16 @@ def get_diff(size_before, size_after, type):
                         'File Path': path,
                         'Type': type,
                         'Name': module + " (NEW)",
-                        'Size (Bytes)': size_delta, 
+                        'Size (Bytes)': size_delta,
                     }
-            )
+                )
             elif size_a == 0:
                 diff_files.append(
                     {
                         'File Path': path,
                         'Type': type,
                         'Name': module + " (DELETED)",
-                        'Size (Bytes)': size_delta, 
+                        'Size (Bytes)': size_delta,
                     }
                 )
             else:
@@ -96,9 +105,9 @@ def get_diff(size_before, size_after, type):
                     }
                 )
 
-    
     return diff_files
 
+
 def get_compressed_files(app, repo_path):
 
     ignored_files = {"datadog_checks_dev", "datadog_checks_tests_helper"}
@@ -119,6 +128,7 @@ def get_compressed_files(app, repo_path):
                 file_data[relative_path] = compressed_size
     return file_data
 
+
 def get_compressed_dependencies(app, repo_path, platform, version):
 
     resolved_path = os.path.join(repo_path, ".deps/resolved")
@@ -148,4 +158,3 @@ def get_dependencies_sizes(app, deps, download_urls):
             file_data[dep] = int(size)
 
     return file_data
-
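get_dependencies_sizes never downloads the wheels: a HEAD request is enough because the Content-Length response header already advertises the file size. A minimal sketch of that lookup with a placeholder URL (it fails loudly via raise_for_status, whereas the helper here reports through app.display_error):

    import requests

    def remote_size(url):
        # Fetch headers only; Content-Length is the advertised size in bytes.
        response = requests.head(url)
        response.raise_for_status()
        # Note: some servers omit Content-Length; the real helpers read it with .get().
        return int(response.headers["Content-Length"])

    # Example call (placeholder URL): remote_size("https://example.com/dependency1.whl")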
diff --git a/ddev/src/ddev/cli/size/status.py b/ddev/src/ddev/cli/size/status.py
index f52f41015a109..f804233f4a406 100644
--- a/ddev/src/ddev/cli/size/status.py
+++ b/ddev/src/ddev/cli/size/status.py
@@ -3,14 +3,22 @@
 # Licensed under a 3-clause BSD style license (see LICENSE)
 
 import os
-import re
 import sys
-import zlib
 from pathlib import Path
 
 import click
 
-from .common import convert_size, get_dependencies_sizes, is_correct_dependency, is_valid_integration, group_modules, print_csv, print_table, get_gitignore_files, get_dependencies, compress
+from .common import (
+    compress,
+    get_dependencies,
+    get_dependencies_sizes,
+    get_gitignore_files,
+    group_modules,
+    is_correct_dependency,
+    is_valid_integration,
+    print_csv,
+    print_table,
+)
 
 VALID_PLATFORMS = ["linux-aarch64", "linux-x86_64", "macos-x86_64", "windows-x86_64"]
 VALID_PYTHON_VERSIONS = ["3.12"]
@@ -44,8 +52,6 @@ def status_mode(app, platform, version, compressed, csv, i):
             print_table(app, grouped_modules, platform, version)
 
 
-
-
 def get_compressed_files(app):
 
     ignored_files = {"datadog_checks_dev", "datadog_checks_tests_helper"}
@@ -75,7 +81,6 @@ def get_compressed_files(app):
     return file_data
 
 
-
 def get_compressed_dependencies(app, platform, version):
 
     resolved_path = os.path.join(REPO_PATH, ".deps/resolved")
@@ -90,8 +95,3 @@ def get_compressed_dependencies(app, platform, version):
         if os.path.isfile(file_path) and is_correct_dependency(platform, version, filename):
             deps, download_urls = get_dependencies(app, file_path)
             return get_dependencies_sizes(app, deps, download_urls)
-
-
-
-
-
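The lockfiles under .deps/resolved are matched purely by substring: a file belongs to a platform/Python pair when both strings appear in its name, which is all is_correct_dependency checks. A small sketch of that filter over a hypothetical directory listing:

    def is_correct_dependency(platform, version, name):
        # A resolved-deps file matches when its name mentions both the platform and the version.
        return platform in name and version in name

    filenames = ["linux-aarch64-3.12", "windows-x86_64-3.12"]  # hypothetical .deps/resolved listing
    matches = [f for f in filenames if is_correct_dependency("linux-aarch64", "3.12", f)]
    assert matches == ["linux-aarch64-3.12"]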
diff --git a/ddev/tests/cli/size/test_common.py b/ddev/tests/cli/size/test_common.py
index e7dbd96d0672d..8d218fab237e1 100644
--- a/ddev/tests/cli/size/test_common.py
+++ b/ddev/tests/cli/size/test_common.py
@@ -1,17 +1,15 @@
-import os
 from unittest.mock import MagicMock, mock_open, patch
 
-from ddev.cli.application import Application
 from ddev.cli.size.status import (
-    convert_size,
-    print_csv,
     compress,
+    convert_size,
     get_dependencies,
     get_dependencies_sizes,
     get_gitignore_files,
     group_modules,
     is_correct_dependency,
     is_valid_integration,
+    print_csv,
 )
 
 
@@ -101,6 +99,7 @@ def test_group_modules():
 
     assert group_modules(modules, platform, version) == expected_output
 
+
 def test_get_gitignore_files(terminal):
     mock_gitignore = "__pycache__/\n*.log\n"  # Sample .gitignore file
     repo_path = "/fake/repo"
@@ -109,6 +108,7 @@ def test_get_gitignore_files(terminal):
             ignored_patterns = get_gitignore_files(terminal, repo_path)
     assert ignored_patterns == ["__pycache__/", "*.log"]
 
+
 def test_compress():
     mock_app = MagicMock()
     fake_content = b'a' * 16384
@@ -122,6 +122,7 @@ def test_compress():
     assert compressed_size > 0
     assert compressed_size < original_size
 
+
 def test_print_csv():
     mock_app = MagicMock()
     modules = [
diff --git a/ddev/tests/cli/size/test_diff.py b/ddev/tests/cli/size/test_diff.py
index 288067aa37bfc..4db01ab8ed3d5 100644
--- a/ddev/tests/cli/size/test_diff.py
+++ b/ddev/tests/cli/size/test_diff.py
@@ -4,12 +4,8 @@
 
 from unittest.mock import MagicMock, mock_open, patch
 
-from ddev.cli.size.diff import (
-    get_compressed_files,
-    get_compressed_dependencies,
-    get_diff
-   
-)
+from ddev.cli.size.diff import get_compressed_dependencies, get_compressed_files, get_diff
+
 
 def test_get_compressed_files():
     mock_app = MagicMock()
@@ -26,25 +22,30 @@ def fake_compress(app, file_path, relative_path):
 
     fake_gitignore = {"ignored.py"}
 
-    with patch("os.walk", return_value=mock_files), \
-         patch("os.path.relpath", side_effect=lambda path, _: path.replace("root/", "")), \
-         patch("os.path.exists", return_value=True), \
-         patch("builtins.open", mock_open(read_data="__pycache__/\n*.log\n")),\
-         patch("ddev.cli.size.diff.get_gitignore_files", return_value=fake_gitignore), \
-         patch("ddev.cli.size.diff.is_valid_integration", side_effect=lambda path, folder, ignored, git_ignore: path.startswith("integration")), \
-         patch("ddev.cli.size.diff.compress", side_effect=fake_compress):
+    with (
+        patch("os.walk", return_value=mock_files),
+        patch("os.path.relpath", side_effect=lambda path, _: path.replace("root/", "")),
+        patch("os.path.exists", return_value=True),
+        patch("builtins.open", mock_open(read_data="__pycache__/\n*.log\n")),
+        patch("ddev.cli.size.diff.get_gitignore_files", return_value=fake_gitignore),
+        patch(
+            "ddev.cli.size.diff.is_valid_integration",
+            side_effect=lambda path, folder, ignored, git_ignore: path.startswith("integration"),
+        ),
+        patch("ddev.cli.size.diff.compress", side_effect=fake_compress),
+    ):
 
         result = get_compressed_files(mock_app, mock_repo_path)
 
     expected = {
         "integration/datadog_checks/file1.py": 1000,
         "integration/datadog_checks/file2.py": 1000,
-        "integration_b/datadog_checks/file3.py": 1000
+        "integration_b/datadog_checks/file3.py": 1000,
     }
 
-
     assert result == expected
 
+
 def test_get_compressed_dependencies(terminal):
     platform = "windows-x86_64"
     version = "3.12"
@@ -67,12 +68,12 @@ def test_get_compressed_dependencies(terminal):
         patch("requests.head", return_value=mock_response),
     ):
 
-        file_data = get_compressed_dependencies(terminal,mock_repo_path, platform, version)
+        file_data = get_compressed_dependencies(terminal, mock_repo_path, platform, version)
 
     assert file_data == {
         "dependency1": 12345,
         "dependency2": 12345,
-    } 
+    }
 
     def test_get_diff():
         size_before = {
@@ -83,7 +84,7 @@ def test_get_diff():
         size_after = {
             "integration/foo.py": 1200,  # modified
             "integration/bar.py": 2000,  # unchanged
-            "integration/new.py": 800,   # new
+            "integration/new.py": 800,  # new
         }
 
         expected = [
@@ -104,9 +105,8 @@ def test_get_diff():
                 "Type": "Integration",
                 "Name": "integration (NEW)",
                 "Size (Bytes)": 800,
-            }
+            },
         ]
 
         result = get_diff(size_before, size_after, "Integration")
         assert sorted(result, key=lambda x: x["File Path"]) == sorted(expected, key=lambda x: x["File Path"])
-
diff --git a/ddev/tests/cli/size/test_status.py b/ddev/tests/cli/size/test_status.py
index 1792802b44bca..fb2436725705d 100644
--- a/ddev/tests/cli/size/test_status.py
+++ b/ddev/tests/cli/size/test_status.py
@@ -5,11 +5,11 @@
 from unittest.mock import MagicMock, mock_open, patch
 
 from ddev.cli.size.status import (
-    get_compressed_files,
     get_compressed_dependencies,
-   
+    get_compressed_files,
 )
 
+
 def test_get_compressed_files():
     mock_app = MagicMock()
 
@@ -20,15 +20,20 @@ def test_get_compressed_files():
     ]
 
     def fake_compress(app, file_path, relative_path):
-        return 1000  
+        return 1000
 
     fake_gitignore = {"ignored.py"}
 
-    with patch("os.walk", return_value=mock_files), \
-         patch("os.path.relpath", side_effect=lambda path, _: path.replace("root/", "")), \
-         patch("ddev.cli.size.status.get_gitignore_files", return_value=fake_gitignore), \
-         patch("ddev.cli.size.status.is_valid_integration", side_effect=lambda path, folder, ignored, git_ignore: path.startswith("integration")), \
-         patch("ddev.cli.size.status.compress", side_effect=fake_compress):
+    with (
+        patch("os.walk", return_value=mock_files),
+        patch("os.path.relpath", side_effect=lambda path, _: path.replace("root/", "")),
+        patch("ddev.cli.size.status.get_gitignore_files", return_value=fake_gitignore),
+        patch(
+            "ddev.cli.size.status.is_valid_integration",
+            side_effect=lambda path, folder, ignored, git_ignore: path.startswith("integration"),
+        ),
+        patch("ddev.cli.size.status.compress", side_effect=fake_compress),
+    ):
 
         result = get_compressed_files(mock_app)
 
@@ -109,6 +114,7 @@ def test_status_wrong_version(ddev):
     result = ddev('size', 'status', '--platform', 'linux-aarch64', '--python', '2.10', '--compressed')
     assert result.exit_code != 0
 
+
 def test_status_wrong_plat_and_version(ddev):
     result = ddev('size', 'status', '--platform', 'linux', '--python', '2.10', '--compressed')
     assert result.exit_code != 0
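The reworked mocks in this patch rely on parenthesized context managers (Python 3.10+), which let several patch() calls share one with statement without backslash continuations. A minimal illustration with two unrelated mocks:

    import os.path
    from unittest.mock import mock_open, patch

    with (
        patch("os.path.exists", return_value=True),
        patch("builtins.open", mock_open(read_data="data")),
    ):
        assert os.path.exists("anything")
        with open("anything") as handle:
            assert handle.read() == "data"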

From 76c1f09cc1bd4e5904c721bdc763b293a0d4c56a Mon Sep 17 00:00:00 2001
From: Lucia Sanchez Bella <lucia.sanchezbella@datadoghq.com>
Date: Thu, 27 Mar 2025 12:55:33 +0100
Subject: [PATCH 06/40] final diff version

---
 ddev/src/ddev/cli/size/GitRepo.py  |  27 ----
 ddev/src/ddev/cli/size/common.py   |  95 ++++++--------
 ddev/src/ddev/cli/size/diff.py     | 109 ++++++++++------
 ddev/src/ddev/cli/size/status.py   |  32 ++---
 ddev/tests/cli/size/test_common.py |  23 ++--
 ddev/tests/cli/size/test_diff.py   | 200 +++++++++++++++++++++++------
 ddev/tests/cli/size/test_status.py |  46 +++++--
 7 files changed, 336 insertions(+), 196 deletions(-)
 delete mode 100644 ddev/src/ddev/cli/size/GitRepo.py

diff --git a/ddev/src/ddev/cli/size/GitRepo.py b/ddev/src/ddev/cli/size/GitRepo.py
deleted file mode 100644
index 112ee0c15d418..0000000000000
--- a/ddev/src/ddev/cli/size/GitRepo.py
+++ /dev/null
@@ -1,27 +0,0 @@
-import os
-import shutil
-import subprocess
-import tempfile
-
-
-class GitRepo:
-    def __init__(self, url):
-        self.url = url
-        self.repo_dir = None
-
-    def __enter__(self):
-        self.repo_dir = tempfile.mkdtemp()
-        self._run("git init")
-        self._run(f"git remote add origin {self.url}")
-        return self
-
-    def _run(self, cmd):
-        subprocess.run(cmd, shell=True, cwd=self.repo_dir, check=True)
-
-    def checkout_commit(self, commit):
-        self._run(f"git fetch --depth 1 origin {commit}")
-        self._run(f"git checkout {commit}")
-
-    def __exit__(self, exception_type, exception_value, exception_traceback):
-        if self.repo_dir and os.path.exists(self.repo_dir):
-            shutil.rmtree(self.repo_dir)
diff --git a/ddev/src/ddev/cli/size/common.py b/ddev/src/ddev/cli/size/common.py
index 6a2c83f7a7c38..983bd443ea016 100644
--- a/ddev/src/ddev/cli/size/common.py
+++ b/ddev/src/ddev/cli/size/common.py
@@ -3,13 +3,10 @@
 # Licensed under a 3-clause BSD style license (see LICENSE)
 import os
 import re
-import sys
 import zlib
 
 import requests
 
-# utilities
-
 
 # check if it exists
 def convert_size(size_bytes):
@@ -43,7 +40,7 @@ def is_correct_dependency(platform, version, name):
 
 
 def print_csv(app, i, modules):
-    headers = modules[0].keys()
+    headers = [k for k in modules[0].keys() if k != 'Size']
     if i == 0:
         app.display(",".join(headers))
 
@@ -59,41 +56,37 @@ def format(s):
 
 
 def print_table(app, modules, platform, version):
-    modules_table = {col: {} for col in modules[0].keys()}
+    modules_table = {col: {} for col in modules[0].keys() if col != 'Size (Bytes)'}
     for i, row in enumerate(modules):
         for key, value in row.items():
-            modules_table[key][i] = str(value)
+            if key in modules_table:
+                modules_table[key][i] = str(value)
     app.display_table(platform + " " + version, modules_table)
 
 
-def get_dependencies_sizes(app, deps, download_urls):
+def get_dependencies_sizes(deps, download_urls):
     file_data = []
     for dep, url in zip(deps, download_urls, strict=False):
         dep_response = requests.head(url)
-        if dep_response.status_code != 200:
-            app.display_error(f"Error {dep_response.status_code}: Unable to fetch the dependencies file")
-            sys.exit(1)
-        else:
-            size = dep_response.headers.get("Content-Length", None)
-            file_data.append({"File Path": dep, "Type": "Dependency", "Name": dep, "Size (Bytes)": int(size)})
+        dep_response.raise_for_status()
+        size = dep_response.headers.get("Content-Length", None)
+        file_data.append({"File Path": dep, "Type": "Dependency", "Name": dep, "Size (Bytes)": int(size)})
 
     return file_data
 
 
-def get_dependencies(app, file_path):
+def get_dependencies(file_path):
     download_urls = []
     deps = []
-    try:
-        with open(file_path, "r", encoding="utf-8") as file:
-            file_content = file.read()
-            for line in file_content.splitlines():
-                match = re.search(r"([\w\-\d\.]+) @ (https?://[^\s#]+)", line)
-                if match:
-                    deps.append(match.group(1))
-                    download_urls.append(match.group(2))
-    except Exception as e:
-        app.display_error(f"Error reading file {file_path}: {e}")
-        sys.exit(1)
+    with open(file_path, "r", encoding="utf-8") as file:
+        file_content = file.read()
+        for line in file_content.splitlines():
+            match = re.search(r"([\w\-\d\.]+) @ (https?://[^\s#]+)", line)
+            if match:
+                deps.append(match.group(1))
+                download_urls.append(match.group(2))
+            else:
+                raise WrongDependencyFormat("The dependency format 'name @ link' is no longer supported.")
 
     return deps, download_urls
 
@@ -118,36 +111,28 @@ def group_modules(modules, platform, version):
     ]
 
 
-def get_gitignore_files(app, repo_path):
+def get_gitignore_files(repo_path):
     gitignore_path = os.path.join(repo_path, ".gitignore")
-    if not os.path.exists(gitignore_path):
-        app.display_error(f"Error: .gitignore file not found at {gitignore_path}")
-        sys.exit(1)
-
-    try:
-        with open(gitignore_path, "r", encoding="utf-8") as file:
-            gitignore_content = file.read()
-            ignored_patterns = [
-                line.strip() for line in gitignore_content.splitlines() if line.strip() and not line.startswith("#")
-            ]
-            return ignored_patterns
-    except Exception as e:
-        app.display_error(f"Error reading .gitignore file: {e}")
-        sys.exit(1)
-
-
-def compress(app, file_path, relative_path):
+    with open(gitignore_path, "r", encoding="utf-8") as file:
+        gitignore_content = file.read()
+        ignored_patterns = [
+            line.strip() for line in gitignore_content.splitlines() if line.strip() and not line.startswith("#")
+        ]
+        return ignored_patterns
+
+
+def compress(file_path):
     compressor = zlib.compressobj()
     compressed_size = 0
-    try:
-        # original_size = os.path.getsize(file_path)
-        with open(file_path, "rb") as f:
-            while chunk := f.read(8192):  # Read in 8KB chunks
-                compressed_chunk = compressor.compress(chunk)
-                compressed_size += len(compressed_chunk)
-
-            compressed_size += len(compressor.flush())
-        return compressed_size
-    except Exception as e:
-        app.display_error(f"Error processing {relative_path}: {e}")
-        sys.exit(1)
+    # original_size = os.path.getsize(file_path)
+    with open(file_path, "rb") as f:
+        while chunk := f.read(8192):  # Read in 8KB chunks
+            compressed_chunk = compressor.compress(chunk)
+            compressed_size += len(compressed_chunk)
+        compressed_size += len(compressor.flush())
+    return compressed_size
+
+
+class WrongDependencyFormat(Exception):
+    def __init__(self, mensaje):
+        super().__init__(mensaje)
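With this change, any line of a resolved-deps file that does not follow the 'name @ link' convention raises WrongDependencyFormat instead of being skipped silently. A condensed sketch of the parsing rule on a single line (the exception name and message mirror the patch; the function name is illustrative):

    import re

    class WrongDependencyFormat(Exception):
        pass

    def parse_dependency_line(line):
        # Expect "<name> @ <url>"; anything else is treated as malformed.
        match = re.search(r"([\w\-\d\.]+) @ (https?://[^\s#]+)", line)
        if not match:
            raise WrongDependencyFormat("The dependency format 'name @ link' is no longer supported.")
        return match.group(1), match.group(2)

    assert parse_dependency_line("dependency1 @ https://example.com/dependency1.whl") == (
        "dependency1",
        "https://example.com/dependency1.whl",
    )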
diff --git a/ddev/src/ddev/cli/size/diff.py b/ddev/src/ddev/cli/size/diff.py
index a18dc313ec531..8ce12e8ea8ae2 100644
--- a/ddev/src/ddev/cli/size/diff.py
+++ b/ddev/src/ddev/cli/size/diff.py
@@ -3,7 +3,9 @@
 # Licensed under a 3-clause BSD style license (see LICENSE)
 
 import os
-import sys
+import shutil
+import subprocess
+import tempfile
 from pathlib import Path
 
 import click
@@ -19,7 +21,6 @@
     print_csv,
     print_table,
 )
-from .GitRepo import GitRepo
 
 VALID_PLATFORMS = ["linux-aarch64", "linux-x86_64", "macos-x86_64", "windows-x86_64"]
 VALID_PYTHON_VERSIONS = ["3.12"]
@@ -34,37 +35,50 @@
 @click.option('--csv', is_flag=True, help="Output in CSV format")
 @click.pass_obj
 def diff(app, before, after, platform, version, compressed, csv):
-    platforms = VALID_PLATFORMS if platform is None else [platform]
-    versions = VALID_PYTHON_VERSIONS if version is None else [version]
+    try:
+        platforms = VALID_PLATFORMS if platform is None else [platform]
+        versions = VALID_PYTHON_VERSIONS if version is None else [version]
 
-    for i, (plat, ver) in enumerate([(p, v) for p in platforms for v in versions]):
-        diff_mode(app, before, after, plat, ver, compressed, csv, i)
+        for i, (plat, ver) in enumerate([(p, v) for p in platforms for v in versions]):
+            diff_mode(app, before, after, plat, ver, compressed, csv, i)
+    except Exception as e:
+        app.abort(str(e))
 
 
 def diff_mode(app, before, after, platform, version, compressed, csv, i):
+    url = "https://github.com/DataDog/integrations-core.git"
     if compressed:
-        with GitRepo("https://github.com/DataDog/integrations-core.git") as gitRepo:
-            repo = gitRepo.repo_dir
-            gitRepo.checkout_commit(before)
-            files_b = get_compressed_files(app, repo)
-            dependencies_b = get_compressed_dependencies(app, repo, platform, version)
-            gitRepo.checkout_commit(after)
-            files_a = get_compressed_files(app, repo)
-            dependencies_a = get_compressed_dependencies(app, repo, platform, version)
+        files_b, dependencies_b, files_a, dependencies_a = get_repo_info(url, platform, version, before, after)
 
         integrations = get_diff(files_b, files_a, 'Integration')
         dependencies = get_diff(dependencies_b, dependencies_a, 'Dependency')
-
         grouped_modules = group_modules(integrations + dependencies, platform, version)
-        grouped_modules.sort(key=lambda x: x['Size (Bytes)'], reverse=True)
+        grouped_modules.sort(key=lambda x: abs(x['Size (Bytes)']), reverse=True)
         for module in grouped_modules:
             if module['Size (Bytes)'] > 0:
                 module['Size'] = f"+{module['Size']}"
-
-        if csv:
-            print_csv(app, i, grouped_modules)
+        if grouped_modules == []:
+            app.display("No size differences were detected between the selected commits.")
         else:
-            print_table(app, grouped_modules, platform, version)
+            if csv:
+                print_csv(app, i, grouped_modules)
+            else:
+                print_table(app, grouped_modules, platform, version)
+
+
+def get_repo_info(repo_url, platform, version, before, after):
+    with GitRepo(repo_url) as gitRepo:
+        repo = gitRepo.repo_dir
+
+        gitRepo.checkout_commit(before)
+        files_b = get_compressed_files(repo)
+        dependencies_b = get_compressed_dependencies(repo, platform, version)
+
+        gitRepo.checkout_commit(after)
+        files_a = get_compressed_files(repo)
+        dependencies_a = get_compressed_dependencies(repo, platform, version)
+
+    return files_b, dependencies_b, files_a, dependencies_a
 
 
 def get_diff(size_before, size_after, type):
@@ -108,10 +122,10 @@ def get_diff(size_before, size_after, type):
     return diff_files
 
 
-def get_compressed_files(app, repo_path):
+def get_compressed_files(repo_path):
 
     ignored_files = {"datadog_checks_dev", "datadog_checks_tests_helper"}
-    git_ignore = get_gitignore_files(app, repo_path)
+    git_ignore = get_gitignore_files(repo_path)
     included_folder = "datadog_checks/"
 
     file_data = {}
@@ -124,37 +138,58 @@ def get_compressed_files(app, repo_path):
 
             # Filter files
             if is_valid_integration(relative_path, included_folder, ignored_files, git_ignore):
-                compressed_size = compress(app, file_path, relative_path)
+                compressed_size = compress(file_path)
                 file_data[relative_path] = compressed_size
     return file_data
 
 
-def get_compressed_dependencies(app, repo_path, platform, version):
+def get_compressed_dependencies(repo_path, platform, version):
 
     resolved_path = os.path.join(repo_path, ".deps/resolved")
 
-    if not os.path.exists(resolved_path) or not os.path.isdir(resolved_path):
-        app.display_error(f"Error: Directory not found {resolved_path}")
-        sys.exit(1)
-
     for filename in os.listdir(resolved_path):
         file_path = os.path.join(resolved_path, filename)
 
         if os.path.isfile(file_path) and is_correct_dependency(platform, version, filename):
-            deps, download_urls = get_dependencies(app, file_path)
-            return get_dependencies_sizes(app, deps, download_urls)
+            deps, download_urls = get_dependencies(file_path)
+            return get_dependencies_sizes(deps, download_urls)
     return {}
 
 
-def get_dependencies_sizes(app, deps, download_urls):
+def get_dependencies_sizes(deps, download_urls):
     file_data = {}
     for dep, url in zip(deps, download_urls, strict=False):
         dep_response = requests.head(url)
-        if dep_response.status_code != 200:
-            app.display_error(f"Error {dep_response.status_code}: Unable to fetch the dependencies file")
-            sys.exit(1)
-        else:
-            size = dep_response.headers.get("Content-Length", None)
-            file_data[dep] = int(size)
+        dep_response.raise_for_status()
+        size = dep_response.headers.get("Content-Length", None)
+        file_data[dep] = int(size)
 
     return file_data
+
+
+class GitRepo:
+    def __init__(self, url):
+        self.url = url
+        self.repo_dir = None
+
+    def __enter__(self):
+        self.repo_dir = tempfile.mkdtemp()
+        self._run("git init --quiet")
+        self._run(f"git remote add origin {self.url}")
+        return self
+
+    def _run(self, cmd):
+        subprocess.run(
+            cmd,
+            shell=True,
+            cwd=self.repo_dir,
+            check=True,
+        )
+
+    def checkout_commit(self, commit):
+        self._run(f"git fetch --quiet --depth 1 origin {commit}")
+        self._run(f"git checkout --quiet {commit}")
+
+    def __exit__(self, exception_type, exception_value, exception_traceback):
+        if self.repo_dir and os.path.exists(self.repo_dir):
+            shutil.rmtree(self.repo_dir)
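The presentation change in diff_mode sorts by the absolute size delta, so large shrinkages rank next to large growths, and then prefixes a '+' to anything that grew. A self-contained sketch of that post-processing (values are illustrative):

    modules = [
        {"Name": "dep1", "Size (Bytes)": -1500, "Size": "-1500 B"},
        {"Name": "integration", "Size (Bytes)": 200, "Size": "200 B"},
    ]

    # Largest change first regardless of sign, then mark growth with a leading '+'.
    modules.sort(key=lambda m: abs(m["Size (Bytes)"]), reverse=True)
    for module in modules:
        if module["Size (Bytes)"] > 0:
            module["Size"] = f"+{module['Size']}"

    assert [m["Size"] for m in modules] == ["-1500 B", "+200 B"]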
diff --git a/ddev/src/ddev/cli/size/status.py b/ddev/src/ddev/cli/size/status.py
index f804233f4a406..f6d44eb5068bd 100644
--- a/ddev/src/ddev/cli/size/status.py
+++ b/ddev/src/ddev/cli/size/status.py
@@ -3,7 +3,6 @@
 # Licensed under a 3-clause BSD style license (see LICENSE)
 
 import os
-import sys
 from pathlib import Path
 
 import click
@@ -32,16 +31,19 @@
 @click.option('--csv', is_flag=True, help="Output in CSV format")
 @click.pass_obj
 def status(app, platform, version, compressed, csv):
-    platforms = VALID_PLATFORMS if platform is None else [platform]
-    versions = VALID_PYTHON_VERSIONS if version is None else [version]
+    try:
+        platforms = VALID_PLATFORMS if platform is None else [platform]
+        versions = VALID_PYTHON_VERSIONS if version is None else [version]
 
-    for i, (plat, ver) in enumerate([(p, v) for p in platforms for v in versions]):
-        status_mode(app, plat, ver, compressed, csv, i)
+        for i, (plat, ver) in enumerate([(p, v) for p in platforms for v in versions]):
+            status_mode(app, plat, ver, compressed, csv, i)
+    except Exception as e:
+        app.abort(str(e))
 
 
 def status_mode(app, platform, version, compressed, csv, i):
     if compressed:
-        modules = get_compressed_files(app) + get_compressed_dependencies(app, platform, version)
+        modules = get_compressed_files() + get_compressed_dependencies(platform, version)
 
         grouped_modules = group_modules(modules, platform, version)
         grouped_modules.sort(key=lambda x: x['Size (Bytes)'], reverse=True)
@@ -52,10 +54,10 @@ def status_mode(app, platform, version, compressed, csv, i):
             print_table(app, grouped_modules, platform, version)
 
 
-def get_compressed_files(app):
+def get_compressed_files():
 
     ignored_files = {"datadog_checks_dev", "datadog_checks_tests_helper"}
-    git_ignore = get_gitignore_files(app, REPO_PATH)
+    git_ignore = get_gitignore_files(REPO_PATH)
     included_folder = "datadog_checks/"
 
     file_data = []
@@ -68,7 +70,7 @@ def get_compressed_files(app):
 
             # Filter files
             if is_valid_integration(relative_path, included_folder, ignored_files, git_ignore):
-                compressed_size = compress(app, file_path, relative_path)
+                compressed_size = compress(file_path)
                 integration = relative_path.split(os.sep)[0]
                 file_data.append(
                     {
@@ -81,17 +83,11 @@ def get_compressed_files(app):
     return file_data
 
 
-def get_compressed_dependencies(app, platform, version):
+def get_compressed_dependencies(platform, version):
 
     resolved_path = os.path.join(REPO_PATH, ".deps/resolved")
-
-    if not os.path.exists(resolved_path) or not os.path.isdir(resolved_path):
-        app.display_error(f"Error: Directory not found {resolved_path}")
-        sys.exit(1)
-
     for filename in os.listdir(resolved_path):
         file_path = os.path.join(resolved_path, filename)
-
         if os.path.isfile(file_path) and is_correct_dependency(platform, version, filename):
-            deps, download_urls = get_dependencies(app, file_path)
-            return get_dependencies_sizes(app, deps, download_urls)
+            deps, download_urls = get_dependencies(file_path)
+            return get_dependencies_sizes(deps, download_urls)
diff --git a/ddev/tests/cli/size/test_common.py b/ddev/tests/cli/size/test_common.py
index 8d218fab237e1..839ef2f9b7627 100644
--- a/ddev/tests/cli/size/test_common.py
+++ b/ddev/tests/cli/size/test_common.py
@@ -1,6 +1,6 @@
 from unittest.mock import MagicMock, mock_open, patch
 
-from ddev.cli.size.status import (
+from ddev.cli.size.common import (
     compress,
     convert_size,
     get_dependencies,
@@ -37,23 +37,23 @@ def test_is_valid_integration():
     assert not is_valid_integration(".git/config", included_folder, ignored_files, git_ignore)
 
 
-def test_get_dependencies(terminal):
+def test_get_dependencies():
     file_content = (
         "dependency1 @ https://example.com/dependency1.whl\ndependency2 @ https://example.com/dependency2.whl"
     )
     mock_open_obj = mock_open(read_data=file_content)
     with patch("builtins.open", mock_open_obj):
-        deps, urls = get_dependencies(terminal, "fake_path")
+        deps, urls = get_dependencies("fake_path")
     assert deps == ["dependency1", "dependency2"]
     assert urls == ["https://example.com/dependency1.whl", "https://example.com/dependency2.whl"]
 
 
-def test_get_dependencies_sizes(terminal):
+def test_get_dependencies_sizes():
     mock_response = MagicMock()
     mock_response.status_code = 200
     mock_response.headers = {"Content-Length": "12345"}
     with patch("requests.head", return_value=mock_response):
-        file_data = get_dependencies_sizes(terminal, ["dependency1"], ["https://example.com/dependency1.whl"])
+        file_data = get_dependencies_sizes(["dependency1"], ["https://example.com/dependency1.whl"])
     assert file_data == [
         {"File Path": "dependency1", "Type": "Dependency", "Name": "dependency1", "Size (Bytes)": 12345}
     ]
@@ -100,23 +100,22 @@ def test_group_modules():
     assert group_modules(modules, platform, version) == expected_output
 
 
-def test_get_gitignore_files(terminal):
+def test_get_gitignore_files():
     mock_gitignore = "__pycache__/\n*.log\n"  # Sample .gitignore file
     repo_path = "/fake/repo"
     with patch("builtins.open", mock_open(read_data=mock_gitignore)):
         with patch("os.path.exists", return_value=True):
-            ignored_patterns = get_gitignore_files(terminal, repo_path)
+            ignored_patterns = get_gitignore_files(repo_path)
     assert ignored_patterns == ["__pycache__/", "*.log"]
 
 
 def test_compress():
-    mock_app = MagicMock()
     fake_content = b'a' * 16384
     original_size = len(fake_content)
 
     m = mock_open(read_data=fake_content)
     with patch("builtins.open", m):
-        compressed_size = compress(mock_app, "fake/path/file.py", "relative/path/file.py")
+        compressed_size = compress("fake/path/file.py")
 
     assert isinstance(compressed_size, int)
     assert compressed_size > 0
@@ -126,14 +125,14 @@ def test_compress():
 def test_print_csv():
     mock_app = MagicMock()
     modules = [
-        {"Name": "module1", "Size": 123},
-        {"Name": "module,with,comma", "Size": 456},
+        {"Name": "module1", "Size B": 123, "Size": "2 B"},
+        {"Name": "module,with,comma", "Size B": 456, "Size": "2 B"},
     ]
 
     print_csv(mock_app, i=0, modules=modules)
 
     expected_calls = [
-        (("Name,Size",),),
+        (("Name,Size B",),),
         (('module1,123',),),
         (('"module,with,comma",456',),),
     ]
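The adjusted expectation above reflects the new column filtering: print_csv drops the human-readable 'Size' column and keeps the raw byte count, while print_table does the opposite. A sketch of the two header filters using the column names the real helpers produce (the test above uses 'Size B' as a stand-in):

    modules = [{"Name": "module1", "Size (Bytes)": 123, "Size": "123 B"}]

    csv_headers = [k for k in modules[0].keys() if k != "Size"]            # CSV keeps raw bytes
    table_headers = [k for k in modules[0].keys() if k != "Size (Bytes)"]  # table keeps readable size

    assert csv_headers == ["Name", "Size (Bytes)"]
    assert table_headers == ["Name", "Size"]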
diff --git a/ddev/tests/cli/size/test_diff.py b/ddev/tests/cli/size/test_diff.py
index 4db01ab8ed3d5..1de5cb1987056 100644
--- a/ddev/tests/cli/size/test_diff.py
+++ b/ddev/tests/cli/size/test_diff.py
@@ -2,13 +2,12 @@
 # All rights reserved
 # Licensed under a 3-clause BSD style license (see LICENSE)
 
-from unittest.mock import MagicMock, mock_open, patch
-
+from unittest.mock import MagicMock, mock_open, patch, Mock
+import pytest
 from ddev.cli.size.diff import get_compressed_dependencies, get_compressed_files, get_diff
 
 
 def test_get_compressed_files():
-    mock_app = MagicMock()
     mock_repo_path = "root"
 
     mock_files = [
@@ -17,7 +16,7 @@ def test_get_compressed_files():
         ("root", [], ["ignored.py"]),
     ]
 
-    def fake_compress(app, file_path, relative_path):
+    def fake_compress(file_path):
         return 1000
 
     fake_gitignore = {"ignored.py"}
@@ -35,7 +34,7 @@ def fake_compress(app, file_path, relative_path):
         patch("ddev.cli.size.diff.compress", side_effect=fake_compress),
     ):
 
-        result = get_compressed_files(mock_app, mock_repo_path)
+        result = get_compressed_files(mock_repo_path)
 
     expected = {
         "integration/datadog_checks/file1.py": 1000,
@@ -68,45 +67,170 @@ def test_get_compressed_dependencies(terminal):
         patch("requests.head", return_value=mock_response),
     ):
 
-        file_data = get_compressed_dependencies(terminal, mock_repo_path, platform, version)
+        file_data = get_compressed_dependencies(mock_repo_path, platform, version)
 
     assert file_data == {
         "dependency1": 12345,
         "dependency2": 12345,
     }
 
-    def test_get_diff():
-        size_before = {
-            "integration/foo.py": 1000,
-            "integration/bar.py": 2000,
-            "integration/deleted.py": 1500,
-        }
-        size_after = {
-            "integration/foo.py": 1200,  # modified
-            "integration/bar.py": 2000,  # unchanged
-            "integration/new.py": 800,  # new
-        }
-
-        expected = [
-            {
-                "File Path": "integration/foo.py",
-                "Type": "Integration",
-                "Name": "integration",
-                "Size (Bytes)": 200,
-            },
-            {
-                "File Path": "integration/deleted.py",
-                "Type": "Integration",
-                "Name": "integration (DELETED)",
-                "Size (Bytes)": -1500,
+
+def test_get_diff():
+    size_before = {
+        "integration/foo.py": 1000,
+        "integration/bar.py": 2000,
+        "integration/deleted.py": 1500,
+    }
+    size_after = {
+        "integration/foo.py": 1200,  # modified
+        "integration/bar.py": 2000,  # unchanged
+        "integration/new.py": 800,  # new
+    }
+
+    expected = [
+        {
+            "File Path": "integration/foo.py",
+            "Type": "Integration",
+            "Name": "integration",
+            "Size (Bytes)": 200,
+        },
+        {
+            "File Path": "integration/deleted.py",
+            "Type": "Integration",
+            "Name": "integration (DELETED)",
+            "Size (Bytes)": -1500,
+        },
+        {
+            "File Path": "integration/new.py",
+            "Type": "Integration",
+            "Name": "integration (NEW)",
+            "Size (Bytes)": 800,
+        },
+    ]
+
+    result = get_diff(size_before, size_after, "Integration")
+    assert sorted(result, key=lambda x: x["File Path"]) == sorted(expected, key=lambda x: x["File Path"])
+
+
+@pytest.fixture
+def mock_size_diff_dependencies():
+    mock_git_repo = MagicMock()
+    mock_git_repo.repo_dir = "/tmp/fake_repo"
+
+    def get_compressed_files_side_effect(_):
+        get_compressed_files_side_effect.counter += 1
+        if get_compressed_files_side_effect.counter % 2 == 1:
+            return {"path1.py": 1000}  # before
+        else:
+            return {"path1.py": 1200, "path2.py": 500}  # after
+
+    get_compressed_files_side_effect.counter = 0
+
+    def get_compressed_dependencies_side_effect(_, __, ___):
+        get_compressed_dependencies_side_effect.counter += 1
+        if get_compressed_dependencies_side_effect.counter % 2 == 1:
+            return {"dep1.whl": 2000}  # before
+        else:
+            return {"dep1.whl": 2500, "dep2.whl": 1000}  # after
+
+    get_compressed_dependencies_side_effect.counter = 0
+
+    with (
+        patch("ddev.cli.size.diff.GitRepo.__enter__", return_value=mock_git_repo),
+        patch("ddev.cli.size.diff.GitRepo.__exit__", return_value=None),
+        patch("ddev.cli.size.diff.GitRepo.checkout_commit"),
+        patch("tempfile.mkdtemp", return_value="/tmp/fake_repo"),
+        patch("ddev.cli.size.diff.get_compressed_files", side_effect=get_compressed_files_side_effect),
+        patch("ddev.cli.size.diff.get_compressed_dependencies", side_effect=get_compressed_dependencies_side_effect),
+        patch("ddev.cli.size.common.group_modules", side_effect=lambda m, *_: m),
+        patch("ddev.cli.size.common.print_csv"),
+        patch("ddev.cli.size.common.print_table"),
+    ):
+        yield
+
+
+def test_diff_no_args(ddev, mock_size_diff_dependencies):
+    result = ddev('size', 'diff', 'commit1', 'commit2', '--compressed')
+    print("Exit code:", result.exit_code)
+    print("Output:\n", result.output)
+    print("Exception:", result.exception)
+    assert result.exit_code == 0
+
+
+def test_diff_with_platform_and_version(ddev, mock_size_diff_dependencies):
+    result = ddev(
+        'size', 'diff', 'commit1', 'commit2', '--platform', 'linux-aarch64', '--python', '3.12', '--compressed'
+    )
+    assert result.exit_code == 0
+
+
+def test_diff_csv(ddev, mock_size_diff_dependencies):
+    result = ddev(
+        'size', 'diff', 'commit1', 'commit2', '--platform', 'linux-aarch64', '--python', '3.12', '--compressed', '--csv'
+    )
+    assert result.exit_code == 0
+
+
+
+
+from unittest.mock import patch, MagicMock
+
+def test_diff_no_differences(ddev):
+    fake_repo = MagicMock()
+    
+    with (
+        patch("ddev.cli.size.diff.GitRepo.__enter__", return_value=fake_repo),
+        patch("ddev.cli.size.diff.GitRepo.__exit__", return_value=None),
+        patch.object(fake_repo, "checkout_commit"),  
+        patch("tempfile.mkdtemp", return_value="/tmp/fake_repo"),
+        patch(
+            "ddev.cli.size.diff.get_compressed_files",
+            return_value={
+                "path1.py": 1000,
+                "path2.py": 500,
             },
-            {
-                "File Path": "integration/new.py",
-                "Type": "Integration",
-                "Name": "integration (NEW)",
-                "Size (Bytes)": 800,
+        ),
+        patch(
+            "ddev.cli.size.diff.get_compressed_dependencies",
+            return_value={
+                "dep1.whl": 2000,
+                "dep2.whl": 1000,
             },
-        ]
+        ),
+        patch("ddev.cli.size.common.group_modules", side_effect=lambda m, *_: m),
+    ):
+        result = ddev(
+            'size', 'diff', 'commit1', 'commit2', '--platform', 'linux-aarch64', '--python', '3.12', '--compressed'
+        )
+        print(result.output)
+        print(result.exit_code)
+
+    assert result.exit_code == 0
+
+
+
+def test_diff_invalid_platform(ddev):
+    result = ddev(
+        'size', 'diff', 'commit1', 'commit2', '--platform', 'linux', '--python', '3.12', '--compressed'  # invalid platform
+    )
+    assert result.exit_code != 0
+
+
+def test_diff_invalid_version(ddev):
+    result = ddev(
+        'size',
+        'diff',
+        'commit1',
+        'commit2',
+        '--platform',
+        'linux-aarch64',
+        '--python',
+        '2.10',  # invalid version
+        '--compressed',
+    )
+    assert result.exit_code != 0
+
 
-        result = get_diff(size_before, size_after, "Integration")
-        assert sorted(result, key=lambda x: x["File Path"]) == sorted(expected, key=lambda x: x["File Path"])
+def test_diff_invalid_platform_and_version(ddev):
+    result = ddev('size', 'diff', 'commit1', 'commit2', '--platform', 'linux', '--python', '2.10', '--compressed')
+    assert result.exit_code != 0
diff --git a/ddev/tests/cli/size/test_status.py b/ddev/tests/cli/size/test_status.py
index fb2436725705d..22031ad0d5e52 100644
--- a/ddev/tests/cli/size/test_status.py
+++ b/ddev/tests/cli/size/test_status.py
@@ -4,6 +4,8 @@
 
 from unittest.mock import MagicMock, mock_open, patch
 
+import pytest
+
 from ddev.cli.size.status import (
     get_compressed_dependencies,
     get_compressed_files,
@@ -11,15 +13,13 @@
 
 
 def test_get_compressed_files():
-    mock_app = MagicMock()
-
     mock_files = [
         ("root/integration/datadog_checks", [], ["file1.py", "file2.py"]),
         ("root/integration_b/datadog_checks", [], ["file3.py"]),
         ("root", [], ["ignored.py"]),
     ]
 
-    def fake_compress(app, file_path, relative_path):
+    def fake_compress(file_path):
         return 1000
 
     fake_gitignore = {"ignored.py"}
@@ -35,7 +35,7 @@ def fake_compress(app, file_path, relative_path):
         patch("ddev.cli.size.status.compress", side_effect=fake_compress),
     ):
 
-        result = get_compressed_files(mock_app)
+        result = get_compressed_files()
 
     expected = [
         {
@@ -61,7 +61,7 @@ def fake_compress(app, file_path, relative_path):
     assert result == expected
 
 
-def test_get_compressed_dependencies(terminal):
+def test_get_compressed_dependencies():
     platform = "windows-x86_64"
     version = "3.12"
 
@@ -82,7 +82,7 @@ def test_get_compressed_dependencies(terminal):
         patch("requests.head", return_value=mock_response),
     ):
 
-        file_data = get_compressed_dependencies(terminal, platform, version)
+        file_data = get_compressed_dependencies(platform, version)
 
     assert file_data == [
         {"File Path": "dependency1", "Type": "Dependency", "Name": "dependency1", "Size (Bytes)": 12345},
@@ -90,17 +90,45 @@ def test_get_compressed_dependencies(terminal):
     ]
 
 
-def test_status_no_args(ddev):
+@pytest.fixture()
+def mock_size_status():
+    with (
+        patch("ddev.cli.size.status.get_gitignore_files", return_value=set()),
+        patch("ddev.cli.size.status.compress", return_value=1234),
+        patch("ddev.cli.size.status.get_dependencies", return_value=(["dep1"], {"dep1": "https://example.com/dep1"})),
+        patch(
+            "ddev.cli.size.status.get_dependencies_sizes",
+            return_value=[
+                {"File Path": "dep1.whl", "Type": "Dependency", "Name": "dep1", "Size (Bytes)": 5678},
+            ],
+        ),
+        patch("ddev.cli.size.status.is_valid_integration", return_value=True),
+        patch("ddev.cli.size.status.is_correct_dependency", return_value=True),
+        patch("ddev.cli.size.status.print_csv"),
+        patch("ddev.cli.size.status.print_table"),
+        patch(
+            "os.walk",
+            return_value=[
+                ("datadog_checks/my_check", [], ["__init__.py"]),
+            ],
+        ),
+        patch("os.listdir", return_value=["fake_dep.whl"]),
+        patch("os.path.isfile", return_value=True),
+    ):
+        yield
+
+
+def test_status_no_args(ddev, mock_size_status):
     result = ddev('size', 'status', '--compressed')
     assert result.exit_code == 0
 
 
-def test_status(ddev):
+def test_status(ddev, mock_size_status):
     result = ddev('size', 'status', '--platform', 'linux-aarch64', '--python', '3.12', '--compressed')
     assert result.exit_code == 0
 
 
-def test_status_csv(ddev):
+def test_status_csv(ddev, mock_size_status):
     result = ddev('size', 'status', '--platform', 'linux-aarch64', '--python', '3.12', '--compressed', '--csv')
     assert result.exit_code == 0
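
For reference, the ddev fixture used by these tests is assumed to wrap something equivalent to click's CliRunner; the sketch below is illustrative only, with a MagicMock standing in for the application object that ddev normally injects.

from unittest.mock import MagicMock

from click.testing import CliRunner

from ddev.cli.size.status import status

# Minimal sketch, assuming the ddev fixture behaves like a click test runner.
# The MagicMock below is a stand-in for ddev's real application object.
runner = CliRunner()
result = runner.invoke(status, ["--compressed"], obj=MagicMock())
print(result.exit_code)
print(result.output)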
 

From 5aa1f49c185a126dfd4029a1374e785fdba61cb7 Mon Sep 17 00:00:00 2001
From: Lucia Sanchez Bella <lucia.sanchezbella@datadoghq.com>
Date: Thu, 27 Mar 2025 12:59:40 +0100
Subject: [PATCH 07/40] final diff version

---
 ddev/tests/cli/size/test_diff.py | 11 +++++------
 1 file changed, 5 insertions(+), 6 deletions(-)

diff --git a/ddev/tests/cli/size/test_diff.py b/ddev/tests/cli/size/test_diff.py
index 1de5cb1987056..1ba94bfdc6693 100644
--- a/ddev/tests/cli/size/test_diff.py
+++ b/ddev/tests/cli/size/test_diff.py
@@ -2,8 +2,10 @@
 # All rights reserved
 # Licensed under a 3-clause BSD style license (see LICENSE)
 
-from unittest.mock import MagicMock, mock_open, patch, Mock
+from unittest.mock import MagicMock, mock_open, patch
+
 import pytest
+
 from ddev.cli.size.diff import get_compressed_dependencies, get_compressed_files, get_diff
 
 
@@ -173,15 +175,13 @@ def test_diff_csv(ddev, mock_size_diff_dependencies):
 
 
 
-from unittest.mock import patch, MagicMock
-
 def test_diff_no_differences(ddev):
     fake_repo = MagicMock()
-    
+
     with (
         patch("ddev.cli.size.diff.GitRepo.__enter__", return_value=fake_repo),
         patch("ddev.cli.size.diff.GitRepo.__exit__", return_value=None),
-        patch.object(fake_repo, "checkout_commit"),  
+        patch.object(fake_repo, "checkout_commit"),
         patch("tempfile.mkdtemp", return_value="/tmp/fake_repo"),
         patch(
             "ddev.cli.size.diff.get_compressed_files",
@@ -208,7 +208,6 @@ def test_diff_no_differences(ddev):
     assert result.exit_code == 0
 
 
-
 def test_diff_invalid_platform(ddev):
     result = ddev(
         'size', 'diff', 'commit1', 'commit2', '--platform', 'linux', '--python', '3.12', '--compressed'  # invalid platform

From c64d2f9745300de9f4b80caf55132ecafbefa364 Mon Sep 17 00:00:00 2001
From: Lucia Sanchez Bella <lucia.sanchezbella@datadoghq.com>
Date: Tue, 15 Apr 2025 09:47:32 +0200
Subject: [PATCH 08/40] Added timeline mode and uncompressed sizes (#5025)

---
 ddev/src/ddev/cli/size/__init__.py   |   3 +-
 ddev/src/ddev/cli/size/common.py     | 142 +++++++++++--
 ddev/src/ddev/cli/size/diff.py       | 171 +++++++--------
 ddev/src/ddev/cli/size/status.py     |  65 +++---
 ddev/tests/cli/size/test_common.py   |  32 ++-
 ddev/tests/cli/size/test_diff.py     |   8 +-
 ddev/tests/cli/size/test_status.py   |   5 +-
 ddev/tests/cli/size/test_timeline.py | 306 +++++++++++++++++++++++++++
 8 files changed, 595 insertions(+), 137 deletions(-)
 create mode 100644 ddev/tests/cli/size/test_timeline.py

diff --git a/ddev/src/ddev/cli/size/__init__.py b/ddev/src/ddev/cli/size/__init__.py
index 6f3878b2fda35..dc0a07beb809b 100644
--- a/ddev/src/ddev/cli/size/__init__.py
+++ b/ddev/src/ddev/cli/size/__init__.py
@@ -6,7 +6,7 @@
 
 from ddev.cli.size.diff import diff
 from ddev.cli.size.status import status
-
+from ddev.cli.size.timeline import timeline
 
 @click.group(short_help='Get the size of integrations and dependencies by platform and python version')
 def size():
@@ -16,6 +16,7 @@ def size():
 
 size.add_command(status)
 size.add_command(diff)
+size.add_command(timeline)
 
 if __name__ == "__main__":
     size()
diff --git a/ddev/src/ddev/cli/size/common.py b/ddev/src/ddev/cli/size/common.py
index 983bd443ea016..4c2ad720edb7b 100644
--- a/ddev/src/ddev/cli/size/common.py
+++ b/ddev/src/ddev/cli/size/common.py
@@ -4,13 +4,28 @@
 import os
 import re
 import zlib
-
+import shutil
+import subprocess
+import tempfile
 import requests
-
+from pathlib import Path
+import zipfile
+
+
+def valid_platforms_versions(repo_path):
+    resolved_path = os.path.join(repo_path, ".deps/resolved")
+    platforms = []
+    versions = []
+    for file in os.listdir(resolved_path):
+        platforms.append("_".join(file.split('_')[:-1]))
+        match = re.search(r"\d+\.\d+", file)
+        if match:
+            versions.append(match.group())
+    return set(platforms), set(versions)
+    
 
 # TODO: check whether an equivalent helper already exists
 def convert_size(size_bytes):
-    # Transforms bytes into a human-friendly format (KB, MB, GB)
     for unit in [' B', ' KB', ' MB', ' GB']:
         if size_bytes < 1024:
             return str(round(size_bytes, 2)) + unit
@@ -40,8 +55,8 @@ def is_correct_dependency(platform, version, name):
 
 
 def print_csv(app, i, modules):
-    headers = [k for k in modules[0].keys() if k != 'Size']
-    if i == 0:
+    headers = [k for k in modules[0].keys() if k not in ['Size', 'Delta']]
+    if not i:
         app.display(",".join(headers))
 
     for row in modules:
@@ -55,27 +70,45 @@ def format(s):
         return s
 
 
-def print_table(app, modules, platform, version):
-    modules_table = {col: {} for col in modules[0].keys() if col != 'Size (Bytes)'}
+def print_table(app, mode, modules):
+    modules_table = {col: {} for col in modules[0].keys() if '(Bytes)' not in col}
     for i, row in enumerate(modules):
         for key, value in row.items():
             if key in modules_table:
                 modules_table[key][i] = str(value)
-    app.display_table(platform + " " + version, modules_table)
+    app.display_table(mode, modules_table)
 
 
-def get_dependencies_sizes(deps, download_urls):
+def get_dependencies_sizes(deps, download_urls, compressed):
     file_data = []
     for dep, url in zip(deps, download_urls, strict=False):
-        dep_response = requests.head(url)
-        dep_response.raise_for_status()
-        size = dep_response.headers.get("Content-Length", None)
+        if compressed:
+            with requests.get(url, stream=True) as response:
+                response.raise_for_status()
+                size = int(response.headers.get("Content-Length"))
+        else:
+            with requests.get(url, stream=True) as response:
+                response.raise_for_status()
+                wheel_data = response.content
+
+            with tempfile.TemporaryDirectory() as tmpdir:
+                wheel_path = Path(tmpdir) / "package.whl"
+                with open(wheel_path, "wb") as f:
+                    f.write(wheel_data)
+                extract_path = Path(tmpdir) / "extracted"
+                with zipfile.ZipFile(wheel_path, 'r') as zip_ref:
+                    zip_ref.extractall(extract_path)
+
+                size = 0
+                for dirpath, _, filenames in os.walk(extract_path):
+                    for name in filenames:
+                        file_path = os.path.join(dirpath, name)
+                        size += os.path.getsize(file_path)
         file_data.append({"File Path": dep, "Type": "Dependency", "Name": dep, "Size (Bytes)": int(size)})
-
     return file_data
 
 
-def get_dependencies(file_path):
+def get_dependencies_list(file_path):
     download_urls = []
     deps = []
     with open(file_path, "r", encoding="utf-8") as file:
@@ -91,24 +124,34 @@ def get_dependencies(file_path):
     return deps, download_urls
 
 
-def group_modules(modules, platform, version):
+def group_modules(modules, platform, version, i):
     grouped_aux = {}
 
     for file in modules:
         key = (file['Name'], file['Type'])
         grouped_aux[key] = grouped_aux.get(key, 0) + file["Size (Bytes)"]
-
-    return [
+    if i is None:
+        return [
         {
             'Name': name,
             'Type': type,
             'Size (Bytes)': size,
-            'Size': convert_size(size),
-            'Platform': platform,
-            'Version': version,
+            'Size': convert_size(size)
         }
         for (name, type), size in grouped_aux.items()
-    ]
+        ]
+    else: 
+        return [
+            {
+                'Name': name,
+                'Type': type,
+                'Size (Bytes)': size,
+                'Size': convert_size(size),
+                'Platform': platform,
+                'Version': version,
+            }
+            for (name, type), size in grouped_aux.items()
+        ]
 
 
 def get_gitignore_files(repo_path):
@@ -132,7 +175,62 @@ def compress(file_path):
         compressed_size += len(compressor.flush())
     return compressed_size
 
-
 class WrongDependencyFormat(Exception):
     def __init__(self, mensaje):
         super().__init__(mensaje)
+
+class GitRepo:
+    def __init__(self, url):
+        self.url = url
+        self.repo_dir = None
+
+    def __enter__(self):
+        self.repo_dir = tempfile.mkdtemp()
+        try:
+            self._run("git status")
+        except Exception:
+            # If it is not already a repo
+            self._run(f"git clone --quiet {self.url} {self.repo_dir}")
+        return self
+
+    def _run(self, command):
+        result = subprocess.run(command, shell=True, capture_output=True, text=True, check=True, cwd=self.repo_dir)
+        return result.stdout.strip().split('\n')
+
+    def get_module_commits(self, module_path, initial, final, time):
+        self._run("git fetch origin --quiet") # 1 min no coger todo solo el module
+        self._run("git checkout origin/HEAD")
+        if time:
+            return self._run(f'git log --since="{time}" --reverse --pretty=format:%H -- {module_path}')
+        elif not initial and not final:
+            return self._run(f"git log --reverse --pretty=format:%H -- {module_path}")
+        elif not final:
+            return self._run(f"git log --reverse --pretty=format:%H {initial}..HEAD -- {module_path}")
+        else:
+            try:
+                self._run(f"git merge-base --is-ancestor {initial} {final}")
+            except subprocess.CalledProcessError:
+                raise ValueError(f"Commit {initial} does not come before {final}")
+            return self._run(f"git log --reverse --pretty=format:%H {initial}..{final} -- {module_path}")
+           
+
+    def checkout_commit(self, commit):
+        self._run(f"git fetch --quiet --depth 1 origin {commit}")
+        self._run(f"git checkout --quiet {commit}")
+
+    def sparse_checkout_commit(self, commit_sha, module):
+        self._run("git sparse-checkout init --cone") 
+        self._run(f"git sparse-checkout set {module}")
+        self._run(f"git checkout {commit_sha}")
+    
+    def get_commit_metadata(self,commit):
+        result = self._run(f'git log -1 --date=format:"%b %d %Y" --pretty=format:"%ad\n%an\n%s" {commit}')
+        date, author, message = result
+        return date, author, message
+    
+    def get_creation_commit_module(self, module):
+        return self._run(f'git log --reverse --format="%H" -- {module}')[0]
+
+    def __exit__(self, exception_type, exception_value, exception_traceback):
+        if self.repo_dir and os.path.exists(self.repo_dir):
+            shutil.rmtree(self.repo_dir)
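
A hedged usage sketch of the GitRepo helper added above (the repository URL and module name are placeholders; error handling is omitted):

from ddev.cli.size.common import GitRepo

# Illustrative only: clone into a temporary directory, list the commits that
# touched one module, and print their metadata. The clone is removed on exit.
with GitRepo("https://github.com/DataDog/integrations-core.git") as repo:
    commits = repo.get_module_commits("postgres", initial=None, final=None, time=None)
    for sha in commits:
        date, author, message = repo.get_commit_metadata(sha)
        print(sha[:7], date, author, message)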
diff --git a/ddev/src/ddev/cli/size/diff.py b/ddev/src/ddev/cli/size/diff.py
index 8ce12e8ea8ae2..dd3ce4ab624df 100644
--- a/ddev/src/ddev/cli/size/diff.py
+++ b/ddev/src/ddev/cli/size/diff.py
@@ -3,80 +3,89 @@
 # Licensed under a 3-clause BSD style license (see LICENSE)
 
 import os
-import shutil
-import subprocess
-import tempfile
 from pathlib import Path
-
+from rich.console import Console
 import click
 import requests
-
+import tempfile
+import zipfile
 from .common import (
     compress,
-    get_dependencies,
+    valid_platforms_versions,
+    get_dependencies_list,
     get_gitignore_files,
     group_modules,
     is_correct_dependency,
     is_valid_integration,
     print_csv,
     print_table,
+    GitRepo
 )
 
-VALID_PLATFORMS = ["linux-aarch64", "linux-x86_64", "macos-x86_64", "windows-x86_64"]
-VALID_PYTHON_VERSIONS = ["3.12"]
+# VALID_PLATFORMS, VALID_PYTHON_VERSIONS = valid_platforms_versions()
+console = Console()
 
 
 @click.command()
 @click.argument("before")
 @click.argument("after")
-@click.option('--platform', type=click.Choice(VALID_PLATFORMS), help="Target platform")
-@click.option('--python', 'version', type=click.Choice(VALID_PYTHON_VERSIONS), help="Python version (MAJOR.MINOR)")
+@click.option('--platform', help="Target platform")
+@click.option('--python', 'version', help="Python version (MAJOR.MINOR)")
 @click.option('--compressed', is_flag=True, help="Measure compressed size")
 @click.option('--csv', is_flag=True, help="Output in CSV format")
 @click.pass_obj
 def diff(app, before, after, platform, version, compressed, csv):
-    try:
-        platforms = VALID_PLATFORMS if platform is None else [platform]
-        versions = VALID_PYTHON_VERSIONS if version is None else [version]
-
-        for i, (plat, ver) in enumerate([(p, v) for p in platforms for v in versions]):
-            diff_mode(app, before, after, plat, ver, compressed, csv, i)
-    except Exception as e:
-        app.abort(str(e))
-
-
-def diff_mode(app, before, after, platform, version, compressed, csv, i):
-    url = "https://github.com/DataDog/integrations-core.git"
-    if compressed:
-        files_b, dependencies_b, files_a, dependencies_a = get_repo_info(url, platform, version, before, after)
-
-        integrations = get_diff(files_b, files_a, 'Integration')
-        dependencies = get_diff(dependencies_b, dependencies_a, 'Dependency')
-        grouped_modules = group_modules(integrations + dependencies, platform, version)
-        grouped_modules.sort(key=lambda x: abs(x['Size (Bytes)']), reverse=True)
-        for module in grouped_modules:
-            if module['Size (Bytes)'] > 0:
-                module['Size'] = f"+{module['Size']}"
-        if grouped_modules == []:
-            app.display("No size differences were detected between the selected commits.")
-        else:
-            if csv:
-                print_csv(app, i, grouped_modules)
+    repo_url = app.repo.path
+    with GitRepo(repo_url) as gitRepo:
+        try:
+            valid_platforms,valid_versions = valid_platforms_versions(gitRepo.repo_dir)
+            if platform and platform not in valid_platforms:
+                raise ValueError(f"Invalid platform: {platform}")
+            elif version and version not in valid_versions:
+                raise ValueError(f"Invalid version: {version}")
+            if platform is None or version is None:
+                platforms = valid_platforms if platform is None else [platform]
+                versions = valid_versions if version is None else [version]
+
+                for i, (plat, ver) in enumerate([(p, v) for p in platforms for v in versions]):
+                    diff_mode(app, gitRepo, before, after, plat, ver, compressed, csv, i)
             else:
-                print_table(app, grouped_modules, platform, version)
-
+                    diff_mode(app, gitRepo, before, after, platform, version, compressed, csv, None)
+
+        except Exception as e:
+            app.abort(str(e))
+
+
+def diff_mode(app, gitRepo, before, after, platform, version, compressed, csv, i):
+    files_b, dependencies_b, files_a, dependencies_a = get_repo_info(gitRepo, platform, version, before, after, compressed)
+
+    integrations = get_diff(files_b, files_a, 'Integration')
+    dependencies = get_diff(dependencies_b, dependencies_a, 'Dependency')
+    grouped_modules = group_modules(integrations + dependencies, platform, version, i)
+    grouped_modules.sort(key=lambda x: abs(x['Size (Bytes)']), reverse=True)
+    for module in grouped_modules:
+        if module['Size (Bytes)'] > 0:
+            module['Size'] = f"+{module['Size']}"
+    if grouped_modules == []:
+        app.display("No size differences were detected between the selected commits.")
+    else:
+        if csv:
+            print_csv(app, i, grouped_modules)
+        else:
+            print_table(app, "Diff", grouped_modules)
 
-def get_repo_info(repo_url, platform, version, before, after):
-    with GitRepo(repo_url) as gitRepo:
-        repo = gitRepo.repo_dir
 
+def get_repo_info(gitRepo, platform, version, before, after, compressed):
+    repo = gitRepo.repo_dir
+    with console.status("[cyan]Calculating compressed sizes for the first commit...", spinner="dots"):
         gitRepo.checkout_commit(before)
-        files_b = get_compressed_files(repo)
-        dependencies_b = get_compressed_dependencies(repo, platform, version)
+        files_b = get_files(repo, compressed)
+        dependencies_b = get_dependencies(repo, platform, version, compressed)
 
+    with console.status("[cyan]Calculating compressed sizes for the second commit...", spinner="dots"):
         gitRepo.checkout_commit(after)
-        files_a = get_compressed_files(repo)
-        dependencies_a = get_compressed_dependencies(repo, platform, version)
+        files_a = get_files(repo, compressed)
+        dependencies_a = get_dependencies(repo, platform, version, compressed)
 
     return files_b, dependencies_b, files_a, dependencies_a
 
@@ -122,7 +131,7 @@ def get_diff(size_before, size_after, type):
     return diff_files
 
 
-def get_compressed_files(repo_path):
+def get_files(repo_path, compressed):
 
     ignored_files = {"datadog_checks_dev", "datadog_checks_tests_helper"}
     git_ignore = get_gitignore_files(repo_path)
@@ -138,12 +147,12 @@ def get_compressed_files(repo_path):
 
             # Filter files
             if is_valid_integration(relative_path, included_folder, ignored_files, git_ignore):
-                compressed_size = compress(file_path)
-                file_data[relative_path] = compressed_size
+                size = compress(file_path) if compressed else os.path.getsize(file_path)
+                file_data[relative_path] = size
     return file_data
 
 
-def get_compressed_dependencies(repo_path, platform, version):
+def get_dependencies(repo_path, platform, version, compressed):
 
     resolved_path = os.path.join(repo_path, ".deps/resolved")
 
@@ -151,45 +160,37 @@ def get_compressed_dependencies(repo_path, platform, version):
         file_path = os.path.join(resolved_path, filename)
 
         if os.path.isfile(file_path) and is_correct_dependency(platform, version, filename):
-            deps, download_urls = get_dependencies(file_path)
-            return get_dependencies_sizes(deps, download_urls)
+            deps, download_urls = get_dependencies_list(file_path)
+            return get_dependencies_sizes(deps, download_urls, compressed)
     return {}
 
 
-def get_dependencies_sizes(deps, download_urls):
+def get_dependencies_sizes(deps, download_urls, compressed):
     file_data = {}
-    for dep, url in zip(deps, download_urls, strict=False):
-        dep_response = requests.head(url)
-        dep_response.raise_for_status()
-        size = dep_response.headers.get("Content-Length", None)
-        file_data[dep] = int(size)
-
+    for dep, url in zip(deps, download_urls):
+        if compressed:
+            with requests.get(url, stream=True) as response:
+                response.raise_for_status()
+                size = int(response.headers.get("Content-Length"))
+        else:
+            with requests.get(url, stream=True) as response:
+                response.raise_for_status()
+                wheel_data = response.content
+
+            with tempfile.TemporaryDirectory() as tmpdir:
+                wheel_path = Path(tmpdir) / "package.whl"
+                with open(wheel_path, "wb") as f:
+                    f.write(wheel_data)
+                extract_path = Path(tmpdir) / "extracted"
+                with zipfile.ZipFile(wheel_path, 'r') as zip_ref:
+                    zip_ref.extractall(extract_path)
+
+                size = 0
+                for dirpath, _, filenames in os.walk(extract_path):
+                    for name in filenames:
+                        file_path = os.path.join(dirpath, name)
+                        size += os.path.getsize(file_path)
+        file_data[dep] = size
     return file_data
 
 
-class GitRepo:
-    def __init__(self, url):
-        self.url = url
-        self.repo_dir = None
-
-    def __enter__(self):
-        self.repo_dir = tempfile.mkdtemp()
-        self._run("git init --quiet")
-        self._run(f"git remote add origin {self.url}")
-        return self
-
-    def _run(self, cmd):
-        subprocess.run(
-            cmd,
-            shell=True,
-            cwd=self.repo_dir,
-            check=True,
-        )
-
-    def checkout_commit(self, commit):
-        self._run(f"git fetch --quiet --depth 1 origin {commit}")
-        self._run(f"git checkout --quiet {commit}")
-
-    def __exit__(self, exception_type, exception_value, exception_traceback):
-        if self.repo_dir and os.path.exists(self.repo_dir):
-            shutil.rmtree(self.repo_dir)
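
The uncompressed branch above writes the wheel to a temporary directory and walks the extracted tree; a possible alternative, not what the patch does, is to read the same total from the zip metadata without extracting anything:

import io
import zipfile

import requests


def wheel_uncompressed_size(url):
    # Sketch only: the URL is a placeholder for a wheel download link.
    response = requests.get(url)
    response.raise_for_status()
    with zipfile.ZipFile(io.BytesIO(response.content)) as wheel:
        # ZipInfo.file_size is the uncompressed size of each archive member.
        return sum(info.file_size for info in wheel.infolist())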
diff --git a/ddev/src/ddev/cli/size/status.py b/ddev/src/ddev/cli/size/status.py
index f6d44eb5068bd..252e682210e6c 100644
--- a/ddev/src/ddev/cli/size/status.py
+++ b/ddev/src/ddev/cli/size/status.py
@@ -4,12 +4,12 @@
 
 import os
 from pathlib import Path
-
+from rich.console import Console
 import click
 
 from .common import (
     compress,
-    get_dependencies,
+    get_dependencies_list,
     get_dependencies_sizes,
     get_gitignore_files,
     group_modules,
@@ -17,44 +17,56 @@
     is_valid_integration,
     print_csv,
     print_table,
+    valid_platforms_versions
 )
 
-VALID_PLATFORMS = ["linux-aarch64", "linux-x86_64", "macos-x86_64", "windows-x86_64"]
-VALID_PYTHON_VERSIONS = ["3.12"]
+#VALID_PLATFORMS = ["linux-aarch64", "linux-x86_64", "macos-x86_64", "windows-x86_64"]
+
+
 REPO_PATH = Path(__file__).resolve().parents[5]
+# VALID_PLATFORMS, VALID_PYTHON_VERSIONS = valid_platforms_versions()
 
+console = Console()
 
 @click.command()
-@click.option('--platform', type=click.Choice(VALID_PLATFORMS), help="Target platform")
-@click.option('--python', 'version', type=click.Choice(VALID_PYTHON_VERSIONS), help="Python version (MAJOR.MINOR)")
+@click.option('--platform', help="Target platform")
+@click.option('--python', 'version', help="Python version (MAJOR.MINOR)")
 @click.option('--compressed', is_flag=True, help="Measure compressed size")
 @click.option('--csv', is_flag=True, help="Output in CSV format")
 @click.pass_obj
 def status(app, platform, version, compressed, csv):
     try:
-        platforms = VALID_PLATFORMS if platform is None else [platform]
-        versions = VALID_PYTHON_VERSIONS if version is None else [version]
-
-        for i, (plat, ver) in enumerate([(p, v) for p in platforms for v in versions]):
-            status_mode(app, plat, ver, compressed, csv, i)
+        repo_path = app.repo.path
+        valid_platforms,valid_versions = valid_platforms_versions(repo_path)
+        if platform and platform not in valid_platforms:
+            raise ValueError(f"Invalid platform: {platform}")
+        elif version and version not in valid_versions:
+            raise ValueError(f"Invalid version: {version}")
+        if platform is None or version is None:
+            platforms =  valid_platforms if platform is None else [platform]
+            versions = valid_versions if version is None else [version]
+            for i, (plat, ver) in enumerate([(p, v) for p in platforms for v in versions]):
+                status_mode(app, plat, ver, compressed, csv, i)
+        else:
+           status_mode(app, platform, version, compressed, csv, None)
+        
     except Exception as e:
         app.abort(str(e))
 
 
 def status_mode(app, platform, version, compressed, csv, i):
-    if compressed:
-        modules = get_compressed_files() + get_compressed_dependencies(platform, version)
+    with console.status("[cyan]Calculating sizes...", spinner="dots"):
+        modules = get_files(compressed) + get_dependencies(platform, version,compressed)
+    grouped_modules = group_modules(modules, platform, version, i)
+    grouped_modules.sort(key=lambda x: x['Size (Bytes)'], reverse=True)
 
-        grouped_modules = group_modules(modules, platform, version)
-        grouped_modules.sort(key=lambda x: x['Size (Bytes)'], reverse=True)
-
-        if csv:
-            print_csv(app, i, grouped_modules)
-        else:
-            print_table(app, grouped_modules, platform, version)
+    if csv:
+        print_csv(app, i, grouped_modules)
+    else:
+        print_table(app, "STATUS", grouped_modules)
 
 
-def get_compressed_files():
+def get_files(compressed):
 
     ignored_files = {"datadog_checks_dev", "datadog_checks_tests_helper"}
     git_ignore = get_gitignore_files(REPO_PATH)
@@ -70,24 +82,27 @@ def get_compressed_files():
 
             # Filter files
             if is_valid_integration(relative_path, included_folder, ignored_files, git_ignore):
-                compressed_size = compress(file_path)
+                size = compress(file_path) if compressed else os.path.getsize(file_path)
                 integration = relative_path.split(os.sep)[0]
                 file_data.append(
                     {
                         "File Path": relative_path,
                         "Type": "Integration",
                         "Name": integration,
-                        "Size (Bytes)": compressed_size,
+                        "Size (Bytes)": size,
                     }
                 )
     return file_data
 
 
-def get_compressed_dependencies(platform, version):
+def get_dependencies(platform, version):
 
     resolved_path = os.path.join(REPO_PATH, ".deps/resolved")
     for filename in os.listdir(resolved_path):
         file_path = os.path.join(resolved_path, filename)
         if os.path.isfile(file_path) and is_correct_dependency(platform, version, filename):
-            deps, download_urls = get_dependencies(file_path)
+            deps, download_urls = get_dependencies_list(file_path)
             return get_dependencies_sizes(deps, download_urls)
+
+
+
diff --git a/ddev/tests/cli/size/test_common.py b/ddev/tests/cli/size/test_common.py
index 839ef2f9b7627..f3c8565ac5e07 100644
--- a/ddev/tests/cli/size/test_common.py
+++ b/ddev/tests/cli/size/test_common.py
@@ -10,9 +10,39 @@
     is_correct_dependency,
     is_valid_integration,
     print_csv,
+    valid_platforms_versions
 )
 
 
+def test_valid_platforms_versions():
+    filenames = [
+        "linux-aarch64_3.12.txt",
+        "linux-aarch64_py2.txt",
+        "linux-aarch64_py3.txt",
+        "linux-x86_64_3.12.txt",
+        "linux-x86_64_py2.txt",
+        "linux-x86_64_py3.txt",
+        "macos-x86_64_3.12.txt",
+        "macos-x86_64_py2.txt",
+        "macos-x86_64_py3.txt",
+        "windows-x86_64_3.12.txt",
+        "windows-x86_64_py2.txt",
+        "windows-x86_64_py3.txt"
+    ]
+
+    expected_platforms = {
+        "linux-aarch64",
+        "linux-x86_64",
+        "macos-x86_64",
+        "windows-x86_64"
+    }
+    expected_versions = {"3.12"}
+    with patch("os.listdir", return_value=filenames):
+        platforms, versions = valid_platforms_versions("/tmp/fake_repo")
+        assert platforms == expected_platforms
+        assert versions == expected_versions
+
+
 def test_is_correct_dependency():
     assert is_correct_dependency("windows-x86_64", "3.12", "windows-x86_64-3.12")
     assert not is_correct_dependency("windows-x86_64", "3.12", "linux-x86_64-3.12")
@@ -97,7 +127,7 @@ def test_group_modules():
         },
     ]
 
-    assert group_modules(modules, platform, version) == expected_output
+    assert group_modules(modules, platform, version,0) == expected_output
 
 
 def test_get_gitignore_files():
diff --git a/ddev/tests/cli/size/test_diff.py b/ddev/tests/cli/size/test_diff.py
index 1ba94bfdc6693..cd6027bc6914a 100644
--- a/ddev/tests/cli/size/test_diff.py
+++ b/ddev/tests/cli/size/test_diff.py
@@ -138,6 +138,7 @@ def get_compressed_dependencies_side_effect(_, __, ___):
     get_compressed_dependencies_side_effect.counter = 0
 
     with (
+        patch("ddev.cli.size.diff.valid_platforms_versions", return_value=({'linux-x86_64', 'macos-x86_64', 'linux-aarch64', 'windows-x86_64'}, {'3.12'})),
         patch("ddev.cli.size.diff.GitRepo.__enter__", return_value=mock_git_repo),
         patch("ddev.cli.size.diff.GitRepo.__exit__", return_value=None),
         patch("ddev.cli.size.diff.GitRepo.checkout_commit"),
@@ -174,15 +175,20 @@ def test_diff_csv(ddev, mock_size_diff_dependencies):
 
 
 
-
 def test_diff_no_differences(ddev):
     fake_repo = MagicMock()
+    fake_repo.repo_dir = "/tmp/fake_repo"
 
     with (
         patch("ddev.cli.size.diff.GitRepo.__enter__", return_value=fake_repo),
+        patch("ddev.cli.size.diff.valid_platforms_versions", return_value=({'linux-x86_64', 'macos-x86_64', 'linux-aarch64', 'windows-x86_64'}, {'3.12'})),
         patch("ddev.cli.size.diff.GitRepo.__exit__", return_value=None),
         patch.object(fake_repo, "checkout_commit"),
         patch("tempfile.mkdtemp", return_value="/tmp/fake_repo"),
+        patch("os.path.exists", return_value=True),
+        patch("os.path.isdir", return_value=True),
+        patch("os.path.isfile", return_value=True),
+        patch("os.listdir", return_value=["linux-aarch64_3.12"]),
         patch(
             "ddev.cli.size.diff.get_compressed_files",
             return_value={
diff --git a/ddev/tests/cli/size/test_status.py b/ddev/tests/cli/size/test_status.py
index 22031ad0d5e52..f4500c228600a 100644
--- a/ddev/tests/cli/size/test_status.py
+++ b/ddev/tests/cli/size/test_status.py
@@ -12,7 +12,7 @@
 )
 
 
-def test_get_compressed_files():
+def test_get_files_compressed():
     mock_files = [
         ("root/integration/datadog_checks", [], ["file1.py", "file2.py"]),
         ("root/integration_b/datadog_checks", [], ["file3.py"]),
@@ -35,7 +35,7 @@ def fake_compress(file_path):
         patch("ddev.cli.size.status.compress", side_effect=fake_compress),
     ):
 
-        result = get_compressed_files()
+        result = get_files(True)
 
     expected = [
         {
@@ -93,6 +93,7 @@ def test_get_compressed_dependencies():
 @pytest.fixture()
 def mock_size_status():
     with (
+        patch("ddev.cli.size.status.valid_platforms_versions", return_value=({'linux-x86_64', 'macos-x86_64', 'linux-aarch64', 'windows-x86_64'}, {'3.12'})),
         patch("ddev.cli.size.status.get_gitignore_files", return_value=set()),
         patch("ddev.cli.size.status.compress", return_value=1234),
         patch("ddev.cli.size.status.get_dependencies", return_value=(["dep1"], {"dep1": "https://example.com/dep1"})),
diff --git a/ddev/tests/cli/size/test_timeline.py b/ddev/tests/cli/size/test_timeline.py
new file mode 100644
index 0000000000000..60e92b4f6c96a
--- /dev/null
+++ b/ddev/tests/cli/size/test_timeline.py
@@ -0,0 +1,306 @@
+import pytest
+from unittest.mock import MagicMock, patch, mock_open
+from ddev.cli.size.timeline import (
+    get_version,
+    format_commit_data,
+    trim_modules,
+    group_modules,
+    get_dependency_size,
+    get_dependency, 
+    get_compressed_dependencies,
+    get_compressed_files,
+    module_exists
+)
+from datetime import datetime
+
+
+def test_get_compressed_files():
+    with (
+        patch("os.walk", return_value=[("/tmp/fake_repo/int1", [], ["int1.py"])]),
+        patch("os.path.relpath", return_value="int1/int1.py"),
+        patch("os.path.exists", return_value=True),
+        patch("ddev.cli.size.timeline.get_gitignore_files", return_value=set()),
+        patch("ddev.cli.size.timeline.is_valid_integration", return_value=True),
+        patch("ddev.cli.size.timeline.compress", return_value=1234),
+    ):
+        result = get_compressed_files(
+            "/tmp/fake_repo",
+            "int1",
+            "abc1234",
+            datetime(2025, 4, 4).date(),
+            "auth",
+            "Added int1",
+            []
+        )
+        assert result == [
+            {
+                "Size (Bytes)": 1234,
+                "Date": datetime(2025, 4, 4).date(),
+                "Author": "auth",
+                "Commit Message": "Added int1",
+                "Commit SHA": "abc1234"
+            }
+        ]
+
+def test_get_compressed_files_deleted_only():
+    repo_path = "/tmp/fake_repo"
+    module = "foo"
+    commit = "abc1234"
+    date = datetime.strptime("Apr 5 2025", "%b %d %Y").date()
+    author = "Author"
+    message = "deleted module"
+
+    with (
+        patch("ddev.cli.size.timeline.get_gitignore_files", return_value=set()),
+        patch("os.walk", return_value=[]),
+        patch("os.path.relpath", side_effect=lambda path, _: path.replace(f"{repo_path}/", "")),
+        patch("os.path.exists", return_value=False),  
+    ):
+        file_data = get_compressed_files(repo_path, module, commit, date, author, message, [])
+
+    assert file_data == [
+        {
+            "Size (Bytes)": 0,
+            "Date": date,
+            "Author": author,
+            "Commit Message": "(DELETED) " + message,
+            "Commit SHA": commit,
+        }
+    ]
+
+
+def test_get_version():
+    files = ["linux-x86_64_3.12.txt", "linux-x86_64_3.10.txt"]
+    version = get_version(files, "linux-x86_64")
+    assert version == "3.12"
+
+def test_format_commit_data():
+    date, message, commit = format_commit_data("Apr 4 2025", "this is a very long commit message that should be trimmed (#1234)", "abc1234def", "abc1234def")
+    expected_date = datetime.strptime("Apr 4 2025", "%b %d %Y").date()
+    expected_message = "(NEW) this is a very long...(#1234)"
+    expected_commit = "abc1234"
+    assert date == expected_date
+    assert message == expected_message
+    assert commit == expected_commit
+
+def test_trim_modules_keep_some_remove_some():
+    modules = [
+        {"Size (Bytes)": 1000},
+        {"Size (Bytes)": 1100},  # diff = 100 -> should be removed if threshold = 200
+        {"Size (Bytes)": 1400},  # diff = 300 -> should be kept
+    ]
+    expected = [
+        {"Size (Bytes)": 1000, "Delta (Bytes)": 0, "Delta": " "},
+        {"Size (Bytes)": 1400, "Delta (Bytes)": 300, "Delta": "300 B"},
+    ]
+    trimmed = trim_modules(modules, threshold=200)
+    assert trimmed == expected
+
+
+def test_group_modules():
+    modules = [
+        {"Size (Bytes)": 1000, "Date": datetime(2025, 4, 4).date(), "Author": "A", "Commit Message": "msg", "Commit SHA": "c1"},
+        {"Size (Bytes)": 500, "Date": datetime(2025, 4, 4).date(), "Author": "A", "Commit Message": "msg", "Commit SHA": "c1"},
+        {"Size (Bytes)": 1500, "Date": datetime(2025, 4, 5).date(), "Author": "A", "Commit Message": "msg2", "Commit SHA": "c2"},
+    ]
+    expected = [
+        {
+            "Commit SHA": "c1",
+            "Size (Bytes)": 1500,
+            "Size": "1.46 KB",
+            "Delta (Bytes)": "N/A",
+            "Delta": "N/A",
+            "Date": datetime(2025, 4, 4).date(),
+            "Author": "A",
+            "Commit Message": "msg",
+            "Platform": "linux-x86_64",
+        },
+        {
+            "Commit SHA": "c2",
+            "Size (Bytes)": 1500,
+            "Size": "1.46 KB",
+            "Delta (Bytes)": "N/A",
+            "Delta": "N/A",
+            "Date": datetime(2025, 4, 5).date(),
+            "Author": "A",
+            "Commit Message": "msg2",
+            "Platform": "linux-x86_64",
+        },
+    ]
+    grouped = group_modules(modules, "linux-x86_64", 0)
+    assert grouped == expected
+
+
+def test_get_dependency():
+    content = """dep1 @ https://example.com/dep1.whl
+dep2 @ https://example.com/dep2.whl"""
+    with patch("builtins.open", mock_open(read_data=content)):
+        url = get_dependency("some/path/file.txt", "dep2")
+        assert url == "https://example.com/dep2.whl"
+
+def make_mock_response(size):
+    mock_response = MagicMock()
+    mock_response.__enter__.return_value = mock_response
+    mock_response.headers = {"Content-Length": size}
+    mock_response.raise_for_status = lambda: None
+    return mock_response
+
+def test_get_dependency_size():
+    mock_response = make_mock_response("45678")
+    with patch("requests.get", return_value=mock_response):
+        info = get_dependency_size("https://example.com/file.whl", "abc1234", datetime(2025, 4, 4).date(), "auth", "Fixed bug")
+        assert info == {
+            "Size (Bytes)": 45678,
+            "Date": datetime(2025, 4, 4).date(),
+            "Author": "auth",
+            "Commit Message": "Fixed bug",
+            "Commit SHA": "abc1234",
+        }
+
+def test_get_compressed_dependencies():
+    with (
+        patch("os.path.exists", return_value=True),
+        patch("os.path.isdir", return_value=True),
+        patch("os.path.isfile", return_value=True),
+        patch("os.listdir", return_value=["linux-x86_64_3.12.txt"]),
+        patch("ddev.cli.size.timeline.get_dependency", return_value="https://example.com/dep1.whl"),
+        patch("ddev.cli.size.timeline.requests.get", return_value=make_mock_response("12345")),
+    ):
+        result = get_compressed_dependencies(
+            "/tmp/fake_repo",
+            "dep1",
+            "linux-x86_64",
+            "abc1234",
+            datetime(2025, 4, 4).date(),
+            "auth",
+            "Added dep1"
+        )
+        assert result == {
+            "Size (Bytes)": 12345,
+            "Date": datetime(2025, 4, 4).date(),
+            "Author": "auth",
+            "Commit Message": "Added dep1",
+            "Commit SHA": "abc1234"
+        }
+
+def test_get_dependency_size_returns_commit_metadata():
+    with patch("requests.get", return_value=make_mock_response("45678")):
+        result = get_dependency_size(
+            "https://example.com/dep1.whl",
+            "abc1234",
+            datetime(2025, 4, 4).date(),
+            "auth",
+            "Fixed bug"
+        )
+        assert result == {
+            "Size (Bytes)": 45678,
+            "Date": datetime(2025, 4, 4).date(),
+            "Author": "auth",
+            "Commit Message": "Fixed bug",
+            "Commit SHA": "abc1234"
+        }
+
+
+@pytest.fixture
+def mock_timeline_gitrepo():
+    mock_git_repo = MagicMock()
+    mock_git_repo.repo_dir = "/tmp/fake_repo"
+    mock_git_repo.get_module_commits.return_value = ["commit1", "commit2"]
+    mock_git_repo.get_creation_commit_module.return_value = "commit1"
+    mock_git_repo.get_commit_metadata.side_effect = lambda c: ("Apr 4 2025", "Initial commit", c)
+    
+    with (
+        patch("ddev.cli.size.timeline.GitRepo.__enter__", return_value=mock_git_repo),
+        patch("ddev.cli.size.timeline.GitRepo.__exit__", return_value=None),
+        patch("ddev.cli.size.timeline.GitRepo.sparse_checkout_commit"),
+        patch("ddev.cli.size.timeline.get_gitignore_files", return_value=set()),
+        patch("ddev.cli.size.timeline.compress", return_value=1234),
+        patch("os.walk", return_value=[("/tmp/fake_repo/int", [], ["file1.py"])]),
+        patch("os.path.exists", return_value=True),
+        patch("ddev.cli.size.timeline.group_modules", side_effect=lambda m, *_: m),
+        patch("ddev.cli.size.timeline.trim_modules", side_effect=lambda m, *_: m),
+        patch("ddev.cli.size.timeline.print_table"),
+        patch("ddev.cli.size.timeline.print_csv"),
+        patch("os.path.exists", return_value=True),
+        patch("os.path.isdir", return_value=True),
+        patch("os.path.isfile", return_value=True),
+        patch("os.listdir", return_value=[
+            "linux-x86_64_3.12_dep1.whl",
+            "linux-x86_64_3.12_dep2.whl"
+        ]),
+    ):
+        yield
+
+@pytest.fixture
+def app():
+    mock_app = MagicMock()
+    mock_app.repo.path = "/tmp/fake_repo"
+    return mock_app
+
+def test_timeline_integration_compressed(ddev, mock_timeline_gitrepo, app):
+    result = ddev("size", "timeline", "integration", "int1", "commit1", "commit2", "--compressed", obj=app)
+    assert result.exit_code == 0
+
+@pytest.fixture
+def mock_timeline_dependencies():
+    mock_git_repo = MagicMock()
+    mock_git_repo.repo_dir = "/tmp/fake_repo"
+    mock_git_repo.get_module_commits.return_value = ["commit1", "commit2"]
+    mock_git_repo.get_commit_metadata.side_effect = lambda c: ("Apr 4 2025", "Fix dep", c)
+
+    with (
+        patch("ddev.cli.size.timeline.GitRepo.__enter__", return_value=mock_git_repo),
+        patch("ddev.cli.size.timeline.GitRepo.__exit__", return_value=None),
+        patch("ddev.cli.size.timeline.GitRepo.sparse_checkout_commit"),
+        patch("ddev.cli.size.timeline.valid_platforms_versions", return_value=({'linux-x86_64', 'macos-x86_64', 'linux-aarch64', 'windows-x86_64'}, {'3.12'})),
+        patch("os.path.exists", return_value=True),
+        patch("os.path.isdir", return_value=True),
+        patch("os.listdir", return_value=["linux-x86_64-3.12"]),
+        patch("os.path.isfile", return_value=True),
+        patch("ddev.cli.size.timeline.get_gitignore_files", return_value=set()),
+        patch("ddev.cli.size.timeline.get_dependency", return_value="https://example.com/dep1.whl"),
+        patch("ddev.cli.size.timeline.requests.get") as mock_get,
+        patch("ddev.cli.size.timeline.group_modules", side_effect=lambda m, *_: m),
+        patch("ddev.cli.size.timeline.trim_modules", side_effect=lambda m, *_: m),
+        patch("ddev.cli.size.timeline.print_table"),
+    ):
+        mock_response = MagicMock()
+        mock_response.__enter__.return_value = mock_response
+        mock_response.headers = {"Content-Length": "1024"}
+        mock_response.raise_for_status = lambda: None
+        mock_get.return_value = mock_response
+
+        yield
+
+def test_timeline_dependency_compressed(ddev, mock_timeline_dependencies, app):
+    result = ddev(
+        "size", "timeline", "dependency", "dep1", "commit1", "commit2",
+        "--compressed", "--platform", "linux-x86_64",
+        obj=app,
+    )
+
+    assert result.exit_code == 0
+
+
+def test_timeline_invalid_platform(ddev):
+    result = ddev(
+        "size", "timeline", "dependency", "dep1", "commit1", "commit2",
+        "--compressed", "--platform", "invalid-platform"
+    )
+    assert result.exit_code != 0
+
+
+
+
+def test_timeline_no_changes_in_integration(ddev):
+    mock_git_repo = MagicMock()
+    mock_git_repo.repo_dir = "/tmp/fake_repo"
+    mock_git_repo.get_module_commits.return_value = [""]
+    
+    with (
+        patch("ddev.cli.size.timeline.GitRepo.__enter__", return_value=mock_git_repo),
+        patch("ddev.cli.size.timeline.GitRepo.__exit__", return_value=None),
+    ):
+        result = ddev("size", "timeline", "integration", "integration/foo", "commit1", "commit2", "--compressed")
+        assert result.exit_code != 0
+        assert "No changes found" in result.output

From 846886dc0028f9790b02f13b8f369a0a4107f543 Mon Sep 17 00:00:00 2001
From: Enrico Donnici <enrico.donnici@datadoghq.com>
Date: Tue, 15 Apr 2025 11:55:45 +0200
Subject: [PATCH 09/40] Test ddev size status in GHA (by hijacking the slapr
 workflow)

---
 .github/workflows/slapr.yml | 73 +++++++++++++++++++++++++------------
 1 file changed, 49 insertions(+), 24 deletions(-)

diff --git a/.github/workflows/slapr.yml b/.github/workflows/slapr.yml
index 98ee0448e5614..3fbefcc73aa2e 100644
--- a/.github/workflows/slapr.yml
+++ b/.github/workflows/slapr.yml
@@ -1,30 +1,55 @@
-# https://github.com/DataDog/slapr
+name: Measure Disk Usage
 
-name: Slack emoji PR updates
 on:
-  pull_request_review:
-    types: [submitted]
   pull_request:
-    types: [closed]
+    branches:
+    - master
+env:
+  PYTHON_VERSION: "3.12"
 
 jobs:
-  run_slapr_agent_integrations:
-    runs-on: ubuntu-latest
-    strategy:
-      matrix:
-        slack_channel_variable:
-          - SLACK_CHANNEL_ID
-          - SLACK_CHANNEL_ID_AGENT_INTEGRATIONS_REVIEWS
-          - SLACK_CHANNEL_ID_INFRA_INTEGRATIONS
+  measure-disk-usage:
+    runs-on: ubuntu-22.04
     steps:
-    - uses: DataDog/slapr@master
-      env:
-        GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
-        SLACK_CHANNEL_ID: "${{ secrets[matrix.slack_channel_variable] }}"
-        SLACK_API_TOKEN: "${{ secrets.SLACK_API_TOKEN }}"
-        SLAPR_BOT_USER_ID: "${{ secrets.SLAPR_BOT_USER_ID }}"
-        SLAPR_EMOJI_REVIEW_STARTED: "review_started"
-        SLAPR_EMOJI_APPROVED: "approved2"
-        SLAPR_EMOJI_CHANGES_REQUESTED: "changes_requested"
-        SLAPR_EMOJI_MERGED: "merged"
-        SLAPR_EMOJI_CLOSED: "closed"
+    - uses: actions/checkout@v4
+      with:
+        fetch-depth: 0
+    - name: Set up Python ${{ env.PYTHON_VERSION }}
+      uses: actions/setup-python@v5
+      with:
+        python-version: ${{ env.PYTHON_VERSION }}
+    - name: Install ddev
+      run: |
+        pip install -e ./datadog_checks_dev[cli]
+        pip install -e ./ddev
+
+    - name: Configure ddev
+      run: |
+        ddev config set repos.core .
+        ddev config set repo core
+    - name: Measure disk usage Uncompressed
+      run: | 
+        ddev size status --csv > size-uncompressed.csv
+        ddev size status
+        echo "```" >> $GITHUB_STEP_SUMMARY
+        ddev size status >> $GITHUB_STEP_SUMMARY
+        echo "```" >> $GITHUB_STEP_SUMMARY
+    - name: Measure disk usage Compressed
+      run: |
+        ddev size status --csv --compressed > size-compressed.csv
+        ddev size status --compressed
+        echo "```" >> $GITHUB_STEP_SUMMARY
+        ddev size status --compressed >> $GITHUB_STEP_SUMMARY
+        echo "```" >> $GITHUB_STEP_SUMMARY
+    - name: Upload file sizes (uncompressed)
+      uses: actions/upload-artifact@v4
+      with:
+        name: size-uncompressed.csv
+        path: size-uncompressed.csv
+        if-no-files-found: error
+    - name: Upload file sizes (compressed)
+      uses: actions/upload-artifact@v4
+      with:
+        name: size-compressed.csv
+        path: size-compressed.csv
+        if-no-files-found: error

From a3aafc5540d4c27a4f9a184abb9d5337c9902a44 Mon Sep 17 00:00:00 2001
From: Lucia Sanchez Bella <lucia.sanchezbella@datadoghq.com>
Date: Tue, 15 Apr 2025 12:03:34 +0200
Subject: [PATCH 10/40] Timeline mode

---
 ddev/src/ddev/cli/size/common.py     |   6 +-
 ddev/src/ddev/cli/size/diff.py       |   6 +-
 ddev/src/ddev/cli/size/status.py     |   4 +-
 ddev/src/ddev/cli/size/timeline.py   | 314 +++++++++++++++++++++++++++
 ddev/tests/cli/size/test_common.py   |   8 +-
 ddev/tests/cli/size/test_diff.py     |  36 +--
 ddev/tests/cli/size/test_status.py   |   8 +-
 ddev/tests/cli/size/test_timeline.py |  29 ++-
 8 files changed, 368 insertions(+), 43 deletions(-)
 create mode 100644 ddev/src/ddev/cli/size/timeline.py

diff --git a/ddev/src/ddev/cli/size/common.py b/ddev/src/ddev/cli/size/common.py
index 4c2ad720edb7b..0f480bbe5ab85 100644
--- a/ddev/src/ddev/cli/size/common.py
+++ b/ddev/src/ddev/cli/size/common.py
@@ -83,9 +83,9 @@ def get_dependencies_sizes(deps, download_urls, compressed):
     file_data = []
     for dep, url in zip(deps, download_urls, strict=False):
         if compressed:
-            with requests.get(url, stream=True) as response:
-                response.raise_for_status()
-                size = int(response.headers.get("Content-Length"))
+            response = requests.head(url)
+            response.raise_for_status()
+            size = int(response.headers.get("Content-Length"))
         else:
             with requests.get(url, stream=True) as response:
                 response.raise_for_status()
diff --git a/ddev/src/ddev/cli/size/diff.py b/ddev/src/ddev/cli/size/diff.py
index dd3ce4ab624df..1700eda378686 100644
--- a/ddev/src/ddev/cli/size/diff.py
+++ b/ddev/src/ddev/cli/size/diff.py
@@ -169,9 +169,9 @@ def get_dependencies_sizes(deps, download_urls, compressed):
     file_data = {}
     for dep, url in zip(deps, download_urls):
         if compressed:
-            with requests.get(url, stream=True) as response:
-                response.raise_for_status()
-                size = int(response.headers.get("Content-Length"))
+            response = requests.head(url)
+            response.raise_for_status()
+            size = int(response.headers.get("Content-Length"))
         else:
             with requests.get(url, stream=True) as response:
                 response.raise_for_status()
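
The switch above from a streamed GET to requests.head works because only the Content-Length header is needed for the compressed size; a minimal sketch of that idea follows (the allow_redirects flag is an extra precaution for redirecting download URLs, not something the patch itself sets):

import requests


def compressed_size(url):
    # Sketch only: HEAD fetches headers without downloading the wheel body.
    # requests does not follow redirects for HEAD unless asked to.
    response = requests.head(url, allow_redirects=True)
    response.raise_for_status()
    return int(response.headers["Content-Length"])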
diff --git a/ddev/src/ddev/cli/size/status.py b/ddev/src/ddev/cli/size/status.py
index 252e682210e6c..8e1db4e78b840 100644
--- a/ddev/src/ddev/cli/size/status.py
+++ b/ddev/src/ddev/cli/size/status.py
@@ -95,14 +95,14 @@ def get_files(compressed):
     return file_data
 
 
-def get_dependencies(platform, version):
+def get_dependencies(platform, version, compressed):
 
     resolved_path = os.path.join(REPO_PATH, ".deps/resolved")
     for filename in os.listdir(resolved_path):
         file_path = os.path.join(resolved_path, filename)
         if os.path.isfile(file_path) and is_correct_dependency(platform, version, filename):
             deps, download_urls = get_dependencies_list(file_path)
-            return get_dependencies_sizes(deps, download_urls)
+            return get_dependencies_sizes(deps, download_urls, compressed)
 
 
 
diff --git a/ddev/src/ddev/cli/size/timeline.py b/ddev/src/ddev/cli/size/timeline.py
new file mode 100644
index 0000000000000..aee4256084ed9
--- /dev/null
+++ b/ddev/src/ddev/cli/size/timeline.py
@@ -0,0 +1,314 @@
+
+import click
+import requests
+import os
+import re
+from datetime import datetime
+from rich.progress import Progress, SpinnerColumn, BarColumn, TextColumn, TimeElapsedColumn
+from rich.console import Console
+import tempfile
+from pathlib import Path
+import zipfile
+from .common import (
+    compress,
+    get_gitignore_files,
+    convert_size,
+    is_correct_dependency,
+    is_valid_integration,
+    print_csv,
+    print_table,
+    GitRepo,
+    WrongDependencyFormat, 
+    valid_platforms_versions
+)
+
+#VALID_PLATFORMS = ["linux-aarch64", "linux-x86_64", "macos-x86_64", "windows-x86_64"]
+#VALID_PYTHON_VERSIONS = ["3.12"]
+
+# VALID_PLATFORMS, _ = valid_platforms_versions()
+DEPENDENCY_FILE_CHANGE = datetime.strptime("Sep 17 2024","%b %d %Y").date()
+MINIMUM_DATE = datetime.strptime("Apr 3 2024","%b %d %Y").date()
+console = Console()
+
+@click.command()
+@click.argument('type', type=click.Choice(['integration', 'dependency']))
+@click.argument('module')
+@click.argument('initial', required=False)
+@click.argument('final', required=False)
+@click.option('--time', help="Filter commits starting from a specific date. Accepts both absolute and relative formats, "
+         "such as '2025-03-01', '2 weeks ago', or 'yesterday'")
+@click.option('--threshold', help="Only show modules with size differences greater than a threshold in bytes")
+@click.option('--platform', help="Target platform to analyze. Only required for dependencies. If not specified, all platforms will be analyzed")
+#@click.option('--python', 'version', type=click.Choice(VALID_PYTHON_VERSIONS), help="Python version (MAJOR.MINOR)")
+@click.option('--compressed', is_flag=True, help="Measure compressed size")
+@click.option('--csv', is_flag=True, help="Output results in CSV format")
+@click.pass_obj
+def timeline(app, type, module, initial, final, time, threshold, platform, compressed, csv):
+    url = app.repo.path
+    with GitRepo(url) as gitRepo:
+        try:
+            with console.status("[cyan]Fetching commits...", spinner="dots"):
+                folder = module if type == 'integration' else '.deps/resolved'
+                commits = gitRepo.get_module_commits(folder, initial, final, time)
+                first_commit = gitRepo.get_creation_commit_module(module)
+                gitRepo.checkout_commit(commits[-1])
+                valid_platforms, _ = valid_platforms_versions(gitRepo.repo_dir)
+                n_platforms = len(valid_platforms)
+            if platform and platform not in valid_platforms:
+                raise ValueError(f"Invalid platform: {platform}")
+            elif commits == [''] and type == "integration" and module_exists(gitRepo.repo_dir, module):
+                raise ValueError(f"No changes found: {module}")
+            elif commits == [''] and type == "integration" and not module_exists(gitRepo.repo_dir, module):
+                raise ValueError(f"Integration {module} not found in latest commit, is the name correct?")
+            elif type == 'dependency' and platform and module not in get_dependency_list(gitRepo.repo_dir, [platform]):
+                raise ValueError(f"Dependency {module} not found in latest commit for the platform {platform}, is the name correct?")
+            elif type == 'dependency' and not platform and module not in get_dependency_list(gitRepo.repo_dir, valid_platforms):
+                raise ValueError(f"Dependency {module} not found in latest commit, is the name correct?")
+            elif type == 'dependency' and commits == ['']:
+                raise ValueError(f"No changes found: {module}")
+            if type == "dependency" and platform is None:
+                for i, plat in enumerate(valid_platforms):
+                    timeline_mode(app, gitRepo, type, module, commits, threshold, plat, compressed, csv, i, True, n_platforms, None)
+            else:
+                timeline_mode(app, gitRepo, type, module, commits, threshold, platform, compressed, csv, None, False, n_platforms, first_commit)
+        except Exception as e:
+            app.abort(str(e))
+
+
+def timeline_mode(app, gitRepo, type, module, commits, threshold, platform, compressed, csv, i, maybe_mod_missing, n_platforms, first_commit):
+    modules = get_repo_info(gitRepo, type, platform, module, commits, i, maybe_mod_missing, n_platforms, compressed, first_commit)
+    if modules != []:
+        with console.status("[cyan]Exporting data...", spinner="dots"):
+            grouped_modules = group_modules(modules, platform, i)
+            trimmed_modules = trim_modules(grouped_modules, threshold)
+            maybe_mod_missing = False
+            if csv:
+                print_csv(app, i, trimmed_modules)
+            else:
+                print_table(app, "Timeline for " + module, trimmed_modules)
+
+def get_repo_info(gitRepo, type, platform, module, commits, i, maybe_mod_missing, n_platforms, compressed, first_commit):
+    with Progress(
+        SpinnerColumn(),
+        TextColumn("[progress.description]{task.description}"),
+        BarColumn(),
+        TimeElapsedColumn(),
+        transient=True, 
+    ) as progress:
+        if type == "integration":
+            file_data = process_commits(commits, module, gitRepo, progress, platform, type, compressed, first_commit)
+        else: 
+            file_data = process_commits(commits, module, gitRepo, progress, platform, type, compressed, None)
+    return file_data      
+    
+def process_commits(commits, module, gitRepo, progress, platform, type, compressed, first_commit=None):
+    file_data=[]
+    task = progress.add_task("[cyan]Processing commits...", total=len(commits))
+    repo = gitRepo.repo_dir
+    
+    folder = module if type == 'integration' else '.deps/resolved'
+    for commit in commits:
+        gitRepo.sparse_checkout_commit(commit, folder)
+        date, author, message = gitRepo.get_commit_metadata(commit)
+        date, message, commit = format_commit_data(date, message, commit, first_commit)
+        if type == 'dependency' and date < MINIMUM_DATE:
+            continue
+        elif type == 'dependency':
+            result = get_dependencies(repo, module, platform, commit, date, author, message, compressed)
+            if result:
+                file_data.append(result)
+        elif type == 'integration':
+            file_data = get_files(repo, module, commit, date, author, message, file_data, compressed)
+        progress.advance(task)
+    return file_data
+
+def get_files(repo_path, module, commit, date, author, message, file_data, compressed):   
+    
+    if not module_exists(repo_path, module):
+        file_data.append(
+                    {
+                        "Size (Bytes)": 0,
+                        "Date": date,
+                        "Author": author,
+                        "Commit Message": "(DELETED) " + message,
+                        "Commit SHA": commit
+                    }
+                )
+        return file_data    
+    
+    ignored_files = {"datadog_checks_dev", "datadog_checks_tests_helper"}
+    # resolved_path = os.path.join(repo_path, module)
+
+    git_ignore = get_gitignore_files(repo_path)
+    included_folder = "datadog_checks/"
+    for root, _, files in os.walk(repo_path):
+        for file in files:
+            file_path = os.path.join(root, file)
+            # Convert the path to a relative format within the repo
+            relative_path = os.path.relpath(file_path, repo_path)
+
+            # Filter files
+            if is_valid_integration(relative_path, included_folder, ignored_files, git_ignore):
+                size = compress(file_path) if compressed else os.path.getsize(file_path)
+                file_data.append(
+                    {
+                        "Size (Bytes)": size,
+                        "Date": date,
+                        "Author": author,
+                        "Commit Message": message,
+                        "Commit SHA": commit
+                    }
+                )
+    return file_data
+
+def get_dependencies(repo_path, module, platform, commit, date, author, message, compressed):
+    resolved_path = os.path.join(repo_path, ".deps/resolved")
+    paths = os.listdir(resolved_path)
+    version = get_version(paths, platform)
+    for filename in paths:
+        file_path = os.path.join(resolved_path, filename)
+        if os.path.isfile(file_path) and is_correct_dependency(platform, version, filename):
+            download_url = get_dependency(file_path, module)
+            return get_dependency_size(download_url, commit, date, author, message, compressed) if download_url else None
+
+def get_dependency(file_path, module):
+    with open(file_path, "r", encoding="utf-8") as file:
+        file_content = file.read()
+        for line in file_content.splitlines():
+            match = re.search(r"([\w\-\d\.]+) @ (https?://[^\s#]+)", line)
+            if not match:
+                raise WrongDependencyFormat("The dependency format 'name @ link' is no longer supported.")
+            name, url = match.groups()
+            if name == module:
+                return url
+    return None            
+
+def get_dependency_size(download_url, commit, date, author, message, compressed):
+    if compressed:
+        response = requests.head(download_url)
+        response.raise_for_status()
+        size = int(response.headers.get("Content-Length"))
+    else:
+        with requests.get(download_url, stream=True) as response:
+            response.raise_for_status()
+            wheel_data = response.content
+
+        with tempfile.TemporaryDirectory() as tmpdir:
+            wheel_path = Path(tmpdir) / "package.whl"
+            with open(wheel_path, "wb") as f:
+                f.write(wheel_data)
+            extract_path = Path(tmpdir) / "extracted"
+            with zipfile.ZipFile(wheel_path, 'r') as zip_ref:
+                zip_ref.extractall(extract_path)
+
+            size = 0
+            for dirpath, _, filenames in os.walk(extract_path):
+                for name in filenames:
+                    file_path = os.path.join(dirpath, name)
+                    size += os.path.getsize(file_path)
+
+    return {
+        "Size (Bytes)": size,
+        "Date": date,
+        "Author": author,
+        "Commit Message": message,
+        "Commit SHA": commit,
+    }
+
+def get_version(files, platform):
+    final_version = ''
+    for file in files:
+        if platform in file:
+            version = file.split('_')[-1]
+            match = re.search(r"\d+(?:\.\d+)?", version)
+            version = match.group(0) if match else None
+            if version > final_version:
+                final_version = version
+    return final_version if len(final_version) != 1 else 'py'+ final_version
+
+
+def is_correct_dependency(platform, version, name):
+    return platform in name and version in name
+
+
+def group_modules(modules, platform, i):
+    grouped_aux = {}
+
+    for file in modules:
+        key = (file['Date'], file['Author'], file['Commit Message'], file['Commit SHA'])
+        grouped_aux[key] = grouped_aux.get(key, 0) + file["Size (Bytes)"]
+    if i is None: 
+        return [
+            {
+                "Commit SHA": commit,
+                "Size (Bytes)": size,
+                'Size': convert_size(size),
+                'Delta (Bytes)': 'N/A',
+                'Delta': 'N/A',
+                "Date": date,
+                "Author": author,
+                "Commit Message": message,
+            }
+            for (date, author, message, commit), size in grouped_aux.items()
+        ]
+    else: 
+        return [
+            {
+                "Commit SHA": commit,
+                "Size (Bytes)": size,
+                'Size': convert_size(size),
+                'Delta (Bytes)': 'N/A',
+                'Delta': 'N/A',
+                "Date": date,
+                "Author": author,
+                "Commit Message": message,
+                'Platform': platform,
+            }
+            for (date, author, message, commit), size in grouped_aux.items()
+        ]
+
+def trim_modules(modules, threshold=0):
+    modules[0]['Delta (Bytes)'] = 0
+    modules[0]['Delta'] = ' '
+    trimmed_modules = [modules[0]]
+    for i in range(1, len(modules)-1):
+        delta = modules[i]['Size (Bytes)']-modules[i-1]['Size (Bytes)']
+        if abs(delta) > int(threshold):
+            modules[i]['Delta (Bytes)'] = delta
+            modules[i]['Delta'] = convert_size(delta)
+            trimmed_modules.append(modules[i])
+    if len(modules) > 1:
+        delta = modules[-1]['Size (Bytes)']-modules[-2]['Size (Bytes)']
+        modules[-1]['Delta (Bytes)'] = delta
+        modules[-1]['Delta'] = convert_size(delta)
+        trimmed_modules.append(modules[-1])
+    return trimmed_modules
+
+def format_commit_data(date, message, commit, first_commit):
+    if commit == first_commit:
+        message = "(NEW) " + message
+    message = message if len(message) <= 35 else message[:30].rsplit(" ", 1)[0] + "..." + message.split()[-1]
+    date = datetime.strptime(date, "%b %d %Y").date()
+    return date, message, commit[:7]
+
+def module_exists(path, module):
+    return os.path.exists(os.path.join(path, module))
+
+def get_dependency_list(path, platforms):
+    resolved_path = os.path.join(path, ".deps/resolved")
+    all_files = os.listdir(resolved_path)
+    dependencies = set()
+
+    for platform in platforms:
+        version = get_version(all_files, platform)
+        for filename in all_files:
+            file_path = os.path.join(resolved_path, filename)
+            if os.path.isfile(file_path) and is_correct_dependency(platform, version, filename):
+                with open(file_path, "r", encoding="utf-8") as file:
+                    matches = re.findall(r"([\w\-\d\.]+) @ https?://[^\s#]+", file.read())
+                    dependencies.update(matches)
+    return dependencies
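
The timeline command above walks the selected commits, checks each one out via a sparse checkout limited to the module (or .deps/resolved for dependencies), and records one size row per commit; group_modules then sums the rows per commit, and trim_modules keeps only commits whose size changed by more than the threshold, always keeping the first and last ones. A self-contained sketch of that trimming step on made-up data; the function name and sample rows are illustrative, the field names follow the rows built above:

def deltas_above_threshold(rows, threshold=0):
    # Simplified version of trim_modules above: the first commit has no
    # predecessor, the last commit is always kept, and intermediate commits are
    # kept only when the change versus the previous commit exceeds the threshold.
    rows[0]["Delta (Bytes)"] = 0
    kept = [rows[0]]
    for prev, curr in zip(rows, rows[1:]):
        delta = curr["Size (Bytes)"] - prev["Size (Bytes)"]
        if abs(delta) > threshold or curr is rows[-1]:
            curr["Delta (Bytes)"] = delta
            kept.append(curr)
    return kept


history = [
    {"Commit SHA": "a1b2c3d", "Size (Bytes)": 1000},
    {"Commit SHA": "b2c3d4e", "Size (Bytes)": 1005},  # +5 B: filtered out
    {"Commit SHA": "c3d4e5f", "Size (Bytes)": 5000},  # +3995 B: kept
]
print(deltas_above_threshold(history, threshold=100))
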
diff --git a/ddev/tests/cli/size/test_common.py b/ddev/tests/cli/size/test_common.py
index f3c8565ac5e07..a3f29bc17134e 100644
--- a/ddev/tests/cli/size/test_common.py
+++ b/ddev/tests/cli/size/test_common.py
@@ -3,7 +3,7 @@
 from ddev.cli.size.common import (
     compress,
     convert_size,
-    get_dependencies,
+    get_dependencies_list,
     get_dependencies_sizes,
     get_gitignore_files,
     group_modules,
@@ -67,13 +67,13 @@ def test_is_valid_integration():
     assert not is_valid_integration(".git/config", included_folder, ignored_files, git_ignore)
 
 
-def test_get_dependencies():
+def test_get_dependencies_list():
     file_content = (
         "dependency1 @ https://example.com/dependency1.whl\ndependency2 @ https://example.com/dependency2.whl"
     )
     mock_open_obj = mock_open(read_data=file_content)
     with patch("builtins.open", mock_open_obj):
-        deps, urls = get_dependencies("fake_path")
+        deps, urls = get_dependencies_list("fake_path")
     assert deps == ["dependency1", "dependency2"]
     assert urls == ["https://example.com/dependency1.whl", "https://example.com/dependency2.whl"]
 
@@ -83,7 +83,7 @@ def test_get_dependencies_sizes():
     mock_response.status_code = 200
     mock_response.headers = {"Content-Length": "12345"}
     with patch("requests.head", return_value=mock_response):
-        file_data = get_dependencies_sizes(["dependency1"], ["https://example.com/dependency1.whl"])
+        file_data = get_dependencies_sizes(["dependency1"], ["https://example.com/dependency1.whl"], True)
     assert file_data == [
         {"File Path": "dependency1", "Type": "Dependency", "Name": "dependency1", "Size (Bytes)": 12345}
     ]
diff --git a/ddev/tests/cli/size/test_diff.py b/ddev/tests/cli/size/test_diff.py
index cd6027bc6914a..56759b92edb68 100644
--- a/ddev/tests/cli/size/test_diff.py
+++ b/ddev/tests/cli/size/test_diff.py
@@ -6,7 +6,7 @@
 
 import pytest
 
-from ddev.cli.size.diff import get_compressed_dependencies, get_compressed_files, get_diff
+from ddev.cli.size.diff import get_dependencies, get_files, get_diff
 
 
 def test_get_compressed_files():
@@ -36,7 +36,7 @@ def fake_compress(file_path):
         patch("ddev.cli.size.diff.compress", side_effect=fake_compress),
     ):
 
-        result = get_compressed_files(mock_repo_path)
+        result = get_files(mock_repo_path, True)
 
     expected = {
         "integration/datadog_checks/file1.py": 1000,
@@ -55,9 +55,16 @@ def test_get_compressed_dependencies(terminal):
         "dependency1 @ https://example.com/dependency1.whl\ndependency2 @ https://example.com/dependency2.whl"
     )
 
-    mock_response = MagicMock()
-    mock_response.status_code = 200
-    mock_response.headers = {"Content-Length": "12345"}
+    mock_head_response = MagicMock()
+    mock_head_response.status_code = 200
+    mock_head_response.headers = {"Content-Length": "12345"}
+
+    mock_get_response = MagicMock()
+    mock_get_response.__enter__.return_value = mock_get_response  # for use in `with` block
+    mock_get_response.status_code = 200
+    mock_get_response.headers = {"Content-Length": "12345"}
+    mock_get_response.content = b"Fake wheel file content"
+
     mock_repo_path = "root"
 
     with (
@@ -66,17 +73,16 @@ def test_get_compressed_dependencies(terminal):
         patch("os.listdir", return_value=[f"{platform}-{version}"]),
         patch("os.path.isfile", return_value=True),
         patch("builtins.open", mock_open(read_data=fake_file_content)),
-        patch("requests.head", return_value=mock_response),
+        patch("requests.head", return_value=mock_head_response),
+        patch("requests.get", return_value=mock_get_response),
     ):
-
-        file_data = get_compressed_dependencies(mock_repo_path, platform, version)
+        file_data = get_dependencies(mock_repo_path, platform, version, True)
 
     assert file_data == {
         "dependency1": 12345,
         "dependency2": 12345,
     }
 
-
 def test_get_diff():
     size_before = {
         "integration/foo.py": 1000,
@@ -119,7 +125,7 @@ def mock_size_diff_dependencies():
     mock_git_repo = MagicMock()
     mock_git_repo.repo_dir = "/tmp/fake_repo"
 
-    def get_compressed_files_side_effect(_):
+    def get_compressed_files_side_effect(_, __):
         get_compressed_files_side_effect.counter += 1
         if get_compressed_files_side_effect.counter % 2 == 1:
             return {"path1.py": 1000}  # before
@@ -128,7 +134,7 @@ def get_compressed_files_side_effect(_):
 
     get_compressed_files_side_effect.counter = 0
 
-    def get_compressed_dependencies_side_effect(_, __, ___):
+    def get_compressed_dependencies_side_effect(_, __, ___, ____):
         get_compressed_dependencies_side_effect.counter += 1
         if get_compressed_dependencies_side_effect.counter % 2 == 1:
             return {"dep1.whl": 2000}  # before
@@ -143,8 +149,8 @@ def get_compressed_dependencies_side_effect(_, __, ___):
         patch("ddev.cli.size.diff.GitRepo.__exit__", return_value=None),
         patch("ddev.cli.size.diff.GitRepo.checkout_commit"),
         patch("tempfile.mkdtemp", return_value="/tmp/fake_repo"),
-        patch("ddev.cli.size.diff.get_compressed_files", side_effect=get_compressed_files_side_effect),
-        patch("ddev.cli.size.diff.get_compressed_dependencies", side_effect=get_compressed_dependencies_side_effect),
+        patch("ddev.cli.size.diff.get_files", side_effect=get_compressed_files_side_effect),
+        patch("ddev.cli.size.diff.get_dependencies", side_effect=get_compressed_dependencies_side_effect),
         patch("ddev.cli.size.common.group_modules", side_effect=lambda m, *_: m),
         patch("ddev.cli.size.common.print_csv"),
         patch("ddev.cli.size.common.print_table"),
@@ -190,14 +196,14 @@ def test_diff_no_differences(ddev):
         patch("os.path.isfile", return_value=True),
         patch("os.listdir", return_value=["linux-aarch64_3.12"]),
         patch(
-            "ddev.cli.size.diff.get_compressed_files",
+            "ddev.cli.size.diff.get_files",
             return_value={
                 "path1.py": 1000,
                 "path2.py": 500,
             },
         ),
         patch(
-            "ddev.cli.size.diff.get_compressed_dependencies",
+            "ddev.cli.size.diff.get_dependencies",
             return_value={
                 "dep1.whl": 2000,
                 "dep2.whl": 1000,
diff --git a/ddev/tests/cli/size/test_status.py b/ddev/tests/cli/size/test_status.py
index f4500c228600a..13657e33b93a2 100644
--- a/ddev/tests/cli/size/test_status.py
+++ b/ddev/tests/cli/size/test_status.py
@@ -7,8 +7,8 @@
 import pytest
 
 from ddev.cli.size.status import (
-    get_compressed_dependencies,
-    get_compressed_files,
+    get_dependencies,
+    get_files,
 )
 
 
@@ -82,7 +82,7 @@ def test_get_compressed_dependencies():
         patch("requests.head", return_value=mock_response),
     ):
 
-        file_data = get_compressed_dependencies(platform, version)
+        file_data = get_dependencies(platform, version, True)
 
     assert file_data == [
         {"File Path": "dependency1", "Type": "Dependency", "Name": "dependency1", "Size (Bytes)": 12345},
@@ -96,7 +96,7 @@ def mock_size_status():
         patch("ddev.cli.size.status.valid_platforms_versions", return_value=({'linux-x86_64', 'macos-x86_64', 'linux-aarch64', 'windows-x86_64'}, {'3.12'})),
         patch("ddev.cli.size.status.get_gitignore_files", return_value=set()),
         patch("ddev.cli.size.status.compress", return_value=1234),
-        patch("ddev.cli.size.status.get_dependencies", return_value=(["dep1"], {"dep1": "https://example.com/dep1"})),
+        patch("ddev.cli.size.status.get_dependencies_list", return_value=(["dep1"], {"dep1": "https://example.com/dep1"})),
         patch(
             "ddev.cli.size.status.get_dependencies_sizes",
             return_value=[
diff --git a/ddev/tests/cli/size/test_timeline.py b/ddev/tests/cli/size/test_timeline.py
index 60e92b4f6c96a..0cb5d49a4b062 100644
--- a/ddev/tests/cli/size/test_timeline.py
+++ b/ddev/tests/cli/size/test_timeline.py
@@ -7,9 +7,8 @@
     group_modules,
     get_dependency_size,
     get_dependency, 
-    get_compressed_dependencies,
-    get_compressed_files,
-    module_exists
+    get_dependencies,
+    get_files,
 )
 from datetime import datetime
 
@@ -23,14 +22,15 @@ def test_get_compressed_files():
         patch("ddev.cli.size.timeline.is_valid_integration", return_value=True),
         patch("ddev.cli.size.timeline.compress", return_value=1234),
     ):
-        result = get_compressed_files(
+        result = get_files(
             "/tmp/fake_repo",
             "int1",
             "abc1234",
             datetime(2025, 4, 4).date(),
             "auth",
             "Added int1",
-            []
+            [],
+            True
         )
         assert result == [
             {
@@ -56,7 +56,7 @@ def test_get_compressed_files_deleted_only():
         patch("os.path.relpath", side_effect=lambda path, _: path.replace(f"{repo_path}/", "")),
         patch("os.path.exists", return_value=False),  
     ):
-        file_data = get_compressed_files(repo_path, module, commit, date, author, message, [])
+        file_data = get_files(repo_path, module, commit, date, author, message, [], True)
 
     assert file_data == [
         {
@@ -166,14 +166,15 @@ def test_get_compressed_dependencies():
         patch("ddev.cli.size.timeline.get_dependency", return_value="https://example.com/dep1.whl"),
         patch("ddev.cli.size.timeline.requests.get", return_value=make_mock_response("12345")),
     ):
-        result = get_compressed_dependencies(
+        result = get_dependencies(
             "/tmp/fake_repo",
             "dep1",
             "linux-x86_64",
             "abc1234",
             datetime(2025, 4, 4).date(),
             "auth",
-            "Added dep1"
+            "Added dep1",
+            True
         )
         assert result == {
             "Size (Bytes)": 12345,
@@ -190,7 +191,8 @@ def test_get_dependency_size():
             "abc1234",
             datetime(2025, 4, 4).date(),
             "auth",
-            "Fixed bug"
+            "Fixed bug",
+            True
         )
         assert result == {
             "Size (Bytes)": 45678,
@@ -259,16 +261,15 @@ def mock_timeline_dependencies():
         patch("os.path.isfile", return_value=True),
         patch("ddev.cli.size.timeline.get_gitignore_files", return_value=set()),
         patch("ddev.cli.size.timeline.get_dependency", return_value="https://example.com/dep1.whl"),
-        patch("ddev.cli.size.timeline.requests.get") as mock_get,
+        patch("ddev.cli.size.timeline.requests.head") as mock_head,
         patch("ddev.cli.size.timeline.group_modules", side_effect=lambda m, *_: m),
         patch("ddev.cli.size.timeline.trim_modules", side_effect=lambda m, *_: m),
         patch("ddev.cli.size.timeline.print_table"),
     ):
         mock_response = MagicMock()
-        mock_response.__enter__.return_value = mock_response
         mock_response.headers = {"Content-Length": "1024"}
         mock_response.raise_for_status = lambda: None
-        mock_get.return_value = mock_response
+        mock_head.return_value = mock_response
 
         yield
 
@@ -300,6 +301,10 @@ def test_timeline_no_changes_in_integration(ddev):
     with (
         patch("ddev.cli.size.timeline.GitRepo.__enter__", return_value=mock_git_repo),
         patch("ddev.cli.size.timeline.GitRepo.__exit__", return_value=None),
+        patch("os.path.exists", return_value=True),
+        patch("os.path.isdir", return_value=True),
+        patch("os.listdir", return_value=[]),
+
     ):
         result = ddev("size", "timeline", "integration", "integration/foo", "commit1", "commit2", "--compressed")
         assert result.exit_code != 0
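
The test changes above swap the mocked requests.get for requests.head, since the compressed path now only needs the Content-Length header. A condensed, standalone sketch of that mocking pattern (names are illustrative; only the attributes the code under test touches are stubbed):

from unittest.mock import MagicMock, patch

import requests


def fake_head_response(content_length: str) -> MagicMock:
    # Only what the size code reads is stubbed: headers and raise_for_status.
    response = MagicMock()
    response.headers = {"Content-Length": content_length}
    response.raise_for_status = lambda: None
    return response


with patch("requests.head", return_value=fake_head_response("1024")) as mock_head:
    size = int(requests.head("https://example.com/dep1.whl").headers["Content-Length"])
    assert size == 1024
    mock_head.assert_called_once()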

From 5a4e5d363a1eee259c8e230ffa160a03cf56edb1 Mon Sep 17 00:00:00 2001
From: Enrico Donnici <enrico.donnici@datadoghq.com>
Date: Tue, 15 Apr 2025 12:17:06 +0200
Subject: [PATCH 11/40] Try to fix job summaries

---
 .github/workflows/slapr.yml | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/slapr.yml b/.github/workflows/slapr.yml
index 3fbefcc73aa2e..8d0845e7d54c1 100644
--- a/.github/workflows/slapr.yml
+++ b/.github/workflows/slapr.yml
@@ -27,17 +27,19 @@ jobs:
       run: |
         ddev config set repos.core .
         ddev config set repo core
-    - name: Measure disk usage Uncompressed
+    - name: Measure disk usage (uncompressed)
       run: | 
         ddev size status --csv > size-uncompressed.csv
         ddev size status
+        echo "# Size (uncompressed)" >> $GITHUB_STEP_SUMMARY
         echo "```" >> $GITHUB_STEP_SUMMARY
         ddev size status >> $GITHUB_STEP_SUMMARY
         echo "```" >> $GITHUB_STEP_SUMMARY
-    - name: Measure disk usage Compressed
+    - name: Measure disk usage (compressed)
       run: |
         ddev size status --csv --compressed > size-compressed.csv
         ddev size status --compressed
+        echo "# Size (compressed)" >> $GITHUB_STEP_SUMMARY
         echo "```" >> $GITHUB_STEP_SUMMARY
         ddev size status --compressed >> $GITHUB_STEP_SUMMARY
         echo "```" >> $GITHUB_STEP_SUMMARY

From a4a00dd081ee0001f734b8c6a42871436be29f4a Mon Sep 17 00:00:00 2001
From: Enrico Donnici <enrico.donnici@datadoghq.com>
Date: Tue, 15 Apr 2025 12:27:18 +0200
Subject: [PATCH 12/40] Try fixing the job summaries again

---
 .github/workflows/slapr.yml | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/.github/workflows/slapr.yml b/.github/workflows/slapr.yml
index 8d0845e7d54c1..1ea4befc58971 100644
--- a/.github/workflows/slapr.yml
+++ b/.github/workflows/slapr.yml
@@ -32,17 +32,17 @@ jobs:
         ddev size status --csv > size-uncompressed.csv
         ddev size status
         echo "# Size (uncompressed)" >> $GITHUB_STEP_SUMMARY
-        echo "```" >> $GITHUB_STEP_SUMMARY
+        echo '```' >> $GITHUB_STEP_SUMMARY
         ddev size status >> $GITHUB_STEP_SUMMARY
-        echo "```" >> $GITHUB_STEP_SUMMARY
+        echo '```' >> $GITHUB_STEP_SUMMARY
     - name: Measure disk usage (compressed)
       run: |
         ddev size status --csv --compressed > size-compressed.csv
         ddev size status --compressed
         echo "# Size (compressed)" >> $GITHUB_STEP_SUMMARY
-        echo "```" >> $GITHUB_STEP_SUMMARY
+        echo '```' >> $GITHUB_STEP_SUMMARY
         ddev size status --compressed >> $GITHUB_STEP_SUMMARY
-        echo "```" >> $GITHUB_STEP_SUMMARY
+        echo '```' >> $GITHUB_STEP_SUMMARY
     - name: Upload file sizes (uncompressed)
       uses: actions/upload-artifact@v4
       with:

From c821c043124b9d949991368bb97d35c9ac25bf7b Mon Sep 17 00:00:00 2001
From: Lucia Sanchez Bella <lucia.sanchezbella@datadoghq.com>
Date: Wed, 16 Apr 2025 09:46:39 +0200
Subject: [PATCH 13/40] Run ddev size status once and reuse the captured output

---
 .github/workflows/slapr.yml | 10 ++++++----
 1 file changed, 6 insertions(+), 4 deletions(-)

diff --git a/.github/workflows/slapr.yml b/.github/workflows/slapr.yml
index 1ea4befc58971..07b6d5022a1e3 100644
--- a/.github/workflows/slapr.yml
+++ b/.github/workflows/slapr.yml
@@ -30,18 +30,20 @@ jobs:
     - name: Measure disk usage (uncompressed)
       run: | 
         ddev size status --csv > size-uncompressed.csv
-        ddev size status
+        ddev size status > size-uncompressed.txt
+        cat size-uncompressed.txt
         echo "# Size (uncompressed)" >> $GITHUB_STEP_SUMMARY
         echo '```' >> $GITHUB_STEP_SUMMARY
-        ddev size status >> $GITHUB_STEP_SUMMARY
+        cat size-uncompressed.txt >> $GITHUB_STEP_SUMMARY
         echo '```' >> $GITHUB_STEP_SUMMARY
     - name: Measure disk usage (compressed)
       run: |
         ddev size status --csv --compressed > size-compressed.csv
-        ddev size status --compressed
+        ddev size status --compressed > size-compressed.txt
+        cat size-compressed.txt
         echo "# Size (compressed)" >> $GITHUB_STEP_SUMMARY
         echo '```' >> $GITHUB_STEP_SUMMARY
-        ddev size status --compressed >> $GITHUB_STEP_SUMMARY
+        cat size-compressed.txt >> $GITHUB_STEP_SUMMARY
         echo '```' >> $GITHUB_STEP_SUMMARY
     - name: Upload file sizes (uncompressed)
       uses: actions/upload-artifact@v4

From ed1e372faccabe565d7b0f7af125d2c20e6d98c0 Mon Sep 17 00:00:00 2001
From: Lucia Sanchez Bella <lucia.sanchezbella@datadoghq.com>
Date: Wed, 16 Apr 2025 09:55:22 +0200
Subject: [PATCH 14/40] Add CI workflow to measure size status on master

---
 .github/workflows/measure-disk-usage.yml | 59 ++++++++++++++++++++++++
 1 file changed, 59 insertions(+)
 create mode 100644 .github/workflows/measure-disk-usage.yml

diff --git a/.github/workflows/measure-disk-usage.yml b/.github/workflows/measure-disk-usage.yml
new file mode 100644
index 0000000000000..31f78b1316415
--- /dev/null
+++ b/.github/workflows/measure-disk-usage.yml
@@ -0,0 +1,59 @@
+name: Measure Disk Usage
+
+on:
+  push:
+    branches:
+    - master
+env:
+  PYTHON_VERSION: "3.12"
+
+jobs:
+  measure-disk-usage:
+    runs-on: ubuntu-22.04
+    steps:
+    - uses: actions/checkout@v4
+      with:
+        fetch-depth: 0
+    - name: Set up Python ${{ env.PYTHON_VERSION }}
+      uses: actions/setup-python@v5
+      with:
+        python-version: ${{ env.PYTHON_VERSION }}
+    - name: Install ddev
+      run: |
+        pip install -e ./datadog_checks_dev[cli]
+        pip install -e ./ddev
+
+    - name: Configure ddev
+      run: |
+        ddev config set repos.core .
+        ddev config set repo core
+    - name: Measure disk usage (uncompressed)
+      run: | 
+        ddev size status --csv > size-uncompressed.csv
+        ddev size status > size-uncompressed.txt
+        cat size-uncompressed.txt
+        echo "# Size (uncompressed)" >> $GITHUB_STEP_SUMMARY
+        echo '```' >> $GITHUB_STEP_SUMMARY
+        cat size-uncompressed.txt >> $GITHUB_STEP_SUMMARY
+        echo '```' >> $GITHUB_STEP_SUMMARY
+    - name: Measure disk usage (compressed)
+      run: |
+        ddev size status --csv --compressed > size-compressed.csv
+        ddev size status --compressed > size-compressed.txt
+        cat size-compressed.txt
+        echo "# Size (compressed)" >> $GITHUB_STEP_SUMMARY
+        echo '```' >> $GITHUB_STEP_SUMMARY
+        cat size-compressed.txt >> $GITHUB_STEP_SUMMARY
+        echo '```' >> $GITHUB_STEP_SUMMARY
+    - name: Upload file sizes (uncompressed)
+      uses: actions/upload-artifact@v4
+      with:
+        name: size-uncompressed.csv
+        path: size-uncompressed.csv
+        if-no-files-found: error
+    - name: Upload file sizes (compressed)
+      uses: actions/upload-artifact@v4
+      with:
+        name: size-compressed.csv
+        path: size-compressed.csv
+        if-no-files-found: error
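
The workflow above uploads size-uncompressed.csv and size-compressed.csv as artifacts on every push to master. A hypothetical consumer of those artifacts, assuming the CSV columns emitted by the status command earlier in the series ("Name", "Type", "Size (Bytes)"):

import csv


def top_modules(csv_path: str, n: int = 10) -> list[tuple[str, int]]:
    # Return the n largest modules from a CSV produced by `ddev size status --csv`.
    with open(csv_path, newline="", encoding="utf-8") as f:
        rows = list(csv.DictReader(f))
    rows.sort(key=lambda row: int(row["Size (Bytes)"]), reverse=True)
    return [(row["Name"], int(row["Size (Bytes)"])) for row in rows[:n]]


print(top_modules("size-uncompressed.csv"))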

From 9cc7e5b2f0928536f2de4ea439e7ffbb166629c6 Mon Sep 17 00:00:00 2001
From: Lucia Sanchez Bella <lucia.sanchezbella@datadoghq.com>
Date: Mon, 21 Apr 2025 10:08:24 +0200
Subject: [PATCH 15/40] Add type annotations

---
 ddev/src/ddev/cli/size/__init__.py   |  18 +-
 ddev/src/ddev/cli/size/common.py     |  97 +++----
 ddev/src/ddev/cli/size/diff.py       | 134 ++++++----
 ddev/src/ddev/cli/size/status.py     |  43 ++--
 ddev/src/ddev/cli/size/timeline.py   | 362 +++++++++++++++++----------
 ddev/tests/cli/size/test_common.py   |  13 +-
 ddev/tests/cli/size/test_diff.py     |  38 ++-
 ddev/tests/cli/size/test_status.py   |  21 +-
 ddev/tests/cli/size/test_timeline.py | 245 +++++++++++++-----
 9 files changed, 647 insertions(+), 324 deletions(-)

diff --git a/ddev/src/ddev/cli/size/__init__.py b/ddev/src/ddev/cli/size/__init__.py
index dc0a07beb809b..df20d8628ba2f 100644
--- a/ddev/src/ddev/cli/size/__init__.py
+++ b/ddev/src/ddev/cli/size/__init__.py
@@ -8,9 +8,23 @@
 from ddev.cli.size.status import status
 from ddev.cli.size.timeline import timeline
 
-@click.group(short_help='Get the size of integrations and dependencies by platform and python version')
+
+@click.group()
 def size():
-    """Package Size Analyzer"""
+    """
+    Analyze the download size of integrations and dependencies in various modes.
+
+    This command provides tools to inspect, compare, and monitor size changes of modules
+    across different commits, platforms, and Python versions.
+
+    Available subcommands:
+      • status   Show the current sizes of all modules
+      • diff     Compare sizes between two commits
+      • timeline Show the size evolution of a module over time
+
+    Use `ddev size <subcommand> --help` for more details on each mode.
+    """
+
     pass
 
 
diff --git a/ddev/src/ddev/cli/size/common.py b/ddev/src/ddev/cli/size/common.py
index 0f480bbe5ab85..491ff024d4d48 100644
--- a/ddev/src/ddev/cli/size/common.py
+++ b/ddev/src/ddev/cli/size/common.py
@@ -3,16 +3,21 @@
 # Licensed under a 3-clause BSD style license (see LICENSE)
 import os
 import re
-import zlib
 import shutil
 import subprocess
 import tempfile
-import requests
-from pathlib import Path
 import zipfile
+import zlib
+from pathlib import Path
+from types import TracebackType
+from typing import Dict, List, Optional, Set, Tuple, Type, Union
 
+import requests
+
+from ddev.cli.application import Application
 
-def valid_platforms_versions(repo_path):
+
+def valid_platforms_versions(repo_path: str) -> Tuple[Set[str], Set[str]]:
     resolved_path = os.path.join(repo_path, ".deps/resolved")
     platforms = []
     versions = []
@@ -22,18 +27,17 @@ def valid_platforms_versions(repo_path):
         if match:
             versions.append(match.group())
     return set(platforms), set(versions)
-    
 
-# mirar si existe
-def convert_size(size_bytes):
+
+def convert_size(size_bytes: int) -> str:
     for unit in [' B', ' KB', ' MB', ' GB']:
-        if size_bytes < 1024:
+        if abs(size_bytes) < 1024:
             return str(round(size_bytes, 2)) + unit
         size_bytes /= 1024
     return str(round(size_bytes, 2)) + " TB"
 
 
-def is_valid_integration(path, included_folder, ignored_files, git_ignore):
+def is_valid_integration(path: str, included_folder: str, ignored_files: Set[str], git_ignore: List[str]) -> bool:
     # It is not an integration
     if path.startswith('.'):
         return False
@@ -50,11 +54,11 @@ def is_valid_integration(path, included_folder, ignored_files, git_ignore):
         return True
 
 
-def is_correct_dependency(platform, version, name):
+def is_correct_dependency(platform: str, version: str, name: str) -> bool:
     return platform in name and version in name
 
 
-def print_csv(app, i, modules):
+def print_csv(app: Application, i: Optional[int], modules: List[Dict[str, Union[str, int]]]) -> None:
     headers = [k for k in modules[0].keys() if k not in ['Size', 'Delta']]
     if not i:
         app.display(",".join(headers))
@@ -63,14 +67,11 @@ def print_csv(app, i, modules):
         app.display(",".join(format(str(row[h])) for h in headers))
 
 
-def format(s):
-    if "," in s:
-        return '"' + s + '"'
-    else:
-        return s
+def format(s: str) -> str:
+    return f'"{s}"' if "," in s else s
 
 
-def print_table(app, mode, modules):
+def print_table(app: Application, mode: str, modules: List[Dict[str, Union[str, int]]]) -> None:
     modules_table = {col: {} for col in modules[0].keys() if '(Bytes)' not in col}
     for i, row in enumerate(modules):
         for key, value in row.items():
@@ -79,7 +80,9 @@ def print_table(app, mode, modules):
     app.display_table(mode, modules_table)
 
 
-def get_dependencies_sizes(deps, download_urls, compressed):
+def get_dependencies_sizes(
+    deps: List[str], download_urls: List[str], compressed: bool
+) -> List[Dict[str, Union[str, int]]]:
     file_data = []
     for dep, url in zip(deps, download_urls, strict=False):
         if compressed:
@@ -108,7 +111,7 @@ def get_dependencies_sizes(deps, download_urls, compressed):
     return file_data
 
 
-def get_dependencies_list(file_path):
+def get_dependencies_list(file_path: str) -> Tuple[List[str], List[str]]:
     download_urls = []
     deps = []
     with open(file_path, "r", encoding="utf-8") as file:
@@ -124,7 +127,9 @@ def get_dependencies_list(file_path):
     return deps, download_urls
 
 
-def group_modules(modules, platform, version, i):
+def group_modules(
+    modules: List[Dict[str, Union[str, int]]], platform: str, version: str, i: Optional[int]
+) -> List[Dict[str, Union[str, int]]]:
     grouped_aux = {}
 
     for file in modules:
@@ -132,15 +137,10 @@ def group_modules(modules, platform, version, i):
         grouped_aux[key] = grouped_aux.get(key, 0) + file["Size (Bytes)"]
     if i is None:
         return [
-        {
-            'Name': name,
-            'Type': type,
-            'Size (Bytes)': size,
-            'Size': convert_size(size)
-        }
-        for (name, type), size in grouped_aux.items()
+            {'Name': name, 'Type': type, 'Size (Bytes)': size, 'Size': convert_size(size)}
+            for (name, type), size in grouped_aux.items()
         ]
-    else: 
+    else:
         return [
             {
                 'Name': name,
@@ -154,7 +154,7 @@ def group_modules(modules, platform, version, i):
         ]
 
 
-def get_gitignore_files(repo_path):
+def get_gitignore_files(repo_path: str) -> List[str]:
     gitignore_path = os.path.join(repo_path, ".gitignore")
     with open(gitignore_path, "r", encoding="utf-8") as file:
         gitignore_content = file.read()
@@ -164,7 +164,7 @@ def get_gitignore_files(repo_path):
         return ignored_patterns
 
 
-def compress(file_path):
+def compress(file_path: str) -> int:
     compressor = zlib.compressobj()
     compressed_size = 0
     # original_size = os.path.getsize(file_path)
@@ -175,12 +175,14 @@ def compress(file_path):
         compressed_size += len(compressor.flush())
     return compressed_size
 
+
 class WrongDependencyFormat(Exception):
-    def __init__(self, mensaje):
+    def __init__(self, mensaje: str) -> None:
         super().__init__(mensaje)
 
+
 class GitRepo:
-    def __init__(self, url):
+    def __init__(self, url: str) -> None:
         self.url = url
         self.repo_dir = None
 
@@ -193,12 +195,14 @@ def __enter__(self):
             self._run(f"git clone --quiet {self.url} {self.repo_dir}")
         return self
 
-    def _run(self, command):
+    def _run(self, command: str) -> List[str]:
         result = subprocess.run(command, shell=True, capture_output=True, text=True, check=True, cwd=self.repo_dir)
         return result.stdout.strip().split('\n')
 
-    def get_module_commits(self, module_path, initial, final, time):
-        self._run("git fetch origin --quiet") # 1 min no coger todo solo el module
+    def get_module_commits(
+        self, module_path: str, initial: Optional[str], final: Optional[str], time: Optional[str]
+    ) -> List[str]:
+        self._run("git fetch origin --quiet")
         self._run("git checkout origin/HEAD")
         if time:
             return self._run(f'git log --since="{time}" --reverse --pretty=format:%H -- {module_path}')
@@ -212,25 +216,30 @@ def get_module_commits(self, module_path, initial, final, time):
             except subprocess.CalledProcessError:
                 raise ValueError(f"Commit {initial} does not come before {final}")
             return self._run(f"git log --reverse --pretty=format:%H {initial}..{final} -- {module_path}")
-           
 
-    def checkout_commit(self, commit):
+    def checkout_commit(self, commit: str) -> None:
         self._run(f"git fetch --quiet --depth 1 origin {commit}")
         self._run(f"git checkout --quiet {commit}")
 
-    def sparse_checkout_commit(self, commit_sha, module):
-        self._run("git sparse-checkout init --cone") 
+    def sparse_checkout_commit(self, commit_sha: str, module: str) -> None:
+        self._run("git sparse-checkout init --cone")
         self._run(f"git sparse-checkout set {module}")
         self._run(f"git checkout {commit_sha}")
-    
-    def get_commit_metadata(self,commit):
+
+    def get_commit_metadata(self, commit: str) -> Tuple[str, str, str]:
         result = self._run(f'git log -1 --date=format:"%b %d %Y" --pretty=format:"%ad\n%an\n%s" {commit}')
         date, author, message = result
         return date, author, message
-    
-    def get_creation_commit_module(self, module):
+
+    def get_creation_commit_module(self, module: str) -> str:
         return self._run(f'git log --reverse --format="%H" -- {module}')[0]
 
-    def __exit__(self, exception_type, exception_value, exception_traceback):
+
+    def __exit__(
+        self,
+        exception_type: Optional[Type[BaseException]],
+        exception_value: Optional[BaseException],
+        exception_traceback: Optional[TracebackType],
+    ) -> None:
         if self.repo_dir and os.path.exists(self.repo_dir):
             shutil.rmtree(self.repo_dir)
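
One behavioural change tucked into the typing pass above: convert_size now tests abs(size_bytes), so large negative deltas coming out of the diff and timeline modes are scaled to KB/MB/GB instead of always being reported in raw bytes. A quick check of that behaviour, reproducing the helper as written above:

def convert_size(size_bytes):
    # Same logic as the typed helper above; abs() lets negative deltas pick the
    # right unit while keeping their sign.
    for unit in [' B', ' KB', ' MB', ' GB']:
        if abs(size_bytes) < 1024:
            return str(round(size_bytes, 2)) + unit
        size_bytes /= 1024
    return str(round(size_bytes, 2)) + " TB"


print(convert_size(-1048576))  # "-1.0 MB" (previously "-1048576 B")
print(convert_size(500))       # "500 B"
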
diff --git a/ddev/src/ddev/cli/size/diff.py b/ddev/src/ddev/cli/size/diff.py
index 1700eda378686..bfad6bf61442a 100644
--- a/ddev/src/ddev/cli/size/diff.py
+++ b/ddev/src/ddev/cli/size/diff.py
@@ -3,15 +3,21 @@
 # Licensed under a 3-clause BSD style license (see LICENSE)
 
 import os
+import tempfile
+import zipfile
 from pathlib import Path
-from rich.console import Console
+from typing import Dict, List, Optional, Tuple
+
 import click
 import requests
-import tempfile
-import zipfile
+from rich.console import Console
+from rich.progress import BarColumn, Progress, SpinnerColumn, TextColumn, TimeElapsedColumn
+
+from ddev.cli.application import Application
+
 from .common import (
+    GitRepo,
     compress,
-    valid_platforms_versions,
     get_dependencies_list,
     get_gitignore_files,
     group_modules,
@@ -19,45 +25,74 @@
     is_valid_integration,
     print_csv,
     print_table,
-    GitRepo
+    valid_platforms_versions,
 )
 
-# VALID_PLATFORMS, VALID_PYTHON_VERSIONS = valid_platforms_versions()
 console = Console()
 
 
 @click.command()
 @click.argument("before")
 @click.argument("after")
-@click.option('--platform', help="Target platform")
-@click.option('--python', 'version', help="Python version (MAJOR.MINOR)")
+@click.option(
+    '--platform', help="Target platform (e.g. linux-aarch64). If not specified, all platforms will be analyzed"
+)
+@click.option('--python', 'version', help="Python version (e.g. 3.12). If not specified, all versions will be analyzed")
 @click.option('--compressed', is_flag=True, help="Measure compressed size")
 @click.option('--csv', is_flag=True, help="Output in CSV format")
 @click.pass_obj
-def diff(app, before, after, platform, version, compressed, csv):
-    repo_url = app.repo.path
-    with GitRepo(repo_url) as gitRepo:
-        try:
-            valid_platforms,valid_versions = valid_platforms_versions(gitRepo.repo_dir)
-            if platform and platform not in valid_platforms:
-                raise ValueError(f"Invalid platform: {platform}")
-            elif version and version not in valid_versions:
-                raise ValueError(f"Invalid version: {version}")
-            if platform is None or version is None:
-                platforms = valid_platforms if platform is None else [platform]
-                versions = valid_versions if version is None else [version]
-
-                for i, (plat, ver) in enumerate([(p, v) for p in platforms for v in versions]):
-                    diff_mode(app, gitRepo, before, after, plat, ver, compressed, csv, i)
-            else:
-                    diff_mode(app, gitRepo, before, after, platform, version, compressed, csv, None)
-
-        except Exception as e:
-            app.abort(str(e))
-
-
-def diff_mode(app, gitRepo, before, after, platform, version, compressed, csv, i):
-    files_b, dependencies_b, files_a, dependencies_a = get_repo_info(gitRepo, platform, version, before, after, compressed)
+def diff(
+    app: Application, before: str, after: str, platform: Optional[str], version: Optional[str], compressed: bool, csv: bool
+) -> None:
+    """
+    Compare the size of integrations and dependencies between two commits.
+    """
+    with Progress(
+        SpinnerColumn(),
+        TextColumn("[progress.description]{task.description}"),
+        BarColumn(),
+        TimeElapsedColumn(),
+        transient=True,
+    ) as progress:
+        task = progress.add_task("[cyan]Calculating differences...", total=None)
+        repo_url = app.repo.path
+        with GitRepo(repo_url) as gitRepo:
+            try:
+                valid_platforms, valid_versions = valid_platforms_versions(gitRepo.repo_dir)
+                if platform and platform not in valid_platforms:
+                    raise ValueError(f"Invalid platform: {platform}")
+                elif version and version not in valid_versions:
+                    raise ValueError(f"Invalid version: {version}")
+                if platform is None or version is None:
+                    platforms = valid_platforms if platform is None else [platform]
+                    versions = valid_versions if version is None else [version]
+                    progress.remove_task(task)
+
+                    for i, (plat, ver) in enumerate([(p, v) for p in platforms for v in versions]):
+                        diff_mode(app, gitRepo, before, after, plat, ver, compressed, csv, i, progress)
+                else:
+                    progress.remove_task(task)
+                    diff_mode(app, gitRepo, before, after, platform, version, compressed, csv, None, progress)
+
+            except Exception as e:
+                app.abort(str(e))
+
+
+def diff_mode(
+    app: Application,
+    gitRepo: GitRepo,
+    before: str,
+    after: str,
+    platform: str,
+    version: str,
+    compressed: bool,
+    csv: bool,
+    i: Optional[int],
+    progress: Progress,
+) -> None:
+    files_b, dependencies_b, files_a, dependencies_a = get_repo_info(
+        gitRepo, platform, version, before, after, compressed, progress
+    )
 
     integrations = get_diff(files_b, files_a, 'Integration')
     dependencies = get_diff(dependencies_b, dependencies_a, 'Dependency')
@@ -75,22 +110,27 @@ def diff_mode(app, gitRepo, before, after, platform, version, compressed, csv, i
             print_table(app, "Diff", grouped_modules)
 
 
-def get_repo_info(gitRepo, platform, version, before, after, compressed):
+def get_repo_info(
+    gitRepo: GitRepo, platform: str, version: str, before: str, after: str, compressed: bool, progress: Progress,
+) -> Tuple[Dict[str, int], Dict[str, int], Dict[str, int], Dict[str, int]]:
     repo = gitRepo.repo_dir
-    with console.status("[cyan]Calculating compressed sizes for the first commit...", spinner="dots"):
-        gitRepo.checkout_commit(before)
-        files_b = get_files(repo, compressed)
-        dependencies_b = get_dependencies(repo, platform, version, compressed)
+    task = progress.add_task("[cyan]Calculating sizes for the first commit...", total=None)
+    gitRepo.checkout_commit(before)
+    files_b = get_files(repo, compressed)
+    dependencies_b = get_dependencies(repo, platform, version, compressed)
+    progress.remove_task(task)
+
+    task = progress.add_task("[cyan]Calculating sizes for the second commit...", total=None)
+    gitRepo.checkout_commit(after)
+    files_a = get_files(repo, compressed)
+    dependencies_a = get_dependencies(repo, platform, version, compressed)
+    progress.remove_task(task)
 
-    with console.status("[cyan]Calculating compressed sizes for the second commit...", spinner="dots"):
-        gitRepo.checkout_commit(after)
-        files_a = get_files(repo, compressed)
-        dependencies_a = get_dependencies(repo, platform, version, compressed)
 
     return files_b, dependencies_b, files_a, dependencies_a
 
 
-def get_diff(size_before, size_after, type):
+def get_diff(size_before: Dict[str, int], size_after: Dict[str, int], type: str) -> List[Dict[str, str | int]]:
     all_paths = set(size_before.keys()) | set(size_after.keys())
     diff_files = []
 
@@ -131,7 +171,7 @@ def get_diff(size_before, size_after, type):
     return diff_files
 
 
-def get_files(repo_path, compressed):
+def get_files(repo_path: str, compressed: bool) -> Dict[str, int]:
 
     ignored_files = {"datadog_checks_dev", "datadog_checks_tests_helper"}
     git_ignore = get_gitignore_files(repo_path)
@@ -152,7 +192,7 @@ def get_files(repo_path, compressed):
     return file_data
 
 
-def get_dependencies(repo_path, platform, version, compressed):
+def get_dependencies(repo_path: str, platform: str, version: str, compressed: bool) -> Dict[str, int]:
 
     resolved_path = os.path.join(repo_path, ".deps/resolved")
 
@@ -165,9 +205,9 @@ def get_dependencies(repo_path, platform, version, compressed):
     return {}
 
 
-def get_dependencies_sizes(deps, download_urls, compressed):
+def get_dependencies_sizes(deps: List[str], download_urls: List[str], compressed: bool) -> Dict[str, int]:
     file_data = {}
-    for dep, url in zip(deps, download_urls):
+    for dep, url in zip(deps, download_urls, strict=False):
         if compressed:
             response = requests.head(url)
             response.raise_for_status()
@@ -192,5 +232,3 @@ def get_dependencies_sizes(deps, download_urls, compressed):
                         size += os.path.getsize(file_path)
         file_data[dep] = size
     return file_data
-
-
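
The diff command above drops the console.status spinner in favour of a rich Progress instance, so each phase (checkout plus measurement of each commit) gets its own transient task. A minimal standalone sketch of that pattern; the sleep stands in for the real work:

import time

from rich.progress import BarColumn, Progress, SpinnerColumn, TextColumn, TimeElapsedColumn

with Progress(
    SpinnerColumn(),
    TextColumn("[progress.description]{task.description}"),
    BarColumn(),
    TimeElapsedColumn(),
    transient=True,  # clear the bars once the block exits
) as progress:
    for phase in ("first commit", "second commit"):
        task = progress.add_task(f"[cyan]Calculating sizes for the {phase}...", total=None)
        time.sleep(0.5)  # placeholder for the real checkout + size measurement
        progress.remove_task(task)
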
diff --git a/ddev/src/ddev/cli/size/status.py b/ddev/src/ddev/cli/size/status.py
index 8e1db4e78b840..66efd243b317b 100644
--- a/ddev/src/ddev/cli/size/status.py
+++ b/ddev/src/ddev/cli/size/status.py
@@ -4,8 +4,12 @@
 
 import os
 from pathlib import Path
-from rich.console import Console
+from typing import Dict, List, Optional, Union
+
 import click
+from rich.console import Console
+
+from ddev.cli.application import Application
 
 from .common import (
     compress,
@@ -17,46 +21,48 @@
     is_valid_integration,
     print_csv,
     print_table,
-    valid_platforms_versions
+    valid_platforms_versions,
 )
 
-#VALID_PLATFORMS = ["linux-aarch64", "linux-x86_64", "macos-x86_64", "windows-x86_64"]
-
-
 REPO_PATH = Path(__file__).resolve().parents[5]
-# VALID_PLATFORMS, VALID_PYTHON_VERSIONS = valid_platforms_versions()
 
 console = Console()
 
+
 @click.command()
-@click.option('--platform', help="Target platform")
-@click.option('--python', 'version', help="Python version (MAJOR.MINOR)")
+@click.option(
+    '--platform', help="Target platform (e.g. linux-aarch64). If not specified, all platforms will be analyzed"
+)
+@click.option('--python', 'version', help="Python version (e.g. 3.12). If not specified, all versions will be analyzed")
 @click.option('--compressed', is_flag=True, help="Measure compressed size")
 @click.option('--csv', is_flag=True, help="Output in CSV format")
 @click.pass_obj
-def status(app, platform, version, compressed, csv):
+def status(app: Application, platform: Optional[str], version: Optional[str], compressed: bool, csv: bool) -> None:
+    """
+    Show the current size of all integrations and dependencies.
+    """
     try:
         repo_path = app.repo.path
-        valid_platforms,valid_versions = valid_platforms_versions(repo_path)
+        valid_platforms, valid_versions = valid_platforms_versions(repo_path)
         if platform and platform not in valid_platforms:
             raise ValueError(f"Invalid platform: {platform}")
         elif version and version not in valid_versions:
             raise ValueError(f"Invalid version: {version}")
         if platform is None or version is None:
-            platforms =  valid_platforms if platform is None else [platform]
+            platforms = valid_platforms if platform is None else [platform]
             versions = valid_versions if version is None else [version]
             for i, (plat, ver) in enumerate([(p, v) for p in platforms for v in versions]):
                 status_mode(app, plat, ver, compressed, csv, i)
         else:
-           status_mode(app, platform, version, compressed, csv, None)
-        
+            status_mode(app, platform, version, compressed, csv, None)
+
     except Exception as e:
         app.abort(str(e))
 
 
-def status_mode(app, platform, version, compressed, csv, i):
+def status_mode(app: Application, platform: str, version: str, compressed: bool, csv: bool, i: Optional[int]) -> None:
     with console.status("[cyan]Calculating sizes...", spinner="dots"):
-        modules = get_files(compressed) + get_dependencies(platform, version,compressed)
+        modules = get_files(compressed) + get_dependencies(platform, version, compressed)
     grouped_modules = group_modules(modules, platform, version, i)
     grouped_modules.sort(key=lambda x: x['Size (Bytes)'], reverse=True)
 
@@ -66,7 +72,7 @@ def status_mode(app, platform, version, compressed, csv, i):
         print_table(app, "STATUS", grouped_modules)
 
 
-def get_files(compressed):
+def get_files(compressed: bool) -> List[Dict[str, Union[str, int]]]:
 
     ignored_files = {"datadog_checks_dev", "datadog_checks_tests_helper"}
     git_ignore = get_gitignore_files(REPO_PATH)
@@ -95,7 +101,7 @@ def get_files(compressed):
     return file_data
 
 
-def get_dependencies(platform, version, compressed):
+def get_dependencies(platform: str, version: str, compressed: bool) -> List[Dict[str, Union[str, int]]]:
 
     resolved_path = os.path.join(REPO_PATH, ".deps/resolved")
     for filename in os.listdir(resolved_path):
@@ -103,6 +109,3 @@ def get_dependencies(platform, version, compressed):
         if os.path.isfile(file_path) and is_correct_dependency(platform, version, filename):
             deps, download_urls = get_dependencies_list(file_path)
             return get_dependencies_sizes(deps, download_urls, compressed)
-
-
-
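
A small readability note on the status command above: the nested list comprehension that pairs every platform with every Python version is equivalent to itertools.product, which may scale better if more dimensions are ever added. A sketch of the alternative (a suggestion only, not part of the patch); the platform and version values are taken from the tests earlier in the series:

from itertools import product

valid_platforms = {"linux-aarch64", "linux-x86_64", "macos-x86_64", "windows-x86_64"}
valid_versions = {"3.12"}

# Same pairs as `[(p, v) for p in platforms for v in versions]`; sorted() only
# makes the iteration order deterministic since these are sets.
for i, (plat, ver) in enumerate(product(sorted(valid_platforms), sorted(valid_versions))):
    print(i, plat, ver)
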
diff --git a/ddev/src/ddev/cli/size/timeline.py b/ddev/src/ddev/cli/size/timeline.py
index aee4256084ed9..6cb1b5c3a8775 100644
--- a/ddev/src/ddev/cli/size/timeline.py
+++ b/ddev/src/ddev/cli/size/timeline.py
@@ -1,111 +1,194 @@
-
-import click
-import requests
 import os
 import re
-from datetime import datetime
-from rich.progress import Progress, SpinnerColumn, BarColumn, TextColumn, TimeElapsedColumn
-from rich.console import Console
 import tempfile
-from pathlib import Path
 import zipfile
+from datetime import date, datetime
+from pathlib import Path
+from typing import Dict, List, Optional, Set, Tuple, Union
+
+import click
+import requests
+from rich.console import Console
+from rich.progress import BarColumn, Progress, SpinnerColumn, TextColumn, TimeElapsedColumn
+
+from ddev.cli.application import Application
+
 from .common import (
+    GitRepo,
+    WrongDependencyFormat,
     compress,
-    get_gitignore_files,
     convert_size,
+    get_gitignore_files,
     is_correct_dependency,
     is_valid_integration,
     print_csv,
     print_table,
-    GitRepo,
-    WrongDependencyFormat, 
-    valid_platforms_versions
+    valid_platforms_versions,
 )
 
-#VALID_PLATFORMS = ["linux-aarch64", "linux-x86_64", "macos-x86_64", "windows-x86_64"]
-#VALID_PYTHON_VERSIONS = ["3.12"]
-
-# VALID_PLATFORMS, _ = valid_platforms_versions()
-DEPENDENCY_FILE_CHANGE = datetime.strptime("Sep 17 2024","%b %d %Y").date()
-MINIMUM_DATE = datetime.strptime("Apr 3 2024","%b %d %Y").date()
+DEPENDENCY_FILE_CHANGE = datetime.strptime("Sep 17 2024", "%b %d %Y").date()
+MINIMUM_DATE = datetime.strptime("Apr 3 2024", "%b %d %Y").date()
 console = Console()
 
+
 @click.command()
 @click.argument('type', type=click.Choice(['integration', 'dependency']))
 @click.argument('module')
 @click.argument('initial', required=False)
 @click.argument('final', required=False)
-@click.option('--time', help="Filter commits starting from a specific date. Accepts both absolute and relative formats, "
-         "such as '2025-03-01', '2 weeks ago', or 'yesterday'")
+@click.option(
+    '--time',
+    help="Filter commits starting from a specific date. Accepts both absolute and relative formats, "
+    "such as '2025-03-01', '2 weeks ago', or 'yesterday'",
+)
 @click.option('--threshold', help="Only show modules with size differences greater than a threshold in bytes")
-@click.option('--platform', help="Target platform to analyze. Only required for dependencies. If not specified, all platforms will be analyzed")
-#@click.option('--python', 'version', type=click.Choice(VALID_PYTHON_VERSIONS), help="Python version (MAJOR.MINOR)")
+@click.option(
+    '--platform',
+    help="Target platform to analyze. Only required for dependencies. If not specified, all platforms will be analyzed",
+)
 @click.option('--compressed', is_flag=True, help="Measure compressed size")
 @click.option('--csv', is_flag=True, help="Output results in CSV format")
 @click.pass_obj
-def timeline(app, type, module, initial, final, time, threshold, platform, compressed, csv):
-    url = app.repo.path
-    with GitRepo(url) as gitRepo:
-        try:
-            with console.status("[cyan]Fetching commits...", spinner="dots"):
-                folder = module if type == 'integration' else '.deps/resolved'
-                commits = gitRepo.get_module_commits(folder, initial, final, time)
-                first_commit = gitRepo.get_creation_commit_module(module)
-                gitRepo.checkout_commit(commits[-1])
-                valid_platforms, _ = valid_platforms_versions(gitRepo.repo_dir)
-                n_platforms = len(valid_platforms)
-            if platform and platform not in valid_platforms:
-                raise ValueError(f"Invalid platform: {platform}")
-            elif commits == [''] and type == "integration" and module_exists(gitRepo.repo_dir, module):
-                raise ValueError(f"No changes found: {module}")
-            elif commits == [''] and type == "integration" and not module_exists(gitRepo.repo_dir, module):
-                raise ValueError(f"Integration {module} not found in latest commit, is the name correct?")
-            elif type == 'dependency' and platform and module not in get_dependency_list(gitRepo.repo_dir, [platform]):
-                raise ValueError(f"Dependency {module} not found in latest commit for the platform {platform}, is the name correct?")
-            elif type == 'dependency' and not platform and module not in get_dependency_list(gitRepo.repo_dir, valid_platforms):
-                raise ValueError(f"Dependency {module} not found in latest commit, is the name correct?")
-            elif type == 'dependency' and commits == ['']:
-                raise ValueError(f"No changes found: {module}")
-            if type == "dependency" and platform is None:
-                for i, plat in enumerate(valid_platforms):
-                    timeline_mode(app, gitRepo, type, module, commits, threshold, plat, compressed, csv, i, True, n_platforms, None)
-            else:
-                timeline_mode(app, gitRepo, type, module, commits, threshold, platform, compressed, csv, None, False, n_platforms, first_commit)
-        except Exception as e:
-            app.abort(str(e))
-
-
-def timeline_mode(app, gitRepo, type, module, commits, threshold, platform, compressed, csv, i, maybe_mod_missing, n_platforms,first_commit):
-    modules = get_repo_info(gitRepo, type, platform, module, commits, i, maybe_mod_missing,n_platforms, compressed, first_commit)
-    if modules != []:
-        with console.status("[cyan]Exporting data...", spinner="dots"):
-            grouped_modules = group_modules(modules, platform, i)
-            trimmed_modules = trim_modules(grouped_modules, threshold)
-            maybe_mod_missing = False
-            if csv:
-                print_csv(app, i, trimmed_modules)
-            else:
-                print_table(app, "Timeline for " + module, trimmed_modules)
-
-def get_repo_info(gitRepo, type, platform, module, commits, i, maybe_mod_missing, n_platforms, compressed, first_commit):
+def timeline(
+    app: Application,
+    type: str,
+    module: str,
+    initial: Optional[str],
+    final: Optional[str],
+    time: Optional[str],
+    threshold: Optional[str],
+    platform: Optional[str],
+    compressed: bool,
+    csv: bool,
+) -> None:
+    """
+    Show the size evolution of a module (integration or dependency) over time.
+    """
     with Progress(
         SpinnerColumn(),
         TextColumn("[progress.description]{task.description}"),
         BarColumn(),
         TimeElapsedColumn(),
-        transient=True, 
+        transient=True,
     ) as progress:
+        task = progress.add_task("[cyan]Calculating timeline...", total=None)
+        url = app.repo.path
+        with GitRepo(url) as gitRepo:
+            try:
+                # with console.status("[cyan]Fetching commits...", spinner="dots"):
+                folder = module if type == 'integration' else '.deps/resolved'
+                commits = gitRepo.get_module_commits(folder, initial, final, time)
+                first_commit = gitRepo.get_creation_commit_module(module)
+                gitRepo.checkout_commit(commits[-1])
+                valid_platforms, _ = valid_platforms_versions(gitRepo.repo_dir)
+                if platform and platform not in valid_platforms:
+                    raise ValueError(f"Invalid platform: {platform}")
+                elif commits == [''] and type == "integration" and module_exists(gitRepo.repo_dir, module):
+                    raise ValueError(f"No changes found: {module}")
+                elif commits == [''] and type == "integration" and not module_exists(gitRepo.repo_dir, module):
+                    raise ValueError(f"Integration {module} not found in latest commit, is the name correct?")
+                elif (
+                    type == 'dependency'
+                    and platform
+                    and module not in get_dependency_list(gitRepo.repo_dir, [platform])
+                ):
+                    raise ValueError(
+                        f"Dependency {module} not found in latest commit for the platform {platform}, "
+                        "is the name correct?"
+                    )
+                elif (
+                    type == 'dependency'
+                    and not platform
+                    and module not in get_dependency_list(gitRepo.repo_dir, valid_platforms)
+                ):
+                    raise ValueError(f"Dependency {module} not found in latest commit, is the name correct?")
+                elif type == 'dependency' and commits == ['']:
+                    raise ValueError(f"No changes found: {module}")
+                if type == "dependency" and platform is None:
+                    progress.remove_task(task)
+                    for i, plat in enumerate(valid_platforms):
+                        timeline_mode(
+                            app, gitRepo, type, module, commits, threshold, plat, compressed, csv, i, None, progress
+                        )
+                else:
+                    progress.remove_task(task)
+
+                    timeline_mode(
+                        app,
+                        gitRepo,
+                        type,
+                        module,
+                        commits,
+                        threshold,
+                        platform,
+                        compressed,
+                        csv,
+                        None,
+                        first_commit,
+                        progress,
+                    )
+
+            except Exception as e:
+                progress.remove_task(task)
+                app.abort(str(e))
+
+
+def timeline_mode(
+    app: Application,
+    gitRepo: GitRepo,
+    type: str,
+    module: str,
+    commits: List[str],
+    threshold: Optional[str],
+    platform: Optional[str],
+    compressed: bool,
+    csv: bool,
+    i: Optional[int],
+    first_commit: Optional[str],
+    progress: Progress,
+) -> None:
+    modules = get_repo_info(gitRepo, type, platform, module, commits, compressed, first_commit, progress)
+    if modules != []:
+        grouped_modules = group_modules(modules, platform, i)
+        trimmed_modules = trim_modules(grouped_modules, threshold)
+        if csv:
+            print_csv(app, i, trimmed_modules)
+        else:
+            print_table(app, "Timeline for " + module, trimmed_modules)
+
+
+def get_repo_info(
+    gitRepo: GitRepo,
+    type: str,
+    platform: Optional[str],
+    module: str,
+    commits: List[str],
+    compressed: bool,
+    first_commit: Optional[str],
+    progress: Progress,
+) -> List[Dict[str, Union[str, int, date]]]:
+    with progress:
         if type == "integration":
             file_data = process_commits(commits, module, gitRepo, progress, platform, type, compressed, first_commit)
-        else: 
+        else:
             file_data = process_commits(commits, module, gitRepo, progress, platform, type, compressed, None)
-    return file_data      
-    
-def process_commits(commits, module, gitRepo, progress, platform, type, compressed, first_commit=None):
-    file_data=[]
+    return file_data
+
+
+def process_commits(
+    commits: List[str],
+    module: str,
+    gitRepo: GitRepo,
+    progress: Progress,
+    platform: Optional[str],
+    type: str,
+    compressed: bool,
+    first_commit: Optional[str],
+) -> List[Dict[str, Union[str, int, date]]]:
+    file_data = []
     task = progress.add_task("[cyan]Processing commits...", total=len(commits))
     repo = gitRepo.repo_dir
-    
+
     folder = module if type == 'integration' else '.deps/resolved'
     for commit in commits:
         gitRepo.sparse_checkout_commit(commit, folder)
@@ -120,34 +203,42 @@ def process_commits(commits, module, gitRepo, progress, platform, type, compress
         elif type == 'integration':
             file_data = get_files(repo, module, commit, date, author, message, file_data, compressed)
         progress.advance(task)
+    progress.remove_task(task)
+
     return file_data
 
-def get_files(repo_path, module, commit, date, author, message, file_data, compressed):   
-    
+
+def get_files(
+    repo_path: str,
+    module: str,
+    commit: str,
+    date: date,
+    author: str,
+    message: str,
+    file_data: List[Dict[str, Union[str, int, date]]],
+    compressed: bool,
+) -> List[Dict[str, Union[str, int, date]]]:
     if not module_exists(repo_path, module):
         file_data.append(
-                    {
-                        "Size (Bytes)": 0,
-                        "Date": date,
-                        "Author": author,
-                        "Commit Message": "(DELETED) " + message,
-                        "Commit SHA": commit
-                    }
-                )
-        return file_data    
-    
+            {
+                "Size (Bytes)": 0,
+                "Date": date,
+                "Author": author,
+                "Commit Message": "(DELETED) " + message,
+                "Commit SHA": commit,
+            }
+        )
+        return file_data
+
     ignored_files = {"datadog_checks_dev", "datadog_checks_tests_helper"}
-    # resolved_path = os.path.join(repo_path, module)
 
     git_ignore = get_gitignore_files(repo_path)
     included_folder = "datadog_checks/"
     for root, _, files in os.walk(repo_path):
         for file in files:
             file_path = os.path.join(root, file)
-            # Convert the path to a relative format within the repo
             relative_path = os.path.relpath(file_path, repo_path)
 
-            # Filter files
             if is_valid_integration(relative_path, included_folder, ignored_files, git_ignore):
                 size = compress(file_path) if compressed else os.path.getsize(file_path)
                 file_data.append(
@@ -156,12 +247,22 @@ def get_files(repo_path, module, commit, date, author, message, file_data, compr
                         "Date": date,
                         "Author": author,
                         "Commit Message": message,
-                        "Commit SHA": commit
+                        "Commit SHA": commit,
                     }
                 )
     return file_data
 
-def get_dependencies(repo_path, module, platform, commit, date, author, message, compressed):
+
+def get_dependencies(
+    repo_path: str,
+    module: str,
+    platform: Optional[str],
+    commit: str,
+    date: date,
+    author: str,
+    message: str,
+    compressed: bool,
+) -> Optional[Dict[str, Union[str, int, date]]]:
     resolved_path = os.path.join(repo_path, ".deps/resolved")
     paths = os.listdir(resolved_path)
     version = get_version(paths, platform)
@@ -169,9 +270,12 @@ def get_dependencies(repo_path, module, platform, commit, date, author, message,
         file_path = os.path.join(resolved_path, filename)
         if os.path.isfile(file_path) and is_correct_dependency(platform, version, filename):
             download_url = get_dependency(file_path, module)
-            return get_dependency_size(download_url, commit, date, author, message, compressed) if download_url else None
+            return (
+                get_dependency_size(download_url, commit, date, author, message, compressed) if download_url else None
+            )
+
 
-def get_dependency(file_path, module):
+def get_dependency(file_path: str, module: str) -> Optional[str]:
     with open(file_path, "r", encoding="utf-8") as file:
         file_content = file.read()
         for line in file_content.splitlines():
@@ -181,9 +285,12 @@ def get_dependency(file_path, module):
             name, url = match.groups()
             if name == module:
                 return url
-    return None            
+    return None
 
-def get_dependency_size(download_url, commit, date, author, message, compressed):
+
+def get_dependency_size(
+    download_url: str, commit: str, date: date, author: str, message: str, compressed: bool
+) -> Dict[str, Union[str, int, date]]:
     if compressed:
         response = requests.head(download_url)
         response.raise_for_status()
@@ -207,15 +314,10 @@ def get_dependency_size(download_url, commit, date, author, message, compressed)
                     file_path = os.path.join(dirpath, name)
                     size += os.path.getsize(file_path)
 
-    return  {
-                "Size (Bytes)": size,
-                "Date": date,
-                "Author": author,
-                "Commit Message": message,
-                "Commit SHA": commit
-            }
+    return {"Size (Bytes)": size, "Date": date, "Author": author, "Commit Message": message, "Commit SHA": commit}
+
 
-def get_version(files, platform):
+def get_version(files: List[str], platform: Optional[str]) -> str:
     final_version = ''
     for file in files:
         if platform in file:
@@ -224,23 +326,18 @@ def get_version(files, platform):
             version = match.group(0) if match else None
             if version > final_version:
                 final_version = version
-    return final_version if len(final_version) != 1 else 'py'+ final_version
-
-
-def is_correct_dependency(platform, version, name):
-        return platform in name and version in name
+    return final_version if len(final_version) != 1 else 'py' + final_version
 
 
-
-    
-
-def group_modules(modules, platform, i):
+def group_modules(
+    modules: List[Dict[str, Union[str, int, date]]], platform: Optional[str], i: Optional[int]
+) -> List[Dict[str, Union[str, int, date]]]:
     grouped_aux = {}
 
     for file in modules:
         key = (file['Date'], file['Author'], file['Commit Message'], file['Commit SHA'])
         grouped_aux[key] = grouped_aux.get(key, 0) + file["Size (Bytes)"]
-    if i is None: 
+    if i is None:
         return [
             {
                 "Commit SHA": commit,
@@ -251,11 +348,10 @@ def group_modules(modules, platform, i):
                 "Date": date,
                 "Author": author,
                 "Commit Message": message,
-                
             }
             for (date, author, message, commit), size in grouped_aux.items()
         ]
-    else: 
+    else:
         return [
             {
                 "Commit SHA": commit,
@@ -271,34 +367,40 @@ def group_modules(modules, platform, i):
             for (date, author, message, commit), size in grouped_aux.items()
         ]
 
-def trim_modules(modules, threshold=0):
+
+def trim_modules(
+    modules: List[Dict[str, Union[str, int, date]]], threshold: Optional[str] = None
+) -> List[Dict[str, Union[str, int, date]]]:
     modules[0]['Delta (Bytes)'] = 0
     modules[0]['Delta'] = ' '
     trimmed_modules = [modules[0]]
-    for i in range(1, len(modules)-1):
-        delta = modules[i]['Size (Bytes)']-modules[i-1]['Size (Bytes)']
-        if abs(delta) > int(threshold):
-            modules[i]['Delta (Bytes)'] = delta
-            modules[i]['Delta'] = convert_size(delta)
-            trimmed_modules.append(modules[i])
-    if len(modules) > 1:
-        delta = modules[-1]['Size (Bytes)']-modules[-2]['Size (Bytes)']
-        modules[-1]['Delta (Bytes)'] = delta
-        modules[-1]['Delta'] = convert_size(delta)
-        trimmed_modules.append(modules[-1])
+    threshold_value = int(threshold) if threshold else 0
+
+    for i in range(1, len(modules)):
+        prev = modules[i - 1]
+        curr = modules[i]
+        delta = curr['Size (Bytes)'] - prev['Size (Bytes)']
+        if abs(delta) > threshold_value or i == len(modules) - 1:
+            curr['Delta (Bytes)'] = delta
+            curr['Delta'] = convert_size(delta)
+            trimmed_modules.append(curr)
+
     return trimmed_modules
 
-def format_commit_data(date, message, commit, first_commit):
+
+def format_commit_data(date_str: str, message: str, commit: str, first_commit: Optional[str]) -> Tuple[date, str, str]:
     if commit == first_commit:
         message = "(NEW) " + message
     message = message if len(message) <= 35 else message[:30].rsplit(" ", 1)[0] + "..." + message.split()[-1]
-    date = datetime.strptime(date, "%b %d %Y").date()
+    date = datetime.strptime(date_str, "%b %d %Y").date()
     return date, message, commit[:7]
 
-def module_exists(path, module):
+
+def module_exists(path: str, module: str) -> bool:
     return os.path.exists(os.path.join(path, module))
 
-def get_dependency_list(path, platforms):
+
+def get_dependency_list(path: str, platforms: List[str]) -> Set[str]:
     resolved_path = os.path.join(path, ".deps/resolved")
     all_files = os.listdir(resolved_path)
     dependencies = set()
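For reference, a minimal self-contained sketch of the delta-threshold behaviour that `trim_modules` implements above: the first entry is always kept, intermediate commits survive only when their size change exceeds the threshold, and the last entry is always kept so the timeline has an endpoint (toy sizes, not real data; the human-readable 'Delta' column is omitted).

```python
from typing import Dict, List

Row = Dict[str, int]


def trim(modules: List[Row], threshold: int = 0) -> List[Row]:
    # Mirrors the keep/drop rule of trim_modules above.
    kept = [{**modules[0], "Delta (Bytes)": 0}]
    for i in range(1, len(modules)):
        delta = modules[i]["Size (Bytes)"] - modules[i - 1]["Size (Bytes)"]
        if abs(delta) > threshold or i == len(modules) - 1:
            kept.append({**modules[i], "Delta (Bytes)": delta})
    return kept


rows = [{"Size (Bytes)": s} for s in (1000, 1100, 2400, 2500)]
print(trim(rows, threshold=500))
# -> 1000 kept, 1100 dropped (+100), 2400 kept (+1300), 2500 kept as the endpoint (+100)
```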
diff --git a/ddev/tests/cli/size/test_common.py b/ddev/tests/cli/size/test_common.py
index a3f29bc17134e..cd11c1ba8c41f 100644
--- a/ddev/tests/cli/size/test_common.py
+++ b/ddev/tests/cli/size/test_common.py
@@ -10,7 +10,7 @@
     is_correct_dependency,
     is_valid_integration,
     print_csv,
-    valid_platforms_versions
+    valid_platforms_versions,
 )
 
 
@@ -27,15 +27,10 @@ def test_valid_platforms_versions():
         "macos-x86_64_py3.txt",
         "windows-x86_64_3.12.txt",
         "windows-x86_64_py2.txt",
-        "windows-x86_64_py3.txt"
+        "windows-x86_64_py3.txt",
     ]
 
-    expected_platforms = {
-        "linux-aarch64",
-        "linux-x86_64",
-        "macos-x86_64",
-        "windows-x86_64"
-    }
+    expected_platforms = {"linux-aarch64", "linux-x86_64", "macos-x86_64", "windows-x86_64"}
     expected_versions = {"3.12"}
     with patch("os.listdir", return_value=filenames):
         platforms, versions = valid_platforms_versions("/tmp/fake_repo")
@@ -127,7 +122,7 @@ def test_group_modules():
         },
     ]
 
-    assert group_modules(modules, platform, version,0) == expected_output
+    assert group_modules(modules, platform, version, 0) == expected_output
 
 
 def test_get_gitignore_files():
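For reference, a minimal self-contained sketch of the filename convention that `valid_platforms_versions` is tested against above: entries under `.deps/resolved` are named `<platform>_<python-version>.txt`, and the platform and version sets are derived from those names (the parsing below is illustrative, not the exact implementation).

```python
import re

filenames = [
    "linux-aarch64_3.12.txt",
    "linux-x86_64_3.12.txt",
    "macos-x86_64_3.12.txt",
    "windows-x86_64_3.12.txt",
    "windows-x86_64_py3.txt",
]

platforms, versions = set(), set()
for name in filenames:
    # <platform>_<version>.txt -> split on the last underscore
    platform, _, version = name.removesuffix(".txt").rpartition("_")
    platforms.add(platform)
    if re.fullmatch(r"\d+\.\d+", version):  # keep only MAJOR.MINOR versions
        versions.add(version)

print(sorted(platforms))  # ['linux-aarch64', 'linux-x86_64', 'macos-x86_64', 'windows-x86_64']
print(versions)           # {'3.12'}
```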
diff --git a/ddev/tests/cli/size/test_diff.py b/ddev/tests/cli/size/test_diff.py
index 56759b92edb68..ba1d0a826ad63 100644
--- a/ddev/tests/cli/size/test_diff.py
+++ b/ddev/tests/cli/size/test_diff.py
@@ -6,7 +6,7 @@
 
 import pytest
 
-from ddev.cli.size.diff import get_dependencies, get_files, get_diff
+from ddev.cli.size.diff import get_dependencies, get_diff, get_files
 
 
 def test_get_compressed_files():
@@ -83,6 +83,7 @@ def test_get_compressed_dependencies(terminal):
         "dependency2": 12345,
     }
 
+
 def test_get_diff():
     size_before = {
         "integration/foo.py": 1000,
@@ -144,7 +145,10 @@ def get_compressed_dependencies_side_effect(_, __, ___, ____):
     get_compressed_dependencies_side_effect.counter = 0
 
     with (
-        patch("ddev.cli.size.diff.valid_platforms_versions", return_value=({'linux-x86_64', 'macos-x86_64', 'linux-aarch64', 'windows-x86_64'}, {'3.12'})),
+        patch(
+            "ddev.cli.size.diff.valid_platforms_versions",
+            return_value=({'linux-x86_64', 'macos-x86_64', 'linux-aarch64', 'windows-x86_64'}, {'3.12'}),
+        ),
         patch("ddev.cli.size.diff.GitRepo.__enter__", return_value=mock_git_repo),
         patch("ddev.cli.size.diff.GitRepo.__exit__", return_value=None),
         patch("ddev.cli.size.diff.GitRepo.checkout_commit"),
@@ -180,14 +184,16 @@ def test_diff_csv(ddev, mock_size_diff_dependencies):
     assert result.exit_code == 0
 
 
-
 def test_diff_no_differences(ddev):
     fake_repo = MagicMock()
     fake_repo.repo_dir = "/tmp/fake_repo"
 
     with (
         patch("ddev.cli.size.diff.GitRepo.__enter__", return_value=fake_repo),
-        patch("ddev.cli.size.diff.valid_platforms_versions", return_value=({'linux-x86_64', 'macos-x86_64', 'linux-aarch64', 'windows-x86_64'}, {'3.12'})),
+        patch(
+            "ddev.cli.size.diff.valid_platforms_versions",
+            return_value=({'linux-x86_64', 'macos-x86_64', 'linux-aarch64', 'windows-x86_64'}, {'3.12'}),
+        ),
         patch("ddev.cli.size.diff.GitRepo.__exit__", return_value=None),
         patch.object(fake_repo, "checkout_commit"),
         patch("tempfile.mkdtemp", return_value="/tmp/fake_repo"),
@@ -221,6 +227,14 @@ def test_diff_no_differences(ddev):
 
 
 def test_diff_invalid_platform(ddev):
+    mock_git_repo = MagicMock()
+    mock_git_repo.repo_dir = "/tmp/fake_repo"
+    mock_git_repo.get_module_commits.return_value = ["commit1", "commit2"]
+    mock_git_repo.get_commit_metadata.side_effect = lambda c: ("Apr 4 2025", "Fix dep", c)
+    patch(
+        "ddev.cli.size.timeline.valid_platforms_versions",
+        return_value=({'linux-x86_64', 'macos-x86_64', 'linux-aarch64', 'windows-x86_64'}, {'3.12'}),
+    ),
     result = ddev(
         'size', 'diff', 'commit1', 'commit2', '--platform', 'linux', '--python', '3.12', '--compressed'  # invalid platform
     )
@@ -228,6 +242,14 @@ def test_diff_invalid_platform(ddev):
 
 
 def test_diff_invalid_version(ddev):
+    mock_git_repo = MagicMock()
+    mock_git_repo.repo_dir = "/tmp/fake_repo"
+    mock_git_repo.get_module_commits.return_value = ["commit1", "commit2"]
+    mock_git_repo.get_commit_metadata.side_effect = lambda c: ("Apr 4 2025", "Fix dep", c)
+    patch(
+        "ddev.cli.size.timeline.valid_platforms_versions",
+        return_value=({'linux-x86_64', 'macos-x86_64', 'linux-aarch64', 'windows-x86_64'}, {'3.12'}),
+    ),
     result = ddev(
         'size',
         'diff',
@@ -243,5 +265,13 @@ def test_diff_invalid_version(ddev):
 
 
 def test_diff_invalid_platform_and_version(ddev):
+    mock_git_repo = MagicMock()
+    mock_git_repo.repo_dir = "/tmp/fake_repo"
+    mock_git_repo.get_module_commits.return_value = ["commit1", "commit2"]
+    mock_git_repo.get_commit_metadata.side_effect = lambda c: ("Apr 4 2025", "Fix dep", c)
+    patch(
+        "ddev.cli.size.timeline.valid_platforms_versions",
+        return_value=({'linux-x86_64', 'macos-x86_64', 'linux-aarch64', 'windows-x86_64'}, {'3.12'}),
+    ),
     result = ddev('size', 'diff', 'commit1', 'commit2', '--platform', 'linux', '--python', '2.10', '--compressed')
     assert result.exit_code != 0
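As context for the bare `patch(...)` expressions added in the tests above: a `unittest.mock.patch` patcher only replaces its target while it is active, i.e. inside a `with` block or between `start()` and `stop()` (minimal self-contained sketch, using `os.getcwd` purely as a stand-in target).

```python
import os
from unittest.mock import patch

# Creating a patcher object alone does not replace anything.
patcher = patch("os.getcwd", return_value="/tmp/fake_repo")
print(os.getcwd())  # real working directory: the patcher has not been started

# It takes effect as a context manager...
with patch("os.getcwd", return_value="/tmp/fake_repo"):
    print(os.getcwd())  # "/tmp/fake_repo"

# ...or between start() and stop().
patcher.start()
print(os.getcwd())  # "/tmp/fake_repo"
patcher.stop()
```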
diff --git a/ddev/tests/cli/size/test_status.py b/ddev/tests/cli/size/test_status.py
index 13657e33b93a2..cce9193345de8 100644
--- a/ddev/tests/cli/size/test_status.py
+++ b/ddev/tests/cli/size/test_status.py
@@ -93,10 +93,15 @@ def test_get_compressed_dependencies():
 @pytest.fixture()
 def mock_size_status():
     with (
-        patch("ddev.cli.size.status.valid_platforms_versions", return_value=({'linux-x86_64', 'macos-x86_64', 'linux-aarch64', 'windows-x86_64'}, {'3.12'})),
+        patch(
+            "ddev.cli.size.status.valid_platforms_versions",
+            return_value=({'linux-x86_64', 'macos-x86_64', 'linux-aarch64', 'windows-x86_64'}, {'3.12'}),
+        ),
         patch("ddev.cli.size.status.get_gitignore_files", return_value=set()),
         patch("ddev.cli.size.status.compress", return_value=1234),
-        patch("ddev.cli.size.status.get_dependencies_list", return_value=(["dep1"], {"dep1": "https://example.com/dep1"})),
+        patch(
+            "ddev.cli.size.status.get_dependencies_list", return_value=(["dep1"], {"dep1": "https://example.com/dep1"})
+        ),
         patch(
             "ddev.cli.size.status.get_dependencies_sizes",
             return_value=[
@@ -135,15 +140,27 @@ def test_status_csv(ddev, mock_size_status):
 
 
 def test_status_wrong_platform(ddev):
+    patch(
+        "ddev.cli.size.timeline.valid_platforms_versions",
+        return_value=({'linux-x86_64', 'macos-x86_64', 'linux-aarch64', 'windows-x86_64'}, {'3.12'}),
+    ),
     result = ddev('size', 'status', '--platform', 'linux', '--python', '3.12', '--compressed')
     assert result.exit_code != 0
 
 
 def test_status_wrong_version(ddev):
+    patch(
+        "ddev.cli.size.timeline.valid_platforms_versions",
+        return_value=({'linux-x86_64', 'macos-x86_64', 'linux-aarch64', 'windows-x86_64'}, {'3.12'}),
+    ),
     result = ddev('size', 'status', '--platform', 'linux-aarch64', '--python', '2.10', '--compressed')
     assert result.exit_code != 0
 
 
 def test_status_wrong_plat_and_version(ddev):
+    patch(
+        "ddev.cli.size.timeline.valid_platforms_versions",
+        return_value=({'linux-x86_64', 'macos-x86_64', 'linux-aarch64', 'windows-x86_64'}, {'3.12'}),
+    ),
     result = ddev('size', 'status', '--platform', 'linux', '--python', '2.10', '--compressed')
     assert result.exit_code != 0
diff --git a/ddev/tests/cli/size/test_timeline.py b/ddev/tests/cli/size/test_timeline.py
index 0cb5d49a4b062..6042f92f85d5a 100644
--- a/ddev/tests/cli/size/test_timeline.py
+++ b/ddev/tests/cli/size/test_timeline.py
@@ -1,16 +1,18 @@
+from datetime import datetime
+from unittest.mock import MagicMock, mock_open, patch
+
 import pytest
-from unittest.mock import MagicMock, patch, mock_open
+
 from ddev.cli.size.timeline import (
-    get_version,
     format_commit_data,
-    trim_modules,
-    group_modules,
-    get_dependency_size,
-    get_dependency, 
     get_dependencies,
+    get_dependency,
+    get_dependency_size,
     get_files,
+    get_version,
+    group_modules,
+    trim_modules,
 )
-from datetime import datetime
 
 
 def test_get_compressed_files():
@@ -23,14 +25,7 @@ def test_get_compressed_files():
         patch("ddev.cli.size.timeline.compress", return_value=1234),
     ):
         result = get_files(
-            "/tmp/fake_repo",
-            "int1",
-            "abc1234",
-            datetime(2025, 4, 4).date(),
-            "auth",
-            "Added int1",
-            [],
-            True
+            "/tmp/fake_repo", "int1", "abc1234", datetime(2025, 4, 4).date(), "auth", "Added int1", [], True
         )
         assert result == [
             {
@@ -38,10 +33,11 @@ def test_get_compressed_files():
                 "Date": datetime(2025, 4, 4).date(),
                 "Author": "auth",
                 "Commit Message": "Added int1",
-                "Commit SHA": "abc1234"
+                "Commit SHA": "abc1234",
             }
         ]
 
+
 def test_get_compressed_files_deleted_only():
     repo_path = "/tmp/fake_repo"
     module = "foo"
@@ -54,7 +50,7 @@ def test_get_compressed_files_deleted_only():
         patch("ddev.cli.size.timeline.get_gitignore_files", return_value=set()),
         patch("os.walk", return_value=[]),
         patch("os.path.relpath", side_effect=lambda path, _: path.replace(f"{repo_path}/", "")),
-        patch("os.path.exists", return_value=False),  
+        patch("os.path.exists", return_value=False),
     ):
         file_data = get_files(repo_path, module, commit, date, author, message, [], True)
 
@@ -74,8 +70,11 @@ def test_get_version():
     version = get_version(files, "linux-x86_64")
     assert version == "3.12"
 
+
 def test_format_commit_data():
-    date, message, commit = format_commit_data("Apr 4 2025", "this is a very long commit message that should be trimmed (#1234)", "abc1234def", "abc1234def")
+    date, message, commit = format_commit_data(
+        "Apr 4 2025", "this is a very long commit message that should be trimmed (#1234)", "abc1234def", "abc1234def"
+    )
     expected_date = datetime.strptime("Apr 4 2025", "%b %d %Y").date()
     expected_message = "(NEW) this is a very long...(#1234)"
     expected_commit = "abc1234"
@@ -83,6 +82,7 @@ def test_format_commit_data():
     assert message == expected_message
     assert commit == expected_commit
 
+
 def test_trim_modules_keep_some_remove_some():
     modules = [
         {"Size (Bytes)": 1000},
@@ -99,9 +99,27 @@ def test_trim_modules_keep_some_remove_some():
 
 def test_group_modules():
     modules = [
-        {"Size (Bytes)": 1000, "Date": datetime(2025, 4, 4).date(), "Author": "A", "Commit Message": "msg", "Commit SHA": "c1"},
-        {"Size (Bytes)": 500, "Date": datetime(2025, 4, 4).date(), "Author": "A", "Commit Message": "msg", "Commit SHA": "c1"},
-        {"Size (Bytes)": 1500, "Date": datetime(2025, 4, 5).date(), "Author": "A", "Commit Message": "msg2", "Commit SHA": "c2"},
+        {
+            "Size (Bytes)": 1000,
+            "Date": datetime(2025, 4, 4).date(),
+            "Author": "A",
+            "Commit Message": "msg",
+            "Commit SHA": "c1",
+        },
+        {
+            "Size (Bytes)": 500,
+            "Date": datetime(2025, 4, 4).date(),
+            "Author": "A",
+            "Commit Message": "msg",
+            "Commit SHA": "c1",
+        },
+        {
+            "Size (Bytes)": 1500,
+            "Date": datetime(2025, 4, 5).date(),
+            "Author": "A",
+            "Commit Message": "msg2",
+            "Commit SHA": "c2",
+        },
     ]
     expected = [
         {
@@ -138,6 +156,7 @@ def test_get_dependency():
         url = get_dependency("some/path/file.txt", "dep2")
         assert url == "https://example.com/dep2.whl"
 
+
 def make_mock_response(size):
     mock_response = MagicMock()
     mock_response.__enter__.return_value = mock_response
@@ -145,10 +164,13 @@ def make_mock_response(size):
     mock_response.raise_for_status = lambda: None
     return mock_response
 
+
 def test_get_dependency_size():
     mock_response = make_mock_response("45678")
-    with patch("requests.get", return_value=mock_response):
-        info = get_dependency_size("https://example.com/file.whl", "abc1234", datetime(2025, 4, 4).date(), "auth", "Fixed bug")
+    with patch("requests.head", return_value=mock_response):
+        info = get_dependency_size(
+            "https://example.com/file.whl", "abc1234", datetime(2025, 4, 4).date(), "auth", "Fixed bug", True
+        )
         assert info == {
             "Size (Bytes)": 45678,
             "Date": datetime(2025, 4, 4).date(),
@@ -157,6 +179,7 @@ def test_get_dependency_size():
             "Commit SHA": "abc1234",
         }
 
+
 def test_get_compressed_dependencies():
     with (
         patch("os.path.exists", return_value=True),
@@ -164,42 +187,17 @@ def test_get_compressed_dependencies():
         patch("os.path.isfile", return_value=True),
         patch("os.listdir", return_value=["linux-x86_64_3.12.txt"]),
         patch("ddev.cli.size.timeline.get_dependency", return_value="https://example.com/dep1.whl"),
-        patch("ddev.cli.size.timeline.requests.get", return_value=make_mock_response("12345")),
+        patch("ddev.cli.size.timeline.requests.head", return_value=make_mock_response("12345")),
     ):
         result = get_dependencies(
-            "/tmp/fake_repo",
-            "dep1",
-            "linux-x86_64",
-            "abc1234",
-            datetime(2025, 4, 4).date(),
-            "auth",
-            "Added dep1",
-            True
+            "/tmp/fake_repo", "dep1", "linux-x86_64", "abc1234", datetime(2025, 4, 4).date(), "auth", "Added dep1", True
         )
         assert result == {
             "Size (Bytes)": 12345,
             "Date": datetime(2025, 4, 4).date(),
             "Author": "auth",
             "Commit Message": "Added dep1",
-            "Commit SHA": "abc1234"
-        }
-
-def test_get_dependency_size():
-    with patch("requests.get", return_value=make_mock_response("45678")):
-        result = get_dependency_size(
-            "https://example.com/dep1.whl",
-            "abc1234",
-            datetime(2025, 4, 4).date(),
-            "auth",
-            "Fixed bug",
-            True
-        )
-        assert result == {
-            "Size (Bytes)": 45678,
-            "Date": datetime(2025, 4, 4).date(),
-            "Author": "auth",
-            "Commit Message": "Fixed bug",
-            "Commit SHA": "abc1234"
+            "Commit SHA": "abc1234",
         }
 
 
@@ -210,7 +208,7 @@ def mock_timeline_gitrepo():
     mock_git_repo.get_module_commits.return_value = ["commit1", "commit2"]
     mock_git_repo.get_creation_commit_module.return_value = "commit1"
     mock_git_repo.get_commit_metadata.side_effect = lambda c: ("Apr 4 2025", "Initial commit", c)
-    
+
     with (
         patch("ddev.cli.size.timeline.GitRepo.__enter__", return_value=mock_git_repo),
         patch("ddev.cli.size.timeline.GitRepo.__exit__", return_value=None),
@@ -226,23 +224,23 @@ def mock_timeline_gitrepo():
         patch("os.path.exists", return_value=True),
         patch("os.path.isdir", return_value=True),
         patch("os.path.isfile", return_value=True),
-        patch("os.listdir", return_value=[
-            "linux-x86_64_3.12_dep1.whl",
-            "linux-x86_64_3.12_dep2.whl"
-        ]),
+        patch("os.listdir", return_value=["linux-x86_64_3.12_dep1.whl", "linux-x86_64_3.12_dep2.whl"]),
     ):
         yield
 
+
 @pytest.fixture
 def app():
     mock_app = MagicMock()
     mock_app.repo.path = "/tmp/fake_repo"
     return mock_app
 
+
 def test_timeline_integration_compressed(ddev, mock_timeline_gitrepo, app):
     result = ddev("size", "timeline", "integration", "int1", "commit1", "commit2", "--compressed", obj=app)
     assert result.exit_code == 0
 
+
 @pytest.fixture
 def mock_timeline_dependencies():
     mock_git_repo = MagicMock()
@@ -254,7 +252,11 @@ def mock_timeline_dependencies():
         patch("ddev.cli.size.timeline.GitRepo.__enter__", return_value=mock_git_repo),
         patch("ddev.cli.size.timeline.GitRepo.__exit__", return_value=None),
         patch("ddev.cli.size.timeline.GitRepo.sparse_checkout_commit"),
-        patch("ddev.cli.size.timeline.valid_platforms_versions", return_value=({'linux-x86_64', 'macos-x86_64', 'linux-aarch64', 'windows-x86_64'}, {'3.12'})),
+        patch(
+            "ddev.cli.size.timeline.valid_platforms_versions",
+            return_value=({'linux-x86_64', 'macos-x86_64', 'linux-aarch64', 'windows-x86_64'}, {'3.12'}),
+        ),
+        patch("ddev.cli.size.timeline.get_dependency_list", return_value={"dep1"}),
         patch("os.path.exists", return_value=True),
         patch("os.path.isdir", return_value=True),
         patch("os.listdir", return_value=["linux-x86_64-3.12"]),
@@ -273,10 +275,18 @@ def mock_timeline_dependencies():
 
         yield
 
+
 def test_timeline_dependency_compressed(ddev, mock_timeline_dependencies, app):
     result = ddev(
-        "size", "timeline", "dependency", "dep1", "commit1", "commit2",
-        "--compressed", "--platform", "linux-x86_64",
+        "size",
+        "timeline",
+        "dependency",
+        "dep1",
+        "commit1",
+        "commit2",
+        "--compressed",
+        "--platform",
+        "linux-x86_64",
         obj=app,
     )
 
@@ -284,28 +294,133 @@ def test_timeline_dependency_compressed(ddev, mock_timeline_dependencies, app):
 
 
 def test_timeline_invalid_platform(ddev):
+    mock_git_repo = MagicMock()
+    mock_git_repo.repo_dir = "/tmp/fake_repo"
+    mock_git_repo.get_module_commits.return_value = ["commit1", "commit2"]
+    mock_git_repo.get_commit_metadata.side_effect = lambda c: ("Apr 4 2025", "Fix dep", c)
+    patch(
+        "ddev.cli.size.timeline.valid_platforms_versions",
+        return_value=({'linux-x86_64', 'macos-x86_64', 'linux-aarch64', 'windows-x86_64'}, {'3.12'}),
+    ),
     result = ddev(
-        "size", "timeline", "dependency", "dep1", "commit1", "commit2",
-        "--compressed", "--platform", "invalid-platform"
+        "size", "timeline", "dependency", "dep1", "commit1", "commit2", "--compressed", "--platform", "invalid-platform"
     )
     assert result.exit_code != 0
 
 
-
-
 def test_timeline_no_changes_in_integration(ddev):
     mock_git_repo = MagicMock()
     mock_git_repo.repo_dir = "/tmp/fake_repo"
     mock_git_repo.get_module_commits.return_value = [""]
-    
+
     with (
         patch("ddev.cli.size.timeline.GitRepo.__enter__", return_value=mock_git_repo),
         patch("ddev.cli.size.timeline.GitRepo.__exit__", return_value=None),
         patch("os.path.exists", return_value=True),
         patch("os.path.isdir", return_value=True),
         patch("os.listdir", return_value=[]),
-
     ):
         result = ddev("size", "timeline", "integration", "integration/foo", "commit1", "commit2", "--compressed")
         assert result.exit_code != 0
         assert "No changes found" in result.output
+
+
+def test_timeline_integration_not_found(ddev):
+    mock_repo = MagicMock()
+    mock_repo.repo_dir = "/fake"
+    mock_repo.get_module_commits.return_value = [""]
+    mock_repo.get_creation_commit_module.return_value = "c1"
+    mock_repo.checkout_commit.return_value = None
+
+    with (
+        patch("ddev.cli.size.timeline.GitRepo.__enter__", return_value=mock_repo),
+        patch("ddev.cli.size.timeline.GitRepo.__exit__", return_value=None),
+        patch(
+            "ddev.cli.size.timeline.valid_platforms_versions",
+            return_value=({'linux-x86_64', 'macos-x86_64', 'linux-aarch64', 'windows-x86_64'}, {'3.12'}),
+        ),
+        patch("ddev.cli.size.timeline.module_exists", return_value=False),
+    ):
+        result = ddev("size", "timeline", "integration", "missing_module", "c1", "c2")
+        assert result.exit_code != 0
+        assert "not found" in result.output
+
+
+def test_timeline_dependency_missing_no_platform(ddev):
+    mock_repo = MagicMock()
+    mock_repo.repo_dir = "/fake"
+    mock_repo.get_module_commits.return_value = ["c1"]
+    mock_repo.get_creation_commit_module.return_value = "c1"
+    mock_repo.checkout_commit.return_value = None
+
+    with (
+        patch("ddev.cli.size.timeline.GitRepo.__enter__", return_value=mock_repo),
+        patch("ddev.cli.size.timeline.GitRepo.__exit__", return_value=None),
+        patch("ddev.cli.size.timeline.valid_platforms_versions", return_value=({"linux-x86_64"}, {"3.12"})),
+        patch("ddev.cli.size.timeline.get_dependency_list", return_value=set()),
+    ):
+        result = ddev("size", "timeline", "dependency", "missing_module", "c1", "c2")
+        assert result.exit_code != 0
+        assert "Dependency missing_module not found in latest commit" in result.output
+
+
+def test_timeline_dependency_missing_for_platform(ddev, app):
+    mock_repo = MagicMock()
+    mock_repo.repo_dir = "/fake"
+    mock_repo.get_module_commits.return_value = ["c1"]
+    mock_repo.get_creation_commit_module.return_value = "c1"
+    mock_repo.checkout_commit.return_value = None
+
+    with (
+        patch("ddev.cli.size.timeline.GitRepo.__enter__", return_value=mock_repo),
+        patch("ddev.cli.size.timeline.GitRepo.__exit__", return_value=None),
+        patch("ddev.cli.size.timeline.valid_platforms_versions", return_value=({"linux-x86_64"}, {"3.12"})),
+        patch("ddev.cli.size.timeline.get_dependency_list", return_value=set()),
+    ):
+
+        result = ddev(
+            "size",
+            "timeline",
+            "dependency",
+            "missing_module",
+            "c1",
+            "c2",
+            "--platform",
+            "linux-x86_64",
+        )
+
+        assert result.exit_code != 0
+        assert (
+            "Dependency missing_module not found in latest commit for the platform linux-x86_64, is the name correct?"
+            in result.output
+        )
+
+
+def test_timeline_dependency_no_changes(ddev, app):
+    mock_repo = MagicMock()
+    mock_repo.repo_dir = "/fake"
+    mock_repo.get_module_commits.return_value = [""]
+    mock_repo.get_creation_commit_module.return_value = "c1"
+    mock_repo.checkout_commit.return_value = None
+
+    with (
+        patch("ddev.cli.size.timeline.GitRepo.__enter__", return_value=mock_repo),
+        patch("ddev.cli.size.timeline.GitRepo.__exit__", return_value=None),
+        patch("ddev.cli.size.timeline.valid_platforms_versions", return_value=({"linux-x86_64"}, {"3.12"})),
+        patch("ddev.cli.size.timeline.get_dependency_list", return_value={"dep1"}),
+    ):
+
+        result = ddev(
+            "size",
+            "timeline",
+            "dependency",
+            "dep1",
+            "c1",
+            "c2",
+            "--platform",
+            "linux-x86_64",
+            obj=app,
+        )
+
+        assert result.exit_code != 0
+        assert "no changes found" in result.output.lower()

From 0f7b09c37d8691b111d52c4fade755ebdfc8c5fd Mon Sep 17 00:00:00 2001
From: Lucia Sanchez Bella <lucia.sanchezbella@datadoghq.com>
Date: Mon, 21 Apr 2025 10:14:09 +0200
Subject: [PATCH 16/40] change --help

---
 ddev/src/ddev/cli/size/__init__.py | 8 +-------
 1 file changed, 1 insertion(+), 7 deletions(-)

diff --git a/ddev/src/ddev/cli/size/__init__.py b/ddev/src/ddev/cli/size/__init__.py
index df20d8628ba2f..63ddba4fa4af8 100644
--- a/ddev/src/ddev/cli/size/__init__.py
+++ b/ddev/src/ddev/cli/size/__init__.py
@@ -14,15 +14,9 @@ def size():
     """
     Analyze the download size of integrations and dependencies in various modes.
 
-    This command provides tools to inspect, compare, and monitor size changes of modules
+    This command provides tools to inspect the current status, compare commits, and monitor size changes of modules
     across different commits, platforms, and Python versions.
 
-    Available subcommands:
-      • status   Show the current sizes of all modules
-      • diff     Compare sizes between two commits
-      • timeline Show the size evolution of a module over time
-
-    Use `ddev size <subcommand> --help` for more details on each mode.
     """
 
     pass

From d1ac4c21763b1d6563a96a49368f56b1fe6cc09b Mon Sep 17 00:00:00 2001
From: Lucia Sanchez Bella <lucia.sanchezbella@datadoghq.com>
Date: Mon, 21 Apr 2025 10:26:04 +0200
Subject: [PATCH 17/40] Testing diff mode

---
 .github/workflows/slapr.yml | 39 +++++++++++++++++++++++++++++++++++++
 1 file changed, 39 insertions(+)

diff --git a/.github/workflows/slapr.yml b/.github/workflows/slapr.yml
index 07b6d5022a1e3..ee47be53357bd 100644
--- a/.github/workflows/slapr.yml
+++ b/.github/workflows/slapr.yml
@@ -45,6 +45,32 @@ jobs:
         echo '```' >> $GITHUB_STEP_SUMMARY
         cat size-compressed.txt >> $GITHUB_STEP_SUMMARY
         echo '```' >> $GITHUB_STEP_SUMMARY
+
+    - name: Measure disk usage differences from last commit (uncompressed)
+      run: |
+        BEFORE=$(git rev-parse HEAD^)
+        AFTER=$(git rev-parse HEAD)
+        ddev size diff $BEFORE $AFTER --csv > diff-uncompressed.csv
+        ddev size diff $BEFORE $AFTER > diff-uncompressed.txt
+        cat diff-uncompressed.txt
+        echo "# Size diff (uncompressed)" >> $GITHUB_STEP_SUMMARY
+        echo '```' >> $GITHUB_STEP_SUMMARY
+        cat diff-uncompressed.txt >> $GITHUB_STEP_SUMMARY
+        echo '```' >> $GITHUB_STEP_SUMMARY
+
+    - name: Measure disk usage differences from last commit (compressed)
+      run: |
+        BEFORE=$(git rev-parse HEAD^)
+        AFTER=$(git rev-parse HEAD)
+        ddev size diff $BEFORE $AFTER --compressed --csv > diff-compressed.csv
+        ddev size diff $BEFORE $AFTER --compressed > diff-compressed.txt
+        cat diff-compressed.txt
+        echo "# Size diff (compressed)" >> $GITHUB_STEP_SUMMARY
+        echo '```' >> $GITHUB_STEP_SUMMARY
+        cat diff-compressed.txt >> $GITHUB_STEP_SUMMARY
+        echo '```' >> $GITHUB_STEP_SUMMARY
+
+
     - name: Upload file sizes (uncompressed)
       uses: actions/upload-artifact@v4
       with:
@@ -57,3 +83,16 @@ jobs:
         name: size-compressed.csv
         path: size-compressed.csv
         if-no-files-found: error
+
+    - name: Upload file sizes diff (uncompressed)
+      uses: actions/upload-artifact@v4
+      with:
+        name: diff-uncompressed.csv
+        path: diff-uncompressed.csv
+        if-no-files-found: error
+    - name: Upload file sizes diff (compressed)
+      uses: actions/upload-artifact@v4
+      with:
+        name: diff-compressed.csv
+        path: diff-compressed.csv
+        if-no-files-found: error

From 09e4419ba3afa43e2161f1530b5fbd06d1db73df Mon Sep 17 00:00:00 2001
From: Lucia Sanchez Bella <lucia.sanchezbella@datadoghq.com>
Date: Mon, 21 Apr 2025 10:35:39 +0200
Subject: [PATCH 18/40] Testing diff mode with differences

---
 .../datadog_checks/aerospike/__init__.py      | 99 +++++++++++++++++++
 1 file changed, 99 insertions(+)

diff --git a/aerospike/datadog_checks/aerospike/__init__.py b/aerospike/datadog_checks/aerospike/__init__.py
index b9c4e94f28c2b..d1b355efb6650 100644
--- a/aerospike/datadog_checks/aerospike/__init__.py
+++ b/aerospike/datadog_checks/aerospike/__init__.py
@@ -5,3 +5,102 @@
 from .aerospike import AerospikeCheck
 
 __all__ = ['__version__', 'AerospikeCheck']
+
+'''
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+
+
+
+'''
\ No newline at end of file

From 5673f8a0d66f9faf5d87bd83a3e417d2a33ff211 Mon Sep 17 00:00:00 2001
From: Lucia Sanchez Bella <lucia.sanchezbella@datadoghq.com>
Date: Mon, 21 Apr 2025 10:47:25 +0200
Subject: [PATCH 19/40] test disabled diff

---
 .github/workflows/slapr.yml | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/.github/workflows/slapr.yml b/.github/workflows/slapr.yml
index ee47be53357bd..f661155a5026c 100644
--- a/.github/workflows/slapr.yml
+++ b/.github/workflows/slapr.yml
@@ -47,6 +47,7 @@ jobs:
         echo '```' >> $GITHUB_STEP_SUMMARY
 
     - name: Measure disk usage differences from last commit (uncompressed)
+      if: false  # Disabled: size difference is not accurate because dependency sizes are not updated
       run: | 
         BEFORE=$(git rev-parse HEAD^)
         AFTER=$(git rev-parse HEAD)
@@ -59,6 +60,7 @@ jobs:
         echo '```' >> $GITHUB_STEP_SUMMARY
 
     - name: Measure disk usage differences from last commit (compressed)
+      if: false  # Disabled: size difference is not accurate because dependency sizes are not updated
       run: | 
         BEFORE=$(git rev-parse HEAD^)
         AFTER=$(git rev-parse HEAD)
@@ -83,14 +85,16 @@ jobs:
         name: size-compressed.csv
         path: size-compressed.csv
         if-no-files-found: error
-        
+
     - name: Upload file sizes diff (uncompressed)
+      if: false  # Disabled: size difference is not accurate because dependency sizes are not updated
       uses: actions/upload-artifact@v4
       with:
         name: diff-uncompressed.csv
         path: diff-uncompressed.csv
         if-no-files-found: error     
     - name: Upload file sizes diff (compressed)
+      if: false  # Disabled: size difference is not accurate because dependency sizes are not updated
       uses: actions/upload-artifact@v4
       with:
         name: diff-compressed.csv
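
The steps above are gated with `if: false` because a diff computed from two checkouts can miss real changes when the dependency size data (`.deps/resolved`) is stale between commits. A minimal sketch of the underlying idea, using hypothetical module-size maps rather than the repository's real data:

# Minimal sketch: diffing two {module: size} maps. If the dependency entries are
# identical in both maps (stale resolution files), their deltas are zero and the
# reported diff understates the real change. Sizes below are made up.
def size_diff(before: dict[str, int], after: dict[str, int]) -> dict[str, int]:
    # Report only modules whose recorded size actually changed between the maps.
    return {
        name: after.get(name, 0) - before.get(name, 0)
        for name in set(before) | set(after)
        if after.get(name, 0) != before.get(name, 0)
    }

before = {"integration_a": 1_000, "dependency_x": 5_000}  # hypothetical
after = {"integration_a": 1_200, "dependency_x": 5_000}   # dependency size not refreshed
print(size_diff(before, after))  # {'integration_a': 200} -- the dependency change is invisible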

From 7064fd9ad7e0f43dfe531dff4899ee69b1d7ce9d Mon Sep 17 00:00:00 2001
From: Lucia Sanchez Bella <lucia.sanchezbella@datadoghq.com>
Date: Mon, 21 Apr 2025 12:18:55 +0200
Subject: [PATCH 20/40] fix: add disabled diff steps to the measure-disk-usage workflow and handle empty size diffs

---
 .github/workflows/measure-disk-usage.yml | 44 ++++++++++++++++++++
 ddev/src/ddev/cli/size/common.py         | 16 ++++++--
 ddev/src/ddev/cli/size/diff.py           | 51 +++++++++++++++---------
 ddev/src/ddev/cli/size/status.py         |  2 +-
 ddev/src/ddev/cli/size/timeline.py       |  5 ++-
 5 files changed, 93 insertions(+), 25 deletions(-)

diff --git a/.github/workflows/measure-disk-usage.yml b/.github/workflows/measure-disk-usage.yml
index 31f78b1316415..181cdfabd5880 100644
--- a/.github/workflows/measure-disk-usage.yml
+++ b/.github/workflows/measure-disk-usage.yml
@@ -45,15 +45,59 @@ jobs:
         echo '```' >> $GITHUB_STEP_SUMMARY
         cat size-compressed.txt >> $GITHUB_STEP_SUMMARY
         echo '```' >> $GITHUB_STEP_SUMMARY
+
+    - name: Measure disk usage differences from last commit (uncompressed)
+      if: false  # Disabled: size difference is not accurate because dependency sizes are not updated
+      run: | 
+        BEFORE=$(git rev-parse HEAD^)
+        AFTER=$(git rev-parse HEAD)
+        ddev size diff $BEFORE $AFTER --csv > diff-uncompressed.csv
+        ddev size diff $BEFORE $AFTER > diff-uncompressed.txt
+        cat diff-uncompressed.txt
+        echo "# Size diff (uncompressed)" >> $GITHUB_STEP_SUMMARY
+        echo '```' >> $GITHUB_STEP_SUMMARY
+        cat diff-uncompressed.txt >> $GITHUB_STEP_SUMMARY
+        echo '```' >> $GITHUB_STEP_SUMMARY
+
+    - name: Measure disk usage differences from last commit (compressed)
+      if: false  # Disabled: size difference is not accurate because dependency sizes are not updated
+      run: | 
+        BEFORE=$(git rev-parse HEAD^)
+        AFTER=$(git rev-parse HEAD)
+        ddev size diff $BEFORE $AFTER --compressed --csv > diff-compressed.csv
+        ddev size diff $BEFORE $AFTER --compressed > diff-compressed.txt
+        cat diff-compressed.txt
+        echo "# Size diff (compressed)" >> $GITHUB_STEP_SUMMARY
+        echo '```' >> $GITHUB_STEP_SUMMARY
+        cat diff-compressed.txt >> $GITHUB_STEP_SUMMARY
+        echo '```' >> $GITHUB_STEP_SUMMARY
+
+
     - name: Upload file sizes (uncompressed)
       uses: actions/upload-artifact@v4
       with:
         name: size-uncompressed.csv
         path: size-uncompressed.csv
         if-no-files-found: error
+        
     - name: Upload file sizes (compressed)
       uses: actions/upload-artifact@v4
       with:
         name: size-compressed.csv
         path: size-compressed.csv
         if-no-files-found: error
+
+    - name: Upload file sizes diff (uncompressed)
+      if: false  # Disabled: size difference is not accurate because dependency sizes are not updated
+      uses: actions/upload-artifact@v4
+      with:
+        name: diff-uncompressed.csv
+        path: diff-uncompressed.csv
+        if-no-files-found: error     
+    - name: Upload file sizes diff (compressed)
+      if: false  # Disabled: size difference is not accurate because dependency sizes are not updated
+      uses: actions/upload-artifact@v4
+      with:
+        name: diff-compressed.csv
+        path: diff-compressed.csv
+        if-no-files-found: error
diff --git a/ddev/src/ddev/cli/size/common.py b/ddev/src/ddev/cli/size/common.py
index 491ff024d4d48..7101401c72c7f 100644
--- a/ddev/src/ddev/cli/size/common.py
+++ b/ddev/src/ddev/cli/size/common.py
@@ -64,7 +64,8 @@ def print_csv(app: Application, i: Optional[int], modules: List[Dict[str, Union[
         app.display(",".join(headers))
 
     for row in modules:
-        app.display(",".join(format(str(row[h])) for h in headers))
+        if any(str(value).strip() not in ("", "0") for value in row.values()):
+            app.display(",".join(format(str(row[h])) for h in headers))
 
 
 def format(s: str) -> str:
@@ -131,7 +132,17 @@ def group_modules(
     modules: List[Dict[str, Union[str, int]]], platform: str, version: str, i: Optional[int]
 ) -> List[Dict[str, Union[str, int]]]:
     grouped_aux = {}
-
+    if modules == []:
+        return [
+            {
+                'Name': '',
+                'Type': '',
+                'Size (Bytes)': 0,
+                'Size': '',
+                'Platform': '',
+                'Version': '',
+            }
+        ]
     for file in modules:
         key = (file['Name'], file['Type'])
         grouped_aux[key] = grouped_aux.get(key, 0) + file["Size (Bytes)"]
@@ -234,7 +245,6 @@ def get_commit_metadata(self, commit: str) -> Tuple[str, str, str]:
     def get_creation_commit_module(self, module: str) -> str:
         return self._run(f'git log --reverse --format="%H" -- {module}')[0]
 
-
     def __exit__(
         self,
         exception_type: Optional[Type[BaseException]],
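
The new guard in print_csv skips rows whose values are all empty strings or zero, which is exactly the shape of the placeholder row that group_modules now returns for an empty module list. A rough standalone illustration of that filter, with hypothetical rows rather than the command's real output:

# Sketch of the row filter added to print_csv: emit a CSV line only when the row
# carries at least one non-empty, non-zero value. Rows below are hypothetical.
rows = [
    {"Name": "integration_a", "Type": "Integration", "Size (Bytes)": 2048},
    {"Name": "", "Type": "", "Size (Bytes)": 0},  # placeholder row produced for an empty diff
]
headers = ["Name", "Type", "Size (Bytes)"]
print(",".join(headers))
for row in rows:
    if any(str(value).strip() not in ("", "0") for value in row.values()):
        print(",".join(str(row[h]) for h in headers))
# Only the first row is printed; the all-empty placeholder is skipped.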
diff --git a/ddev/src/ddev/cli/size/diff.py b/ddev/src/ddev/cli/size/diff.py
index bfad6bf61442a..15ea32a971a1b 100644
--- a/ddev/src/ddev/cli/size/diff.py
+++ b/ddev/src/ddev/cli/size/diff.py
@@ -42,7 +42,13 @@
 @click.option('--csv', is_flag=True, help="Output in CSV format")
 @click.pass_obj
 def diff(
-    app: str, before: str, after: str, platform: Optional[str], version: Optional[str], compressed: bool, csv: bool
+    app: Application,
+    before: str,
+    after: str,
+    platform: Optional[str],
+    version: Optional[str],
+    compressed: bool,
+    csv: bool,
 ) -> None:
     """
     Compare the size of integrations and dependencies between two commits.
@@ -69,7 +75,7 @@ def diff(
                     progress.remove_task(task)
 
                     for i, (plat, ver) in enumerate([(p, v) for p in platforms for v in versions]):
-                        diff_mode(app, gitRepo, before, after, plat, ver, compressed, csv, i,progress)
+                        diff_mode(app, gitRepo, before, after, plat, ver, compressed, csv, i, progress)
                 else:
                     progress.remove_task(task)
                     diff_mode(app, gitRepo, before, after, platform, version, compressed, csv, None, progress)
@@ -91,18 +97,19 @@ def diff_mode(
     progress: Progress,
 ) -> None:
     files_b, dependencies_b, files_a, dependencies_a = get_repo_info(
-        gitRepo, platform, version, before, after, compressed,progress
+        gitRepo, platform, version, before, after, compressed, progress
     )
 
     integrations = get_diff(files_b, files_a, 'Integration')
     dependencies = get_diff(dependencies_b, dependencies_a, 'Dependency')
+    if integrations + dependencies == [] and not csv:
+        app.display(f"No size differences were detected between the selected commits for {platform}.")
+
     grouped_modules = group_modules(integrations + dependencies, platform, version, i)
     grouped_modules.sort(key=lambda x: abs(x['Size (Bytes)']), reverse=True)
     for module in grouped_modules:
         if module['Size (Bytes)'] > 0:
             module['Size'] = f"+{module['Size']}"
-    if grouped_modules == []:
-        app.display("No size differences were detected between the selected commits.")
     else:
         if csv:
             print_csv(app, i, grouped_modules)
@@ -111,21 +118,27 @@ def diff_mode(
 
 
 def get_repo_info(
-    gitRepo: GitRepo, platform: str, version: str, before: str, after: str, compressed: bool,progress: Progress,
+    gitRepo: GitRepo,
+    platform: str,
+    version: str,
+    before: str,
+    after: str,
+    compressed: bool,
+    progress: Progress,
 ) -> Tuple[Dict[str, int], Dict[str, int], Dict[str, int], Dict[str, int]]:
-    repo = gitRepo.repo_dir
-    task = progress.add_task("[cyan]Calculating sizes for the first commit...", total=None)
-    gitRepo.checkout_commit(before)
-    files_b = get_files(repo, compressed)
-    dependencies_b = get_dependencies(repo, platform, version, compressed)
-    progress.remove_task(task)
-
-    task = progress.add_task("[cyan]Calculating sizes for the second commit...", total=None)
-    gitRepo.checkout_commit(after)
-    files_a = get_files(repo, compressed)
-    dependencies_a = get_dependencies(repo, platform, version, compressed)
-    progress.remove_task(task)
-
+    with progress:
+        repo = gitRepo.repo_dir
+        task = progress.add_task("[cyan]Calculating sizes for the first commit...", total=None)
+        gitRepo.checkout_commit(before)
+        files_b = get_files(repo, compressed)
+        dependencies_b = get_dependencies(repo, platform, version, compressed)
+        progress.remove_task(task)
+
+        task = progress.add_task("[cyan]Calculating sizes for the second commit...", total=None)
+        gitRepo.checkout_commit(after)
+        files_a = get_files(repo, compressed)
+        dependencies_a = get_dependencies(repo, platform, version, compressed)
+        progress.remove_task(task)
 
     return files_b, dependencies_b, files_a, dependencies_a
 
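get_repo_info now enters the Progress object itself instead of assuming the caller keeps it running. A minimal sketch of the rich API used here; the task descriptions are taken from the hunk above, and the sleep calls are placeholders for the real checkout-and-measure work:

# Minimal sketch of rich's Progress used as a context manager, as in get_repo_info.
import time
from rich.progress import Progress

with Progress(transient=True) as progress:
    task = progress.add_task("[cyan]Calculating sizes for the first commit...", total=None)
    time.sleep(0.5)  # placeholder for checking out the first commit and measuring sizes
    progress.remove_task(task)

    task = progress.add_task("[cyan]Calculating sizes for the second commit...", total=None)
    time.sleep(0.5)  # placeholder for the second measurement
    progress.remove_task(task)
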
diff --git a/ddev/src/ddev/cli/size/status.py b/ddev/src/ddev/cli/size/status.py
index 66efd243b317b..f8877b1c9cbf9 100644
--- a/ddev/src/ddev/cli/size/status.py
+++ b/ddev/src/ddev/cli/size/status.py
@@ -69,7 +69,7 @@ def status_mode(app: Application, platform: str, version: str, compressed: bool,
     if csv:
         print_csv(app, i, grouped_modules)
     else:
-        print_table(app, "STATUS", grouped_modules)
+        print_table(app, "Status", grouped_modules)
 
 
 def get_files(compressed: bool) -> List[Dict[str, Union[str, int]]]:
diff --git a/ddev/src/ddev/cli/size/timeline.py b/ddev/src/ddev/cli/size/timeline.py
index 6cb1b5c3a8775..f41acf71b112c 100644
--- a/ddev/src/ddev/cli/size/timeline.py
+++ b/ddev/src/ddev/cli/size/timeline.py
@@ -33,7 +33,7 @@
 
 @click.command()
 @click.argument('type', type=click.Choice(['integration', 'dependency']))
-@click.argument('module')
+@click.argument('name')
 @click.argument('initial', required=False)
 @click.argument('final', required=False)
 @click.option(
@@ -52,7 +52,7 @@
 def timeline(
     app: Application,
     type: str,
-    module: str,
+    name: str,
     initial: Optional[str],
     final: Optional[str],
     time: Optional[str],
@@ -71,6 +71,7 @@ def timeline(
         TimeElapsedColumn(),
         transient=True,
     ) as progress:
+        module = name  # module is the name of the integration or the dependency
         task = progress.add_task("[cyan]Calculating timeline...", total=None)
         url = app.repo.path
         with GitRepo(url) as gitRepo:
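
The timeline command now exposes its positional argument as `name` on the CLI while the internals keep using `module`. A stripped-down, hypothetical sketch of that click pattern (the command body is a placeholder, not the real implementation):

# Sketch: a click command whose positional argument is named "name" for users,
# aliased to the internal variable the rest of the code expects.
import click

@click.command()
@click.argument('type', type=click.Choice(['integration', 'dependency']))
@click.argument('name')
def timeline_sketch(type: str, name: str) -> None:
    module = name  # internal code keeps calling the integration/dependency "module"
    click.echo(f"Computing timeline for {type} {module}")

if __name__ == "__main__":
    timeline_sketch()  # e.g. python timeline_sketch.py integration postgres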

From 8cc771027ae4eaf2908d1b4015e9a4f8c8ba4a3b Mon Sep 17 00:00:00 2001
From: Lucia Sanchez Bella <lucia.sanchezbella@datadoghq.com>
Date: Mon, 21 Apr 2025 14:33:25 +0200
Subject: [PATCH 21/40] fix typing

---
 ddev/src/ddev/cli/size/common.py   | 21 +++++++++++++--------
 ddev/src/ddev/cli/size/timeline.py | 30 ++++++++++++++++++------------
 2 files changed, 31 insertions(+), 20 deletions(-)

diff --git a/ddev/src/ddev/cli/size/common.py b/ddev/src/ddev/cli/size/common.py
index 7101401c72c7f..5c384a71dceea 100644
--- a/ddev/src/ddev/cli/size/common.py
+++ b/ddev/src/ddev/cli/size/common.py
@@ -8,6 +8,7 @@
 import tempfile
 import zipfile
 import zlib
+from datetime import date
 from pathlib import Path
 from types import TracebackType
 from typing import Dict, List, Optional, Set, Tuple, Type, Union
@@ -29,7 +30,7 @@ def valid_platforms_versions(repo_path: str) -> Tuple[Set[str], Set[str]]:
     return set(platforms), set(versions)
 
 
-def convert_size(size_bytes: int) -> str:
+def convert_size(size_bytes: float) -> str:
     for unit in [' B', ' KB', ' MB', ' GB']:
         if abs(size_bytes) < 1024:
             return str(round(size_bytes, 2)) + unit
@@ -58,7 +59,7 @@ def is_correct_dependency(platform: str, version: str, name: str) -> bool:
     return platform in name and version in name
 
 
-def print_csv(app: Application, i: Optional[int], modules: List[Dict[str, Union[str, int]]]) -> None:
+def print_csv(app: Application, i: Optional[int], modules: List[Dict[str, Union[str, int, date]]]) -> None:
     headers = [k for k in modules[0].keys() if k not in ['Size', 'Delta']]
     if not i:
         app.display(",".join(headers))
@@ -72,8 +73,8 @@ def format(s: str) -> str:
     return f'"{s}"' if "," in s else s
 
 
-def print_table(app: Application, mode: str, modules: List[Dict[str, Union[str, int]]]) -> None:
-    modules_table = {col: {} for col in modules[0].keys() if '(Bytes)' not in col}
+def print_table(app: Application, mode: str, modules: List[Dict[str, Union[str, int, date]]]) -> None:
+    modules_table : Dict[str, Dict[str, Union[str, int]]] = {col: {} for col in modules[0].keys() if '(Bytes)' not in col}
     for i, row in enumerate(modules):
         for key, value in row.items():
             if key in modules_table:
@@ -89,7 +90,11 @@ def get_dependencies_sizes(
         if compressed:
             response = requests.head(url)
             response.raise_for_status()
-            size = int(response.headers.get("Content-Length"))
+            size_str = response.headers.get("Content-Length")
+            if size_str is None:
+                raise ValueError(f"Missing size for {dep}")
+            size = int(size_str)
+
         else:
             with requests.get(url, stream=True) as response:
                 response.raise_for_status()
@@ -131,7 +136,6 @@ def get_dependencies_list(file_path: str) -> Tuple[List[str], List[str]]:
 def group_modules(
     modules: List[Dict[str, Union[str, int]]], platform: str, version: str, i: Optional[int]
 ) -> List[Dict[str, Union[str, int]]]:
-    grouped_aux = {}
     if modules == []:
         return [
             {
@@ -143,6 +147,7 @@ def group_modules(
                 'Version': '',
             }
         ]
+    grouped_aux : Dict[tuple[str, str], int] = {}
     for file in modules:
         key = (file['Name'], file['Type'])
         grouped_aux[key] = grouped_aux.get(key, 0) + file["Size (Bytes)"]
@@ -193,9 +198,9 @@ def __init__(self, mensaje: str) -> None:
 
 
 class GitRepo:
-    def __init__(self, url: str) -> None:
+    def __init__(self, url: Union[Path,str]) -> None:
         self.url = url
-        self.repo_dir = None
+        self.repo_dir : str
 
     def __enter__(self):
         self.repo_dir = tempfile.mkdtemp()
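
Content-Length is optional in HTTP responses, so `response.headers.get("Content-Length")` can return None; the guards added above turn that into an explicit error instead of a TypeError from `int(None)`. A minimal standalone version of the same check (the URL is a placeholder):

# Sketch of the Content-Length guard: requests.head() may not return the header,
# so fail loudly rather than passing None to int(). The URL below is an example.
import requests

def compressed_size(url: str) -> int:
    response = requests.head(url)
    response.raise_for_status()
    size_str = response.headers.get("Content-Length")
    if size_str is None:
        raise ValueError(f"Missing Content-Length for {url}")
    return int(size_str)

# Example (placeholder URL):
# print(compressed_size("https://example.com/some-wheel.whl"))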
diff --git a/ddev/src/ddev/cli/size/timeline.py b/ddev/src/ddev/cli/size/timeline.py
index f41acf71b112c..c17e30e451a44 100644
--- a/ddev/src/ddev/cli/size/timeline.py
+++ b/ddev/src/ddev/cli/size/timeline.py
@@ -4,7 +4,7 @@
 import zipfile
 from datetime import date, datetime
 from pathlib import Path
-from typing import Dict, List, Optional, Set, Tuple, Union
+from typing import Dict, List, Optional, Set, Tuple, Union, cast
 
 import click
 import requests
@@ -91,7 +91,7 @@ def timeline(
                 elif (
                     type == 'dependency'
                     and platform
-                    and module not in get_dependency_list(gitRepo.repo_dir, [platform])
+                    and module not in get_dependency_list(gitRepo.repo_dir, {platform})
                 ):
                     raise ValueError(
                         f"Dependency {module} not found in latest commit for the platform {platform}, "
@@ -193,8 +193,8 @@ def process_commits(
     folder = module if type == 'integration' else '.deps/resolved'
     for commit in commits:
         gitRepo.sparse_checkout_commit(commit, folder)
-        date, author, message = gitRepo.get_commit_metadata(commit)
-        date, message, commit = format_commit_data(date, message, commit, first_commit)
+        date_str, author, message = gitRepo.get_commit_metadata(commit)
+        date, message, commit = format_commit_data(date_str, message, commit, first_commit)
         if type == 'dependency' and date < MINIMUM_DATE:
             continue
         elif type == 'dependency':
@@ -257,7 +257,7 @@ def get_files(
 def get_dependencies(
     repo_path: str,
     module: str,
-    platform: Optional[str],
+    platform: str,
     commit: str,
     date: date,
     author: str,
@@ -274,6 +274,7 @@ def get_dependencies(
             return (
                 get_dependency_size(download_url, commit, date, author, message, compressed) if download_url else None
             )
+    return None
 
 
 def get_dependency(file_path: str, module: str) -> Optional[str]:
@@ -295,7 +296,10 @@ def get_dependency_size(
     if compressed:
         response = requests.head(download_url)
         response.raise_for_status()
-        size = int(response.headers.get("Content-Length"))
+        size_str = response.headers.get("Content-Length")
+        if size_str is None:
+            raise ValueError(f"Missing size for commit {commit}")
+        size = int(size_str)
     else:
         with requests.get(download_url, stream=True) as response:
             response.raise_for_status()
@@ -318,14 +322,14 @@ def get_dependency_size(
     return {"Size (Bytes)": size, "Date": date, "Author": author, "Commit Message": message, "Commit SHA": commit}
 
 
-def get_version(files: List[str], platform: Optional[str]) -> str:
+def get_version(files: List[str], platform: str) -> str:
     final_version = ''
     for file in files:
         if platform in file:
-            version = file.split('_')[-1]
-            match = re.search(r"\d+(?:\.\d+)?", version)
+            curr_version = file.split('_')[-1]
+            match = re.search(r"\d+(?:\.\d+)?", curr_version)
             version = match.group(0) if match else None
-            if version > final_version:
+            if version and version > final_version:
                 final_version = version
     return final_version if len(final_version) != 1 else 'py' + final_version
 
@@ -333,7 +337,7 @@ def get_version(files: List[str], platform: Optional[str]) -> str:
 def group_modules(
     modules: List[Dict[str, Union[str, int, date]]], platform: Optional[str], i: Optional[int]
 ) -> List[Dict[str, Union[str, int, date]]]:
-    grouped_aux = {}
+    grouped_aux : Dict[tuple[date, str, str, str], int] = {}
 
     for file in modules:
         key = (file['Date'], file['Author'], file['Commit Message'], file['Commit SHA'])
@@ -353,6 +357,7 @@ def group_modules(
             for (date, author, message, commit), size in grouped_aux.items()
         ]
     else:
+        assert platform is not None
         return [
             {
                 "Commit SHA": commit,
@@ -380,7 +385,8 @@ def trim_modules(
     for i in range(1, len(modules)):
         prev = modules[i - 1]
         curr = modules[i]
-        delta = curr['Size (Bytes)'] - prev['Size (Bytes)']
+        delta = cast(int, curr['Size (Bytes)']) - cast(int, prev['Size (Bytes)'])
+
         if abs(delta) > threshold_value or i == len(modules) - 1:
             curr['Delta (Bytes)'] = delta
             curr['Delta'] = convert_size(delta)
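
get_version now keeps the regex result in a separate variable and only compares when a version string was actually extracted, instead of comparing None. A rough standalone sketch of that guarded extraction; the file names are made up, and version comparison stays lexicographic as in the original:

# Sketch of the guarded version extraction in get_version: skip files whose name
# does not contain a parseable version rather than comparing against None.
import re

def highest_version(files: list[str], platform: str) -> str:
    final_version = ''
    for file in files:
        if platform in file:
            curr_version = file.split('_')[-1]
            match = re.search(r"\d+(?:\.\d+)?", curr_version)
            version = match.group(0) if match else None
            if version and version > final_version:  # string comparison, as in the original
                final_version = version
    return final_version

print(highest_version(["linux-x86_64_3.12.txt", "linux-x86_64_notes"], "linux-x86_64"))  # "3.12"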

From f1711ccdfb7921ba33ae38495d869e1a6f8adcf4 Mon Sep 17 00:00:00 2001
From: Lucia Sanchez Bella <lucia.sanchezbella@datadoghq.com>
Date: Mon, 21 Apr 2025 14:39:34 +0200
Subject: [PATCH 22/40] fix typing

---
 ddev/src/ddev/cli/size/common.py   | 10 ++++++----
 ddev/src/ddev/cli/size/timeline.py |  2 +-
 2 files changed, 7 insertions(+), 5 deletions(-)

diff --git a/ddev/src/ddev/cli/size/common.py b/ddev/src/ddev/cli/size/common.py
index 5c384a71dceea..68cc172689676 100644
--- a/ddev/src/ddev/cli/size/common.py
+++ b/ddev/src/ddev/cli/size/common.py
@@ -74,7 +74,9 @@ def format(s: str) -> str:
 
 
 def print_table(app: Application, mode: str, modules: List[Dict[str, Union[str, int, date]]]) -> None:
-    modules_table : Dict[str, Dict[str, Union[str, int]]] = {col: {} for col in modules[0].keys() if '(Bytes)' not in col}
+    modules_table: Dict[str, Dict[str, Union[str, int]]] = {
+        col: {} for col in modules[0].keys() if '(Bytes)' not in col
+    }
     for i, row in enumerate(modules):
         for key, value in row.items():
             if key in modules_table:
@@ -147,7 +149,7 @@ def group_modules(
                 'Version': '',
             }
         ]
-    grouped_aux : Dict[tuple[str, str], int] = {}
+    grouped_aux: Dict[tuple[str, str], int] = {}
     for file in modules:
         key = (file['Name'], file['Type'])
         grouped_aux[key] = grouped_aux.get(key, 0) + file["Size (Bytes)"]
@@ -198,9 +200,9 @@ def __init__(self, mensaje: str) -> None:
 
 
 class GitRepo:
-    def __init__(self, url: Union[Path,str]) -> None:
+    def __init__(self, url: Union[Path, str]) -> None:
         self.url = url
-        self.repo_dir : str
+        self.repo_dir: str
 
     def __enter__(self):
         self.repo_dir = tempfile.mkdtemp()
diff --git a/ddev/src/ddev/cli/size/timeline.py b/ddev/src/ddev/cli/size/timeline.py
index c17e30e451a44..dae017f410a69 100644
--- a/ddev/src/ddev/cli/size/timeline.py
+++ b/ddev/src/ddev/cli/size/timeline.py
@@ -337,7 +337,7 @@ def get_version(files: List[str], platform: str) -> str:
 def group_modules(
     modules: List[Dict[str, Union[str, int, date]]], platform: Optional[str], i: Optional[int]
 ) -> List[Dict[str, Union[str, int, date]]]:
-    grouped_aux : Dict[tuple[date, str, str, str], int] = {}
+    grouped_aux: Dict[tuple[date, str, str, str], int] = {}
 
     for file in modules:
         key = (file['Date'], file['Author'], file['Commit Message'], file['Commit SHA'])

From b33a1fde304144e6f6877510b2d8b109bc280039 Mon Sep 17 00:00:00 2001
From: Lucia Sanchez Bella <lucia.sanchezbella@datadoghq.com>
Date: Mon, 21 Apr 2025 15:28:55 +0200
Subject: [PATCH 23/40] fix types

---
 ddev/src/ddev/cli/size/common.py     | 16 +++---
 ddev/src/ddev/cli/size/diff.py       |  5 +-
 ddev/src/ddev/cli/size/status.py     |  1 +
 ddev/src/ddev/cli/size/timeline.py   | 12 +++--
 ddev/tests/cli/size/test_diff.py     | 73 ++++++++++++++++------------
 ddev/tests/cli/size/test_status.py   | 24 ++++-----
 ddev/tests/cli/size/test_timeline.py | 29 ++++++++---
 7 files changed, 98 insertions(+), 62 deletions(-)

diff --git a/ddev/src/ddev/cli/size/common.py b/ddev/src/ddev/cli/size/common.py
index 68cc172689676..f9c70d4747938 100644
--- a/ddev/src/ddev/cli/size/common.py
+++ b/ddev/src/ddev/cli/size/common.py
@@ -18,7 +18,7 @@
 from ddev.cli.application import Application
 
 
-def valid_platforms_versions(repo_path: str) -> Tuple[Set[str], Set[str]]:
+def valid_platforms_versions(repo_path: Union[Path, str]) -> Tuple[Set[str], Set[str]]:
     resolved_path = os.path.join(repo_path, ".deps/resolved")
     platforms = []
     versions = []
@@ -74,9 +74,7 @@ def format(s: str) -> str:
 
 
 def print_table(app: Application, mode: str, modules: List[Dict[str, Union[str, int, date]]]) -> None:
-    modules_table: Dict[str, Dict[str, Union[str, int]]] = {
-        col: {} for col in modules[0].keys() if '(Bytes)' not in col
-    }
+    modules_table: Dict[str, Dict[int, str]] = {col: {} for col in modules[0].keys() if '(Bytes)' not in col}
     for i, row in enumerate(modules):
         for key, value in row.items():
             if key in modules_table:
@@ -115,7 +113,7 @@ def get_dependencies_sizes(
                     for name in filenames:
                         file_path = os.path.join(dirpath, name)
                         size += os.path.getsize(file_path)
-        file_data.append({"File Path": dep, "Type": "Dependency", "Name": dep, "Size (Bytes)": int(size)})
+        file_data.append({"File Path": str(dep), "Type": "Dependency", "Name": str(dep), "Size (Bytes)": int(size)})
     return file_data
 
 
@@ -137,7 +135,7 @@ def get_dependencies_list(file_path: str) -> Tuple[List[str], List[str]]:
 
 def group_modules(
     modules: List[Dict[str, Union[str, int]]], platform: str, version: str, i: Optional[int]
-) -> List[Dict[str, Union[str, int]]]:
+) -> List[Dict[str, Union[str, int, date]]]:
     if modules == []:
         return [
             {
@@ -151,8 +149,8 @@ def group_modules(
         ]
     grouped_aux: Dict[tuple[str, str], int] = {}
     for file in modules:
-        key = (file['Name'], file['Type'])
-        grouped_aux[key] = grouped_aux.get(key, 0) + file["Size (Bytes)"]
+        key = (str(file['Name']), str(file['Type']))
+        grouped_aux[key] = grouped_aux.get(key, 0) + int(file["Size (Bytes)"])
     if i is None:
         return [
             {'Name': name, 'Type': type, 'Size (Bytes)': size, 'Size': convert_size(size)}
@@ -172,7 +170,7 @@ def group_modules(
         ]
 
 
-def get_gitignore_files(repo_path: str) -> List[str]:
+def get_gitignore_files(repo_path: Union[str, Path]) -> List[str]:
     gitignore_path = os.path.join(repo_path, ".gitignore")
     with open(gitignore_path, "r", encoding="utf-8") as file:
         gitignore_content = file.read()
diff --git a/ddev/src/ddev/cli/size/diff.py b/ddev/src/ddev/cli/size/diff.py
index 15ea32a971a1b..e0040e1e94b68 100644
--- a/ddev/src/ddev/cli/size/diff.py
+++ b/ddev/src/ddev/cli/size/diff.py
@@ -224,7 +224,10 @@ def get_dependencies_sizes(deps: List[str], download_urls: List[str], compressed
         if compressed:
             response = requests.head(url)
             response.raise_for_status()
-            size = int(response.headers.get("Content-Length"))
+            size_str = response.headers.get("Content-Length")
+            if size_str is None:
+                raise ValueError(f"Missing size for {dep}")
+            size = int(size_str)
         else:
             with requests.get(url, stream=True) as response:
                 response.raise_for_status()
diff --git a/ddev/src/ddev/cli/size/status.py b/ddev/src/ddev/cli/size/status.py
index f8877b1c9cbf9..0d0b1126aff08 100644
--- a/ddev/src/ddev/cli/size/status.py
+++ b/ddev/src/ddev/cli/size/status.py
@@ -109,3 +109,4 @@ def get_dependencies(platform: str, version: str, compressed: bool) -> List[Dict
         if os.path.isfile(file_path) and is_correct_dependency(platform, version, filename):
             deps, download_urls = get_dependencies_list(file_path)
             return get_dependencies_sizes(deps, download_urls, compressed)
+    return {}
diff --git a/ddev/src/ddev/cli/size/timeline.py b/ddev/src/ddev/cli/size/timeline.py
index dae017f410a69..d268d44c05109 100644
--- a/ddev/src/ddev/cli/size/timeline.py
+++ b/ddev/src/ddev/cli/size/timeline.py
@@ -198,6 +198,7 @@ def process_commits(
         if type == 'dependency' and date < MINIMUM_DATE:
             continue
         elif type == 'dependency':
+            assert platform is not None
             result = get_dependencies(repo, module, platform, commit, date, author, message, compressed)
             if result:
                 file_data.append(result)
@@ -340,8 +341,13 @@ def group_modules(
     grouped_aux: Dict[tuple[date, str, str, str], int] = {}
 
     for file in modules:
-        key = (file['Date'], file['Author'], file['Commit Message'], file['Commit SHA'])
-        grouped_aux[key] = grouped_aux.get(key, 0) + file["Size (Bytes)"]
+        key = (
+            cast(date, file['Date']),
+            cast(str, file['Author']),
+            cast(str, file['Commit Message']),
+            cast(str, file['Commit SHA']),
+        )
+        grouped_aux[key] = grouped_aux.get(key, 0) + cast(int, file["Size (Bytes)"])
     if i is None:
         return [
             {
@@ -407,7 +413,7 @@ def module_exists(path: str, module: str) -> bool:
     return os.path.exists(os.path.join(path, module))
 
 
-def get_dependency_list(path: str, platforms: List[str]) -> Set[str]:
+def get_dependency_list(path: str, platforms: Set[str]) -> Set[str]:
     resolved_path = os.path.join(path, ".deps/resolved")
     all_files = os.listdir(resolved_path)
     dependencies = set()
diff --git a/ddev/tests/cli/size/test_diff.py b/ddev/tests/cli/size/test_diff.py
index ba1d0a826ad63..ab2dad24a32de 100644
--- a/ddev/tests/cli/size/test_diff.py
+++ b/ddev/tests/cli/size/test_diff.py
@@ -231,14 +231,18 @@ def test_diff_invalid_platform(ddev):
     mock_git_repo.repo_dir = "/tmp/fake_repo"
     mock_git_repo.get_module_commits.return_value = ["commit1", "commit2"]
     mock_git_repo.get_commit_metadata.side_effect = lambda c: ("Apr 4 2025", "Fix dep", c)
-    patch(
-        "ddev.cli.size.timeline.valid_platforms_versions",
-        return_value=({'linux-x86_64', 'macos-x86_64', 'linux-aarch64', 'windows-x86_64'}, {'3.12'}),
-    ),
-    result = ddev(
-        'size', 'diff', 'commit1', 'commit2', '--platform', 'linux', '--python', '3.12', '--compressed'  # inválido
-    )
-    assert result.exit_code != 0
+    mock_git_repo.__enter__.return_value = mock_git_repo
+    with (
+        patch("ddev.cli.size.diff.GitRepo", return_value=mock_git_repo),
+        patch(
+            "ddev.cli.size.timeline.valid_platforms_versions",
+            return_value=({'linux-x86_64', 'macos-x86_64', 'linux-aarch64', 'windows-x86_64'}, {'3.12'}),
+        ),
+    ):
+        result = ddev(
+            'size', 'diff', 'commit1', 'commit2', '--platform', 'linux', '--python', '3.12', '--compressed'  # invalid
+        )
+        assert result.exit_code != 0
 
 
 def test_diff_invalid_version(ddev):
@@ -246,22 +250,27 @@ def test_diff_invalid_version(ddev):
     mock_git_repo.repo_dir = "/tmp/fake_repo"
     mock_git_repo.get_module_commits.return_value = ["commit1", "commit2"]
     mock_git_repo.get_commit_metadata.side_effect = lambda c: ("Apr 4 2025", "Fix dep", c)
-    patch(
-        "ddev.cli.size.timeline.valid_platforms_versions",
-        return_value=({'linux-x86_64', 'macos-x86_64', 'linux-aarch64', 'windows-x86_64'}, {'3.12'}),
-    ),
-    result = ddev(
-        'size',
-        'diff',
-        'commit1',
-        'commit2',
-        '--platform',
-        'linux-aarch64',
-        '--python',
-        '2.10',  # inválido
-        '--compressed',
-    )
-    assert result.exit_code != 0
+    mock_git_repo.__enter__.return_value = mock_git_repo
+
+    with (
+        patch("ddev.cli.size.diff.GitRepo", return_value=mock_git_repo),
+        patch(
+            "ddev.cli.size.timeline.valid_platforms_versions",
+            return_value=({'linux-x86_64', 'macos-x86_64', 'linux-aarch64', 'windows-x86_64'}, {'3.12'}),
+        ),
+    ):
+        result = ddev(
+            'size',
+            'diff',
+            'commit1',
+            'commit2',
+            '--platform',
+            'linux-aarch64',
+            '--python',
+            '2.10',  # invalid
+            '--compressed',
+        )
+        assert result.exit_code != 0
 
 
 def test_diff_invalid_platform_and_version(ddev):
@@ -269,9 +278,13 @@ def test_diff_invalid_platform_and_version(ddev):
     mock_git_repo.repo_dir = "/tmp/fake_repo"
     mock_git_repo.get_module_commits.return_value = ["commit1", "commit2"]
     mock_git_repo.get_commit_metadata.side_effect = lambda c: ("Apr 4 2025", "Fix dep", c)
-    patch(
-        "ddev.cli.size.timeline.valid_platforms_versions",
-        return_value=({'linux-x86_64', 'macos-x86_64', 'linux-aarch64', 'windows-x86_64'}, {'3.12'}),
-    ),
-    result = ddev('size', 'diff', 'commit1', 'commit2', '--platform', 'linux', '--python', '2.10', '--compressed')
-    assert result.exit_code != 0
+    mock_git_repo.__enter__.return_value = mock_git_repo
+    with (
+        patch("ddev.cli.size.diff.GitRepo", return_value=mock_git_repo),
+        patch(
+            "ddev.cli.size.timeline.valid_platforms_versions",
+            return_value=({'linux-x86_64', 'macos-x86_64', 'linux-aarch64', 'windows-x86_64'}, {'3.12'}),
+        ),
+    ):
+        result = ddev('size', 'diff', 'commit1', 'commit2', '--platform', 'linux', '--python', '2.10', '--compressed')
+        assert result.exit_code != 0
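
The tests above previously constructed `patch(...)` objects (followed by a stray trailing comma) without ever entering them, so nothing was actually mocked; wrapping them in `with` blocks is what activates the replacement. A small self-contained illustration of the difference, using a throwaway target rather than the ddev modules:

# Sketch: unittest.mock.patch only takes effect when entered (via "with" or .start()).
# Merely constructing the patcher, as the old tests did, changes nothing.
import os
from unittest.mock import patch

patcher = patch("os.getcwd", return_value="/fake")  # constructed but never entered
print(os.getcwd() == "/fake")   # False -- the patch is not active

with patch("os.getcwd", return_value="/fake"):
    print(os.getcwd())          # "/fake" -- active only inside the with block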
diff --git a/ddev/tests/cli/size/test_status.py b/ddev/tests/cli/size/test_status.py
index cce9193345de8..141b0b5529675 100644
--- a/ddev/tests/cli/size/test_status.py
+++ b/ddev/tests/cli/size/test_status.py
@@ -140,27 +140,27 @@ def test_status_csv(ddev, mock_size_status):
 
 
 def test_status_wrong_platform(ddev):
-    patch(
+    with patch(
         "ddev.cli.size.timeline.valid_platforms_versions",
         return_value=({'linux-x86_64', 'macos-x86_64', 'linux-aarch64', 'windows-x86_64'}, {'3.12'}),
-    ),
-    result = ddev('size', 'status', '--platform', 'linux', '--python', '3.12', '--compressed')
-    assert result.exit_code != 0
+    ):
+        result = ddev('size', 'status', '--platform', 'linux', '--python', '3.12', '--compressed')
+        assert result.exit_code != 0
 
 
 def test_status_wrong_version(ddev):
-    patch(
+    with patch(
         "ddev.cli.size.timeline.valid_platforms_versions",
         return_value=({'linux-x86_64', 'macos-x86_64', 'linux-aarch64', 'windows-x86_64'}, {'3.12'}),
-    ),
-    result = ddev('size', 'status', '--platform', 'linux-aarch64', '--python', '2.10', '--compressed')
-    assert result.exit_code != 0
+    ):
+        result = ddev('size', 'status', '--platform', 'linux-aarch64', '--python', '2.10', '--compressed')
+        assert result.exit_code != 0
 
 
 def test_status_wrong_plat_and_version(ddev):
-    patch(
+    with patch(
         "ddev.cli.size.timeline.valid_platforms_versions",
         return_value=({'linux-x86_64', 'macos-x86_64', 'linux-aarch64', 'windows-x86_64'}, {'3.12'}),
-    ),
-    result = ddev('size', 'status', '--platform', 'linux', '--python', '2.10', '--compressed')
-    assert result.exit_code != 0
+    ):
+        result = ddev('size', 'status', '--platform', 'linux', '--python', '2.10', '--compressed')
+        assert result.exit_code != 0
diff --git a/ddev/tests/cli/size/test_timeline.py b/ddev/tests/cli/size/test_timeline.py
index 6042f92f85d5a..a55c1e8851333 100644
--- a/ddev/tests/cli/size/test_timeline.py
+++ b/ddev/tests/cli/size/test_timeline.py
@@ -298,13 +298,28 @@ def test_timeline_invalid_platform(ddev):
     mock_git_repo.repo_dir = "/tmp/fake_repo"
     mock_git_repo.get_module_commits.return_value = ["commit1", "commit2"]
     mock_git_repo.get_commit_metadata.side_effect = lambda c: ("Apr 4 2025", "Fix dep", c)
-    patch(
-        "ddev.cli.size.timeline.valid_platforms_versions",
-        return_value=({'linux-x86_64', 'macos-x86_64', 'linux-aarch64', 'windows-x86_64'}, {'3.12'}),
-    ),
-    result = ddev(
-        "size", "timeline", "dependency", "dep1", "commit1", "commit2", "--compressed", "--platform", "invalid-platform"
-    )
+    mock_git_repo.__enter__.return_value = mock_git_repo
+
+    with (
+        patch("ddev.cli.size.timeline.GitRepo", return_value=mock_git_repo),
+        patch(
+            "ddev.cli.size.timeline.valid_platforms_versions",
+            return_value=({'linux-x86_64', 'linux-aarch64', 'macos-x86_64'}, {'3.12'}),
+        ),
+    ):
+
+        result = ddev(
+            "size",
+            "timeline",
+            "dependency",
+            "dep1",
+            "commit1",
+            "commit2",
+            "--compressed",
+            "--platform",
+            "invalid-platform",
+        )
+
     assert result.exit_code != 0
 
 

From f16938b15d0612a594f12ade5863e6cc87dc4c0b Mon Sep 17 00:00:00 2001
From: Lucia Sanchez Bella <lucia.sanchezbella@datadoghq.com>
Date: Mon, 21 Apr 2025 15:40:33 +0200
Subject: [PATCH 24/40] fix types

---
 ddev/src/ddev/cli/size/common.py | 5 +++--
 ddev/src/ddev/cli/size/diff.py   | 8 ++++----
 ddev/src/ddev/cli/size/status.py | 8 ++++----
 3 files changed, 11 insertions(+), 10 deletions(-)

diff --git a/ddev/src/ddev/cli/size/common.py b/ddev/src/ddev/cli/size/common.py
index f9c70d4747938..df36acd4ee195 100644
--- a/ddev/src/ddev/cli/size/common.py
+++ b/ddev/src/ddev/cli/size/common.py
@@ -11,7 +11,7 @@
 from datetime import date
 from pathlib import Path
 from types import TracebackType
-from typing import Dict, List, Optional, Set, Tuple, Type, Union
+from typing import Dict, List, Optional, Set, Tuple, Type, Union, cast
 
 import requests
 
@@ -114,7 +114,8 @@ def get_dependencies_sizes(
                         file_path = os.path.join(dirpath, name)
                         size += os.path.getsize(file_path)
         file_data.append({"File Path": str(dep), "Type": "Dependency", "Name": str(dep), "Size (Bytes)": int(size)})
-    return file_data
+    return cast(List[Dict[str, Union[str, int]]], file_data)
+
 
 
 def get_dependencies_list(file_path: str) -> Tuple[List[str], List[str]]:
diff --git a/ddev/src/ddev/cli/size/diff.py b/ddev/src/ddev/cli/size/diff.py
index e0040e1e94b68..64b13989ce07a 100644
--- a/ddev/src/ddev/cli/size/diff.py
+++ b/ddev/src/ddev/cli/size/diff.py
@@ -6,7 +6,7 @@
 import tempfile
 import zipfile
 from pathlib import Path
-from typing import Dict, List, Optional, Tuple
+from typing import Dict, List, Optional, Tuple, cast
 
 import click
 import requests
@@ -106,9 +106,9 @@ def diff_mode(
         app.display(f"No size differences were detected between the selected commits for {platform}.")
 
     grouped_modules = group_modules(integrations + dependencies, platform, version, i)
-    grouped_modules.sort(key=lambda x: abs(x['Size (Bytes)']), reverse=True)
+    grouped_modules.sort(key=lambda x: abs(int(x['Size (Bytes)'])), reverse=True)
     for module in grouped_modules:
-        if module['Size (Bytes)'] > 0:
+        if int(module['Size (Bytes)']) > 0:
             module['Size'] = f"+{module['Size']}"
     else:
         if csv:
@@ -181,7 +181,7 @@ def get_diff(size_before: Dict[str, int], size_after: Dict[str, int], type: str)
                     }
                 )
 
-    return diff_files
+    return cast(List[Dict[str, str | int]], diff_files)
 
 
 def get_files(repo_path: str, compressed: bool) -> Dict[str, int]:
diff --git a/ddev/src/ddev/cli/size/status.py b/ddev/src/ddev/cli/size/status.py
index 0d0b1126aff08..ac0b545c481c5 100644
--- a/ddev/src/ddev/cli/size/status.py
+++ b/ddev/src/ddev/cli/size/status.py
@@ -4,7 +4,7 @@
 
 import os
 from pathlib import Path
-from typing import Dict, List, Optional, Union
+from typing import Dict, List, Optional, Union, cast
 
 import click
 from rich.console import Console
@@ -95,10 +95,10 @@ def get_files(compressed: bool) -> List[Dict[str, Union[str, int]]]:
                         "File Path": relative_path,
                         "Type": "Integration",
                         "Name": integration,
-                        "Size (Bytes)": size,
+                        "Size (Bytes)": int(size),
                     }
                 )
-    return file_data
+    return cast(List[Dict[str, Union[str, int]]], file_data)
 
 
 def get_dependencies(platform: str, version: str, compressed: bool) -> List[Dict[str, Union[str, int]]]:
@@ -109,4 +109,4 @@ def get_dependencies(platform: str, version: str, compressed: bool) -> List[Dict
         if os.path.isfile(file_path) and is_correct_dependency(platform, version, filename):
             deps, download_urls = get_dependencies_list(file_path)
             return get_dependencies_sizes(deps, download_urls, compressed)
-    return {}
+    return []
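
The casts introduced in this patch only inform the type checker; at runtime `typing.cast` returns its argument unchanged, which is why wrapping already-correct values is safe. A minimal illustration:

# Sketch: typing.cast performs no conversion at runtime; it is purely a hint for
# static type checkers such as mypy.
from typing import Dict, List, Union, cast

row: Dict[str, Union[str, int]] = {"Name": "integration_a", "Size (Bytes)": 2048}
rows = cast(List[Dict[str, Union[str, int]]], [row])
print(rows[0] is row)  # True: cast returned the same object unchanged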

From 4d2c8e96cbef90727788265f1d2d7730c6853d76 Mon Sep 17 00:00:00 2001
From: Lucia Sanchez Bella <lucia.sanchezbella@datadoghq.com>
Date: Mon, 21 Apr 2025 15:52:00 +0200
Subject: [PATCH 25/40] fix typing

---
 ddev/src/ddev/cli/size/diff.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/ddev/src/ddev/cli/size/diff.py b/ddev/src/ddev/cli/size/diff.py
index 64b13989ce07a..37347609a4066 100644
--- a/ddev/src/ddev/cli/size/diff.py
+++ b/ddev/src/ddev/cli/size/diff.py
@@ -217,7 +217,6 @@ def get_dependencies(repo_path: str, platform: str, version: str, compressed: bo
             return get_dependencies_sizes(deps, download_urls, compressed)
     return {}
 
-
 def get_dependencies_sizes(deps: List[str], download_urls: List[str], compressed: bool) -> Dict[str, int]:
     file_data = {}
     for dep, url in zip(deps, download_urls, strict=False):

From 3d6a2288c7186f68388f73a29ddcf94be7a3797b Mon Sep 17 00:00:00 2001
From: Lucia Sanchez Bella <lucia.sanchezbella@datadoghq.com>
Date: Mon, 21 Apr 2025 15:54:12 +0200
Subject: [PATCH 26/40] fix typing

---
 ddev/src/ddev/cli/size/common.py | 1 -
 ddev/src/ddev/cli/size/diff.py   | 1 +
 2 files changed, 1 insertion(+), 1 deletion(-)

diff --git a/ddev/src/ddev/cli/size/common.py b/ddev/src/ddev/cli/size/common.py
index df36acd4ee195..1c619c7d4e990 100644
--- a/ddev/src/ddev/cli/size/common.py
+++ b/ddev/src/ddev/cli/size/common.py
@@ -117,7 +117,6 @@ def get_dependencies_sizes(
     return cast(List[Dict[str, Union[str, int]]], file_data)
 
 
-
 def get_dependencies_list(file_path: str) -> Tuple[List[str], List[str]]:
     download_urls = []
     deps = []
diff --git a/ddev/src/ddev/cli/size/diff.py b/ddev/src/ddev/cli/size/diff.py
index 37347609a4066..64b13989ce07a 100644
--- a/ddev/src/ddev/cli/size/diff.py
+++ b/ddev/src/ddev/cli/size/diff.py
@@ -217,6 +217,7 @@ def get_dependencies(repo_path: str, platform: str, version: str, compressed: bo
             return get_dependencies_sizes(deps, download_urls, compressed)
     return {}
 
+
 def get_dependencies_sizes(deps: List[str], download_urls: List[str], compressed: bool) -> Dict[str, int]:
     file_data = {}
     for dep, url in zip(deps, download_urls, strict=False):

From d97854beeb51ef8a51a786e1111dbf44eb1aa882 Mon Sep 17 00:00:00 2001
From: Lucia Sanchez Bella <lucia.sanchezbella@datadoghq.com>
Date: Mon, 21 Apr 2025 16:55:17 +0200
Subject: [PATCH 27/40] fix typing

---
 ddev/src/ddev/cli/size/diff.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/ddev/src/ddev/cli/size/diff.py b/ddev/src/ddev/cli/size/diff.py
index 64b13989ce07a..83d35a97326fa 100644
--- a/ddev/src/ddev/cli/size/diff.py
+++ b/ddev/src/ddev/cli/size/diff.py
@@ -106,9 +106,9 @@ def diff_mode(
         app.display(f"No size differences were detected between the selected commits for {platform}.")
 
     grouped_modules = group_modules(integrations + dependencies, platform, version, i)
-    grouped_modules.sort(key=lambda x: abs(int(x['Size (Bytes)'])), reverse=True)
+    grouped_modules.sort(key=lambda x: abs(cast(int, x['Size (Bytes)'])), reverse=True)
     for module in grouped_modules:
-        if int(module['Size (Bytes)']) > 0:
+        if cast(int,module['Size (Bytes)']) > 0:
             module['Size'] = f"+{module['Size']}"
     else:
         if csv:

From f71309711e588dd373d3c165889521164e1049eb Mon Sep 17 00:00:00 2001
From: Lucia Sanchez Bella <lucia.sanchezbella@datadoghq.com>
Date: Mon, 21 Apr 2025 16:58:42 +0200
Subject: [PATCH 28/40] fix typing

---
 ddev/src/ddev/cli/size/diff.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/ddev/src/ddev/cli/size/diff.py b/ddev/src/ddev/cli/size/diff.py
index 83d35a97326fa..12e57c313aec4 100644
--- a/ddev/src/ddev/cli/size/diff.py
+++ b/ddev/src/ddev/cli/size/diff.py
@@ -108,7 +108,7 @@ def diff_mode(
     grouped_modules = group_modules(integrations + dependencies, platform, version, i)
     grouped_modules.sort(key=lambda x: abs(cast(int, x['Size (Bytes)'])), reverse=True)
     for module in grouped_modules:
-        if cast(int,module['Size (Bytes)']) > 0:
+        if cast(int, module['Size (Bytes)']) > 0:
             module['Size'] = f"+{module['Size']}"
     else:
         if csv:

From 0d750b332df6f5ebee1e38b8d77230fb345d2310 Mon Sep 17 00:00:00 2001
From: Lucia Sanchez Bella <lucia.sanchezbella@datadoghq.com>
Date: Tue, 22 Apr 2025 09:36:37 +0200
Subject: [PATCH 29/40] fix tests for Windows

---
 ddev/tests/cli/size/test_status.py | 20 ++++++++++++++------
 1 file changed, 14 insertions(+), 6 deletions(-)

diff --git a/ddev/tests/cli/size/test_status.py b/ddev/tests/cli/size/test_status.py
index 141b0b5529675..ba7e943c59b82 100644
--- a/ddev/tests/cli/size/test_status.py
+++ b/ddev/tests/cli/size/test_status.py
@@ -2,6 +2,7 @@
 # All rights reserved
 # Licensed under a 3-clause BSD style license (see LICENSE)
 
+import os
 from unittest.mock import MagicMock, mock_open, patch
 
 import pytest
@@ -12,10 +13,14 @@
 )
 
 
+def to_native_path(path: str) -> str:
+    return path.replace("/", os.sep)
+
+
 def test_get_files_compressed():
     mock_files = [
-        ("root/integration/datadog_checks", [], ["file1.py", "file2.py"]),
-        ("root/integration_b/datadog_checks", [], ["file3.py"]),
+        (os.path.join("root", "integration", "datadog_checks"), [], ["file1.py", "file2.py"]),
+        (os.path.join("root", "integration_b", "datadog_checks"), [], ["file3.py"]),
         ("root", [], ["ignored.py"]),
     ]
 
@@ -26,7 +31,7 @@ def fake_compress(file_path):
 
     with (
         patch("os.walk", return_value=mock_files),
-        patch("os.path.relpath", side_effect=lambda path, _: path.replace("root/", "")),
+        patch("os.path.relpath", side_effect=lambda path, _: os.path.relpath(path, "root")),
         patch("ddev.cli.size.status.get_gitignore_files", return_value=fake_gitignore),
         patch(
             "ddev.cli.size.status.is_valid_integration",
@@ -39,19 +44,19 @@ def fake_compress(file_path):
 
     expected = [
         {
-            "File Path": "integration/datadog_checks/file1.py",
+            "File Path": to_native_path("integration/datadog_checks/file1.py"),
             "Type": "Integration",
             "Name": "integration",
             "Size (Bytes)": 1000,
         },
         {
-            "File Path": "integration/datadog_checks/file2.py",
+            "File Path": to_native_path("integration/datadog_checks/file2.py"),
             "Type": "Integration",
             "Name": "integration",
             "Size (Bytes)": 1000,
         },
         {
-            "File Path": "integration_b/datadog_checks/file3.py",
+            "File Path": to_native_path("integration_b/datadog_checks/file3.py"),
             "Type": "Integration",
             "Name": "integration_b",
             "Size (Bytes)": 1000,
@@ -126,16 +131,19 @@ def mock_size_status():
 
 def test_status_no_args(ddev, mock_size_status):
     result = ddev('size', 'status', '--compressed')
+    print(result.output)
     assert result.exit_code == 0
 
 
 def test_status(ddev, mock_size_status):
     result = ddev('size', 'status', '--platform', 'linux-aarch64', '--python', '3.12', '--compressed')
+    print(result.output)
     assert result.exit_code == 0
 
 
 def test_status_csv(ddev, mock_size_status):
     result = ddev('size', 'status', '--platform', 'linux-aarch64', '--python', '3.12', '--compressed', '--csv')
+    print(result.output)
     assert result.exit_code == 0
 
 

From 06a2b5f0109a17ae79cf3d8d769bba20124c629f Mon Sep 17 00:00:00 2001
From: Lucia Sanchez Bella <lucia.sanchezbella@datadoghq.com>
Date: Tue, 22 Apr 2025 10:03:22 +0200
Subject: [PATCH 30/40] Fix tests

---
 ddev/tests/cli/size/test_status.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/ddev/tests/cli/size/test_status.py b/ddev/tests/cli/size/test_status.py
index ba7e943c59b82..2825afa5e7d43 100644
--- a/ddev/tests/cli/size/test_status.py
+++ b/ddev/tests/cli/size/test_status.py
@@ -31,7 +31,7 @@ def fake_compress(file_path):
 
     with (
         patch("os.walk", return_value=mock_files),
-        patch("os.path.relpath", side_effect=lambda path, _: os.path.relpath(path, "root")),
+        patch("os.path.relpath", side_effect=lambda path, _: path.replace(f"root{os.sep}", "")),
         patch("ddev.cli.size.status.get_gitignore_files", return_value=fake_gitignore),
         patch(
             "ddev.cli.size.status.is_valid_integration",

From 63137be3a5db019c15848ae9ceb0a325fdd9a2ca Mon Sep 17 00:00:00 2001
From: Lucia Sanchez Bella <lucia.sanchezbella@datadoghq.com>
Date: Tue, 22 Apr 2025 12:05:24 +0200
Subject: [PATCH 31/40] fix windows tests
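
Thread the repository path through the status and diff helpers, build
paths with os.path.join/os.sep, and make the test fixtures OS-agnostic.
Also adds a first treemap visualization (plot_treemap) based on
matplotlib and squarify, declared as new ddev dependencies.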

---
 ddev/pyproject.toml                  |   4 +-
 ddev/src/ddev/cli/size/common.py     | 126 ++++++++++++++++++++++-----
 ddev/src/ddev/cli/size/diff.py       |   4 +-
 ddev/src/ddev/cli/size/status.py     |  30 ++++---
 ddev/tests/cli/size/test_common.py   |  27 +++---
 ddev/tests/cli/size/test_diff.py     |  57 ++++++------
 ddev/tests/cli/size/test_status.py   |  53 +++++------
 ddev/tests/cli/size/test_timeline.py |  38 ++++----
 8 files changed, 219 insertions(+), 120 deletions(-)

diff --git a/ddev/pyproject.toml b/ddev/pyproject.toml
index 8a1ab15edbb2c..78378e34e80de 100644
--- a/ddev/pyproject.toml
+++ b/ddev/pyproject.toml
@@ -40,7 +40,9 @@ dependencies = [
     "tomli-w",
     "tomlkit",
     "tqdm",
-    "requests"
+    "requests",
+    "matplotlib",
+    "squarify"
 ]
 dynamic = ["version"]
 
diff --git a/ddev/src/ddev/cli/size/common.py b/ddev/src/ddev/cli/size/common.py
index 1c619c7d4e990..25c0521e6ecc1 100644
--- a/ddev/src/ddev/cli/size/common.py
+++ b/ddev/src/ddev/cli/size/common.py
@@ -13,17 +13,21 @@
 from types import TracebackType
 from typing import Dict, List, Optional, Set, Tuple, Type, Union, cast
 
+import matplotlib.cm as cm
+import matplotlib.pyplot as plt
 import requests
+import squarify
+from matplotlib.patches import Patch
 
 from ddev.cli.application import Application
 
 
 def valid_platforms_versions(repo_path: Union[Path, str]) -> Tuple[Set[str], Set[str]]:
-    resolved_path = os.path.join(repo_path, ".deps/resolved")
+    resolved_path = os.path.join(repo_path, ".deps", "resolved")
     platforms = []
     versions = []
     for file in os.listdir(resolved_path):
-        platforms.append("_".join(file.split('_')[:-1]))
+        platforms.append("_".join(file.split("_")[:-1]))
         match = re.search(r"\d+\.\d+", file)
         if match:
             versions.append(match.group())
@@ -31,7 +35,7 @@ def valid_platforms_versions(repo_path: Union[Path, str]) -> Tuple[Set[str], Set
 
 
 def convert_size(size_bytes: float) -> str:
-    for unit in [' B', ' KB', ' MB', ' GB']:
+    for unit in [" B", " KB", " MB", " GB"]:
         if abs(size_bytes) < 1024:
             return str(round(size_bytes, 2)) + unit
         size_bytes /= 1024
@@ -40,7 +44,7 @@ def convert_size(size_bytes: float) -> str:
 
 def is_valid_integration(path: str, included_folder: str, ignored_files: Set[str], git_ignore: List[str]) -> bool:
     # It is not an integration
-    if path.startswith('.'):
+    if path.startswith("."):
         return False
     # It is part of an integration and it is not in the datadog_checks folder
     elif included_folder not in path:
@@ -60,7 +64,7 @@ def is_correct_dependency(platform: str, version: str, name: str) -> bool:
 
 
 def print_csv(app: Application, i: Optional[int], modules: List[Dict[str, Union[str, int, date]]]) -> None:
-    headers = [k for k in modules[0].keys() if k not in ['Size', 'Delta']]
+    headers = [k for k in modules[0].keys() if k not in ["Size", "Delta"]]
     if not i:
         app.display(",".join(headers))
 
@@ -74,7 +78,7 @@ def format(s: str) -> str:
 
 
 def print_table(app: Application, mode: str, modules: List[Dict[str, Union[str, int, date]]]) -> None:
-    modules_table: Dict[str, Dict[int, str]] = {col: {} for col in modules[0].keys() if '(Bytes)' not in col}
+    modules_table: Dict[str, Dict[int, str]] = {col: {} for col in modules[0].keys() if "(Bytes)" not in col}
     for i, row in enumerate(modules):
         for key, value in row.items():
             if key in modules_table:
@@ -82,6 +86,84 @@ def print_table(app: Application, mode: str, modules: List[Dict[str, Union[str,
     app.display_table(mode, modules_table)
 
 
+def plot_treemap(modules):
+    sizes = [mod["Size (Bytes)"] for mod in modules]
+
+    integrations = [mod for mod in modules if mod["Type"] == "Integration"]
+    dependencies = [mod for mod in modules if mod["Type"] == "Dependency"]
+
+    def normalize(mods):
+        if not mods:
+            return []
+        sizes = [mod["Size (Bytes)"] for mod in mods]
+        min_size = min(sizes)
+        max_size = max(sizes)
+        range_size = max_size - min_size or 1
+        return [(s - min_size) / range_size for s in sizes]
+
+    norm_int = normalize(integrations)
+    norm_dep = normalize(dependencies)
+
+    # Use lighter color range: from 0.3 to 0.85
+    def scale(val, vmin=0.3, vmax=0.85):
+        return vmin + val * (vmax - vmin)
+
+    cmap_int = cm.get_cmap("Purples")
+    cmap_dep = cm.get_cmap("Reds")
+
+    colors = []
+    for mod in modules:
+        if mod["Type"] == "Integration":
+            idx = integrations.index(mod)
+            colors.append(cmap_int(scale(norm_int[idx], 0.6, 0.85)))  # lighter start for integrations
+        elif mod["Type"] == "Dependency":
+            idx = dependencies.index(mod)
+            colors.append(cmap_dep(scale(norm_dep[idx], 0.3, 0.85)))
+        else:
+            colors.append("#999999")
+
+    plt.figure(figsize=(12, 8))
+    ax = plt.gca()
+    ax.set_axis_off()
+
+    rects = squarify.normalize_sizes(sizes, 100, 100)
+    rects = squarify.squarify(rects, 0, 0, 100, 100)
+
+    for rect, mod, color in zip(rects, modules, colors, strict=False):
+        x, y, dx, dy = rect["x"], rect["y"], rect["dx"], rect["dy"]
+        ax.add_patch(plt.Rectangle((x, y), dx, dy, color=color, ec="white"))
+
+        area = dx * dy
+        font_size = max(6, min(18, area / 100))
+
+        if area > 400:
+            label = f"{mod['Name']}\n({mod['Size']})"
+        elif area > 40:
+            label = f"{mod['Name']}"
+        else:
+            label = None
+
+        if label:
+            ax.text(
+                x + dx / 2, y + dy / 2, label, va="center", ha="center", fontsize=font_size, color="black", wrap=True
+            )
+
+    ax.set_xlim(0, 100)
+    ax.set_ylim(0, 100)
+
+    plt.title("Modules by Disk Usage", fontsize=16)
+
+    legend_handles = [
+        Patch(color=cmap_int(0.6), label="Integration"),
+        Patch(color=cmap_dep(0.6), label="Dependency"),
+    ]
+    plt.legend(handles=legend_handles, title="Type", loc="center left", bbox_to_anchor=(1.0, 0.5))
+
+    plt.subplots_adjust(right=0.8)
+    plt.tight_layout()
+    plt.show()
+
+
 def get_dependencies_sizes(
     deps: List[str], download_urls: List[str], compressed: bool
 ) -> List[Dict[str, Union[str, int]]]:
@@ -105,7 +187,7 @@ def get_dependencies_sizes(
                 with open(wheel_path, "wb") as f:
                     f.write(wheel_data)
                 extract_path = Path(tmpdir) / "extracted"
-                with zipfile.ZipFile(wheel_path, 'r') as zip_ref:
+                with zipfile.ZipFile(wheel_path, "r") as zip_ref:
                     zip_ref.extractall(extract_path)
 
                 size = 0
@@ -139,32 +221,32 @@ def group_modules(
     if modules == []:
         return [
             {
-                'Name': '',
-                'Type': '',
-                'Size (Bytes)': 0,
-                'Size': '',
-                'Platform': '',
-                'Version': '',
+                "Name": "",
+                "Type": "",
+                "Size (Bytes)": 0,
+                "Size": "",
+                "Platform": "",
+                "Version": "",
             }
         ]
     grouped_aux: Dict[tuple[str, str], int] = {}
     for file in modules:
-        key = (str(file['Name']), str(file['Type']))
+        key = (str(file["Name"]), str(file["Type"]))
         grouped_aux[key] = grouped_aux.get(key, 0) + int(file["Size (Bytes)"])
     if i is None:
         return [
-            {'Name': name, 'Type': type, 'Size (Bytes)': size, 'Size': convert_size(size)}
+            {"Name": name, "Type": type, "Size (Bytes)": size, "Size": convert_size(size)}
             for (name, type), size in grouped_aux.items()
         ]
     else:
         return [
             {
-                'Name': name,
-                'Type': type,
-                'Size (Bytes)': size,
-                'Size': convert_size(size),
-                'Platform': platform,
-                'Version': version,
+                "Name": name,
+                "Type": type,
+                "Size (Bytes)": size,
+                "Size": convert_size(size),
+                "Platform": platform,
+                "Version": version,
             }
             for (name, type), size in grouped_aux.items()
         ]
@@ -213,7 +295,7 @@ def __enter__(self):
 
     def _run(self, command: str) -> List[str]:
         result = subprocess.run(command, shell=True, capture_output=True, text=True, check=True, cwd=self.repo_dir)
-        return result.stdout.strip().split('\n')
+        return result.stdout.strip().split("\n")
 
     def get_module_commits(
         self, module_path: str, initial: Optional[str], final: Optional[str], time: Optional[str]
diff --git a/ddev/src/ddev/cli/size/diff.py b/ddev/src/ddev/cli/size/diff.py
index 12e57c313aec4..091f5af8bfe73 100644
--- a/ddev/src/ddev/cli/size/diff.py
+++ b/ddev/src/ddev/cli/size/diff.py
@@ -188,7 +188,7 @@ def get_files(repo_path: str, compressed: bool) -> Dict[str, int]:
 
     ignored_files = {"datadog_checks_dev", "datadog_checks_tests_helper"}
     git_ignore = get_gitignore_files(repo_path)
-    included_folder = "datadog_checks/"
+    included_folder = "datadog_checks" + os.sep
 
     file_data = {}
     for root, _, files in os.walk(repo_path):
@@ -207,7 +207,7 @@ def get_files(repo_path: str, compressed: bool) -> Dict[str, int]:
 
 def get_dependencies(repo_path: str, platform: str, version: str, compressed: bool) -> Dict[str, int]:
 
-    resolved_path = os.path.join(repo_path, ".deps/resolved")
+    resolved_path = os.path.join(repo_path, ".deps", "resolved")
 
     for filename in os.listdir(resolved_path):
         file_path = os.path.join(resolved_path, filename)
diff --git a/ddev/src/ddev/cli/size/status.py b/ddev/src/ddev/cli/size/status.py
index ac0b545c481c5..5ea54ac76adc6 100644
--- a/ddev/src/ddev/cli/size/status.py
+++ b/ddev/src/ddev/cli/size/status.py
@@ -19,12 +19,13 @@
     group_modules,
     is_correct_dependency,
     is_valid_integration,
+    plot_treemap,
     print_csv,
     print_table,
     valid_platforms_versions,
 )
 
-REPO_PATH = Path(__file__).resolve().parents[5]
+# REPO_PATH = Path(__file__).resolve().parents[5]
 
 console = Console()
 
@@ -52,17 +53,19 @@ def status(app: Application, platform: Optional[str], version: Optional[str], co
             platforms = valid_platforms if platform is None else [platform]
             versions = valid_versions if version is None else [version]
             for i, (plat, ver) in enumerate([(p, v) for p in platforms for v in versions]):
-                status_mode(app, plat, ver, compressed, csv, i)
+                status_mode(app, repo_path, plat, ver, compressed, csv, i)
         else:
-            status_mode(app, platform, version, compressed, csv, None)
+            status_mode(app, repo_path, platform, version, compressed, csv, None)
 
     except Exception as e:
         app.abort(str(e))
 
 
-def status_mode(app: Application, platform: str, version: str, compressed: bool, csv: bool, i: Optional[int]) -> None:
+def status_mode(
+    app: Application, repo_path: Path, platform: str, version: str, compressed: bool, csv: bool, i: Optional[int]
+) -> None:
     with console.status("[cyan]Calculating sizes...", spinner="dots"):
-        modules = get_files(compressed) + get_dependencies(platform, version, compressed)
+        modules = get_files(compressed, repo_path) + get_dependencies(repo_path, platform, version, compressed)
     grouped_modules = group_modules(modules, platform, version, i)
     grouped_modules.sort(key=lambda x: x['Size (Bytes)'], reverse=True)
 
@@ -70,21 +73,22 @@ def status_mode(app: Application, platform: str, version: str, compressed: bool,
         print_csv(app, i, grouped_modules)
     else:
         print_table(app, "Status", grouped_modules)
+        plot_treemap(grouped_modules)
 
 
-def get_files(compressed: bool) -> List[Dict[str, Union[str, int]]]:
+def get_files(compressed: bool, repo_path: Path) -> List[Dict[str, Union[str, int]]]:
 
     ignored_files = {"datadog_checks_dev", "datadog_checks_tests_helper"}
-    git_ignore = get_gitignore_files(REPO_PATH)
-    included_folder = "datadog_checks/"
+    git_ignore = get_gitignore_files(repo_path)
+    included_folder = "datadog_checks" + os.sep
 
     file_data = []
-    for root, _, files in os.walk(REPO_PATH):
+    for root, _, files in os.walk(repo_path):
         for file in files:
             file_path = os.path.join(root, file)
 
             # Convert the path to a relative format within the repo
-            relative_path = os.path.relpath(file_path, REPO_PATH)
+            relative_path = os.path.relpath(file_path, repo_path)
 
             # Filter files
             if is_valid_integration(relative_path, included_folder, ignored_files, git_ignore):
@@ -101,9 +105,11 @@ def get_files(compressed: bool) -> List[Dict[str, Union[str, int]]]:
     return cast(List[Dict[str, Union[str, int]]], file_data)
 
 
-def get_dependencies(platform: str, version: str, compressed: bool) -> List[Dict[str, Union[str, int]]]:
+def get_dependencies(
+    repo_path: Path, platform: str, version: str, compressed: bool
+) -> List[Dict[str, Union[str, int]]]:
 
-    resolved_path = os.path.join(REPO_PATH, ".deps/resolved")
+    resolved_path = os.path.join(repo_path, ".deps", "resolved")
     for filename in os.listdir(resolved_path):
         file_path = os.path.join(resolved_path, filename)
         if os.path.isfile(file_path) and is_correct_dependency(platform, version, filename):
diff --git a/ddev/tests/cli/size/test_common.py b/ddev/tests/cli/size/test_common.py
index cd11c1ba8c41f..bed031f7e0260 100644
--- a/ddev/tests/cli/size/test_common.py
+++ b/ddev/tests/cli/size/test_common.py
@@ -1,3 +1,4 @@
+import os
 from unittest.mock import MagicMock, mock_open, patch
 
 from ddev.cli.size.common import (
@@ -14,6 +15,10 @@
 )
 
 
+def to_native_path(path: str) -> str:
+    return path.replace("/", os.sep)
+
+
 def test_valid_platforms_versions():
     filenames = [
         "linux-aarch64_3.12.txt",
@@ -33,7 +38,7 @@ def test_valid_platforms_versions():
     expected_platforms = {"linux-aarch64", "linux-x86_64", "macos-x86_64", "windows-x86_64"}
     expected_versions = {"3.12"}
     with patch("os.listdir", return_value=filenames):
-        platforms, versions = valid_platforms_versions("/tmp/fake_repo")
+        platforms, versions = valid_platforms_versions("fake_repo")
         assert platforms == expected_platforms
         assert versions == expected_versions
 
@@ -52,14 +57,16 @@ def test_convert_size():
 
 
 def test_is_valid_integration():
-    included_folder = "datadog_checks/"
+    included_folder = "datadog_checks" + os.sep
     ignored_files = {"datadog_checks_dev", "datadog_checks_tests_helper"}
     git_ignore = [".git", "__pycache__"]
 
-    assert is_valid_integration("datadog_checks/example.py", included_folder, ignored_files, git_ignore)
-    assert not is_valid_integration("__pycache__/file.py", included_folder, ignored_files, git_ignore)
-    assert not is_valid_integration("datadog_checks_dev/example.py", included_folder, ignored_files, git_ignore)
-    assert not is_valid_integration(".git/config", included_folder, ignored_files, git_ignore)
+    assert is_valid_integration(to_native_path("datadog_checks/example.py"), included_folder, ignored_files, git_ignore)
+    assert not is_valid_integration(to_native_path("__pycache__/file.py"), included_folder, ignored_files, git_ignore)
+    assert not is_valid_integration(
+        to_native_path("datadog_checks_dev/example.py"), included_folder, ignored_files, git_ignore
+    )
+    assert not is_valid_integration(to_native_path(".git/config"), included_folder, ignored_files, git_ignore)
 
 
 def test_get_dependencies_list():
@@ -126,12 +133,12 @@ def test_group_modules():
 
 
 def test_get_gitignore_files():
-    mock_gitignore = "__pycache__/\n*.log\n"  # Sample .gitignore file
-    repo_path = "/fake/repo"
+    mock_gitignore = f"__pycache__{os.sep}\n*.log\n"  # Sample .gitignore file
+    repo_path = "fake_repo"
     with patch("builtins.open", mock_open(read_data=mock_gitignore)):
         with patch("os.path.exists", return_value=True):
             ignored_patterns = get_gitignore_files(repo_path)
-    assert ignored_patterns == ["__pycache__/", "*.log"]
+    assert ignored_patterns == ["__pycache__" + os.sep, "*.log"]
 
 
 def test_compress():
@@ -140,7 +147,7 @@ def test_compress():
 
     m = mock_open(read_data=fake_content)
     with patch("builtins.open", m):
-        compressed_size = compress("fake/path/file.py")
+        compressed_size = compress(to_native_path("fake/path/file.py"))
 
     assert isinstance(compressed_size, int)
     assert compressed_size > 0
diff --git a/ddev/tests/cli/size/test_diff.py b/ddev/tests/cli/size/test_diff.py
index ab2dad24a32de..04649f6a5e441 100644
--- a/ddev/tests/cli/size/test_diff.py
+++ b/ddev/tests/cli/size/test_diff.py
@@ -2,6 +2,8 @@
 # All rights reserved
 # Licensed under a 3-clause BSD style license (see LICENSE)
 
+import os
+from pathlib import Path
 from unittest.mock import MagicMock, mock_open, patch
 
 import pytest
@@ -9,12 +11,16 @@
 from ddev.cli.size.diff import get_dependencies, get_diff, get_files
 
 
+def to_native_path(path: str) -> str:
+    return path.replace("/", os.sep)
+
+
 def test_get_compressed_files():
     mock_repo_path = "root"
 
     mock_files = [
-        ("root/integration/datadog_checks", [], ["file1.py", "file2.py"]),
-        ("root/integration_b/datadog_checks", [], ["file3.py"]),
+        (os.path.join("root", "integration", "datadog_checks"), [], ["file1.py", "file2.py"]),
+        (os.path.join("root", "integration_b", "datadog_checks"), [], ["file3.py"]),
         ("root", [], ["ignored.py"]),
     ]
 
@@ -25,7 +31,7 @@ def fake_compress(file_path):
 
     with (
         patch("os.walk", return_value=mock_files),
-        patch("os.path.relpath", side_effect=lambda path, _: path.replace("root/", "")),
+        patch("os.path.relpath", side_effect=lambda path, _: path.replace(f"root{os.sep}", "")),
         patch("os.path.exists", return_value=True),
         patch("builtins.open", mock_open(read_data="__pycache__/\n*.log\n")),
         patch("ddev.cli.size.diff.get_gitignore_files", return_value=fake_gitignore),
@@ -39,9 +45,9 @@ def fake_compress(file_path):
         result = get_files(mock_repo_path, True)
 
     expected = {
-        "integration/datadog_checks/file1.py": 1000,
-        "integration/datadog_checks/file2.py": 1000,
-        "integration_b/datadog_checks/file3.py": 1000,
+        to_native_path("integration/datadog_checks/file1.py"): 1000,
+        to_native_path("integration/datadog_checks/file2.py"): 1000,
+        to_native_path("integration_b/datadog_checks/file3.py"): 1000,
     }
 
     assert result == expected
@@ -86,31 +92,31 @@ def test_get_compressed_dependencies(terminal):
 
 def test_get_diff():
     size_before = {
-        "integration/foo.py": 1000,
-        "integration/bar.py": 2000,
-        "integration/deleted.py": 1500,
+        to_native_path("integration/foo.py"): 1000,
+        to_native_path("integration/bar.py"): 2000,
+        to_native_path("integration/deleted.py"): 1500,
     }
     size_after = {
-        "integration/foo.py": 1200,  # modified
-        "integration/bar.py": 2000,  # unchanged
-        "integration/new.py": 800,  # new
+        to_native_path("integration/foo.py"): 1200,
+        to_native_path("integration/bar.py"): 2000,
+        to_native_path("integration/new.py"): 800,
     }
 
     expected = [
         {
-            "File Path": "integration/foo.py",
+            "File Path": to_native_path("integration/foo.py"),
             "Type": "Integration",
             "Name": "integration",
             "Size (Bytes)": 200,
         },
         {
-            "File Path": "integration/deleted.py",
+            "File Path": to_native_path("integration/deleted.py"),
             "Type": "Integration",
             "Name": "integration (DELETED)",
             "Size (Bytes)": -1500,
         },
         {
-            "File Path": "integration/new.py",
+            "File Path": to_native_path("integration/new.py"),
             "Type": "Integration",
             "Name": "integration (NEW)",
             "Size (Bytes)": 800,
@@ -124,7 +130,7 @@ def test_get_diff():
 @pytest.fixture
 def mock_size_diff_dependencies():
     mock_git_repo = MagicMock()
-    mock_git_repo.repo_dir = "/tmp/fake_repo"
+    mock_git_repo.repo_dir = "fake_repo"
 
     def get_compressed_files_side_effect(_, __):
         get_compressed_files_side_effect.counter += 1
@@ -152,7 +158,7 @@ def get_compressed_dependencies_side_effect(_, __, ___, ____):
         patch("ddev.cli.size.diff.GitRepo.__enter__", return_value=mock_git_repo),
         patch("ddev.cli.size.diff.GitRepo.__exit__", return_value=None),
         patch("ddev.cli.size.diff.GitRepo.checkout_commit"),
-        patch("tempfile.mkdtemp", return_value="/tmp/fake_repo"),
+        patch("tempfile.mkdtemp", return_value="fake_repo"),
         patch("ddev.cli.size.diff.get_files", side_effect=get_compressed_files_side_effect),
         patch("ddev.cli.size.diff.get_dependencies", side_effect=get_compressed_dependencies_side_effect),
         patch("ddev.cli.size.common.group_modules", side_effect=lambda m, *_: m),
@@ -164,9 +170,6 @@ def get_compressed_dependencies_side_effect(_, __, ___, ____):
 
 def test_diff_no_args(ddev, mock_size_diff_dependencies):
     result = ddev('size', 'diff', 'commit1', 'commit2', '--compressed')
-    print("Exit code:", result.exit_code)
-    print("Output:\n", result.output)
-    print("Exception:", result.exception)
     assert result.exit_code == 0
 
 
@@ -186,7 +189,7 @@ def test_diff_csv(ddev, mock_size_diff_dependencies):
 
 def test_diff_no_differences(ddev):
     fake_repo = MagicMock()
-    fake_repo.repo_dir = "/tmp/fake_repo"
+    fake_repo.repo_dir = "fake_repo"
 
     with (
         patch("ddev.cli.size.diff.GitRepo.__enter__", return_value=fake_repo),
@@ -196,7 +199,7 @@ def test_diff_no_differences(ddev):
         ),
         patch("ddev.cli.size.diff.GitRepo.__exit__", return_value=None),
         patch.object(fake_repo, "checkout_commit"),
-        patch("tempfile.mkdtemp", return_value="/tmp/fake_repo"),
+        patch("tempfile.mkdtemp", return_value="fake_repo"),
         patch("os.path.exists", return_value=True),
         patch("os.path.isdir", return_value=True),
         patch("os.path.isfile", return_value=True),
@@ -228,7 +231,7 @@ def test_diff_no_differences(ddev):
 
 def test_diff_invalid_platform(ddev):
     mock_git_repo = MagicMock()
-    mock_git_repo.repo_dir = "/tmp/fake_repo"
+    mock_git_repo.repo_dir = "fake_repo"
     mock_git_repo.get_module_commits.return_value = ["commit1", "commit2"]
     mock_git_repo.get_commit_metadata.side_effect = lambda c: ("Apr 4 2025", "Fix dep", c)
     mock_git_repo.__enter__.return_value = mock_git_repo
@@ -239,15 +242,13 @@ def test_diff_invalid_platform(ddev):
             return_value=({'linux-x86_64', 'macos-x86_64', 'linux-aarch64', 'windows-x86_64'}, {'3.12'}),
         ),
     ):
-        result = ddev(
-            'size', 'diff', 'commit1', 'commit2', '--platform', 'linux', '--python', '3.12', '--compressed'  # inválido
-        )
+        result = ddev('size', 'diff', 'commit1', 'commit2', '--platform', 'linux', '--python', '3.12', '--compressed')
         assert result.exit_code != 0
 
 
 def test_diff_invalid_version(ddev):
     mock_git_repo = MagicMock()
-    mock_git_repo.repo_dir = "/tmp/fake_repo"
+    mock_git_repo.repo_dir = "fake_repo"
     mock_git_repo.get_module_commits.return_value = ["commit1", "commit2"]
     mock_git_repo.get_commit_metadata.side_effect = lambda c: ("Apr 4 2025", "Fix dep", c)
     mock_git_repo.__enter__.return_value = mock_git_repo
@@ -275,7 +276,7 @@ def test_diff_invalid_version(ddev):
 
 def test_diff_invalid_platform_and_version(ddev):
     mock_git_repo = MagicMock()
-    mock_git_repo.repo_dir = "/tmp/fake_repo"
+    mock_git_repo.repo_dir = "fake_repo"
     mock_git_repo.get_module_commits.return_value = ["commit1", "commit2"]
     mock_git_repo.get_commit_metadata.side_effect = lambda c: ("Apr 4 2025", "Fix dep", c)
     mock_git_repo.__enter__.return_value = mock_git_repo
diff --git a/ddev/tests/cli/size/test_status.py b/ddev/tests/cli/size/test_status.py
index 2825afa5e7d43..28697345ab783 100644
--- a/ddev/tests/cli/size/test_status.py
+++ b/ddev/tests/cli/size/test_status.py
@@ -3,6 +3,7 @@
 # Licensed under a 3-clause BSD style license (see LICENSE)
 
 import os
+from pathlib import Path
 from unittest.mock import MagicMock, mock_open, patch
 
 import pytest
@@ -23,6 +24,7 @@ def test_get_files_compressed():
         (os.path.join("root", "integration_b", "datadog_checks"), [], ["file3.py"]),
         ("root", [], ["ignored.py"]),
     ]
+    mock_repo_path = "root"
 
     def fake_compress(file_path):
         return 1000
@@ -39,8 +41,7 @@ def fake_compress(file_path):
         ),
         patch("ddev.cli.size.status.compress", side_effect=fake_compress),
     ):
-
-        result = get_files(True)
+        result = get_files(True, mock_repo_path)
 
     expected = [
         {
@@ -77,6 +78,7 @@ def test_get_compressed_dependencies():
     mock_response = MagicMock()
     mock_response.status_code = 200
     mock_response.headers = {"Content-Length": "12345"}
+    mock_repo_path = "root"
 
     with (
         patch("os.path.exists", return_value=True),
@@ -86,8 +88,7 @@ def test_get_compressed_dependencies():
         patch("builtins.open", mock_open(read_data=fake_file_content)),
         patch("requests.head", return_value=mock_response),
     ):
-
-        file_data = get_dependencies(platform, version, True)
+        file_data = get_dependencies(mock_repo_path, platform, version, True)
 
     assert file_data == [
         {"File Path": "dependency1", "Type": "Dependency", "Name": "dependency1", "Size (Bytes)": 12345},
@@ -97,52 +98,52 @@ def test_get_compressed_dependencies():
 
 @pytest.fixture()
 def mock_size_status():
+    fake_repo_path = Path("fake_root").resolve()
+
+    mock_walk = [(os.path.join(str(fake_repo_path), "datadog_checks", "my_check"), [], ["__init__.py"])]
+
+    mock_app = MagicMock()
+    mock_app.repo.path = fake_repo_path
+
     with (
+        patch("ddev.cli.size.status.get_gitignore_files", return_value=set()),
         patch(
             "ddev.cli.size.status.valid_platforms_versions",
-            return_value=({'linux-x86_64', 'macos-x86_64', 'linux-aarch64', 'windows-x86_64'}, {'3.12'}),
+            return_value=({"linux-x86_64", "macos-x86_64", "linux-aarch64", "windows-x86_64"}, {"3.12"}),
         ),
-        patch("ddev.cli.size.status.get_gitignore_files", return_value=set()),
         patch("ddev.cli.size.status.compress", return_value=1234),
         patch(
             "ddev.cli.size.status.get_dependencies_list", return_value=(["dep1"], {"dep1": "https://example.com/dep1"})
         ),
         patch(
             "ddev.cli.size.status.get_dependencies_sizes",
-            return_value=[
-                {"File Path": "dep1.whl", "Type": "Dependency", "Name": "dep1", "Size (Bytes)": 5678},
-            ],
+            return_value=[{"File Path": "dep1.whl", "Type": "Dependency", "Name": "dep1", "Size (Bytes)": 5678}],
         ),
         patch("ddev.cli.size.status.is_valid_integration", return_value=True),
         patch("ddev.cli.size.status.is_correct_dependency", return_value=True),
         patch("ddev.cli.size.status.print_csv"),
         patch("ddev.cli.size.status.print_table"),
-        patch(
-            "os.walk",
-            return_value=[
-                ("datadog_checks/my_check", [], ["__init__.py"]),
-            ],
-        ),
+        patch("ddev.cli.size.status.plot_treemap"),
+        patch("os.walk", return_value=mock_walk),
         patch("os.listdir", return_value=["fake_dep.whl"]),
         patch("os.path.isfile", return_value=True),
     ):
-        yield
+        yield mock_app
 
 
 def test_status_no_args(ddev, mock_size_status):
-    result = ddev('size', 'status', '--compressed')
-    print(result.output)
+    result = ddev("size", "status", "--compressed")
     assert result.exit_code == 0
 
 
 def test_status(ddev, mock_size_status):
-    result = ddev('size', 'status', '--platform', 'linux-aarch64', '--python', '3.12', '--compressed')
+    result = ddev("size", "status", "--platform", "linux-aarch64", "--python", "3.12", "--compressed")
     print(result.output)
     assert result.exit_code == 0
 
 
 def test_status_csv(ddev, mock_size_status):
-    result = ddev('size', 'status', '--platform', 'linux-aarch64', '--python', '3.12', '--compressed', '--csv')
+    result = ddev("size", "status", "--platform", "linux-aarch64", "--python", "3.12", "--compressed", "--csv")
     print(result.output)
     assert result.exit_code == 0
 
@@ -150,25 +151,25 @@ def test_status_csv(ddev, mock_size_status):
 def test_status_wrong_platform(ddev):
     with patch(
         "ddev.cli.size.timeline.valid_platforms_versions",
-        return_value=({'linux-x86_64', 'macos-x86_64', 'linux-aarch64', 'windows-x86_64'}, {'3.12'}),
+        return_value=({"linux-x86_64", "macos-x86_64", "linux-aarch64", "windows-x86_64"}, {"3.12"}),
     ):
-        result = ddev('size', 'status', '--platform', 'linux', '--python', '3.12', '--compressed')
+        result = ddev("size", "status", "--platform", "linux", "--python", "3.12", "--compressed")
         assert result.exit_code != 0
 
 
 def test_status_wrong_version(ddev):
     with patch(
         "ddev.cli.size.timeline.valid_platforms_versions",
-        return_value=({'linux-x86_64', 'macos-x86_64', 'linux-aarch64', 'windows-x86_64'}, {'3.12'}),
+        return_value=({"linux-x86_64", "macos-x86_64", "linux-aarch64", "windows-x86_64"}, {"3.12"}),
     ):
-        result = ddev('size', 'status', '--platform', 'linux-aarch64', '--python', '2.10', '--compressed')
+        result = ddev("size", "status", "--platform", "linux-aarch64", "--python", "2.10", "--compressed")
         assert result.exit_code != 0
 
 
 def test_status_wrong_plat_and_version(ddev):
     with patch(
         "ddev.cli.size.timeline.valid_platforms_versions",
-        return_value=({'linux-x86_64', 'macos-x86_64', 'linux-aarch64', 'windows-x86_64'}, {'3.12'}),
+        return_value=({"linux-x86_64", "macos-x86_64", "linux-aarch64", "windows-x86_64"}, {"3.12"}),
     ):
-        result = ddev('size', 'status', '--platform', 'linux', '--python', '2.10', '--compressed')
+        result = ddev("size", "status", "--platform", "linux", "--python", "2.10", "--compressed")
         assert result.exit_code != 0
diff --git a/ddev/tests/cli/size/test_timeline.py b/ddev/tests/cli/size/test_timeline.py
index a55c1e8851333..0f3fb40d8d147 100644
--- a/ddev/tests/cli/size/test_timeline.py
+++ b/ddev/tests/cli/size/test_timeline.py
@@ -1,4 +1,6 @@
+import os
 from datetime import datetime
+from pathlib import Path
 from unittest.mock import MagicMock, mock_open, patch
 
 import pytest
@@ -17,16 +19,14 @@
 
 def test_get_compressed_files():
     with (
-        patch("os.walk", return_value=[("/tmp/fake_repo/int1", [], ["int1.py"])]),
-        patch("os.path.relpath", return_value="int1/int1.py"),
+        patch("os.walk", return_value=[(os.path.join("fake_repo", "int1"), [], ["int1.py"])]),
+        patch("os.path.relpath", return_value=os.path.join("int1", "int1.py")),
         patch("os.path.exists", return_value=True),
         patch("ddev.cli.size.timeline.get_gitignore_files", return_value=set()),
         patch("ddev.cli.size.timeline.is_valid_integration", return_value=True),
         patch("ddev.cli.size.timeline.compress", return_value=1234),
     ):
-        result = get_files(
-            "/tmp/fake_repo", "int1", "abc1234", datetime(2025, 4, 4).date(), "auth", "Added int1", [], True
-        )
+        result = get_files("fake_repo", "int1", "abc1234", datetime(2025, 4, 4).date(), "auth", "Added int1", [], True)
         assert result == [
             {
                 "Size (Bytes)": 1234,
@@ -39,7 +39,7 @@ def test_get_compressed_files():
 
 
 def test_get_compressed_files_deleted_only():
-    repo_path = "/tmp/fake_repo"
+    repo_path = "fake_repo"
     module = "foo"
     commit = "abc1234"
     date = datetime.strptime("Apr 5 2025", "%b %d %Y").date()
@@ -49,7 +49,7 @@ def test_get_compressed_files_deleted_only():
     with (
         patch("ddev.cli.size.timeline.get_gitignore_files", return_value=set()),
         patch("os.walk", return_value=[]),
-        patch("os.path.relpath", side_effect=lambda path, _: path.replace(f"{repo_path}/", "")),
+        patch("os.path.relpath", side_effect=lambda path, _: path.replace(f"{repo_path}{os.sep}", "")),
         patch("os.path.exists", return_value=False),
     ):
         file_data = get_files(repo_path, module, commit, date, author, message, [], True)
@@ -153,7 +153,7 @@ def test_get_dependency():
     content = """dep1 @ https://example.com/dep1.whl
 dep2 @ https://example.com/dep2.whl"""
     with patch("builtins.open", mock_open(read_data=content)):
-        url = get_dependency("some/path/file.txt", "dep2")
+        url = get_dependency(Path("some") / "path" / "file.txt", "dep2")
         assert url == "https://example.com/dep2.whl"
 
 
@@ -190,7 +190,7 @@ def test_get_compressed_dependencies():
         patch("ddev.cli.size.timeline.requests.head", return_value=make_mock_response("12345")),
     ):
         result = get_dependencies(
-            "/tmp/fake_repo", "dep1", "linux-x86_64", "abc1234", datetime(2025, 4, 4).date(), "auth", "Added dep1", True
+            "fake_repo", "dep1", "linux-x86_64", "abc1234", datetime(2025, 4, 4).date(), "auth", "Added dep1", True
         )
         assert result == {
             "Size (Bytes)": 12345,
@@ -204,7 +204,7 @@ def test_get_compressed_dependencies():
 @pytest.fixture
 def mock_timeline_gitrepo():
     mock_git_repo = MagicMock()
-    mock_git_repo.repo_dir = "/tmp/fake_repo"
+    mock_git_repo.repo_dir = "fake_repo"
     mock_git_repo.get_module_commits.return_value = ["commit1", "commit2"]
     mock_git_repo.get_creation_commit_module.return_value = "commit1"
     mock_git_repo.get_commit_metadata.side_effect = lambda c: ("Apr 4 2025", "Initial commit", c)
@@ -215,7 +215,7 @@ def mock_timeline_gitrepo():
         patch("ddev.cli.size.timeline.GitRepo.sparse_checkout_commit"),
         patch("ddev.cli.size.timeline.get_gitignore_files", return_value=set()),
         patch("ddev.cli.size.timeline.compress", return_value=1234),
-        patch("os.walk", return_value=[("/tmp/fake_repo/int", [], ["file1.py"])]),
+        patch("os.walk", return_value=[(Path("/tmp") / "fake_repo" / "int", [], ["file1.py"])]),
         patch("os.path.exists", return_value=True),
         patch("ddev.cli.size.timeline.group_modules", side_effect=lambda m, *_: m),
         patch("ddev.cli.size.timeline.trim_modules", side_effect=lambda m, *_: m),
@@ -232,7 +232,7 @@ def mock_timeline_gitrepo():
 @pytest.fixture
 def app():
     mock_app = MagicMock()
-    mock_app.repo.path = "/tmp/fake_repo"
+    mock_app.repo.path = "fake_repo"
     return mock_app
 
 
@@ -244,7 +244,7 @@ def test_timeline_integration_compressed(ddev, mock_timeline_gitrepo, app):
 @pytest.fixture
 def mock_timeline_dependencies():
     mock_git_repo = MagicMock()
-    mock_git_repo.repo_dir = "/tmp/fake_repo"
+    mock_git_repo.repo_dir = "fake_repo"
     mock_git_repo.get_module_commits.return_value = ["commit1", "commit2"]
     mock_git_repo.get_commit_metadata.side_effect = lambda c: ("Apr 4 2025", "Fix dep", c)
 
@@ -295,7 +295,7 @@ def test_timeline_dependency_compressed(ddev, mock_timeline_dependencies, app):
 
 def test_timeline_invalid_platform(ddev):
     mock_git_repo = MagicMock()
-    mock_git_repo.repo_dir = "/tmp/fake_repo"
+    mock_git_repo.repo_dir = "fake_repo"
     mock_git_repo.get_module_commits.return_value = ["commit1", "commit2"]
     mock_git_repo.get_commit_metadata.side_effect = lambda c: ("Apr 4 2025", "Fix dep", c)
     mock_git_repo.__enter__.return_value = mock_git_repo
@@ -325,7 +325,7 @@ def test_timeline_invalid_platform(ddev):
 
 def test_timeline_no_changes_in_integration(ddev):
     mock_git_repo = MagicMock()
-    mock_git_repo.repo_dir = "/tmp/fake_repo"
+    mock_git_repo.repo_dir = "fake_repo"
     mock_git_repo.get_module_commits.return_value = [""]
 
     with (
@@ -342,7 +342,7 @@ def test_timeline_no_changes_in_integration(ddev):
 
 def test_timeline_integration_not_found(ddev):
     mock_repo = MagicMock()
-    mock_repo.repo_dir = "/fake"
+    mock_repo.repo_dir = "fake"
     mock_repo.get_module_commits.return_value = [""]
     mock_repo.get_creation_commit_module.return_value = "c1"
     mock_repo.checkout_commit.return_value = None
@@ -363,7 +363,7 @@ def test_timeline_integration_not_found(ddev):
 
 def test_timeline_dependency_missing_no_platform(ddev):
     mock_repo = MagicMock()
-    mock_repo.repo_dir = "/fake"
+    mock_repo.repo_dir = "fake"
     mock_repo.get_module_commits.return_value = ["c1"]
     mock_repo.get_creation_commit_module.return_value = "c1"
     mock_repo.checkout_commit.return_value = None
@@ -381,7 +381,7 @@ def test_timeline_dependency_missing_no_platform(ddev):
 
 def test_timeline_dependency_missing_for_platform(ddev, app):
     mock_repo = MagicMock()
-    mock_repo.repo_dir = "/fake"
+    mock_repo.repo_dir = "fake"
     mock_repo.get_module_commits.return_value = ["c1"]
     mock_repo.get_creation_commit_module.return_value = "c1"
     mock_repo.checkout_commit.return_value = None
@@ -413,7 +413,7 @@ def test_timeline_dependency_missing_for_platform(ddev, app):
 
 def test_timeline_dependency_no_changes(ddev, app):
     mock_repo = MagicMock()
-    mock_repo.repo_dir = "/fake"
+    mock_repo.repo_dir = "fake"
     mock_repo.get_module_commits.return_value = [""]
     mock_repo.get_creation_commit_module.return_value = "c1"
     mock_repo.checkout_commit.return_value = None

From 179172823d1edbb0ccce425f0c46ef52b2d7bd3d Mon Sep 17 00:00:00 2001
From: Lucia Sanchez Bella <lucia.sanchezbella@datadoghq.com>
Date: Tue, 22 Apr 2025 12:22:54 +0200
Subject: [PATCH 32/40] Fix tests
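
Decide treemap label visibility from the rectangle width and height
instead of a fixed area threshold, and drop the unused Path import
from the diff tests.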

---
 ddev/src/ddev/cli/size/common.py | 26 +++++++++++++++-----------
 ddev/tests/cli/size/test_diff.py |  1 -
 2 files changed, 15 insertions(+), 12 deletions(-)

diff --git a/ddev/src/ddev/cli/size/common.py b/ddev/src/ddev/cli/size/common.py
index 25c0521e6ecc1..669180debaa59 100644
--- a/ddev/src/ddev/cli/size/common.py
+++ b/ddev/src/ddev/cli/size/common.py
@@ -86,7 +86,7 @@ def print_table(app: Application, mode: str, modules: List[Dict[str, Union[str,
     app.display_table(mode, modules_table)
 
 
-def plot_treemap(modules):
+def plot_treemap(modules: List[Dict[str, Union[str, int, date]]]) -> None:
     sizes = [mod["Size (Bytes)"] for mod in modules]
 
     integrations = [mod for mod in modules if mod["Type"] == "Integration"]
@@ -104,7 +104,6 @@ def normalize(mods):
     norm_int = normalize(integrations)
     norm_dep = normalize(dependencies)
 
-    # Use lighter color range: from 0.3 to 0.85
     def scale(val, vmin=0.3, vmax=0.85):
         return vmin + val * (vmax - vmin)
 
@@ -115,7 +114,7 @@ def scale(val, vmin=0.3, vmax=0.85):
     for mod in modules:
         if mod["Type"] == "Integration":
             idx = integrations.index(mod)
-            colors.append(cmap_int(scale(norm_int[idx], 0.6, 0.85)))  # lighter start for integrations
+            colors.append(cmap_int(scale(norm_int[idx], 0.6, 0.85)))
         elif mod["Type"] == "Dependency":
             idx = dependencies.index(mod)
             colors.append(cmap_dep(scale(norm_dep[idx], 0.3, 0.85)))
@@ -135,18 +134,23 @@ def scale(val, vmin=0.3, vmax=0.85):
 
         area = dx * dy
         font_size = max(6, min(18, area / 100))
-
-        if area > 400:
-            label = f"{mod['Name']}\n({mod['Size']})"
-        elif area > 40:
-            label = f"{mod['Name']}"
+        name = mod["Name"]
+        size_str = f"({mod['Size']})"
+
+        label = ""
+        name_fits = 0.5 * (len(name) + 2) < dx
+        size_fits = 0.5 * (len(size_str) + 2) < dx
+        both_fit = 5 < dy
+
+        if name_fits and size_fits and both_fit:
+            label = f"{name}\n{size_str}"
+        elif name_fits:
+            label = name
         else:
             label = None
 
         if label:
-            ax.text(
-                x + dx / 2, y + dy / 2, label, va="center", ha="center", fontsize=font_size, color="black", wrap=True
-            )
+            ax.text(x + dx / 2, y + dy / 2, label, va="center", ha="center", fontsize=font_size, color="black")
 
     ax.set_xlim(0, 100)
     ax.set_ylim(0, 100)
diff --git a/ddev/tests/cli/size/test_diff.py b/ddev/tests/cli/size/test_diff.py
index 04649f6a5e441..06c2ba443989f 100644
--- a/ddev/tests/cli/size/test_diff.py
+++ b/ddev/tests/cli/size/test_diff.py
@@ -3,7 +3,6 @@
 # Licensed under a 3-clause BSD style license (see LICENSE)
 
 import os
-from pathlib import Path
 from unittest.mock import MagicMock, mock_open, patch
 
 import pytest

From c2328680f392b7f3b8bede1943280544bf5268f1 Mon Sep 17 00:00:00 2001
From: Lucia Sanchez Bella <lucia.sanchezbella@datadoghq.com>
Date: Tue, 22 Apr 2025 12:30:18 +0200
Subject: [PATCH 33/40] Fix tests
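
Comment out the matplotlib/squarify imports and the plot_treemap body
so importing the size commands does not require the plotting
dependencies.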

---
 ddev/src/ddev/cli/size/common.py | 10 ++++++----
 1 file changed, 6 insertions(+), 4 deletions(-)

diff --git a/ddev/src/ddev/cli/size/common.py b/ddev/src/ddev/cli/size/common.py
index 669180debaa59..86fb0f69ab9d6 100644
--- a/ddev/src/ddev/cli/size/common.py
+++ b/ddev/src/ddev/cli/size/common.py
@@ -13,12 +13,12 @@
 from types import TracebackType
 from typing import Dict, List, Optional, Set, Tuple, Type, Union, cast
 
-import matplotlib.cm as cm
-import matplotlib.pyplot as plt
+# import matplotlib.cm as cm
+# import matplotlib.pyplot as plt
 import requests
-import squarify
-from matplotlib.patches import Patch
 
+# import squarify
+# from matplotlib.patches import Patch
 from ddev.cli.application import Application
 
 
@@ -87,6 +87,7 @@ def print_table(app: Application, mode: str, modules: List[Dict[str, Union[str,
 
 
 def plot_treemap(modules: List[Dict[str, Union[str, int, date]]]) -> None:
+    '''
     sizes = [mod["Size (Bytes)"] for mod in modules]
 
     integrations = [mod for mod in modules if mod["Type"] == "Integration"]
@@ -166,6 +167,7 @@ def scale(val, vmin=0.3, vmax=0.85):
     plt.subplots_adjust(right=0.8)
     plt.tight_layout()
     plt.show()
+    '''
 
 
 def get_dependencies_sizes(

From b02658d42eb6d638e28df58de81c6c426da313e1 Mon Sep 17 00:00:00 2001
From: Lucia Sanchez Bella <lucia.sanchezbella@datadoghq.com>
Date: Tue, 22 Apr 2025 14:31:42 +0200
Subject: [PATCH 34/40] fix windows tests
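
Patch os.path.relpath in the status fixture so the fake_root prefix is
stripped with the native separator.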

---
 ddev/tests/cli/size/test_status.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/ddev/tests/cli/size/test_status.py b/ddev/tests/cli/size/test_status.py
index 28697345ab783..967018f8b7c81 100644
--- a/ddev/tests/cli/size/test_status.py
+++ b/ddev/tests/cli/size/test_status.py
@@ -119,6 +119,7 @@ def mock_size_status():
             "ddev.cli.size.status.get_dependencies_sizes",
             return_value=[{"File Path": "dep1.whl", "Type": "Dependency", "Name": "dep1", "Size (Bytes)": 5678}],
         ),
+        patch("os.path.relpath", side_effect=lambda path, _: path.replace(f"fake_root{os.sep}", "")),
         patch("ddev.cli.size.status.is_valid_integration", return_value=True),
         patch("ddev.cli.size.status.is_correct_dependency", return_value=True),
         patch("ddev.cli.size.status.print_csv"),

From 0fcf1628bfea44b459cdb901d095de706a8118ac Mon Sep 17 00:00:00 2001
From: Lucia Sanchez Bella <lucia.sanchezbella@datadoghq.com>
Date: Wed, 23 Apr 2025 10:59:29 +0200
Subject: [PATCH 35/40] Final visualizations
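
Extend plot_treemap with a diff mode, PNG export and an optional GUI
window, and expose --show_gui and --save_to_png_path on the status and
diff commands. Illustrative invocations (paths and commits are
examples):

    ddev size status --platform linux-aarch64 --python 3.12 --compressed --show_gui
    ddev size diff commit1 commit2 --compressed --save_to_png_path treemap.png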

---
 ddev/src/ddev/cli/size/common.py     | 203 ++++++++++++++++++++-------
 ddev/src/ddev/cli/size/diff.py       |  50 ++++++-
 ddev/src/ddev/cli/size/status.py     |  40 +++++-
 ddev/src/ddev/cli/size/timeline.py   |  52 ++++++-
 ddev/tests/cli/size/test_diff.py     |   1 +
 ddev/tests/cli/size/test_timeline.py |   1 +
 6 files changed, 287 insertions(+), 60 deletions(-)

diff --git a/ddev/src/ddev/cli/size/common.py b/ddev/src/ddev/cli/size/common.py
index 86fb0f69ab9d6..c6a01615c79bd 100644
--- a/ddev/src/ddev/cli/size/common.py
+++ b/ddev/src/ddev/cli/size/common.py
@@ -11,14 +11,16 @@
 from datetime import date
 from pathlib import Path
 from types import TracebackType
-from typing import Dict, List, Optional, Set, Tuple, Type, Union, cast
+from typing import Dict, List, Literal, Optional, Set, Tuple, Type, Union, cast
 
-# import matplotlib.cm as cm
-# import matplotlib.pyplot as plt
+import matplotlib.cm as cm
+
+# import matplotlib.patheffects as path_effects
+import matplotlib.pyplot as plt
 import requests
+import squarify
+from matplotlib.patches import Patch
 
-# import squarify
-# from matplotlib.patches import Patch
 from ddev.cli.application import Application
 
 
@@ -86,62 +88,151 @@ def print_table(app: Application, mode: str, modules: List[Dict[str, Union[str,
     app.display_table(mode, modules_table)
 
 
-def plot_treemap(modules: List[Dict[str, Union[str, int, date]]]) -> None:
-    '''
-    sizes = [mod["Size (Bytes)"] for mod in modules]
+def plot_treemap(
+    modules: List[Dict[str, Union[str, int, date]]],
+    title: str,
+    show: bool,
+    mode: Literal["status", "diff"] = "status",
+    path: Optional[str] = None,
+) -> None:
+    # Always use absolute value for sizing
+    sizes = [abs(mod["Size (Bytes)"]) for mod in modules]
 
-    integrations = [mod for mod in modules if mod["Type"] == "Integration"]
-    dependencies = [mod for mod in modules if mod["Type"] == "Dependency"]
+    # Setup figure
+    plt.figure(figsize=(12, 8))
+    ax = plt.gca()
+    ax.set_axis_off()
 
-    def normalize(mods):
-        if not mods:
-            return []
-        sizes = [mod["Size (Bytes)"] for mod in mods]
-        min_size = min(sizes)
-        max_size = max(sizes)
-        range_size = max_size - min_size or 1
-        return [(s - min_size) / range_size for s in sizes]
+    # Compute layout
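+    # squarify scales the sizes onto the 100x100 canvas and returns rectangles as dicts with x, y, dx, dy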
+    rects = squarify.normalize_sizes(sizes, 100, 100)
+    rects = squarify.squarify(rects, 0, 0, 100, 100)
 
-    norm_int = normalize(integrations)
-    norm_dep = normalize(dependencies)
+    colors = []
 
-    def scale(val, vmin=0.3, vmax=0.85):
-        return vmin + val * (vmax - vmin)
+    if mode == "status":
+        # Normalization by type
+        integrations = [mod for mod in modules if mod["Type"] == "Integration"]
+        dependencies = [mod for mod in modules if mod["Type"] == "Dependency"]
+
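+        # Min-max normalize sizes within each type so colormap intensity reflects relative size per group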
+        def normalize(mods):
+            if not mods:
+                return []
+            sizes = [mod["Size (Bytes)"] for mod in mods]
+            min_size = min(sizes)
+            max_size = max(sizes)
+            range_size = max_size - min_size or 1
+            return [(s - min_size) / range_size for s in sizes]
+
+        norm_int = normalize(integrations)
+        norm_dep = normalize(dependencies)
+
+        def scale(val, vmin=0.3, vmax=0.85):
+            return vmin + val * (vmax - vmin)
+
+        cmap_int = cm.get_cmap("Purples")
+        cmap_dep = cm.get_cmap("Reds")
+
+        for mod in modules:
+            if mod["Type"] == "Integration":
+                idx = integrations.index(mod)
+                colors.append(cmap_int(scale(norm_int[idx], 0.3, 0.6)))
+            elif mod["Type"] == "Dependency":
+                idx = dependencies.index(mod)
+                colors.append(cmap_dep(scale(norm_dep[idx], 0.3, 0.85)))
+            else:
+                colors.append("#999999")
 
-    cmap_int = cm.get_cmap("Purples")
-    cmap_dep = cm.get_cmap("Reds")
+    elif mode == "diff":
+        '''
+        # ------- BOTH POSITIVE AND NEGATIVE IN THE SAME TREEMAP --------
+        # Reds for positive, Greens for negative
+        cmap_pos = cm.get_cmap("Reds")
+        cmap_neg = cm.get_cmap("Greens")
 
-    colors = []
-    for mod in modules:
-        if mod["Type"] == "Integration":
-            idx = integrations.index(mod)
-            colors.append(cmap_int(scale(norm_int[idx], 0.6, 0.85)))
-        elif mod["Type"] == "Dependency":
-            idx = dependencies.index(mod)
-            colors.append(cmap_dep(scale(norm_dep[idx], 0.3, 0.85)))
+        max_size = max(abs(mod["Size (Bytes)"]) for mod in modules)
+
+        for mod in modules:
+            value = mod["Size (Bytes)"]
+            intensity = abs(value) / max_size
+            color = cmap_pos(intensity) if value > 0 else cmap_neg(intensity)
+            colors.append(color)
+
+        '''
+        cmap_pos = cm.get_cmap("Oranges")
+        cmap_neg = cm.get_cmap("Blues")
+
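+        # Split increases and decreases; the larger side fills its half of the canvas and the other is scaled to it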
+        positives = [mod for mod in modules if mod["Size (Bytes)"] > 0]
+        negatives = [mod for mod in modules if mod["Size (Bytes)"] < 0]
+
+        sizes_pos = [mod["Size (Bytes)"] for mod in positives]
+        sizes_neg = [abs(mod["Size (Bytes)"]) for mod in negatives]
+
+        sum_pos = sum(sizes_pos)
+        sum_neg = sum(sizes_neg)
+
+        canvas_area = 50 * 100  # each half has same max area
+
+        # Determine which side is dominant (fills fully)
+        if sum_pos >= sum_neg:
+            # Increases fill the right half; decreases are scaled relative to them on the left
+            norm_sizes_pos = [s / sum_pos * canvas_area for s in sizes_pos]
+            norm_sizes_neg = [s / sum_pos * canvas_area for s in sizes_neg]
+            rects_pos = squarify.squarify(norm_sizes_pos, 50, 0, 50, 100)
+            rects_neg = squarify.squarify(norm_sizes_neg, 0, 0, 50, 100)
         else:
-            colors.append("#999999")
+            # Decreases fill the left half; increases are scaled relative to them on the right
+            norm_sizes_neg = [s / sum_neg * canvas_area for s in sizes_neg]
+            norm_sizes_pos = [s / sum_neg * canvas_area for s in sizes_pos]
+            rects_neg = squarify.squarify(norm_sizes_neg, 0, 0, 50, 100)
+            rects_pos = squarify.squarify(norm_sizes_pos, 50, 0, 50, 100)
 
-    plt.figure(figsize=(12, 8))
-    ax = plt.gca()
-    ax.set_axis_off()
+        rects = rects_neg + rects_pos
+        modules = negatives + positives
 
-    rects = squarify.normalize_sizes(sizes, 100, 100)
-    rects = squarify.squarify(rects, 0, 0, 100, 100)
+        # Compute color intensities for each rectangle
+        def rescale_intensity(val, min_val=0.3, max_val=0.8):
+            return min_val + (max_val - min_val) * val
+
+        max_size = max(sizes_pos + sizes_neg) or 1
+        colors = []
+
+        for mod in negatives:
+            raw = abs(mod["Size (Bytes)"]) / max_size
+            intensity = rescale_intensity(raw)
+            colors.append(cmap_neg(intensity))
 
+        for mod in positives:
+            raw = mod["Size (Bytes)"] / max_size
+            intensity = rescale_intensity(raw)
+            colors.append(cmap_pos(intensity))
+
+    # Draw rectangles and labels
     for rect, mod, color in zip(rects, modules, colors, strict=False):
         x, y, dx, dy = rect["x"], rect["y"], rect["dx"], rect["dy"]
         ax.add_patch(plt.Rectangle((x, y), dx, dy, color=color, ec="white"))
 
-        area = dx * dy
-        font_size = max(6, min(18, area / 100))
+        # Font size config
+        MIN_FONT_SIZE = 6
+        MAX_FONT_SIZE = 12
+        FONT_SIZE_SCALE = 0.4
+        AVG_SIDE = (dx * dy) ** 0.5
+        font_size = max(MIN_FONT_SIZE, min(MAX_FONT_SIZE, AVG_SIDE * FONT_SIZE_SCALE))
         name = mod["Name"]
         size_str = f"({mod['Size']})"
 
-        label = ""
-        name_fits = 0.5 * (len(name) + 2) < dx
-        size_fits = 0.5 * (len(size_str) + 2) < dx
-        both_fit = 5 < dy
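+        # Estimate how many characters fit in the tile before choosing between full, short, or no label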
+        CHAR_WIDTH_FACTOR = 0.1
+        CHAR_HEIGHT_FACTOR = 0.5
+        name_fits = (len(name) + 2) * font_size * CHAR_WIDTH_FACTOR < dx and dy > font_size * CHAR_HEIGHT_FACTOR
+        size_fits = (len(size_str) + 2) * font_size * CHAR_WIDTH_FACTOR < dx
+        both_fit = dy > font_size * CHAR_HEIGHT_FACTOR * 2
+
+        if dx < 5 or dy < 5:
+            label = None
+        elif not name_fits and dx > 5:
+            max_chars = int(dx / (font_size * CHAR_WIDTH_FACTOR)) - 2
+            if 4 <= max_chars:
+                name = name[: max_chars - 3] + "..."
+                name_fits = True
 
         if name_fits and size_fits and both_fit:
             label = f"{name}\n{size_str}"
@@ -156,18 +247,26 @@ def scale(val, vmin=0.3, vmax=0.85):
     ax.set_xlim(0, 100)
     ax.set_ylim(0, 100)
 
-    plt.title("Modules by Disk Usage", fontsize=16)
+    plt.title(title, fontsize=16)
 
-    legend_handles = [
-        Patch(color=cmap_int(0.6), label="Integration"),
-        Patch(color=cmap_dep(0.6), label="Dependency"),
-    ]
-    plt.legend(handles=legend_handles, title="Type", loc="center left", bbox_to_anchor=(1.0, 0.5))
+    if mode == "status":
+        legend_handles = [
+            Patch(color=cm.get_cmap("Purples")(0.6), label="Integration"),
+            Patch(color=cm.get_cmap("Reds")(0.6), label="Dependency"),
+        ]
+    elif mode == "diff":
+        legend_handles = [
+            Patch(color=cm.get_cmap("Oranges")(0.7), label="Increase"),
+            Patch(color=cm.get_cmap("Blues")(0.7), label="Decrease"),
+        ]
 
+    plt.legend(handles=legend_handles, title="Type", loc="center left", bbox_to_anchor=(1.0, 0.5))
     plt.subplots_adjust(right=0.8)
     plt.tight_layout()
-    plt.show()
-    '''
+    if show:
+        plt.show()
+    if path:
+        plt.savefig(path, bbox_inches="tight")
 
 
 def get_dependencies_sizes(
diff --git a/ddev/src/ddev/cli/size/diff.py b/ddev/src/ddev/cli/size/diff.py
index 091f5af8bfe73..dcce6a37f1521 100644
--- a/ddev/src/ddev/cli/size/diff.py
+++ b/ddev/src/ddev/cli/size/diff.py
@@ -23,6 +23,7 @@
     group_modules,
     is_correct_dependency,
     is_valid_integration,
+    plot_treemap,
     print_csv,
     print_table,
     valid_platforms_versions,
@@ -40,6 +41,12 @@
 @click.option('--python', 'version', help="Python version (e.g 3.12).  If not specified, all versions will be analyzed")
 @click.option('--compressed', is_flag=True, help="Measure compressed size")
 @click.option('--csv', is_flag=True, help="Output in CSV format")
+@click.option('--save_to_png_path', help="Path to save the treemap as PNG")
+@click.option(
+    '--show_gui',
+    is_flag=True,
+    help="Display a pop-up window with a treemap showing size differences between the two commits.",
+)
 @click.pass_obj
 def diff(
     app: Application,
@@ -49,6 +56,8 @@ def diff(
     version: Optional[str],
     compressed: bool,
     csv: bool,
+    save_to_png_path: str,
+    show_gui: bool,
 ) -> None:
     """
     Compare the size of integrations and dependencies between two commits.
@@ -75,10 +84,36 @@ def diff(
                     progress.remove_task(task)
 
                     for i, (plat, ver) in enumerate([(p, v) for p in platforms for v in versions]):
-                        diff_mode(app, gitRepo, before, after, plat, ver, compressed, csv, i, progress)
+                        diff_mode(
+                            app,
+                            gitRepo,
+                            before,
+                            after,
+                            plat,
+                            ver,
+                            compressed,
+                            csv,
+                            i,
+                            progress,
+                            save_to_png_path,
+                            show_gui,
+                        )
                 else:
                     progress.remove_task(task)
-                    diff_mode(app, gitRepo, before, after, platform, version, compressed, csv, None, progress)
+                    diff_mode(
+                        app,
+                        gitRepo,
+                        before,
+                        after,
+                        platform,
+                        version,
+                        compressed,
+                        csv,
+                        None,
+                        progress,
+                        save_to_png_path,
+                        show_gui,
+                    )
 
             except Exception as e:
                 app.abort(str(e))
@@ -95,6 +130,8 @@ def diff_mode(
     csv: bool,
     i: Optional[int],
     progress: Progress,
+    save_to_png_path: str,
+    show_gui: bool,
 ) -> None:
     files_b, dependencies_b, files_a, dependencies_a = get_repo_info(
         gitRepo, platform, version, before, after, compressed, progress
@@ -113,6 +150,15 @@ def diff_mode(
     else:
         if csv:
             print_csv(app, i, grouped_modules)
+        elif show_gui or save_to_png_path:
+            print_table(app, "Diff", grouped_modules)
+            plot_treemap(
+                grouped_modules,
+                f"Disk Usage Differences for {platform} and Python version {version}",
+                show_gui,
+                "diff",
+                save_to_png_path,
+            )
         else:
             print_table(app, "Diff", grouped_modules)
 
diff --git a/ddev/src/ddev/cli/size/status.py b/ddev/src/ddev/cli/size/status.py
index 5ea54ac76adc6..35f122984f9e6 100644
--- a/ddev/src/ddev/cli/size/status.py
+++ b/ddev/src/ddev/cli/size/status.py
@@ -37,8 +37,22 @@
 @click.option('--python', 'version', help="Python version (e.g. 3.12). If not specified, all versions will be analyzed")
 @click.option('--compressed', is_flag=True, help="Measure compressed size")
 @click.option('--csv', is_flag=True, help="Output in CSV format")
+@click.option('--save_to_png_path', help="Path to save the treemap as PNG")
+@click.option(
+    '--show_gui',
+    is_flag=True,
+    help="Display a pop-up window with a treemap showing the current size distribution of modules.",
+)
 @click.pass_obj
-def status(app: Application, platform: Optional[str], version: Optional[str], compressed: bool, csv: bool) -> None:
+def status(
+    app: Application,
+    platform: Optional[str],
+    version: Optional[str],
+    compressed: bool,
+    csv: bool,
+    save_to_png_path: str,
+    show_gui: bool,
+) -> None:
     """
     Show the current size of all integrations and dependencies.
     """
@@ -53,16 +67,24 @@ def status(app: Application, platform: Optional[str], version: Optional[str], co
             platforms = valid_platforms if platform is None else [platform]
             versions = valid_versions if version is None else [version]
             for i, (plat, ver) in enumerate([(p, v) for p in platforms for v in versions]):
-                status_mode(app, repo_path, plat, ver, compressed, csv, i)
+                status_mode(app, repo_path, plat, ver, compressed, csv, i, save_to_png_path, show_gui)
         else:
-            status_mode(app, repo_path, platform, version, compressed, csv, None)
+            status_mode(app, repo_path, platform, version, compressed, csv, None, save_to_png_path, show_gui)
 
     except Exception as e:
         app.abort(str(e))
 
 
 def status_mode(
-    app: Application, repo_path: Path, platform: str, version: str, compressed: bool, csv: bool, i: Optional[int]
+    app: Application,
+    repo_path: Path,
+    platform: str,
+    version: str,
+    compressed: bool,
+    csv: bool,
+    i: Optional[int],
+    save_to_png_path: str,
+    show_gui: bool,
 ) -> None:
     with console.status("[cyan]Calculating sizes...", spinner="dots"):
         modules = get_files(compressed, repo_path) + get_dependencies(repo_path, platform, version, compressed)
@@ -71,9 +93,17 @@ def status_mode(
 
     if csv:
         print_csv(app, i, grouped_modules)
+    elif show_gui or save_to_png_path:
+        print_table(app, "Status", grouped_modules)
+        plot_treemap(
+            grouped_modules,
+            f"Disk Usage Status for {platform} and Python version {version}",
+            show_gui,
+            "status",
+            save_to_png_path,
+        )
     else:
         print_table(app, "Status", grouped_modules)
-        plot_treemap(grouped_modules)
 
 
 def get_files(compressed: bool, repo_path: Path) -> List[Dict[str, Union[str, int]]]:
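A hedged, stripped-down sketch of how the new flags are wired through Click and how output is dispatched (CSV, GUI/PNG, or plain table); the command and data below are placeholders, not the real `ddev size` commands.

```python
# Minimal Click sketch of the --save_to_png_path / --show_gui wiring (placeholder names).
from typing import Optional

import click


@click.command()
@click.option('--csv', is_flag=True, help="Output in CSV format")
@click.option('--save_to_png_path', help="Path to save the plot as PNG")
@click.option('--show_gui', is_flag=True, help="Display a pop-up window with the plot")
def demo(csv: bool, save_to_png_path: Optional[str], show_gui: bool) -> None:
    modules = [{"Name": "example", "Size (Bytes)": 1024}]  # placeholder data
    if csv:
        click.echo("Name,Size (Bytes)\nexample,1024")
    elif show_gui or save_to_png_path:
        # The table is still printed; the plot is shown and/or saved on top of it.
        click.echo("table + plot")
    else:
        click.echo("table only")


if __name__ == "__main__":
    demo()
```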
diff --git a/ddev/src/ddev/cli/size/timeline.py b/ddev/src/ddev/cli/size/timeline.py
index d268d44c05109..08ec61be68fb3 100644
--- a/ddev/src/ddev/cli/size/timeline.py
+++ b/ddev/src/ddev/cli/size/timeline.py
@@ -7,6 +7,7 @@
 from typing import Dict, List, Optional, Set, Tuple, Union, cast
 
 import click
+import matplotlib.pyplot as plt
 import requests
 from rich.console import Console
 from rich.progress import BarColumn, Progress, SpinnerColumn, TextColumn, TimeElapsedColumn
@@ -48,6 +49,12 @@
 )
 @click.option('--compressed', is_flag=True, help="Measure compressed size")
 @click.option('--csv', is_flag=True, help="Output results in CSV format")
+@click.option('--save_to_png_path', help="Path to save the treemap as PNG")
+@click.option(
+    '--show_gui',
+    is_flag=True,
+    help="Display a pop-up window with a line chart showing the size evolution of the selected module over time.",
+)
 @click.pass_obj
 def timeline(
     app: Application,
@@ -60,6 +67,8 @@ def timeline(
     platform: Optional[str],
     compressed: bool,
     csv: bool,
+    save_to_png_path: str,
+    show_gui: bool,
 ) -> None:
     """
     Show the size evolution of a module (integration or dependency) over time.
@@ -109,7 +118,20 @@ def timeline(
                     progress.remove_task(task)
                     for i, plat in enumerate(valid_platforms):
                         timeline_mode(
-                            app, gitRepo, type, module, commits, threshold, plat, compressed, csv, i, None, progress
+                            app,
+                            gitRepo,
+                            type,
+                            module,
+                            commits,
+                            threshold,
+                            plat,
+                            compressed,
+                            csv,
+                            i,
+                            None,
+                            progress,
+                            save_to_png_path,
+                            show_gui,
                         )
                 else:
                     progress.remove_task(task)
@@ -127,6 +149,8 @@ def timeline(
                         None,
                         first_commit,
                         progress,
+                        save_to_png_path,
+                        show_gui,
                     )
 
             except Exception as e:
@@ -147,6 +171,8 @@ def timeline_mode(
     i: Optional[int],
     first_commit: Optional[str],
     progress: Progress,
+    save_to_png_path: str,
+    show_gui: bool,
 ) -> None:
     modules = get_repo_info(gitRepo, type, platform, module, commits, compressed, first_commit, progress)
     if modules != []:
@@ -154,6 +180,9 @@ def timeline_mode(
         trimmed_modules = trim_modules(grouped_modules, threshold)
         if csv:
             print_csv(app, i, trimmed_modules)
+        elif show_gui or save_to_png_path:
+            print_table(app, "Timeline for " + module, trimmed_modules)
+            plot_linegraph(trimmed_modules, module, platform, show_gui, save_to_png_path)
         else:
             print_table(app, "Timeline for " + module, trimmed_modules)
 
@@ -427,3 +456,24 @@ def get_dependency_list(path: str, platforms: Set[str]) -> Set[str]:
                     matches = re.findall(r"([\w\-\d\.]+) @ https?://[^\s#]+", file.read())
                     dependencies.update(matches)
     return dependencies
+
+
+def plot_linegraph(modules, module, platform, show, path):
+    dates = [entry["Date"] for entry in modules]
+    sizes = [entry["Size (Bytes)"] for entry in modules]
+    title = f"Disk Usage Evolution of {module} for {platform}" if platform else f"Disk Usage Evolution of {module}"
+
+    plt.figure(figsize=(10, 6))
+    plt.plot(dates, sizes, linestyle='-')
+    plt.title(title)
+    plt.xlabel("Date")
+    plt.ylabel("Size (Bytes)")
+    plt.grid(True)
+    plt.xticks(rotation=45)
+    plt.tight_layout()
+
+    if path:
+        plt.savefig(path)
+    if show:
+        plt.show()
+    plt.close()
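A hedged usage example for `plot_linegraph` with made-up rows, assuming the patched module is importable from `ddev.cli.size.timeline`; using `datetime` objects for the `Date` values, as below, lets matplotlib order and space the x-axis chronologically.

```python
# Hypothetical call to plot_linegraph with fabricated timeline rows.
from datetime import datetime

from ddev.cli.size.timeline import plot_linegraph  # assumes the patched ddev is installed

sample_rows = [
    {"Date": datetime(2025, 3, 1), "Size (Bytes)": 10_485_760},
    {"Date": datetime(2025, 3, 15), "Size (Bytes)": 11_010_048},
    {"Date": datetime(2025, 4, 1), "Size (Bytes)": 11_534_336},
]

# show=False keeps it headless; the PNG path is an arbitrary example.
plot_linegraph(sample_rows, "example_dependency", "linux-x86_64", show=False, path="timeline_sketch.png")
```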
diff --git a/ddev/tests/cli/size/test_diff.py b/ddev/tests/cli/size/test_diff.py
index 06c2ba443989f..038c4b227d9d1 100644
--- a/ddev/tests/cli/size/test_diff.py
+++ b/ddev/tests/cli/size/test_diff.py
@@ -163,6 +163,7 @@ def get_compressed_dependencies_side_effect(_, __, ___, ____):
         patch("ddev.cli.size.common.group_modules", side_effect=lambda m, *_: m),
         patch("ddev.cli.size.common.print_csv"),
         patch("ddev.cli.size.common.print_table"),
+        patch("ddev.cli.size.common.plot_treemap"),
     ):
         yield
 
diff --git a/ddev/tests/cli/size/test_timeline.py b/ddev/tests/cli/size/test_timeline.py
index 0f3fb40d8d147..7e7f7abc163b8 100644
--- a/ddev/tests/cli/size/test_timeline.py
+++ b/ddev/tests/cli/size/test_timeline.py
@@ -267,6 +267,7 @@ def mock_timeline_dependencies():
         patch("ddev.cli.size.timeline.group_modules", side_effect=lambda m, *_: m),
         patch("ddev.cli.size.timeline.trim_modules", side_effect=lambda m, *_: m),
         patch("ddev.cli.size.timeline.print_table"),
+        patch("ddev.cli.size.timeline.plot_linegraph"),
     ):
         mock_response = MagicMock()
         mock_response.headers = {"Content-Length": "1024"}

From dc6a9db268277f7691b2cc79c967f6148c0815cd Mon Sep 17 00:00:00 2001
From: Lucia Sanchez Bella <lucia.sanchezbella@datadoghq.com>
Date: Wed, 23 Apr 2025 11:13:49 +0200
Subject: [PATCH 36/40] Changelog

---
 ddev/changelog.d/20128.added | 4 ++++
 1 file changed, 4 insertions(+)
 create mode 100644 ddev/changelog.d/20128.added

diff --git a/ddev/changelog.d/20128.added b/ddev/changelog.d/20128.added
new file mode 100644
index 0000000000000..6b95fbdfccc6f
--- /dev/null
+++ b/ddev/changelog.d/20128.added
@@ -0,0 +1,4 @@
+Added new commands to track and analyze size changes in integrations and dependencies:
+- **`ddev size status`**: Shows current sizes of all modules.
+- **`ddev size diff [COMMIT_BEFORE] [COMMIT_AFTER]`**: Compares size changes between two commits.
+- **`ddev size timeline {integration | dependency} [INTEGRATION_NAME/DEPENDENCY_NAME]`**: Visualizes the size evolution of a module over time.

From a425dab4f699226a24d8d44c5ece5657a0be9325 Mon Sep 17 00:00:00 2001
From: Lucia Sanchez Bella <lucia.sanchezbella@datadoghq.com>
Date: Wed, 23 Apr 2025 11:21:02 +0200
Subject: [PATCH 37/40] testing in windows

---
 .github/workflows/slapr.yml | 69 ++++++++-----------------------------
 1 file changed, 15 insertions(+), 54 deletions(-)

diff --git a/.github/workflows/slapr.yml b/.github/workflows/slapr.yml
index f661155a5026c..6537111c8b07a 100644
--- a/.github/workflows/slapr.yml
+++ b/.github/workflows/slapr.yml
@@ -9,7 +9,7 @@ env:
 
 jobs:
   measure-disk-usage:
-    runs-on: ubuntu-22.04
+    runs-on: windows-2022  # Changed from ubuntu-22.04 to windows-2022
     steps:
     - uses: actions/checkout@v4
       with:
@@ -18,6 +18,7 @@ jobs:
       uses: actions/setup-python@v5
       with:
         python-version: ${{ env.PYTHON_VERSION }}
+
     - name: Install ddev
       run: |
         pip install -e ./datadog_checks_dev[cli]
@@ -27,51 +28,26 @@ jobs:
       run: |
         ddev config set repos.core .
         ddev config set repo core
+
     - name: Measure disk usage (uncompressed)
-      run: | 
+      run: |
         ddev size status --csv > size-uncompressed.csv
         ddev size status > size-uncompressed.txt
-        cat size-uncompressed.txt
-        echo "# Size (uncompressed)" >> $GITHUB_STEP_SUMMARY
-        echo '```' >> $GITHUB_STEP_SUMMARY
-        cat size-uncompressed.txt >> $GITHUB_STEP_SUMMARY
-        echo '```' >> $GITHUB_STEP_SUMMARY
+        type size-uncompressed.txt
+        echo "# Size (uncompressed)" >> %GITHUB_STEP_SUMMARY%
+        echo ``` >> %GITHUB_STEP_SUMMARY%
+        type size-uncompressed.txt >> %GITHUB_STEP_SUMMARY%
+        echo ``` >> %GITHUB_STEP_SUMMARY%
+
     - name: Measure disk usage (compressed)
       run: |
         ddev size status --csv --compressed > size-compressed.csv
         ddev size status --compressed > size-compressed.txt
-        cat size-compressed.txt
-        echo "# Size (compressed)" >> $GITHUB_STEP_SUMMARY
-        echo '```' >> $GITHUB_STEP_SUMMARY
-        cat size-compressed.txt >> $GITHUB_STEP_SUMMARY
-        echo '```' >> $GITHUB_STEP_SUMMARY
-
-    - name: Measure disk usage differences from last commit (uncompressed)
-      if: false  # Disabled: size difference is not accurate due to dependency sizes not updated
-      run: | 
-        BEFORE=$(git rev-parse HEAD^)
-        AFTER=$(git rev-parse HEAD)
-        ddev size diff $BEFORE $AFTER --csv > diff-uncompressed.csv
-        ddev size diff $BEFORE $AFTER > diff-uncompressed.txt
-        cat diff-uncompressed.txt
-        echo "# Size diff (uncompressed)" >> $GITHUB_STEP_SUMMARY
-        echo '```' >> $GITHUB_STEP_SUMMARY
-        cat diff-uncompressed.txt >> $GITHUB_STEP_SUMMARY
-        echo '```' >> $GITHUB_STEP_SUMMARY
-
-    - name: Measure disk usage differences from last commit (compressed)
-      if: false  # Disabled: size difference is not accurate due to dependency sizes not updated
-      run: | 
-        BEFORE=$(git rev-parse HEAD^)
-        AFTER=$(git rev-parse HEAD)
-        ddev size diff $BEFORE $AFTER --compressed --csv > diff-compressed.csv
-        ddev size diff $BEFORE $AFTER --compressed > diff-compressed.txt
-        cat diff-compressed.txt
-        echo "# Size diff (compressed)" >> $GITHUB_STEP_SUMMARY
-        echo '```' >> $GITHUB_STEP_SUMMARY
-        cat diff-compressed.txt >> $GITHUB_STEP_SUMMARY
-        echo '```' >> $GITHUB_STEP_SUMMARY
-
+        type size-compressed.txt
+        echo "# Size (compressed)" >> %GITHUB_STEP_SUMMARY%
+        echo ``` >> %GITHUB_STEP_SUMMARY%
+        type size-compressed.txt >> %GITHUB_STEP_SUMMARY%
+        echo ``` >> %GITHUB_STEP_SUMMARY%
 
     - name: Upload file sizes (uncompressed)
       uses: actions/upload-artifact@v4
@@ -85,18 +61,3 @@ jobs:
         name: size-compressed.csv
         path: size-compressed.csv
         if-no-files-found: error
-
-    - name: Upload file sizes diff (uncompressed)
-      if: false  # Disabled: size difference is not accurate due to dependency sizes not updated
-      uses: actions/upload-artifact@v4
-      with:
-        name: diff-uncompressed.csv
-        path: diff-uncompressed.csv
-        if-no-files-found: error     
-    - name: Upload file sizes diff (compressed)
-      if: false  # Disabled: size difference is not accurate due to dependency sizes not updated
-      uses: actions/upload-artifact@v4
-      with:
-        name: diff-compressed.csv
-        path: diff-compressed.csv
-        if-no-files-found: error

From 2ded26bf2c62b3b8822f0b8b1a6013ea0675862f Mon Sep 17 00:00:00 2001
From: Lucia Sanchez Bella <lucia.sanchezbella@datadoghq.com>
Date: Wed, 23 Apr 2025 11:55:03 +0200
Subject: [PATCH 38/40] test images

---
 .github/workflows/slapr.yml      | 96 ++++++++++++++++++++++++++------
 ddev/src/ddev/cli/size/diff.py   |  3 +
 ddev/src/ddev/cli/size/status.py |  3 +
 3 files changed, 85 insertions(+), 17 deletions(-)

diff --git a/.github/workflows/slapr.yml b/.github/workflows/slapr.yml
index 6537111c8b07a..c8b417a22daa0 100644
--- a/.github/workflows/slapr.yml
+++ b/.github/workflows/slapr.yml
@@ -9,7 +9,7 @@ env:
 
 jobs:
   measure-disk-usage:
-    runs-on: windows-2022  # Changed from ubuntu-22.04 to windows-2022
+    runs-on: ubuntu-22.04
     steps:
     - uses: actions/checkout@v4
       with:
@@ -18,7 +18,6 @@ jobs:
       uses: actions/setup-python@v5
       with:
         python-version: ${{ env.PYTHON_VERSION }}
-
     - name: Install ddev
       run: |
         pip install -e ./datadog_checks_dev[cli]
@@ -28,26 +27,56 @@ jobs:
       run: |
         ddev config set repos.core .
         ddev config set repo core
-
     - name: Measure disk usage (uncompressed)
-      run: |
+      run: | 
+        mkdir -p status_visualizations
         ddev size status --csv > size-uncompressed.csv
-        ddev size status > size-uncompressed.txt
-        type size-uncompressed.txt
-        echo "# Size (uncompressed)" >> %GITHUB_STEP_SUMMARY%
-        echo ``` >> %GITHUB_STEP_SUMMARY%
-        type size-uncompressed.txt >> %GITHUB_STEP_SUMMARY%
-        echo ``` >> %GITHUB_STEP_SUMMARY%
-
+        ddev size status --save_to_png_path status_visualizations/uncompressed.png > size-uncompressed.txt
+        cat size-uncompressed.txt
+        echo "# Size (uncompressed)" >> $GITHUB_STEP_SUMMARY
+        echo '```' >> $GITHUB_STEP_SUMMARY
+        cat size-uncompressed.txt >> $GITHUB_STEP_SUMMARY
+        echo '```' >> $GITHUB_STEP_SUMMARY
+  
     - name: Measure disk usage (compressed)
       run: |
+        mkdir -p status_visualizations
         ddev size status --csv --compressed > size-compressed.csv
-        ddev size status --compressed > size-compressed.txt
-        type size-compressed.txt
-        echo "# Size (compressed)" >> %GITHUB_STEP_SUMMARY%
-        echo ``` >> %GITHUB_STEP_SUMMARY%
-        type size-compressed.txt >> %GITHUB_STEP_SUMMARY%
-        echo ``` >> %GITHUB_STEP_SUMMARY%
+        ddev size status --compressed --save_to_png_path status_visualizations/compressed.png > size-compressed.txt
+        cat size-compressed.txt
+        echo "# Size (compressed)" >> $GITHUB_STEP_SUMMARY
+        echo '```' >> $GITHUB_STEP_SUMMARY
+        cat size-compressed.txt >> $GITHUB_STEP_SUMMARY
+        echo '```' >> $GITHUB_STEP_SUMMARY
+
+
+    - name: Measure disk usage differences from last commit (uncompressed)
+      if: false  # Disabled: size difference is not accurate due to dependency sizes not updated
+      run: | 
+        mkdir -p diff_visualizations
+        BEFORE=$(git rev-parse HEAD^)
+        AFTER=$(git rev-parse HEAD)
+        ddev size diff $BEFORE $AFTER --csv > diff-uncompressed.csv
+        ddev size diff $BEFORE $AFTER --save_to_png_path diff_visualizations/diff-uncompressed-linux.png > diff-uncompressed.txt
+        cat diff-uncompressed.txt
+        echo "# Size diff (uncompressed)" >> $GITHUB_STEP_SUMMARY
+        echo '```' >> $GITHUB_STEP_SUMMARY
+        cat diff-uncompressed.txt >> $GITHUB_STEP_SUMMARY
+        echo '```' >> $GITHUB_STEP_SUMMARY
+
+    - name: Measure disk usage differences from last commit (compressed)
+      if: false  # Disabled: size difference is not accurate due to dependency sizes not updated
+      run: | 
+        mkdir -p diff_visualizations
+        BEFORE=$(git rev-parse HEAD^)
+        AFTER=$(git rev-parse HEAD)
+        ddev size diff $BEFORE $AFTER --compressed --csv > diff-compressed.csv
+        ddev size diff $BEFORE $AFTER --compressed --save_to_png_path diff_visualizations/diff-compressed-linux.png > diff-compressed.txt
+        cat diff-compressed.txt
+        echo "# Size diff (compressed)" >> $GITHUB_STEP_SUMMARY
+        echo '```' >> $GITHUB_STEP_SUMMARY
+        cat diff-compressed.txt >> $GITHUB_STEP_SUMMARY
+        echo '```' >> $GITHUB_STEP_SUMMARY
 
     - name: Upload file sizes (uncompressed)
       uses: actions/upload-artifact@v4
@@ -55,9 +84,42 @@ jobs:
         name: size-uncompressed.csv
         path: size-uncompressed.csv
         if-no-files-found: error
+
     - name: Upload file sizes (compressed)
       uses: actions/upload-artifact@v4
       with:
         name: size-compressed.csv
         path: size-compressed.csv
         if-no-files-found: error
+  
+    - name: Upload file sizes diff (uncompressed)
+      #if: false # Disabled: size difference is not accurate due to dependency sizes not updated
+      uses: actions/upload-artifact@v4
+      with:
+        name: diff-uncompressed.csv
+        path: diff-uncompressed.csv
+        if-no-files-found: error     
+
+    - name: Upload file sizes diff (compressed)
+      #if: false  # Disabled: size difference is not accurate due to dependency sizes not updated
+      uses: actions/upload-artifact@v4
+      with:
+        name: diff-compressed.csv
+        path: diff-compressed.csv
+        if-no-files-found: error
+
+    - name: Upload status PNGs
+      uses: actions/upload-artifact@v4
+      with:
+        name: size-visuals
+        path: status_visualizations/
+        if-no-files-found: error
+
+    - name: Upload diff PNGs
+      #if: false  
+      uses: actions/upload-artifact@v4
+      with:
+        name: diff-visuals
+        path: diff_visualizations/
+        if-no-files-found: error
+    
diff --git a/ddev/src/ddev/cli/size/diff.py b/ddev/src/ddev/cli/size/diff.py
index dcce6a37f1521..a4f7655d2af15 100644
--- a/ddev/src/ddev/cli/size/diff.py
+++ b/ddev/src/ddev/cli/size/diff.py
@@ -84,6 +84,9 @@ def diff(
                     progress.remove_task(task)
 
                     for i, (plat, ver) in enumerate([(p, v) for p in platforms for v in versions]):
+                        if save_to_png_path:
+                            base, ext = os.path.splitext(save_to_png_path)
+                            save_to_png_path = f"{base}_{plat}_{ver}{ext}"
                         diff_mode(
                             app,
                             gitRepo,
diff --git a/ddev/src/ddev/cli/size/status.py b/ddev/src/ddev/cli/size/status.py
index 35f122984f9e6..9894367e0730c 100644
--- a/ddev/src/ddev/cli/size/status.py
+++ b/ddev/src/ddev/cli/size/status.py
@@ -67,6 +67,9 @@ def status(
             platforms = valid_platforms if platform is None else [platform]
             versions = valid_versions if version is None else [version]
             for i, (plat, ver) in enumerate([(p, v) for p in platforms for v in versions]):
+                if save_to_png_path:
+                    base, ext = os.path.splitext(save_to_png_path)
+                    save_to_png_path = f"{base}_{plat}_{ver}{ext}"
                 status_mode(app, repo_path, plat, ver, compressed, csv, i, save_to_png_path, show_gui)
         else:
             status_mode(app, repo_path, platform, version, compressed, csv, None, save_to_png_path, show_gui)
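One hedged observation on the naming loops above: because `save_to_png_path` itself is reassigned inside the loop, every iteration after the first derives its name from an already-suffixed path (for example `treemap_linux-x86_64_3.12_macos-x86_64_3.12.png`). The sketch below keeps the CLI value untouched and builds a fresh per-iteration name instead; the surrounding names are illustrative.

```python
# Sketch: derive a per-iteration PNG path without mutating the CLI option value.
import os

save_to_png_path = "treemap.png"  # example value as passed on the command line
platforms = ["linux-x86_64", "macos-x86_64"]  # illustrative
versions = ["3.12"]

for plat in platforms:
    for ver in versions:
        png_path = None
        if save_to_png_path:
            base, ext = os.path.splitext(save_to_png_path)
            png_path = f"{base}_{plat}_{ver}{ext}"  # e.g. treemap_linux-x86_64_3.12.png
        # status_mode(..., save_to_png_path=png_path, ...)  # pass the derived name instead
        print(png_path)
```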

From 2f04a84173ba38c278346cfa8449ec0f266eff28 Mon Sep 17 00:00:00 2001
From: Lucia Sanchez Bella <lucia.sanchezbella@datadoghq.com>
Date: Wed, 23 Apr 2025 11:59:14 +0200
Subject: [PATCH 39/40] test images

---
 .github/workflows/slapr.yml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/slapr.yml b/.github/workflows/slapr.yml
index c8b417a22daa0..4c9f6a874f96c 100644
--- a/.github/workflows/slapr.yml
+++ b/.github/workflows/slapr.yml
@@ -51,7 +51,7 @@ jobs:
 
 
     - name: Measure disk usage differences from last commit (uncompressed)
-      if: false  # Disabled: size difference is not accurate due to dependency sizes not updated
+      #if: false  # Disabled: size difference is not accurate due to dependency sizes not updated
       run: | 
         mkdir -p diff_visualizations
         BEFORE=$(git rev-parse HEAD^)
@@ -65,7 +65,7 @@ jobs:
         echo '```' >> $GITHUB_STEP_SUMMARY
 
     - name: Measure disk usage differences from last commit (compressed)
-      if: false  # Disabled: size difference is not accurate due to dependency sizes not updated
+      #if: false  # Disabled: size difference is not accurate due to dependency sizes not updated
       run: | 
         mkdir -p diff_visualizations
         BEFORE=$(git rev-parse HEAD^)

From e9d4d084e1cf9574015fe630b617c8c6e33bad90 Mon Sep 17 00:00:00 2001
From: Lucia Sanchez Bella <lucia.sanchezbella@datadoghq.com>
Date: Wed, 23 Apr 2025 12:05:20 +0200
Subject: [PATCH 40/40] test images

---
 .github/workflows/measure-disk-usage.yml | 46 +++++++++++++++++-------
 1 file changed, 34 insertions(+), 12 deletions(-)

diff --git a/.github/workflows/measure-disk-usage.yml b/.github/workflows/measure-disk-usage.yml
index 181cdfabd5880..4c9f6a874f96c 100644
--- a/.github/workflows/measure-disk-usage.yml
+++ b/.github/workflows/measure-disk-usage.yml
@@ -1,7 +1,7 @@
 name: Measure Disk Usage
 
 on:
-  push:
+  pull_request:
     branches:
     - master
 env:
@@ -29,30 +29,35 @@ jobs:
         ddev config set repo core
     - name: Measure disk usage (uncompressed)
       run: | 
+        mkdir -p status_visualizations
         ddev size status --csv > size-uncompressed.csv
-        ddev size status > size-uncompressed.txt
+        ddev size status --save_to_png_path status_visualizations/uncompressed.png > size-uncompressed.txt
         cat size-uncompressed.txt
         echo "# Size (uncompressed)" >> $GITHUB_STEP_SUMMARY
         echo '```' >> $GITHUB_STEP_SUMMARY
         cat size-uncompressed.txt >> $GITHUB_STEP_SUMMARY
         echo '```' >> $GITHUB_STEP_SUMMARY
+  
     - name: Measure disk usage (compressed)
       run: |
+        mkdir -p status_visualizations
         ddev size status --csv --compressed > size-compressed.csv
-        ddev size status --compressed > size-compressed.txt
+        ddev size status --compressed --save_to_png_path status_visualizations/compressed.png > size-compressed.txt
         cat size-compressed.txt
         echo "# Size (compressed)" >> $GITHUB_STEP_SUMMARY
         echo '```' >> $GITHUB_STEP_SUMMARY
         cat size-compressed.txt >> $GITHUB_STEP_SUMMARY
         echo '```' >> $GITHUB_STEP_SUMMARY
 
+
     - name: Measure disk usage differences from last commit (uncompressed)
-      if: false  # Disabled: size difference is not accurate due to dependency sizes not updated
+      #if: false  # Disabled: size difference is not accurate due to dependency sizes not updated
       run: | 
+        mkdir -p diff_visualizations
         BEFORE=$(git rev-parse HEAD^)
         AFTER=$(git rev-parse HEAD)
         ddev size diff $BEFORE $AFTER --csv > diff-uncompressed.csv
-        ddev size diff $BEFORE $AFTER > diff-uncompressed.txt
+        ddev size diff $BEFORE $AFTER --save_to_png_path diff_visualizations/diff-uncompressed-linux.png > diff-uncompressed.txt
         cat diff-uncompressed.txt
         echo "# Size diff (uncompressed)" >> $GITHUB_STEP_SUMMARY
         echo '```' >> $GITHUB_STEP_SUMMARY
@@ -60,44 +65,61 @@ jobs:
         echo '```' >> $GITHUB_STEP_SUMMARY
 
     - name: Measure disk usage differences from last commit (compressed)
-      if: false  # Disabled: size difference is not accurate due to dependency sizes not updated
+      #if: false  # Disabled: size difference is not accurate due to dependency sizes not updated
       run: | 
+        mkdir -p diff_visualizations
         BEFORE=$(git rev-parse HEAD^)
         AFTER=$(git rev-parse HEAD)
         ddev size diff $BEFORE $AFTER --compressed --csv > diff-compressed.csv
-        ddev size diff $BEFORE $AFTER --compressed > diff-compressed.txt
+        ddev size diff $BEFORE $AFTER --compressed --save_to_png_path diff_visualizations/diff-compressed-linux.png > diff-compressed.txt
         cat diff-compressed.txt
         echo "# Size diff (compressed)" >> $GITHUB_STEP_SUMMARY
         echo '```' >> $GITHUB_STEP_SUMMARY
         cat diff-compressed.txt >> $GITHUB_STEP_SUMMARY
         echo '```' >> $GITHUB_STEP_SUMMARY
 
-
     - name: Upload file sizes (uncompressed)
       uses: actions/upload-artifact@v4
       with:
         name: size-uncompressed.csv
         path: size-uncompressed.csv
         if-no-files-found: error
-        
+
     - name: Upload file sizes (compressed)
       uses: actions/upload-artifact@v4
       with:
         name: size-compressed.csv
         path: size-compressed.csv
         if-no-files-found: error
-
+  
     - name: Upload file sizes diff (uncompressed)
-      if: false  # Disabled: size difference is not accurate due to dependency sizes not updated
+      #if: false # Disabled: size difference is not accurate due to dependency sizes not updated
       uses: actions/upload-artifact@v4
       with:
         name: diff-uncompressed.csv
         path: diff-uncompressed.csv
         if-no-files-found: error     
+
     - name: Upload file sizes diff (compressed)
-      if: false  # Disabled: size difference is not accurate due to dependency sizes not updated
+      #if: false  # Disabled: size difference is not accurate due to dependency sizes not updated
       uses: actions/upload-artifact@v4
       with:
         name: diff-compressed.csv
         path: diff-compressed.csv
         if-no-files-found: error
+
+    - name: Upload status PNGs
+      uses: actions/upload-artifact@v4
+      with:
+        name: size-visuals
+        path: status_visualizations/
+        if-no-files-found: error
+
+    - name: Upload diff PNGs
+      #if: false  
+      uses: actions/upload-artifact@v4
+      with:
+        name: diff-visuals
+        path: diff_visualizations/
+        if-no-files-found: error
+