diff --git a/.github/workflows/measure-disk-usage.yml b/.github/workflows/measure-disk-usage.yml new file mode 100644 index 0000000000000..4c9f6a874f96c --- /dev/null +++ b/.github/workflows/measure-disk-usage.yml @@ -0,0 +1,125 @@ +name: Measure Disk Usage + +on: + pull_request: + branches: + - master +env: + PYTHON_VERSION: "3.12" + +jobs: + measure-disk-usage: + runs-on: ubuntu-22.04 + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + - name: Set up Python ${{ env.PYTHON_VERSION }} + uses: actions/setup-python@v5 + with: + python-version: ${{ env.PYTHON_VERSION }} + - name: Install ddev + run: | + pip install -e ./datadog_checks_dev[cli] + pip install -e ./ddev + + - name: Configure ddev + run: | + ddev config set repos.core . + ddev config set repo core + - name: Measure disk usage (uncompressed) + run: | + mkdir -p status_visualizations + ddev size status --csv > size-uncompressed.csv + ddev size status --save_to_png_path status_visualizations/uncompressed.png > size-uncompressed.txt + cat size-uncompressed.txt + echo "# Size (uncompressed)" >> $GITHUB_STEP_SUMMARY + echo '```' >> $GITHUB_STEP_SUMMARY + cat size-uncompressed.txt >> $GITHUB_STEP_SUMMARY + echo '```' >> $GITHUB_STEP_SUMMARY + + - name: Measure disk usage (compressed) + run: | + mkdir -p status_visualizations + ddev size status --csv --compressed > size-compressed.csv + ddev size status --compressed --save_to_png_path status_visualizations/compressed.png > size-compressed.txt + cat size-compressed.txt + echo "# Size (compressed)" >> $GITHUB_STEP_SUMMARY + echo '```' >> $GITHUB_STEP_SUMMARY + cat size-compressed.txt >> $GITHUB_STEP_SUMMARY + echo '```' >> $GITHUB_STEP_SUMMARY + + + - name: Measure disk usage differences from last commit (uncompressed) + #if: false # Disabled: size difference is not accurate due to dependency sizes not updated + run: | + mkdir -p diff_visualizations + BEFORE=$(git rev-parse HEAD^) + AFTER=$(git rev-parse HEAD) + ddev size diff $BEFORE $AFTER --csv > 
diff-uncompressed.csv + ddev size diff $BEFORE $AFTER --save_to_png_path diff_visualizations/diff-uncompressed-linux.png > diff-uncompressed.txt + cat diff-uncompressed.txt + echo "# Size diff (uncompressed)" >> $GITHUB_STEP_SUMMARY + echo '```' >> $GITHUB_STEP_SUMMARY + cat diff-uncompressed.txt >> $GITHUB_STEP_SUMMARY + echo '```' >> $GITHUB_STEP_SUMMARY + + - name: Measure disk usage differences from last commit (compressed) + #if: false # Disabled: size difference is not accurate due to dependency sizes not updated + run: | + mkdir -p diff_visualizations + BEFORE=$(git rev-parse HEAD^) + AFTER=$(git rev-parse HEAD) + ddev size diff $BEFORE $AFTER --compressed --csv > diff-compressed.csv + ddev size diff $BEFORE $AFTER --compressed --save_to_png_path diff_visualizations/diff-compressed-linux.png > diff-compressed.txt + cat diff-compressed.txt + echo "# Size diff (compressed)" >> $GITHUB_STEP_SUMMARY + echo '```' >> $GITHUB_STEP_SUMMARY + cat diff-compressed.txt >> $GITHUB_STEP_SUMMARY + echo '```' >> $GITHUB_STEP_SUMMARY + + - name: Upload file sizes (uncompressed) + uses: actions/upload-artifact@v4 + with: + name: size-uncompressed.csv + path: size-uncompressed.csv + if-no-files-found: error + + - name: Upload file sizes (compressed) + uses: actions/upload-artifact@v4 + with: + name: size-compressed.csv + path: size-compressed.csv + if-no-files-found: error + + - name: Upload file sizes diff (uncompressed) + #if: false # Disabled: size difference is not accurate due to dependency sizes not updated + uses: actions/upload-artifact@v4 + with: + name: diff-uncompressed.csv + path: diff-uncompressed.csv + if-no-files-found: error + + - name: Upload file sizes diff (compressed) + #if: false # Disabled: size difference is not accurate due to dependency sizes not updated + uses: actions/upload-artifact@v4 + with: + name: diff-compressed.csv + path: diff-compressed.csv + if-no-files-found: error + + - name: Upload status PNGs + uses: actions/upload-artifact@v4 + with: + 
name: size-visuals + path: status_visualizations/ + if-no-files-found: error + + - name: Upload diff PNGs + #if: false + uses: actions/upload-artifact@v4 + with: + name: diff-visuals + path: diff_visualizations/ + if-no-files-found: error + diff --git a/.github/workflows/slapr.yml b/.github/workflows/slapr.yml index 98ee0448e5614..4c9f6a874f96c 100644 --- a/.github/workflows/slapr.yml +++ b/.github/workflows/slapr.yml @@ -1,30 +1,125 @@ -# https://github.com/DataDog/slapr +name: Measure Disk Usage -name: Slack emoji PR updates on: - pull_request_review: - types: [submitted] pull_request: - types: [closed] + branches: + - master +env: + PYTHON_VERSION: "3.12" jobs: - run_slapr_agent_integrations: - runs-on: ubuntu-latest - strategy: - matrix: - slack_channel_variable: - - SLACK_CHANNEL_ID - - SLACK_CHANNEL_ID_AGENT_INTEGRATIONS_REVIEWS - - SLACK_CHANNEL_ID_INFRA_INTEGRATIONS + measure-disk-usage: + runs-on: ubuntu-22.04 steps: - - uses: DataDog/slapr@master - env: - GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}" - SLACK_CHANNEL_ID: "${{ secrets[matrix.slack_channel_variable] }}" - SLACK_API_TOKEN: "${{ secrets.SLACK_API_TOKEN }}" - SLAPR_BOT_USER_ID: "${{ secrets.SLAPR_BOT_USER_ID }}" - SLAPR_EMOJI_REVIEW_STARTED: "review_started" - SLAPR_EMOJI_APPROVED: "approved2" - SLAPR_EMOJI_CHANGES_REQUESTED: "changes_requested" - SLAPR_EMOJI_MERGED: "merged" - SLAPR_EMOJI_CLOSED: "closed" + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + - name: Set up Python ${{ env.PYTHON_VERSION }} + uses: actions/setup-python@v5 + with: + python-version: ${{ env.PYTHON_VERSION }} + - name: Install ddev + run: | + pip install -e ./datadog_checks_dev[cli] + pip install -e ./ddev + + - name: Configure ddev + run: | + ddev config set repos.core . 
+ ddev config set repo core + - name: Measure disk usage (uncompressed) + run: | + mkdir -p status_visualizations + ddev size status --csv > size-uncompressed.csv + ddev size status --save_to_png_path status_visualizations/uncompressed.png > size-uncompressed.txt + cat size-uncompressed.txt + echo "# Size (uncompressed)" >> $GITHUB_STEP_SUMMARY + echo '```' >> $GITHUB_STEP_SUMMARY + cat size-uncompressed.txt >> $GITHUB_STEP_SUMMARY + echo '```' >> $GITHUB_STEP_SUMMARY + + - name: Measure disk usage (compressed) + run: | + mkdir -p status_visualizations + ddev size status --csv --compressed > size-compressed.csv + ddev size status --compressed --save_to_png_path status_visualizations/compressed.png > size-compressed.txt + cat size-compressed.txt + echo "# Size (compressed)" >> $GITHUB_STEP_SUMMARY + echo '```' >> $GITHUB_STEP_SUMMARY + cat size-compressed.txt >> $GITHUB_STEP_SUMMARY + echo '```' >> $GITHUB_STEP_SUMMARY + + + - name: Measure disk usage differences from last commit (uncompressed) + #if: false # Disabled: size difference is not accurate due to dependency sizes not updated + run: | + mkdir -p diff_visualizations + BEFORE=$(git rev-parse HEAD^) + AFTER=$(git rev-parse HEAD) + ddev size diff $BEFORE $AFTER --csv > diff-uncompressed.csv + ddev size diff $BEFORE $AFTER --save_to_png_path diff_visualizations/diff-uncompressed-linux.png > diff-uncompressed.txt + cat diff-uncompressed.txt + echo "# Size diff (uncompressed)" >> $GITHUB_STEP_SUMMARY + echo '```' >> $GITHUB_STEP_SUMMARY + cat diff-uncompressed.txt >> $GITHUB_STEP_SUMMARY + echo '```' >> $GITHUB_STEP_SUMMARY + + - name: Measure disk usage differences from last commit (compressed) + #if: false # Disabled: size difference is not accurate due to dependency sizes not updated + run: | + mkdir -p diff_visualizations + BEFORE=$(git rev-parse HEAD^) + AFTER=$(git rev-parse HEAD) + ddev size diff $BEFORE $AFTER --compressed --csv > diff-compressed.csv + ddev size diff $BEFORE $AFTER --compressed 
--save_to_png_path diff_visualizations/diff-compressed-linux.png > diff-compressed.txt + cat diff-compressed.txt + echo "# Size diff (compressed)" >> $GITHUB_STEP_SUMMARY + echo '```' >> $GITHUB_STEP_SUMMARY + cat diff-compressed.txt >> $GITHUB_STEP_SUMMARY + echo '```' >> $GITHUB_STEP_SUMMARY + + - name: Upload file sizes (uncompressed) + uses: actions/upload-artifact@v4 + with: + name: size-uncompressed.csv + path: size-uncompressed.csv + if-no-files-found: error + + - name: Upload file sizes (compressed) + uses: actions/upload-artifact@v4 + with: + name: size-compressed.csv + path: size-compressed.csv + if-no-files-found: error + + - name: Upload file sizes diff (uncompressed) + #if: false # Disabled: size difference is not accurate due to dependency sizes not updated + uses: actions/upload-artifact@v4 + with: + name: diff-uncompressed.csv + path: diff-uncompressed.csv + if-no-files-found: error + + - name: Upload file sizes diff (compressed) + #if: false # Disabled: size difference is not accurate due to dependency sizes not updated + uses: actions/upload-artifact@v4 + with: + name: diff-compressed.csv + path: diff-compressed.csv + if-no-files-found: error + + - name: Upload status PNGs + uses: actions/upload-artifact@v4 + with: + name: size-visuals + path: status_visualizations/ + if-no-files-found: error + + - name: Upload diff PNGs + #if: false + uses: actions/upload-artifact@v4 + with: + name: diff-visuals + path: diff_visualizations/ + if-no-files-found: error + diff --git a/aerospike/datadog_checks/aerospike/__init__.py b/aerospike/datadog_checks/aerospike/__init__.py index b9c4e94f28c2b..d1b355efb6650 100644 --- a/aerospike/datadog_checks/aerospike/__init__.py +++ b/aerospike/datadog_checks/aerospike/__init__.py @@ -5,3 +5,102 @@ from .aerospike import AerospikeCheck __all__ = ['__version__', 'AerospikeCheck'] + +''' 
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa 
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa 
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa 
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa 
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa + +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa 
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa 
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa 
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa 
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa 
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa + + + +''' \ No newline at end of file diff --git a/ddev/changelog.d/20128.added b/ddev/changelog.d/20128.added new file mode 100644 index 0000000000000..6b95fbdfccc6f --- /dev/null +++ b/ddev/changelog.d/20128.added @@ -0,0 +1,4 @@ +Added new commands to track and analyze size changes in integrations and dependencies: +- **`ddev size status`**: Shows current sizes of all modules. +- **`ddev size diff [COMMIT_BEFORE] [COMMIT_AFTER]`**: Compares size changes between two commits. +- **`ddev size timeline {integration | dependency} [INTEGRATION_NAME/DEPENDENCY_NAME]`**: Visualizes the size evolution of a module over time. 
diff --git a/ddev/pyproject.toml b/ddev/pyproject.toml index 7218fa7a26a98..78378e34e80de 100644 --- a/ddev/pyproject.toml +++ b/ddev/pyproject.toml @@ -40,6 +40,9 @@ dependencies = [ "tomli-w", "tomlkit", "tqdm", + "requests", + "matplotlib", + "squarify" ] dynamic = ["version"] diff --git a/ddev/src/ddev/cli/__init__.py b/ddev/src/ddev/cli/__init__.py index 302f859cd9f54..e16dc80db4146 100644 --- a/ddev/src/ddev/cli/__init__.py +++ b/ddev/src/ddev/cli/__init__.py @@ -18,6 +18,7 @@ from ddev.cli.env import env from ddev.cli.meta import meta from ddev.cli.release import release +from ddev.cli.size import size from ddev.cli.status import status from ddev.cli.test import test from ddev.cli.validate import validate @@ -149,6 +150,7 @@ def ddev( ddev.add_command(status) ddev.add_command(test) ddev.add_command(validate) +ddev.add_command(size) __management_command = os.environ.get('PYAPP_COMMAND_NAME', '') if __management_command: diff --git a/ddev/src/ddev/cli/size/__init__.py b/ddev/src/ddev/cli/size/__init__.py new file mode 100644 index 0000000000000..63ddba4fa4af8 --- /dev/null +++ b/ddev/src/ddev/cli/size/__init__.py @@ -0,0 +1,30 @@ +# (C) Datadog, Inc. 2022-present +# All rights reserved +# Licensed under a 3-clause BSD style license (see LICENSE) + +import click + +from ddev.cli.size.diff import diff +from ddev.cli.size.status import status +from ddev.cli.size.timeline import timeline + + +@click.group() +def size(): + """ + Analyze the download size of integrations and dependencies in various modes. + + This command provides tools to inspect the current status, compare commits and monitor size changes of modules + across different commits, platforms, and Python versions. 
+ + """ + + pass + + +size.add_command(status) +size.add_command(diff) +size.add_command(timeline) + +if __name__ == "__main__": + size() diff --git a/ddev/src/ddev/cli/size/common.py b/ddev/src/ddev/cli/size/common.py new file mode 100644 index 0000000000000..c6a01615c79bd --- /dev/null +++ b/ddev/src/ddev/cli/size/common.py @@ -0,0 +1,447 @@ +# (C) Datadog, Inc. 2022-present +# All rights reserved +# Licensed under a 3-clause BSD style license (see LICENSE) +import os +import re +import shutil +import subprocess +import tempfile +import zipfile +import zlib +from datetime import date +from pathlib import Path +from types import TracebackType +from typing import Dict, List, Literal, Optional, Set, Tuple, Type, Union, cast + +import matplotlib.cm as cm + +# import matplotlib.patheffects as path_effects +import matplotlib.pyplot as plt +import requests +import squarify +from matplotlib.patches import Patch + +from ddev.cli.application import Application + + +def valid_platforms_versions(repo_path: Union[Path, str]) -> Tuple[Set[str], Set[str]]: + resolved_path = os.path.join(repo_path, os.path.join(repo_path, ".deps", "resolved")) + platforms = [] + versions = [] + for file in os.listdir(resolved_path): + platforms.append("_".join(file.split("_")[:-1])) + match = re.search(r"\d+\.\d+", file) + if match: + versions.append(match.group()) + return set(platforms), set(versions) + + +def convert_size(size_bytes: float) -> str: + for unit in [" B", " KB", " MB", " GB"]: + if abs(size_bytes) < 1024: + return str(round(size_bytes, 2)) + unit + size_bytes /= 1024 + return str(round(size_bytes, 2)) + " TB" + + +def is_valid_integration(path: str, included_folder: str, ignored_files: Set[str], git_ignore: List[str]) -> bool: + # It is not an integration + if path.startswith("."): + return False + # It is part of an integration and it is not in the datadog_checks folder + elif included_folder not in path: + return False + # It is an irrelevant file + elif any(ignore in path for 
ignore in ignored_files): + return False + # This file is contained in .gitignore + elif any(ignore in path for ignore in git_ignore): + return False + else: + return True + + +def is_correct_dependency(platform: str, version: str, name: str) -> bool: + return platform in name and version in name + + +def print_csv(app: Application, i: Optional[int], modules: List[Dict[str, Union[str, int, date]]]) -> None: + headers = [k for k in modules[0].keys() if k not in ["Size", "Delta"]] + if not i: + app.display(",".join(headers)) + + for row in modules: + if any(str(value).strip() not in ("", "0") for value in row.values()): + app.display(",".join(format(str(row[h])) for h in headers)) + + +def format(s: str) -> str: + return f'"{s}"' if "," in s else s + + +def print_table(app: Application, mode: str, modules: List[Dict[str, Union[str, int, date]]]) -> None: + modules_table: Dict[str, Dict[int, str]] = {col: {} for col in modules[0].keys() if "(Bytes)" not in col} + for i, row in enumerate(modules): + for key, value in row.items(): + if key in modules_table: + modules_table[key][i] = str(value) + app.display_table(mode, modules_table) + + +def plot_treemap( + modules: List[Dict[str, Union[str, int, date]]], + title: str, + show: bool, + mode: Literal["status", "diff"] = "status", + path: Optional[str] = None, +) -> None: + # Always use absolute value for sizing + sizes = [abs(mod["Size (Bytes)"]) for mod in modules] + + # Setup figure + plt.figure(figsize=(12, 8)) + ax = plt.gca() + ax.set_axis_off() + + # Compute layout + rects = squarify.normalize_sizes(sizes, 100, 100) + rects = squarify.squarify(rects, 0, 0, 100, 100) + + colors = [] + + if mode == "status": + # Normalization by type + integrations = [mod for mod in modules if mod["Type"] == "Integration"] + dependencies = [mod for mod in modules if mod["Type"] == "Dependency"] + + def normalize(mods): + if not mods: + return [] + sizes = [mod["Size (Bytes)"] for mod in mods] + min_size = min(sizes) + max_size = 
max(sizes) + range_size = max_size - min_size or 1 + return [(s - min_size) / range_size for s in sizes] + + norm_int = normalize(integrations) + norm_dep = normalize(dependencies) + + def scale(val, vmin=0.3, vmax=0.85): + return vmin + val * (vmax - vmin) + + cmap_int = cm.get_cmap("Purples") + cmap_dep = cm.get_cmap("Reds") + + for mod in modules: + if mod["Type"] == "Integration": + idx = integrations.index(mod) + colors.append(cmap_int(scale(norm_int[idx], 0.3, 0.6))) + elif mod["Type"] == "Dependency": + idx = dependencies.index(mod) + colors.append(cmap_dep(scale(norm_dep[idx], 0.3, 0.85))) + else: + colors.append("#999999") + + elif mode == "diff": + ''' + # ------- BOTH POSITIVE AND NEGATIVE IN THE SAME TREEMAP -------- + # Reds for positive, Greens for negative + cmap_pos = cm.get_cmap("Reds") + cmap_neg = cm.get_cmap("Greens") + + max_size = max(abs(mod["Size (Bytes)"]) for mod in modules) + + for mod in modules: + value = mod["Size (Bytes)"] + intensity = abs(value) / max_size + color = cmap_pos(intensity) if value > 0 else cmap_neg(intensity) + colors.append(color) + + ''' + cmap_pos = cm.get_cmap("Oranges") + cmap_neg = cm.get_cmap("Blues") + + positives = [mod for mod in modules if mod["Size (Bytes)"] > 0] + negatives = [mod for mod in modules if mod["Size (Bytes)"] < 0] + + sizes_pos = [mod["Size (Bytes)"] for mod in positives] + sizes_neg = [abs(mod["Size (Bytes)"]) for mod in negatives] + + sum_pos = sum(sizes_pos) + sum_neg = sum(sizes_neg) + + canvas_area = 50 * 100 # each half has same max area + + # Determine which side is dominant (fills fully) + if sum_pos >= sum_neg: + # Red fills right, green scales left + norm_sizes_pos = [s / sum_pos * canvas_area for s in sizes_pos] + norm_sizes_neg = [s / sum_pos * canvas_area for s in sizes_neg] + rects_pos = squarify.squarify(norm_sizes_pos, 50, 0, 50, 100) + rects_neg = squarify.squarify(norm_sizes_neg, 0, 0, 50, 100) + else: + # Green fills left, red scales right + norm_sizes_neg = [s / sum_neg * 
canvas_area for s in sizes_neg] + norm_sizes_pos = [s / sum_neg * canvas_area for s in sizes_pos] + rects_neg = squarify.squarify(norm_sizes_neg, 0, 0, 50, 100) + rects_pos = squarify.squarify(norm_sizes_pos, 50, 0, 50, 100) + + rects = rects_neg + rects_pos + modules = negatives + positives + + # Draw colors + def rescale_intensity(val, min_val=0.3, max_val=0.8): + return min_val + (max_val - min_val) * val + + max_size = max(sizes_pos + sizes_neg) or 1 + colors = [] + + for mod in negatives: + raw = abs(mod["Size (Bytes)"]) / max_size + intensity = rescale_intensity(raw) + colors.append(cmap_neg(intensity)) + + for mod in positives: + raw = mod["Size (Bytes)"] / max_size + intensity = rescale_intensity(raw) + colors.append(cmap_pos(intensity)) + + # Draw rectangles and labels + for rect, mod, color in zip(rects, modules, colors, strict=False): + x, y, dx, dy = rect["x"], rect["y"], rect["dx"], rect["dy"] + ax.add_patch(plt.Rectangle((x, y), dx, dy, color=color, ec="white")) + + # Font size config + MIN_FONT_SIZE = 6 + MAX_FONT_SIZE = 12 + FONT_SIZE_SCALE = 0.4 + AVG_SIDE = (dx * dy) ** 0.5 + font_size = max(MIN_FONT_SIZE, min(MAX_FONT_SIZE, AVG_SIDE * FONT_SIZE_SCALE)) + name = mod["Name"] + size_str = f"({mod['Size']})" + + CHAR_WIDTH_FACTOR = 0.1 + CHAR_HEIGHT_FACTOR = 0.5 + name_fits = (len(name) + 2) * font_size * CHAR_WIDTH_FACTOR < dx and dy > font_size * CHAR_HEIGHT_FACTOR + size_fits = (len(size_str) + 2) * font_size * CHAR_WIDTH_FACTOR < dx + both_fit = dy > font_size * CHAR_HEIGHT_FACTOR * 2 + + if dx < 5 or dy < 5: + label = None + elif not name_fits and dx > 5: + max_chars = int(dx / (font_size * CHAR_WIDTH_FACTOR)) - 2 + if 4 <= max_chars: + name = name[: max_chars - 3] + "..." 
+ name_fits = True + + if name_fits and size_fits and both_fit: + label = f"{name}\n{size_str}" + elif name_fits: + label = name + else: + label = None + + if label: + ax.text(x + dx / 2, y + dy / 2, label, va="center", ha="center", fontsize=font_size, color="black") + + ax.set_xlim(0, 100) + ax.set_ylim(0, 100) + + plt.title(title, fontsize=16) + + if mode == "status": + legend_handles = [ + Patch(color=cm.get_cmap("Purples")(0.6), label="Integration"), + Patch(color=cm.get_cmap("Reds")(0.6), label="Dependency"), + ] + elif mode == "diff": + legend_handles = [ + Patch(color=cm.get_cmap("Oranges")(0.7), label="Increase"), + Patch(color=cm.get_cmap("Blues")(0.7), label="Decrease"), + ] + + plt.legend(handles=legend_handles, title="Type", loc="center left", bbox_to_anchor=(1.0, 0.5)) + plt.subplots_adjust(right=0.8) + plt.tight_layout() + if show: + plt.show() + if path: + plt.savefig(path, bbox_inches='tight') + + +def get_dependencies_sizes( + deps: List[str], download_urls: List[str], compressed: bool +) -> List[Dict[str, Union[str, int]]]: + file_data = [] + for dep, url in zip(deps, download_urls, strict=False): + if compressed: + response = requests.head(url) + response.raise_for_status() + size_str = response.headers.get("Content-Length") + if size_str is None: + raise ValueError(f"Missing size for {dep}") + size = int(size_str) + + else: + with requests.get(url, stream=True) as response: + response.raise_for_status() + wheel_data = response.content + + with tempfile.TemporaryDirectory() as tmpdir: + wheel_path = Path(tmpdir) / "package.whl" + with open(wheel_path, "wb") as f: + f.write(wheel_data) + extract_path = Path(tmpdir) / "extracted" + with zipfile.ZipFile(wheel_path, "r") as zip_ref: + zip_ref.extractall(extract_path) + + size = 0 + for dirpath, _, filenames in os.walk(extract_path): + for name in filenames: + file_path = os.path.join(dirpath, name) + size += os.path.getsize(file_path) + file_data.append({"File Path": str(dep), "Type": "Dependency", 
"Name": str(dep), "Size (Bytes)": int(size)}) + return cast(List[Dict[str, Union[str, int]]], file_data) + + +def get_dependencies_list(file_path: str) -> Tuple[List[str], List[str]]: + download_urls = [] + deps = [] + with open(file_path, "r", encoding="utf-8") as file: + file_content = file.read() + for line in file_content.splitlines(): + match = re.search(r"([\w\-\d\.]+) @ (https?://[^\s#]+)", line) + if match: + deps.append(match.group(1)) + download_urls.append(match.group(2)) + else: + raise WrongDependencyFormat("The dependency format 'name @ link' is no longer supported.") + + return deps, download_urls + + +def group_modules( + modules: List[Dict[str, Union[str, int]]], platform: str, version: str, i: Optional[int] +) -> List[Dict[str, Union[str, int, date]]]: + if modules == []: + return [ + { + "Name": "", + "Type": "", + "Size (Bytes)": 0, + "Size": "", + "Platform": "", + "Version": "", + } + ] + grouped_aux: Dict[tuple[str, str], int] = {} + for file in modules: + key = (str(file["Name"]), str(file["Type"])) + grouped_aux[key] = grouped_aux.get(key, 0) + int(file["Size (Bytes)"]) + if i is None: + return [ + {"Name": name, "Type": type, "Size (Bytes)": size, "Size": convert_size(size)} + for (name, type), size in grouped_aux.items() + ] + else: + return [ + { + "Name": name, + "Type": type, + "Size (Bytes)": size, + "Size": convert_size(size), + "Platform": platform, + "Version": version, + } + for (name, type), size in grouped_aux.items() + ] + + +def get_gitignore_files(repo_path: Union[str, Path]) -> List[str]: + gitignore_path = os.path.join(repo_path, ".gitignore") + with open(gitignore_path, "r", encoding="utf-8") as file: + gitignore_content = file.read() + ignored_patterns = [ + line.strip() for line in gitignore_content.splitlines() if line.strip() and not line.startswith("#") + ] + return ignored_patterns + + +def compress(file_path: str) -> int: + compressor = zlib.compressobj() + compressed_size = 0 + # original_size = 
os.path.getsize(file_path) + with open(file_path, "rb") as f: + while chunk := f.read(8192): # Read in 8KB chunks + compressed_chunk = compressor.compress(chunk) + compressed_size += len(compressed_chunk) + compressed_size += len(compressor.flush()) + return compressed_size + + +class WrongDependencyFormat(Exception): + def __init__(self, mensaje: str) -> None: + super().__init__(mensaje) + + +class GitRepo: + def __init__(self, url: Union[Path, str]) -> None: + self.url = url + self.repo_dir: str + + def __enter__(self): + self.repo_dir = tempfile.mkdtemp() + try: + self._run("git status") + except Exception: + # If it is not already a repo + self._run(f"git clone --quiet {self.url} {self.repo_dir}") + return self + + def _run(self, command: str) -> List[str]: + result = subprocess.run(command, shell=True, capture_output=True, text=True, check=True, cwd=self.repo_dir) + return result.stdout.strip().split("\n") + + def get_module_commits( + self, module_path: str, initial: Optional[str], final: Optional[str], time: Optional[str] + ) -> List[str]: + self._run("git fetch origin --quiet") + self._run("git checkout origin/HEAD") + if time: + return self._run(f'git log --since="{time}" --reverse --pretty=format:%H -- {module_path}') + elif not initial and not final: + return self._run(f"git log --reverse --pretty=format:%H -- {module_path}") + elif not final: + return self._run(f"git log --reverse --pretty=format:%H {initial}..HEAD -- {module_path}") + else: + try: + self._run(f"git merge-base --is-ancestor {initial} {final}") + except subprocess.CalledProcessError: + raise ValueError(f"Commit {initial} does not come before {final}") + return self._run(f"git log --reverse --pretty=format:%H {initial}..{final} -- {module_path}") + + def checkout_commit(self, commit: str) -> None: + self._run(f"git fetch --quiet --depth 1 origin {commit}") + self._run(f"git checkout --quiet {commit}") + + def sparse_checkout_commit(self, commit_sha: str, module: str) -> None: + 
self._run("git sparse-checkout init --cone") + self._run(f"git sparse-checkout set {module}") + self._run(f"git checkout {commit_sha}") + + def get_commit_metadata(self, commit: str) -> Tuple[str, str, str]: + result = self._run(f'git log -1 --date=format:"%b %d %Y" --pretty=format:"%ad\n%an\n%s" {commit}') + date, author, message = result + return date, author, message + + def get_creation_commit_module(self, module: str) -> str: + return self._run(f'git log --reverse --format="%H" -- {module}')[0] + + def __exit__( + self, + exception_type: Optional[Type[BaseException]], + exception_value: Optional[BaseException], + exception_traceback: Optional[TracebackType], + ) -> None: + if self.repo_dir and os.path.exists(self.repo_dir): + shutil.rmtree(self.repo_dir) diff --git a/ddev/src/ddev/cli/size/diff.py b/ddev/src/ddev/cli/size/diff.py new file mode 100644 index 0000000000000..a4f7655d2af15 --- /dev/null +++ b/ddev/src/ddev/cli/size/diff.py @@ -0,0 +1,299 @@ +# (C) Datadog, Inc. 2022-present +# All rights reserved +# Licensed under a 3-clause BSD style license (see LICENSE) + +import os +import tempfile +import zipfile +from pathlib import Path +from typing import Dict, List, Optional, Tuple, cast + +import click +import requests +from rich.console import Console +from rich.progress import BarColumn, Progress, SpinnerColumn, TextColumn, TimeElapsedColumn + +from ddev.cli.application import Application + +from .common import ( + GitRepo, + compress, + get_dependencies_list, + get_gitignore_files, + group_modules, + is_correct_dependency, + is_valid_integration, + plot_treemap, + print_csv, + print_table, + valid_platforms_versions, +) + +console = Console() + + +@click.command() +@click.argument("before") +@click.argument("after") +@click.option( + '--platform', help="Target platform (e.g. linux-aarch64). If not specified, all platforms will be analyzed" +) +@click.option('--python', 'version', help="Python version (e.g 3.12). 
If not specified, all versions will be analyzed") +@click.option('--compressed', is_flag=True, help="Measure compressed size") +@click.option('--csv', is_flag=True, help="Output in CSV format") +@click.option('--save_to_png_path', help="Path to save the treemap as PNG") +@click.option( + '--show_gui', + is_flag=True, + help="Display a pop-up window with a treemap showing size differences between the two commits.", +) +@click.pass_obj +def diff( + app: Application, + before: str, + after: str, + platform: Optional[str], + version: Optional[str], + compressed: bool, + csv: bool, + save_to_png_path: str, + show_gui: bool, +) -> None: + """ + Compare the size of integrations and dependencies between two commits. + """ + with Progress( + SpinnerColumn(), + TextColumn("[progress.description]{task.description}"), + BarColumn(), + TimeElapsedColumn(), + transient=True, + ) as progress: + task = progress.add_task("[cyan]Calculating differences...", total=None) + repo_url = app.repo.path + with GitRepo(repo_url) as gitRepo: + try: + valid_platforms, valid_versions = valid_platforms_versions(gitRepo.repo_dir) + if platform and platform not in valid_platforms: + raise ValueError(f"Invalid platform: {platform}") + elif version and version not in valid_versions: + raise ValueError(f"Invalid version: {version}") + if platform is None or version is None: + platforms = valid_platforms if platform is None else [platform] + versions = valid_versions if version is None else [version] + progress.remove_task(task) + + for i, (plat, ver) in enumerate([(p, v) for p in platforms for v in versions]): + if save_to_png_path: + base, ext = os.path.splitext(save_to_png_path) + save_to_png_path = f"{base}_{plat}_{ver}{ext}" + diff_mode( + app, + gitRepo, + before, + after, + plat, + ver, + compressed, + csv, + i, + progress, + save_to_png_path, + show_gui, + ) + else: + progress.remove_task(task) + diff_mode( + app, + gitRepo, + before, + after, + platform, + version, + compressed, + csv, + 
None, + progress, + save_to_png_path, + show_gui, + ) + + except Exception as e: + app.abort(str(e)) + + +def diff_mode( + app: Application, + gitRepo: GitRepo, + before: str, + after: str, + platform: str, + version: str, + compressed: bool, + csv: bool, + i: Optional[int], + progress: Progress, + save_to_png_path: str, + show_gui: bool, +) -> None: + files_b, dependencies_b, files_a, dependencies_a = get_repo_info( + gitRepo, platform, version, before, after, compressed, progress + ) + + integrations = get_diff(files_b, files_a, 'Integration') + dependencies = get_diff(dependencies_b, dependencies_a, 'Dependency') + if integrations + dependencies == [] and not csv: + app.display(f"No size differences were detected between the selected commits for {platform}.") + + grouped_modules = group_modules(integrations + dependencies, platform, version, i) + grouped_modules.sort(key=lambda x: abs(cast(int, x['Size (Bytes)'])), reverse=True) + for module in grouped_modules: + if cast(int, module['Size (Bytes)']) > 0: + module['Size'] = f"+{module['Size']}" + else: + if csv: + print_csv(app, i, grouped_modules) + elif show_gui or save_to_png_path: + print_table(app, "Diff", grouped_modules) + plot_treemap( + grouped_modules, + f"Disk Usage Differences for {platform} and Python version {version}", + show_gui, + "diff", + save_to_png_path, + ) + else: + print_table(app, "Diff", grouped_modules) + + +def get_repo_info( + gitRepo: GitRepo, + platform: str, + version: str, + before: str, + after: str, + compressed: bool, + progress: Progress, +) -> Tuple[Dict[str, int], Dict[str, int], Dict[str, int], Dict[str, int]]: + with progress: + repo = gitRepo.repo_dir + task = progress.add_task("[cyan]Calculating sizes for the first commit...", total=None) + gitRepo.checkout_commit(before) + files_b = get_files(repo, compressed) + dependencies_b = get_dependencies(repo, platform, version, compressed) + progress.remove_task(task) + + task = progress.add_task("[cyan]Calculating sizes for 
the second commit...", total=None) + gitRepo.checkout_commit(after) + files_a = get_files(repo, compressed) + dependencies_a = get_dependencies(repo, platform, version, compressed) + progress.remove_task(task) + + return files_b, dependencies_b, files_a, dependencies_a + + +def get_diff(size_before: Dict[str, int], size_after: Dict[str, int], type: str) -> List[Dict[str, str | int]]: + all_paths = set(size_before.keys()) | set(size_after.keys()) + diff_files = [] + + for path in all_paths: + size_b = size_before.get(path, 0) + size_a = size_after.get(path, 0) + size_delta = size_a - size_b + module = Path(path).parts[0] + if size_delta != 0: + if size_b == 0: + diff_files.append( + { + 'File Path': path, + 'Type': type, + 'Name': module + " (NEW)", + 'Size (Bytes)': size_delta, + } + ) + elif size_a == 0: + diff_files.append( + { + 'File Path': path, + 'Type': type, + 'Name': module + " (DELETED)", + 'Size (Bytes)': size_delta, + } + ) + else: + diff_files.append( + { + 'File Path': path, + 'Type': type, + 'Name': module, + 'Size (Bytes)': size_delta, + } + ) + + return cast(List[Dict[str, str | int]], diff_files) + + +def get_files(repo_path: str, compressed: bool) -> Dict[str, int]: + + ignored_files = {"datadog_checks_dev", "datadog_checks_tests_helper"} + git_ignore = get_gitignore_files(repo_path) + included_folder = "datadog_checks" + os.sep + + file_data = {} + for root, _, files in os.walk(repo_path): + for file in files: + file_path = os.path.join(root, file) + + # Convert the path to a relative format within the repo + relative_path = os.path.relpath(file_path, repo_path) + + # Filter files + if is_valid_integration(relative_path, included_folder, ignored_files, git_ignore): + size = compress(file_path) if compressed else os.path.getsize(file_path) + file_data[relative_path] = size + return file_data + + +def get_dependencies(repo_path: str, platform: str, version: str, compressed: bool) -> Dict[str, int]: + + resolved_path = os.path.join(repo_path, 
os.path.join(repo_path, ".deps", "resolved")) + + for filename in os.listdir(resolved_path): + file_path = os.path.join(resolved_path, filename) + + if os.path.isfile(file_path) and is_correct_dependency(platform, version, filename): + deps, download_urls = get_dependencies_list(file_path) + return get_dependencies_sizes(deps, download_urls, compressed) + return {} + + +def get_dependencies_sizes(deps: List[str], download_urls: List[str], compressed: bool) -> Dict[str, int]: + file_data = {} + for dep, url in zip(deps, download_urls, strict=False): + if compressed: + response = requests.head(url) + response.raise_for_status() + size_str = response.headers.get("Content-Length") + if size_str is None: + raise ValueError(f"Missing size for {dep}") + size = int(size_str) + else: + with requests.get(url, stream=True) as response: + response.raise_for_status() + wheel_data = response.content + + with tempfile.TemporaryDirectory() as tmpdir: + wheel_path = Path(tmpdir) / "package.whl" + with open(wheel_path, "wb") as f: + f.write(wheel_data) + extract_path = Path(tmpdir) / "extracted" + with zipfile.ZipFile(wheel_path, 'r') as zip_ref: + zip_ref.extractall(extract_path) + + size = 0 + for dirpath, _, filenames in os.walk(extract_path): + for name in filenames: + file_path = os.path.join(dirpath, name) + size += os.path.getsize(file_path) + file_data[dep] = size + return file_data diff --git a/ddev/src/ddev/cli/size/status.py b/ddev/src/ddev/cli/size/status.py new file mode 100644 index 0000000000000..9894367e0730c --- /dev/null +++ b/ddev/src/ddev/cli/size/status.py @@ -0,0 +1,151 @@ +# (C) Datadog, Inc. 
2022-present +# All rights reserved +# Licensed under a 3-clause BSD style license (see LICENSE) + +import os +from pathlib import Path +from typing import Dict, List, Optional, Union, cast + +import click +from rich.console import Console + +from ddev.cli.application import Application + +from .common import ( + compress, + get_dependencies_list, + get_dependencies_sizes, + get_gitignore_files, + group_modules, + is_correct_dependency, + is_valid_integration, + plot_treemap, + print_csv, + print_table, + valid_platforms_versions, +) + +# REPO_PATH = Path(__file__).resolve().parents[5] + +console = Console() + + +@click.command() +@click.option( + '--platform', help="Target platform (e.g. linux-aarch64). If not specified, all platforms will be analyzed" +) +@click.option('--python', 'version', help="Python version (e.g 3.12). If not specified, all versions will be analyzed") +@click.option('--compressed', is_flag=True, help="Measure compressed size") +@click.option('--csv', is_flag=True, help="Output in CSV format") +@click.option('--save_to_png_path', help="Path to save the treemap as PNG") +@click.option( + '--show_gui', + is_flag=True, + help="Display a pop-up window with a treemap showing the current size distribution of modules.", +) +@click.pass_obj +def status( + app: Application, + platform: Optional[str], + version: Optional[str], + compressed: bool, + csv: bool, + save_to_png_path: str, + show_gui: bool, +) -> None: + """ + Show the current size of all integrations and dependencies. 
+ """ + try: + repo_path = app.repo.path + valid_platforms, valid_versions = valid_platforms_versions(repo_path) + if platform and platform not in valid_platforms: + raise ValueError(f"Invalid platform: {platform}") + elif version and version not in valid_versions: + raise ValueError(f"Invalid version: {version}") + if platform is None or version is None: + platforms = valid_platforms if platform is None else [platform] + versions = valid_versions if version is None else [version] + for i, (plat, ver) in enumerate([(p, v) for p in platforms for v in versions]): + if save_to_png_path: + base, ext = os.path.splitext(save_to_png_path) + save_to_png_path = f"{base}_{plat}_{ver}{ext}" + status_mode(app, repo_path, plat, ver, compressed, csv, i, save_to_png_path, show_gui) + else: + status_mode(app, repo_path, platform, version, compressed, csv, None, save_to_png_path, show_gui) + + except Exception as e: + app.abort(str(e)) + + +def status_mode( + app: Application, + repo_path: Path, + platform: str, + version: str, + compressed: bool, + csv: bool, + i: Optional[int], + save_to_png_path: str, + show_gui: bool, +) -> None: + with console.status("[cyan]Calculating sizes...", spinner="dots"): + modules = get_files(compressed, repo_path) + get_dependencies(repo_path, platform, version, compressed) + grouped_modules = group_modules(modules, platform, version, i) + grouped_modules.sort(key=lambda x: x['Size (Bytes)'], reverse=True) + + if csv: + print_csv(app, i, grouped_modules) + elif show_gui or save_to_png_path: + print_table(app, "Status", grouped_modules) + plot_treemap( + grouped_modules, + f"Disk Usage Status for {platform} and Python version {version}", + show_gui, + "status", + save_to_png_path, + ) + else: + print_table(app, "Status", grouped_modules) + + +def get_files(compressed: bool, repo_path: Path) -> List[Dict[str, Union[str, int]]]: + + ignored_files = {"datadog_checks_dev", "datadog_checks_tests_helper"} + git_ignore = get_gitignore_files(repo_path) + 
included_folder = "datadog_checks" + os.sep + + file_data = [] + for root, _, files in os.walk(repo_path): + for file in files: + file_path = os.path.join(root, file) + + # Convert the path to a relative format within the repo + relative_path = os.path.relpath(file_path, repo_path) + + # Filter files + if is_valid_integration(relative_path, included_folder, ignored_files, git_ignore): + size = compress(file_path) if compressed else os.path.getsize(file_path) + integration = relative_path.split(os.sep)[0] + file_data.append( + { + "File Path": relative_path, + "Type": "Integration", + "Name": integration, + "Size (Bytes)": int(size), + } + ) + return cast(List[Dict[str, Union[str, int]]], file_data) + + +def get_dependencies( + repo_path: Path, platform: str, version: str, compressed: bool +) -> List[Dict[str, Union[str, int]]]: + + resolved_path = os.path.join(repo_path, os.path.join(repo_path, ".deps", "resolved")) + for filename in os.listdir(resolved_path): + file_path = os.path.join(resolved_path, filename) + if os.path.isfile(file_path) and is_correct_dependency(platform, version, filename): + deps, download_urls = get_dependencies_list(file_path) + return get_dependencies_sizes(deps, download_urls, compressed) + return [] diff --git a/ddev/src/ddev/cli/size/timeline.py b/ddev/src/ddev/cli/size/timeline.py new file mode 100644 index 0000000000000..08ec61be68fb3 --- /dev/null +++ b/ddev/src/ddev/cli/size/timeline.py @@ -0,0 +1,479 @@ +import os +import re +import tempfile +import zipfile +from datetime import date, datetime +from pathlib import Path +from typing import Dict, List, Optional, Set, Tuple, Union, cast + +import click +import matplotlib.pyplot as plt +import requests +from rich.console import Console +from rich.progress import BarColumn, Progress, SpinnerColumn, TextColumn, TimeElapsedColumn + +from ddev.cli.application import Application + +from .common import ( + GitRepo, + WrongDependencyFormat, + compress, + convert_size, + get_gitignore_files, 
+ is_correct_dependency, + is_valid_integration, + print_csv, + print_table, + valid_platforms_versions, +) + +DEPENDENCY_FILE_CHANGE = datetime.strptime("Sep 17 2024", "%b %d %Y").date() +MINIMUM_DATE = datetime.strptime("Apr 3 2024", "%b %d %Y").date() +console = Console() + + +@click.command() +@click.argument('type', type=click.Choice(['integration', 'dependency'])) +@click.argument('name') +@click.argument('initial', required=False) +@click.argument('final', required=False) +@click.option( + '--time', + help="Filter commits starting from a specific date. Accepts both absolute and relative formats, " + "such as '2025-03-01', '2 weeks ago', or 'yesterday'", +) +@click.option('--threshold', help="Only show modules with size differences greater than a threshold in bytes") +@click.option( + '--platform', + help="Target platform to analyze. Only required for dependencies. If not specified, all platforms will be analyzed", +) +@click.option('--compressed', is_flag=True, help="Measure compressed size") +@click.option('--csv', is_flag=True, help="Output results in CSV format") +@click.option('--save_to_png_path', help="Path to save the treemap as PNG") +@click.option( + '--show_gui', + is_flag=True, + help="Display a pop-up window with a line chart showing the size evolution of the selected module over time.", +) +@click.pass_obj +def timeline( + app: Application, + type: str, + name: str, + initial: Optional[str], + final: Optional[str], + time: Optional[str], + threshold: Optional[str], + platform: Optional[str], + compressed: bool, + csv: bool, + save_to_png_path: str, + show_gui: bool, +) -> None: + """ + Show the size evolution of a module (integration or dependency) over time. 
+ """ + with Progress( + SpinnerColumn(), + TextColumn("[progress.description]{task.description}"), + BarColumn(), + TimeElapsedColumn(), + transient=True, + ) as progress: + module = name # module is the name of the integration or the dependency + task = progress.add_task("[cyan]Calculating timeline...", total=None) + url = app.repo.path + with GitRepo(url) as gitRepo: + try: + # with console.status("[cyan]Fetching commits...", spinner="dots"): + folder = module if type == 'integration' else '.deps/resolved' + commits = gitRepo.get_module_commits(folder, initial, final, time) + first_commit = gitRepo.get_creation_commit_module(module) + gitRepo.checkout_commit(commits[-1]) + valid_platforms, _ = valid_platforms_versions(gitRepo.repo_dir) + if platform and platform not in valid_platforms: + raise ValueError(f"Invalid platform: {platform}") + elif commits == [''] and type == "integration" and module_exists(gitRepo.repo_dir, module): + raise ValueError(f"No changes found: {module}") + elif commits == [''] and type == "integration" and not module_exists(gitRepo.repo_dir, module): + raise ValueError(f"Integration {module} not found in latest commit, is the name correct?") + elif ( + type == 'dependency' + and platform + and module not in get_dependency_list(gitRepo.repo_dir, {platform}) + ): + raise ValueError( + f"Dependency {module} not found in latest commit for the platform {platform}, " + "is the name correct?" 
+ ) + elif ( + type == 'dependency' + and not platform + and module not in get_dependency_list(gitRepo.repo_dir, valid_platforms) + ): + raise ValueError(f"Dependency {module} not found in latest commit, is the name correct?") + elif type == 'dependency' and commits == ['']: + raise ValueError(f"No changes found: {module}") + if type == "dependency" and platform is None: + progress.remove_task(task) + for i, plat in enumerate(valid_platforms): + timeline_mode( + app, + gitRepo, + type, + module, + commits, + threshold, + plat, + compressed, + csv, + i, + None, + progress, + save_to_png_path, + show_gui, + ) + else: + progress.remove_task(task) + + timeline_mode( + app, + gitRepo, + type, + module, + commits, + threshold, + platform, + compressed, + csv, + None, + first_commit, + progress, + save_to_png_path, + show_gui, + ) + + except Exception as e: + progress.remove_task(task) + app.abort(str(e)) + + +def timeline_mode( + app: Application, + gitRepo: GitRepo, + type: str, + module: str, + commits: List[str], + threshold: Optional[str], + platform: Optional[str], + compressed: bool, + csv: bool, + i: Optional[int], + first_commit: Optional[str], + progress: Progress, + save_to_png_path: str, + show_gui: bool, +) -> None: + modules = get_repo_info(gitRepo, type, platform, module, commits, compressed, first_commit, progress) + if modules != []: + grouped_modules = group_modules(modules, platform, i) + trimmed_modules = trim_modules(grouped_modules, threshold) + if csv: + print_csv(app, i, trimmed_modules) + elif show_gui or save_to_png_path: + print_table(app, "Timeline for " + module, trimmed_modules) + plot_linegraph(trimmed_modules, module, platform, show_gui, save_to_png_path) + else: + print_table(app, "Timeline for " + module, trimmed_modules) + + +def get_repo_info( + gitRepo: GitRepo, + type: str, + platform: Optional[str], + module: str, + commits: List[str], + compressed: bool, + first_commit: Optional[str], + progress: Progress, +) -> List[Dict[str, 
Union[str, int, date]]]: + with progress: + if type == "integration": + file_data = process_commits(commits, module, gitRepo, progress, platform, type, compressed, first_commit) + else: + file_data = process_commits(commits, module, gitRepo, progress, platform, type, compressed, None) + return file_data + + +def process_commits( + commits: List[str], + module: str, + gitRepo: GitRepo, + progress: Progress, + platform: Optional[str], + type: str, + compressed: bool, + first_commit: Optional[str], +) -> List[Dict[str, Union[str, int, date]]]: + file_data = [] + task = progress.add_task("[cyan]Processing commits...", total=len(commits)) + repo = gitRepo.repo_dir + + folder = module if type == 'integration' else '.deps/resolved' + for commit in commits: + gitRepo.sparse_checkout_commit(commit, folder) + date_str, author, message = gitRepo.get_commit_metadata(commit) + date, message, commit = format_commit_data(date_str, message, commit, first_commit) + if type == 'dependency' and date < MINIMUM_DATE: + continue + elif type == 'dependency': + assert platform is not None + result = get_dependencies(repo, module, platform, commit, date, author, message, compressed) + if result: + file_data.append(result) + elif type == 'integration': + file_data = get_files(repo, module, commit, date, author, message, file_data, compressed) + progress.advance(task) + progress.remove_task(task) + + return file_data + + +def get_files( + repo_path: str, + module: str, + commit: str, + date: date, + author: str, + message: str, + file_data: List[Dict[str, Union[str, int, date]]], + compressed: bool, +) -> List[Dict[str, Union[str, int, date]]]: + if not module_exists(repo_path, module): + file_data.append( + { + "Size (Bytes)": 0, + "Date": date, + "Author": author, + "Commit Message": "(DELETED) " + message, + "Commit SHA": commit, + } + ) + return file_data + + ignored_files = {"datadog_checks_dev", "datadog_checks_tests_helper"} + + git_ignore = get_gitignore_files(repo_path) + 
included_folder = "datadog_checks/" + for root, _, files in os.walk(repo_path): + for file in files: + file_path = os.path.join(root, file) + relative_path = os.path.relpath(file_path, repo_path) + + if is_valid_integration(relative_path, included_folder, ignored_files, git_ignore): + size = compress(file_path) if compressed else os.path.getsize(file_path) + file_data.append( + { + "Size (Bytes)": size, + "Date": date, + "Author": author, + "Commit Message": message, + "Commit SHA": commit, + } + ) + return file_data + + +def get_dependencies( + repo_path: str, + module: str, + platform: str, + commit: str, + date: date, + author: str, + message: str, + compressed: bool, +) -> Optional[Dict[str, Union[str, int, date]]]: + resolved_path = os.path.join(repo_path, ".deps/resolved") + paths = os.listdir(resolved_path) + version = get_version(paths, platform) + for filename in paths: + file_path = os.path.join(resolved_path, filename) + if os.path.isfile(file_path) and is_correct_dependency(platform, version, filename): + download_url = get_dependency(file_path, module) + return ( + get_dependency_size(download_url, commit, date, author, message, compressed) if download_url else None + ) + return None + + +def get_dependency(file_path: str, module: str) -> Optional[str]: + with open(file_path, "r", encoding="utf-8") as file: + file_content = file.read() + for line in file_content.splitlines(): + match = re.search(r"([\w\-\d\.]+) @ (https?://[^\s#]+)", line) + if not match: + raise WrongDependencyFormat("The dependency format 'name @ link' is no longer supported.") + name, url = match.groups() + if name == module: + return url + return None + + +def get_dependency_size( + download_url: str, commit: str, date: date, author: str, message: str, compressed: bool +) -> Dict[str, Union[str, int, date]]: + if compressed: + response = requests.head(download_url) + response.raise_for_status() + size_str = response.headers.get("Content-Length") + if size_str is None: + raise 
ValueError(f"Missing size for commit {commit}") + size = int(size_str) + else: + with requests.get(download_url, stream=True) as response: + response.raise_for_status() + wheel_data = response.content + + with tempfile.TemporaryDirectory() as tmpdir: + wheel_path = Path(tmpdir) / "package.whl" + with open(wheel_path, "wb") as f: + f.write(wheel_data) + extract_path = Path(tmpdir) / "extracted" + with zipfile.ZipFile(wheel_path, 'r') as zip_ref: + zip_ref.extractall(extract_path) + + size = 0 + for dirpath, _, filenames in os.walk(extract_path): + for name in filenames: + file_path = os.path.join(dirpath, name) + size += os.path.getsize(file_path) + + return {"Size (Bytes)": size, "Date": date, "Author": author, "Commit Message": message, "Commit SHA": commit} + + +def get_version(files: List[str], platform: str) -> str: + final_version = '' + for file in files: + if platform in file: + curr_version = file.split('_')[-1] + match = re.search(r"\d+(?:\.\d+)?", curr_version) + version = match.group(0) if match else None + if version and version > final_version: + final_version = version + return final_version if len(final_version) != 1 else 'py' + final_version + + +def group_modules( + modules: List[Dict[str, Union[str, int, date]]], platform: Optional[str], i: Optional[int] +) -> List[Dict[str, Union[str, int, date]]]: + grouped_aux: Dict[tuple[date, str, str, str], int] = {} + + for file in modules: + key = ( + cast(date, file['Date']), + cast(str, file['Author']), + cast(str, file['Commit Message']), + cast(str, file['Commit SHA']), + ) + grouped_aux[key] = grouped_aux.get(key, 0) + cast(int, file["Size (Bytes)"]) + if i is None: + return [ + { + "Commit SHA": commit, + "Size (Bytes)": size, + 'Size': convert_size(size), + 'Delta (Bytes)': 'N/A', + 'Delta': 'N/A', + "Date": date, + "Author": author, + "Commit Message": message, + } + for (date, author, message, commit), size in grouped_aux.items() + ] + else: + assert platform is not None + return [ + { + "Commit 
SHA": commit, + "Size (Bytes)": size, + 'Size': convert_size(size), + 'Delta (Bytes)': 'N/A', + 'Delta': 'N/A', + "Date": date, + "Author": author, + "Commit Message": message, + 'Platform': platform, + } + for (date, author, message, commit), size in grouped_aux.items() + ] + + +def trim_modules( + modules: List[Dict[str, Union[str, int, date]]], threshold: Optional[str] = None +) -> List[Dict[str, Union[str, int, date]]]: + modules[0]['Delta (Bytes)'] = 0 + modules[0]['Delta'] = ' ' + trimmed_modules = [modules[0]] + threshold_value = int(threshold) if threshold else 0 + + for i in range(1, len(modules)): + prev = modules[i - 1] + curr = modules[i] + delta = cast(int, curr['Size (Bytes)']) - cast(int, prev['Size (Bytes)']) + + if abs(delta) > threshold_value or i == len(modules) - 1: + curr['Delta (Bytes)'] = delta + curr['Delta'] = convert_size(delta) + trimmed_modules.append(curr) + + return trimmed_modules + + +def format_commit_data(date_str: str, message: str, commit: str, first_commit: Optional[str]) -> Tuple[date, str, str]: + if commit == first_commit: + message = "(NEW) " + message + message = message if len(message) <= 35 else message[:30].rsplit(" ", 1)[0] + "..." 
+ message.split()[-1] + date = datetime.strptime(date_str, "%b %d %Y").date() + return date, message, commit[:7] + + +def module_exists(path: str, module: str) -> bool: + return os.path.exists(os.path.join(path, module)) + + +def get_dependency_list(path: str, platforms: Set[str]) -> Set[str]: + resolved_path = os.path.join(path, ".deps/resolved") + all_files = os.listdir(resolved_path) + dependencies = set() + + for platform in platforms: + version = get_version(all_files, platform) + for filename in all_files: + file_path = os.path.join(resolved_path, filename) + if os.path.isfile(file_path) and is_correct_dependency(platform, version, filename): + with open(file_path, "r", encoding="utf-8") as file: + matches = re.findall(r"([\w\-\d\.]+) @ https?://[^\s#]+", file.read()) + dependencies.update(matches) + return dependencies + + +def plot_linegraph(modules, module, platform, show, path): + dates = [entry["Date"] for entry in modules] + sizes = [entry["Size (Bytes)"] for entry in modules] + title = f"Disk Usage Evolution of {module} for {platform}" if platform else f"Disk Usage Evolution of {module}" + + plt.figure(figsize=(10, 6)) + plt.plot(dates, sizes, linestyle='-') + plt.title(title) + plt.xlabel("Date") + plt.ylabel("Size (Bytes)") + plt.grid(True) + plt.xticks(rotation=45) + plt.tight_layout() + + if path: + plt.savefig(path) + if show: + plt.show() + plt.close() diff --git a/ddev/tests/cli/size/__init__.py b/ddev/tests/cli/size/__init__.py new file mode 100644 index 0000000000000..3eff9712cbcf5 --- /dev/null +++ b/ddev/tests/cli/size/__init__.py @@ -0,0 +1,3 @@ +# (C) Datadog, Inc. 
2022-present +# All rights reserved +# Licensed under a 3-clause BSD style license (see LICENSE) diff --git a/ddev/tests/cli/size/test_common.py b/ddev/tests/cli/size/test_common.py new file mode 100644 index 0000000000000..bed031f7e0260 --- /dev/null +++ b/ddev/tests/cli/size/test_common.py @@ -0,0 +1,173 @@ +import os +from unittest.mock import MagicMock, mock_open, patch + +from ddev.cli.size.common import ( + compress, + convert_size, + get_dependencies_list, + get_dependencies_sizes, + get_gitignore_files, + group_modules, + is_correct_dependency, + is_valid_integration, + print_csv, + valid_platforms_versions, +) + + +def to_native_path(path: str) -> str: + return path.replace("/", os.sep) + + +def test_valid_platforms_versions(): + filenames = [ + "linux-aarch64_3.12.txt", + "linux-aarch64_py2.txt", + "linux-aarch64_py3.txt", + "linux-x86_64_3.12.txt", + "linux-x86_64_py2.txt", + "linux-x86_64_py3.txt", + "macos-x86_64_3.12.txt", + "macos-x86_64_py2.txt", + "macos-x86_64_py3.txt", + "windows-x86_64_3.12.txt", + "windows-x86_64_py2.txt", + "windows-x86_64_py3.txt", + ] + + expected_platforms = {"linux-aarch64", "linux-x86_64", "macos-x86_64", "windows-x86_64"} + expected_versions = {"3.12"} + with patch("os.listdir", return_value=filenames): + platforms, versions = valid_platforms_versions("fake_repo") + assert platforms == expected_platforms + assert versions == expected_versions + + +def test_is_correct_dependency(): + assert is_correct_dependency("windows-x86_64", "3.12", "windows-x86_64-3.12") + assert not is_correct_dependency("windows-x86_64", "3.12", "linux-x86_64-3.12") + assert not is_correct_dependency("windows-x86_64", "3.13", "windows-x86_64-3.12") + + +def test_convert_size(): + assert convert_size(500) == "500 B" + assert convert_size(1024) == "1.0 KB" + assert convert_size(1048576) == "1.0 MB" + assert convert_size(1073741824) == "1.0 GB" + + +def test_is_valid_integration(): + included_folder = "datadog_checks" + os.sep + ignored_files = 
{"datadog_checks_dev", "datadog_checks_tests_helper"} + git_ignore = [".git", "__pycache__"] + + assert is_valid_integration(to_native_path("datadog_checks/example.py"), included_folder, ignored_files, git_ignore) + assert not is_valid_integration(to_native_path("__pycache__/file.py"), included_folder, ignored_files, git_ignore) + assert not is_valid_integration( + to_native_path("datadog_checks_dev/example.py"), included_folder, ignored_files, git_ignore + ) + assert not is_valid_integration(to_native_path(".git/config"), included_folder, ignored_files, git_ignore) + + +def test_get_dependencies_list(): + file_content = ( + "dependency1 @ https://example.com/dependency1.whl\ndependency2 @ https://example.com/dependency2.whl" + ) + mock_open_obj = mock_open(read_data=file_content) + with patch("builtins.open", mock_open_obj): + deps, urls = get_dependencies_list("fake_path") + assert deps == ["dependency1", "dependency2"] + assert urls == ["https://example.com/dependency1.whl", "https://example.com/dependency2.whl"] + + +def test_get_dependencies_sizes(): + mock_response = MagicMock() + mock_response.status_code = 200 + mock_response.headers = {"Content-Length": "12345"} + with patch("requests.head", return_value=mock_response): + file_data = get_dependencies_sizes(["dependency1"], ["https://example.com/dependency1.whl"], True) + assert file_data == [ + {"File Path": "dependency1", "Type": "Dependency", "Name": "dependency1", "Size (Bytes)": 12345} + ] + + +def test_group_modules(): + modules = [ + {"Name": "module1", "Type": "A", "Size (Bytes)": 1500}, + {"Name": "module2", "Type": "B", "Size (Bytes)": 3000}, + {"Name": "module1", "Type": "A", "Size (Bytes)": 2500}, + {"Name": "module3", "Type": "A", "Size (Bytes)": 4000}, + ] + + platform = "linux-aarch64" + version = "3.12" + + expected_output = [ + { + "Name": "module1", + "Type": "A", + "Size (Bytes)": 4000, + "Size": "3.91 KB", + "Platform": "linux-aarch64", + "Version": "3.12", + }, + { + "Name": "module2", 
+ "Type": "B", + "Size (Bytes)": 3000, + "Size": "2.93 KB", + "Platform": "linux-aarch64", + "Version": "3.12", + }, + { + "Name": "module3", + "Type": "A", + "Size (Bytes)": 4000, + "Size": "3.91 KB", + "Platform": "linux-aarch64", + "Version": "3.12", + }, + ] + + assert group_modules(modules, platform, version, 0) == expected_output + + +def test_get_gitignore_files(): + mock_gitignore = f"__pycache__{os.sep}\n*.log\n" # Sample .gitignore file + repo_path = "fake_repo" + with patch("builtins.open", mock_open(read_data=mock_gitignore)): + with patch("os.path.exists", return_value=True): + ignored_patterns = get_gitignore_files(repo_path) + assert ignored_patterns == ["__pycache__" + os.sep, "*.log"] + + +def test_compress(): + fake_content = b'a' * 16384 + original_size = len(fake_content) + + m = mock_open(read_data=fake_content) + with patch("builtins.open", m): + compressed_size = compress(to_native_path("fake/path/file.py")) + + assert isinstance(compressed_size, int) + assert compressed_size > 0 + assert compressed_size < original_size + + +def test_print_csv(): + mock_app = MagicMock() + modules = [ + {"Name": "module1", "Size B": 123, "Size": "2 B"}, + {"Name": "module,with,comma", "Size B": 456, "Size": "2 B"}, + ] + + print_csv(mock_app, i=0, modules=modules) + + expected_calls = [ + (("Name,Size B",),), + (('module1,123',),), + (('"module,with,comma",456',),), + ] + + actual_calls = mock_app.display.call_args_list + assert actual_calls == expected_calls diff --git a/ddev/tests/cli/size/test_diff.py b/ddev/tests/cli/size/test_diff.py new file mode 100644 index 0000000000000..038c4b227d9d1 --- /dev/null +++ b/ddev/tests/cli/size/test_diff.py @@ -0,0 +1,291 @@ +# (C) Datadog, Inc. 
2022-present +# All rights reserved +# Licensed under a 3-clause BSD style license (see LICENSE) + +import os +from unittest.mock import MagicMock, mock_open, patch + +import pytest + +from ddev.cli.size.diff import get_dependencies, get_diff, get_files + + +def to_native_path(path: str) -> str: + return path.replace("/", os.sep) + + +def test_get_compressed_files(): + mock_repo_path = "root" + + mock_files = [ + (os.path.join("root", "integration", "datadog_checks"), [], ["file1.py", "file2.py"]), + (os.path.join("root", "integration_b", "datadog_checks"), [], ["file3.py"]), + ("root", [], ["ignored.py"]), + ] + + def fake_compress(file_path): + return 1000 + + fake_gitignore = {"ignored.py"} + + with ( + patch("os.walk", return_value=mock_files), + patch("os.path.relpath", side_effect=lambda path, _: path.replace(f"root{os.sep}", "")), + patch("os.path.exists", return_value=True), + patch("builtins.open", mock_open(read_data="__pycache__/\n*.log\n")), + patch("ddev.cli.size.diff.get_gitignore_files", return_value=fake_gitignore), + patch( + "ddev.cli.size.diff.is_valid_integration", + side_effect=lambda path, folder, ignored, git_ignore: path.startswith("integration"), + ), + patch("ddev.cli.size.diff.compress", side_effect=fake_compress), + ): + + result = get_files(mock_repo_path, True) + + expected = { + to_native_path("integration/datadog_checks/file1.py"): 1000, + to_native_path("integration/datadog_checks/file2.py"): 1000, + to_native_path("integration_b/datadog_checks/file3.py"): 1000, + } + + assert result == expected + + +def test_get_compressed_dependencies(terminal): + platform = "windows-x86_64" + version = "3.12" + + fake_file_content = ( + "dependency1 @ https://example.com/dependency1.whl\ndependency2 @ https://example.com/dependency2.whl" + ) + + mock_head_response = MagicMock() + mock_head_response.status_code = 200 + mock_head_response.headers = {"Content-Length": "12345"} + + mock_get_response = MagicMock() + 
mock_get_response.__enter__.return_value = mock_get_response # for use in `with` block + mock_get_response.status_code = 200 + mock_get_response.headers = {"Content-Length": "12345"} + mock_get_response.content = b"Fake wheel file content" + + mock_repo_path = "root" + + with ( + patch("os.path.exists", return_value=True), + patch("os.path.isdir", return_value=True), + patch("os.listdir", return_value=[f"{platform}-{version}"]), + patch("os.path.isfile", return_value=True), + patch("builtins.open", mock_open(read_data=fake_file_content)), + patch("requests.head", return_value=mock_head_response), + patch("requests.get", return_value=mock_get_response), + ): + file_data = get_dependencies(mock_repo_path, platform, version, True) + + assert file_data == { + "dependency1": 12345, + "dependency2": 12345, + } + + +def test_get_diff(): + size_before = { + to_native_path("integration/foo.py"): 1000, + to_native_path("integration/bar.py"): 2000, + to_native_path("integration/deleted.py"): 1500, + } + size_after = { + to_native_path("integration/foo.py"): 1200, + to_native_path("integration/bar.py"): 2000, + to_native_path("integration/new.py"): 800, + } + + expected = [ + { + "File Path": to_native_path("integration/foo.py"), + "Type": "Integration", + "Name": "integration", + "Size (Bytes)": 200, + }, + { + "File Path": to_native_path("integration/deleted.py"), + "Type": "Integration", + "Name": "integration (DELETED)", + "Size (Bytes)": -1500, + }, + { + "File Path": to_native_path("integration/new.py"), + "Type": "Integration", + "Name": "integration (NEW)", + "Size (Bytes)": 800, + }, + ] + + result = get_diff(size_before, size_after, "Integration") + assert sorted(result, key=lambda x: x["File Path"]) == sorted(expected, key=lambda x: x["File Path"]) + + +@pytest.fixture +def mock_size_diff_dependencies(): + mock_git_repo = MagicMock() + mock_git_repo.repo_dir = "fake_repo" + + def get_compressed_files_side_effect(_, __): + get_compressed_files_side_effect.counter += 
1 + if get_compressed_files_side_effect.counter % 2 == 1: + return {"path1.py": 1000} # before + else: + return {"path1.py": 1200, "path2.py": 500} # after + + get_compressed_files_side_effect.counter = 0 + + def get_compressed_dependencies_side_effect(_, __, ___, ____): + get_compressed_dependencies_side_effect.counter += 1 + if get_compressed_dependencies_side_effect.counter % 2 == 1: + return {"dep1.whl": 2000} # before + else: + return {"dep1.whl": 2500, "dep2.whl": 1000} # after + + get_compressed_dependencies_side_effect.counter = 0 + + with ( + patch( + "ddev.cli.size.diff.valid_platforms_versions", + return_value=({'linux-x86_64', 'macos-x86_64', 'linux-aarch64', 'windows-x86_64'}, {'3.12'}), + ), + patch("ddev.cli.size.diff.GitRepo.__enter__", return_value=mock_git_repo), + patch("ddev.cli.size.diff.GitRepo.__exit__", return_value=None), + patch("ddev.cli.size.diff.GitRepo.checkout_commit"), + patch("tempfile.mkdtemp", return_value="fake_repo"), + patch("ddev.cli.size.diff.get_files", side_effect=get_compressed_files_side_effect), + patch("ddev.cli.size.diff.get_dependencies", side_effect=get_compressed_dependencies_side_effect), + patch("ddev.cli.size.common.group_modules", side_effect=lambda m, *_: m), + patch("ddev.cli.size.common.print_csv"), + patch("ddev.cli.size.common.print_table"), + patch("ddev.cli.size.common.plot_treemap"), + ): + yield + + +def test_diff_no_args(ddev, mock_size_diff_dependencies): + result = ddev('size', 'diff', 'commit1', 'commit2', '--compressed') + assert result.exit_code == 0 + + +def test_diff_with_platform_and_version(ddev, mock_size_diff_dependencies): + result = ddev( + 'size', 'diff', 'commit1', 'commit2', '--platform', 'linux-aarch64', '--python', '3.12', '--compressed' + ) + assert result.exit_code == 0 + + +def test_diff_csv(ddev, mock_size_diff_dependencies): + result = ddev( + 'size', 'diff', 'commit1', 'commit2', '--platform', 'linux-aarch64', '--python', '3.12', '--compressed', '--csv' + ) + assert 
result.exit_code == 0 + + +def test_diff_no_differences(ddev): + fake_repo = MagicMock() + fake_repo.repo_dir = "fake_repo" + + with ( + patch("ddev.cli.size.diff.GitRepo.__enter__", return_value=fake_repo), + patch( + "ddev.cli.size.diff.valid_platforms_versions", + return_value=({'linux-x86_64', 'macos-x86_64', 'linux-aarch64', 'windows-x86_64'}, {'3.12'}), + ), + patch("ddev.cli.size.diff.GitRepo.__exit__", return_value=None), + patch.object(fake_repo, "checkout_commit"), + patch("tempfile.mkdtemp", return_value="fake_repo"), + patch("os.path.exists", return_value=True), + patch("os.path.isdir", return_value=True), + patch("os.path.isfile", return_value=True), + patch("os.listdir", return_value=["linux-aarch64_3.12"]), + patch( + "ddev.cli.size.diff.get_files", + return_value={ + "path1.py": 1000, + "path2.py": 500, + }, + ), + patch( + "ddev.cli.size.diff.get_dependencies", + return_value={ + "dep1.whl": 2000, + "dep2.whl": 1000, + }, + ), + patch("ddev.cli.size.common.group_modules", side_effect=lambda m, *_: m), + ): + result = ddev( + 'size', 'diff', 'commit1', 'commit2', '--platform', 'linux-aarch64', '--python', '3.12', '--compressed' + ) + print(result.output) + print(result.exit_code) + + assert result.exit_code == 0 + + +def test_diff_invalid_platform(ddev): + mock_git_repo = MagicMock() + mock_git_repo.repo_dir = "fake_repo" + mock_git_repo.get_module_commits.return_value = ["commit1", "commit2"] + mock_git_repo.get_commit_metadata.side_effect = lambda c: ("Apr 4 2025", "Fix dep", c) + mock_git_repo.__enter__.return_value = mock_git_repo + with ( + patch("ddev.cli.size.diff.GitRepo", return_value=mock_git_repo), + patch( + "ddev.cli.size.timeline.valid_platforms_versions", + return_value=({'linux-x86_64', 'macos-x86_64', 'linux-aarch64', 'windows-x86_64'}, {'3.12'}), + ), + ): + result = ddev('size', 'diff', 'commit1', 'commit2', '--platform', 'linux', '--python', '3.12', '--compressed') + assert result.exit_code != 0 + + +def 
test_diff_invalid_version(ddev): + mock_git_repo = MagicMock() + mock_git_repo.repo_dir = "fake_repo" + mock_git_repo.get_module_commits.return_value = ["commit1", "commit2"] + mock_git_repo.get_commit_metadata.side_effect = lambda c: ("Apr 4 2025", "Fix dep", c) + mock_git_repo.__enter__.return_value = mock_git_repo + + with ( + patch("ddev.cli.size.diff.GitRepo", return_value=mock_git_repo), + patch( + "ddev.cli.size.timeline.valid_platforms_versions", + return_value=({'linux-x86_64', 'macos-x86_64', 'linux-aarch64', 'windows-x86_64'}, {'3.12'}), + ), + ): + result = ddev( + 'size', + 'diff', + 'commit1', + 'commit2', + '--platform', + 'linux-aarch64', + '--python', + '2.10', # invalid + '--compressed', + ) + assert result.exit_code != 0 + + +def test_diff_invalid_platform_and_version(ddev): + mock_git_repo = MagicMock() + mock_git_repo.repo_dir = "fake_repo" + mock_git_repo.get_module_commits.return_value = ["commit1", "commit2"] + mock_git_repo.get_commit_metadata.side_effect = lambda c: ("Apr 4 2025", "Fix dep", c) + mock_git_repo.__enter__.return_value = mock_git_repo + with ( + patch("ddev.cli.size.diff.GitRepo", return_value=mock_git_repo), + patch( + "ddev.cli.size.timeline.valid_platforms_versions", + return_value=({'linux-x86_64', 'macos-x86_64', 'linux-aarch64', 'windows-x86_64'}, {'3.12'}), + ), + ): + result = ddev('size', 'diff', 'commit1', 'commit2', '--platform', 'linux', '--python', '2.10', '--compressed') + assert result.exit_code != 0 diff --git a/ddev/tests/cli/size/test_status.py b/ddev/tests/cli/size/test_status.py new file mode 100644 index 0000000000000..967018f8b7c81 --- /dev/null +++ b/ddev/tests/cli/size/test_status.py @@ -0,0 +1,176 @@ +# (C) Datadog, Inc. 
2022-present +# All rights reserved +# Licensed under a 3-clause BSD style license (see LICENSE) + +import os +from pathlib import Path +from unittest.mock import MagicMock, mock_open, patch + +import pytest + +from ddev.cli.size.status import ( + get_dependencies, + get_files, +) + + +def to_native_path(path: str) -> str: + return path.replace("/", os.sep) + + +def test_get_files_compressed(): + mock_files = [ + (os.path.join("root", "integration", "datadog_checks"), [], ["file1.py", "file2.py"]), + (os.path.join("root", "integration_b", "datadog_checks"), [], ["file3.py"]), + ("root", [], ["ignored.py"]), + ] + mock_repo_path = "root" + + def fake_compress(file_path): + return 1000 + + fake_gitignore = {"ignored.py"} + + with ( + patch("os.walk", return_value=mock_files), + patch("os.path.relpath", side_effect=lambda path, _: path.replace(f"root{os.sep}", "")), + patch("ddev.cli.size.status.get_gitignore_files", return_value=fake_gitignore), + patch( + "ddev.cli.size.status.is_valid_integration", + side_effect=lambda path, folder, ignored, git_ignore: path.startswith("integration"), + ), + patch("ddev.cli.size.status.compress", side_effect=fake_compress), + ): + result = get_files(True, mock_repo_path) + + expected = [ + { + "File Path": to_native_path("integration/datadog_checks/file1.py"), + "Type": "Integration", + "Name": "integration", + "Size (Bytes)": 1000, + }, + { + "File Path": to_native_path("integration/datadog_checks/file2.py"), + "Type": "Integration", + "Name": "integration", + "Size (Bytes)": 1000, + }, + { + "File Path": to_native_path("integration_b/datadog_checks/file3.py"), + "Type": "Integration", + "Name": "integration_b", + "Size (Bytes)": 1000, + }, + ] + + assert result == expected + + +def test_get_compressed_dependencies(): + platform = "windows-x86_64" + version = "3.12" + + fake_file_content = ( + "dependency1 @ https://example.com/dependency1.whl\ndependency2 @ https://example.com/dependency2.whl" + ) + + mock_response = MagicMock() 
+ mock_response.status_code = 200 + mock_response.headers = {"Content-Length": "12345"} + mock_repo_path = "root" + + with ( + patch("os.path.exists", return_value=True), + patch("os.path.isdir", return_value=True), + patch("os.listdir", return_value=[f"{platform}-{version}"]), + patch("os.path.isfile", return_value=True), + patch("builtins.open", mock_open(read_data=fake_file_content)), + patch("requests.head", return_value=mock_response), + ): + file_data = get_dependencies(mock_repo_path, platform, version, True) + + assert file_data == [ + {"File Path": "dependency1", "Type": "Dependency", "Name": "dependency1", "Size (Bytes)": 12345}, + {"File Path": "dependency2", "Type": "Dependency", "Name": "dependency2", "Size (Bytes)": 12345}, + ] + + +@pytest.fixture() +def mock_size_status(): + fake_repo_path = Path(os.path.join("fake_root")).resolve() + + mock_walk = [(os.path.join(str(fake_repo_path), "datadog_checks", "my_check"), [], ["__init__.py"])] + + mock_app = MagicMock() + mock_app.repo.path = fake_repo_path + + with ( + patch("ddev.cli.size.status.get_gitignore_files", return_value=set()), + patch( + "ddev.cli.size.status.valid_platforms_versions", + return_value=({"linux-x86_64", "macos-x86_64", "linux-aarch64", "windows-x86_64"}, {"3.12"}), + ), + patch("ddev.cli.size.status.compress", return_value=1234), + patch( + "ddev.cli.size.status.get_dependencies_list", return_value=(["dep1"], {"dep1": "https://example.com/dep1"}) + ), + patch( + "ddev.cli.size.status.get_dependencies_sizes", + return_value=[{"File Path": "dep1.whl", "Type": "Dependency", "Name": "dep1", "Size (Bytes)": 5678}], + ), + patch("os.path.relpath", side_effect=lambda path, _: path.replace(f"fake_root{os.sep}", "")), + patch("ddev.cli.size.status.is_valid_integration", return_value=True), + patch("ddev.cli.size.status.is_correct_dependency", return_value=True), + patch("ddev.cli.size.status.print_csv"), + patch("ddev.cli.size.status.print_table"), + 
patch("ddev.cli.size.status.plot_treemap"), + patch("os.walk", return_value=mock_walk), + patch("os.listdir", return_value=["fake_dep.whl"]), + patch("os.path.isfile", return_value=True), + ): + yield mock_app + + +def test_status_no_args(ddev, mock_size_status): + result = ddev("size", "status", "--compressed") + assert result.exit_code == 0 + + +def test_status(ddev, mock_size_status): + result = ddev("size", "status", "--platform", "linux-aarch64", "--python", "3.12", "--compressed") + print(result.output) + assert result.exit_code == 0 + + +def test_status_csv(ddev, mock_size_status): + result = ddev("size", "status", "--platform", "linux-aarch64", "--python", "3.12", "--compressed", "--csv") + print(result.output) + assert result.exit_code == 0 + + +def test_status_wrong_platform(ddev): + with patch( + "ddev.cli.size.timeline.valid_platforms_versions", + return_value=({"linux-x86_64", "macos-x86_64", "linux-aarch64", "windows-x86_64"}, {"3.12"}), + ): + result = ddev("size", "status", "--platform", "linux", "--python", "3.12", "--compressed") + assert result.exit_code != 0 + + +def test_status_wrong_version(ddev): + with patch( + "ddev.cli.size.timeline.valid_platforms_versions", + return_value=({"linux-x86_64", "macos-x86_64", "linux-aarch64", "windows-x86_64"}, {"3.12"}), + ): + result = ddev("size", "status", "--platform", "linux-aarch64", "--python", "2.10", "--compressed") + assert result.exit_code != 0 + + +def test_status_wrong_plat_and_version(ddev): + with patch( + "ddev.cli.size.timeline.valid_platforms_versions", + return_value=({"linux-x86_64", "macos-x86_64", "linux-aarch64", "windows-x86_64"}, {"3.12"}), + ): + result = ddev("size", "status", "--platform", "linux", "--python", "2.10", "--compressed") + assert result.exit_code != 0 diff --git a/ddev/tests/cli/size/test_timeline.py b/ddev/tests/cli/size/test_timeline.py new file mode 100644 index 0000000000000..7e7f7abc163b8 --- /dev/null +++ b/ddev/tests/cli/size/test_timeline.py @@ -0,0 +1,442 @@ 
+import os +from datetime import datetime +from pathlib import Path +from unittest.mock import MagicMock, mock_open, patch + +import pytest + +from ddev.cli.size.timeline import ( + format_commit_data, + get_dependencies, + get_dependency, + get_dependency_size, + get_files, + get_version, + group_modules, + trim_modules, +) + + +def test_get_compressed_files(): + with ( + patch("os.walk", return_value=[(os.path.join("fake_repo", "int1"), [], ["int1.py"])]), + patch("os.path.relpath", return_value=os.path.join("int1", "int1.py")), + patch("os.path.exists", return_value=True), + patch("ddev.cli.size.timeline.get_gitignore_files", return_value=set()), + patch("ddev.cli.size.timeline.is_valid_integration", return_value=True), + patch("ddev.cli.size.timeline.compress", return_value=1234), + ): + result = get_files("fake_repo", "int1", "abc1234", datetime(2025, 4, 4).date(), "auth", "Added int1", [], True) + assert result == [ + { + "Size (Bytes)": 1234, + "Date": datetime(2025, 4, 4).date(), + "Author": "auth", + "Commit Message": "Added int1", + "Commit SHA": "abc1234", + } + ] + + +def test_get_compressed_files_deleted_only(): + repo_path = "fake_repo" + module = "foo" + commit = "abc1234" + date = datetime.strptime("Apr 5 2025", "%b %d %Y").date() + author = "Author" + message = "deleted module" + + with ( + patch("ddev.cli.size.timeline.get_gitignore_files", return_value=set()), + patch("os.walk", return_value=[]), + patch("os.path.relpath", side_effect=lambda path, _: path.replace(f"{repo_path}{os.sep}", "")), + patch("os.path.exists", return_value=False), + ): + file_data = get_files(repo_path, module, commit, date, author, message, [], True) + + assert file_data == [ + { + "Size (Bytes)": 0, + "Date": date, + "Author": author, + "Commit Message": "(DELETED) " + message, + "Commit SHA": commit, + } + ] + + +def test_get_version(): + files = ["linux-x86_64_3.12.txt", "linux-x86_64_3.10.txt"] + version = get_version(files, "linux-x86_64") + assert version == "3.12" 
+ + +def test_format_commit_data(): + date, message, commit = format_commit_data( + "Apr 4 2025", "this is a very long commit message that should be trimmed (#1234)", "abc1234def", "abc1234def" + ) + expected_date = datetime.strptime("Apr 4 2025", "%b %d %Y").date() + expected_message = "(NEW) this is a very long...(#1234)" + expected_commit = "abc1234" + assert date == expected_date + assert message == expected_message + assert commit == expected_commit + + +def test_trim_modules_keep_some_remove_some(): + modules = [ + {"Size (Bytes)": 1000}, + {"Size (Bytes)": 1100}, # diff = 100 -> should be removed if threshold = 200 + {"Size (Bytes)": 1400}, # diff = 300 -> should be kept + ] + expected = [ + {"Size (Bytes)": 1000, "Delta (Bytes)": 0, "Delta": " "}, + {"Size (Bytes)": 1400, "Delta (Bytes)": 300, "Delta": "300 B"}, + ] + trimmed = trim_modules(modules, threshold=200) + assert trimmed == expected + + +def test_group_modules(): + modules = [ + { + "Size (Bytes)": 1000, + "Date": datetime(2025, 4, 4).date(), + "Author": "A", + "Commit Message": "msg", + "Commit SHA": "c1", + }, + { + "Size (Bytes)": 500, + "Date": datetime(2025, 4, 4).date(), + "Author": "A", + "Commit Message": "msg", + "Commit SHA": "c1", + }, + { + "Size (Bytes)": 1500, + "Date": datetime(2025, 4, 5).date(), + "Author": "A", + "Commit Message": "msg2", + "Commit SHA": "c2", + }, + ] + expected = [ + { + "Commit SHA": "c1", + "Size (Bytes)": 1500, + "Size": "1.46 KB", + "Delta (Bytes)": "N/A", + "Delta": "N/A", + "Date": datetime(2025, 4, 4).date(), + "Author": "A", + "Commit Message": "msg", + "Platform": "linux-x86_64", + }, + { + "Commit SHA": "c2", + "Size (Bytes)": 1500, + "Size": "1.46 KB", + "Delta (Bytes)": "N/A", + "Delta": "N/A", + "Date": datetime(2025, 4, 5).date(), + "Author": "A", + "Commit Message": "msg2", + "Platform": "linux-x86_64", + }, + ] + grouped = group_modules(modules, "linux-x86_64", 0) + assert grouped == expected + + +def test_get_dependency(): + content = """dep1 @ 
https://example.com/dep1.whl +dep2 @ https://example.com/dep2.whl""" + with patch("builtins.open", mock_open(read_data=content)): + url = get_dependency(Path("some") / "path" / "file.txt", "dep2") + assert url == "https://example.com/dep2.whl" + + +def make_mock_response(size): + mock_response = MagicMock() + mock_response.__enter__.return_value = mock_response + mock_response.headers = {"Content-Length": size} + mock_response.raise_for_status = lambda: None + return mock_response + + +def test_get_dependency_size(): + mock_response = make_mock_response("45678") + with patch("requests.head", return_value=mock_response): + info = get_dependency_size( + "https://example.com/file.whl", "abc1234", datetime(2025, 4, 4).date(), "auth", "Fixed bug", True + ) + assert info == { + "Size (Bytes)": 45678, + "Date": datetime(2025, 4, 4).date(), + "Author": "auth", + "Commit Message": "Fixed bug", + "Commit SHA": "abc1234", + } + + +def test_get_compressed_dependencies(): + with ( + patch("os.path.exists", return_value=True), + patch("os.path.isdir", return_value=True), + patch("os.path.isfile", return_value=True), + patch("os.listdir", return_value=["linux-x86_64_3.12.txt"]), + patch("ddev.cli.size.timeline.get_dependency", return_value="https://example.com/dep1.whl"), + patch("ddev.cli.size.timeline.requests.head", return_value=make_mock_response("12345")), + ): + result = get_dependencies( + "fake_repo", "dep1", "linux-x86_64", "abc1234", datetime(2025, 4, 4).date(), "auth", "Added dep1", True + ) + assert result == { + "Size (Bytes)": 12345, + "Date": datetime(2025, 4, 4).date(), + "Author": "auth", + "Commit Message": "Added dep1", + "Commit SHA": "abc1234", + } + + +@pytest.fixture +def mock_timeline_gitrepo(): + mock_git_repo = MagicMock() + mock_git_repo.repo_dir = "fake_repo" + mock_git_repo.get_module_commits.return_value = ["commit1", "commit2"] + mock_git_repo.get_creation_commit_module.return_value = "commit1" + mock_git_repo.get_commit_metadata.side_effect = 
lambda c: ("Apr 4 2025", "Initial commit", c) + + with ( + patch("ddev.cli.size.timeline.GitRepo.__enter__", return_value=mock_git_repo), + patch("ddev.cli.size.timeline.GitRepo.__exit__", return_value=None), + patch("ddev.cli.size.timeline.GitRepo.sparse_checkout_commit"), + patch("ddev.cli.size.timeline.get_gitignore_files", return_value=set()), + patch("ddev.cli.size.timeline.compress", return_value=1234), + patch("os.walk", return_value=[(Path("/tmp") / "fake_repo" / "int", [], ["file1.py"])]), + patch("os.path.exists", return_value=True), + patch("ddev.cli.size.timeline.group_modules", side_effect=lambda m, *_: m), + patch("ddev.cli.size.timeline.trim_modules", side_effect=lambda m, *_: m), + patch("ddev.cli.size.timeline.print_table"), + patch("ddev.cli.size.timeline.print_csv"), + patch("os.path.exists", return_value=True), + patch("os.path.isdir", return_value=True), + patch("os.path.isfile", return_value=True), + patch("os.listdir", return_value=["linux-x86_64_3.12_dep1.whl", "linux-x86_64_3.12_dep2.whl"]), + ): + yield + + +@pytest.fixture +def app(): + mock_app = MagicMock() + mock_app.repo.path = "fake_repo" + return mock_app + + +def test_timeline_integration_compressed(ddev, mock_timeline_gitrepo, app): + result = ddev("size", "timeline", "integration", "int1", "commit1", "commit2", "--compressed", obj=app) + assert result.exit_code == 0 + + +@pytest.fixture +def mock_timeline_dependencies(): + mock_git_repo = MagicMock() + mock_git_repo.repo_dir = "fake_repo" + mock_git_repo.get_module_commits.return_value = ["commit1", "commit2"] + mock_git_repo.get_commit_metadata.side_effect = lambda c: ("Apr 4 2025", "Fix dep", c) + + with ( + patch("ddev.cli.size.timeline.GitRepo.__enter__", return_value=mock_git_repo), + patch("ddev.cli.size.timeline.GitRepo.__exit__", return_value=None), + patch("ddev.cli.size.timeline.GitRepo.sparse_checkout_commit"), + patch( + "ddev.cli.size.timeline.valid_platforms_versions", + return_value=({'linux-x86_64', 
'macos-x86_64', 'linux-aarch64', 'windows-x86_64'}, {'3.12'}), + ), + patch("ddev.cli.size.timeline.get_dependency_list", return_value={"dep1"}), + patch("os.path.exists", return_value=True), + patch("os.path.isdir", return_value=True), + patch("os.listdir", return_value=["linux-x86_64-3.12"]), + patch("os.path.isfile", return_value=True), + patch("ddev.cli.size.timeline.get_gitignore_files", return_value=set()), + patch("ddev.cli.size.timeline.get_dependency", return_value="https://example.com/dep1.whl"), + patch("ddev.cli.size.timeline.requests.head") as mock_head, + patch("ddev.cli.size.timeline.group_modules", side_effect=lambda m, *_: m), + patch("ddev.cli.size.timeline.trim_modules", side_effect=lambda m, *_: m), + patch("ddev.cli.size.timeline.print_table"), + patch("ddev.cli.size.timeline.plot_linegraph"), + ): + mock_response = MagicMock() + mock_response.headers = {"Content-Length": "1024"} + mock_response.raise_for_status = lambda: None + mock_head.return_value = mock_response + + yield + + +def test_timeline_dependency_compressed(ddev, mock_timeline_dependencies, app): + result = ddev( + "size", + "timeline", + "dependency", + "dep1", + "commit1", + "commit2", + "--compressed", + "--platform", + "linux-x86_64", + obj=app, + ) + + assert result.exit_code == 0 + + +def test_timeline_invalid_platform(ddev): + mock_git_repo = MagicMock() + mock_git_repo.repo_dir = "fake_repo" + mock_git_repo.get_module_commits.return_value = ["commit1", "commit2"] + mock_git_repo.get_commit_metadata.side_effect = lambda c: ("Apr 4 2025", "Fix dep", c) + mock_git_repo.__enter__.return_value = mock_git_repo + + with ( + patch("ddev.cli.size.timeline.GitRepo", return_value=mock_git_repo), + patch( + "ddev.cli.size.timeline.valid_platforms_versions", + return_value=({'linux-x86_64', 'linux-aarch64', 'macos-x86_64'}, {'3.12'}), + ), + ): + + result = ddev( + "size", + "timeline", + "dependency", + "dep1", + "commit1", + "commit2", + "--compressed", + "--platform", + 
"invalid-platform", + ) + + assert result.exit_code != 0 + + +def test_timeline_no_changes_in_integration(ddev): + mock_git_repo = MagicMock() + mock_git_repo.repo_dir = "fake_repo" + mock_git_repo.get_module_commits.return_value = [""] + + with ( + patch("ddev.cli.size.timeline.GitRepo.__enter__", return_value=mock_git_repo), + patch("ddev.cli.size.timeline.GitRepo.__exit__", return_value=None), + patch("os.path.exists", return_value=True), + patch("os.path.isdir", return_value=True), + patch("os.listdir", return_value=[]), + ): + result = ddev("size", "timeline", "integration", "integration/foo", "commit1", "commit2", "--compressed") + assert result.exit_code != 0 + assert "No changes found" in result.output + + +def test_timeline_integration_not_found(ddev): + mock_repo = MagicMock() + mock_repo.repo_dir = "fake" + mock_repo.get_module_commits.return_value = [""] + mock_repo.get_creation_commit_module.return_value = "c1" + mock_repo.checkout_commit.return_value = None + + with ( + patch("ddev.cli.size.timeline.GitRepo.__enter__", return_value=mock_repo), + patch("ddev.cli.size.timeline.GitRepo.__exit__", return_value=None), + patch( + "ddev.cli.size.timeline.valid_platforms_versions", + return_value=({'linux-x86_64', 'macos-x86_64', 'linux-aarch64', 'windows-x86_64'}, {'3.12'}), + ), + patch("ddev.cli.size.timeline.module_exists", return_value=False), + ): + result = ddev("size", "timeline", "integration", "missing_module", "c1", "c2") + assert result.exit_code != 0 + assert "not found" in result.output + + +def test_timeline_dependency_missing_no_platform(ddev): + mock_repo = MagicMock() + mock_repo.repo_dir = "fake" + mock_repo.get_module_commits.return_value = ["c1"] + mock_repo.get_creation_commit_module.return_value = "c1" + mock_repo.checkout_commit.return_value = None + + with ( + patch("ddev.cli.size.timeline.GitRepo.__enter__", return_value=mock_repo), + patch("ddev.cli.size.timeline.GitRepo.__exit__", return_value=None), + 
patch("ddev.cli.size.timeline.valid_platforms_versions", return_value=({"linux-x86_64"}, {"3.12"})), + patch("ddev.cli.size.timeline.get_dependency_list", return_value=set()), + ): + result = ddev("size", "timeline", "dependency", "missing_module", "c1", "c2") + assert result.exit_code != 0 + assert "Dependency missing_module not found in latest commit" in result.output + + +def test_timeline_dependency_missing_for_platform(ddev, app): + mock_repo = MagicMock() + mock_repo.repo_dir = "fake" + mock_repo.get_module_commits.return_value = ["c1"] + mock_repo.get_creation_commit_module.return_value = "c1" + mock_repo.checkout_commit.return_value = None + + with ( + patch("ddev.cli.size.timeline.GitRepo.__enter__", return_value=mock_repo), + patch("ddev.cli.size.timeline.GitRepo.__exit__", return_value=None), + patch("ddev.cli.size.timeline.valid_platforms_versions", return_value=({"linux-x86_64"}, {"3.12"})), + patch("ddev.cli.size.timeline.get_dependency_list", return_value=set()), + ): + + result = ddev( + "size", + "timeline", + "dependency", + "missing_module", + "c1", + "c2", + "--platform", + "linux-x86_64", + ) + + assert result.exit_code != 0 + assert ( + "Dependency missing_module not found in latest commit for the platform linux-x86_64, is the name correct?" 
+ in result.output + ) + + +def test_timeline_dependency_no_changes(ddev, app): + mock_repo = MagicMock() + mock_repo.repo_dir = "fake" + mock_repo.get_module_commits.return_value = [""] + mock_repo.get_creation_commit_module.return_value = "c1" + mock_repo.checkout_commit.return_value = None + + with ( + patch("ddev.cli.size.timeline.GitRepo.__enter__", return_value=mock_repo), + patch("ddev.cli.size.timeline.GitRepo.__exit__", return_value=None), + patch("ddev.cli.size.timeline.valid_platforms_versions", return_value=({"linux-x86_64"}, {"3.12"})), + patch("ddev.cli.size.timeline.get_dependency_list", return_value={"dep1"}), + ): + + result = ddev( + "size", + "timeline", + "dependency", + "dep1", + "c1", + "c2", + "--platform", + "linux-x86_64", + obj=app, + ) + + assert result.exit_code != 0 + assert "no changes found" in result.output.lower() diff --git a/package_size_analyzer/cli.py b/package_size_analyzer/cli.py new file mode 100644 index 0000000000000..23c9a15d0bc0c --- /dev/null +++ b/package_size_analyzer/cli.py @@ -0,0 +1,38 @@ +import argparse +from modes import status_mode + + +def main(): + parser = argparse.ArgumentParser(description="Package Size Analyzer CLI") + + # Define allowed choices + valid_modes = ["status", "diff", "timeline"] + valid_platforms = ["linux-aarch64", "linux-x86_64", "macos-x86_64", "windows-x86_64"] + valid_python_versions = ["3.12"] + + # Arguments + parser.add_argument("mode", choices=valid_modes, help="Mode of operation") + parser.add_argument("--platform", choices=valid_platforms, required=False, help="Target platform") + parser.add_argument("--python", choices=valid_python_versions, required=False, help="Python version (MAJOR.MINOR)") + parser.add_argument("--compressed", action="store_true", help="Measure compressed size") + + args = parser.parse_args() + + # Execute the corresponding function based on the selected mode + if args.mode == "status": + # if an argument is not specified, all possibilities are executed + if 
args.platform is None and args.python is None: + for platform in valid_platforms: + for version in valid_python_versions: + status_mode(platform, version, args.compressed) + elif args.platform is None: + for platform in valid_platforms: + status_mode(platform, args.python, args.compressed) + elif args.python is None: + for version in valid_python_versions: + status_mode(args.platform, version, args.compressed) + else: + status_mode(args.platform, args.python, args.compressed) + +if __name__ == "__main__": + main() diff --git a/package_size_analyzer/modes.py b/package_size_analyzer/modes.py new file mode 100644 index 0000000000000..b9e0f2f05b4ab --- /dev/null +++ b/package_size_analyzer/modes.py @@ -0,0 +1,184 @@ +import requests +import pandas as pd +import re +import os +from tabulate import tabulate +import zlib +import io + + +def status_mode(platform, version, compressed): + if compressed: + df1 = pd.DataFrame(get_compressed_files()) + print("Compressed integrations done") + + df2 = pd.DataFrame(get_compressed_dependencies(platform,version)) + print("Compressed dependencies done") + + + df = pd.concat([df1, df2], ignore_index=True) + + # Calculate the size for the whole module + df_grouped = df.groupby(["Name", 'Type'], as_index=False).agg({"Size (Bytes)": "sum"}) + df_grouped = df_grouped.sort_values(by="Size (Bytes)", ascending=False).reset_index(drop=True) + + + df_grouped["Size"] = df_grouped["Size (Bytes)"].apply(convert_size) + df_grouped.to_csv("compressed_status_" + platform + "_" + version + ".csv", index=False) + df.to_csv("compressed_status_all_" + platform + "_" + version + ".csv", index=False) + df_grouped = df_grouped.drop(columns=['Size (Bytes)']) + print('--------------', platform,version,'--------------') + print(tabulate(df_grouped, headers='keys', tablefmt='grid')) + print("CSV exported") + + + + +def get_compressed_files(): + print("Getting compressed integrations") + + ignored_files = {"datadog_checks_dev", "datadog_checks_tests_helper"} + 
git_ignore = get_gitignore_files()
+    included_folder = "datadog_checks/"
+
+    script_path = os.path.abspath(__file__)
+    parent_dir = os.path.dirname(script_path)
+    repo_path = os.path.dirname(parent_dir)
+
+    file_data = []
+    for root, _, files in os.walk(repo_path):
+        for file in files:
+            file_path = os.path.join(root, file)
+
+            # Convert the path to a relative format within the repo
+            relative_path = os.path.relpath(file_path, repo_path)
+
+            # Filter files
+            if is_valid_integration(relative_path, included_folder, ignored_files, git_ignore):
+                try:
+                    # Compress the file
+                    compressor = zlib.compressobj()
+                    compressed_size = 0
+
+                    # original_size = os.path.getsize(file_path)
+                    with open(file_path, "rb") as f:
+                        while chunk := f.read(8192):  # Read in 8KB chunks
+                            compressed_chunk = compressor.compress(chunk)
+                            compressed_size += len(compressed_chunk)
+
+                    compressed_size += len(compressor.flush())  # Flush the buffer
+                    integration = relative_path.split("/")[0]
+                    file_data.append({
+                        "File Path": relative_path,
+                        "Type": "Integration",
+                        "Name": integration,
+                        "Size (Bytes)": compressed_size
+                    })
+
+                except Exception as e:
+                    print(f"Error processing {relative_path}: {e}")
+
+    return file_data
+
+
+def get_compressed_dependencies(platform=None, version=None):
+    print("Getting compressed dependencies")
+
+    script_path = os.path.abspath(__file__)
+    parent_dir = os.path.dirname(script_path)
+    repo_path = os.path.dirname(parent_dir)
+    resolved_path = os.path.join(repo_path, ".deps/resolved")
+
+    if not os.path.exists(resolved_path) or not os.path.isdir(resolved_path):
+        print(f"Error: Directory not found {resolved_path}")
+        return []
+
+    file_data = []
+
+    for filename in os.listdir(resolved_path):
+        file_path = os.path.join(resolved_path, filename)
+
+        if os.path.isfile(file_path) and is_correct_dependency(platform, version, filename):
+            deps, download_urls = get_dependencies(file_path)
+            return get_dependencies_sizes(deps, download_urls)
+
+    # No dependency file matched the platform/version: return the empty list
+    return file_data
+
+
+def 
is_correct_dependency(platform, version, name):
+    return platform in name and version in name
+
+def get_dependencies_sizes(deps, download_urls):
+    file_data = []
+    for dep, url in zip(deps, download_urls):
+        dep_response = requests.head(url)
+        if dep_response.status_code != 200:
+            print(f"Error {dep_response.status_code}: Unable to fetch the dependencies file")
+        else:
+            size = dep_response.headers.get("Content-Length", None)
+            file_data.append({"File Path": dep, "Type": "Dependency", "Name": dep, "Size (Bytes)": int(size)})
+
+    return file_data
+
+
+def get_dependencies(file_path):
+    download_urls = []
+    deps = []
+    try:
+        with open(file_path, "r", encoding="utf-8") as file:
+            file_content = file.read()
+            for line in file_content.splitlines():
+                match = re.search(r"([\w\-\d\.]+) @ (https?://[^\s#]+)", line)
+                if match:
+                    deps.append(match.group(1))
+                    download_urls.append(match.group(2))
+    except Exception as e:
+        print(f"Error reading file {file_path}: {e}")
+
+    return deps, download_urls
+
+def is_valid_integration(path, included_folder, ignored_files, git_ignore):
+    # It is not an integration
+    if path.startswith('.'):
+        return False
+    # It is part of an integration and it is not in the datadog_checks folder
+    elif not (included_folder in path):
+        return False
+    # It is an irrelevant file
+    elif any(ignore in path for ignore in ignored_files):
+        return False
+    # This file is contained in .gitignore
+    elif any(ignore in path for ignore in git_ignore):
+        return False
+    else:
+        return True
+
+
+def get_gitignore_files():
+    script_path = os.path.abspath(__file__)
+    parent_dir = os.path.dirname(script_path)
+    repo_path = os.path.dirname(parent_dir)
+    gitignore_path = os.path.join(repo_path, ".gitignore")
+    if not os.path.exists(gitignore_path):
+        print(f"Error: .gitignore file not found at {gitignore_path}")
+        return []
+
+    try:
+        with open(gitignore_path, "r", encoding="utf-8") as file:
+            gitignore_content = file.read()
+            ignored_patterns = [line.strip() for line in 
gitignore_content.splitlines() if line.strip() and not line.startswith("#")]
+            return ignored_patterns
+    except Exception as e:
+        print(f"Error reading .gitignore file: {e}")
+        return []
+
+def convert_size(size_bytes):
+    """Transforms bytes into a human-friendly format (KB, MB, GB) with 2 decimal places."""
+    for unit in ['B', 'KB', 'MB', 'GB']:
+        if size_bytes < 1024:
+            return (str(round(size_bytes, 2)) + unit)
+        size_bytes /= 1024
+    return (str(round(size_bytes, 2)) + "TB")
+
+
diff --git a/package_size_analyzer/test.py b/package_size_analyzer/test.py
new file mode 100644
index 0000000000000..89bc7c64a3c11
--- /dev/null
+++ b/package_size_analyzer/test.py
@@ -0,0 +1,58 @@
+
+import pytest
+import requests
+from unittest.mock import patch, mock_open, MagicMock
+from modes import (
+    get_compressed_dependencies,
+    get_gitignore_files,
+    convert_size,
+    is_valid_integration,
+    is_correct_dependency,
+    get_dependencies,
+    get_dependencies_sizes
+)
+
+def test_is_correct_dependency():
+    assert is_correct_dependency("windows-x86_64", "3.12", "windows-x86_64-3.12") == True
+    assert is_correct_dependency("windows-x86_64", "3.12", "linux-x86_64-3.12") == False
+    assert is_correct_dependency("windows-x86_64", "3.13", "windows-x86_64-3.12") == False
+
+
+def test_convert_size():
+    assert convert_size(500) == "500B"
+    assert convert_size(1024) == "1.0KB"
+    assert convert_size(1048576) == "1.0MB"
+    assert convert_size(1073741824) == "1.0GB"
+
+def test_is_valid_integration():
+    included_folder = "datadog_checks/"
+    ignored_files = {"datadog_checks_dev", "datadog_checks_tests_helper"}
+    git_ignore = [".git", "__pycache__"]
+
+    assert is_valid_integration("datadog_checks/example.py", included_folder, ignored_files, git_ignore) == True
+    assert is_valid_integration("__pycache__/file.py", included_folder, ignored_files, git_ignore) == False
+    assert is_valid_integration("datadog_checks_dev/example.py", included_folder, ignored_files, git_ignore) == False
+    assert 
is_valid_integration(".git/config", included_folder, ignored_files, git_ignore) == False + +def test_get_dependencies(): + file_content = "dependency1 @ https://example.com/dependency1.whl\ndependency2 @ https://example.com/dependency2.whl" + mock_open_obj = mock_open(read_data=file_content) + with patch("builtins.open", mock_open_obj): + deps, urls = get_dependencies("fake_path") + assert deps == ["dependency1", "dependency2"] + assert urls == ["https://example.com/dependency1.whl", "https://example.com/dependency2.whl"] + +def test_get_gitignore_files(): + mock_gitignore = "__pycache__/\n*.log\n" # Sample .gitignore file + with patch("builtins.open", mock_open(read_data=mock_gitignore)): + with patch("os.path.exists", return_value=True): + ignored_patterns = get_gitignore_files() + assert ignored_patterns == ["__pycache__/", "*.log"] + +def test_get_dependencies_sizes(): + mock_response = MagicMock() + mock_response.status_code = 200 + mock_response.headers = {"Content-Length": "12345"} + with patch("requests.head", return_value=mock_response): + file_data = get_dependencies_sizes(["dependency1"], ["https://example.com/dependency1.whl"]) + assert file_data == [{"File Path": "dependency1", "Type": "Dependency", "Name": "dependency1", "Size (Bytes)": 12345}]