diff --git a/SUPPRESSION_DEDUPLICATION.md b/SUPPRESSION_DEDUPLICATION.md new file mode 100644 index 00000000000..180f2bf92d3 --- /dev/null +++ b/SUPPRESSION_DEDUPLICATION.md @@ -0,0 +1,97 @@ +# Suppression File Synchronization + +## Overview + +DependencyCheck maintains two suppression files: +- **Base Suppression File**: `core/src/main/resources/dependencycheck-base-suppression.xml` - Manually curated suppressions that ship with releases +- **Generated Suppressions File**: Maintained on the `generatedSuppressions` branch - Auto-generated suppressions from GitHub issue reports + +## Strategy + +The two files serve different purposes and should remain separate: +- The **generated file** is automatically maintained via GitHub Actions when issues are reported +- The **base file** should contain only manually curated suppressions that are NOT in the generated file +- Both files are loaded at runtime, so suppressions in either file will be applied + +## Synchronization Tools + +Two tools are available to help keep the files in sync in case of overlap (current situation as of Nov 10, 2025): + +### 1. Git History Analyzer + +The `SuppressionSyncAnalyzer` analyzes git history of the generated suppressions file to find suppressions that were **modified or deleted**: + +- ✅ Focuses on intentional changes (not just duplicates) +- ✅ Provides git commit context (why was it changed?) +- ✅ Catches consolidations (e.g., 20 individual rules → 1 broad rule) +- ✅ Shows GitHub commit links for review +- ✅ Handles the "V" option to view commits in browser + +**Interactive mode** (recommended): +```bash +./deduplicate-suppressions.sh +# or explicitly: +./deduplicate-suppressions.sh analyzer +``` + +This will: +1. Fetch the latest from the `generatedSuppressions` branch +2. Analyze git history for modifications/deletions +3. Check if old versions exist in base file +4. 
Interactively show each one with: + - What's currently in base + - What happened in generated (modified/deleted) + - Git commit info with clickable GitHub link +5. Let you decide: Remove, Keep, View commit, Quit, or Auto-remove all + +**Non-interactive mode**: +```bash +./deduplicate-suppressions.sh analyzer --non-interactive +``` + +This automatically removes ALL obsolete suppressions from base. + +### 2. Duplicate Detector (Legacy) + +The `SuppressionDeduplicator` finds exact duplicates between the two files. This is less sophisticated but faster for simple cases. + +**Interactive mode**: +```bash +./deduplicate-suppressions.sh deduplicator +``` + +**Non-interactive mode**: +```bash +./deduplicate-suppressions.sh deduplicator --non-interactive +``` + +### Backup + +The tool automatically creates a backup of the base suppression file before making changes: +- Backup location: `dependencycheck-base-suppression.xml.backup` + +## How Issues Are Detected + +### Git History Analyzer +Finds suppressions in base that match the OLD version from git history where: +1. The suppression was **deleted** from generated +2. OR the suppression was **modified** in generated (indicating consolidation or correction) + +### Duplicate Detector +Two suppressions are considered duplicates if: +1. They have matching **key fields** (packageUrl, gav, filePath, or sha1) +2. 
AND they have overlapping **CPEs**, **CVEs**, or **vulnerability names** + +## Implementation Details + +- `utils/src/main/java/org/owasp/dependencycheck/utils/SuppressionSyncAnalyzer.java` - Git history analyzer +- `utils/src/main/java/org/owasp/dependencycheck/utils/SuppressionDeduplicator.java` - Duplicate detector +- `utils/src/test/java/org/owasp/dependencycheck/utils/SuppressionSyncAnalyzerTest.java` - Tests for git diff parsing + +### Running Tests + +To verify the git diff parsing logic works correctly: + +```bash +mvn -pl utils test -Dtest=SuppressionSyncAnalyzerTest +``` diff --git a/deduplicate-suppressions.sh b/deduplicate-suppressions.sh new file mode 100755 index 00000000000..32b4deacc9b --- /dev/null +++ b/deduplicate-suppressions.sh @@ -0,0 +1,51 @@ +#!/bin/bash +# Script to synchronize suppressions between base and generated files + +set -e + +BASE_SUPPRESSION="core/src/main/resources/dependencycheck-base-suppression.xml" +MODE="${1:-analyzer}" + +if [ "$MODE" = "--help" ] || [ "$MODE" = "-h" ]; then + echo "Usage: $0 [analyzer|deduplicator] [--non-interactive]" + echo "" + echo "Modes:" + echo " analyzer - Analyze git history for modified/deleted suppressions (recommended)" + echo " deduplicator - Find exact duplicates between files" + echo "" + echo "Options:" + echo " --non-interactive - Remove all without prompting" + echo "" + echo "Examples:" + echo " $0 # Run analyzer in interactive mode" + echo " $0 analyzer --non-interactive # Run analyzer and auto-remove all" + echo " $0 deduplicator # Run old duplicate detection" + exit 0 +fi + +# Determine which tool to run +if [ "$MODE" = "deduplicator" ]; then + MAIN_CLASS="org.owasp.dependencycheck.utils.SuppressionDeduplicator" + shift # Remove 'deduplicator' from args +elif [ "$MODE" = "analyzer" ] || [ "$MODE" = "--non-interactive" ]; then + MAIN_CLASS="org.owasp.dependencycheck.utils.SuppressionSyncAnalyzer" + if [ "$MODE" = "analyzer" ]; then + shift # Remove 'analyzer' from args + fi +else + 
echo "Unknown mode: $MODE" + echo "Run '$0 --help' for usage" + exit 1 +fi + +echo "Building utils module..." +mvn -pl utils clean compile -q + +echo "" +echo "Running synchronization tool..." +mvn -pl utils exec:java \ + -Dexec.mainClass="$MAIN_CLASS" \ + -Dexec.args="$BASE_SUPPRESSION $*" + +echo "" +echo "Done!" diff --git a/utils/src/main/java/org/owasp/dependencycheck/utils/SuppressionDeduplicator.java b/utils/src/main/java/org/owasp/dependencycheck/utils/SuppressionDeduplicator.java new file mode 100644 index 00000000000..c4fd44c9431 --- /dev/null +++ b/utils/src/main/java/org/owasp/dependencycheck/utils/SuppressionDeduplicator.java @@ -0,0 +1,542 @@ +/* + * This file is part of dependency-check-utils. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * Copyright (c) 2025 Jeremy Long. All Rights Reserved. 
+ */ +package org.owasp.dependencycheck.utils; + +import org.w3c.dom.Document; +import org.w3c.dom.Element; +import org.w3c.dom.Node; +import org.w3c.dom.NodeList; + +import javax.xml.parsers.DocumentBuilder; +import javax.xml.parsers.DocumentBuilderFactory; +import javax.xml.transform.OutputKeys; +import javax.xml.transform.Transformer; +import javax.xml.transform.TransformerFactory; +import javax.xml.transform.dom.DOMSource; +import javax.xml.transform.stream.StreamResult; +import java.io.*; +import java.net.URL; +import java.nio.file.Files; +import java.nio.file.Paths; +import java.util.*; +import java.util.stream.Collectors; + +/** + * Interactive utility to deduplicate suppressions between the base suppression file + * and the generated suppressions file. + * + * This tool identifies duplicates and allows the user to interactively decide which + * suppression to keep (base or generated) or skip the decision. + */ +public class SuppressionDeduplicator { + + private static final String GENERATED_SUPPRESSIONS_URL = + "https://raw.githubusercontent.com/dependency-check/DependencyCheck/refs/heads/generatedSuppressions/generatedSuppressions.xml"; + + public static void main(String[] args) throws Exception { + if (args.length < 1) { + System.err.println("Usage: java SuppressionDeduplicator [--non-interactive]"); + System.err.println(" --non-interactive: Remove all duplicates from base without prompting"); + System.exit(1); + } + + String basePath = args[0]; + boolean interactive = true; + + if (args.length > 1 && "--non-interactive".equals(args[1])) { + interactive = false; + } + + System.out.println("=== DependencyCheck Suppression Deduplicator ===\n"); + System.out.println("Base suppression file: " + basePath); + System.out.println("Generated suppressions URL: " + GENERATED_SUPPRESSIONS_URL); + System.out.println(); + + // Parse both files + System.out.println("Parsing suppression files..."); + Document baseDoc = parseXmlFile(basePath); + Document generatedDoc = 
parseXmlFromUrl(GENERATED_SUPPRESSIONS_URL); + + List baseSuppressions = extractSuppressions(baseDoc, "base"); + List generatedSuppressions = extractSuppressions(generatedDoc, "generated"); + + System.out.println("Found " + baseSuppressions.size() + " suppressions in base file"); + System.out.println("Found " + generatedSuppressions.size() + " suppressions in generated file\n"); + + // Find duplicates + System.out.println("Identifying duplicates..."); + List duplicates = findDuplicates(baseSuppressions, generatedSuppressions); + + if (duplicates.isEmpty()) { + System.out.println("No duplicates found! Files are already synchronized."); + return; + } + + System.out.println("Found " + duplicates.size() + " potential duplicates\n"); + + // Process duplicates + Set toRemoveFromBase = new HashSet<>(); + + if (interactive) { + toRemoveFromBase = processInteractive(duplicates); + } else { + System.out.println("Non-interactive mode: Removing all duplicates from base file"); + toRemoveFromBase = duplicates.stream() + .map(d -> d.base) + .collect(Collectors.toSet()); + } + + if (toRemoveFromBase.isEmpty()) { + System.out.println("\nNo suppressions selected for removal. 
Exiting without changes."); + return; + } + + // Remove selected suppressions from base file using line-based approach + System.out.println("\nRemoving " + toRemoveFromBase.size() + " suppressions from base file..."); + + String backupPath = basePath + ".backup"; + System.out.println("Creating backup: " + backupPath); + Files.copy(Paths.get(basePath), Paths.get(backupPath), + java.nio.file.StandardCopyOption.REPLACE_EXISTING); + + System.out.println("Writing updated file: " + basePath); + removeSuppressionsLineByLine(basePath, toRemoveFromBase); + + System.out.println("\n=== Complete ==="); + System.out.println("Removed " + toRemoveFromBase.size() + " duplicate suppressions from base file"); + System.out.println("Backup saved to: " + backupPath); + } + + private static Set processInteractive(List duplicates) throws IOException { + Set toRemoveFromBase = new HashSet<>(); + BufferedReader reader = new BufferedReader(new InputStreamReader(System.in)); + + for (int i = 0; i < duplicates.size(); i++) { + DuplicatePair dup = duplicates.get(i); + + System.out.println("=".repeat(80)); + System.out.println("Duplicate " + (i + 1) + " of " + duplicates.size()); + System.out.println("=".repeat(80)); + + System.out.println("\n[BASE FILE]"); + System.out.println(dup.base.toString()); + + System.out.println("\n[GENERATED FILE]"); + System.out.println(dup.generated.toString()); + + System.out.println("\nMatch reason: " + dup.matchReason); + System.out.println("\nWhat would you like to do?"); + System.out.println(" (R) Remove from base file (keep generated)"); + System.out.println(" (K) Keep in base file (both will exist)"); + System.out.println(" (S) Skip this decision"); + System.out.println(" (Q) Quit without saving"); + System.out.println(" (A) Remove all remaining duplicates from base"); + System.out.print("\nChoice [R/K/S/Q/A]: "); + + String choice = reader.readLine().trim().toUpperCase(); + + switch (choice) { + case "R": + toRemoveFromBase.add(dup.base); + 
System.out.println("✓ Will remove from base\n"); + break; + case "K": + System.out.println("✓ Will keep in base\n"); + break; + case "S": + System.out.println("⊘ Skipped\n"); + break; + case "Q": + System.out.println("\nQuitting without changes."); + System.exit(0); + break; + case "A": + // Add current and all remaining + toRemoveFromBase.add(dup.base); + for (int j = i + 1; j < duplicates.size(); j++) { + toRemoveFromBase.add(duplicates.get(j).base); + } + System.out.println("✓ Will remove all remaining duplicates from base\n"); + return toRemoveFromBase; + default: + System.out.println("Invalid choice, skipping.\n"); + } + } + + return toRemoveFromBase; + } + + private static List findDuplicates(List baseList, List generatedList) { + List duplicates = new ArrayList<>(); + + for (Suppression base : baseList) { + for (Suppression generated : generatedList) { + String matchReason = findMatchReason(base, generated); + if (matchReason != null) { + duplicates.add(new DuplicatePair(base, generated, matchReason)); + break; // Only match once per base suppression + } + } + } + + return duplicates; + } + + private static String findMatchReason(Suppression s1, Suppression s2) { + // For exact matching (ignoring notes), all fields must match + // Check packageUrl (key field) + if (!Objects.equals(s1.packageUrl, s2.packageUrl)) { + return null; + } + + // Check gav (key field) + if (!Objects.equals(s1.gav, s2.gav)) { + return null; + } + + // Check filePath (key field) + if (!Objects.equals(s1.filePath, s2.filePath)) { + return null; + } + + // Check sha1 (key field) + if (!Objects.equals(s1.sha1, s2.sha1)) { + return null; + } + + // Check regex attributes + if (s1.packageUrlRegex != s2.packageUrlRegex || + s1.gavRegex != s2.gavRegex || + s1.filePathRegex != s2.filePathRegex) { + return null; + } + + // Check CPEs (must match exactly) + if (!s1.cpes.equals(s2.cpes)) { + return null; + } + + // Check CVEs (must match exactly) + if (!s1.cves.equals(s2.cves)) { + return null; 
+ } + + // Check vulnerability names (must match exactly) + if (!s1.vulnerabilityNames.equals(s2.vulnerabilityNames)) { + return null; + } + + // At least one key field must be non-null + if (s1.packageUrl == null && s1.gav == null && s1.filePath == null && s1.sha1 == null) { + return null; + } + + // Build reason string showing what matched + StringBuilder reason = new StringBuilder("Exact match"); + if (s1.packageUrl != null) { + reason.append(" - packageUrl: ").append(s1.packageUrl); + } + if (s1.gav != null) { + reason.append(" - gav: ").append(s1.gav); + } + if (s1.filePath != null) { + reason.append(" - filePath: ").append(s1.filePath); + } + if (s1.sha1 != null) { + reason.append(" - sha1: ").append(s1.sha1); + } + if (!s1.cpes.isEmpty()) { + reason.append(" - CPEs: ").append(s1.cpes.size()).append(" match"); + } + if (!s1.cves.isEmpty()) { + reason.append(" - CVEs: ").append(s1.cves.size()).append(" match"); + } + + return reason.toString(); + } + + /** + * Remove suppressions from the file line-by-line to preserve formatting. + * This avoids the XML transformer reformatting the entire file. 
+     */
+    private static void removeSuppressionsLineByLine(String filePath, Set<Suppression> toRemove) throws Exception {
+        // Read the entire file (Files.readAllLines decodes as UTF-8)
+        List<String> lines = Files.readAllLines(Paths.get(filePath));
+
+        // Parse suppressions with their line ranges
+        List<SuppressionLineRange> lineRanges = findSuppressionLineRanges(lines);
+
+        // Determine which line ranges to remove
+        Set<Integer> linesToRemove = new HashSet<>();
+        for (SuppressionLineRange range : lineRanges) {
+            if (toRemove.contains(range.suppression)) {
+                for (int i = range.startLine; i <= range.endLine; i++) {
+                    linesToRemove.add(i);
+                }
+            }
+        }
+
+        // Write back only the lines we're keeping.
+        // Use Files.newBufferedWriter (UTF-8) rather than new FileWriter(filePath):
+        // FileWriter uses the platform default charset, which would corrupt non-ASCII
+        // text that was decoded as UTF-8 by Files.readAllLines above.
+        try (BufferedWriter writer = Files.newBufferedWriter(Paths.get(filePath))) {
+            for (int i = 0; i < lines.size(); i++) {
+                if (!linesToRemove.contains(i)) {
+                    writer.write(lines.get(i));
+                    writer.newLine();
+                }
+            }
+        }
+    }
+
+    /**
+     * Find the line ranges for each suppression in the file.
+     */
+    private static List<SuppressionLineRange> findSuppressionLineRanges(List<String> lines) throws Exception {
+        List<SuppressionLineRange> ranges = new ArrayList<>();
+
+        int startLine = -1;
+        StringBuilder currentXml = new StringBuilder();
+
+        for (int i = 0; i < lines.size(); i++) {
+            String line = lines.get(i);
+            String trimmed = line.trim();
+
+            if (trimmed.startsWith("<suppress ") || trimmed.equals("<suppress>")) {
+                startLine = i;
+                currentXml = new StringBuilder();
+                currentXml.append(line).append("\n");
+            } else if (startLine >= 0) {
+                currentXml.append(line).append("\n");
+
+                if (trimmed.equals("</suppress>")) {
+                    // End of suppression found
+                    try {
+                        // Parse this suppression
+                        String xmlString = currentXml.toString();
+                        DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
+                        factory.setNamespaceAware(true);
+                        DocumentBuilder builder = factory.newDocumentBuilder();
+                        // Pass an explicit charset; the no-arg getBytes() uses the
+                        // platform default and can mangle UTF-8 content on Windows.
+                        Document doc = builder.parse(new java.io.ByteArrayInputStream(
+                                xmlString.getBytes(java.nio.charset.StandardCharsets.UTF_8)));
+                        Element suppressElement = doc.getDocumentElement();
+
+                        Suppression suppression = new Suppression(suppressElement, "base");
+                        ranges.add(new SuppressionLineRange(suppression,
startLine, i)); + } catch (Exception e) { + // Skip malformed suppressions + System.err.println("Warning: Could not parse suppression at line " + startLine + ": " + e.getMessage()); + } + + startLine = -1; + currentXml = new StringBuilder(); + } + } + } + + return ranges; + } + + static class SuppressionLineRange { + final Suppression suppression; + final int startLine; + final int endLine; + + SuppressionLineRange(Suppression suppression, int startLine, int endLine) { + this.suppression = suppression; + this.startLine = startLine; + this.endLine = endLine; + } + } + + private static Document parseXmlFile(String path) throws Exception { + DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance(); + factory.setNamespaceAware(true); + DocumentBuilder builder = factory.newDocumentBuilder(); + return builder.parse(new File(path)); + } + + private static Document parseXmlFromUrl(String urlString) throws Exception { + DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance(); + factory.setNamespaceAware(true); + DocumentBuilder builder = factory.newDocumentBuilder(); + URL url = new URL(urlString); + + // Read the content and wrap it in a proper XML structure + // The generatedSuppressions.xml is a fragment, not a complete document + StringBuilder content = new StringBuilder(); + content.append("\n"); + content.append("\n"); + + try (InputStream is = url.openStream(); + BufferedReader reader = new BufferedReader(new InputStreamReader(is))) { + String line; + while ((line = reader.readLine()) != null) { + content.append(line).append("\n"); + } + } + + content.append("\n"); + + // Parse the wrapped content + return builder.parse(new java.io.ByteArrayInputStream(content.toString().getBytes("UTF-8"))); + } + + private static void writeXmlFile(Document doc, String path) throws Exception { + TransformerFactory transformerFactory = TransformerFactory.newInstance(); + Transformer transformer = transformerFactory.newTransformer(); + 
transformer.setOutputProperty(OutputKeys.INDENT, "yes"); + transformer.setOutputProperty(OutputKeys.ENCODING, "UTF-8"); + transformer.setOutputProperty("{http://xml.apache.org/xslt}indent-amount", "4"); + + DOMSource source = new DOMSource(doc); + StreamResult result = new StreamResult(new File(path)); + transformer.transform(source, result); + } + + private static List extractSuppressions(Document doc, String source) { + List suppressions = new ArrayList<>(); + NodeList nodes = doc.getElementsByTagName("suppress"); + + for (int i = 0; i < nodes.getLength(); i++) { + Element element = (Element) nodes.item(i); + suppressions.add(new Suppression(element, source)); + } + + return suppressions; + } + + static class Suppression { + final String source; + final String notes; + final String packageUrl; + final String gav; + final String filePath; + final String sha1; + final boolean packageUrlRegex; + final boolean gavRegex; + final boolean filePathRegex; + final Set cpes; + final Set cves; + final Set vulnerabilityNames; + final Element element; + + Suppression(Element element, String source) { + this.element = element; + this.source = source; + this.notes = getElementText(element, "notes"); + this.packageUrl = getElementText(element, "packageUrl"); + this.gav = getElementText(element, "gav"); + this.filePath = getElementText(element, "filePath"); + this.sha1 = getElementText(element, "sha1"); + this.packageUrlRegex = hasRegexAttribute(element, "packageUrl"); + this.gavRegex = hasRegexAttribute(element, "gav"); + this.filePathRegex = hasRegexAttribute(element, "filePath"); + this.cpes = getElementTexts(element, "cpe"); + this.cves = getElementTexts(element, "cve"); + this.vulnerabilityNames = getElementTexts(element, "vulnerabilityName"); + } + + private String getElementText(Element parent, String tagName) { + NodeList nodes = parent.getElementsByTagName(tagName); + if (nodes.getLength() > 0) { + return nodes.item(0).getTextContent().trim(); + } + return null; + } + + 
private boolean hasRegexAttribute(Element parent, String tagName) {
+            NodeList nodes = parent.getElementsByTagName(tagName);
+            if (nodes.getLength() > 0) {
+                Element elem = (Element) nodes.item(0);
+                String regex = elem.getAttribute("regex");
+                return "true".equalsIgnoreCase(regex);
+            }
+            return false;
+        }
+
+        private Set<String> getElementTexts(Element parent, String tagName) {
+            Set<String> texts = new HashSet<>();
+            NodeList nodes = parent.getElementsByTagName(tagName);
+            for (int i = 0; i < nodes.getLength(); i++) {
+                String text = nodes.item(i).getTextContent().trim();
+                if (!text.isEmpty()) {
+                    texts.add(text);
+                }
+            }
+            return texts;
+        }
+
+        @Override
+        public String toString() {
+            StringBuilder sb = new StringBuilder();
+            // Collapse whitespace FIRST, then truncate using the collapsed length.
+            // The previous code truncated with Math.min(100, notes.length()) computed
+            // from the original string; replaceAll("\\s+", " ") can shorten the string,
+            // so that index could exceed the collapsed length and throw
+            // StringIndexOutOfBoundsException. Also only append "..." when truncated.
+            if (notes != null) {
+                String collapsed = notes.replaceAll("\\s+", " ");
+                sb.append("Notes: ")
+                        .append(collapsed.length() > 100 ? collapsed.substring(0, 100) + "..." : collapsed)
+                        .append("\n");
+            } else {
+                sb.append("Notes: N/A").append("\n");
+            }
+
+            if (packageUrl != null) {
+                sb.append("PackageURL").append(packageUrlRegex ? " (regex)" : "").append(": ").append(packageUrl).append("\n");
+            }
+            if (gav != null) {
+                sb.append("GAV").append(gavRegex ? " (regex)" : "").append(": ").append(gav).append("\n");
+            }
+            if (filePath != null) {
+                sb.append("FilePath").append(filePathRegex ?
" (regex)" : "").append(": ").append(filePath).append("\n"); + } + if (sha1 != null) { + sb.append("SHA1: ").append(sha1).append("\n"); + } + if (!cpes.isEmpty()) { + sb.append("CPEs: ").append(cpes).append("\n"); + } + if (!cves.isEmpty()) { + sb.append("CVEs: ").append(cves).append("\n"); + } + if (!vulnerabilityNames.isEmpty()) { + sb.append("Vuln Names: ").append(vulnerabilityNames).append("\n"); + } + + return sb.toString(); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (!(o instanceof Suppression)) return false; + Suppression that = (Suppression) o; + return Objects.equals(packageUrl, that.packageUrl) && + Objects.equals(gav, that.gav) && + Objects.equals(filePath, that.filePath) && + Objects.equals(sha1, that.sha1) && + Objects.equals(cpes, that.cpes) && + Objects.equals(cves, that.cves); + } + + @Override + public int hashCode() { + return Objects.hash(packageUrl, gav, filePath, sha1, cpes, cves); + } + } + + static class DuplicatePair { + final Suppression base; + final Suppression generated; + final String matchReason; + + DuplicatePair(Suppression base, Suppression generated, String matchReason) { + this.base = base; + this.generated = generated; + this.matchReason = matchReason; + } + } +} diff --git a/utils/src/main/java/org/owasp/dependencycheck/utils/SuppressionSyncAnalyzer.java b/utils/src/main/java/org/owasp/dependencycheck/utils/SuppressionSyncAnalyzer.java new file mode 100644 index 00000000000..cd367b35e08 --- /dev/null +++ b/utils/src/main/java/org/owasp/dependencycheck/utils/SuppressionSyncAnalyzer.java @@ -0,0 +1,789 @@ +/* + * This file is part of dependency-check-utils. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * Copyright (c) 2025 Jeremy Long. All Rights Reserved. + */ +package org.owasp.dependencycheck.utils; + +import org.w3c.dom.Document; +import org.w3c.dom.Element; +import org.w3c.dom.Node; +import org.w3c.dom.NodeList; + +import javax.xml.parsers.DocumentBuilder; +import javax.xml.parsers.DocumentBuilderFactory; +import javax.xml.transform.OutputKeys; +import javax.xml.transform.Transformer; +import javax.xml.transform.TransformerFactory; +import javax.xml.transform.dom.DOMSource; +import javax.xml.transform.stream.StreamResult; +import java.io.*; +import java.nio.file.Files; +import java.nio.file.Paths; +import java.util.*; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import java.util.stream.Collectors; + +/** + * Analyzes git history of the generated suppressions file to find suppressions that were + * modified or deleted, then checks if the old versions exist in the base file. 
+ * + * This approach is superior to simple duplicate detection because it: + * - Focuses on suppressions that were intentionally changed/removed (not just duplicates) + * - Provides context via git commit messages + * - Catches consolidations that wouldn't be detected as exact duplicates + */ +public class SuppressionSyncAnalyzer { + + private static final String GITHUB_BASE = "https://github.com/dependency-check/DependencyCheck"; + private static final String GENERATED_BRANCH = "generatedSuppressions"; + private static final String GENERATED_FILE = "generatedSuppressions.xml"; + + public static void main(String[] args) throws Exception { + if (args.length < 1) { + System.err.println("Usage: java SuppressionSyncAnalyzer [--non-interactive]"); + System.err.println(" --non-interactive: Remove all obsolete suppressions without prompting"); + System.exit(1); + } + + String basePath = args[0]; + boolean interactive = true; + + if (args.length > 1 && "--non-interactive".equals(args[1])) { + interactive = false; + } + + System.out.println("=== DependencyCheck Suppression Sync Analyzer ===\n"); + System.out.println("This tool analyzes git history of the generated suppressions file"); + System.out.println("to find suppressions that were modified or deleted.\n"); + + // Check if we're in a git repository + if (!new File(".git").exists()) { + System.err.println("Error: Must be run from the root of the DependencyCheck git repository"); + System.exit(1); + } + + System.out.println("Base suppression file: " + basePath); + System.out.println(); + + // Fetch the latest from the generatedSuppressions branch + System.out.println("Fetching latest from generatedSuppressions branch..."); + try { + execGitCommand("git", "fetch", "origin", GENERATED_BRANCH + ":" + GENERATED_BRANCH); + } catch (Exception e) { + System.err.println("Warning: Could not fetch latest. Using local branch. 
(" + e.getMessage() + ")"); + } + + // Get git log for the generated file + System.out.println("Analyzing git history of generated suppressions..."); + List commits = getCommitsAffectingFile(GENERATED_BRANCH, GENERATED_FILE); + System.out.println("Found " + commits.size() + " commits affecting generated suppressions\n"); + + // Parse base file + System.out.println("Parsing base suppression file..."); + Document baseDoc = parseXmlFile(basePath); + List baseSuppressions = extractSuppressions(baseDoc); + System.out.println("Found " + baseSuppressions.size() + " suppressions in base file\n"); + + // Analyze each commit for modifications/deletions + System.out.println("Analyzing commits for modifications and deletions..."); + List obsolete = findObsoleteSuppressions(commits, baseSuppressions); + + if (obsolete.isEmpty()) { + System.out.println("No obsolete suppressions found in base file!"); + System.out.println("Base file is in sync with generated file changes."); + return; + } + + System.out.println("Found " + obsolete.size() + " suppressions in base that were modified/deleted in generated\n"); + + // Process obsolete suppressions + Set toRemoveFromBase = new HashSet<>(); + + if (interactive) { + toRemoveFromBase = processInteractive(obsolete); + } else { + System.out.println("Non-interactive mode: Removing all obsolete suppressions from base file"); + toRemoveFromBase = obsolete.stream() + .map(o -> o.baseSuppression) + .collect(Collectors.toSet()); + } + + if (toRemoveFromBase.isEmpty()) { + System.out.println("\nNo suppressions selected for removal. 
Exiting without changes."); + return; + } + + // Remove selected suppressions from base file using line-based approach + System.out.println("\nRemoving " + toRemoveFromBase.size() + " suppressions from base file..."); + + String backupPath = basePath + ".backup"; + System.out.println("Creating backup: " + backupPath); + Files.copy(Paths.get(basePath), Paths.get(backupPath), + java.nio.file.StandardCopyOption.REPLACE_EXISTING); + + System.out.println("Writing updated file: " + basePath); + removesuppressionsLineByLine(basePath, toRemoveFromBase); + + System.out.println("\n=== Complete ==="); + System.out.println("Removed " + toRemoveFromBase.size() + " obsolete suppressions from base file"); + System.out.println("Backup saved to: " + backupPath); + + // Print summary of commits + printRemovalSummary(obsolete, toRemoveFromBase); + } + + private static void printRemovalSummary(List obsolete, Set removed) { + System.out.println("\n=== Removal Summary ==="); + + // Group by commit + Map> byCommit = new LinkedHashMap<>(); + for (ObsoleteSuppression obs : obsolete) { + if (removed.contains(obs.baseSuppression)) { + byCommit.computeIfAbsent(obs.commit.shortHash, k -> new ArrayList<>()).add(obs); + } + } + + if (byCommit.isEmpty()) { + System.out.println("No suppressions were removed."); + return; + } + + System.out.println("Suppressions removed based on changes from commits:"); + for (Map.Entry> entry : byCommit.entrySet()) { + String hash = entry.getKey(); + List items = entry.getValue(); + ObsoleteSuppression first = items.get(0); + + System.out.println("\n " + hash + " - " + first.commit.message); + System.out.println(" Date: " + first.commit.date); + System.out.println(" URL: " + GITHUB_BASE + "/commit/" + first.commit.fullHash); + System.out.println(" Removed " + items.size() + " suppression(s)"); + } + + System.out.println("\n" + removed.size() + " total suppression(s) removed from " + byCommit.size() + " commit(s)"); + } + + private static Set 
processInteractive(List obsolete) throws IOException { + Set toRemoveFromBase = new HashSet<>(); + BufferedReader reader = new BufferedReader(new InputStreamReader(System.in)); + + for (int i = 0; i < obsolete.size(); i++) { + ObsoleteSuppression obs = obsolete.get(i); + + System.out.println("=".repeat(100)); + System.out.println("Obsolete Suppression " + (i + 1) + " of " + obsolete.size()); + System.out.println("=".repeat(100)); + + System.out.println("\n[CURRENT IN BASE FILE]"); + System.out.println(obs.baseSuppression.toDetailedString()); + + System.out.println("\n[WHAT HAPPENED IN GENERATED FILE]"); + if (obs.wasDeleted) { + System.out.println("✗ DELETED in commit " + obs.commit.shortHash); + } else { + System.out.println("✎ MODIFIED in commit " + obs.commit.shortHash); + if (obs.newVersion != null) { + System.out.println("\n[NEW VERSION IN GENERATED]"); + System.out.println(obs.newVersion); + } + } + + System.out.println("\n[COMMIT INFO]"); + System.out.println("Commit: " + obs.commit.shortHash); + System.out.println("Date: " + obs.commit.date); + System.out.println("Message: " + obs.commit.message); + System.out.println("URL: " + GITHUB_BASE + "/commit/" + obs.commit.fullHash); + + System.out.println("\nWhat would you like to do?"); + System.out.println(" (R) Remove from base file (recommended)"); + System.out.println(" (K) Keep in base file"); + System.out.println(" (V) View commit in browser"); + System.out.println(" (Q) Quit without saving"); + System.out.println(" (A) Remove all remaining obsolete suppressions"); + System.out.print("\nChoice [R/K/V/Q/A]: "); + + String choice = reader.readLine().trim().toUpperCase(); + + switch (choice) { + case "R": + toRemoveFromBase.add(obs.baseSuppression); + System.out.println("✓ Will remove from base\n"); + break; + case "K": + System.out.println("✓ Will keep in base\n"); + break; + case "V": + String url = GITHUB_BASE + "/commit/" + obs.commit.fullHash; + System.out.println("Opening: " + url); + openBrowser(url); + 
i--; // Re-show this suppression + continue; + case "Q": + System.out.println("\nQuitting without changes."); + System.exit(0); + break; + case "A": + toRemoveFromBase.add(obs.baseSuppression); + for (int j = i + 1; j < obsolete.size(); j++) { + toRemoveFromBase.add(obsolete.get(j).baseSuppression); + } + System.out.println("✓ Will remove all remaining obsolete suppressions\n"); + return toRemoveFromBase; + default: + System.out.println("Invalid choice, skipping.\n"); + } + } + + return toRemoveFromBase; + } + + private static void openBrowser(String url) { + try { + String os = System.getProperty("os.name").toLowerCase(); + if (os.contains("mac")) { + Runtime.getRuntime().exec("open " + url); + } else if (os.contains("nux")) { + Runtime.getRuntime().exec("xdg-open " + url); + } else if (os.contains("win")) { + Runtime.getRuntime().exec("rundll32 url.dll,FileProtocolHandler " + url); + } + } catch (IOException e) { + System.err.println("Could not open browser: " + e.getMessage()); + } + } + + private static List findObsoleteSuppressions( + List commits, List baseSuppressions) throws Exception { + + List obsolete = new ArrayList<>(); + Set processedKeys = new HashSet<>(); + + for (GitCommit commit : commits) { + // Get the diff for this commit + String diff = getFileDiffForCommit(commit.fullHash, GENERATED_BRANCH, GENERATED_FILE); +// System.out.println("Analyzing commit " + commit.shortHash + " - " + diff); + // Parse deleted/modified suppressions from diff + List deletions = parseDeletedSuppressions(diff); + System.out.println("Found " + deletions.size() + " deleted/modified suppressions in this commit"); + for (DiffSuppression deletion : deletions) { + System.out.println("Processing deleted/modified suppression: " + deletion.toString()); + // Check if this suppression exists in base + Suppression matchInBase = findMatchInBase(deletion, baseSuppressions); + + if (matchInBase != null) { + String key = matchInBase.getKey(); + if (!processedKeys.contains(key)) { + 
processedKeys.add(key); + + // Try to find if there's a new version in current generated file + String newVersion = findNewVersionInGenerated(deletion); + + obsolete.add(new ObsoleteSuppression( + matchInBase, + commit, + deletion.wasModified ? newVersion : null, + !deletion.wasModified + )); + } + } + } + } + + return obsolete; + } + + private static String findNewVersionInGenerated(DiffSuppression deletion) { + // This would require parsing the current generated file + // For now, return null - can be enhanced later + return null; + } + + private static Suppression findMatchInBase(DiffSuppression deletion, List baseSuppressions) { + for (Suppression base : baseSuppressions) { + if (deletion.matches(base)) { + return base; + } + } + return null; + } + + private static List parseDeletedSuppressions(String diff) { + List result = new ArrayList<>(); + + // Split diff into lines + String[] lines = diff.split("\n"); + + StringBuilder currentSuppression = new StringBuilder(); + boolean inSuppression = false; + boolean hasDeletedContent = false; + + for (String line : lines) { + String trimmed = line.trim(); + + // Check if we're entering a suppress block (with or without '-' prefix) + if (trimmed.equals("") || + (line.startsWith("-") && line.substring(1).trim().equals(""))) { + // Start of a new suppression + inSuppression = true; + hasDeletedContent = line.startsWith("-"); + currentSuppression = new StringBuilder(); + + // Add the opening tag (without the '-' if present) + String content = line.startsWith("-") ? 
line.substring(1) : line; + currentSuppression.append(content.trim()).append("\n"); + + } else if (inSuppression) { + // We're inside a suppression block + boolean isDeletedLine = line.startsWith("-"); + + if (isDeletedLine) { + hasDeletedContent = true; + String content = line.substring(1); // Remove the '-' + currentSuppression.append(content.trim()).append("\n"); + + // Check if we've reached the end of this suppression + if (content.trim().equals("")) { + if (hasDeletedContent) { + try { + DiffSuppression ds = DiffSuppression.fromXmlString(currentSuppression.toString()); + ds.wasModified = false; + result.add(ds); + } catch (Exception e) { + // Skip malformed suppressions + System.err.println("Warning: Could not parse suppression: " + e.getMessage()); + } + } + + inSuppression = false; + hasDeletedContent = false; + currentSuppression = new StringBuilder(); + } + } else if (trimmed.equals("")) { + // Context line showing end of suppression + currentSuppression.append(trimmed).append("\n"); + + // If we had deleted content, this is a deletion + if (hasDeletedContent) { + try { + DiffSuppression ds = DiffSuppression.fromXmlString(currentSuppression.toString()); + ds.wasModified = false; + result.add(ds); + } catch (Exception e) { + System.err.println("Warning: Could not parse suppression: " + e.getMessage()); + } + } + + inSuppression = false; + hasDeletedContent = false; + currentSuppression = new StringBuilder(); + } else if (!line.startsWith("+")) { + // Context line (no + or -) + currentSuppression.append(trimmed).append("\n"); + } else { + // Hit an addition line - if we had deleted content, this might be a modification + // For now, if we had any deleted content, treat it as a deletion + if (hasDeletedContent && trimmed.equals("")) { + try { + DiffSuppression ds = DiffSuppression.fromXmlString(currentSuppression.toString()); + ds.wasModified = true; + result.add(ds); + } catch (Exception e) { + System.err.println("Warning: Could not parse suppression: " + 
                                e.getMessage());
                            }

                            // Reset the accumulator so the next suppression
                            // block starts from a clean state.
                            inSuppression = false;
                            hasDeletedContent = false;
                            currentSuppression = new StringBuilder();
                        }
                    }
                }
            }

        return result;
    }

    /**
     * Returns the raw diff text produced by {@code git show <commit> -- <file>}.
     *
     * NOTE(review): the {@code branch} parameter is unused here — the commit
     * hash alone identifies the revision. Confirm it is intentionally kept
     * for signature symmetry with the other git helpers.
     *
     * @param commitHash the full commit hash to show
     * @param branch     the branch name (currently unused)
     * @param file       the path of the file to diff
     * @throws Exception if the git command fails or cannot be started
     */
    private static String getFileDiffForCommit(String commitHash, String branch, String file) throws Exception {
        // Get the diff for just this file in this commit
        return execGitCommand("git", "show", commitHash, "--", file);
    }

    /**
     * Lists the commits on {@code branch} that touched {@code file}, newest
     * first (the git log default order).
     *
     * Each log line has the shape {@code fullHash|shortHash|isoDate|subject};
     * splitting with a limit of 4 keeps commit subjects that themselves
     * contain '|' intact in the final field.
     */
    private static List getCommitsAffectingFile(String branch, String file) throws Exception {
        // Get commits that modified the file, in reverse chronological order
        // Using simple format without quotes to avoid parsing issues
        String logOutput = execGitCommand("git", "log", "--pretty=format:%H|%h|%ai|%s", branch, "--", file);

        List commits = new ArrayList<>();
        for (String line : logOutput.split("\n")) {
            if (line.trim().isEmpty()) continue;

            String[] parts = line.split("\\|", 4);
            // Lines that do not yield all four fields are silently skipped.
            if (parts.length == 4) {
                commits.add(new GitCommit(
                        parts[0].trim(),
                        parts[1].trim(),
                        parts[2].trim(),
                        parts[3].trim()
                ));
            }
        }

        return commits;
    }

    /**
     * Runs the given command and returns its combined stdout/stderr output
     * ({@code redirectErrorStream(true)} merges the two streams).
     *
     * The output is fully drained before {@code waitFor()} so a chatty git
     * command cannot deadlock on a full pipe buffer.
     *
     * @throws RuntimeException if the process exits non-zero; the message
     *                          includes the command line and its output
     * @throws Exception        if the process cannot be started or the wait
     *                          is interrupted
     */
    private static String execGitCommand(String... args) throws Exception {
        ProcessBuilder pb = new ProcessBuilder(args);
        pb.redirectErrorStream(true);
        Process process = pb.start();

        StringBuilder output = new StringBuilder();
        try (BufferedReader reader = new BufferedReader(
                new InputStreamReader(process.getInputStream()))) {
            String line;
            while ((line = reader.readLine()) != null) {
                output.append(line).append("\n");
            }
        }

        int exitCode = process.waitFor();
        if (exitCode != 0) {
            throw new RuntimeException("Git command failed: " + String.join(" ", args) + "\n" + output);
        }

        return output.toString();
    }

    /**
     * Intentional no-op retained only for compatibility; removal is done
     * line-by-line instead (see below) to preserve file formatting.
     *
     * NOTE(review): dead code — consider deleting once no callers remain.
     */
    private static void removeSuppressions(Document doc, Set toRemove) {
        // This method is no longer needed since we do line-based removal
        // Kept for compatibility but does nothing
    }

    /**
     * Remove suppressions from the file line-by-line to preserve formatting.
+ * This avoids the XML transformer reformatting the entire file. + */ + private static void removesuppressionsLineByLine(String filePath, Set toRemove) throws Exception { + // Read the entire file + List lines = Files.readAllLines(Paths.get(filePath)); + + // Parse suppressions with their line ranges + List lineRanges = findSuppressionLineRanges(lines); + + // Determine which line ranges to remove + Set linesToRemove = new HashSet<>(); + for (SuppressionLineRange range : lineRanges) { + if (toRemove.contains(range.suppression)) { + for (int i = range.startLine; i <= range.endLine; i++) { + linesToRemove.add(i); + } + } + } + + // Write back only the lines we're keeping + try (BufferedWriter writer = new BufferedWriter(new FileWriter(filePath))) { + for (int i = 0; i < lines.size(); i++) { + if (!linesToRemove.contains(i)) { + writer.write(lines.get(i)); + writer.newLine(); + } + } + } + } + + /** + * Find the line ranges for each suppression in the file. + */ + private static List findSuppressionLineRanges(List lines) throws Exception { + List ranges = new ArrayList<>(); + + int startLine = -1; + StringBuilder currentXml = new StringBuilder(); + + for (int i = 0; i < lines.size(); i++) { + String line = lines.get(i); + String trimmed = line.trim(); + + if (trimmed.startsWith("") || trimmed.equals("")) { + startLine = i; + currentXml = new StringBuilder(); + currentXml.append(line).append("\n"); + } else if (startLine >= 0) { + currentXml.append(line).append("\n"); + + if (trimmed.equals("")) { + // End of suppression found + try { + // Parse this suppression + String xmlString = currentXml.toString(); + DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance(); + factory.setNamespaceAware(true); + DocumentBuilder builder = factory.newDocumentBuilder(); + Document doc = builder.parse(new java.io.ByteArrayInputStream(xmlString.getBytes())); + Element suppressElement = doc.getDocumentElement(); + + Suppression suppression = new 
Suppression(suppressElement); + ranges.add(new SuppressionLineRange(suppression, startLine, i)); + } catch (Exception e) { + // Skip malformed suppressions + System.err.println("Warning: Could not parse suppression at line " + startLine + ": " + e.getMessage()); + } + + startLine = -1; + currentXml = new StringBuilder(); + } + } + } + + return ranges; + } + + static class SuppressionLineRange { + final Suppression suppression; + final int startLine; + final int endLine; + + SuppressionLineRange(Suppression suppression, int startLine, int endLine) { + this.suppression = suppression; + this.startLine = startLine; + this.endLine = endLine; + } + } + + private static Document parseXmlFile(String path) throws Exception { + DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance(); + factory.setNamespaceAware(true); + DocumentBuilder builder = factory.newDocumentBuilder(); + return builder.parse(new File(path)); + } + + private static void writeXmlFile(Document doc, String path) throws Exception { + TransformerFactory transformerFactory = TransformerFactory.newInstance(); + Transformer transformer = transformerFactory.newTransformer(); + transformer.setOutputProperty(OutputKeys.INDENT, "yes"); + transformer.setOutputProperty(OutputKeys.ENCODING, "UTF-8"); + transformer.setOutputProperty("{http://xml.apache.org/xslt}indent-amount", "4"); + + DOMSource source = new DOMSource(doc); + StreamResult result = new StreamResult(new File(path)); + transformer.transform(source, result); + } + + private static List extractSuppressions(Document doc) { + List suppressions = new ArrayList<>(); + NodeList nodes = doc.getElementsByTagName("suppress"); + + for (int i = 0; i < nodes.getLength(); i++) { + Element element = (Element) nodes.item(i); + suppressions.add(new Suppression(element)); + } + + return suppressions; + } + + static class GitCommit { + final String fullHash; + final String shortHash; + final String date; + final String message; + + GitCommit(String fullHash, 
String shortHash, String date, String message) { + this.fullHash = fullHash; + this.shortHash = shortHash; + this.date = date; + this.message = message; + } + } + + static class ObsoleteSuppression { + final Suppression baseSuppression; + final GitCommit commit; + final String newVersion; + final boolean wasDeleted; + + ObsoleteSuppression(Suppression baseSuppression, GitCommit commit, + String newVersion, boolean wasDeleted) { + this.baseSuppression = baseSuppression; + this.commit = commit; + this.newVersion = newVersion; + this.wasDeleted = wasDeleted; + } + } + + static class DiffSuppression { + String packageUrl; + String gav; + String filePath; + String sha1; + Set cpes = new HashSet<>(); + Set cves = new HashSet<>(); + boolean wasModified; + + static DiffSuppression fromXmlString(String xml) throws Exception { + // Simple parsing of XML string + DiffSuppression ds = new DiffSuppression(); + + ds.packageUrl = extractValue(xml, "packageUrl"); + ds.gav = extractValue(xml, "gav"); + ds.filePath = extractValue(xml, "filePath"); + ds.sha1 = extractValue(xml, "sha1"); + ds.cpes = extractValues(xml, "cpe"); + ds.cves = extractValues(xml, "cve"); + + return ds; + } + + private static String extractValue(String xml, String tag) { + Pattern p = Pattern.compile("<" + tag + "[^>]*>([^<]+)"); + Matcher m = p.matcher(xml); + return m.find() ? 
        m.group(1).trim() : null;
        }

        /**
         * Extracts the text content of every occurrence of {@code tag} in the
         * given XML snippet via regex rather than a full XML parse.
         *
         * NOTE(review): the pattern text may be garbled in this view of the
         * file (no closing-tag match is visible) — confirm against the
         * original source.
         */
        private static Set extractValues(String xml, String tag) {
            Set values = new HashSet<>();
            Pattern p = Pattern.compile("<" + tag + "[^>]*>([^<]+)");
            Matcher m = p.matcher(xml);
            while (m.find()) {
                values.add(m.group(1).trim());
            }
            return values;
        }

        /**
         * Whether this diff-extracted suppression refers to the same rule as
         * the given base-file suppression.
         *
         * Key fields are tried in order: packageUrl, gav, filePath (each also
         * requires an overlapping CPE/CVE), then sha1 (sufficient on its own).
         *
         * NOTE(review): when a key field matches but there is no CPE/CVE
         * overlap this returns false immediately rather than trying the
         * remaining key fields — confirm that early-return is intended.
         */
        boolean matches(Suppression s) {
            // Match on key fields
            if (packageUrl != null && packageUrl.equals(s.packageUrl)) {
                return hasOverlappingSuppressions(s);
            }
            if (gav != null && gav.equals(s.gav)) {
                return hasOverlappingSuppressions(s);
            }
            if (filePath != null && filePath.equals(s.filePath)) {
                return hasOverlappingSuppressions(s);
            }
            if (sha1 != null && sha1.equals(s.sha1)) {
                return true;
            }
            return false;
        }

        /**
         * True when the two suppressions share at least one CPE or CVE, or
         * when neither side lists any CPEs/CVEs at all (both empty counts as
         * overlapping).
         */
        private boolean hasOverlappingSuppressions(Suppression s) {
            Set commonCpes = new HashSet<>(cpes);
            commonCpes.retainAll(s.cpes);

            Set commonCves = new HashSet<>(cves);
            commonCves.retainAll(s.cves);

            return !commonCpes.isEmpty() || !commonCves.isEmpty() ||
                    (cpes.isEmpty() && s.cpes.isEmpty() && cves.isEmpty() && s.cves.isEmpty());
        }
    }

    /**
     * Immutable view of one suppress element from the base suppression file,
     * with the commonly-matched fields pre-extracted.
     *
     * NOTE(review): generic type parameters on the Set fields appear stripped
     * in this view of the file — confirm against the original source.
     */
    static class Suppression {
        // Free-text notes from the suppress element, if any.
        final String notes;
        // Key fields: at most one is typically populated; see getKey().
        final String packageUrl;
        final String gav;
        final String filePath;
        final String sha1;
        // Vulnerability identifiers suppressed by this rule.
        final Set cpes;
        final Set cves;
        final Set vulnerabilityNames;
        // Backing DOM element, retained for later removal/serialization.
        final Element element;

        Suppression(Element element) {
            this.element = element;
            this.notes = getElementText(element, "notes");
            this.packageUrl = getElementText(element, "packageUrl");
            this.gav = getElementText(element, "gav");
            this.filePath = getElementText(element, "filePath");
            this.sha1 = getElementText(element, "sha1");
            this.cpes = getElementTexts(element, "cpe");
            this.cves = getElementTexts(element, "cve");
            this.vulnerabilityNames = getElementTexts(element, "vulnerabilityName");
        }

        /**
         * Deduplication key: the first populated key field, in priority order
         * packageUrl, gav, filePath, sha1. Also backs equals()/hashCode().
         */
        String getKey() {
            if (packageUrl != null) return "packageUrl:" + packageUrl;
            if (gav != null) return "gav:" + gav;
            if (filePath != null) return "filePath:" + filePath;
if (sha1 != null) return "sha1:" + sha1; + return "unknown"; + } + + private String getElementText(Element parent, String tagName) { + NodeList nodes = parent.getElementsByTagName(tagName); + if (nodes.getLength() > 0) { + return nodes.item(0).getTextContent().trim(); + } + return null; + } + + private Set getElementTexts(Element parent, String tagName) { + Set texts = new HashSet<>(); + NodeList nodes = parent.getElementsByTagName(tagName); + for (int i = 0; i < nodes.getLength(); i++) { + String text = nodes.item(i).getTextContent().trim(); + if (!text.isEmpty()) { + texts.add(text); + } + } + return texts; + } + + String toDetailedString() { + StringBuilder sb = new StringBuilder(); + + if (notes != null) { + sb.append("Notes: ").append(notes.replaceAll("\\s+", " ")).append("\n\n"); + } + + if (packageUrl != null) { + sb.append("PackageURL: ").append(packageUrl).append("\n"); + } + if (gav != null) { + sb.append("GAV: ").append(gav).append("\n"); + } + if (filePath != null) { + sb.append("FilePath: ").append(filePath).append("\n"); + } + if (sha1 != null) { + sb.append("SHA1: ").append(sha1).append("\n"); + } + if (!cpes.isEmpty()) { + sb.append("CPEs: ").append(cpes).append("\n"); + } + if (!cves.isEmpty()) { + sb.append("CVEs: ").append(cves).append("\n"); + } + if (!vulnerabilityNames.isEmpty()) { + sb.append("Vulnerability Names: ").append(vulnerabilityNames).append("\n"); + } + + return sb.toString(); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (!(o instanceof Suppression)) return false; + Suppression that = (Suppression) o; + return getKey().equals(that.getKey()); + } + + @Override + public int hashCode() { + return Objects.hash(getKey()); + } + } +} diff --git a/utils/src/test/java/org/owasp/dependencycheck/utils/SuppressionSyncAnalyzerTest.java b/utils/src/test/java/org/owasp/dependencycheck/utils/SuppressionSyncAnalyzerTest.java new file mode 100644 index 00000000000..ceeb3b7f74f --- /dev/null +++ 
b/utils/src/test/java/org/owasp/dependencycheck/utils/SuppressionSyncAnalyzerTest.java @@ -0,0 +1,159 @@ +/* + * This file is part of dependency-check-utils. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * Copyright (c) 2025 Jeremy Long. All Rights Reserved. + */ +package org.owasp.dependencycheck.utils; + +import org.junit.jupiter.api.Test; + +import java.lang.reflect.Method; +import java.util.List; + +import static org.junit.jupiter.api.Assertions.*; + +/** + * Tests for SuppressionSyncAnalyzer, particularly the git diff parsing logic. + */ +class SuppressionSyncAnalyzerTest { + + /** + * Test parsing a simple deletion from a git diff. 
+ */ + @Test + void testParseDeletedSuppressions_SimpleDeletion() throws Exception { + String gitDiff = "diff --git a/generatedSuppressions.xml b/generatedSuppressions.xml\n" + + "index abc123..def456 100644\n" + + "--- a/generatedSuppressions.xml\n" + + "+++ b/generatedSuppressions.xml\n" + + "@@ -1,10 +1,5 @@\n" + + "- \n" + + "- \n" + + "- ^pkg:maven/com\\.example/test@.*$\n" + + "- cpe:/a:example:test\n" + + "- \n" + + " \n" + + " result = parseDeletedSuppressions(gitDiff); + + System.out.println("Found " + result.size() + " deletions"); + for (Object o : result) { + System.out.println(" - " + o); + } + + assertEquals(1, result.size(), "Should find exactly one deleted suppression"); + + Object suppression = result.get(0); + String packageUrl = (String) getField(suppression, "packageUrl"); + System.out.println("PackageURL: " + packageUrl); + assertEquals("^pkg:maven/com\\.example/test@.*$", packageUrl); + + @SuppressWarnings("unchecked") + java.util.Set cpes = (java.util.Set) getField(suppression, "cpes"); + System.out.println("CPEs: " + cpes); + assertTrue(cpes.contains("cpe:/a:example:test")); + } + + /** + * Test parsing a deletion where the opening tag is context (no '-' prefix). 
+ */ + @Test + void testParseDeletedSuppressions_ContextOpeningTag() throws Exception { + String gitDiff = "@@ -720,10 +720,5 @@\n" + + " \n" + + "- \n" + + "- ^pkg:maven/com\\.lightbend\\.akka/akka-projection-grpc.*$\n" + + "- cpe:/a:grpc:grpc\n" + + "-\n" + + "-\n"; + + List result = parseDeletedSuppressions(gitDiff); + + System.out.println("Found " + result.size() + " deletions with context opening tag"); + assertEquals(1, result.size(), "Should find one deleted suppression with context opening tag"); + + Object suppression = result.get(0); + String packageUrl = (String) getField(suppression, "packageUrl"); + System.out.println("PackageURL: " + packageUrl); + assertEquals("^pkg:maven/com\\.lightbend\\.akka/akka-projection-grpc.*$", packageUrl); + + @SuppressWarnings("unchecked") + java.util.Set cpes = (java.util.Set) getField(suppression, "cpes"); + assertTrue(cpes.contains("cpe:/a:grpc:grpc")); + } + + /** + * Test parsing multiple deletions in one diff. + */ + @Test + void testParseDeletedSuppressions_MultipleDeletions() throws Exception { + String gitDiff = "diff --git a/generatedSuppressions.xml b/generatedSuppressions.xml\n" + + "@@ -100,20 +100,5 @@\n" + + "- \n" + + "- \n" + + "- ^pkg:maven/com\\.graphql-java/graphql-java-extended-scalars@.*$\n" + + "- cpe:/a:graphql-java_project:graphql-java\n" + + "- \n" + + "- \n" + + "- \n" + + "- ^pkg:maven/com\\.graphql-java-kickstart/graphql-java-kickstart@.*$\n" + + "- cpe:/a:graphql-java_project:graphql-java\n" + + "- \n" + + "+ \n" + + "+ \n" + + "+ ^pkg:(?!maven/com\\.graphql-java/graphql-java@).*$\n" + + "+ cpe:/a:graphql-java:graphql-java\n" + + "+ \n"; + + List result = parseDeletedSuppressions(gitDiff); + + System.out.println("Found " + result.size() + " deletions"); + assertEquals(2, result.size(), "Should find two deleted suppressions"); + + // Verify first deletion + Object first = result.get(0); + String firstUrl = (String) getField(first, "packageUrl"); + System.out.println("First packageURL: " + 
firstUrl); + assertEquals("^pkg:maven/com\\.graphql-java/graphql-java-extended-scalars@.*$", firstUrl); + + // Verify second deletion + Object second = result.get(1); + String secondUrl = (String) getField(second, "packageUrl"); + System.out.println("Second packageURL: " + secondUrl); + assertEquals("^pkg:maven/com\\.graphql-java-kickstart/graphql-java-kickstart@.*$", secondUrl); + } + + // Helper methods to access private fields/methods via reflection + + @SuppressWarnings("unchecked") + private List parseDeletedSuppressions(String diff) throws Exception { + Class clazz = Class.forName("org.owasp.dependencycheck.utils.SuppressionSyncAnalyzer"); + Method method = clazz.getDeclaredMethod("parseDeletedSuppressions", String.class); + method.setAccessible(true); + return (List) method.invoke(null, diff); + } + + private Object getField(Object obj, String fieldName) throws Exception { + var field = obj.getClass().getDeclaredField(fieldName); + field.setAccessible(true); + return field.get(obj); + } +}