Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
23 commits
Select commit Hold shift + click to select a range
eeaebcf
Antithesis SSI PoC
robertomonteromiguel Nov 7, 2025
d4fc523
Add comments
robertomonteromiguel Nov 7, 2025
d345188
Disable the tracer instrumentation
robertomonteromiguel Nov 10, 2025
9759062
Antithesis: Run default_antithesis scenario on a docker compose
robertomonteromiguel Nov 10, 2025
8c149fe
fix the test
robertomonteromiguel Nov 10, 2025
febc1f6
activate only some tests
robertomonteromiguel Nov 11, 2025
2dd6cf8
fix test the tests
robertomonteromiguel Nov 11, 2025
d6f461b
The singleton driver
robertomonteromiguel Nov 11, 2025
5029f8b
only one test case. Copy logs to antithesis output dir
robertomonteromiguel Nov 12, 2025
ee2e67f
execute only a few tests
robertomonteromiguel Nov 13, 2025
7fc2e7f
Added docker ssi profiling
robertomonteromiguel Nov 14, 2025
8a1930b
profiling
robertomonteromiguel Nov 14, 2025
b3aeb46
add to the classpath
robertomonteromiguel Nov 17, 2025
6bbf899
test java 21 correto
robertomonteromiguel Nov 17, 2025
951d98c
dotnet Antithesis instrumentation
robertomonteromiguel Nov 17, 2025
c122d9b
antithesis instrument the java tracer
robertomonteromiguel Nov 18, 2025
a34505e
Merge remote-tracking branch 'origin/robertomonteromiguel/antithesis_…
robertomonteromiguel Nov 19, 2025
68d5024
Merge remote-tracking branch 'origin/robertomonteromiguel/system_test…
robertomonteromiguel Nov 19, 2025
ada4f5d
Docker SSI
robertomonteromiguel Nov 19, 2025
d6cfa59
java erors
robertomonteromiguel Nov 24, 2025
b4f552a
clean
robertomonteromiguel Nov 24, 2025
413d96e
undo unused changes
robertomonteromiguel Nov 24, 2025
9561685
Use the app.sh
robertomonteromiguel Nov 26, 2025
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion tests/appsec/iast/source/test_uri.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,5 +13,5 @@ class TestURI(BaseSourceTest):
endpoint = "/iast/source/uri/test"
requests_kwargs = [{"method": "GET"}]
source_type = "http.request.uri"
source_value = "http://localhost:7777/iast/source/uri/test"
source_value = "http://weblog:7777/iast/source/uri/test"
source_names = None
7 changes: 7 additions & 0 deletions tests/appsec/test_traces.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@
@scenarios.external_processing
@scenarios.stream_processing_offload
@scenarios.default
@scenarios.default_antithesis
@scenarios.appsec_lambda_default
class Test_RetainTraces:
"""Retain trace (manual keep & appsec.event = true)"""
Expand Down Expand Up @@ -64,6 +65,8 @@ def validate_appsec_event_span_tags(span: dict):
@scenarios.external_processing
@scenarios.stream_processing_offload
@scenarios.default
@scenarios.default_antithesis
@scenarios.default_antithesis_debug
@scenarios.appsec_lambda_default
class Test_AppSecEventSpanTags:
"""AppSec correctly fill span tags."""
Expand Down Expand Up @@ -152,6 +155,7 @@ def test_root_span_coherence(self):
@scenarios.external_processing
@scenarios.stream_processing_offload
@scenarios.default
@scenarios.default_antithesis
@scenarios.appsec_lambda_default
class Test_AppSecObfuscator:
"""AppSec obfuscates sensitive data."""
Expand Down Expand Up @@ -308,6 +312,7 @@ def validate_appsec_span_tags(span: dict, appsec_data: dict): # noqa: ARG001
@scenarios.external_processing
@scenarios.stream_processing_offload
@scenarios.default
@scenarios.default_antithesis
@scenarios.appsec_lambda_default
class Test_CollectRespondHeaders:
"""AppSec should collect some headers for http.response and store them in span tags."""
Expand Down Expand Up @@ -340,6 +345,7 @@ def validate_response_headers(span: dict):
@scenarios.external_processing
@scenarios.stream_processing_offload
@scenarios.default
@scenarios.default_antithesis
@scenarios.appsec_lambda_default
class Test_CollectDefaultRequestHeader:
HEADERS = {
Expand Down Expand Up @@ -376,6 +382,7 @@ def test_collect_default_request_headers(self):
@scenarios.external_processing
@scenarios.stream_processing_offload
@scenarios.default
@scenarios.default_antithesis
@scenarios.appsec_lambda_default
class Test_ExternalWafRequestsIdentification:
def setup_external_wafs_header_collection(self):
Expand Down
37 changes: 37 additions & 0 deletions tests/docker_ssi/test_docker_ssi_profiling.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,37 @@
from urllib.parse import urlparse
import requests
import time
from utils import scenarios, weblog, features
from utils import logger


@features.profiling
@scenarios.docker_ssi_profiling
class TestDockerSSIAppsecFeatures:
    """Test the ssi in a simulated host injection environment (docker container + test agent)
    We test that the injection is performed and profiling is enabled and telemetry is generated.
    """

    # NOTE(review): class name says "Appsec" but this class validates profiling —
    # consider renaming to TestDockerSSIProfiling in a follow-up (pytest collects by name).

    def setup_profiling(self):
        """Send one request to the weblog so the tracer has traffic to profile."""
        parsed_url = urlparse(scenarios.docker_ssi_profiling.weblog_url)
        self.r = weblog.request("GET", parsed_url.path, domain=parsed_url.hostname, port=parsed_url.port)
        logger.info(f"Setup Docker SSI profiling installation {self.r}")

    def test_profiling(self):
        """Poll the test agent until a profile upload (/profiling/v1/input) is recorded.

        Exits as soon as the upload is observed instead of always spinning for
        the full timeout; fails if nothing is seen within 90 seconds.
        """
        agent_port = scenarios.docker_ssi_profiling.agent_port
        agent_host = scenarios.docker_ssi_profiling.agent_host
        profiling_request_found = False
        timeout = 90
        mustend = time.time() + timeout
        while time.time() < mustend and not profiling_request_found:
            response = requests.get(
                f"http://{agent_host}:{agent_port}/test/session/requests",
                timeout=60,
            )
            recorded_requests = response.json()  # parse once per poll, not once per log line
            logger.info(f"Profiling request response: {recorded_requests}")
            for request in recorded_requests:
                logger.info(f"Profiling request: {request}")
                if request["url"].endswith("/profiling/v1/input"):
                    profiling_request_found = True
                    break  # stop scanning: the profile upload has been observed
            if not profiling_request_found:
                time.sleep(1)  # give the tracer time to flush the next profile
        assert profiling_request_found, "No profiling request found"
2 changes: 2 additions & 0 deletions tests/test_the_test/test_group_rules.py
Original file line number Diff line number Diff line change
Expand Up @@ -39,6 +39,7 @@ def test_tracer_release():
scenarios.docker_ssi_appsec,
scenarios.docker_ssi_crashtracking,
scenarios.docker_ssi_servicenaming,
scenarios.docker_ssi_profiling,
scenarios.external_processing_blocking, # need to declare a white list of library in get-workflow-parameters
scenarios.external_processing, # need to declare a white list of library in get-workflow-parameters
scenarios.stream_processing_offload_blocking, # need to declare a white list of library in get-workflow-parameters
Expand Down Expand Up @@ -67,6 +68,7 @@ def test_tracer_release():
scenarios.multi_installer_auto_injection,
scenarios.demo_aws,
scenarios.otel_collector_e2e,
scenarios.default_antithesis,
]

for scenario in get_all_scenarios():
Expand Down
14 changes: 14 additions & 0 deletions utils/_context/_scenarios/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
from .aws_lambda import LambdaScenario
from .core import Scenario, scenario_groups
from .default import DefaultScenario
from .default_antithesis import DefaultAntithesisScenario
from .endtoend import DockerScenario, EndToEndScenario
from .integrations import CrossedTracingLibraryScenario, IntegrationsScenario, AWSIntegrationsScenario
from .open_telemetry import OpenTelemetryScenario
Expand Down Expand Up @@ -37,6 +38,8 @@ class _Scenarios:
mock_the_test_2 = TestTheTestScenario("MOCK_THE_TEST_2", doc="Mock scenario that check system-tests internals")

default = DefaultScenario("DEFAULT")
default_antithesis = DefaultAntithesisScenario("DEFAULT_ANTITHESIS")
default_antithesis_debug = DefaultScenario("DEFAULT_ANTITHESIS_DEBUG")

# performance scenario just spawn an agent and a weblog, and spies the CPU and mem usage
performances = PerformanceScenario(
Expand Down Expand Up @@ -999,6 +1002,17 @@ class _Scenarios:
appsec_enabled="true",
scenario_groups=[scenario_groups.all, scenario_groups.docker_ssi],
)
docker_ssi_profiling = DockerSSIScenario(
"DOCKER_SSI_PROFILING",
doc="Validates profiling for ssi on a docker environment",
extra_env_vars={
"DD_PROFILING_UPLOAD_PERIOD": "2",
"DD_INTERNAL_PROFILING_LONG_LIVED_THRESHOLD": "1000",
"DD_PROFILING_START_FORCE_FIRST": "true",
},
profiling_enabled="auto",
scenario_groups=[scenario_groups.all, scenario_groups.docker_ssi],
)
docker_ssi_crashtracking = DockerSSIScenario(
"DOCKER_SSI_CRASHTRACKING",
doc="Validates the crashtracking for ssi on a docker environment",
Expand Down
1 change: 1 addition & 0 deletions utils/_context/_scenarios/core.py
Original file line number Diff line number Diff line change
Expand Up @@ -62,6 +62,7 @@ class _ScenarioGroups:
parametric = ScenarioGroup()
appsec_low_waf_timeout = ScenarioGroup()
default = ScenarioGroup()
default_antithesis = ScenarioGroup()
feature_flag_exposure = ScenarioGroup()

def __getitem__(self, key: str) -> ScenarioGroup:
Expand Down
192 changes: 192 additions & 0 deletions utils/_context/_scenarios/default_antithesis.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,192 @@
"""Default Antithesis scenario - a minimal scenario that doesn't start any containers."""

from logging import FileHandler
import os
import pytest

from watchdog.observers.polling import PollingObserver
from watchdog.events import FileSystemEventHandler, FileSystemEvent

from utils import interfaces
from utils.interfaces._core import ProxyBasedInterfaceValidator
from utils._context.component_version import ComponentVersion
from utils._logger import logger, get_log_formatter
from .core import Scenario, scenario_groups


class DefaultAntithesisScenario(Scenario):
    """A minimal scenario that doesn't start containers.

    This scenario is designed for Antithesis testing where containers
    are managed externally and we only want to run the test logic.

    This scenario will run all tests that are decorated with @scenarios.default
    by checking for the "DEFAULT" scenario marker during test collection.
    """

    def __init__(self, name: str) -> None:
        super().__init__(
            name,
            github_workflow=None,
            doc="Antithesis scenario that doesn't start containers - for external container management",
            # Include DEFAULT scenario groups for tests using @scenario_groups decorators
            scenario_groups=[
                scenario_groups.essentials,
                scenario_groups.telemetry,
                scenario_groups.default,
                scenario_groups.default_antithesis,
            ],
        )
        # Lazily resolved library component; see the `library` property.
        self._library: ComponentVersion | None = None

        # Interface timeout properties in seconds (overridden per-library in configure())
        self.library_interface_timeout = 35  # Default timeout
        self.agent_interface_timeout = 30
        self.backend_interface_timeout = 0

    def pytest_configure(self, config: pytest.Config) -> None:
        """Configure the scenario but don't delete the logs folder if it exists."""
        # Store replay and worker status
        self.replay = config.option.replay
        self.is_main_worker = not hasattr(config, "workerinput")

        # Create log folder WITHOUT removing it if it exists — external tooling
        # (Antithesis) may have pre-populated it before the test run.
        if self.is_main_worker:
            self._create_log_subfolder("", remove_if_exists=False)

        # Set up logging handler
        handler = FileHandler(f"{self.host_log_folder}/tests.log", encoding="utf-8")
        handler.setFormatter(get_log_formatter())
        logger.addHandler(handler)

        # Call configure
        self.configure(config)

    def configure(self, config: pytest.Config) -> None:
        """Configure the scenario but don't start any containers."""
        # Get library information from command line or environment
        library_name = config.option.library or os.environ.get("DD_LANG", "")
        library_version = os.environ.get("DD_LIBRARY_VERSION", "unknown")

        if library_name:
            self._library = ComponentVersion(library_name, library_version)

        # Configure interfaces like in endtoend.py. The agent interfaces are
        # deliberately disabled: the agent is managed externally by Antithesis.
        # interfaces.agent.configure(self.host_log_folder, replay=self.replay)
        interfaces.library.configure(self.host_log_folder, replay=self.replay)
        interfaces.backend.configure(self.host_log_folder, replay=self.replay)
        interfaces.library_dotnet_managed.configure(self.host_log_folder, replay=self.replay)
        interfaces.library_stdout.configure(self.host_log_folder, replay=self.replay)
        # interfaces.agent_stdout.configure(self.host_log_folder, replay=self.replay)

        # Set library-specific interface timeouts (seconds); unknown libraries
        # get a conservative 40s default.
        library_timeouts = {
            "java": 35,
            "golang": 10,
            "nodejs": 0,
            "ruby": 0,
            # possibly something weird on obfuscator, let increase the delay for now
            "php": 10,
            "python": 5,
        }
        self.library_interface_timeout = library_timeouts.get(library_name, 40)
        logger.debug(f"Library interface timeout set to {self.library_interface_timeout}")

        logger.debug("Getting warmups")
        if not self.replay:
            self.warmups.insert(1, self._start_interfaces_watchdog)

    @property
    def library(self) -> ComponentVersion:
        """Return the library component version, resolving it lazily from the
        environment when configure() did not set it."""
        if not self._library:
            library_name = os.environ.get("DD_LANG", "")
            library_version = os.environ.get("DD_LIBRARY_VERSION", "unknown")
            self._library = ComponentVersion(library_name, library_version)
        return self._library

    @property
    def host_log_folder(self) -> str:
        """Override to use 'logs' folder instead of 'logs_default_antithesis'."""
        return "logs"

    @property
    def weblog_variant(self) -> str:
        # Weblog variant is only known to the external environment.
        return os.environ.get("SYSTEM_TESTS_WEBLOG_VARIANT", "")

    def start_interfaces_watchdog(self, interfaces_list: list[ProxyBasedInterfaceValidator]) -> None:
        """Start file system watchdog to automatically ingest interface files."""

        class Event(FileSystemEventHandler):
            def __init__(self, interface: ProxyBasedInterfaceValidator) -> None:
                super().__init__()
                self.interface = interface

            def _ingest(self, event: FileSystemEvent) -> None:
                if event.is_directory:
                    return
                self.interface.ingest_file(event.src_path)

            # Both file creation and modification trigger an ingest.
            on_modified = _ingest
            on_created = _ingest

        # Using polling observer to avoid issues with OS-dependent notifiers
        observer = PollingObserver()

        for interface in interfaces_list:
            logger.debug(f"Starting watchdog for {interface} at {interface.log_folder}")
            observer.schedule(Event(interface), path=interface.log_folder)

        observer.start()

    def _start_interfaces_watchdog(self) -> None:
        """Start the interfaces watchdog for the library interface only
        (the agent interface is externally managed in Antithesis)."""
        # self.start_interfaces_watchdog([interfaces.library, interfaces.agent])
        self.start_interfaces_watchdog([interfaces.library])

    def post_setup(self, session: pytest.Session) -> None:  # noqa: ARG002
        """Wait for all interfaces to finish collecting messages after test setup."""
        if self.replay:
            logger.terminal.write_sep("-", "Load all data from logs")
            logger.terminal.flush()

            interfaces.library.load_data_from_logs()
            interfaces.library.check_deserialization_errors()

            interfaces.agent.load_data_from_logs()
            interfaces.agent.check_deserialization_errors()

            interfaces.backend.load_data_from_logs()
        else:
            # Wait for library interface to finish collecting traces
            self._wait_interface(interfaces.library, self.library_interface_timeout)
            interfaces.library.check_deserialization_errors()

            # Agent interface is disabled in this scenario (externally managed).
            # self._wait_interface(interfaces.agent, self.agent_interface_timeout)
            # interfaces.agent.check_deserialization_errors()

            # Wait for backend interface
            self._wait_interface(interfaces.backend, self.backend_interface_timeout)

            # Load .NET managed library data if applicable
            interfaces.library_dotnet_managed.load_data()

    def _wait_interface(self, interface: ProxyBasedInterfaceValidator, timeout: int) -> None:
        """Wait for an interface to finish collecting messages.

        Args:
            interface: The interface validator to wait for
            timeout: Timeout in seconds to wait for the interface

        """
        logger.terminal.write_sep("-", f"Wait for {interface} ({timeout}s)")
        logger.terminal.flush()
        interface.wait(timeout)

    def pytest_sessionfinish(self, session: pytest.Session, exitstatus: int) -> None:
        """Clean up after the test session."""
        # No containers to clean up
13 changes: 12 additions & 1 deletion utils/_context/_scenarios/docker_ssi.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,11 +37,18 @@ class DockerSSIScenario(Scenario):
_network: Network = None

def __init__(
self, name, doc, extra_env_vars: dict | None = None, scenario_groups=None, appsec_enabled=None
self,
name,
doc,
extra_env_vars: dict | None = None,
scenario_groups=None,
appsec_enabled=None,
profiling_enabled=None,
) -> None:
super().__init__(name, doc=doc, github_workflow="dockerssi", scenario_groups=scenario_groups)

self._appsec_enabled = appsec_enabled
self._profiling_enabled = profiling_enabled
self.agent_port = _get_free_port()
self.agent_host = "localhost"
self._weblog_injection = DockerSSIContainer(extra_env_vars=extra_env_vars)
Expand Down Expand Up @@ -111,6 +118,7 @@ def configure(self, config: pytest.Config):
self._custom_library_version,
self._custom_injector_version,
self._appsec_enabled,
self._profiling_enabled,
)
self.ssi_image_builder.configure()
self.ssi_image_builder.build_weblog()
Expand Down Expand Up @@ -300,6 +308,7 @@ def __init__(
custom_library_version,
custom_injector_version,
appsec_enabled=None,
profiling_enabled=None,
) -> None:
self.scenario_name = scenario_name
self.host_log_folder = host_log_folder
Expand All @@ -319,6 +328,7 @@ def __init__(
self._custom_library_version = custom_library_version
self._custom_injector_version = custom_injector_version
self._appsec_enabled = appsec_enabled
self._profiling_enabled = profiling_enabled

@property
def dd_lang(self) -> str:
Expand Down Expand Up @@ -484,6 +494,7 @@ def build_weblog_image(self, ssi_installer_docker_tag):
"DD_INSTALLER_LIBRARY_VERSION": self._custom_library_version,
"DD_INSTALLER_INJECTOR_VERSION": self._custom_injector_version,
"DD_APPSEC_ENABLED": self._appsec_enabled,
"DD_PROFILING_ENABLED": self._profiling_enabled,
},
)
self.print_docker_build_logs(self.ssi_all_docker_tag, build_logs)
Expand Down
Loading
Loading