Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
123 changes: 76 additions & 47 deletions development/validation.py
Original file line number Diff line number Diff line change
@@ -1,53 +1,82 @@
import tomllib
import sys
import os
try:
import tomllib # Python 3.11+
except ModuleNotFoundError: # Python <=3.10
import tomli as tomllib

# file = "alert_example.toml"
# with open(file,"rb") as toml:
# alert = tomllib.load(toml)
import os
import re
import sys
from uuid import UUID

# Root directory scanned (recursively) for detection rule TOML files.
DETECTIONS_DIR = "detections"
# Required [rule] table fields for each supported rule type.
SUPPORTED_RULE_TYPES = {
    "query": ["description", "name", "rule_id", "risk_score", "severity", "type", "query"],
    "eql": ["description", "name", "rule_id", "risk_score", "severity", "type", "query", "language"],
    "threshold": ["description", "name", "rule_id", "risk_score", "severity", "type", "query", "threshold"],
}
# Accepted severity values (the check lowercases before comparing).
SUPPORTED_SEVERITIES = {"low", "medium", "high", "critical"}
# creation_date must look like YYYY/MM/DD.
DATE_PATTERN = re.compile(r"^\d{4}/\d{2}/\d{2}$")
# rule_ids seen so far across all files, used to flag duplicates.
seen_rule_ids = set()
# Process exit status: flipped to 1 on any validation failure.
failure = 0

# Walk every detection rule file and check that it declares the fields
# required for its rule type; sets the module-level `failure` flag on any
# problem so the script can exit nonzero at the end.
for root, dirs, files in os.walk("detections/"):
    for file in files:
        if not file.endswith(".toml"):
            continue

        full_path = os.path.join(root, file)
        with open(full_path, "rb") as toml:
            alert = tomllib.load(toml)

        # The metadata table must carry a creation_date.
        try:
            if alert['metadata']['creation_date']:
                pass
        except KeyError:
            print("The metadata table does not contain a creation_date on: " + full_path)
            failure = 1

        # Required fields depend on the rule type. Use .get() so a file with
        # no [rule] table (or no type field) is reported instead of crashing
        # with a KeyError.
        rule_type = alert.get('rule', {}).get('type')
        if rule_type == "query":  # query based alert
            required_fields = ['description', 'name', 'rule_id', 'risk_score', 'severity', 'type', 'query']
        elif rule_type == "eql":  # event correlation alert
            required_fields = ['description', 'name', 'rule_id', 'risk_score', 'severity', 'type', 'query', 'language']
        elif rule_type == "threshold":  # threshold based alert
            required_fields = ['description', 'name', 'rule_id', 'risk_score', 'severity', 'type', 'query', 'threshold']
        else:
            print("Unsupported rule type found in: " + full_path)
            # Bug fix: an unsupported type must fail the run, and `continue`
            # (not `break`) so the remaining files in this directory are
            # still validated.
            failure = 1
            continue

        # Flatten every field from every table, then diff against required.
        present_fields = [field for table in alert for field in alert[table]]
        missing_fields = [field for field in required_fields if field not in present_fields]

        if missing_fields:
            print("The following fields do not exist in " + file + ": " + str(missing_fields))
            failure = 1
        else:
            print("Validation Passed for: " + file)

def fail(message: str) -> None:
    """Record a validation failure: mark the run failed and report it.

    Sets the module-level ``failure`` flag to 1 (the script's eventual
    exit status) and prints ``message`` for the CI log.
    """
    global failure
    failure = 1
    print(message)


# Validate every detection rule TOML file under DETECTIONS_DIR. Problems are
# reported via fail(); a file only gets the "Validation Passed" line when
# every check for that file succeeded.
for root, _, files in os.walk(DETECTIONS_DIR):
    for file in sorted(files):  # sorted for deterministic CI output
        if not file.endswith(".toml"):
            continue

        full_path = os.path.join(root, file)
        try:
            with open(full_path, "rb") as toml_file:
                alert = tomllib.load(toml_file)
        except Exception as exc:
            # A malformed file must fail the run, not crash the validator.
            fail(f"Unable to parse {full_path}: {exc}")
            continue

        # Collect every problem for this file so the pass message is only
        # printed when the file is fully clean (previously severity/risk/UUID
        # failures still printed "Validation Passed").
        problems = []
        metadata = alert.get("metadata", {})
        rule = alert.get("rule", {})

        # tomllib parses TOML date literals into datetime.date objects, so
        # normalize to str before the regex match (avoids a TypeError).
        creation_date = metadata.get("creation_date")
        if not creation_date:
            problems.append(f"The metadata table does not contain a creation_date on: {full_path}")
        elif not DATE_PATTERN.match(str(creation_date)):
            problems.append(f"creation_date must use YYYY/MM/DD in: {full_path}")

        rule_type = rule.get("type")
        if rule_type not in SUPPORTED_RULE_TYPES:
            problems.append(f"Unsupported or missing rule type in {full_path}: {rule_type}")
        else:
            missing_fields = [field for field in SUPPORTED_RULE_TYPES[rule_type] if field not in rule]
            if missing_fields:
                problems.append(f"The following rule fields do not exist in {full_path}: {missing_fields}")
            else:
                # Field-level checks only run once all required fields exist.
                try:
                    UUID(str(rule["rule_id"]))
                except (ValueError, TypeError):
                    problems.append(f"rule_id must be a valid UUID in: {full_path}")

                # rule_id must also be unique across the whole rule set.
                if rule["rule_id"] in seen_rule_ids:
                    problems.append(f"Duplicate rule_id found in: {full_path}")
                else:
                    seen_rule_ids.add(rule["rule_id"])

                severity = str(rule["severity"]).lower()
                if severity not in SUPPORTED_SEVERITIES:
                    problems.append(f"severity must be one of {sorted(SUPPORTED_SEVERITIES)} in: {full_path}")

                # bool is a subclass of int, so exclude it explicitly:
                # risk_score = true must be rejected.
                risk_score = rule.get("risk_score")
                if isinstance(risk_score, bool) or not isinstance(risk_score, int) or not 0 <= risk_score <= 100:
                    problems.append(f"risk_score must be an integer from 0 to 100 in: {full_path}")

        if problems:
            for message in problems:
                fail(message)
        else:
            print(f"Validation Passed for: {full_path}")

# Exit with the accumulated status so CI fails when any rule failed
# validation (failure is 0 on success, 1 otherwise). This collapses the
# duplicated sys.exit(1) lines, which as written exited nonzero even when
# every file validated cleanly.
sys.exit(failure)
1 change: 1 addition & 0 deletions requirements.txt
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
# tomllib is stdlib on Python 3.11+; on older versions the tomli backport below provides it
requests>=2.28.0
python-dateutil>=2.8.0
tomli>=2.0.1; python_version < "3.11"
15 changes: 10 additions & 5 deletions setup/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -26,19 +26,24 @@ All instances are deployed into a dedicated VPC (`10.0.0.0/16`) with:
- Internal security group allowing all lab-to-lab traffic
- External access locked to your IP via `allowed_ip`

## Deployment
## Validation-first workflow

Use these commands to validate the lab configuration without deploying infrastructure:

```bash
cd setup/terraform

# Copy and fill in your variables
# Copy and fill in your variables for local validation only
cp terraform.tfvars.example terraform.tfvars

terraform init
terraform plan
terraform apply
terraform fmt -check
terraform init -backend=false
terraform validate
terraform plan -refresh=false
```

Only run `terraform apply` when you intentionally want to deploy the lab.

## Requirements

- AWS credentials configured (`aws configure` or environment variables)
Expand Down
35 changes: 29 additions & 6 deletions setup/terraform/main.tf
Original file line number Diff line number Diff line change
@@ -1,4 +1,6 @@
terraform {
required_version = ">= 1.5.0"

required_providers {
aws = {
source = "hashicorp/aws"
Expand Down Expand Up @@ -223,8 +225,15 @@ resource "aws_instance" "wazuh_manager" {
vpc_security_group_ids = [aws_security_group.wazuh.id, aws_security_group.lab_internal.id]

root_block_device {
volume_size = 30
volume_type = "gp3"
volume_size = 30
volume_type = "gp3"
encrypted = true
delete_on_termination = true
}

metadata_options {
http_endpoint = "enabled"
http_tokens = "required"
}

user_data = <<-EOF
Expand All @@ -246,8 +255,15 @@ resource "aws_instance" "windows_target" {
vpc_security_group_ids = [aws_security_group.windows.id, aws_security_group.lab_internal.id]

root_block_device {
volume_size = 50
volume_type = "gp3"
volume_size = 50
volume_type = "gp3"
encrypted = true
delete_on_termination = true
}

metadata_options {
http_endpoint = "enabled"
http_tokens = "required"
}

user_data = <<-USERDATA
Expand Down Expand Up @@ -285,8 +301,15 @@ resource "aws_instance" "kali_attacker" {
vpc_security_group_ids = [aws_security_group.kali.id, aws_security_group.lab_internal.id]

root_block_device {
volume_size = 30
volume_type = "gp3"
volume_size = 30
volume_type = "gp3"
encrypted = true
delete_on_termination = true
}

metadata_options {
http_endpoint = "enabled"
http_tokens = "required"
}

tags = {
Expand Down
20 changes: 20 additions & 0 deletions setup/terraform/variables.tf
Original file line number Diff line number Diff line change
Expand Up @@ -7,16 +7,31 @@ variable "aws_region" {
# AMI for the Ubuntu host that runs the Wazuh manager.
variable "ubuntu_ami" {
  description = "AMI ID for Ubuntu 22.04 LTS (Wazuh Manager)"
  type        = string

  # Reject values that do not look like an AMI ID before any plan/apply.
  validation {
    condition     = can(regex("^ami-[a-z0-9]+$", var.ubuntu_ami))
    error_message = "ubuntu_ami must be a valid AMI ID (for example ami-0123456789abcdef0)."
  }
}

# AMI for the Windows Server target host.
variable "windows_ami" {
  description = "AMI ID for Windows Server 2022 (target host)"
  type        = string

  # Reject values that do not look like an AMI ID before any plan/apply.
  validation {
    condition     = can(regex("^ami-[a-z0-9]+$", var.windows_ami))
    error_message = "windows_ami must be a valid AMI ID (for example ami-0123456789abcdef0)."
  }
}

# AMI for the Kali Linux attack-simulation host.
variable "kali_ami" {
  description = "AMI ID for Kali Linux (attack simulation)"
  type        = string

  # Reject values that do not look like an AMI ID before any plan/apply.
  validation {
    condition     = can(regex("^ami-[a-z0-9]+$", var.kali_ami))
    error_message = "kali_ami must be a valid AMI ID (for example ami-0123456789abcdef0)."
  }
}

variable "key_name" {
Expand All @@ -27,6 +42,11 @@ variable "key_name" {
# Admin source CIDR used by the security groups for external access.
variable "allowed_ip" {
  description = "Your public IP in CIDR notation for access control (e.g. 203.0.113.10/32)"
  type        = string

  # Must parse as a CIDR (cidrhost succeeds) and must not be a
  # global-access range. NOTE(review): any prefix length passes — it does
  # not enforce a /32 despite the "single-admin" wording; confirm intent.
  validation {
    condition     = can(cidrhost(var.allowed_ip, 0)) && !contains(["0.0.0.0/0", "::/0"], var.allowed_ip)
    error_message = "allowed_ip must be a valid single-admin CIDR and must not allow global access."
  }
}

variable "wazuh_instance_type" {
Expand Down