Get snapshots information from json file #341

Merged · 3 commits · Nov 1, 2023
11 changes: 8 additions & 3 deletions .github/workflows/unit-test.yaml
@@ -20,7 +20,7 @@ jobs:
steps:
- name: Checkout
uses: actions/checkout@v3

- name: Setup Go environment
uses: actions/setup-go@v3
with:
@@ -48,5 +48,10 @@ jobs:
role-duration-seconds: 7200

- name: Unit tests
working-directory: test/
run: go test ./unittest/... -v
working-directory: test/unittest
run: |
TESTS=$(go test -list . | grep -v 'ok\s\+github.com/atlassian-labs/data-center-terraform/test/unittest')
for test in ${TESTS[@]}; do
go test -run ${test} -v
rm -rf /tmp/* >/dev/null 2>&1 || true
done
15 changes: 15 additions & 0 deletions config.tfvars
@@ -85,6 +85,9 @@ max_cluster_capacity = 5
# List of additional namespaces to be created in the cluster
#additional_namespaces = ["extra_namespace"]

# Path to a JSON file with EBS and RDS snapshot IDs
# snapshots_json_file_path = "test/dcapt-snapshots.json"

################################################################################
# Osquery settings. Atlassian only!
################################################################################
@@ -215,6 +218,9 @@ jira_db_iops = 1000
# Set `null` if the snapshot does not have a default db name.
jira_db_name = "jira"

# Dataset size. Used only when snapshots_json_file_path is defined. Defaults to large
# jira_dataset_size = "large"

# Database restore configuration
# If you want to restore the database from a snapshot, uncomment the following line and provide the snapshot identifier.
# This will restore the database from the snapshot and will not create a new database.
@@ -310,6 +316,9 @@ confluence_db_iops = 1000
# Set `null` if the snapshot does not have a default db name.
confluence_db_name = "confluence"

# Dataset size. Used only when snapshots_json_file_path is defined. Defaults to large
# confluence_dataset_size = "large"

# Database restore configuration
# If you want to restore the database from a snapshot, uncomment the following lines and provide the snapshot identifier.
# This will restore the database from the snapshot and will not create a new database.
@@ -420,6 +429,9 @@ bitbucket_db_name = "bitbucket"
#bitbucket_elasticsearch_storage = "<REQUESTS_STORAGE>"
#bitbucket_elasticsearch_replicas = "<NUMBER_OF_NODES>"

# Dataset size. Used only when snapshots_json_file_path is defined. Defaults to large
# bitbucket_dataset_size = "large"

# Dataset Restore

# Database restore configuration
@@ -590,6 +602,9 @@ crowd_db_name = "crowd"
# if you encounter such an issue. This will apply to Crowd pods.
#crowd_termination_grace_period = 0

# Dataset size. Used only when snapshots_json_file_path is defined. Defaults to large
# crowd_dataset_size = "large"

# Dataset Restore

# Database restore configuration
20 changes: 10 additions & 10 deletions dc-infrastructure.tf
@@ -116,7 +116,7 @@ module "jira" {
db_instance_class = var.jira_db_instance_class
db_iops = var.jira_db_iops
db_name = var.jira_db_name
db_snapshot_id = var.jira_db_snapshot_id
db_snapshot_id = local.jira_rds_snapshot_id
db_master_username = var.jira_db_master_username
db_master_password = var.jira_db_master_password

@@ -145,7 +145,7 @@ module "jira" {
nfs_limits_cpu = var.jira_nfs_limits_cpu
nfs_limits_memory = var.jira_nfs_limits_memory

shared_home_snapshot_id = var.jira_shared_home_snapshot_id
shared_home_snapshot_id = local.jira_ebs_snapshot_id

# If local Helm charts path is provided, Terraform will then install using local charts and ignores remote registry
local_jira_chart_path = local.local_jira_chart_path
@@ -171,8 +171,8 @@ module "confluence" {
db_name = var.confluence_db_name
}

db_snapshot_id = var.confluence_db_snapshot_id
db_snapshot_build_number = var.confluence_db_snapshot_build_number
db_snapshot_id = local.confluence_rds_snapshot_id
db_snapshot_build_number = local.confluence_db_snapshot_build_number
db_master_username = var.confluence_db_master_username
db_master_password = var.confluence_db_master_password

@@ -209,7 +209,7 @@ module "confluence" {
nfs_limits_cpu = var.confluence_nfs_limits_cpu
nfs_limits_memory = var.confluence_nfs_limits_memory

shared_home_snapshot_id = var.confluence_shared_home_snapshot_id
shared_home_snapshot_id = local.confluence_ebs_snapshot_id

# If local Helm charts path is provided, Terraform will then install using local charts and ignores remote registry
local_confluence_chart_path = local.local_confluence_chart_path
@@ -230,7 +230,7 @@ module "bitbucket" {
db_instance_class = var.bitbucket_db_instance_class
db_iops = var.bitbucket_db_iops
db_name = var.bitbucket_db_name
db_snapshot_id = var.bitbucket_db_snapshot_id
db_snapshot_id = local.bitbucket_rds_snapshot_id
db_master_username = var.bitbucket_db_master_username
db_master_password = var.bitbucket_db_master_password

@@ -273,7 +273,7 @@ module "bitbucket" {
elasticsearch_storage = var.bitbucket_elasticsearch_storage
elasticsearch_replicas = var.bitbucket_elasticsearch_replicas

shared_home_snapshot_id = var.bitbucket_shared_home_snapshot_id
shared_home_snapshot_id = local.bitbucket_ebs_snapshot_id

# If local Helm charts path is provided, Terraform will then install using local charts and ignores remote registry
local_bitbucket_chart_path = local.local_bitbucket_chart_path
@@ -296,8 +296,8 @@ module "crowd" {
db_name = var.crowd_db_name
db_master_username = var.crowd_db_master_username
db_master_password = var.crowd_db_master_password
db_snapshot_id = var.crowd_db_snapshot_id
db_snapshot_build_number = var.crowd_db_snapshot_build_number
db_snapshot_id = local.crowd_rds_snapshot_id
db_snapshot_build_number = local.crowd_db_snapshot_build_number

replica_count = var.crowd_replica_count
installation_timeout = var.crowd_installation_timeout
@@ -323,7 +323,7 @@ module "crowd" {
nfs_limits_cpu = var.crowd_nfs_limits_cpu
nfs_limits_memory = var.crowd_nfs_limits_memory

shared_home_snapshot_id = var.crowd_shared_home_snapshot_id
shared_home_snapshot_id = local.crowd_ebs_snapshot_id

# If local Helm charts path is provided, Terraform will then install using local charts and ignores remote registry
local_crowd_chart_path = local.local_crowd_chart_path
35 changes: 35 additions & 0 deletions docs/docs/userguide/configuration/CONFIGURATION.md
@@ -281,6 +281,41 @@ grafana_pvc_disk_size = "20Gi"
to override the default Prometheus 10Gi PVC storage request when enabling monitoring for the first time.
[AWS documentation](https://docs.aws.amazon.com/eks/latest/userguide/storage-classes.html){.external}.

### Snapshot Configuration

It is possible to restore DC products from snapshots. Each DC product requires valid public RDS and EBS (shared-home) snapshots, defined by the following variables:

```
<product>_db_snapshot_id
```

```
<product>_shared_home_snapshot_id
```

Note that Confluence and Crowd also require a build number:

```
confluence_db_snapshot_build_number = "8017"
crowd_db_snapshot_build_number = "5023"
```

Snapshots must be public and exist in the target region.
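For example, restoring Jira from dedicated snapshots could look like this in `config.tfvars` (the IDs are placeholders; substitute snapshot IDs that actually exist in your target region):

```
jira_db_snapshot_id          = "<RDS_SNAPSHOT_ID>"
jira_shared_home_snapshot_id = "<EBS_SNAPSHOT_ID>"
```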

!!! info "Snapshots JSON File"

It is also possible to use a special snapshots JSON file with pre-defined snapshot IDs and build numbers for all products, for both small and large dataset sizes.

You can find an example JSON file in `test/dcapt-snapshots.json`. To use the snapshots JSON file rather than the dedicated snapshot variables, set the following in `config.tfvars`:

```
snapshots_json_file_path = "test/dcapt-snapshots.json"
```

If `snapshots_json_file_path` is set, the dedicated snapshot variables defined in `config.tfvars` are ignored.

Only use a snapshots JSON file suggested by the [DCAPT team](https://github.com/atlassian/dc-app-performance-toolkit){.external}.
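For reference, the lookup code in this PR (the jq queries in `install.sh` and the locals in `locals.tf`) expects the JSON to be shaped roughly as in the sketch below; the real `test/dcapt-snapshots.json` covers all products, versions, sizes and regions, and every value shown here is a placeholder:

```
{
  "confluence": {
    "versions": [
      {
        "version": "<PRODUCT_VERSION>",
        "build_number": "<BUILD_NUMBER>",
        "data": [
          {
            "type": "rds",
            "size": "large",
            "snapshots": [ { "<REGION>": "<RDS_SNAPSHOT_ID>" } ]
          },
          {
            "type": "ebs",
            "size": "large",
            "snapshots": [ { "<REGION>": "<EBS_SNAPSHOT_ID>" } ]
          }
        ]
      }
    ]
  }
}
```

The `build_number` field is only read for Confluence and Crowd.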

## Product specific configuration

=== "Bamboo"
21 changes: 21 additions & 0 deletions install.sh
@@ -114,6 +114,15 @@ pre_flight_checks() {
PRODUCT_VERSION=$(get_variable ${PRODUCT_VERSION_VAR} "${CONFIG_ABS_PATH}")
MAJOR_MINOR_VERSION=$(echo "$PRODUCT_VERSION" | cut -d '.' -f1-2)
EBS_SNAPSHOT_ID=$(get_variable ${SHARED_HOME_SNAPSHOT_VAR} "${CONFIG_ABS_PATH}")
DATASET_SIZE=$(get_variable ${PRODUCT}_dataset_size "${CONFIG_ABS_PATH}")
if [ -z "$DATASET_SIZE" ]; then
DATASET_SIZE="large"
fi
log "Dataset size is ${DATASET_SIZE}"
SNAPSHOTS_JSON_FILE_PATH=$(get_variable 'snapshots_json_file_path' "${CONFIG_ABS_PATH}")
if [ "${SNAPSHOTS_JSON_FILE_PATH}" ]; then
EBS_SNAPSHOT_ID=$(cat ${SNAPSHOTS_JSON_FILE_PATH} | jq ".${PRODUCT}.versions[] | select(.version == \"${PRODUCT_VERSION}\") | .data[] | select(.size == \"${DATASET_SIZE}\" and .type == \"ebs\") | .snapshots[] | .[\"${REGION}\"]" | sed 's/"//g')
fi
if [ ! -z ${EBS_SNAPSHOT_ID} ]; then
log "Checking EBS snapshot ${EBS_SNAPSHOT_ID} compatibility with ${PRODUCT} version ${PRODUCT_VERSION}"
EBS_SNAPSHOT_DESCRIPTION=$(aws ec2 describe-snapshots --snapshot-ids=${EBS_SNAPSHOT_ID} --region ${REGION} --query 'Snapshots[0].Description')
@@ -147,6 +156,9 @@ pre_flight_checks() {
fi
fi
RDS_SNAPSHOT_ID=$(get_variable ${RDS_SNAPSHOT_VAR} "${CONFIG_ABS_PATH}")
if [ "${SNAPSHOTS_JSON_FILE_PATH}" ]; then
RDS_SNAPSHOT_ID=$(cat ${SNAPSHOTS_JSON_FILE_PATH} | jq ".${PRODUCT}.versions[] | select(.version == \"${PRODUCT_VERSION}\") | .data[] | select(.size == \"${DATASET_SIZE}\" and .type == \"rds\") | .snapshots[] | .[\"${REGION}\"]" | sed 's/"//g')
fi
if [ ! -z ${RDS_SNAPSHOT_ID} ]; then
log "Checking RDS snapshot ${RDS_SNAPSHOT_ID} compatibility with ${PRODUCT} version ${PRODUCT_VERSION}"
RDS_SNAPSHOT_VERSION=$(echo "${RDS_SNAPSHOT_ID}" | sed 's/.*dcapt-\(.*\)/\1/' | sed 's/-/./g' | cut -d '.' -f 2-)
@@ -181,6 +193,15 @@ verify_configuration_file() {
HAS_VALIDATION_ERR=1
fi

SNAPSHOTS_JSON_FILE_PATH=$(get_variable 'snapshots_json_file_path' "${CONFIG_ABS_PATH}")
if [ "${SNAPSHOTS_JSON_FILE_PATH}" ]; then
if [ ! -e "${SNAPSHOTS_JSON_FILE_PATH}" ]; then
log "Snapshots json file not found at ${SNAPSHOTS_JSON_FILE_PATH}"
log "Please make sure 'snapshots_json_file_path' in ${CONFIG_ABS_PATH} points to an existing valid json file"
HAS_VALIDATION_ERR=1
fi
fi

if [ -n "${INVALID_CONTENT}" ]; then
log "Configuration file '${CONFIG_ABS_PATH##*/}' is not valid." "ERROR"
log "Terraform uses this file to generate customised infrastructure for '${ENVIRONMENT_NAME}' on your AWS account."
96 changes: 96 additions & 0 deletions locals.tf
@@ -17,4 +17,100 @@ locals {
local_bamboo_chart_path = var.local_helm_charts_path != "" && var.bamboo_install_local_chart ? "${var.local_helm_charts_path}/bamboo" : ""
local_agent_chart_path = var.local_helm_charts_path != "" && var.bamboo_install_local_chart ? "${var.local_helm_charts_path}/bamboo-agent" : ""
local_crowd_chart_path = var.local_helm_charts_path != "" && var.crowd_install_local_chart ? "${var.local_helm_charts_path}/crowd" : ""

# snapshots_json = var.snapshots_json_file_path != "" ? jsondecode(file(var.snapshots_json_file_path)) : jsondecode("{\"jira\": {\"versions\":[]},\"confluence\": {\"versions\":[]},\"crowd\": {\"versions\":[]},\"bitbucket\": {\"versions\":[]}}")
Collaborator: Do we need this?

snapshots_json = var.snapshots_json_file_path != "" ? jsondecode(file(var.snapshots_json_file_path)) : null
Collaborator: Can we put a sentence or two describing the filtering process and how the snapshot values are determined? I had to read it a couple of times to see how this is used, and I am sure I will forget it before I see this code again :)

Collaborator (Author): If snapshots_json_file_path is set, we parse the JSON and take snapshot IDs from there. If the legacy variable <product>_db_snapshot_id is defined, we still use it when either snapshots_json_file_path is undefined or no snapshot ID is found for the product version in the JSON. This way we don't change the existing way of supplying snapshot IDs; we just introduce a new one.

filtered_bitbucket_snapshots = local.snapshots_json != null ? flatten([
for version in local.snapshots_json.bitbucket.versions :
[for snapshot in version.data :
merge({ version = version.version }, snapshot)
]
]) : []

bitbucket_rds_snapshot = local.snapshots_json != null ? [
for snapshot in local.filtered_bitbucket_snapshots :
snapshot.type == "rds" && snapshot.size == var.bitbucket_dataset_size && snapshot.version == var.bitbucket_version_tag ? snapshot.snapshots[0][var.region] : ""
] : []

bitbucket_ebs_snapshot = local.snapshots_json != null ? [
for snapshot in local.filtered_bitbucket_snapshots :
snapshot.type == "ebs" && snapshot.size == var.bitbucket_dataset_size && snapshot.version == var.bitbucket_version_tag ? snapshot.snapshots[0][var.region] : ""
] : []

filtered_confluence_snapshots = local.snapshots_json != null ? flatten([
for version in local.snapshots_json.confluence.versions :
[for snapshot in version.data :
merge({ version = version.version }, snapshot)
]
]) : []

confluence_rds_snapshot = local.snapshots_json != null ? [
for snapshot in local.filtered_confluence_snapshots :
snapshot.type == "rds" && snapshot.size == var.confluence_dataset_size && snapshot.version == var.confluence_version_tag ? snapshot.snapshots[0][var.region] : ""
] : []

confluence_ebs_snapshot = local.snapshots_json != null ? [
for snapshot in local.filtered_confluence_snapshots :
snapshot.type == "ebs" && snapshot.size == var.confluence_dataset_size && snapshot.version == var.confluence_version_tag ? snapshot.snapshots[0][var.region] : ""
] : []

confluence_build_numbers = local.snapshots_json != null ? flatten([
for version in local.snapshots_json.confluence.versions :
version.version == var.confluence_version_tag ? version.build_number : ""
]) : []

filtered_crowd_snapshots = local.snapshots_json != null ? flatten([
for version in local.snapshots_json.crowd.versions :
[for snapshot in version.data :
merge({ version = version.version }, snapshot)
]
]) : []

crowd_rds_snapshot = local.snapshots_json != null ? [
for snapshot in local.filtered_crowd_snapshots :
snapshot.type == "rds" && snapshot.size == var.crowd_dataset_size && snapshot.version == var.crowd_version_tag ? snapshot.snapshots[0][var.region] : ""
] : []

crowd_ebs_snapshot = local.snapshots_json != null ? [
for snapshot in local.filtered_crowd_snapshots :
snapshot.type == "ebs" && snapshot.size == var.crowd_dataset_size && snapshot.version == var.crowd_version_tag ? snapshot.snapshots[0][var.region] : ""
] : []

crowd_build_numbers = local.snapshots_json != null ? flatten([
for version in local.snapshots_json.crowd.versions :
version.version == var.crowd_version_tag ? version.build_number : ""
]) : []

filtered_jira_snapshots = local.snapshots_json != null ? flatten([
for version in local.snapshots_json.jira.versions :
[for snapshot in version.data :
merge({ version = version.version }, snapshot)
]
]) : []

jira_rds_snapshot = local.snapshots_json != null ? [
for snapshot in local.filtered_jira_snapshots :
snapshot.type == "rds" && snapshot.size == var.jira_dataset_size && snapshot.version == var.jira_version_tag ? snapshot.snapshots[0][var.region] : ""
] : []

jira_ebs_snapshot = local.snapshots_json != null ? [
for snapshot in local.filtered_jira_snapshots :
snapshot.type == "ebs" && snapshot.size == var.jira_dataset_size && snapshot.version == var.jira_version_tag ? snapshot.snapshots[0][var.region] : ""
] : []

jira_rds_snapshot_id = length(compact(local.jira_rds_snapshot)) > 0 ? compact(local.jira_rds_snapshot)[0] : var.jira_db_snapshot_id != null ? var.jira_db_snapshot_id : null
jira_ebs_snapshot_id = length(compact(local.jira_ebs_snapshot)) > 0 ? compact(local.jira_ebs_snapshot)[0] : var.jira_shared_home_snapshot_id != null ? var.jira_shared_home_snapshot_id : null

confluence_rds_snapshot_id = length(compact(local.confluence_rds_snapshot)) > 0 ? compact(local.confluence_rds_snapshot)[0] : var.confluence_db_snapshot_id != null ? var.confluence_db_snapshot_id : null
confluence_ebs_snapshot_id = length(compact(local.confluence_ebs_snapshot)) > 0 ? compact(local.confluence_ebs_snapshot)[0] : var.confluence_shared_home_snapshot_id != null ? var.confluence_shared_home_snapshot_id : null
confluence_db_snapshot_build_number = length(compact(local.confluence_build_numbers)) > 0 ? compact(local.confluence_build_numbers)[0] : var.confluence_db_snapshot_build_number != null ? var.confluence_db_snapshot_build_number : null

crowd_rds_snapshot_id = length(compact(local.crowd_rds_snapshot)) > 0 ? compact(local.crowd_rds_snapshot)[0] : var.crowd_db_snapshot_id != null ? var.crowd_db_snapshot_id : null
crowd_ebs_snapshot_id = length(compact(local.crowd_ebs_snapshot)) > 0 ? compact(local.crowd_ebs_snapshot)[0] : var.crowd_shared_home_snapshot_id != null ? var.crowd_shared_home_snapshot_id : null
crowd_db_snapshot_build_number = length(compact(local.crowd_build_numbers)) > 0 ? compact(local.crowd_build_numbers)[0] : var.crowd_db_snapshot_build_number != null ? var.crowd_db_snapshot_build_number : null

bitbucket_rds_snapshot_id = length(compact(local.bitbucket_rds_snapshot)) > 0 ? compact(local.bitbucket_rds_snapshot)[0] : var.bitbucket_db_snapshot_id != null ? var.bitbucket_db_snapshot_id : null
bitbucket_ebs_snapshot_id = length(compact(local.bitbucket_ebs_snapshot)) > 0 ? compact(local.bitbucket_ebs_snapshot)[0] : var.bitbucket_shared_home_snapshot_id != null ? var.bitbucket_shared_home_snapshot_id : null

}
2 changes: 1 addition & 1 deletion modules/AWS/eks/variables.tf
@@ -115,7 +115,7 @@ variable "osquery_fleet_enrollment_host" {

variable "kinesis_log_producers_role_arns" {
description = "AWS kinesis log producer role"
type = object({
type = object({
eu = string
non-eu = string
})
40 changes: 40 additions & 0 deletions outputs.tf
@@ -111,3 +111,43 @@ output "confluence_s3_bucket" {
description = "Confluence S3 bucket name"
value = var.confluence_s3_attachments_storage ? "${local.cluster_name}-confluence-storage" : null
}

output "jira_rds_snapshot" {
value = local.jira_rds_snapshot_id
}

output "jira_ebs_snapshot" {
value = local.jira_ebs_snapshot_id
}

output "confluence_rds_snapshot" {
value = local.confluence_rds_snapshot_id
}

output "confluence_ebs_snapshot" {
value = local.confluence_ebs_snapshot_id
}

output "confluence_db_snapshot_build_number" {
Collaborator: Only Confluence and Crowd require build number?

value = local.confluence_db_snapshot_build_number
}

output "bitbucket_rds_snapshot" {
value = local.bitbucket_rds_snapshot_id
}

output "bitbucket_ebs_snapshot" {
value = local.bitbucket_ebs_snapshot_id
}

output "crowd_rds_snapshot" {
value = local.crowd_rds_snapshot_id
}

output "crowd_ebs_snapshot" {
value = local.crowd_ebs_snapshot_id
}

output "crowd_db_snapshot_build_number" {
value = local.crowd_db_snapshot_build_number
}
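The resolved snapshot IDs are exposed as root-module outputs above, so after an apply you can sanity-check which values were picked up from the JSON file (or from the legacy variables), for example:

```
terraform output jira_rds_snapshot
terraform output confluence_db_snapshot_build_number
```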