Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
106 changes: 106 additions & 0 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,106 @@
name: ci

# Static checks, native variable validation tests, the Python pre-flight check,
# and the plan-based pytest suite. Runs on every PR with relevant path changes
# and on pushes to master / release/v*.

on:
  pull_request:
    paths:
      - "**/*.tf"
      - "**/*.tftest.hcl"
      - "templates/TEMPLATE_terraform.tfvars"
      - "scripts/installer/**"
      - "tests/**"
      - "Makefile"
      - ".github/workflows/ci.yml"
      - ".tflint.hcl"
  push:
    branches:
      - master
      - "release/v*"
  workflow_dispatch:

# Superseded runs in the same group are cancelled: a force-push or a rapid
# sequence of PR updates cancels the still-running job for the same ref+event.
# NOTE: this does NOT dedupe a push run against a pull_request run for the same
# commit — those land in different groups (different event_name, and PR runs
# use the synthetic refs/pull/N/merge ref). With push limited to master and
# release/v*, that overlap only occurs for PRs opened from those branches.
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}-${{ github.event_name }}
  cancel-in-progress: true

jobs:
  ci:
    name: ci
    runs-on: ubuntu-latest
    timeout-minutes: 20

    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup Terraform
        uses: hashicorp/setup-terraform@v3
        with:
          # Quoted so YAML never retypes the version as a number.
          terraform_version: "1.14.8"
          terraform_wrapper: false

      - name: terraform fmt
        run: terraform fmt -check -recursive -diff

      - name: Setup tflint
        uses: terraform-linters/setup-tflint@v4
        with:
          tflint_version: v0.61.0

      - name: tflint init
        run: tflint --init

      - name: tflint
        # TFLINT_CONFIG_FILE is required so child modules walked by --recursive
        # use the root .tflint.hcl — they don't auto-discover it.
        env:
          TFLINT_CONFIG_FILE: ${{ github.workspace }}/.tflint.hcl
        run: tflint --recursive

      - name: Setup Python
        uses: actions/setup-python@v5
        with:
          python-version: "3.12"

      - name: Install Python dependencies
        run: |
          pip install -r scripts/installer/requirements.txt
          pip install -r tests/requirements.txt

      - name: Terraform init (modules only)
        run: terraform init -backend=false

      - name: Generate test fixtures
        env:
          CX_SKIP_SSM: "true"
        run: |
          mkdir -p tests/logs
          make generate_test_data

      - name: Run terraform test
        run: make terraform_test

      - name: extractors.py smoke
        # Confirms the Python tfvars parser reads the fixture end-to-end without a
        # Docker daemon. Only became possible once extractors.py stopped shelling
        # out to hcl2json. The `installer` package lives under scripts/, so we
        # have to add it to PYTHONPATH for the inline invocation.
        env:
          PYTHONPATH: scripts
        run: |
          cp tests/datafiles/terraform.tfvars terraform.tfvars
          trap "rm -f terraform.tfvars" EXIT
          python3 -c "from installer.utils.extractors import tf_vars_json_payload; assert tf_vars_json_payload['tower_container_version'].startswith('v'); print(f'parsed {len(tf_vars_json_payload)} keys')"

# Follow-up: wire the @pytest.mark.local plan-based suite in here. Now that
# extractors.py runs Docker-free, the conftest session_setup also runs
# Docker-free (hcl2 swap + tfvars-backup gating + CX_SKIP_AWS_CHECK), and
# tests/requirements.txt is fixed, the remaining blocker is the bare
# `terraform plan` invocations in the pytest executor — those need the
# AWS provider to initialize, and the fixture's `aws_profile` + lack of
# AWS_* env creds in CI need a coherent solution. Tackle separately.
79 changes: 0 additions & 79 deletions .github/workflows/terraform-test.yml

This file was deleted.

36 changes: 16 additions & 20 deletions 005_parameter_store.tf
Original file line number Diff line number Diff line change
Expand Up @@ -26,22 +26,20 @@ data "aws_ssm_parameter" "wave_lite_secrets" {
# Generate individual SSM Parameters
# ------------------------------------------------
# One SecureString parameter per Tower secret. nonsensitive() unwraps the
# sensitivity marker on the computed key path so it can serve as the (plain)
# parameter name; the value itself stays sensitive.
resource "aws_ssm_parameter" "client_supplied_secrets_tower" {
  for_each = local.tower_secret_keys

  name  = nonsensitive(local.tower_secrets[each.key]["ssm_key"])
  type  = "SecureString"
  value = local.tower_secrets[each.key]["value"]
}


resource "aws_ssm_parameter" "client_supplied_secrets_seqerakit" {
  # Seqerakit secrets are only materialized when the seqerakit flow is enabled;
  # an empty collection means no parameters are created.
  for_each = var.flag_run_seqerakit == true ? local.seqerakit_secret_keys : []

  name  = nonsensitive(local.seqerakit_secrets[each.key]["ssm_key"])
  type  = "SecureString"
  value = local.seqerakit_secrets[each.key]["value"]
}


resource "aws_ssm_parameter" "client_supplied_secrets_groundswell" {
  # Groundswell secrets are only materialized when groundswell is enabled;
  # an empty collection means no parameters are created.
  for_each = var.flag_enable_groundswell == true ? local.groundswell_secret_keys : []

  name  = nonsensitive(local.groundswell_secrets[each.key]["ssm_key"])
  type  = "SecureString"
  value = local.groundswell_secrets[each.key]["value"]
}


resource "aws_ssm_parameter" "client_supplied_secrets_wave_lite" {
  # Wave Lite secrets are only materialized when Wave Lite is in use;
  # an empty collection means no parameters are created.
  for_each = var.flag_use_wave_lite == true ? local.wave_lite_secret_keys : []

  name  = nonsensitive(local.wave_lite_secrets[each.key]["ssm_key"])
  type  = "SecureString"
  value = local.wave_lite_secrets[each.key]["value"]
}
1 change: 1 addition & 0 deletions scripts/installer/requirements.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
python-hcl2==8.1.2
107 changes: 34 additions & 73 deletions scripts/installer/utils/extractors.py
Original file line number Diff line number Diff line change
@@ -1,96 +1,57 @@
from datetime import datetime
import json
import logging
import os
from pathlib import Path
import platform
import subprocess
import sys
import tempfile
from pathlib import Path

import hcl2

# Make `installer.*` importable when this module is loaded as a loose script:
# add the directory two levels up (scripts/) to sys.path, once.
# BUG FIX: sys.path contains strings, so membership must be checked against the
# str form — comparing the Path object itself always evaluated False, making the
# dedup guard a no-op.
base_import_dir = Path(__file__).resolve().parents[2]
if str(base_import_dir) not in sys.path:
    sys.path.append(str(base_import_dir))

from installer.utils.logger import logger

## ------------------------------------------------------------------------------------
## Convert terraform.tfvars to JSON
## Notes:
## 1. As of May 16, 2025, the bespoke parser to convert terraform.tfvars into json is replaced with a new container-based
## solution from tmccombs/hcl2json. The home-rolled parser was originally created to avoid introducing packages from
## the internet. Due to more complicated parsing needs, the new parser solution has been implemented.
## To refer to the previous parser, please refer to the previous Git history.
## Convert terraform.tfvars to a Python dict.
##
## 2. The new parser relies on the containerized solution provided here: https://github.com/tmccombs/hcl2json.
## Seqera will vendor their own copy of the image within Harbor.
## Earlier revisions of this file shelled out to the `tmccombs/hcl2json` Docker image
## (see git history). That worked but required a Docker daemon to be running anywhere
## the installer was exercised — including CI and `terraform test` runs that touch the
## connection-strings external data source. Switching to `python-hcl2` removes the
## daemon dependency in exchange for one in-process Python library.
##
## WARNING / REMINDER: DONT ADD ANY stdout emissions in this logic or you'll break the TF `external` mechanism!!
## WARNING: do not emit anything to stdout from this module — the data.external block in
## modules/connection_strings/v1.0.0/main.tf relies on a single JSON line on stdout from
## generate_db_connection_string.py, and any extra output corrupts the protocol.
## ------------------------------------------------------------------------------------


def get_tfvars_as_json():
"""
Uses the `tmccombs/hcl2json` Docker image to convert `terraform.tfvars` into JSON format and return it as
a Python dictionary.
def _unwrap_hcl_strings(value):
"""python-hcl2 returns string scalars with their surrounding double quotes preserved
(so `foo = "bar"` becomes the Python string `'"bar"'`). The downstream consumers
expect plain Python strings. Walk the parsed structure and strip those.
"""
if isinstance(value, str):
if len(value) >= 2 and value[0] == '"' and value[-1] == '"':
return value[1:-1]
return value
if isinstance(value, list):
return [_unwrap_hcl_strings(item) for item in value]
if isinstance(value, dict):
return {key: _unwrap_hcl_strings(val) for key, val in value.items()}
return value


# Check for tfvars
tfvars_original_path = os.path.abspath("terraform.tfvars")
if not os.path.exists(tfvars_original_path):
def get_tfvars_as_json():
    """Parse the project-root ``terraform.tfvars`` and return it as a plain dict.

    Parsing happens in-process via python-hcl2 — no Docker daemon required
    (earlier revisions shelled out to a containerized hcl2json; see git history).

    Returns:
        dict: tfvars keys mapped to unwrapped Python values.

    Raises:
        FileNotFoundError: when terraform.tfvars is absent from the working dir.
    """
    path = os.path.abspath("terraform.tfvars")
    if not os.path.exists(path):
        raise FileNotFoundError(
            f"terraform.tfvars file not found in path: {path}."
        )

    with open(path) as handle:
        parsed = hcl2.load(handle)

    # Strip the double quotes python-hcl2 preserves around string scalars.
    return _unwrap_hcl_strings(parsed)


tf_vars_json_payload = get_tfvars_as_json()
Loading
Loading