AWS LoadTest Distributed Terraform Module

This module provides a simple, straightforward way to run your load tests created with JMeter or Taurus (bzt) on AWS as IaaS.


Basic usage with JMeter

module "loadtest" {

    source  = "marcosborges/loadtest-distribuited/aws"
    version = "1.0.0"
  
    name = "nome-da-implantacao"
    executor = "jmeter"
    loadtest_dir_source = "./assets"
    loadtest_entrypoint = "jmeter -n -t -R \"{NODES_IPS}\" *.jmx"
    nodes_size = 2

    subnet_id = data.aws_subnet.current.id
}

data "aws_subnet" "current" {
    filter {
        name   = "tag:Name"
        values = ["my-subnet-name"]
    }
}


Basic usage with Taurus

For basic usage you need to specify which network (subnet) will be used, where your test plan scripts are located, and how many nodes are required to generate the desired load. The {NODES_IPS} placeholder in the entrypoint is replaced with the addresses of the provisioned nodes.

module "loadtest" {

    source  = "marcosborges/loadtest-distribuited/aws"
    version = "1.0.0"
  
    name = "nome-da-implantacao"
    executor = "bzt"
    loadtest_dir_source = "./load-test-plan"
    loadtest_entrypoint = "bzt -q -o execution.0.distributed=\"{NODES_IPS}\" *.yml"
    nodes_size = 2

    subnet_id = data.aws_subnet.current.id
}

data "aws_subnet" "current" {
    filter {
        name   = "tag:Name"
        values = ["my-subnet-name"]
    }
}

Advanced configuration

The module also exposes a number of advanced settings:

  1. It is possible to automatically split the contents of a bulk data file between the load nodes.

  2. It is possible to export the SSH key used for remote access.

  3. We can define a pre-configured, customized machine image (AMI).

  4. We can customize many other instance provisioning parameters: tags, monitoring, public IP association, security group, and so on.

module "loadtest" {

    source  = "marcosborges/loadtest-distribuited/aws"
    version = "1.0.0"
  
    subnet_id = data.aws_subnet.current.id

    name = "nome-da-implantacao"
    executor = "bzt"
    loadtest_dir_source = "./assets"
    loadtest_dir_destination = "/loadtest"
    loadtest_entrypoint = "bzt -q -o execution.0.distributed=\"{NODES_IPS}\" *.yml"
    nodes_size = 3

    #AUTO SPLIT
    split_data_mass_between_nodes = {
        enable = true
        data_mass_filename = "../plan/data/data.csv"
    }

    #EXPORT SSH KEY
    ssh_export_pem = true

    #CUSTOMIZE IMAGE
    leader_ami_id = data.aws_ami.my_image.id
    nodes_ami_id = data.aws_ami.my_image.id

    #CUSTOMIZE TAGS
    leader_tags = {
        "Name"        = "deployment-name-leader"
        "Owner"       = "owner-name"
        "Environment" = "production"
        "Role"        = "leader"
    }
    nodes_tags = {
        "Name"        = "deployment-name"
        "Owner"       = "owner-name"
        "Environment" = "production"
        "Role"        = "node"
    }
    tags = {
        "Name"        = "deployment-name"
        "Owner"       = "owner-name"
        "Environment" = "production"
    }
 
    # SETUP INSTANCE SIZE
    leader_instance_type = "t2.medium"
    nodes_intance_type = "t2.medium"
 
    # SETUP JVM PARAMETERS
    leader_jvm_args = " -Xms12g -Xmx80g -XX:MaxMetaspaceSize=512m -XX:+UseG1GC -XX:MaxGCPauseMillis=100 -XX:G1ReservePercent=20 "
    nodes_jvm_args = " -Xms12g -Xmx80g -XX:MaxMetaspaceSize=512m -XX:+UseG1GC -XX:MaxGCPauseMillis=100 -XX:G1ReservePercent=20 "

    # DISABLE AUTO SETUP
    auto_setup = false

    # SET JMETER VERSION. ONLY WORKS WHEN AUTO_SETUP IS TRUE
    jmeter_version = "5.4.1"

    # ASSOCIATE PUBLIC IP
    leader_associate_public_ip_address = true
    nodes_associate_public_ip_address = true
    
    # ENABLE MONITORING
    leader_monitoring = true
    nodes_monitoring = true

    #  SETUP SSH USERNAME
    ssh_user = "ec2-user"

    # SETUP ALLOWED CIDRS FOR SSH ACCESS
    ssh_cidr_ingress_block = ["0.0.0.0/0"]
    
}

data "aws_subnet" "current" {
    filter {
        name   = "tag:Name"
        values = ["my-subnet-name"]
    }
}

data "aws_ami" "my_image" {
    most_recent = true
    filter {
        name   = "owner-alias"
        values = ["amazon"]
    }
    filter {
        name   = "name"
        values = ["amzn2-ami-hvm*"]
    }
}

Defaults

Instance type: https://aws.amazon.com/pt/ec2/instance-types/c5/
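
The default instance sizes can be overridden per role through the leader_instance_type and nodes_intance_type inputs. A minimal sketch, reusing the data.aws_subnet.current lookup from the examples above; the c5 sizes are chosen purely for illustration:

module "loadtest" {

    source  = "marcosborges/loadtest-distribuited/aws"
    version = "1.0.0"

    name                = "deployment-name"
    executor            = "jmeter"
    loadtest_dir_source = "./assets"
    loadtest_entrypoint = "jmeter -n -t *.jmx -R \"{NODES_IPS}\""
    nodes_size          = 2
    subnet_id           = data.aws_subnet.current.id

    # Override the default instance types (sizes here are illustrative only)
    leader_instance_type = "c5.xlarge"
    nodes_intance_type   = "c5.large"
}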

Examples with other executors


Requirements

Name Version
terraform >= 0.13.1
aws >= 3.63
null >= 3.1.0
tls >= 3.1.0
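
If you pin these versions in your root configuration, a minimal sketch of a matching terraform block (version constraints taken from the table above, provider sources assumed to be the standard HashiCorp registry addresses):

terraform {
    required_version = ">= 0.13.1"

    required_providers {
        aws = {
            source  = "hashicorp/aws"
            version = ">= 3.63"
        }
        null = {
            source  = "hashicorp/null"
            version = ">= 3.1.0"
        }
        tls = {
            source  = "hashicorp/tls"
            version = ">= 3.1.0"
        }
    }
}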

Providers

Name Version
aws >= 3.63
null >= 3.1.0
tls >= 3.1.0

Modules

No modules.

Resources

Name Type
aws_iam_instance_profile.jmeter resource
aws_iam_role.jmeter resource
aws_instance.leader resource
aws_instance.nodes resource
aws_key_pair.jmeter resource
aws_security_group.jmeter resource
null_resource.publish_split_data resource
null_resource.split_data resource
tls_private_key.jmeter resource
aws_subnet.current data source
aws_vpc.current data source

Inputs

Name Description Type Default Required
auto_setup Install and configure JMeter and Taurus on the Amazon Linux 2 instances bool true no
executor Executor of the loadtest string "jmeter" no
jmeter_version JMeter version string "5.4.1" no
leader_ami_id Id of the AMI string n/a yes
leader_associate_public_ip_address Associate public IP address to the leader bool true no
leader_instance_type Instance type of the cluster leader string "t2.medium" no
leader_jvm_args JVM_ARGS for the leader string " -Xms2g -Xmx4g -XX:MaxMetaspaceSize=512m -XX:+UseG1GC -XX:MaxGCPauseMillis=100 -XX:G1ReservePercent=20 " no
leader_monitoring Enable monitoring for the leader bool true no
leader_tags Tags of the cluster leader map n/a yes
loadtest_dir_destination Path to the destination loadtest directory string "/loadtest" no
loadtest_dir_source Path to the source loadtest directory string n/a yes
loadtest_entrypoint Path to the entrypoint command string "bzt -q -o execution.0.distributed=\"{NODES_IPS}\" *.yml" no
name Name of the provision string n/a yes
nodes_ami_id Id of the AMI string n/a yes
nodes_associate_public_ip_address Associate public IP address to the nodes bool true no
nodes_intance_type Instance type of the cluster nodes string "t2.medium" no
nodes_jvm_args JVM_ARGS for the nodes string "-Xms4g -Xmx8g -XX:MaxMetaspaceSize=256m -XX:+UseG1GC -XX:MaxGCPauseMillis=100 -XX:G1ReservePercent=20 -Dnashorn.args=--no-deprecation-warning -XX:+HeapDumpOnOutOfMemoryError " no
nodes_monitoring Enable monitoring for the nodes bool true no
nodes_size Total number of nodes in the cluster number 2 no
nodes_tags Tags of the cluster nodes map n/a yes
region Name of the region string "us-east-1" no
split_data_mass_between_nodes Split data mass between nodes object({ enable = bool, data_mass_filename = string }) { "enable": true, "data_mass_filename": "../plan/data/data.csv" } no
ssh_cidr_ingress_block Allowed CIDR blocks for SSH access list ["0.0.0.0/0"] no
ssh_export_pem Export the SSH private key (.pem) used for remote access bool true no
ssh_user SSH user for the leader string "ec2-user" no
subnet_id Id of the subnet string n/a yes
tags Common tags map n/a yes
taurus_version Taurus version string "1.16.0" no
vpc_id Id of the VPC string n/a yes

Outputs

Name Description
leader_private_ip The private IP address of the leader server instance.
leader_public_ip The public IP address of the leader server instance.
nodes_private_ip The private IP addresses of the node instances.
nodes_public_ip The public IP addresses of the node instances.
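
These outputs can be re-exported or wired into other resources from the calling configuration. A minimal sketch, assuming the module block is named loadtest as in the examples above:

# Expose selected module outputs from the root module
output "loadtest_leader_public_ip" {
    description = "Public IP of the load test leader (useful for SSH access)"
    value       = module.loadtest.leader_public_ip
}

output "loadtest_nodes_private_ip" {
    description = "Private IPs of the load generator nodes"
    value       = module.loadtest.nodes_private_ip
}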