diff --git a/README.md b/README.md
index d9c81a1..1d1c9b3 100644
--- a/README.md
+++ b/README.md
@@ -70,26 +70,29 @@ Check [examples](./examples) for non-python examples.
 |------|-------------|------|:--------:|
 | [function\_name](#input\_function\_name) | A unique name for your Lambda Function. | `string` | yes |
 | [handler](#input\_handler) | The function entrypoint in your code. | `string` | yes |
-| [output\_path](#input\_output\_path) | A path to which the source directory is archived before uploading to AWS. | `string` | yes |
 | [runtime](#input\_runtime) | The identifier of the function's runtime. | `string` | yes |
-| [source\_dir](#input\_source\_dir) | A path to the directory which contains source files. | `string` | yes |
 | [allowed\_services](#input\_allowed\_services) | A list of AWS Services that are allowed to access this lambda. | `list(string)` | no |
 | [build\_command](#input\_build\_command) | This is the build command to execute. It can be provided as a relative path to the current working directory or as an absolute path. It is evaluated in a shell, and can use environment variables or Terraform variables. | `string` | no |
 | [build\_triggers](#input\_build\_triggers) | A map of values which should cause the build command to re-run. Values are meant to be interpolated references to variables or attributes of other resources. | `map(string)` | no |
 | [dead\_letter\_config](#input\_dead\_letter\_config) | Nested block to configure the function's dead letter queue. | <pre>object({<br>  target_arn = string<br>})</pre> | no |
 | [description](#input\_description) | Description of what your Lambda Function does. | `string` | no |
 | [environment](#input\_environment) | A map that defines environment variables for the Lambda function. | <pre>object({<br>  variables = map(string)<br>})</pre> | no |
 | [exclude\_files](#input\_exclude\_files) | A list of directories or folders to ignore, e.g. …
+| [output\_path](#input\_output\_path) | A path to the deployment archive which will be uploaded to AWS. If `source_dir` is not `null`, then a file is created at `output_path` containing the archived contents of `source_dir`. | `string` | no |
+| [s3\_bucket](#input\_s3\_bucket) | An existing S3 bucket containing the function's deployment package. If `output_path` is also specified, the archive will be uploaded here. | `string` | no |
+| [s3\_key](#input\_s3\_key) | S3 key of an object containing the function's deployment package. If `output_path` is also specified, the archive will be uploaded here. | `string` | no |
+| [s3\_object\_version](#input\_s3\_object\_version) | S3 object version containing the function's deployment package. | `string` | no |
+| [source\_dir](#input\_source\_dir) | A path to the directory which contains source files to be archived into a deployment package. If set to `null`, then no archive file is created. | `string` | no |
 | … | … | <pre>object({<br>  mode = string<br>})</pre> | no |
diff --git a/build.tf b/build.tf
index 9242df6..77b785e 100644
--- a/build.tf
+++ b/build.tf
@@ -12,6 +12,7 @@ resource "null_resource" "build" {
 }
 
 data "archive_file" "source" {
+  count       = var.source_dir != null ? 1 : 0
   type        = "zip"
   source_dir  = var.source_dir
   excludes    = var.exclude_files
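
Because the archive is now created conditionally, every reference to `data.archive_file.source` must be indexed, and consumers of its attributes need a fallback for the `count = 0` case. A minimal sketch of the pattern (the data source's `output_path` argument is assumed to sit just below the excerpt):

```hcl
# With count set, the data source becomes a list: index it with [0] and let
# try() fall back to the raw variable when the list is empty.
locals {
  package_path = try(
    data.archive_file.source[0].output_path, # archive built by this module
    var.output_path                          # prebuilt package on disk
  )
}
```

main.tf below applies exactly this fallback.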
diff --git a/main.tf b/main.tf
index 48fba36..1177a7e 100644
--- a/main.tf
+++ b/main.tf
@@ -76,10 +76,27 @@ resource "aws_cloudwatch_log_group" "this" {
 # Lambda function
 #---------------------------------------------------------------------------------------------------
 
+locals {
+  # Path of the deployment package on disk, whether built by archive_file
+  # or supplied directly via `output_path`.
+  lambda_package_path = try(
+    data.archive_file.source[0].output_path,
+    var.output_path
+  )
+  # `filename` conflicts with `s3_bucket` on aws_lambda_function, so set it
+  # only when the package is deployed straight from the local file.
+  lambda_filename = var.s3_bucket == null ? local.lambda_package_path : null
+  lambda_source_code_hash = try(
+    data.archive_file.source[0].output_base64sha256,
+    filebase64sha256(var.output_path),
+    null # deployment package lives only in S3
+  )
+}
+
 resource "aws_lambda_function" "this" {
-  filename         = data.archive_file.source.output_path
+  filename         = local.lambda_filename
   role             = aws_iam_role.this.arn
-  source_code_hash = data.archive_file.source.output_base64sha256
+  source_code_hash = local.lambda_source_code_hash
 
   runtime = var.runtime
   handler = var.handler
@@ -126,6 +143,10 @@ resource "aws_lambda_function" "this" {
   # configuration and Terraform will show a perpetual difference of adding the key.
   kms_key_arn = var.environment == null ? null : var.lambda_kms_key_arn
 
+  s3_bucket         = var.s3_bucket
+  s3_key            = var.s3_key
+  s3_object_version = var.s3_object_version
+
   tags = var.tags
 
   lifecycle {
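
With these locals in place, a caller can skip `source_dir` entirely and hand the module a package built out of band. A hypothetical invocation (module path, names, and runtime are assumptions, not part of this diff):

```hcl
module "lambda" {
  source = "../" # assumed relative path to this module

  function_name = "example"
  runtime       = "python3.12"
  handler       = "index.handler"

  # No source_dir: archive_file gets count = 0, so filename and
  # source_code_hash fall back to this prebuilt package.
  output_path = "${path.module}/dist/lambda.zip"
}
```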
diff --git a/s3.tf b/s3.tf
new file mode 100644
index 0000000..b7a0278
--- /dev/null
+++ b/s3.tf
@@ -0,0 +1,16 @@
+locals {
+  # Upload only when a bucket, key, and local package are all configured.
+  create_s3_object = var.s3_bucket != null && var.s3_key != null && var.output_path != null
+}
+
+resource "aws_s3_object" "this" {
+  count = local.create_s3_object ? 1 : 0
+
+  bucket = var.s3_bucket
+  key    = var.s3_key
+  # Go through the shared local so the upload depends on the archive
+  # whenever this module builds it.
+  source = local.lambda_package_path
+
+  etag = try(data.archive_file.source[0].output_md5, filemd5(var.output_path))
+}
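
One caveat: S3 only stores an MD5-based ETag for plain single-part, non-KMS uploads, so the `etag` comparison breaks on buckets with SSE-KMS default encryption. If that matters, the `source_hash` argument of `aws_s3_object` is the usual workaround; a sketch of the variant:

```hcl
# Variant for SSE-KMS buckets: source_hash triggers re-uploads like etag,
# but does not rely on the stored ETag being the object's MD5.
resource "aws_s3_object" "this" {
  count = local.create_s3_object ? 1 : 0

  bucket      = var.s3_bucket
  key         = var.s3_key
  source      = local.lambda_package_path
  source_hash = filemd5(var.output_path)
}
```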
diff --git a/variables.tf b/variables.tf
index 49e5527..7059177 100644
--- a/variables.tf
+++ b/variables.tf
@@ -27,18 +27,38 @@ variable "build_triggers" {
 }
 
 variable "source_dir" {
-  description = "A path to the directory which contains source files."
+  description = "A path to the directory which contains source files to be archived into a deployment package. If set to `null`, then no archive file is created."
   type        = string
+  default     = null
 }
 
 variable "output_path" {
-  description = "A path to which the source directory is archived before uploading to AWS."
+  description = "A path to the deployment archive which will be uploaded to AWS. If `source_dir` is not `null`, then a file is created at `output_path` containing the archived contents of `source_dir`."
+  type        = string
+  default     = null
+}
+
+variable "s3_bucket" {
+  description = "An existing S3 bucket containing the function's deployment package. If `output_path` is also specified, the archive will be uploaded here."
+  type        = string
+  default     = null
+}
+
+variable "s3_key" {
+  description = "S3 key of an object containing the function's deployment package. If `output_path` is also specified, the archive will be uploaded here."
   type        = string
+  default     = null
+}
+
+variable "s3_object_version" {
+  description = "S3 object version containing the function's deployment package."
+  type        = string
+  default     = null
 }
 
 variable "exclude_files" {
   description = <
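
For completeness, a hypothetical caller exercising the new S3 path end to end (bucket name and paths are assumptions): the module archives `source_dir`, uploads the archive via `aws_s3_object`, and the function deploys from the bucket instead of a local `filename`.

```hcl
module "lambda" {
  source = "../" # assumed relative path to this module

  function_name = "example"
  runtime       = "python3.12"
  handler       = "index.handler"

  # Build locally, then deploy through S3.
  source_dir  = "${path.module}/src"
  output_path = "${path.module}/dist/lambda.zip"
  s3_bucket   = "my-artifact-bucket" # assumed pre-existing bucket
  s3_key      = "lambda/example.zip"
}
```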