Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
repos:
- repo: https://github.com/antonbabenko/pre-commit-terraform
rev: v1.103.0
rev: v1.104.0
hooks:
- id: terraform_fmt
- id: terraform_wrapper_module_for_each
Expand Down
6 changes: 3 additions & 3 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -414,7 +414,7 @@ source_path = [
npm_tmp_dir = "/tmp/dir/location"
prefix_in_zip = "foo/bar1",
}, {
path = "src/python-app3",
path = "src/nodejs-app2",
commands = [
"npm install",
":zip"
Expand All @@ -424,7 +424,7 @@ source_path = [
"node_modules/.+", # Include all node_modules
],
}, {
path = "src/python-app3",
path = "src/go-app1",
commands = ["go build"],
patterns = <<END
bin/.*
Expand All @@ -437,9 +437,9 @@ source_path = [
*Few notes:*

- If you specify a source path as a string that references a folder and the runtime begins with `python` or `nodejs`, the build process will automatically build python and nodejs dependencies if a `requirements.txt` or `package.json` file is found in the source folder. If you want to customize this behavior, please use the object notation as explained below.
- If you use the `commands` option and chain multiple commands, only the exit code of the last command will be checked for success. If you prefer to fail fast, start the commands with the bash option `set -e` or powershell option `$ErrorActionPreference="Stop"`
- All arguments except `path` are optional.
- `patterns` - List of Python regular expressions that filenames should satisfy. Default value is "include everything" which is equal to `patterns = [".*"]`. This can also be specified as a multiline heredoc string (no comments allowed). Some examples of valid patterns:
- If you use the `commands` option and chain multiple commands, only the exit code of the last command will be checked for success. If you prefer to fail fast, start the commands with the bash option `set -e` or powershell option `$ErrorActionPreference="Stop"`

```txt
!.*/.*\.txt # Filter all txt files recursively
Expand Down
2 changes: 1 addition & 1 deletion examples/event-source-mapping/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@ Note that this example may create resources which cost money. Run `terraform des
| Name | Source | Version |
|------|--------|---------|
| <a name="module_lambda_function"></a> [lambda\_function](#module\_lambda\_function) | ../../ | n/a |
| <a name="module_vpc"></a> [vpc](#module\_vpc) | terraform-aws-modules/vpc/aws | ~> 5.0 |
| <a name="module_vpc"></a> [vpc](#module\_vpc) | terraform-aws-modules/vpc/aws | ~> 6.0 |

## Resources

Expand Down
2 changes: 1 addition & 1 deletion examples/event-source-mapping/main.tf
Original file line number Diff line number Diff line change
Expand Up @@ -241,7 +241,7 @@ resource "aws_kinesis_stream" "this" {
# Amazon MQ
module "vpc" {
source = "terraform-aws-modules/vpc/aws"
version = "~> 5.0"
version = "~> 6.0"

name = random_pet.this.id
cidr = local.vpc_cidr
Expand Down
File renamed without changes.
2 changes: 1 addition & 1 deletion examples/simple-cicd/main.tf
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ module "lambda_function" {
runtime = "python3.12"

source_path = [
"${path.module}/src/python-app1",
"${path.module}/../fixtures/python-app1",
]
trigger_on_package_timestamp = false
}
2 changes: 1 addition & 1 deletion examples/with-efs/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,7 @@ Note that this example may create resources which cost money. Run `terraform des
| Name | Source | Version |
|------|--------|---------|
| <a name="module_lambda_function_with_efs"></a> [lambda\_function\_with\_efs](#module\_lambda\_function\_with\_efs) | ../../ | n/a |
| <a name="module_vpc"></a> [vpc](#module\_vpc) | terraform-aws-modules/vpc/aws | ~> 5.0 |
| <a name="module_vpc"></a> [vpc](#module\_vpc) | terraform-aws-modules/vpc/aws | ~> 6.0 |

## Resources

Expand Down
2 changes: 1 addition & 1 deletion examples/with-efs/main.tf
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,7 @@ module "lambda_function_with_efs" {

module "vpc" {
source = "terraform-aws-modules/vpc/aws"
version = "~> 5.0"
version = "~> 6.0"

name = random_pet.this.id
cidr = "10.10.0.0/16"
Expand Down
4 changes: 2 additions & 2 deletions examples/with-vpc-s3-endpoint/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -40,8 +40,8 @@ Note that this example may create resources which cost money. Run `terraform des
| <a name="module_lambda_s3_write"></a> [lambda\_s3\_write](#module\_lambda\_s3\_write) | ../../ | n/a |
| <a name="module_s3_bucket"></a> [s3\_bucket](#module\_s3\_bucket) | terraform-aws-modules/s3-bucket/aws | ~> 5.0 |
| <a name="module_security_group_lambda"></a> [security\_group\_lambda](#module\_security\_group\_lambda) | terraform-aws-modules/security-group/aws | ~> 4.0 |
| <a name="module_vpc"></a> [vpc](#module\_vpc) | terraform-aws-modules/vpc/aws | ~> 5.0 |
| <a name="module_vpc_endpoints"></a> [vpc\_endpoints](#module\_vpc\_endpoints) | terraform-aws-modules/vpc/aws//modules/vpc-endpoints | ~> 5.0 |
| <a name="module_vpc"></a> [vpc](#module\_vpc) | terraform-aws-modules/vpc/aws | ~> 6.0 |
| <a name="module_vpc_endpoints"></a> [vpc\_endpoints](#module\_vpc\_endpoints) | terraform-aws-modules/vpc/aws//modules/vpc-endpoints | ~> 6.0 |

## Resources

Expand Down
4 changes: 2 additions & 2 deletions examples/with-vpc-s3-endpoint/main.tf
Original file line number Diff line number Diff line change
Expand Up @@ -59,7 +59,7 @@ data "aws_ec2_managed_prefix_list" "this" {

module "vpc" {
source = "terraform-aws-modules/vpc/aws"
version = "~> 5.0"
version = "~> 6.0"

name = random_pet.this.id
cidr = "10.0.0.0/16"
Expand Down Expand Up @@ -101,7 +101,7 @@ module "vpc" {

module "vpc_endpoints" {
source = "terraform-aws-modules/vpc/aws//modules/vpc-endpoints"
version = "~> 5.0"
version = "~> 6.0"

vpc_id = module.vpc.vpc_id

Expand Down
2 changes: 1 addition & 1 deletion examples/with-vpc/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,7 @@ Note that this example may create resources which cost money. Run `terraform des
| Name | Source | Version |
|------|--------|---------|
| <a name="module_lambda_function_in_vpc"></a> [lambda\_function\_in\_vpc](#module\_lambda\_function\_in\_vpc) | ../../ | n/a |
| <a name="module_vpc"></a> [vpc](#module\_vpc) | terraform-aws-modules/vpc/aws | ~> 5.0 |
| <a name="module_vpc"></a> [vpc](#module\_vpc) | terraform-aws-modules/vpc/aws | ~> 6.0 |

## Resources

Expand Down
2 changes: 1 addition & 1 deletion examples/with-vpc/main.tf
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ module "lambda_function_in_vpc" {

module "vpc" {
source = "terraform-aws-modules/vpc/aws"
version = "~> 5.0"
version = "~> 6.0"

name = random_pet.this.id
cidr = "10.10.0.0/16"
Expand Down
108 changes: 69 additions & 39 deletions package.py
Original file line number Diff line number Diff line change
Expand Up @@ -243,32 +243,50 @@ def generate_content_hash(source_paths, hash_func=hashlib.sha256, log=None):

if log:
log = log.getChild("hash")
_log = log if log.isEnabledFor(DEBUG3) else None

hash_obj = hash_func()

for source_path in source_paths:
if os.path.isdir(source_path):
source_dir = source_path
_log = log if log.isEnabledFor(DEBUG3) else None
for source_file in list_files(source_dir, log=_log):
for source_path, pf, prefix in source_paths:
if pf is not None:
for path_from_pattern in pf.filter(source_path, prefix):
if os.path.isdir(path_from_pattern):
# Hash only the path of the directory
source_dir = path_from_pattern
source_file = None
else:
source_dir = os.path.dirname(path_from_pattern)
source_file = os.path.relpath(path_from_pattern, source_dir)
update_hash(hash_obj, source_dir, source_file)
if log:
log.debug(os.path.join(source_dir, source_file))
log.debug(path_from_pattern)
else:
source_dir = os.path.dirname(source_path)
source_file = os.path.relpath(source_path, source_dir)
update_hash(hash_obj, source_dir, source_file)
if log:
log.debug(source_path)
if os.path.isdir(source_path):
source_dir = source_path
for source_file in list_files(source_dir, log=_log):
update_hash(hash_obj, source_dir, source_file)
if log:
log.debug(os.path.join(source_dir, source_file))
else:
source_dir = os.path.dirname(source_path)
source_file = os.path.relpath(source_path, source_dir)
update_hash(hash_obj, source_dir, source_file)
if log:
log.debug(source_path)

return hash_obj


def update_hash(hash_obj, file_root, file_path):
def update_hash(hash_obj, file_root, file_path=None):
"""
Update a hashlib object with the relative path and contents of a file.
Update a hashlib object with the relative path and, if the given
file_path is not None, its content.
"""

if file_path is None:
hash_obj.update(file_root.encode())
return

relative_path = os.path.join(file_root, file_path)
hash_obj.update(relative_path.encode())

Expand Down Expand Up @@ -562,7 +580,6 @@ class ZipContentFilter:
def __init__(self, args):
self._args = args
self._rules = None
self._excludes = set()
self._log = logging.getLogger("zip")

def compile(self, patterns):
Expand Down Expand Up @@ -668,7 +685,7 @@ def hash(self, extra_paths):
if not self._source_paths:
raise ValueError("BuildPlanManager.plan() should be called first")

content_hash_paths = self._source_paths + extra_paths
content_hash_paths = self._source_paths + [(p, None, None) for p in extra_paths]

# Generate a hash based on file names and content. Also use the
# runtime value, build command, and content of the build paths
Expand All @@ -677,7 +694,7 @@ def hash(self, extra_paths):
content_hash = generate_content_hash(content_hash_paths, log=self._log)
return content_hash

def plan(self, source_path, query):
def plan(self, source_path, query, log=None):
claims = source_path
if not isinstance(source_path, list):
claims = [source_path]
Expand All @@ -686,11 +703,14 @@ def plan(self, source_path, query):
build_plan = []
build_step = []

if log:
log = log.getChild("plan")

def step(*x):
build_step.append(x)

def hash(path):
source_paths.append(path)
def hash(path, patterns=None, prefix=None):
source_paths.append((path, patterns, prefix))

def pip_requirements_step(path, prefix=None, required=False, tmp_dir=None):
command = runtime
Expand Down Expand Up @@ -759,23 +779,20 @@ def npm_requirements_step(path, prefix=None, required=False, tmp_dir=None):
step("npm", runtime, requirements, prefix, tmp_dir)
hash(requirements)

def commands_step(path, commands):
def commands_step(path, commands, patterns):
if not commands:
return

if isinstance(commands, str):
commands = map(str.strip, commands.splitlines())

if path:
path = os.path.normpath(path)
step("set:workdir", path)

batch = []
for c in commands:
if isinstance(c, str):
if c.startswith(":zip"):
if path:
hash(path)
if batch:
step("sh", "\n".join(batch))
batch.clear()
Expand All @@ -786,12 +803,18 @@ def commands_step(path, commands):
prefix = prefix.strip()
_path = os.path.normpath(_path)
step("zip:embedded", _path, prefix)
if path:
hash(path, patterns, prefix)
elif n == 2:
_, _path = c
_path = os.path.normpath(_path)
step("zip:embedded", _path)
if path:
hash(path, patterns=patterns)
elif n == 1:
step("zip:embedded")
if path:
hash(path, patterns=patterns)
else:
raise ValueError(
":zip invalid call signature, use: "
Expand All @@ -805,7 +828,7 @@ def commands_step(path, commands):

for claim in claims:
if isinstance(claim, str):
path = claim
path = os.path.normpath(claim)
if not os.path.exists(path):
abort(
'Could not locate source_path "{path}". Paths are relative to directory where `terraform plan` is being run ("{pwd}")'.format(
Expand All @@ -823,12 +846,14 @@ def commands_step(path, commands):

elif isinstance(claim, dict):
path = claim.get("path")
if path:
path = os.path.normpath(path)
patterns = claim.get("patterns")
commands = claim.get("commands")
if patterns:
step("set:filter", patterns_list(self._args, patterns))
if commands:
commands_step(path, commands)
commands_step(path, commands, patterns)
else:
prefix = claim.get("prefix_in_zip")
pip_requirements = claim.get("pip_requirements")
Expand All @@ -849,7 +874,7 @@ def commands_step(path, commands):
)
else:
pip_requirements_step(
pip_requirements,
os.path.normpath(pip_requirements),
prefix,
required=True,
tmp_dir=claim.get("pip_tmp_dir"),
Expand All @@ -875,31 +900,33 @@ def commands_step(path, commands):
)
else:
npm_requirements_step(
npm_requirements,
os.path.normpath(npm_requirements),
prefix,
required=True,
tmp_dir=claim.get("npm_tmp_dir"),
)
if path:
path = os.path.normpath(path)
step("zip", path, prefix)
if patterns:
# Take patterns into account when computing hash
pf = ZipContentFilter(args=self._args)
pf.compile(patterns)

for path_from_pattern in pf.filter(path, prefix):
hash(path_from_pattern)
else:
hash(path)
hash(path, patterns, prefix)
else:
raise ValueError("Unsupported source_path item: {}".format(claim))

if build_step:
build_plan.append(build_step)
build_step = []

self._source_paths = source_paths
if log.isEnabledFor(DEBUG3):
log.debug("source_paths: %s", json.dumps(source_paths, indent=2))

for p, patterns, prefix in source_paths:
if self._source_paths is None:
self._source_paths = []
pf = None
if patterns is not None:
pf = ZipContentFilter(args=self._args)
pf.compile(patterns)
self._source_paths.append((p, pf, prefix))

return build_plan

def execute(self, build_plan, zip_stream, query):
Expand Down Expand Up @@ -1691,7 +1718,7 @@ def prepare_command(args):
if log.isEnabledFor(DEBUG3):
log.debug("QUERY: %s", json.dumps(query_data, indent=2))
else:
log_excludes = ("source_path", "hash_extra_paths", "paths")
log_excludes = ("source_path", "hash_extra_paths", "hash_internal", "paths")
qd = {k: v for k, v in query_data.items() if k not in log_excludes}
log.debug("QUERY (excerpt): %s", json.dumps(qd, indent=2))

Expand All @@ -1704,6 +1731,7 @@ def prepare_command(args):
hash_extra_paths = query.hash_extra_paths
source_path = query.source_path
hash_extra = query.hash_extra
hash_internal = query.hash_internal
recreate_missing_package = yesno_bool(
args.recreate_missing_package
if args.recreate_missing_package is not None
Expand All @@ -1712,7 +1740,7 @@ def prepare_command(args):
docker = query.docker

bpm = BuildPlanManager(args, log=log)
build_plan = bpm.plan(source_path, query)
build_plan = bpm.plan(source_path, query, log)

if log.isEnabledFor(DEBUG2):
log.debug("BUILD_PLAN: %s", json.dumps(build_plan, indent=2))
Expand All @@ -1723,6 +1751,8 @@ def prepare_command(args):
content_hash = bpm.hash(hash_extra_paths)
content_hash.update(json.dumps(build_plan, sort_keys=True).encode())
content_hash.update(runtime.encode())
for c in hash_internal:
content_hash.update(c.encode())
content_hash.update(hash_extra.encode())
content_hash = content_hash.hexdigest()

Expand Down
Loading