我在 github 上看到一个旧的 repo,它似乎正是我的用例所需要的。不幸的是,代码需要更新,因为 lambda 和/或 python 不再支持某些操作。
当我尝试执行
PermissionError: [Errno 13] Permission denied: '/tmp/tmpg4l5twh9/terraform'
时,我不断得到terraform --version
。 (我在本地环境中以及将 lambda 函数打包并部署到 AWS 时遇到了同样的错误)。
这是我更新的沙箱代码,用于更新 lambda 函数:
import os
import subprocess
import urllib.request
import tarfile
import json
import tempfile
# Fetch the latest version of Terraform
URL = 'https://api.github.com/repos/hashicorp/terraform/releases/latest'
with urllib.request.urlopen(URL) as response:
data = response.read()
encoding = response.info().get_content_charset('utf-8')
TERRAFORM_DOWNLOAD_URL = json.loads(data.decode(encoding))['tarball_url']
# Download and extract the latest version of Terraform
with tempfile.TemporaryDirectory() as tmpdir:
TERRAFORM_TAR_PATH = os.path.join(tmpdir, 'terraform.tar.gz')
TERRAFORM_PATH = os.path.join(tmpdir, 'terraform')
urllib.request.urlretrieve(TERRAFORM_DOWNLOAD_URL, TERRAFORM_TAR_PATH)
with tarfile.open(TERRAFORM_TAR_PATH, "r:gz") as tf:
print(f"Extracting {TERRAFORM_TAR_PATH} to {TERRAFORM_PATH}")
tf.extractall(path=tmpdir)
# Remove the tar file after it's extracted
os.remove(TERRAFORM_TAR_PATH)
print(f"All files extracted to {TERRAFORM_PATH}")
print(f"{TERRAFORM_PATH} contents: {os.listdir(tmpdir)}")
# Add Terraform to PATH
os.rename(f'{tmpdir}/{os.listdir(tmpdir)[0]}', TERRAFORM_PATH)
os.environ["PATH"] += os.pathsep + TERRAFORM_PATH
os.chmod(TERRAFORM_PATH, 0o777)
# os.system(f'chmod -R 777 {TERRAFORM_PATH}')
print(os.listdir(TERRAFORM_PATH))
subprocess.check_output([TERRAFORM_PATH, "--version"])
我不相信这是 有没有办法通过 AWS lambda 函数部署 terraform 文件的方法? 因为我正在使用以前有效的解决方案。
我最终按照建议使用了 codebuild,但看起来 lambda 仍然可行。
resource "aws_codebuild_project" "on_success" {
name = "${random_pet.generator.id}-on-success"
description = "CodeBuild project for applying Terraform plans"
service_role = aws_iam_role.<ROLE_NAME>.arn
environment {
compute_type = "BUILD_GENERAL1_SMALL"
image = "hashicorp/terraform:latest"
# image = "public.ecr.aws/hashicorp/terraform:latest"
type = "LINUX_CONTAINER"
environment_variable {
name = "S3_SAUCE_BUCKET_ID"
value = aws_s3_bucket.sauce.id
}
environment_variable {
name = "TF_PACKAGE_NAME"
value = var.TF_PACKAGE_NAME
}
}
source {
type = "NO_SOURCE"
buildspec = file("buildspec.yml")
}
artifacts {
type = "NO_ARTIFACTS"
}
cache {
type = "LOCAL"
modes = ["LOCAL_DOCKER_LAYER_CACHE"]
}
# This file will be available to the buildspec.yml phases
secondary_sources {
source_identifier = "local"
type = "S3"
location = "${aws_s3_object.on_success.bucket}/${aws_s3_object.on_success.key}"
}
}
locals {
outputs = {
ON_SUCCESS_IAM_ROLE = aws_iam_role.<ROLE_NAME>.arn
S3_SAUCE_BUCKET_ID = aws_s3_bucket.<BUCKET>.id
S3_DESTINATION_BUCKET_ARN = aws_s3_bucket.<BUCKET>.arn
AWS_DESTINATION_ACCOUNT_ID = data.aws_caller_identity.destination.account_id
}
}
resource "local_file" "on_success_variables" {
filename = "${path.root}/modules/on_success/${var.ON_SUCCESS_VARS_FILE}"
content = <<-EOT
%{for key, value in local.outputs~}
${key}="${value}"
%{endfor~}
EOT
}
data "archive_file" "on_success" {
type = "zip"
source_dir = "${path.root}/modules/on_success"
output_path = "${path.root}/tmp/${var.TF_PACKAGE_NAME}"
# excludes = fileset("${path.root}/modules/on_success/", "*.zip")
excludes = setunion(fileset("${path.root}/modules/on_success", "*.zip"), [".terraform", ".git", ".scripts", ".vscode"])
}
version: 0.2
phases:
build:
on-failure: ABORT
commands:
- cd "$CODEBUILD_SRC_DIR/s3/01"
- terraform init
- terraform apply -auto-approve