diff --git a/.github/workflows/base-deploy.yml b/.github/workflows/base-deploy.yml
index 59bf5e8f..2e01aaec 100644
--- a/.github/workflows/base-deploy.yml
+++ b/.github/workflows/base-deploy.yml
@@ -153,21 +153,246 @@ jobs:
             ./dist/lambda.zip \
             --region eu-west-2
 
-      - name: "Upload lambda artifact for the current workflow"
+      - name: "Upload lambda artifact for downstream jobs"
         uses: actions/upload-artifact@v6
         with:
           name: lambda-${{ needs.metadata.outputs.tag }}
           path: ./dist/lambda.zip
 
-  deploy:
-    name: "Deploy to ${{ needs.metadata.outputs.environment }}"
+  sign-lambda-artifact:
+    name: "Sign lambda artifact for PreProd"
+    if: ${{ needs.metadata.outputs.environment == 'preprod' }}
+    runs-on: ubuntu-latest
+    needs: [ metadata, download-lambda-artifact ]
+    timeout-minutes: 45
+    permissions:
+      id-token: write
+      contents: read
+    environment: preprod
+    steps:
+      - name: "Checkout repository at ref"
+        uses: actions/checkout@v6
+        with:
+          ref: ${{ needs.metadata.outputs.ref }}
+          fetch-depth: 0
+
+      - name: "Setup Terraform"
+        uses: hashicorp/setup-terraform@v3
+        with:
+          terraform_version: ${{ needs.metadata.outputs.terraform_version }}
+
+      - name: "Configure AWS Credentials"
+        uses: aws-actions/configure-aws-credentials@v6
+        with:
+          role-to-assume: arn:aws:iam::${{ secrets.AWS_ACCOUNT_ID }}:role/service-roles/github-actions-api-deployment-role
+          aws-region: eu-west-2
+
+      - name: "Download unsigned lambda artifact from current workflow"
+        uses: actions/download-artifact@v7
+        with:
+          name: lambda-${{ needs.metadata.outputs.tag }}
+          path: ./dist
+
+      - name: "Terraform Init (PREPROD api-layer)"
+        env:
+          ENVIRONMENT: preprod
+          WORKSPACE: "default"
+        run: |
+          echo "Running: make terraform env=$ENVIRONMENT workspace=$WORKSPACE stack=api-layer tf-command=init"
+          make terraform env=$ENVIRONMENT stack=api-layer tf-command=init workspace=$WORKSPACE
+        working-directory: ./infrastructure
+
+      - name: "Extract PREPROD Terraform outputs"
+        id: preprod_tf_output
+        run: |
+          BUCKET=$(terraform output -raw lambda_artifact_bucket)
+          PROFILE=$(terraform output -raw signing_profile_name)
+          echo "bucket_name=$BUCKET" >> $GITHUB_OUTPUT
+          echo "signing_profile_name=$PROFILE" >> $GITHUB_OUTPUT
+        working-directory: ./infrastructure/stacks/api-layer
+
+      - name: "Upload unsigned lambda artifact to PREPROD S3"
+        run: |
+          aws s3 cp ./dist/lambda.zip \
+            s3://${{ steps.preprod_tf_output.outputs.bucket_name }}/artifacts/${{ needs.metadata.outputs.tag }}/lambda.zip \
+            --region eu-west-2
+
+      - name: "Get uploaded source object version"
+        id: source_object
+        run: |
+          VERSION_ID=$(aws s3api head-object \
+            --bucket "${{ steps.preprod_tf_output.outputs.bucket_name }}" \
+            --key "artifacts/${{ needs.metadata.outputs.tag }}/lambda.zip" \
+            --query 'VersionId' \
+            --output text \
+            --region eu-west-2)
+          echo "version_id=$VERSION_ID" >> $GITHUB_OUTPUT
+
+      - name: "Start signing job"
+        id: signing
+        env:
+          SIGNING_PROFILE_NAME: ${{ steps.preprod_tf_output.outputs.signing_profile_name }}
+        run: |
+          JOB_ID=$(aws signer start-signing-job \
+            --source "s3={bucketName=${{ steps.preprod_tf_output.outputs.bucket_name }},key=artifacts/${{ needs.metadata.outputs.tag }}/lambda.zip,version=${{ steps.source_object.outputs.version_id }}}" \
+            --destination "s3={bucketName=${{ steps.preprod_tf_output.outputs.bucket_name }},prefix=signed-artifacts/${{ needs.metadata.outputs.tag }}/}" \
+            --profile-name "$SIGNING_PROFILE_NAME" \
+            --query 'jobId' \
+            --output text \
+            --region eu-west-2)
+          echo "job_id=$JOB_ID" >> $GITHUB_OUTPUT
+
+      - name: "Wait for signing job"
+        run: |
+          aws signer wait successful-signing-job \
+            --job-id "${{ steps.signing.outputs.job_id }}" \
+            --region eu-west-2
+
+      - name: "Resolve signed artifact location"
+        id: signed_object
+        run: |
+          SIGNED_BUCKET=$(aws signer describe-signing-job \
+            --job-id "${{ steps.signing.outputs.job_id }}" \
+            --region eu-west-2 \
+            --query 'signedObject.s3.bucketName' \
+            --output text)
+
+          SIGNED_KEY=$(aws signer describe-signing-job \
+            --job-id "${{ steps.signing.outputs.job_id }}" \
+            --region eu-west-2 \
+            --query 'signedObject.s3.key' \
+            --output text)
+
+          echo "bucket_name=$SIGNED_BUCKET" >> $GITHUB_OUTPUT
+          echo "object_key=$SIGNED_KEY" >> $GITHUB_OUTPUT
+
+      - name: "Download signed lambda artifact"
+        run: |
+          aws s3 cp \
+            "s3://${{ steps.signed_object.outputs.bucket_name }}/${{ steps.signed_object.outputs.object_key }}" \
+            ./dist/lambda.zip \
+            --region eu-west-2
+
+      - name: "Upload signed lambda artifact for current workflow"
+        uses: actions/upload-artifact@v6
+        with:
+          name: lambda-signed-${{ needs.metadata.outputs.tag }}
+          path: ./dist/lambda.zip
+
+
+  deploy-preprod:
+    name: "Deploy to preprod"
+    if: ${{ needs.metadata.outputs.environment == 'preprod' }}
+    runs-on: ubuntu-latest
+    needs: [metadata, download-lambda-artifact, sign-lambda-artifact]
+    timeout-minutes: 45
+    permissions:
+      id-token: write
+      contents: write
+    environment: preprod
+    steps:
+      - name: "Checkout repository at ref"
+        uses: actions/checkout@v6
+        with:
+          ref: ${{ needs.metadata.outputs.ref }}
+          fetch-depth: 0
+
+      - name: "Setup Terraform"
+        uses: hashicorp/setup-terraform@v3
+        with:
+          terraform_version: ${{ needs.metadata.outputs.terraform_version }}
+
+      - name: "Download signed Lambda Artifact"
+        uses: actions/download-artifact@v7
+        with:
+          name: lambda-signed-${{ needs.metadata.outputs.tag }}
+          path: ./dist
+
+      - name: "Configure AWS Credentials (IAM Bootstrap Role)"
+        uses: aws-actions/configure-aws-credentials@v6
+        with:
+          role-to-assume: arn:aws:iam::${{ secrets.AWS_ACCOUNT_ID }}:role/service-roles/github-actions-iam-bootstrap-role
+          aws-region: eu-west-2
+
+      - name: "Deploy IAM roles (iams-developer-roles stack)"
+        working-directory: ./infrastructure
+        run: |
+          make terraform env=preprod stack=iams-developer-roles tf-command=apply workspace=default
+
+      - name: "Configure AWS Credentials (Main Deployment Role)"
+        uses: aws-actions/configure-aws-credentials@v6
+        with:
+          role-to-assume: arn:aws:iam::${{ secrets.AWS_ACCOUNT_ID }}:role/service-roles/github-actions-api-deployment-role
+          aws-region: eu-west-2
+
+      - name: "Terraform Apply"
+        env:
+          ENVIRONMENT: preprod
+          WORKSPACE: "default"
+          TF_VAR_API_CA_CERT: ${{ secrets.API_CA_CERT }}
+          TF_VAR_API_CLIENT_CERT: ${{ secrets.API_CLIENT_CERT }}
+          TF_VAR_API_PRIVATE_KEY_CERT: ${{ secrets.API_PRIVATE_KEY_CERT }}
+          TF_VAR_SPLUNK_HEC_TOKEN: ${{ secrets.SPLUNK_HEC_TOKEN }}
+          TF_VAR_SPLUNK_HEC_ENDPOINT: ${{ secrets.SPLUNK_HEC_ENDPOINT }}
+          TF_VAR_OPERATOR_EMAILS: ${{ vars.SECRET_ROTATION_OPERATOR_EMAILS }}
+          TF_VAR_PROXYGEN_PRIVATE_KEY_PTL: ${{ secrets.PROXYGEN_PRIVATE_KEY_PTL }}
+          TF_VAR_PROXYGEN_PRIVATE_KEY_PROD: ${{ secrets.PROXYGEN_PRIVATE_KEY_PROD }}
+        working-directory: ./infrastructure
+        shell: bash
+        run: |
+          set -euo pipefail
+          mkdir -p ./build
+          echo "Running: make terraform env=$ENVIRONMENT workspace=$WORKSPACE stack=networking tf-command=apply"
+          make terraform env=$ENVIRONMENT stack=networking tf-command=apply workspace=$WORKSPACE
+          echo "Running: make terraform env=$ENVIRONMENT workspace=$WORKSPACE stack=api-layer tf-command=apply"
+          make terraform env=$ENVIRONMENT stack=api-layer tf-command=apply workspace=$WORKSPACE
+
+      - name: "Extract S3 bucket name from Terraform output"
+        id: tf_output
+        run: |
+          BUCKET=$(terraform output -raw lambda_artifact_bucket)
+          echo "bucket_name=$BUCKET" >> $GITHUB_OUTPUT
+        working-directory: ./infrastructure/stacks/api-layer
+
+      - name: "Validate Feature Toggles"
+        env:
+          ENV: preprod
+        run: |
+          pip install boto3
+          python scripts/feature_toggle/validate_toggles.py
+
+      - name: "Tag and Release"
+        env:
+          ENVIRONMENT: preprod
+          REF: ${{ needs.metadata.outputs.ref }}
+          INPUT_RELEASE_TYPE: ${{ inputs.release_type }}
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+          GITHUB_REPOSITORY: ${{ github.repository }}
+        run: |
+          pip install requests
+          python scripts/workflow/tag_and_release.py
+
+      - name: "Capture release tag"
+        id: release_tag
+        run: |
+          echo "release_tag=$(cat release_tag.txt)" >> $GITHUB_OUTPUT
+
+      - name: "Upload lambda artifact to S3"
+        run: |
+          aws s3 cp ./dist/lambda.zip \
+            s3://${{ steps.tf_output.outputs.bucket_name }}/artifacts/${{ steps.release_tag.outputs.release_tag }}/lambda.zip \
+            --region eu-west-2
+
+  deploy-prod:
+    name: "Deploy to prod"
+    if: ${{ needs.metadata.outputs.environment == 'prod' }}
     runs-on: ubuntu-latest
     needs: [metadata, download-lambda-artifact]
     timeout-minutes: 45
     permissions:
       id-token: write
       contents: write
-    environment: ${{ needs.metadata.outputs.environment }}
+    environment: prod
     steps:
       - name: "Checkout repository at ref"
         uses: actions/checkout@v6
         with:
@@ -195,7 +420,7 @@ jobs:
       - name: "Deploy IAM roles (iams-developer-roles stack)"
         working-directory: ./infrastructure
         run: |
-          make terraform env=${{ needs.metadata.outputs.environment }} stack=iams-developer-roles tf-command=apply workspace=default
+          make terraform env=prod stack=iams-developer-roles tf-command=apply workspace=default
 
       - name: "Configure AWS Credentials (Main Deployment Role)"
         uses: aws-actions/configure-aws-credentials@v6
@@ -205,7 +430,7 @@ jobs:
 
       - name: "Terraform Apply"
         env:
-          ENVIRONMENT: ${{ needs.metadata.outputs.environment }}
+          ENVIRONMENT: prod
           WORKSPACE: "default"
           TF_VAR_API_CA_CERT: ${{ secrets.API_CA_CERT }}
           TF_VAR_API_CLIENT_CERT: ${{ secrets.API_CLIENT_CERT }}
@@ -215,7 +440,6 @@ jobs:
           TF_VAR_OPERATOR_EMAILS: ${{ vars.SECRET_ROTATION_OPERATOR_EMAILS }}
           TF_VAR_PROXYGEN_PRIVATE_KEY_PTL: ${{ secrets.PROXYGEN_PRIVATE_KEY_PTL }}
           TF_VAR_PROXYGEN_PRIVATE_KEY_PROD: ${{ secrets.PROXYGEN_PRIVATE_KEY_PROD }}
-
         working-directory: ./infrastructure
         shell: bash
         run: |
@@ -235,15 +459,14 @@ jobs:
 
       - name: "Validate Feature Toggles"
         env:
-          ENV: ${{ needs.metadata.outputs.environment }}
+          ENV: prod
         run: |
           pip install boto3
           python scripts/feature_toggle/validate_toggles.py
 
       - name: "Tag and Release"
-        if: ${{ needs.metadata.outputs.environment == 'preprod' || needs.metadata.outputs.environment == 'prod' }}
         env:
-          ENVIRONMENT: ${{ needs.metadata.outputs.environment }}
+          ENVIRONMENT: prod
           REF: ${{ needs.metadata.outputs.ref }}
           INPUT_RELEASE_TYPE: ${{ inputs.release_type }}
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
@@ -266,7 +489,7 @@ jobs:
   regression-tests:
     name: "Regression Tests"
     if: ${{ needs.metadata.outputs.environment == 'preprod' }}
-    needs: deploy
+    needs: deploy-preprod
     permissions:
       id-token: write
       contents: read
diff --git a/infrastructure/modules/lambda/lambda.tf b/infrastructure/modules/lambda/lambda.tf
index db67f93a..e44e948d 100644
--- a/infrastructure/modules/lambda/lambda.tf
+++ b/infrastructure/modules/lambda/lambda.tf
@@ -1,7 +1,7 @@
 resource "aws_lambda_function" "eligibility_signposting_lambda" {
   #checkov:skip=CKV_AWS_116: No deadletter queue is configured for this Lambda function, as the requests are synchronous
   #checkov:skip=CKV_AWS_115: Concurrent execution limit will be set at APIM level, not at Lambda level
-  #checkov:skip=CKV_AWS_272: Skipping code signing but flagged to create ticket to investigate on ELI-238
+  #checkov:skip=CKV_AWS_272: Code signing not yet enforced in prod - tracked for removal when prod enforcement is enabled
   # If the file is not in the current working directory you will need to include a
   # path.module in the filename.
   filename = var.file_name
@@ -11,6 +11,8 @@ resource "aws_lambda_function" "eligibility_signposting_lambda" {
 
   source_code_hash = filebase64sha256(var.file_name)
 
+  code_signing_config_arn = contains(var.environments_with_signing, var.environment) ? aws_lambda_code_signing_config.signing_config.arn : null
+
   runtime     = var.runtime
   timeout     = 30
   memory_size = 2048
@@ -39,10 +41,10 @@ resource "aws_lambda_function" "eligibility_signposting_lambda" {
   }
 
   layers = compact([
-  var.environment == "prod" || var.environment == "preprod" ?
-  "arn:aws:lambda:${var.region}:580247275435:layer:LambdaInsightsExtension:${var.lambda_insights_extension_version}"
-  :
-  null
+    var.environment == "prod" || var.environment == "preprod" ?
+    "arn:aws:lambda:${var.region}:580247275435:layer:LambdaInsightsExtension:${var.lambda_insights_extension_version}"
+    :
+    null
   ])
 
 
@@ -66,4 +68,3 @@ resource "aws_lambda_provisioned_concurrency_config" "campaign_pc" {
   qualifier                         = aws_lambda_alias.campaign_alias[0].name
   provisioned_concurrent_executions = var.provisioned_concurrency_count
 }
-
diff --git a/infrastructure/modules/lambda/variables.tf b/infrastructure/modules/lambda/variables.tf
index ffc76497..e6366e56 100644
--- a/infrastructure/modules/lambda/variables.tf
+++ b/infrastructure/modules/lambda/variables.tf
@@ -88,3 +88,9 @@ variable "hashing_secret_name" {
   description = "hashing secret name"
   type        = string
 }
+
+variable "environments_with_signing" {
+  description = "List of environments where Lambda code signing is enabled; enforcement behaviour depends on the configured code signing policy"
+  type        = list(string)
+  default     = ["test"]
+}
diff --git a/infrastructure/stacks/api-layer/lambda.tf b/infrastructure/stacks/api-layer/lambda.tf
index b3eba156..cb38efb7 100644
--- a/infrastructure/stacks/api-layer/lambda.tf
+++ b/infrastructure/stacks/api-layer/lambda.tf
@@ -18,7 +18,7 @@ module "eligibility_signposting_lambda_function" {
   environment      = var.environment
   runtime          = "python3.13"
   lambda_func_name = "${terraform.workspace == "default" ? "" : "${terraform.workspace}-"}eligibility_signposting_api"
-  security_group_ids  = [data.aws_security_group.main_sg.id]
+  security_group_ids = [data.aws_security_group.main_sg.id]
   vpc_intra_subnets  = [for v in data.aws_subnet.private_subnets : v.id]
   file_name = "../../../dist/lambda.zip"
   handler   = "eligibility_signposting_api.app.lambda_handler"
@@ -33,6 +33,7 @@ module "eligibility_signposting_lambda_function" {
   stack_name                    = local.stack_name
   provisioned_concurrency_count = 5
   api_domain_name               = local.api_domain_name
+  environments_with_signing     = ["test", "preprod"]
 }
 
 
@@ -69,7 +70,7 @@ resource "aws_lambda_function" "create_secret_lambda" {
     variables = { SECRET_NAME = module.secrets_manager.aws_hashing_secret_name }
   }
   vpc_config {
-    subnet_ids = [for s in data.aws_subnet.private_subnets : s.id]
+    subnet_ids         = [for s in data.aws_subnet.private_subnets : s.id]
     security_group_ids = [data.aws_security_group.main_sg.id]
   }
 }
@@ -98,7 +99,7 @@ resource "aws_lambda_function" "promote_secret_lambda" {
     variables = { SECRET_NAME = module.secrets_manager.aws_hashing_secret_name }
   }
   vpc_config {
-    subnet_ids = [for s in data.aws_subnet.private_subnets : s.id]
+    subnet_ids         = [for s in data.aws_subnet.private_subnets : s.id]
     security_group_ids = [data.aws_security_group.main_sg.id]
   }
 }