Commit

Merge pull request #7956 from ministryofjustice/date_2024_09_30
GitHub Actions Code Formatter workflow
ASTRobinson authored Sep 30, 2024
2 parents dcc14ef + ddd137b commit 13fe4e0
Showing 17 changed files with 78 additions and 78 deletions.
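The changes in this commit appear to be formatting-only, which is why additions and deletions balance at 78 each: the Code Formatter workflow (presumably running terraform fmt or an equivalent) realigns equals signs within argument blocks and normalises spacing after commas, without changing any values. A minimal sketch of that kind of rewrite, using a hypothetical resource rather than one from this commit:

# Before formatting (hypothetical example)
resource "aws_sns_topic" "example" {
  name = "example-topic"
  kms_master_key_id = "alias/aws/sns"
  tags = merge(local.tags,{purpose = "example"})
}

# After terraform fmt: equals signs aligned, canonical spacing after commas
resource "aws_sns_topic" "example" {
  name              = "example-topic"
  kms_master_key_id = "alias/aws/sns"
  tags              = merge(local.tags, { purpose = "example" })
}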
@@ -1,44 +1,44 @@
# In client environments the dms_user_target_endpoint.write_database must be defined
# The endpoint for user (USER_ and PROBATION_AREA_USER) is the Delius primary database.
resource "aws_dms_endpoint" "dms_user_target_endpoint_db" {
-count = try(var.dms_config.user_target_endpoint.write_database, null) == null ? 0 : 1
-database_name = var.dms_config.user_target_endpoint.write_database
-endpoint_id = "user-data-to-${lower(var.dms_config.user_target_endpoint.write_database)}"
-endpoint_type = "target"
-engine_name = "oracle"
-username = local.dms_audit_username
-password = join(",",[jsondecode(data.aws_secretsmanager_secret_version.delius_core_application_passwords.secret_string)[local.dms_audit_username],jsondecode(data.aws_secretsmanager_secret_version.delius_core_application_passwords.secret_string)[local.dms_audit_username]])
-server_name = join(".",[var.oracle_db_server_names["primarydb"],var.account_config.route53_inner_zone_info.name])
-port = local.db_tcps_port
-extra_connection_attributes = "UseDirectPathFullLoad=false;ArchivedLogDestId=1;AdditionalArchivedLogDestId=32;asm_server=${join(".",[var.oracle_db_server_names["primarydb"],var.account_config.route53_inner_zone_info.name])}:${local.db_tcps_port}/+ASM;asm_user=${local.dms_audit_username};UseBFile=true;UseLogminerReader=false;"
-# We initially use an empty wallet for encryption - a populated wallet will be added by DMS configuration
-ssl_mode = "verify-ca"
-certificate_arn = aws_dms_certificate.empty_oracle_wallet.certificate_arn
-# Ignore subsequent replacement with a valid wallet
-# lifecycle {
-# ignore_changes = [certificate_arn]
-# }
-depends_on = [aws_dms_certificate.empty_oracle_wallet]
+count = try(var.dms_config.user_target_endpoint.write_database, null) == null ? 0 : 1
+database_name = var.dms_config.user_target_endpoint.write_database
+endpoint_id = "user-data-to-${lower(var.dms_config.user_target_endpoint.write_database)}"
+endpoint_type = "target"
+engine_name = "oracle"
+username = local.dms_audit_username
+password = join(",", [jsondecode(data.aws_secretsmanager_secret_version.delius_core_application_passwords.secret_string)[local.dms_audit_username], jsondecode(data.aws_secretsmanager_secret_version.delius_core_application_passwords.secret_string)[local.dms_audit_username]])
+server_name = join(".", [var.oracle_db_server_names["primarydb"], var.account_config.route53_inner_zone_info.name])
+port = local.db_tcps_port
+extra_connection_attributes = "UseDirectPathFullLoad=false;ArchivedLogDestId=1;AdditionalArchivedLogDestId=32;asm_server=${join(".", [var.oracle_db_server_names["primarydb"], var.account_config.route53_inner_zone_info.name])}:${local.db_tcps_port}/+ASM;asm_user=${local.dms_audit_username};UseBFile=true;UseLogminerReader=false;"
+# We initially use an empty wallet for encryption - a populated wallet will be added by DMS configuration
+ssl_mode = "verify-ca"
+certificate_arn = aws_dms_certificate.empty_oracle_wallet.certificate_arn
+# Ignore subsequent replacement with a valid wallet
+# lifecycle {
+# ignore_changes = [certificate_arn]
+# }
+depends_on = [aws_dms_certificate.empty_oracle_wallet]
}

# In repository environments the end point for audit (AUDITED_INTERACTION, BUSINESS_INTERACTION) is the Delius primary database.
resource "aws_dms_endpoint" "dms_audit_target_endpoint_db" {
-count = try(var.dms_config.audit_target_endpoint.write_database, null) == null ? 0 : 1
-database_name = var.dms_config.audit_target_endpoint.write_database
-endpoint_id = "audit-data-to-${lower(var.dms_config.audit_target_endpoint.write_database)}"
-endpoint_type = "target"
-engine_name = "oracle"
-username = local.dms_audit_username
-password = join(",",[jsondecode(data.aws_secretsmanager_secret_version.delius_core_application_passwords.secret_string)[local.dms_audit_username],jsondecode(data.aws_secretsmanager_secret_version.delius_core_application_passwords.secret_string)[local.dms_audit_username]])
-server_name = join(".",[var.oracle_db_server_names["primarydb"],var.account_config.route53_inner_zone_info.name])
-port = local.db_tcps_port
-extra_connection_attributes = "UseDirectPathFullLoad=false;ArchivedLogDestId=1;AdditionalArchivedLogDestId=32;asm_server=${join(".",[var.oracle_db_server_names["primarydb"],var.account_config.route53_inner_zone_info.name])}:${local.db_tcps_port}/+ASM;asm_user=${local.dms_audit_username};UseBFile=true;UseLogminerReader=false;"
-# We initially use an empty wallet for encryption - a populated wallet will be added by DMS configuration
-ssl_mode = "verify-ca"
-certificate_arn = aws_dms_certificate.empty_oracle_wallet.certificate_arn
-# Ignore subsequent replacement with a valid wallet
-# lifecycle {
-# ignore_changes = [certificate_arn]
-# }
-depends_on = [aws_dms_certificate.empty_oracle_wallet]
+count = try(var.dms_config.audit_target_endpoint.write_database, null) == null ? 0 : 1
+database_name = var.dms_config.audit_target_endpoint.write_database
+endpoint_id = "audit-data-to-${lower(var.dms_config.audit_target_endpoint.write_database)}"
+endpoint_type = "target"
+engine_name = "oracle"
+username = local.dms_audit_username
+password = join(",", [jsondecode(data.aws_secretsmanager_secret_version.delius_core_application_passwords.secret_string)[local.dms_audit_username], jsondecode(data.aws_secretsmanager_secret_version.delius_core_application_passwords.secret_string)[local.dms_audit_username]])
+server_name = join(".", [var.oracle_db_server_names["primarydb"], var.account_config.route53_inner_zone_info.name])
+port = local.db_tcps_port
+extra_connection_attributes = "UseDirectPathFullLoad=false;ArchivedLogDestId=1;AdditionalArchivedLogDestId=32;asm_server=${join(".", [var.oracle_db_server_names["primarydb"], var.account_config.route53_inner_zone_info.name])}:${local.db_tcps_port}/+ASM;asm_user=${local.dms_audit_username};UseBFile=true;UseLogminerReader=false;"
+# We initially use an empty wallet for encryption - a populated wallet will be added by DMS configuration
+ssl_mode = "verify-ca"
+certificate_arn = aws_dms_certificate.empty_oracle_wallet.certificate_arn
+# Ignore subsequent replacement with a valid wallet
+# lifecycle {
+# ignore_changes = [certificate_arn]
+# }
+depends_on = [aws_dms_certificate.empty_oracle_wallet]
}
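Both endpoints above are conditional: each is created only when the corresponding write_database is set inside var.dms_config, otherwise count evaluates to 0. A hedged sketch of the variable shape implied by those try() expressions follows; the attribute names come from this file, but the actual type definition in the module is an assumption.

variable "dms_config" {
  # Hypothetical shape inferred from the try() expressions above
  type = object({
    user_target_endpoint  = optional(object({ write_database = optional(string) }), {})
    audit_target_endpoint = optional(object({ write_database = optional(string) }), {})
  })
  default = {}
}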
22 changes: 11 additions & 11 deletions terraform/environments/digital-prison-reporting/main.tf
@@ -626,17 +626,17 @@ module "glue_s3_data_reconciliation_job" {
)

arguments = merge(local.glue_datahub_job_extra_operational_datastore_args, {
"--extra-jars" = local.glue_jobs_latest_jar_location
"--extra-files" = local.shared_log4j_properties_path
"--class" = "uk.gov.justice.digital.job.DataReconciliationJob"
"--dpr.aws.region" = local.account_region
"--dpr.config.s3.bucket" = module.s3_glue_job_bucket.bucket_id
"--dpr.log.level" = local.glue_job_common_log_level
"--dpr.raw.s3.path" = "s3://${module.s3_raw_bucket.bucket_id}/"
"--dpr.raw.archive.s3.path" = "s3://${module.s3_raw_archive_bucket.bucket_id}/"
"--dpr.structured.s3.path" = "s3://${module.s3_structured_bucket.bucket_id}/"
"--dpr.curated.s3.path" = "s3://${module.s3_curated_bucket.bucket_id}/"
"--dpr.contract.registryName" = module.s3_schema_registry_bucket.bucket_id
"--extra-jars" = local.glue_jobs_latest_jar_location
"--extra-files" = local.shared_log4j_properties_path
"--class" = "uk.gov.justice.digital.job.DataReconciliationJob"
"--dpr.aws.region" = local.account_region
"--dpr.config.s3.bucket" = module.s3_glue_job_bucket.bucket_id
"--dpr.log.level" = local.glue_job_common_log_level
"--dpr.raw.s3.path" = "s3://${module.s3_raw_bucket.bucket_id}/"
"--dpr.raw.archive.s3.path" = "s3://${module.s3_raw_archive_bucket.bucket_id}/"
"--dpr.structured.s3.path" = "s3://${module.s3_structured_bucket.bucket_id}/"
"--dpr.curated.s3.path" = "s3://${module.s3_curated_bucket.bucket_id}/"
"--dpr.contract.registryName" = module.s3_schema_registry_bucket.bucket_id
# dpr.reconciliation.datasource properties can be modified to configure
# the job for either Nomis, a DPS database or some other data store
"--dpr.reconciliation.datasource.glue.connection.name" = aws_glue_connection.glue_nomis_connection[0].name
@@ -1,6 +1,6 @@
resource "aws_dynamodb_table" "this" {
-#checkov:skip=CKV_AWS_119: "Ensure DynamoDB Tables are encrypted using a KMS Customer Managed CMK"
-#checkov:skip=CKV_AWS_28: "Ensure DynamoDB point in time recovery (backup) is enabled. TO DO Will be addressed as part of https://dsdmoj.atlassian.net/browse/DPR2-1083"
+#checkov:skip=CKV_AWS_119: "Ensure DynamoDB Tables are encrypted using a KMS Customer Managed CMK"
+#checkov:skip=CKV_AWS_28: "Ensure DynamoDB point in time recovery (backup) is enabled. TO DO Will be addressed as part of https://dsdmoj.atlassian.net/browse/DPR2-1083"

count = var.create_table && !var.autoscaling_enabled ? 1 : 0

@@ -84,10 +84,10 @@ EOF
}

data "aws_iam_policy_document" "extra-policy-document" {
-#checkov:skip=CKV_AWS_356: "Ensure no IAM policies documents allow "*" as a statement's resource for restrictable actions. TO DO Will be addressed as part of https://dsdmoj.atlassian.net/browse/DPR2-1083"
-#checkov:skip=CKV_AWS_109: "Ensure IAM policies does not allow permissions management / resource exposure without constraints"
-#checkov:skip=CKV_AWS_111: "Ensure IAM policies does not allow write access without constraints"
-#checkov:skip=CKV_AWS_110: "Ensure IAM policies does not allow privilege escalation"
+#checkov:skip=CKV_AWS_356: "Ensure no IAM policies documents allow "*" as a statement's resource for restrictable actions. TO DO Will be addressed as part of https://dsdmoj.atlassian.net/browse/DPR2-1083"
+#checkov:skip=CKV_AWS_109: "Ensure IAM policies does not allow permissions management / resource exposure without constraints"
+#checkov:skip=CKV_AWS_111: "Ensure IAM policies does not allow write access without constraints"
+#checkov:skip=CKV_AWS_110: "Ensure IAM policies does not allow privilege escalation"

statement {
actions = [
@@ -1,7 +1,7 @@
resource "aws_scheduler_schedule" "schedule" {
count = var.create_pipeline_schedule ? 1 : 0

-state = var.enable_pipeline_schedule ? "ENABLED" : "DISABLED"
+state = var.enable_pipeline_schedule ? "ENABLED" : "DISABLED"
name = var.pipeline_name
description = var.description
flexible_time_window {
@@ -102,7 +102,7 @@ resource "aws_redshift_cluster_iam_roles" "this" {
################################################################################

resource "aws_redshift_parameter_group" "this" {
-#checkov:skip=CKV_AWS_105: "Ensure Redshift uses SSL"
+#checkov:skip=CKV_AWS_105: "Ensure Redshift uses SSL"

count = var.create_redshift_cluster && var.create_parameter_group ? 1 : 0

4 changes: 2 additions & 2 deletions terraform/environments/digital-prison-reporting/policy.tf
@@ -562,8 +562,8 @@ data "aws_iam_policy_document" "domain_builder_publish" {
}

resource "aws_iam_policy" "domain_builder_publish_policy" {
-#checkov:skip=CKV_AWS_356: "Ensure no IAM policies documents allow "*" as a statement's resource for restrictable actions"
-#checkov:skip=CKV_AWS_111: "Ensure IAM policies does not allow write access without constraints"
+#checkov:skip=CKV_AWS_356: "Ensure no IAM policies documents allow "*" as a statement's resource for restrictable actions"
+#checkov:skip=CKV_AWS_111: "Ensure IAM policies does not allow write access without constraints"

name = "${local.project}-domain-builder-publish-policy"
description = "Additional policy to allow execution of query publish in Athena"
@@ -101,7 +101,7 @@ resource "aws_api_gateway_deployment" "deployment" {
aws_api_gateway_resource.get_zipped_step_function_invoke,
aws_api_gateway_method.get_zipped_step_function_invoke,
aws_api_gateway_integration.get_zipped_step_function_invoke,
-aws_api_gateway_integration_response.integration_response_200,
+aws_api_gateway_integration_response.integration_response_200,
]))
}

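The list hashed above follows the standard API Gateway redeployment-trigger pattern: the deployment declares a triggers hash over every resource whose change should force a new deployment. A compressed sketch of the surrounding block, with the rest_api_id reference assumed and the longer dependency list elided:

resource "aws_api_gateway_deployment" "deployment" {
  rest_api_id = aws_api_gateway_rest_api.this.id # assumed reference

  triggers = {
    redeployment = sha1(jsonencode([
      aws_api_gateway_resource.get_zipped_step_function_invoke,
      aws_api_gateway_method.get_zipped_step_function_invoke,
      aws_api_gateway_integration.get_zipped_step_function_invoke,
      aws_api_gateway_integration_response.integration_response_200,
    ]))
  }

  lifecycle {
    create_before_destroy = true
  }
}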
@@ -122,7 +122,7 @@ resource "aws_api_gateway_integration_response" "integration_response_200" {
resource_id = aws_api_gateway_resource.get_zipped_step_function_invoke.id
http_method = aws_api_gateway_method.get_zipped_step_function_invoke.http_method
status_code = "200"

response_templates = {
"application/json" = ""
}
@@ -13,4 +13,4 @@
"mattprice": "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIEiltWt3w24iTeybMTXYLNiS8WusBqTw929Q4kMtFwuA"
}
}
-}
+}
@@ -2,13 +2,13 @@

resource "aws_s3_bucket_notification" "data_store" {
depends_on = [aws_sns_topic_policy.s3_events_policy]
-bucket = module.s3-data-bucket.bucket.id
+bucket = module.s3-data-bucket.bucket.id

# Only for copy events as those are events triggered by data being copied
#  from landing bucket.
topic {
topic_arn = aws_sns_topic.s3_events.arn
-events = [
+events = [
"s3:ObjectCreated:*"
]
}
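The depends_on above refers to aws_sns_topic_policy.s3_events_policy, which has to grant S3 permission to publish before the bucket notification can be created. A hedged sketch of what such a policy typically looks like; the statement details are assumptions rather than code from this repository.

data "aws_iam_policy_document" "s3_events" {
  statement {
    sid     = "AllowS3Publish"
    actions = ["sns:Publish"]
    principals {
      type        = "Service"
      identifiers = ["s3.amazonaws.com"]
    }
    resources = [aws_sns_topic.s3_events.arn]
    condition {
      test     = "ArnLike"
      variable = "aws:SourceArn"
      values   = [module.s3-data-bucket.bucket.arn]
    }
  }
}

resource "aws_sns_topic_policy" "s3_events_policy" {
  arn    = aws_sns_topic.s3_events.arn
  policy = data.aws_iam_policy_document.s3_events.json
}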
@@ -181,7 +181,7 @@ module "send_table_to_ap" {
environment_variables = {
AP_DESTINATION_BUCKET = local.land_bucket
}
-reserved_concurrent_executions = 100
+reserved_concurrent_executions = 100
}


4 changes: 2 additions & 2 deletions terraform/environments/electronic-monitoring-data/main.tf
@@ -30,7 +30,7 @@

module "buddi" {
source = "./modules/landing_zone/"
-count = local.is-production ? 1 : 0
+count = local.is-production ? 1 : 0

supplier = "buddi"

@@ -86,7 +86,7 @@ module "buddi" {

module "g4s" {
source = "./modules/landing_zone/"
-count = local.is-production ? 1 : 0
+count = local.is-production ? 1 : 0

supplier = "g4s"

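Both landing-zone modules above are gated on local.is-production, so they only exist in the production account. A hypothetical sketch of how that local is commonly derived from the workspace name; the exact expression used in this repository may differ.

locals {
  # Assumed derivation; the shared Modernisation Platform locals may define this differently
  is-production = terraform.workspace == "electronic-monitoring-data-production"
}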
@@ -40,17 +40,17 @@ resource "random_string" "this" {

module "landing-bucket" {
source = "github.com/ministryofjustice/modernisation-platform-terraform-s3-bucket?ref=52a40b0"
-bucket_name = "${var.supplier}-${random_string.this.result}"
+bucket_name = "${var.supplier}-${random_string.this.result}"
replication_enabled = false
-providers = {
+providers = {
# Here we use the default provider Region for replication. Destination buckets can be within the same Region as the
# source bucket. On the other hand, if you need to enable cross-region replication, please contact the Modernisation
# Platform team to add a new provider for the additional Region.
# Leave this provider block in even if you are not using replication
aws.bucket-replication = aws
}
-versioning_enabled = false
-lifecycle_rule = [
+versioning_enabled = false
+lifecycle_rule = [
{
id = "main"
enabled = "Enabled"
@@ -91,7 +91,7 @@ module "landing-bucket" {
}
]

-tags = merge(var.local_tags, { resource-type = "landing-bucket" })
+tags = merge(var.local_tags, { resource-type = "landing-bucket" })

}

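The providers map above passes an aws.bucket-replication alias into the S3 module even though replication_enabled is false, because the module expects the alias to be supplied either way. A minimal sketch of the matching provider alias declaration; the region value is a placeholder assumption.

provider "aws" {
  alias  = "bucket-replication"
  region = "eu-west-2" # placeholder; same-Region replication as described in the comment above
}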
4 changes: 2 additions & 2 deletions terraform/environments/electronic-monitoring-data/s3.tf
@@ -544,7 +544,7 @@ module "s3-data-bucket" {
# ------------------------------------------------------------------------

module "s3-fms-landing-bucket" {
source = "github.com/ministryofjustice/modernisation-platform-terraform-s3-bucket?ref=f759060"
source = "github.com/ministryofjustice/modernisation-platform-terraform-s3-bucket?ref=f759060"

for_each = local.live_feed_levels

@@ -602,7 +602,7 @@ module "s3-fms-landing-bucket" {
# ------------------------------------------------------------------------

module "s3-mdss-landing-bucket" {
source = "github.com/ministryofjustice/modernisation-platform-terraform-s3-bucket?ref=f759060"
source = "github.com/ministryofjustice/modernisation-platform-terraform-s3-bucket?ref=f759060"

for_each = local.live_feed_levels

6 changes: 3 additions & 3 deletions terraform/environments/ppud/eventbridge.tf
@@ -1,9 +1,9 @@
# Eventbridge Rule for Certificate Expiration

resource "aws_cloudwatch_event_rule" "certificate_approaching_expiration_dev" {
-count = local.is-development == true ? 1 : 0
-name = "Certificate-Approaching-Expiration"
-description = "PPUD certificate is approaching expiration"
+count = local.is-development == true ? 1 : 0
+name = "Certificate-Approaching-Expiration"
+description = "PPUD certificate is approaching expiration"
event_pattern = <<EOF
{
"source": [ "aws.acm"],
4 changes: 2 additions & 2 deletions terraform/environments/tribunals/dns_ssl.tf
@@ -41,8 +41,8 @@ resource "aws_route53_record" "cert_validation" {

// sub-domain validation only required for non-production sites
resource "aws_route53_record" "external_validation_subdomain" {
-count = local.is-production ? 0 : 1
-provider = aws.core-vpc
+count = local.is-production ? 0 : 1
+provider = aws.core-vpc

allow_overwrite = true
name = local.domain_name_sub[0]
2 changes: 1 addition & 1 deletion terraform/environments/xhibit-portal/shield.tf
@@ -4,7 +4,7 @@ locals {
}

data "aws_shield_protection" "excluded" {
-for_each = toset(local.excluded_resource_arns)
+for_each = toset(local.excluded_resource_arns)
resource_arn = each.key
}

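The locals block this hunk references is cut off by the diff view; local.excluded_resource_arns presumably lists the ARNs whose Shield protections are looked up here rather than managed. A hypothetical sketch of its shape, with an illustrative ARN only:

locals {
  excluded_resource_arns = [
    # Illustrative ARN, not taken from this repository
    "arn:aws:elasticloadbalancing:eu-west-2:123456789012:loadbalancer/app/example-lb/abcdef1234567890",
  ]
}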