Browse Source

Adds a codebuild module for the documentation from content_source

To be tagged v4.1.10
Fred Damstra [afs macbook] 3 years ago
parent
commit
2a9f97b0b7

+ 36 - 0
base/codebuild_splunk_docs/README.md

@@ -0,0 +1,36 @@
+# Codebuild Splunk Docs
+
+Based heavily off codebuild_artifact and codebuild_splunk_apps, this module creates a CodeBuild job to deposit docs into the `xdr-[environment]-portal-shared-artifacts` bucket.
+
+It uses an ECR container from common services.
+
+# How to Use this Module
+
+This module is where the CodeBuild artifacts are created. It uses the Terraform GitHub Provider and requires a Personal Access Token. This should be your Personal Access Token, not mdr-aws-codebuild's token (see the how-to below). The provider will look in the environment variables for the token.
+
+```
+export GITHUB_TOKEN=<github_token>
+```
+
+This module should NOT create the github repo. That is a manual process. I am not comfortable with terraform adding/removing github repos. The github repo should have the same name as the name variable in the terragrunt.hcl file. The user associated with the Personal Access Token needs to have admin permissions on the github repo.
+
+## Github Service Account ( mdr-aws-codebuild )
+
+AWS CodeBuild needs a Github Personal Access Token to pull code after the code in a repository has been updated. This is the `mdr-aws-codebuild` account. The secret github token is stored in the C2 account and automatically added.
+
+Despite what the Web Interface suggests, there can be only one GHE token per AWS account and region.
+
+The `mdr-aws-codebuild` user must have read access to the repositories you wish to build from.
+
+## Creating a Personal Access Token
+
+1) In github, go to `settings->Developer Settings->Personal Access Tokens`
+2) Click 'Generate Token'
+3) Give it a name (e.g. `terragrunt`)
+4) Give it the following permission groups:
+  * repo
+  * admin:repo_hook
+5) Create the token.
+
+Record it someplace safe.
+

+ 93 - 0
base/codebuild_splunk_docs/cloudwatch.tf

@@ -0,0 +1,93 @@
+# creates a role and schedules a build for each server type
+# 
+# NOTE(review): the comment above looks like a leftover from codebuild_splunk_apps;
+# this module schedules a single docs build, not one per server type -- confirm.
+#
+# Being polite aws users, we randomize the schedule over the hours of the early morning
+# The random values are chosen once at first apply and then held stable in state.
+resource "random_integer" "hour" {
+  min = 5 # Midnight ET
+  max = 11 # 6am ET
+}
+
+resource "random_integer" "minute" {
+  min = 0
+  max = 59
+}
+
+# Nightly trigger. cron() fields are: minute hour day-of-month month day-of-week year,
+# evaluated in UTC -- hence hours 5-11 UTC == midnight-6am ET above.
+resource "aws_cloudwatch_event_rule" "schedule_rule" {
+  name = "scheduled_build_docs_${var.repository}"
+  schedule_expression = "cron(${random_integer.minute.result} ${random_integer.hour.result} * * ? *)"
+}
+
+# Role assumed by BOTH EventBridge (to start the scheduled build via the event
+# target below) and CodeBuild itself; trusting the two services in one role
+# keeps the module small.
+resource "aws_iam_role" "codebuild_role" {
+  name_prefix = "splunk_docs_codebuild_role"
+  path        = "/aws_services/"
+
+  assume_role_policy = <<EOF
+{
+  "Version": "2012-10-17",
+  "Statement": [
+    {
+      "Effect": "Allow",
+      "Principal": {
+        "Service": [
+          "events.amazonaws.com",
+          "codebuild.amazonaws.com"
+        ]
+      },
+      "Action": "sts:AssumeRole"
+    }
+  ]
+}
+EOF
+}
+
+# Permissions for the scheduler role: write CodeBuild log streams and
+# start/inspect builds.
+# NOTE(review): the second statement uses Resource "*" -- scoping it to the
+# docs project's ARN would be tighter; confirm nothing else relies on the
+# wildcard before narrowing. (The JSON heredoc below cannot carry comments.)
+resource "aws_iam_policy" "codebuild_policy" {
+  name_prefix = "splunk_docs_policy"
+  path        = "/aws_services/"
+
+  policy = <<POLICY
+{
+    "Version": "2012-10-17",
+    "Statement": [
+        {
+            "Effect": "Allow",
+            "Resource": [
+                "arn:${var.aws_partition}:logs:${var.aws_region}:${var.aws_account_id}:log-group:/aws/codebuild/*"
+            ],
+            "Action": [
+                "logs:CreateLogGroup",
+                "logs:CreateLogStream",
+                "logs:PutLogEvents"
+            ]
+        },
+        {
+           "Action": [
+              "codebuild:StartBuild",
+              "codebuild:StopBuild",
+              "codebuild:BatchGet*",
+              "codebuild:Get*",
+              "codebuild:List*",
+              "codecommit:GetBranch",
+              "codecommit:GetCommit",
+              "codecommit:GetRepository",
+              "codecommit:ListBranches"
+            ],
+            "Effect": "Allow",
+            "Resource": "*"
+          }
+    ]
+}
+POLICY
+}
+
+# Attach the scheduler policy to the scheduler role.
+# Fix: aws_iam_policy_attachment claims EXCLUSIVE ownership of the policy's
+# attachments -- it detaches the policy from any user/role/group not listed
+# here, which can silently break other consumers. aws_iam_role_policy_attachment
+# attaches without that side effect. (Requires a one-time state move when
+# upgrading an existing deployment.) Also drops the deprecated "${...}"
+# interpolation-only syntax.
+resource "aws_iam_role_policy_attachment" "service_role_attachment" {
+  role       = aws_iam_role.codebuild_role.name
+  policy_arn = aws_iam_policy.codebuild_policy.arn
+}
+
+# Wire the schedule rule to the CodeBuild project; EventBridge assumes
+# codebuild_role to call codebuild:StartBuild.
+resource "aws_cloudwatch_event_target" "trigger_build" {
+  target_id = "trigger_build_docs_${var.repository}"
+  rule      = aws_cloudwatch_event_rule.schedule_rule.name
+  # Use the explicit arn attribute: .id happens to equal the ARN for
+  # aws_codebuild_project, but .arn states the intent and is stable across
+  # provider versions.
+  arn       = aws_codebuild_project.this.arn
+  role_arn  = aws_iam_role.codebuild_role.arn
+}

+ 27 - 0
base/codebuild_splunk_docs/ghe-key.tf

@@ -0,0 +1,27 @@
+# The shared mdr-aws-codebuild GHE token lives in the C2 account; it is read
+# through the aws.c2 provider alias, which the caller must pass in.
+data "aws_secretsmanager_secret" "ghe-key" {
+  name = "GHE/mdr-aws-codebuild/key"
+  provider = aws.c2
+}
+
+data "aws_secretsmanager_secret_version" "ghe-key" {
+  secret_id = data.aws_secretsmanager_secret.ghe-key.id
+  provider = aws.c2
+}
+
+#locals {
+#  If key was in json format, we would need to decode it.
+#  secret_ghe_key = jsondecode(data.aws_secretsmanager_secret_version.ghe-key.secret_string)
+#}
+
+
+# Note some AWS craziness here. The GitHub credential is not tied to a build, even though it _looks_
+# like it is in the Web UI. There can only be one GitHub credential per account+region::
+# https://docs.aws.amazon.com/cdk/api/v1/docs/@aws-cdk_aws-codebuild.GitHubSourceCredentials.html
+#
+# "Note: CodeBuild only allows a single credential for GitHub to be saved in a given AWS account 
+#        in a given region - any attempt to add more than one will result in an error."
+#
+# NOTE(review): the raw secret_string ends up in Terraform state -- treat the
+# state backend as sensitive.
+resource "aws_codebuild_source_credential" "github_token" {
+  auth_type   = "PERSONAL_ACCESS_TOKEN"
+  server_type = "GITHUB_ENTERPRISE"
+  token       = data.aws_secretsmanager_secret_version.ghe-key.secret_string
+}

+ 117 - 0
base/codebuild_splunk_docs/iam.tf

@@ -0,0 +1,117 @@
+# Service role the docs build runs as (distinct from the scheduler role in
+# cloudwatch.tf); only CodeBuild may assume it.
+resource "aws_iam_role" "codebuild_splunk_docs_role" {
+  name_prefix = "codebuild_splunk_docs_role"
+  path        = "/aws_services/"
+
+  assume_role_policy = <<EOF
+{
+    "Version": "2012-10-17",
+    "Statement": [
+      {
+        "Effect": "Allow",
+        "Principal": {
+          "Service": [
+            "codebuild.amazonaws.com"
+            ]
+        },
+        "Action": "sts:AssumeRole"
+      }
+    ]
+  }
+EOF
+}
+
+# Non-exclusive attachment of the build policy to the build service role.
+resource "aws_iam_role_policy_attachment" "codebuild_splunk_docs_role_policy_attach" {
+  role       = aws_iam_role.codebuild_splunk_docs_role.name
+  policy_arn = aws_iam_policy.codebuild_splunk_docs_policy.arn
+}
+
+# Some things about this policy I'm not perfectly sure about, like
+# should the account number be hardcoded?  Also, it reads like we'll have to
+# update it each time we have a new repository added to codecommit - that
+# or we'll need to authorize the codebuild role to be able to pull from any 
+# codecommit repo.  Which may be fine?
+#
+# Fix: the WriteToECR statement listed "ecr:GetAuthorizationToken" twice;
+# the duplicate is removed.
+# NOTE(review): the fourth statement grants Put/Get*/List/Delete on
+# "arn:...:s3:::*" (every bucket in the account), which makes the preceding
+# xdr-<env>-codebuild_splunk_apps resource redundant -- and S3 bucket names
+# cannot contain underscores, so that bucket cannot exist as written.
+# Consider scoping this down to the portal-shared-artifacts bucket.
+resource "aws_iam_policy" "codebuild_splunk_docs_policy" {
+  name_prefix = "codebuild_splunk_docs_policy"
+  description = "Policy for AWS codebuild to build and store artifacts"
+  path     = "/aws_services/"
+
+  policy = <<EOF
+{
+    "Version": "2012-10-17",
+    "Statement": [
+        {
+            "Effect": "Allow",
+            "Resource": [
+                "arn:${var.aws_partition}:logs:${var.aws_region}:${var.aws_account_id}:log-group:/aws/codebuild/*"
+            ],
+            "Action": [
+                "logs:CreateLogGroup",
+                "logs:CreateLogStream",
+                "logs:PutLogEvents"
+            ]
+        },
+        {
+            "Effect": "Allow",
+            "Resource": [
+                "arn:${var.aws_partition}:s3:::codepipeline-${var.aws_region}-*"
+            ],
+            "Action": [
+                "s3:PutObject",
+                "s3:GetObject",
+                "s3:GetObjectVersion"
+            ]
+        },
+        {
+            "Effect": "Allow",
+            "Resource": [
+                "arn:${var.aws_partition}:codecommit:${var.aws_region}:${var.aws_account_id}:*"
+            ],
+            "Action": [
+                "codecommit:GitPull"
+            ]
+        },
+        {
+            "Effect": "Allow",
+            "Resource": [
+                "arn:${var.aws_partition}:s3:::xdr-${var.environment}-codebuild_splunk_apps/*",
+                "arn:${var.aws_partition}:s3:::*"
+            ],
+            "Action": [
+                "s3:PutObject",
+                "s3:GetObject*",
+                "s3:ListBucket",
+                "s3:DeleteObject"
+            ]
+        },
+        {
+            "Sid": "WriteToECR",
+            "Effect": "Allow",
+            "Resource": [
+                "*"
+            ],
+            "Action": [
+              "ecr:GetAuthorizationToken",
+              "ecr:BatchCheckLayerAvailability",
+              "ecr:CompleteLayerUpload",
+              "ecr:InitiateLayerUpload",
+              "ecr:PutImage",
+              "ecr:UploadLayerPart"
+            ]
+        },
+        {
+            "Sid": "PullFromECR",
+            "Effect": "Allow",
+            "Resource": [
+                "*"
+            ],
+            "Action": [
+              "ecr:GetDownloadUrlForLayer",
+              "ecr:BatchGetImage",
+              "ecr:BatchCheckLayerAvailability"
+            ]
+        }
+    ]
+}
+EOF
+}

+ 149 - 0
base/codebuild_splunk_docs/kms.tf

@@ -0,0 +1,149 @@
+#Codebuild artifacts by rule must be encrypted by a KMS key
+# using the default aws/s3 key doesn't work with cross-account access
+resource "aws_kms_key" "s3_codebuild_splunk_docs_artifacts" {
+  description             = "Codebuild Artifacts S3 bucket for Documentation - ${var.repository}"
+  enable_key_rotation     = true
+  policy                  = data.aws_iam_policy_document.codebuild_splunk_docs_kms_key_encryption_policy.json
+}
+
+# NOTE(review): "codebuilt" is a typo in the resource label; kept as-is since
+# renaming the Terraform address would force a destroy/recreate of the alias.
+resource "aws_kms_alias" "codebuilt-artifacts" {
+  name          = "alias/codebuild-splunk-docs-${var.repository}"
+  target_key_id = aws_kms_key.s3_codebuild_splunk_docs_artifacts.key_id
+}
+
+# Key policy for the docs artifacts key.
+#
+# Fix: IAM condition keys are colon-separated ("kms:ViaService",
+# "kms:CallerAccount"). The original "kms.ViaService"/"kms.CallerAccount"
+# spellings are not valid condition keys, so the S3 pass-through statement did
+# not evaluate as intended.
+data "aws_iam_policy_document" "codebuild_splunk_docs_kms_key_encryption_policy" {
+  #policy_id = "key-consolepolicy-3"
+
+  # Full control for the terraform role and the break-glass admin user.
+  statement {
+    sid    = "Enable IAM User Permissions"
+    effect = "Allow"
+    principals {
+      type = "AWS"
+      identifiers = [
+        "arn:${var.aws_partition}:iam::${var.aws_account_id}:role/user/mdr_terraformer",
+        "arn:${var.aws_partition}:iam::${var.aws_account_id}:user/MDRAdmin"
+      ]
+    }
+    actions   = [ "kms:*" ]
+    resources = [ "*" ]
+  }
+
+  # Key administration (no Encrypt/Decrypt) for the terraform role.
+  statement {
+    sid    = "Allow access for Key Administrators"
+    effect = "Allow"
+    principals {
+      type = "AWS"
+      identifiers = [
+        "arn:${var.aws_partition}:iam::${var.aws_account_id}:role/user/mdr_terraformer",
+      ]
+    }
+    actions = [
+      "kms:Create*",
+      "kms:Describe*",
+      "kms:Enable*",
+      "kms:List*",
+      "kms:Put*",
+      "kms:Update*",
+      "kms:Revoke*",
+      "kms:Disable*",
+      "kms:Get*",
+      "kms:Delete*",
+      "kms:TagResource",
+      "kms:UntagResource",
+      "kms:ScheduleKeyDeletion",
+      "kms:CancelKeyDeletion"
+    ]
+    resources = [ "*" ]
+  }
+
+  # Crypto operations for the default instance role.
+  statement {
+    sid    = "Allow use of the key"
+    effect = "Allow"
+    principals {
+      type = "AWS"
+      identifiers = [
+        "arn:${var.aws_partition}:iam::${var.aws_account_id}:role/msoc-default-instance-role"
+      ]
+    }
+    actions = [
+      "kms:Encrypt",
+      "kms:Decrypt",
+      "kms:ReEncrypt*",
+      "kms:GenerateDataKey*",
+      "kms:DescribeKey"
+    ]
+    resources = [ "*" ]
+  }
+
+  # Any principal in THIS account may use the key, but only via S3 in this
+  # region (the kms:ViaService + kms:CallerAccount conditions below).
+  statement {
+    sid    = "Allow access through Amazon S3 for all principals in the account that are authorized to use Amazon S3"
+    effect = "Allow"
+    principals {
+      type = "AWS"
+      identifiers = [ "*" ]
+    }
+    actions = [
+      "kms:Encrypt",
+      "kms:Decrypt",
+      "kms:ReEncrypt*",
+      "kms:GenerateDataKey*",
+      "kms:DescribeKey"
+    ]
+    resources = [ "*" ]
+
+    condition {
+      test     = "StringEquals"
+      variable = "kms:ViaService"
+      values   = [ "s3.${var.aws_region}.amazonaws.com" ]
+    }
+
+    condition {
+      test     = "StringEquals"
+      variable = "kms:CallerAccount"
+      values   = [ var.aws_account_id ]
+    }
+  }
+
+  # The build service role must encrypt artifacts it writes to S3.
+  statement {
+    sid    = "Allow access from the codebuild role"
+    effect = "Allow"
+    principals {
+      type = "AWS"
+      identifiers = [
+        aws_iam_role.codebuild_splunk_docs_role.arn
+      ]
+    }
+    actions = [
+      "kms:Encrypt",
+      "kms:Decrypt",
+      "kms:ReEncrypt*",
+      "kms:GenerateDataKey*",
+      "kms:DescribeKey"
+    ]
+    resources = [ "*" ]
+  }
+
+  # Grants for AWS resources (e.g. EBS/S3 integrations) on behalf of the
+  # instance roles; restricted to AWS-resource grants by the Bool condition.
+  statement {
+    sid    = "Allow attachment of persistent resources"
+    effect = "Allow"
+    principals {
+      type = "AWS"
+      identifiers = [
+        "arn:${var.aws_partition}:iam::${var.aws_account_id}:role/msoc-default-instance-role",
+        "arn:${var.aws_partition}:iam::${var.aws_account_id}:role/portal-instance-role"
+      ]
+    }
+    actions = [
+      "kms:CreateGrant",
+      "kms:ListGrants",
+      "kms:RevokeGrant"
+    ]
+    resources = [ "*" ]
+    condition {
+      test     = "Bool"
+      variable = "kms:GrantIsForAWSResource"
+      values   = [ "true" ]
+    }
+  }
+}

+ 78 - 0
base/codebuild_splunk_docs/main.tf

@@ -0,0 +1,78 @@
+# The repo must already exist -- creation is deliberately manual (see README).
+data "github_repository" "this" {
+    name    = var.repository
+}
+
+# The docs build itself: clones the GHE repo, runs buildspec.docs.yml inside
+# the content_generator container, and deposits output into the
+# portal-shared-artifacts bucket.
+resource "aws_codebuild_project" "this" {
+  name                  = "splunk_docs_${var.repository}"
+  description           = "Splunk Documentation build from ${var.repository} repository"
+  service_role          = aws_iam_role.codebuild_splunk_docs_role.arn
+  encryption_key        = aws_kms_key.s3_codebuild_splunk_docs_artifacts.arn
+  badge_enabled         = var.badge_enabled
+  concurrent_build_limit = 1
+  build_timeout          = 60
+
+  source {
+    type                = "GITHUB_ENTERPRISE"
+    buildspec           = "buildspec.docs.yml"
+    location            = data.github_repository.this.http_clone_url
+    report_build_status = true
+    git_clone_depth     = 1
+
+    git_submodules_config {
+      fetch_submodules = true
+    }
+  }
+
+  source_version = var.source_version
+
+  environment {
+    compute_type        = "BUILD_GENERAL1_SMALL"
+    # NOTE(review): region is hardcoded here even though var.aws_region exists --
+    # fine if the common-services ECR only lives in us-gov-east-1; confirm
+    # before parameterizing.
+    image               = "${var.common_services_account}.dkr.ecr.us-gov-east-1.amazonaws.com/content_generator:latest"
+    image_pull_credentials_type = "SERVICE_ROLE"
+    type                = "LINUX_CONTAINER"
+    environment_variable {
+      name = "ARTIFACTS_PATH"
+      type = "PLAINTEXT"
+      value = "s3://xdr-${var.environment}-portal-shared-artifacts/splunk-search-docs/${var.repository}/"
+    }
+  }
+
+  # Example: s3://xdr-moose-test-splunk-docs/sh-es/content_source/
+  artifacts {
+    type                = "S3"
+    location            = "xdr-${var.environment}-portal-shared-artifacts"
+    name                = var.repository
+    path                = "/splunk-search-docs/"
+    namespace_type      = "NONE"
+    packaging           = "NONE"
+  }
+
+  tags = merge(var.standard_tags, var.tags)
+
+  # Govcloud incompatible with "project visibility"
+  # See https://github.com/hashicorp/terraform-provider-aws/issues/22473#issuecomment-1081187035
+  lifecycle { ignore_changes = [ project_visibility ] }
+}
+
+# AWS side of the push-triggered build; only created when webhooks are enabled.
+resource "aws_codebuild_webhook" "this" {
+  count = var.enable_webhooks ? 1 : 0
+
+  project_name  = aws_codebuild_project.this.name
+  branch_filter = var.webhook_branch_filter
+
+}
+
+# GitHub side of the webhook pair: registers the CodeBuild payload URL/secret
+# on the repo so pushes matching the branch filter start a build.
+resource "github_repository_webhook" "this" {
+  count = var.enable_webhooks ? 1 : 0
+
+  active     = true
+  events     = ["push"]
+  repository = data.github_repository.this.name
+
+  configuration {
+    url          = aws_codebuild_webhook.this[0].payload_url
+    secret       = aws_codebuild_webhook.this[0].secret
+    content_type = "json"
+    insecure_ssl = false
+  }
+}

+ 51 - 0
base/codebuild_splunk_docs/vars.tf

@@ -0,0 +1,51 @@
+# NOTE(review): splunk_server_types is not referenced anywhere in this module's
+# visible files -- it looks like a leftover from codebuild_splunk_apps. Confirm
+# and remove if truly unused.
+locals {
+  # creates a job for each of these types, using <splunk_prefix>:<server type> as the tag
+  splunk_server_types = toset([
+    #"cm",
+    #"hf",
+    #"idx",
+    "sh-es",
+    "sh-cust",
+  ])
+}
+
+variable "repository" {
+  description = "Name of the repository. Must be part of the provider in the terragrunt.hcl. This will determine both the name of the repository and the folder in S3 where application artifacts are stored."
+  type = string
+}
+
+variable "source_version" {
+  description = "Tag or branch for the git repository."
+  type = string
+  default = "master"
+}
+
+variable "enable_webhooks" {
+  description = "Build on changes?"
+  type = bool
+  default = false
+}
+
+variable "tags" {
+  description = "Tags to add to the resource (in addition to global standard tags)"
+  # NOTE(review): bare "map" is legacy 0.11-style shorthand; consider
+  # map(string) once all callers are confirmed to pass string values.
+  type        = map
+  default     = { }
+}
+# Standard module plumbing, supplied by terragrunt.
+variable "standard_tags" { type = map }
+variable "environment" { type = string }
+variable "aws_region" { type = string }
+variable "aws_partition" { type = string }
+variable "aws_partition_alias" { type = string }
+variable "aws_account_id" { type = string }
+variable "common_services_account" { type = string }
+variable "splunk_prefix" { type = string }
+
+# Enable the public build badge on the CodeBuild project.
+# Fix: declared as bool -- the underlying badge_enabled argument is boolean.
+# Backward compatible: callers passing the string "false"/"true" are still
+# converted automatically by Terraform's type system.
+variable "badge_enabled" {
+  type    = bool
+  default = false
+}
+
+# Regex applied to branch names for webhook-triggered builds.
+variable "webhook_branch_filter" {
+  type    = string
+  default = "^(master|develop)$"
+}

+ 1 - 1
base/generic_s3_bucket_with_role/README.md

@@ -12,7 +12,7 @@ All items transition to a intelligent tiering after 30 days.
 For testing, the instance must assume-role first. To do this from the command-line:
 
 ```
-aws --region us-gov-east-1 sts assume-role --role-arn [ARN] --role-session-name ftd_testing
+aws --region us-gov-east-1 sts assume-role --role-session-name ftd_testing --role-arn [ARN]
 # Output will contain AccessKeyId and SecretAccessKey
 AWS_ACCESS_KEY_ID=[REPLACE] AWS_SECRET_ACCESS_KEY=[REPLACE] AWS_SESSION_TOKEN=[replace] aws --region us-gov-east-1 s3 ls
 ```