
Commit 25c61c4

daniel-cit and gtsorbo authored
feat!: Add support for Log Analytics and Remove BigQuery log destination (#1025)
Co-authored-by: Grant Sorbo <[email protected]>
1 parent ee3a1d8 commit 25c61c4

File tree

10 files changed: +59 / -153 lines changed


1-org/README.md

Lines changed: 1 addition & 1 deletion
@@ -81,7 +81,7 @@ to Bigquery and Pub/Sub. This will result in additional charges for those copies

 - This module implements but does not enable [bucket policy retention](https://cloud.google.com/storage/docs/bucket-lock) for organization logs. If needed, enable a retention policy by configuring the `log_export_storage_retention_policy` variable.

-- This module implements but does not enable [object versioning](https://cloud.google.com/storage/docs/object-versioning) for organization logs. If needed, enable object versioning by setting the `audit_logs_table_delete_contents_on_destroy` variable to true.
+- This module implements but does not enable [object versioning](https://cloud.google.com/storage/docs/object-versioning) for organization logs. If needed, enable object versioning by setting the `log_export_storage_versioning` variable to true.

 - Bucket policy retention and object versioning are **mutually exclusive**.
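
For context, a minimal sketch of how these toggles could be set in the `1-org/envs/shared` terraform.tfvars. The `log_export_storage_versioning` variable appears in this commit; the field names inside the retention-policy object are assumptions for illustration only, and the two options are mutually exclusive:

```hcl
# Enable object versioning on the org log export bucket.
log_export_storage_versioning = true

# Alternatively, configure a retention policy instead of versioning.
# The field names below are assumptions, not shown in this commit.
# log_export_storage_retention_policy = {
#   is_locked             = false
#   retention_period_days = 365
# }
```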

1-org/envs/shared/README.md

Lines changed: 2 additions & 4 deletions
@@ -4,8 +4,6 @@
 | Name | Description | Type | Default | Required |
 |------|-------------|------|---------|:--------:|
 | audit\_data\_users | Google Workspace or Cloud Identity group that have access to audit logs. | `string` | n/a | yes |
-| audit\_logs\_table\_delete\_contents\_on\_destroy | (Optional) If set to true, delete all the tables in the dataset when destroying the resource; otherwise, destroying the resource will fail if tables are present. | `bool` | `false` | no |
-| audit\_logs\_table\_expiration\_days | Period before tables expire for all audit logs in milliseconds. Default is 30 days. | `number` | `30` | no |
 | billing\_data\_users | Google Workspace or Cloud Identity group that have access to billing data set. | `string` | n/a | yes |
 | billing\_export\_dataset\_location | The location of the dataset for billing data export. | `string` | `"US"` | no |
 | cai\_monitoring\_kms\_force\_destroy | If set to true, delete KMS keyring and keys when destroying the module; otherwise, destroying the module will fail if KMS keys are present. | `bool` | `false` | no |

@@ -42,8 +40,8 @@
 | domains\_to\_allow | The list of domains to allow users from in IAM. |
 | interconnect\_project\_id | The Dedicated Interconnect project ID |
 | interconnect\_project\_number | The Dedicated Interconnect project number |
-| logs\_export\_bigquery\_dataset\_name | The log bucket for destination of log exports. See https://cloud.google.com/logging/docs/routing/overview#buckets |
-| logs\_export\_logbucket\_name | The log bucket for destination of log exports. See https://cloud.google.com/logging/docs/routing/overview#buckets |
+| logs\_export\_logbucket\_linked\_dataset\_name | The resource name of the Log Bucket linked BigQuery dataset created for Log Analytics. See https://cloud.google.com/logging/docs/log-analytics . |
+| logs\_export\_logbucket\_name | The log bucket for destination of log exports. See https://cloud.google.com/logging/docs/routing/overview#buckets . |
 | logs\_export\_pubsub\_topic | The Pub/Sub topic for destination of log exports |
 | logs\_export\_storage\_bucket\_name | The storage bucket for destination of log exports |
 | network\_folder\_name | The network folder name. |

1-org/envs/shared/log_sinks.tf

Lines changed: 13 additions & 21 deletions
@@ -18,15 +18,16 @@ locals {
   parent_resource_id   = local.parent_folder != "" ? local.parent_folder : local.org_id
   parent_resource_type = local.parent_folder != "" ? "folder" : "organization"
   parent_resources     = { resource = local.parent_resource_id }
-  main_logs_filter = <<EOF
+  logs_filter = <<EOF
     logName: /logs/cloudaudit.googleapis.com%2Factivity OR
     logName: /logs/cloudaudit.googleapis.com%2Fsystem_event OR
     logName: /logs/cloudaudit.googleapis.com%2Fdata_access OR
+    logName: /logs/cloudaudit.googleapis.com%2Faccess_transparency OR
+    logName: /logs/cloudaudit.googleapis.com%2Fpolicy OR
    logName: /logs/compute.googleapis.com%2Fvpc_flows OR
    logName: /logs/compute.googleapis.com%2Ffirewall OR
-    logName: /logs/cloudaudit.googleapis.com%2Faccess_transparency
+    logName: /logs/dns.googleapis.com%2Fdns_queries
 EOF
-  all_logs_filter = ""
 }

 resource "random_string" "suffix" {
@@ -42,22 +43,11 @@ module "logs_export" {
   resource_type                  = local.parent_resource_type
   logging_destination_project_id = module.org_audit_logs.project_id

-  /******************************************
-    Send logs to BigQuery
-  *****************************************/
-  bigquery_options = {
-    logging_sink_name          = "sk-c-logging-bq"
-    logging_sink_filter        = local.main_logs_filter
-    dataset_name               = "audit_logs"
-    expiration_days            = var.audit_logs_table_expiration_days
-    delete_contents_on_destroy = var.audit_logs_table_delete_contents_on_destroy
-  }
-
   /******************************************
     Send logs to Storage
   *****************************************/
   storage_options = {
-    logging_sink_filter = local.all_logs_filter
+    logging_sink_filter = local.logs_filter
     logging_sink_name   = "sk-c-logging-bkt"
     storage_bucket_name = "bkt-${module.org_audit_logs.project_id}-org-logs-${random_string.suffix.result}"
     location            = var.log_export_storage_location
@@ -72,7 +62,7 @@ module "logs_export" {
     Send logs to Pub\Sub
   *****************************************/
   pubsub_options = {
-    logging_sink_filter = local.main_logs_filter
+    logging_sink_filter = local.logs_filter
     logging_sink_name   = "sk-c-logging-pub"
     topic_name          = "tp-org-logs-${random_string.suffix.result}"
     create_subscriber   = true
@@ -82,14 +72,16 @@ module "logs_export" {
     Send logs to Logbucket
   *****************************************/
   logbucket_options = {
-    logging_sink_name   = "sk-c-logging-logbkt"
-    logging_sink_filter = local.all_logs_filter
-    name                = "logbkt-org-logs-${random_string.suffix.result}"
-    location            = local.default_region
+    logging_sink_name          = "sk-c-logging-logbkt"
+    logging_sink_filter        = local.logs_filter
+    name                       = "logbkt-org-logs-${random_string.suffix.result}"
+    location                   = local.default_region
+    enable_analytics           = true
+    linked_dataset_id          = "ds_c_logbkt_analytics"
+    linked_dataset_description = "BigQuery Dataset for Logbucket analytics"
   }
 }

-
 /******************************************
   Billing logs (Export configured manually)
 *****************************************/
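
For readers unfamiliar with Log Analytics, the new `enable_analytics`, `linked_dataset_id`, and `linked_dataset_description` options map conceptually onto the Google provider's log bucket and linked dataset resources. A rough sketch of the equivalent raw resources follows; the module call above is what this commit actually uses, and the names and project ID below are placeholders:

```hcl
# Illustrative only: roughly what the logbucket submodule provisions when
# Log Analytics is enabled. Resource names and the project ID are placeholders.
resource "google_logging_project_bucket_config" "org_logs" {
  project          = "prj-c-logging"    # placeholder project ID
  location         = "us-central1"      # corresponds to logbucket_options.location
  bucket_id        = "logbkt-org-logs"  # corresponds to logbucket_options.name
  retention_days   = 30
  enable_analytics = true # must be enabled before a dataset can be linked
}

resource "google_logging_linked_dataset" "analytics" {
  link_id     = "ds_c_logbkt_analytics"                          # linked_dataset_id
  bucket      = google_logging_project_bucket_config.org_logs.id # parent log bucket
  description = "BigQuery Dataset for Logbucket analytics"       # linked_dataset_description
}
```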

1-org/envs/shared/outputs.tf

Lines changed: 4 additions & 4 deletions
@@ -111,12 +111,12 @@ output "logs_export_storage_bucket_name" {

 output "logs_export_logbucket_name" {
   value       = module.logs_export.logbucket_destination_name
-  description = "The log bucket for destination of log exports. See https://cloud.google.com/logging/docs/routing/overview#buckets"
+  description = "The log bucket for destination of log exports. See https://cloud.google.com/logging/docs/routing/overview#buckets ."
 }

-output "logs_export_bigquery_dataset_name" {
-  value       = module.logs_export.bigquery_destination_name
-  description = "The log bucket for destination of log exports. See https://cloud.google.com/logging/docs/routing/overview#buckets"
+output "logs_export_logbucket_linked_dataset_name" {
+  value       = module.logs_export.logbucket_linked_dataset_name
+  description = "The resource name of the Log Bucket linked BigQuery dataset created for Log Analytics. See https://cloud.google.com/logging/docs/log-analytics ."
 }

 output "tags" {

1-org/envs/shared/variables.tf

Lines changed: 0 additions & 12 deletions
@@ -35,12 +35,6 @@ variable "domains_to_allow" {
   type        = list(string)
 }

-variable "audit_logs_table_expiration_days" {
-  description = "Period before tables expire for all audit logs in milliseconds. Default is 30 days."
-  type        = number
-  default     = 30
-}
-
 variable "scc_notification_name" {
   description = "Name of the Security Command Center Notification. It must be unique in the organization. Run `gcloud scc notifications describe <scc_notification_name> --organization=org_id` to check if it already exists."
   type        = string
@@ -94,12 +88,6 @@ variable "log_export_storage_versioning" {
   default = false
 }

-variable "audit_logs_table_delete_contents_on_destroy" {
-  description = "(Optional) If set to true, delete all the tables in the dataset when destroying the resource; otherwise, destroying the resource will fail if tables are present."
-  type        = bool
-  default     = false
-}
-
 variable "log_export_storage_retention_policy" {
   description = "Configuration of the bucket's data retention policy for how long objects in the bucket should be retained."
   type = object({

1-org/modules/centralized-logging/README.md

Lines changed: 3 additions & 17 deletions
@@ -1,6 +1,6 @@
 # Centralized Logging Module

-This module handles logging configuration enabling one or more resources such as organization, folders, or projects to send logs to multiple destinations: [GCS bucket](https://cloud.google.com/logging/docs/export/using_exported_logs#gcs-overview), [Big Query](https://cloud.google.com/logging/docs/export/bigquery), [Pub/Sub](https://cloud.google.com/logging/docs/export/using_exported_logs#pubsub-overview), and [Log Buckets](https://cloud.google.com/logging/docs/routing/overview#buckets).
+This module handles logging configuration enabling one or more resources such as organization, folders, or projects to send logs to multiple destinations: [GCS bucket](https://cloud.google.com/logging/docs/export/using_exported_logs#gcs-overview), [Pub/Sub](https://cloud.google.com/logging/docs/export/using_exported_logs#pubsub-overview), and [Log Buckets](https://cloud.google.com/logging/docs/routing/overview#buckets) with [Log Analytics](https://cloud.google.com/logging/docs/log-analytics#analytics).

 ## Usage

@@ -25,19 +25,6 @@ module "logs_export" {
     storage_bucket_name = "bkt-logs"
     location            = "us-central1"
   }
-
-  bigquery_options = {
-    dataset_name        = "ds_logs"
-    logging_sink_name   = "sk-c-logging-bq"
-    logging_sink_filter = <<EOF
-    logName: /logs/cloudaudit.googleapis.com%2Factivity OR
-    logName: /logs/cloudaudit.googleapis.com%2Fsystem_event OR
-    logName: /logs/cloudaudit.googleapis.com%2Fdata_access OR
-    logName: /logs/compute.googleapis.com%2Fvpc_flows OR
-    logName: /logs/compute.googleapis.com%2Ffirewall OR
-    logName: /logs/cloudaudit.googleapis.com%2Faccess_transparency
-EOF
-  }
 }
 ```
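
After this change, a call that exercises the Log Analytics options might look as follows. This is a sketch only: the module source path, organization ID, and resource names are illustrative, not taken from this diff.

```hcl
module "logs_export" {
  source = "../../modules/centralized-logging" # illustrative path

  resources                      = { org = "<ORGANIZATION_ID>" }
  resource_type                  = "organization"
  logging_destination_project_id = "<LOGGING_PROJECT_ID>"

  logbucket_options = {
    name                       = "logbkt-logs"
    logging_sink_name          = "sk-c-logging-logbkt"
    logging_sink_filter        = "" # empty filter exports all logs
    location                   = "global"
    enable_analytics           = true # cannot be disabled once enabled
    linked_dataset_id          = "ds_logbkt_analytics"
    linked_dataset_description = "BigQuery dataset linked to the log bucket"
  }
}
```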

@@ -72,8 +59,7 @@ module "logging_logbucket" {

 | Name | Description | Type | Default | Required |
 |------|-------------|------|---------|:--------:|
-| bigquery\_options | Destination BigQuery options:<br>- dataset\_name: The name of the bigquery dataset to be created and used for log entries.<br>- logging\_sink\_name: The name of the log sink to be created.<br>- logging\_sink\_filter: The filter to apply when exporting logs. Only log entries that match the filter are exported. Default is "" which exports all logs.<br>- expiration\_days: Table expiration time. If null logs will never be deleted.<br>- partitioned\_tables: Options that affect sinks exporting data to BigQuery. use\_partitioned\_tables - (Required) Whether to use BigQuery's partition tables.<br>- delete\_contents\_on\_destroy: If set to true, delete all contained objects in the logging destination. | <pre>object({<br> dataset_name = optional(string, null)<br> logging_sink_name = optional(string, null)<br> logging_sink_filter = optional(string, "")<br> expiration_days = optional(number, null)<br> partitioned_tables = optional(bool, true)<br> delete_contents_on_destroy = optional(bool, false)<br> })</pre> | `null` | no |
-| logbucket\_options | Destination LogBucket options:<br>- name: The name of the log bucket to be created and used for log entries matching the filter.<br>- logging\_sink\_name: The name of the log sink to be created.<br>- logging\_sink\_filter: The filter to apply when exporting logs. Only log entries that match the filter are exported. Default is "" which exports all logs.<br>- location: The location of the log bucket. Default: global.<br>- retention\_days: The number of days data should be retained for the log bucket. Default 30. | <pre>object({<br> name = optional(string, null)<br> logging_sink_name = optional(string, null)<br> logging_sink_filter = optional(string, "")<br> location = optional(string, "global")<br> retention_days = optional(number, 30)<br> })</pre> | `null` | no |
+| logbucket\_options | Destination LogBucket options:<br>- name: The name of the log bucket to be created and used for log entries matching the filter.<br>- logging\_sink\_name: The name of the log sink to be created.<br>- logging\_sink\_filter: The filter to apply when exporting logs. Only log entries that match the filter are exported. Default is "" which exports all logs.<br>- location: The location of the log bucket. Default: global.<br>- enable\_analytics: Whether or not Log Analytics is enabled. A Log bucket with Log Analytics enabled can be queried in the Log Analytics page using SQL queries. Cannot be disabled once enabled.<br>- linked\_dataset\_id: The ID of the linked BigQuery dataset. A valid link dataset ID must only have alphanumeric characters and underscores within it and have up to 100 characters.<br>- linked\_dataset\_description: A use-friendly description of the linked BigQuery dataset. The maximum length of the description is 8000 characters.<br>- retention\_days: The number of days data should be retained for the log bucket. Default 30. | <pre>object({<br> name = optional(string, null)<br> logging_sink_name = optional(string, null)<br> logging_sink_filter = optional(string, "")<br> location = optional(string, "global")<br> enable_analytics = optional(bool, true)<br> linked_dataset_id = optional(string, null)<br> linked_dataset_description = optional(string, null)<br> retention_days = optional(number, 30)<br> })</pre> | `null` | no |
 | logging\_destination\_project\_id | The ID of the project that will have the resources where the logs will be created. | `string` | n/a | yes |
 | logging\_project\_key | (Optional) The key of logging destination project if it is inside resources map. It is mandatory when resource\_type = project and logging\_target\_type = logbucket. | `string` | `""` | no |
 | pubsub\_options | Destination Pubsub options:<br>- topic\_name: The name of the pubsub topic to be created and used for log entries matching the filter.<br>- logging\_sink\_name: The name of the log sink to be created.<br>- logging\_sink\_filter: The filter to apply when exporting logs. Only log entries that match the filter are exported. Default is "" which exports all logs.<br>- create\_subscriber: Whether to create a subscription to the topic that was created and used for log entries matching the filter. If 'true', a pull subscription is created along with a service account that is granted roles/pubsub.subscriber and roles/pubsub.viewer to the topic. | <pre>object({<br> topic_name = optional(string, null)<br> logging_sink_name = optional(string, null)<br> logging_sink_filter = optional(string, "")<br> create_subscriber = optional(bool, true)<br> })</pre> | `null` | no |
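
For readability, here is the new `logbucket_options` type from the table above unpacked into plain HCL; this is a sketch reconstructed from the type column, not a copy of the module's source file:

```hcl
# Shape of logbucket_options implied by the inputs table above.
variable "logbucket_options" {
  type = object({
    name                       = optional(string, null)
    logging_sink_name          = optional(string, null)
    logging_sink_filter        = optional(string, "")
    location                   = optional(string, "global")
    enable_analytics           = optional(bool, true)
    linked_dataset_id          = optional(string, null)
    linked_dataset_description = optional(string, null)
    retention_days             = optional(number, 30)
  })
  default = null
}
```
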
@@ -85,8 +71,8 @@ module "logging_logbucket" {

 | Name | Description |
 |------|-------------|
-| bigquery\_destination\_name | The resource name for the destination BigQuery. |
 | logbucket\_destination\_name | The resource name for the destination Log Bucket. |
+| logbucket\_linked\_dataset\_name | The resource name of the Log Bucket linked BigQuery dataset. |
 | pubsub\_destination\_name | The resource name for the destination Pub/Sub. |
 | storage\_destination\_name | The resource name for the destination Storage. |

1-org/modules/centralized-logging/main.tf

Lines changed: 5 additions & 36 deletions
@@ -39,34 +39,30 @@ locals {
     for v in local.exports_list : "${v.res}_${v.type}" => v
   }
   destinations_options = {
-    bgq = var.bigquery_options
     pub = var.pubsub_options
     sto = var.storage_options
     lbk = var.logbucket_options
   }

   logging_sink_name_map = {
-    bgq = try("sk-to-ds-logs-${var.logging_destination_project_id}", "sk-to-ds-logs")
     pub = try("sk-to-tp-logs-${var.logging_destination_project_id}", "sk-to-tp-logs")
     sto = try("sk-to-bkt-logs-${var.logging_destination_project_id}", "sk-to-bkt-logs")
     lbk = try("sk-to-logbkt-logs-${var.logging_destination_project_id}", "sk-to-logbkt-logs")
   }

   logging_tgt_name = {
-    bgq = replace("${local.logging_tgt_prefix.bgq}${random_string.suffix.result}", "-", "_")
     pub = "${local.logging_tgt_prefix.pub}${random_string.suffix.result}"
     sto = "${local.logging_tgt_prefix.sto}${random_string.suffix.result}"
     lbk = "${local.logging_tgt_prefix.lbk}${random_string.suffix.result}"
   }

   destination_uri_map = {
-    bgq = try(module.destination_bigquery[0].destination_uri, "")
     pub = try(module.destination_pubsub[0].destination_uri, "")
     sto = try(module.destination_storage[0].destination_uri, "")
     lbk = try(module.destination_logbucket[0].destination_uri, "")
   }
+
   logging_tgt_prefix = {
-    bgq = "ds_logs_"
     pub = "tp-logs-"
     sto = try("bkt-logs-${var.logging_destination_project_id}-", "bkt-logs-")
     lbk = "logbkt-logs-"
@@ -92,22 +88,24 @@ module "log_export" {
   parent_resource_type   = var.resource_type
   unique_writer_identity = true
   include_children       = local.include_children
-  bigquery_options       = each.value.type == "bgq" ? { use_partitioned_tables = true } : null
 }

 #-------------------------#
 # Send logs to Log Bucket #
 #-------------------------#
 module "destination_logbucket" {
   source  = "terraform-google-modules/log-export/google//modules/logbucket"
-  version = "~> 7.5.0"
+  version = "~> 7.7"

   count = var.logbucket_options != null ? 1 : 0

   project_id                    = var.logging_destination_project_id
   name                          = coalesce(var.logbucket_options.name, local.logging_tgt_name.lbk)
   log_sink_writer_identity      = module.log_export["${local.value_first_resource}_lbk"].writer_identity
   location                      = var.logbucket_options.location
+  enable_analytics              = var.logbucket_options.enable_analytics
+  linked_dataset_id             = var.logbucket_options.linked_dataset_id
+  linked_dataset_description    = var.logbucket_options.linked_dataset_description
   retention_days                = var.logbucket_options.retention_days
   grant_write_permission_on_bkt = false
 }
@@ -126,35 +124,6 @@ resource "google_project_iam_member" "logbucket_sink_member" {
   member   = module.log_export["${each.value}_lbk"].writer_identity
 }

-
-#-----------------------#
-# Send logs to BigQuery #
-#-----------------------#
-module "destination_bigquery" {
-  source  = "terraform-google-modules/log-export/google//modules/bigquery"
-  version = "~> 7.4"
-
-  count = var.bigquery_options != null ? 1 : 0
-
-  project_id                 = var.logging_destination_project_id
-  dataset_name               = coalesce(var.bigquery_options.dataset_name, local.logging_tgt_name.bgq)
-  log_sink_writer_identity   = module.log_export["${local.value_first_resource}_bgq"].writer_identity
-  expiration_days            = var.bigquery_options.expiration_days
-  delete_contents_on_destroy = var.bigquery_options.delete_contents_on_destroy
-}
-
-#-----------------------------------------#
-# Bigquery Service account IAM membership #
-#-----------------------------------------#
-resource "google_project_iam_member" "bigquery_sink_member" {
-  for_each = var.bigquery_options != null ? var.resources : {}
-
-  project = var.logging_destination_project_id
-  role    = "roles/bigquery.dataEditor"
-  member  = module.log_export["${each.value}_bgq"].writer_identity
-}
-
-
 #----------------------#
 # Send logs to Storage #
 #----------------------#

0 commit comments
