Add bigquery table.
parent e88ba72449
commit 5241df2d94
@@ -6,3 +6,5 @@ A small terraform script to launch GKE with:
 - GCR for custom docker images
 - Auto-scaling node pool
 - K8s database encryption
+
+Contains some settings that are inadvisable in production (for example, disabling deletion protection). I include them because this project's goal is to provide a template for quickly spinning up a test/dev environment which may involve frequent deletions.
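If the template is ever hardened for production, the trade-off described above can be made explicit rather than hard-coded; a minimal sketch, assuming a hypothetical enable_deletion_protection variable (not part of this commit) that would be referenced wherever deletion_protection is currently set to false:

# Hypothetical toggle: test/dev keeps teardown cheap, a production
# deployment flips it to true and wires it into resources such as
# google_bigquery_table.pubsub_etl.
variable "enable_deletion_protection" {
  description = "Protect stateful resources from being destroyed by Terraform."
  type        = bool
  default     = false
}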
@@ -1,3 +1,6 @@
+# Example message:
+# {"time": "2021-07-20T05:05:47", "service": "foo", "log": "bar"}
+
 variable "project" {
   description = "Project ID."
   type        = string
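For reference, the project variable is typically supplied through a tfvars file or a -var flag at plan/apply time; a minimal sketch with a placeholder value (the project ID below is hypothetical):

# terraform.tfvars — substitute your own GCP project ID.
project = "my-gcp-project-id"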
@@ -116,10 +119,56 @@ resource "google_dataflow_job" "etl_job" {
 
   parameters = {
     inputSubscription = google_pubsub_subscription.bigquery_etl.id
-    outputTableSpec   = "your-project:your-dataset.your-table-name"
+    outputTableSpec   = "${google_bigquery_table.pubsub_etl.project}:${google_bigquery_table.pubsub_etl.dataset_id}.${google_bigquery_table.pubsub_etl.table_id}"
   }
 
   additional_experiments = [
-    "enable_streaming_engine"
+    "enable_streaming_engine",
+    "enable_windmill_service"
   ]
 }
+
+#################### BigQuery #############################
+
+resource "google_bigquery_dataset" "pubsub_etl" {
+  project       = var.project
+  dataset_id    = "pubsub_etl"
+  friendly_name = "PubSub ETL"
+  description   = "Dataset where PubSub ETL data goes."
+  location      = "US"
+}
+
+resource "google_bigquery_table" "pubsub_etl" {
+  project    = var.project
+  dataset_id = google_bigquery_dataset.pubsub_etl.dataset_id
+  table_id   = "pubsub_etl"
+
+  deletion_protection = false
+
+  time_partitioning {
+    type = "DAY"
+  }
+
+  schema = <<EOF
+[
+  {
+    "description": "Time of the event",
+    "mode": "NULLABLE",
+    "name": "time",
+    "type": "DATETIME"
+  },
+  {
+    "description": "Name of the service",
+    "mode": "NULLABLE",
+    "name": "service",
+    "type": "STRING"
+  },
+  {
+    "description": "Text of the log",
+    "mode": "NULLABLE",
+    "name": "log",
+    "type": "STRING"
+  }
+]
+EOF
+}
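The interpolated outputTableSpec resolves to the project:dataset.table form that the old placeholder illustrated (for example my-gcp-project-id:pubsub_etl.pubsub_etl). A sketch of an output, not part of this commit, that surfaces the resolved value for quick verification after apply:

# Sketch only: echo the table spec handed to the Dataflow job.
output "etl_output_table_spec" {
  value = "${google_bigquery_table.pubsub_etl.project}:${google_bigquery_table.pubsub_etl.dataset_id}.${google_bigquery_table.pubsub_etl.table_id}"
}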
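The heredoc schema has to be kept valid JSON by hand. An equivalent sketch using Terraform's jsonencode(), shown as a drop-in replacement for the schema argument of google_bigquery_table.pubsub_etl (same three NULLABLE columns, just built so Terraform validates the structure at plan time):

# Alternative to the heredoc: jsonencode() builds the same schema string.
schema = jsonencode([
  { name = "time", type = "DATETIME", mode = "NULLABLE", description = "Time of the event" },
  { name = "service", type = "STRING", mode = "NULLABLE", description = "Name of the service" },
  { name = "log", type = "STRING", mode = "NULLABLE", description = "Text of the log" },
])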