Compare commits: 23a0d041ba...7167e3fa32 (11 commits)

Commits:
7167e3fa32
3654b13a66
c1d18e0fe5
fc7e54ea8a
9222476444
35f3265659
6f2ee8f880
2a225de2f9
09c7cc8d03
ed106036dd
b0092be5d6
kubernetes/autoscale/k8s/autoscale.yaml (new file, 70 lines)
@@ -0,0 +1,70 @@
apiVersion: apps/v1
kind: Deployment
metadata:
  name: autoscale-test
  namespace: default
spec:
  selector:
    matchLabels:
      app: autoscale-test
  template:
    metadata:
      labels:
        app: autoscale-test
    spec:
      containers:
        - name: echoservice
          image: gcr.io/hip-wharf-319304/echoservice:latest
          imagePullPolicy: Always
          env:
            - name: "WORKERS"
              value: "8"
          ports:
            - name: http
              containerPort: 8080
              protocol: TCP
          livenessProbe:
            httpGet:
              path: /
              port: http
          readinessProbe:
            httpGet:
              path: /
              port: http
          resources:
            requests:
              cpu: "250m"
              memory: "256Mi"
---
apiVersion: v1
kind: Service
metadata:
  name: autoscale-test
  namespace: default
spec:
  selector:
    app: autoscale-test
  type: LoadBalancer
  ports:
    - port: 80
      targetPort: "http"
---
apiVersion: autoscaling/v2beta2
kind: HorizontalPodAutoscaler
metadata:
  name: autoscale-test
  namespace: default
spec:
  scaleTargetRef:
    apiVersion: apps/v1
    kind: Deployment
    name: autoscale-test
  minReplicas: 1
  maxReplicas: 10
  metrics:
    - type: Resource
      resource:
        name: cpu
        target:
          type: Utilization
          averageUtilization: 50
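For reference, the HorizontalPodAutoscaler above targets 50% average utilization of the 250m CPU request and keeps the Deployment between 1 and 10 replicas. A minimal Python sketch of the standard HPA scaling rule (desiredReplicas = ceil(currentReplicas * currentUtilization / targetUtilization)) applied to these numbers; the constant names are illustrative, and the real controller additionally applies tolerances and stabilization windows:

import math

CPU_REQUEST_MILLICORES = 250   # from resources.requests.cpu above
TARGET_UTILIZATION = 0.50      # averageUtilization: 50

def desired_replicas(current_replicas: int, avg_usage_millicores: float) -> int:
    """ceil(currentReplicas * currentUtilization / targetUtilization), clamped to 1..10."""
    current_utilization = avg_usage_millicores / CPU_REQUEST_MILLICORES
    desired = math.ceil(current_replicas * current_utilization / TARGET_UTILIZATION)
    return max(1, min(10, desired))   # minReplicas / maxReplicas from the HPA spec

print(desired_replicas(2, 200))  # avg 200m per pod (80% of request) -> scales out to 4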
kubernetes/autoscale/service/Dockerfile (new file, 17 lines)
@@ -0,0 +1,17 @@
FROM python:3.9

RUN useradd -m -g nogroup server
USER server

WORKDIR /home/server
ENV FLASK_APP=main.py

# Perform the install of requirements first to avoid re-installing on every code change
COPY requirements.txt /home/server/
RUN pip install -r /home/server/requirements.txt

COPY *.py /home/server/

# CMD ["python", "/home/server/main.py"]

CMD ["/home/server/.local/bin/gunicorn", "--config", "/home/server/gunicorn.conf.py", "main:app"]
kubernetes/autoscale/service/Makefile (new file, 22 lines)
@@ -0,0 +1,22 @@
IMAGE_NAME:=echoservice
GCP_PROJECT:=hip-wharf-319304

.PHONY: all
all: build push

.PHONY: build
build:
	docker build -t $(IMAGE_NAME) .

.PHONY: push
push:
	docker tag $(IMAGE_NAME) gcr.io/$(GCP_PROJECT)/$(IMAGE_NAME)
	docker push gcr.io/$(GCP_PROJECT)/$(IMAGE_NAME)

.PHONY: clean
clean:
	docker rmi $(IMAGE_NAME) gcr.io/$(GCP_PROJECT)/$(IMAGE_NAME) $(IMAGE_NAME)

.PHONY: run
run:
	docker run --rm -i -t -p "8080:8080" $(IMAGE_NAME)
kubernetes/autoscale/service/gunicorn.conf.py (new file, 9 lines)
@@ -0,0 +1,9 @@
import os

env_port = int(os.environ.get("PORT", 8080))

bind = f"0.0.0.0:{env_port}"
workers = int(os.environ.get("WORKERS", 8))
preload_app = True
max_requests = 100
max_requests_jitter = 50
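The gunicorn config above reads PORT and WORKERS from the environment (the Deployment sets WORKERS=8) and recycles each worker after roughly max_requests requests, with max_requests_jitter spreading the restarts so all workers don't recycle at the same moment. A rough sketch of how a per-worker limit could be derived from these settings (gunicorn's own implementation may differ in detail):

import random

max_requests = 100
max_requests_jitter = 50

def worker_request_limit() -> int:
    # Each worker gets a slightly different restart threshold,
    # which staggers recycling across the pool.
    return max_requests + random.randint(0, max_requests_jitter)

print([worker_request_limit() for _ in range(8)])  # one limit per WORKERS=8 worker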
kubernetes/autoscale/service/main.py (new file, 28 lines)
@@ -0,0 +1,28 @@
import json
import os

from flask import Flask, request

app = Flask(__name__)


def get_request_params():
    request_json = request.get_json(silent=True)
    request_args = request.args
    if request_json:
        return request_json
    elif request_args:
        return request_args
    else:
        return None


@app.route("/", methods=("GET", "POST"))
def index():
    return json.dumps(
        {"headers": {k: v for k, v in request.headers}, "params": get_request_params()}
    )


if __name__ == "__main__":
    app.run(host="0.0.0.0", port=int(os.environ.get("PORT", 8080)), debug=True)
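main.py simply echoes the request headers and parameters back as JSON. A hedged usage sketch with the third-party requests library, assuming the service is reachable on localhost:8080 (as with the run target in the Makefile above):

import requests  # third-party; not part of the service's requirements.txt

# Hypothetical local endpoint; `make run` maps the container's 8080 to localhost:8080.
URL = "http://localhost:8080/"

# GET with query parameters: echoed back under "params".
print(requests.get(URL, params={"hello": "world"}).json())

# POST with a JSON body: get_request_params() prefers the JSON payload.
print(requests.post(URL, json={"event": "test", "value": 42}).json())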
kubernetes/autoscale/service/requirements.txt (new file, 2 lines)
@@ -0,0 +1,2 @@
flask==1.1.2
gunicorn==20.1.0
@@ -207,16 +207,43 @@ module "cf_to_pubsub" {
   source = "../modules/cf_to_pubsub"
   project = var.project
   region = var.region
-  topic_name = "bigquery-etl"
+  function_name = "cf-to-pubsub"
+  function_description = "CloudFunction to PubSub"
+  function_source_name = "cf_to_pubsub"
   source_bucket = google_storage_bucket.bucket
   service_cloudbuild = google_project_service.cloudbuild
+
+  environment_variables = {
+    GCP_PROJECT = var.project
+    GCP_TOPIC = "bigquery-etl"
+  }
 }

-output "log_to_bq_endpoint" {
-  description = "https endpoint to log to BigQuery."
+output "cf_to_pubsub_endpoint" {
+  description = "https endpoint to log to BigQuery through pubsub."
   value = module.cf_to_pubsub.https_trigger_url
 }

+module "cf_to_bq" {
+  source = "../modules/cf_to_pubsub"
+  project = var.project
+  region = var.region
+  function_name = "cf-to-bq"
+  function_description = "CloudFunction to BigQuery"
+  function_source_name = "cf_to_bq"
+  source_bucket = google_storage_bucket.bucket
+  service_cloudbuild = google_project_service.cloudbuild
+
+  environment_variables = {
+    BQ_TABLE = "${var.project}.pubsub_etl.pubsub_etl"
+  }
+}
+
+output "cf_to_bq_endpoint" {
+  description = "https endpoint to log to BigQuery directly."
+  value = module.cf_to_bq.https_trigger_url
+}
+
 #################### PubSub to BigQuery ###################

 module "bigquery" {
@@ -99,9 +99,13 @@ resource "google_project_service" "dataflow" {
   disable_dependent_services = true
 }

+resource "random_id" "temp_storage" {
+  byte_length = 4
+}
+
 resource "google_storage_bucket" "temp_storage" {
   project = var.project
-  name = "${var.project}-bigquery-etl"
+  name = "${var.project}-etl-temp-${random_id.temp_storage.hex}"
   force_destroy = true
 }
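This hunk replaces the fixed "-bigquery-etl" bucket name with one that carries a random_id suffix; with byte_length = 4 the hex attribute is 8 hex characters, which helps avoid collisions in the globally shared bucket namespace. A quick Python illustration of what such a suffix looks like, assuming random_id's hex is simply the hex encoding of the random bytes ("my-project" is a placeholder project id):

import secrets

# Approximate equivalent of Terraform's random_id with byte_length = 4:
# 4 random bytes rendered as 8 lowercase hex characters.
suffix = secrets.token_hex(4)
print(f"my-project-etl-temp-{suffix}")  # e.g. my-project-etl-temp-3fa81c42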
@@ -11,9 +11,22 @@ variable "region" {
   type = string
 }

-variable "topic_name" {
-  description = "The name of topic where the events should be published."
+variable "function_name" {
+  description = "Name for the cloud function. If unspecified, one will be generated."
   type = string
+  default = ""
+}
+
+variable "function_description" {
+  description = "Description for the cloud function."
+  type = string
+  default = ""
+}
+
+variable "environment_variables" {
+  description = "Environment variables for the execution of the cloud function."
+  type = map(any)
+  default = {}
 }

 variable "source_bucket" {
@@ -23,11 +36,16 @@ variable "source_bucket" {
 variable "function_source_name" {
   description = "Name of the folder containing the source code for the function."
   type = string
-  default = "cf_to_pubsub"
 }

 variable "service_cloudbuild" {
-  description = "THe cloudbuild google_project_service."
+  description = "The cloudbuild google_project_service."
 }
+
+variable "allow_external" {
+  description = "Whether or not to allow outside traffic ingress."
+  type = bool
+  default = true
+}

 output "https_trigger_url" {
@@ -35,6 +53,14 @@ output "https_trigger_url" {
   value = google_cloudfunctions_function.function.https_trigger_url
 }

+locals {
+  function_name = var.function_name == "" ? "cf-${random_id.function_id.hex}" : var.function_name
+}
+
+resource "random_id" "function_id" {
+  byte_length = 4
+}
+
 resource "random_id" "cf_bucket_id" {
   byte_length = 4
 }
@@ -53,8 +79,8 @@ resource "google_storage_bucket_object" "remote_archive" {
 }

 resource "google_cloudfunctions_function" "function" {
-  name = "cf-to-pubsub"
-  description = "CloudFunction to PubSub"
+  name = local.function_name
+  description = var.function_description
   runtime = "python39"

   available_memory_mb = 128
@@ -63,13 +89,9 @@ resource "google_cloudfunctions_function" "function" {
   trigger_http = true
   entry_point = "main"
   max_instances = 4
-  ingress_settings = "ALLOW_ALL"
-  # ingress_settings = "ALLOW_INTERNAL_ONLY"
+  ingress_settings = var.allow_external ? "ALLOW_ALL" : "ALLOW_INTERNAL_ONLY"

-  environment_variables = {
-    GCP_PROJECT = var.project
-    GCP_TOPIC = var.topic_name
-  }
+  environment_variables = var.environment_variables

   depends_on = [
     var.service_cloudbuild
terraform/modules/cf_to_pubsub/functions/cf_to_bq/main.py (new file, 25 lines)
@@ -0,0 +1,25 @@
import json
import os

from google.cloud import bigquery

client = bigquery.Client()


def push_to_pubsub(request_params):
    errors = client.insert_rows_json(os.environ["BQ_TABLE"], [request_params])
    if errors != []:
        raise Exception("Encountered errors while inserting rows: {}".format(errors))


def main(request):
    request_json = request.get_json(silent=True)
    request_args = request.args

    if request_json:
        push_to_pubsub(request_json)
    elif request_args:
        push_to_pubsub(request_args)
    else:
        return ("No data provided.", 400)
    return {"status": "ok", "source": "cf_to_bq"}
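cf_to_bq/main.py reuses the cf_to_pubsub module's layout but writes incoming JSON straight into BigQuery with insert_rows_json, using the BQ_TABLE variable set in the Terraform above. A hedged sketch of posting a row to the deployed function; the URL is a placeholder for the cf_to_bq_endpoint output, and the JSON keys are assumed to match the target table's columns:

import requests

# Placeholder: the real URL comes from the `cf_to_bq_endpoint` Terraform output.
ENDPOINT = "https://REGION-PROJECT.cloudfunctions.net/cf-to-bq"

# insert_rows_json() passes this dict through as a row, so its keys must
# line up with the pubsub_etl.pubsub_etl table's column names (assumption).
row = {"event": "signup", "value": 1}

resp = requests.post(ENDPOINT, json=row)
print(resp.status_code, resp.json())  # expect {"status": "ok", "source": "cf_to_bq"}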
@@ -0,0 +1,2 @@
Flask==1.1.2
google-cloud-bigquery==2.22.0
@@ -4,7 +4,7 @@ import os
 from google.cloud import pubsub_v1

 publisher = pubsub_v1.PublisherClient()
-topic = publisher.topic_path(os.environ.get("GCP_PROJECT"), os.environ.get("GCP_TOPIC"))
+topic = publisher.topic_path(os.environ["GCP_PROJECT"], os.environ["GCP_TOPIC"])


 def push_to_pubsub(request_params):
@@ -20,5 +20,5 @@ def main(request):
     elif request_args:
         push_to_pubsub(request_args)
     else:
-        raise Exception("No data provided.")
-    return {"status": "ok"}
+        return ("No data provided.", 400)
+    return {"status": "ok", "source": "cf_to_pubsub"}