Compare commits
11 Commits
23a0d041ba
...
7167e3fa32
Author | SHA1 | Date |
---|---|---|
Tom Alexander | 7167e3fa32 | 3 years ago |
Tom Alexander | 3654b13a66 | 3 years ago |
Tom Alexander | c1d18e0fe5 | 3 years ago |
Tom Alexander | fc7e54ea8a | 3 years ago |
Tom Alexander | 9222476444 | 3 years ago |
Tom Alexander | 35f3265659 | 3 years ago |
Tom Alexander | 6f2ee8f880 | 3 years ago |
Tom Alexander | 2a225de2f9 | 3 years ago |
Tom Alexander | 09c7cc8d03 | 3 years ago |
Tom Alexander | ed106036dd | 3 years ago |
Tom Alexander | b0092be5d6 | 3 years ago |
@ -0,0 +1,70 @@
|
||||
# Echo-service deployment used to exercise horizontal pod autoscaling.
apiVersion: apps/v1
kind: Deployment
metadata:
  name: autoscale-test
  namespace: default
spec:
  selector:
    matchLabels:
      app: autoscale-test
  template:
    metadata:
      labels:
        app: autoscale-test
    spec:
      containers:
        - name: echoservice
          image: gcr.io/hip-wharf-319304/echoservice:latest
          imagePullPolicy: Always
          env:
            - name: "WORKERS"
              value: "8"
          ports:
            - name: http
              containerPort: 8080
              protocol: TCP
          livenessProbe:
            httpGet:
              path: /
              port: http
          readinessProbe:
            httpGet:
              path: /
              port: http
          resources:
            # CPU requests are required for the HPA's Utilization target below
            # to have a denominator.
            requests:
              cpu: "250m"
              memory: "256Mi"
---
apiVersion: v1
kind: Service
metadata:
  name: autoscale-test
  namespace: default
spec:
  selector:
    app: autoscale-test
  type: LoadBalancer
  ports:
    - port: 80
      targetPort: "http"
---
# autoscaling/v2 is the stable replacement for v2beta2, which was removed in
# Kubernetes 1.26; this spec is schema-compatible between the two versions.
apiVersion: autoscaling/v2
kind: HorizontalPodAutoscaler
metadata:
  name: autoscale-test
  namespace: default
spec:
  scaleTargetRef:
    apiVersion: apps/v1
    kind: Deployment
    name: autoscale-test
  minReplicas: 1
  maxReplicas: 10
  metrics:
    - type: Resource
      resource:
        name: cpu
        target:
          type: Utilization
          averageUtilization: 50
|
@ -0,0 +1,17 @@
|
||||
FROM python:3.9

# Run as an unprivileged user. Because USER is switched before pip runs, pip
# falls back to a user-level install under /home/server/.local, which is where
# the CMD below finds gunicorn.
RUN useradd -m -g nogroup server
USER server

WORKDIR /home/server
ENV FLASK_APP=main.py

# Perform the install of requirements first to avoid re-installing on every code change
COPY requirements.txt /home/server/
# --no-cache-dir keeps pip's download cache out of the image layer.
RUN pip install --no-cache-dir -r /home/server/requirements.txt

COPY *.py /home/server/

CMD ["/home/server/.local/bin/gunicorn", "--config", "/home/server/gunicorn.conf.py", "main:app"]
|
@ -0,0 +1,22 @@
|
||||
IMAGE_NAME:=echoservice
GCP_PROJECT:=hip-wharf-319304
# Fully-qualified registry tag, derived once so push/clean stay in sync.
REMOTE_TAG:=gcr.io/$(GCP_PROJECT)/$(IMAGE_NAME)

.PHONY: all
all: build push

.PHONY: build
build:
	docker build -t $(IMAGE_NAME) .

.PHONY: push
push:
	docker tag $(IMAGE_NAME) $(REMOTE_TAG)
	docker push $(REMOTE_TAG)

.PHONY: clean
clean:
	# BUGFIX: the original listed $(IMAGE_NAME) twice, so docker rmi errored
	# on the already-removed second occurrence.
	docker rmi $(IMAGE_NAME) $(REMOTE_TAG)

.PHONY: run
run:
	docker run --rm -i -t -p "8080:8080" $(IMAGE_NAME)
|
@ -0,0 +1,9 @@
|
||||
"""Gunicorn configuration; all tunables can be overridden via the environment."""
import os

# Port to serve on, taken from $PORT when present (default 8080).
env_port = int(os.environ.get("PORT", 8080))

# Listen on every interface at the configured port.
bind = "0.0.0.0:" + str(env_port)
# Number of worker processes, overridable via $WORKERS.
workers = int(os.environ.get("WORKERS", 8))
# Import the application once in the master before forking workers.
preload_app = True
# Recycle each worker after a bounded number of requests; the jitter
# staggers restarts so workers do not all recycle at the same moment.
max_requests = 100
max_requests_jitter = 50
|
@ -0,0 +1,28 @@
|
||||
import json
|
||||
import os
|
||||
|
||||
from flask import Flask, request
|
||||
|
||||
app = Flask(__name__)
|
||||
|
||||
|
||||
def get_request_params():
    """Return the request's JSON body if present, else its query args, else None."""
    payload = request.get_json(silent=True)
    if payload:
        return payload
    # Fall back to query-string parameters; an empty MultiDict maps to None.
    return request.args or None
|
||||
|
||||
|
||||
@app.route("/", methods=("GET", "POST"))
def index():
    """Echo the incoming request's headers and parameters back as a JSON string."""
    body = {
        "headers": dict(request.headers),
        "params": get_request_params(),
    }
    return json.dumps(body)
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Local development entry point; the container runs gunicorn instead.
    port = int(os.environ.get("PORT", 8080))
    app.run(host="0.0.0.0", port=port, debug=True)
|
@ -0,0 +1,2 @@
|
||||
flask==1.1.2
|
||||
gunicorn==20.1.0
|
@ -0,0 +1,25 @@
|
||||
import json
|
||||
import os
|
||||
|
||||
from google.cloud import bigquery
|
||||
|
||||
client = bigquery.Client()
|
||||
|
||||
|
||||
def push_to_pubsub(request_params):
    """Stream a single row into the BigQuery table named by $BQ_TABLE.

    NOTE(review): despite the name, this writes to BigQuery via
    ``insert_rows_json`` — not to Pub/Sub. The name is kept for
    compatibility with existing callers.

    Raises:
        Exception: if BigQuery reports any per-row insert errors.
    """
    errors = client.insert_rows_json(os.environ["BQ_TABLE"], [request_params])
    # insert_rows_json returns a list of per-row error dicts; empty means success.
    if errors:
        raise Exception("Encountered errors while inserting rows: {}".format(errors))
|
||||
|
||||
|
||||
def main(request):
    """Cloud Function entry point: forward request parameters to BigQuery.

    Accepts either a JSON body or query-string arguments; responds with a
    400 tuple when neither is present.
    """
    params = request.get_json(silent=True)
    if not params:
        # No usable JSON body — fall back to query-string arguments.
        params = request.args
    if not params:
        return ("No data provided.", 400)
    push_to_pubsub(params)
    return {"status": "ok", "source": "cf_to_bq"}
|
@ -0,0 +1,2 @@
|
||||
Flask==1.1.2
|
||||
google-cloud-bigquery==2.22.0
|
Loading…
Reference in New Issue