
Copy gcs-upload 0.1 to 0.2

Signed-off-by: Andrea Frittoli <andrea.frittoli@gmail.com>
Andrea Frittoli 2022-03-30 16:11:48 +01:00 committed by tekton-robot
parent a5afb97831
commit 7eb56668e4
3 changed files with 244 additions and 0 deletions


@@ -0,0 +1,62 @@
# Google Cloud Storage Tasks
These `Tasks` copy files and directories to and from GCS buckets from within Pipelines.
These `Tasks` do a similar job to the `GCS` `PipelineResource` and
are intended as its replacement. This is part of our plan to [offer replacement
`Tasks` for Pipeline Resources](https://github.com/tektoncd/catalog/issues/95)
as well as
[document those replacements](https://github.com/tektoncd/pipeline/issues/1369).
## `gcs-upload`
A `Task` that uploads files or directories from a Workspace to a GCS bucket.
### Workspaces
* **credentials**: A workspace that contains a service account key as a JSON file.
This workspace should be populated from a Secret in your TaskRuns and PipelineRuns.
* **source**: A workspace where files will be uploaded from.
### Parameters
* **path**: The path to files or directories relative to the source workspace that you'd like to upload. (_required_)
* **location**: The address (including "gs://") where you'd like to upload files to. (_required_)
* **serviceAccountPath**: The path to the service account credential file in your credentials workspace. (_default_: "service\_account.json")
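
The **credentials** workspace and the **serviceAccountPath** parameter work together: the Task reads the key file at `<credentials workspace>/<serviceAccountPath>`. As a minimal sketch, a matching Secret could look like the following (the Secret name is illustrative; only the key name needs to line up with `serviceAccountPath`):

```yaml
apiVersion: v1
kind: Secret
metadata:
  name: my-gcs-credentials   # illustrative name, referenced from the TaskRun example below
type: Opaque
stringData:
  # Key name matches the default serviceAccountPath; paste your GCS service
  # account's full JSON key as the value.
  service_account.json: |
    {"type": "service_account", "project_id": "my-project"}
```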
## Platforms
The Task can be run on the `linux/amd64` platform.
## Usage
### `gcs-upload`
The following TaskRun uses the `gcs-upload` Task to upload a file from a ConfigMap; the bucket in the `location` parameter is illustrative and should be replaced with your own.
```yaml
kind: ConfigMap
apiVersion: v1
metadata:
  name: test-input-data
data:
  test_file.txt: "Hello, world!"
---
apiVersion: tekton.dev/v1beta1
kind: TaskRun
metadata:
  name: upload-configmap-file-to-gcs
spec:
  taskRef:
    name: gcs-upload
  params:
    - name: path
      value: test_file.txt         # the key mounted from the ConfigMap below
    - name: location
      value: gs://my-test-bucket   # illustrative bucket; replace with your own
  workspaces:
    - name: credentials
      secret:
        secretName: my-gcs-credentials
        defaultMode: 0400
    - name: source
      configMap:
        name: test-input-data
```
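
Because the Task uses `gsutil rsync` when `path` points at a directory (and `gsutil cp` for a single file), a workspace backed by a PVC is a more typical source for larger uploads. A minimal sketch, assuming an illustrative PVC named `shared-data` and an illustrative bucket:

```yaml
apiVersion: tekton.dev/v1beta1
kind: TaskRun
metadata:
  name: upload-artifacts-to-gcs
spec:
  taskRef:
    name: gcs-upload
  params:
    - name: path
      value: artifacts                        # a directory inside the source workspace
    - name: location
      value: gs://my-test-bucket/artifacts    # illustrative bucket
  workspaces:
    - name: credentials
      secret:
        secretName: my-gcs-credentials
    - name: source
      persistentVolumeClaim:
        claimName: shared-data                # illustrative PVC name
```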


@@ -0,0 +1,58 @@
apiVersion: tekton.dev/v1beta1
kind: Task
metadata:
  name: gcs-upload
  labels:
    app.kubernetes.io/version: "0.1"
  annotations:
    tekton.dev/pipelines.minVersion: "0.12.1"
    tekton.dev/categories: Cloud, Storage
    tekton.dev/tags: cloud, gcs
    tekton.dev/displayName: "Upload to GCS"
    tekton.dev/platforms: "linux/amd64"
spec:
  description: >-
    A Task that uploads to a GCS bucket.
    This Task uploads files or directories from a Workspace to a GCS bucket.
  workspaces:
    - name: credentials
      description: A secret with a service account key to use as GOOGLE_APPLICATION_CREDENTIALS.
    - name: source
      description: A workspace where files will be uploaded from.
  params:
    - name: path
      description: The path to files or directories relative to the source workspace that you'd like to upload.
      type: string
    - name: location
      description: The address (including "gs://") where you'd like to upload files to.
      type: string
    - name: serviceAccountPath
      description: The path inside the credentials workspace to the GOOGLE_APPLICATION_CREDENTIALS key file.
      type: string
      default: service_account.json
  steps:
    - name: upload
      image: gcr.io/google.com/cloudsdktool/cloud-sdk:310.0.0@sha256:cb03669fcdb9191d55a6200f2911fff3baec0b8c39b156d95b68aabe975ac506 #tag: 310.0.0
      script: |
        #!/usr/bin/env bash
        set -xe

        CRED_PATH="$(workspaces.credentials.path)/$(params.serviceAccountPath)"
        SOURCE="$(workspaces.source.path)/$(params.path)"

        # Point GOOGLE_APPLICATION_CREDENTIALS at the key file if it exists in the workspace.
        if [[ -f "$CRED_PATH" ]]; then
          GOOGLE_APPLICATION_CREDENTIALS="$CRED_PATH"
        fi

        # If credentials are available, activate the service account before uploading.
        if [[ "${GOOGLE_APPLICATION_CREDENTIALS}" != "" ]]; then
          echo GOOGLE_APPLICATION_CREDENTIALS is set, activating Service Account...
          gcloud auth activate-service-account --key-file=${GOOGLE_APPLICATION_CREDENTIALS}
        fi

        # Directories are synced with rsync; single files are copied.
        if [[ -d "$SOURCE" ]]; then
          gsutil -m rsync -d -r "$SOURCE" "$(params.location)"
        else
          gsutil cp "$SOURCE" "$(params.location)"
        fi


@@ -0,0 +1,124 @@
apiVersion: triggers.tekton.dev/v1alpha1
kind: TriggerTemplate
metadata:
  name: gcs-triggertemplate
spec:
  params:
    - name: pvc-name
    - name: project-name
    - name: secret-name
  resourcetemplates:
    - apiVersion: tekton.dev/v1beta1
      kind: PipelineRun
      metadata:
        name: gcs-pr-$(uid)
      spec:
        workspaces:
          - name: data
            persistentVolumeClaim:
              claimName: $(params.pvc-name)
          - name: credentials
            secret:
              secretName: $(params.secret-name)
        pipelineSpec:
          workspaces:
            - name: data
            - name: credentials
          tasks:
            - name: gcs-create-bucket
              taskRef:
                name: gcs-create-bucket
              workspaces:
                - name: credentials
                  workspace: credentials
              params:
                - name: bucketName
                  value: gs://tekton-test-bucket-$(uid)
                - name: project
                  value: $(params.project-name)
            - name: create-data
              runAfter: [gcs-create-bucket]
              taskSpec:
                workspaces:
                  - name: data
                steps:
                  - name: write-data
                    image: ubuntu
                    script: |
                      #!/usr/bin/env bash
                      set -xe
                      mkdir -p $(workspaces.data.path)/$(uid)/test_data/
                      echo "Test data $(uid)" > $(workspaces.data.path)/$(uid)/test_data/test.txt
              workspaces:
                - name: data
                  workspace: data
            - name: gcs-upload
              taskRef:
                name: gcs-upload
              runAfter: [create-data]
              workspaces:
                - name: credentials
                  workspace: credentials
                - name: source
                  workspace: data
              params:
                - name: path
                  value: $(uid)
                - name: location
                  value: gs://tekton-test-bucket-$(uid)
                - name: serviceAccountPath
                  value: service_account.json
            - name: gcs-download
              taskRef:
                name: gcs-download
              runAfter: [gcs-upload]
              workspaces:
                - name: credentials
                  workspace: credentials
                - name: output
                  workspace: data
              params:
                - name: path
                  value: download-$(uid)
                - name: location
                  value: gs://tekton-test-bucket-$(uid)
                - name: typeDir
                  value: "true"
                - name: serviceAccountPath
                  value: service_account.json
            - name: verify-data
              runAfter: [gcs-download]
              workspaces:
                - name: data
                  workspace: data
              taskSpec:
                workspaces:
                  - name: data
                steps:
                  - image: ubuntu
                    script: |
                      #!/usr/bin/env bash
                      set -xe
                      cat $(workspaces.data.path)/download-$(uid)/test_data/test.txt | grep "Test data $(uid)"
            - name: delete-bucket
              taskRef:
                name: gcs-delete-bucket
              runAfter: [gcs-download]
              workspaces:
                - name: credentials
                  workspace: credentials
              params:
                - name: bucketName
                  value: gs://tekton-test-bucket-$(uid)
---
apiVersion: v1
kind: PersistentVolumeClaim
metadata:
  name: gcs-test-storage
spec:
  resources:
    requests:
      storage: 16Mi
  volumeMode: Filesystem
  accessModes:
    - ReadWriteOnce