Skip to content

Commit

Permalink
add prefix for remote storage (#1790)
Browse files · Browse the repository at this point in the history
  • Loading branch information
DanielZhangQD authored Feb 27, 2020
1 parent ba1af6c commit 430fa95
Show file tree
Hide file tree
Showing 3 changed files with 28 additions and 0 deletions.
13 changes: 13 additions & 0 deletions charts/tidb-backup/templates/scripts/_start_backup.sh.tpl
Original file line number Diff line number Diff line change
Expand Up @@ -67,14 +67,23 @@ $creds
EOF

cd "${backup_base_dir}"
{{- if .Values.gcp.prefix }}
tar -cf - "${backup_name}" | pigz -p 16 \
| rclone --config /tmp/rclone.conf rcat gcp:${bucket}/{{ .Values.gcp.prefix }}/${backup_name}/${backup_name}.tgz
{{- else }}
tar -cf - "${backup_name}" | pigz -p 16 \
| rclone --config /tmp/rclone.conf rcat gcp:${bucket}/${backup_name}/${backup_name}.tgz
{{- end }}
{{- end }}

{{- if .Values.ceph }}
uploader \
--cloud=ceph \
{{- if .Values.ceph.prefix }}
--bucket={{ .Values.ceph.bucket }}/{{ .Values.ceph.prefix }} \
{{- else }}
--bucket={{ .Values.ceph.bucket }} \
{{- end }}
--endpoint={{ .Values.ceph.endpoint }} \
--backup-dir=${dirname}
{{- end }}
Expand All @@ -83,6 +92,10 @@ uploader \
uploader \
--cloud=aws \
--region={{ .Values.s3.region }} \
{{- if .Values.s3.prefix }}
--bucket={{ .Values.s3.bucket }}/{{ .Values.s3.prefix }} \
{{- else }}
--bucket={{ .Values.s3.bucket }} \
{{- end }}
--backup-dir=${dirname}
{{- end }}
12 changes: 12 additions & 0 deletions charts/tidb-backup/templates/scripts/_start_restore.sh.tpl
Original file line number Diff line number Diff line change
Expand Up @@ -7,15 +7,23 @@ host=`echo {{ .Values.clusterName }}_TIDB_SERVICE_HOST | tr '[a-z]' '[A-Z]' | tr
{{- if .Values.gcp }}
downloader \
--cloud=gcp \
{{- if .Values.gcp.prefix }}
--bucket={{ .Values.gcp.bucket }}/{{ .Values.gcp.prefix }} \
{{- else }}
--bucket={{ .Values.gcp.bucket }} \
{{- end }}
--srcDir=${BACKUP_NAME} \
--destDir=/data
{{- end }}

{{- if .Values.ceph }}
downloader \
--cloud=ceph \
{{- if .Values.ceph.prefix }}
--bucket={{ .Values.ceph.bucket }}/{{ .Values.ceph.prefix }} \
{{- else }}
--bucket={{ .Values.ceph.bucket }} \
{{- end }}
--endpoint={{ .Values.ceph.endpoint }} \
--srcDir=${BACKUP_NAME} \
--destDir=/data
Expand All @@ -25,7 +33,11 @@ downloader \
downloader \
--cloud=aws \
--region={{ .Values.s3.region }} \
{{- if .Values.s3.prefix }}
--bucket={{ .Values.s3.bucket }}/{{ .Values.s3.prefix }} \
{{- else }}
--bucket={{ .Values.s3.bucket }} \
{{- end }}
--srcDir=${BACKUP_NAME} \
--destDir=/data
{{- end }}
Expand Down
3 changes: 3 additions & 0 deletions charts/tidb-backup/values.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -104,6 +104,7 @@ restoreUsingExistingVolume: true
# backup to or restore from gcp bucket, the backup path is in the form of <clusterName>-<name>
gcp: {}
# bucket: ""
# prefix: ""
# secretName is not necessary on GKE if you use the workload identity feature
# secretName is the name of the secret which stores the gcp service account credentials json file
# The service account must have read/write permission to the above bucket.
Expand All @@ -117,6 +118,7 @@ gcp: {}
ceph: {}
# endpoint: ""
# bucket: ""
# prefix: ""
# secretName is the name of the secret which stores ceph object store access key and secret key
# You can create the secret by:
# kubectl create secret generic ceph-backup-secret --namespace=<namespace> --from-literal=access_key=<access-key> --from-literal=secret_key=<secret-key>
Expand All @@ -126,6 +128,7 @@ ceph: {}
s3: {}
# region: ""
# bucket: ""
# prefix: ""
# secretName is the name of the secret which stores s3 object store access key and secret key
# This is not necessary on AWS. Instead you should be able to get the credentials from the EKS service IAM role.
# You can create the secret by:
Expand Down

0 comments on commit 430fa95

Please sign in to comment.