Commit 2d48956

pvbackup: Refactor to native rclone configuration
* Instead of trying to configure individual providers, just use a k8s-provided rclone configuration file.
* This allows us to use any rclone-supported remote backend.
1 parent 2d3f7fe commit 2d48956
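For context, a minimal sketch of the kind of rclone.conf the container now expects to find at /root/.config/rclone/rclone.conf. The remote name "backup" matches the script's REMOTE_NAME default, and the S3 options mirror what the removed `rclone config create` call used to pass; the endpoint and credentials are placeholders, and in-cluster the file would normally be mounted from a Secret rather than written like this:

```sh
# Illustrative only, not part of this commit: write an rclone remote named
# "backup" (the script's REMOTE_NAME default) for an S3-compatible backend,
# e.g. when testing outside the cluster. Any rclone-supported backend works.
mkdir -p "$HOME/.config/rclone"
cat > "$HOME/.config/rclone/rclone.conf" <<'EOF'
[backup]
type = s3
provider = Other
access_key_id = REPLACE_ME
secret_access_key = REPLACE_ME
endpoint = https://s3.example.com
acl = private
no_check_bucket = true
EOF
```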

File tree

3 files changed: +33 -56 lines changed

containers/tools/pvbackup/README.md

Lines changed: 8 additions & 15 deletions
@@ -4,27 +4,20 @@ I'm a simple container to backup/restore encrypted persistent volume data to an
 
 ## Usage
 
-### Docker
-**Backup**
-```
-docker run -it -e S3_BUCKET="" -e S3_KEY="" -e S3_SECRET="" -e TWOSECRET="" -V volume:/pvs/volume docker.io/haiku/persistsync backup volume
-```
+### Volumes
 
-**Restore**
-```
-docker run -it -e S3_BUCKET="" -e S3_KEY="" -e S3_SECRET="" -e TWOSECRET="" -V volume:/pvs/volume docker.io/haiku/persistsync restore volume
-```
+#### Required
+
+* /root/.config/rclone/rclone.conf containing the rclone configuration
+* /root/.config/twosecret containing the encryption key for the backups
 
 ### Environment Flags
 
 #### Required
 
-* S3_ENDPOINT - s3 endpoint
-* S3_BUCKET - s3 bucket name
-* S3_KEY - s3 bucket access key
-* S3_SECRET - s3 bucket secret key
-* TWOSECRET - encryption password for backup
+* REMOTE_PREFIX - prefix path on remote. Likely bucket name for S3
 
 #### Optional
 
-* S3_MAX_AGE - maximum backup age in bucket. ex: 30d,1y,etc
+* REMOTE_NAME - name of remote specified in configuration file (defaults to "backup")
+* REMOTE_MAX_AGE - maximum backup age in bucket. ex: 30d,1y,etc
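Since the old docker run examples were dropped from the README, here is a sketch of how an equivalent invocation could look against this version, mounting the two required files instead of passing S3 credentials. The image name, container paths, and backup/restore arguments come from the old examples; the bucket name and host-side file locations are illustrative:

```sh
# Illustrative only: back up a volume with the refactored image, providing
# the rclone config and encryption key as read-only mounts. REMOTE_NAME is
# omitted here because it defaults to "backup".
docker run -it \
  -e REMOTE_PREFIX="my-bucket" \
  -v "$HOME/.config/rclone/rclone.conf:/root/.config/rclone/rclone.conf:ro" \
  -v "$HOME/.config/twosecret:/root/.config/twosecret:ro" \
  -v volume:/pvs/volume \
  docker.io/haiku/persistsync backup volume
```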

containers/tools/pvbackup/VERSION

Lines changed: 1 addition & 1 deletion
@@ -1,2 +1,2 @@
 # ONLY UPDATE ONCE READY TO BUILD NEW RELEASE
-2.0.6
+3.0.0

containers/tools/pvbackup/pvsync.sh

Lines changed: 24 additions & 40 deletions
@@ -16,57 +16,41 @@ if ! [ -x "$(command -v gpg)" ]; then
   exit 1
 fi
 
-
 BASE="/pvs"
 ACTION="$1"
 VOLUME="$2"
 
-S3_NAME="s3remote"
-
-#S3_HOST="http://s3.wasabisys.com"
-#S3_BUCKET=""
-#S3_KEY=""
-#S3_SECRET=""
-#TWOSECRET=""
+RCLONE_CONFIG_PATH="$HOME/.config/rclone/rclone.conf"
+TWOSECRET_PATH="$HOME/.config/twosecret"
 
-if [ -z "$S3_HOST" ]; then
-  echo "Please set S3_HOST!"
-  exit 1
-fi
-if [ -z "$S3_BUCKET" ]; then
-  echo "Please set S3_BUCKET!"
-  exit 1
-fi
-if [ -z "$S3_KEY" ]; then
-  echo "Please set S3_KEY!"
+if [ ! -f "$TWOSECRET_PATH" ]; then
+  echo "Missing twosecret key at $TWOSECRET_PATH"
   exit 1
 fi
-if [ -z "$S3_SECRET" ]; then
-  echo "Please set S3_SECRET!"
+
+if [ ! -f "$RCLONE_CONFIG_PATH" ]; then
+  echo "Missing rclone configuration at $RCLONE_CONFIG_PATH"
   exit 1
 fi
-if [ -z "$TWOSECRET" ]; then
-  echo "Please set TWOBUCKET!"
+
+if [ -z "$REMOTE_PREFIX" ]; then
+  echo "REMOTE_PREFIX is not defined! This is the bucket name for s3 or another prefix path"
   exit 1
 fi
-if [ -z "$S3_PROVIDER" ]; then
-  echo "Assuming S3 provider Other"
-  S3_PROVIDER="Other"
+
+if [ -z "$REMOTE_NAME" ]; then
+  echo "REMOTE_NAME is not defined. Defaulting to 'backup' (make sure this matches the config file)"
+  REMOTE_NAME="backup"
 fi
 
 if [[ ! -d "$BASE/$VOLUME" ]]; then
   echo "Error: '$BASE/$VOLUME' isn't present on local container! (pvc not mounted?)"
   exit 1
 fi
 
-rclone config create $S3_NAME s3 \
-  provider=$S3_PROVIDER env_auth=false access_key_id=$S3_KEY \
-  secret_access_key=$S3_SECRET region=$S3_REGION \
-  endpoint=$S3_HOST no_check_bucket=true \
-  acl=private > /dev/null
-
-if [[ $? -ne 0 ]]; then
-  echo "Error: Problem encounted configuring s3! (rclone)"
+rclone ls $REMOTE_NAME:$REMOTE_PREFIX/pv-$VOLUME > /dev/null
+if [ $? -ne 0 ]; then
+  echo "Error: Unable to see within configured storage provider!"
   exit 1
 fi
 
@@ -91,25 +75,25 @@ case $ACTION in
       exit 1
     fi
    rm /tmp/$SNAPSHOT_NAME
-    rclone copy /tmp/$SNAPSHOT_NAME.gpg $S3_NAME:$S3_BUCKET/pv-$VOLUME/
+    rclone copy /tmp/$SNAPSHOT_NAME.gpg $REMOTE_NAME:$REMOTE_PREFIX/pv-$VOLUME/
     if [[ $? -ne 0 ]]; then
       echo "Error: Problem encountered during upload! (rclone)"
       rm /tmp/$SNAPSHOT_NAME.gpg
       exit 1
     fi
-    if [[ ! -z "$S3_MAX_AGE" ]]; then
-      echo "Cleaning up old backups for $VOLUME over $S3_MAX_AGE old..."
-      rclone delete --min-age "$S3_MAX_AGE" $S3_NAME:$S3_BUCKET/pv-$VOLUME/
+    if [[ ! -z "$REMOTE_MAX_AGE" ]]; then
+      echo "Cleaning up old backups for $VOLUME over $REMOTE_MAX_AGE old..."
+      rclone delete --min-age "$REMOTE_MAX_AGE" $REMOTE_NAME:$REMOTE_PREFIX/pv-$VOLUME/
     fi
-    echo "Snapshot of ${VOLUME} completed successfully! ($S3_BUCKET/pv-$VOLUME/$SNAPSHOT_NAME.gpg)"
+    echo "Snapshot of ${VOLUME} completed successfully! ($REMOTE_PREFIX/pv-$VOLUME/$SNAPSHOT_NAME.gpg)"
     ;;
 
   restore)
     # We assume the latest is at the bottom of the rclone ls.
    # It seems to be true in my testing so far... but this feels sketch
-    LATEST=$(rclone ls $S3_NAME:$S3_BUCKET/pv-$VOLUME/ | tail -1 | awk '{print $2}')
+    LATEST=$(rclone ls $REMOTE_NAME:$REMOTE_PREFIX/pv-$VOLUME/ | tail -1 | awk '{print $2}')
     echo "Found $LATEST to be the latest snapshot..."
-    rclone copy $S3_NAME:$S3_BUCKET/pv-$VOLUME/$LATEST /tmp/$LATEST
+    rclone copy $REMOTE_NAME:$REMOTE_PREFIX/pv-$VOLUME/$LATEST /tmp/$LATEST
     if [[ $? -ne 0 ]]; then
       echo "Error: Problem encountered getting snapshot from s3! (rclone)"
       rm /tmp/$LATEST
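The restore path still picks the "latest" snapshot by taking the last line of `rclone ls`, which the script's own comment flags as fragile. A possible alternative, not part of this commit, would be to sort explicitly on modification time via `rclone lsf`; the sketch below assumes lsf's default ";" field separator:

```sh
# Sketch only: emit "<modtime>;<path>" per object, sort lexically on the
# timestamp, and keep the newest path. Adjust --separator / cut -d if the
# default ";" separator assumption does not hold for your rclone version.
LATEST=$(rclone lsf --format "tp" "$REMOTE_NAME:$REMOTE_PREFIX/pv-$VOLUME/" \
  | sort | tail -1 | cut -d';' -f2)
```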
