Compare commits: 1288239de3...main (18 commits)

| SHA1 |
|---|
| 1e3600e140 |
| 67440a7f1f |
| 1f29978152 |
| b607230f64 |
| a6071706c6 |
| f501a379ff |
| d65c9d32d0 |
| d576a7f752 |
| a6d878638b |
| 7bdb1a17b4 |
| 082e5188aa |
| 014b843a2c |
| 05dfe25557 |
| 957697dc1a |
| f3d579a0e9 |
| f19b3fa1ee |
| 4d18ac3fc8 |
| 9cb9e30ad7 |
.drone.yml — 37 added lines (new file)
@@ -0,0 +1,37 @@
+---
+# drone encrypt ResiLien/docker-s3-volume-watch $REGISTRY_USERNAME
+kind: secret
+name: REGISTRY_USERNAME
+data: nWD+Q9IZSvWw6aDlxBgjvaQqsyU9Muub4A9wJMIfZ8A18g==
+
+---
+# drone encrypt ResiLien/docker-s3-volume-watch $REGISTRY_PASSWORD
+kind: secret
+name: REGISTRY_PASSWORD
+data: fshphgiLW3gyuwhKHgSe7m6Y/LXtse2VEYABvjWYCov8w85+CMa5hjkHSiO+dgRCMOC4+OE5ZlZjD/5hAURi6TsBTZw0h5fWsnEjMR6L
+
+---
+kind: pipeline
+type: docker
+name: amd64
+
+platform:
+  os: linux
+  arch: amd64
+
+steps:
+  - name: docker
+    image: thegeeklab/drone-docker-buildx
+    privileged: true
+    settings:
+      registry: registry.weko.io
+      username:
+        from_secret: REGISTRY_USERNAME
+      password:
+        from_secret: REGISTRY_PASSWORD
+      repo: registry.weko.io/simonc/docker-s3-volume-watch
+      tags:
+        - latest
+      platforms:
+        - linux/arm64
+        - linux/amd64
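The pipeline step above drives docker buildx through the drone-docker-buildx plugin. A minimal local sketch of the same build, assuming buildx is available and you are already logged in to registry.weko.io (only the registry, repo and platforms are taken from the settings above; everything else is an assumed local setup):

```bash
# Hedged sketch: hand-run equivalent of the pipeline's buildx step.
docker login registry.weko.io
docker buildx create --use          # one-off: set up a multi-platform builder
docker buildx build \
  --platform linux/arm64,linux/amd64 \
  --tag registry.weko.io/simonc/docker-s3-volume-watch:latest \
  --push .
```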
.gitignore — 1 changed line (vendored)
@@ -1 +0,0 @@
-.vagrant
Dockerfile — 11 changed lines
@@ -1,10 +1,11 @@
-FROM alpine:3.6
-MAINTAINER Elementar Sistemas <contato@elementarsistemas.com.br>
+FROM peakcom/s5cmd:v2.1.0
+label maintainer="RésiLien <admin+docker@resilien.fr>"
 
-RUN apk --no-cache add bash py-pip && pip install awscli
+RUN apk --no-cache add inotify-tools bash tzdata
 ADD watch /watch
 
 VOLUME /data
 
-ENTRYPOINT [ "./watch" ]
-CMD ["/data"]
+ENV S3_SYNC_FLAGS ""
+ENTRYPOINT [ "/watch" ]
+CMD ["help"]
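The rebuilt image is based on peakcom/s5cmd and is driven entirely by the watch script; the default CMD `help` appears intended to surface the usage text when no paths are given. A minimal, assumed invocation — the image name is taken from the .drone.yml above, while bucket and credentials are placeholders:

```bash
# Hedged sketch: sync a host directory with a bucket prefix.
# s5cmd picks up the standard AWS credential environment variables.
docker run -d --name my-data-container \
  -e AWS_ACCESS_KEY_ID=... \
  -e AWS_SECRET_ACCESS_KEY=... \
  -v "$(pwd)/data:/data" \
  registry.weko.io/simonc/docker-s3-volume-watch /data s3://mybucket/someprefix
```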
Makefile — 3 changed lines
@@ -1,9 +1,8 @@
-NAME = elementar/s3-volume
+NAME = simonc/s3-sync
 
 .PHONY: build release
 
 build:
 	docker build -t $(NAME):latest .
 
 release:
 	docker push $(NAME):latest
README.md — 29 changed lines
@@ -1,4 +1,14 @@
-# docker-s3-volume
+# docker-s3-volume-watch
 
+The idea is to keep a directory in sync with an S3 bucket.
+
+How it works:
+- the command takes the source directory `/data/` and an S3 bucket `s3://bucket` as parameters
+- it syncs the bucket with the directory, then starts `inotify` so the bucket is re-synced as soon as an event occurs in the directory (modify, create, delete)
+
+Inspiration: [docker-s3-volume](https://github.com/elementar/docker-s3-volume)
+
+## Original README
+
 Creates a Docker container that is restored and backed up to a directory on s3.
 You could use this to run short lived processes that work with and persist data to and from S3.
@@ -49,6 +59,15 @@ Any environment variable available to the `aws-cli` command can be used. see
 http://docs.aws.amazon.com/cli/latest/userguide/cli-environment.html for more
 information.
 
+### Configuring an endpoint URL
+
+If you are using an S3-compatible service (such as Oracle OCI Object Storage), you may want to set the service's endpoint URL:
+
+```bash
+docker run -d --name my-data-container -e ENDPOINT_URL=... \
+  elementar/s3-volume /data s3://mybucket/someprefix
+```
+
 ### Forcing a sync
 
 A final sync will always be performed on container shutdown. A sync can be
@@ -69,6 +88,14 @@ docker run -d --name my-data-container \
   elementar/s3-volume --force-restore /data s3://mybucket/someprefix
 ```
 
+### Deletion and sync
+
+By default if there are files that are deleted in your local file system, those will be deleted remotely. If you wish to turn this off, set the environment variable `S3_SYNC_FLAGS` to an empty string:
+
+```bash
+docker run -d -e S3_SYNC_FLAGS="" elementar/s3-volume /data s3://mybucket/someprefix
+```
+
 ### Using Compose and named volumes
 
 Most of the time, you will use this image to sync data for another container.
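The "Using Compose and named volumes" section is truncated in this hunk; as a rough illustration of the pattern it describes, expressed with plain docker commands rather than a compose file (the volume, container and app image names are assumptions, not taken from the repository):

```bash
# Hedged sketch: one named volume shared between the sync container and an app container.
docker volume create s3data
docker run -d --name s3-sync \
  -v s3data:/data \
  registry.weko.io/simonc/docker-s3-volume-watch /data s3://mybucket/someprefix
docker run -d --name app \
  -v s3data:/usr/share/nginx/html \
  nginx:alpine
```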
s3cfg — 38 removed lines (file deleted)
@@ -1,38 +0,0 @@
-[default]
-access_key =
-bucket_location = US
-cloudfront_host = cloudfront.amazonaws.com
-cloudfront_resource = /2010-07-15/distribution
-default_mime_type = binary/octet-stream
-delete_removed = False
-dry_run = False
-encoding = UTF-8
-encrypt = False
-follow_symlinks = False
-force = False
-get_continue = False
-gpg_command = /usr/local/bin/gpg
-gpg_decrypt = %(gpg_command)s -d --verbose --no-use-agent --batch --yes --passphrase-fd %(passphrase_fd)s -o %(output_file)s %(input_file)s
-gpg_encrypt = %(gpg_command)s -c --verbose --no-use-agent --batch --yes --passphrase-fd %(passphrase_fd)s -o %(output_file)s %(input_file)s
-gpg_passphrase =
-guess_mime_type = True
-host_base = s3.amazonaws.com
-host_bucket = %(bucket)s.s3.amazonaws.com
-human_readable_sizes = False
-list_md5 = False
-log_target_prefix =
-preserve_attrs = True
-progress_meter = True
-proxy_host =
-proxy_port = 0
-recursive = False
-recv_chunk = 4096
-reduced_redundancy = False
-secret_key =
-send_chunk = 4096
-simpledb_host = sdb.amazonaws.com
-skip_existing = False
-socket_timeout = 300
-urlencoding_mode = normal
-use_https = False
-verbosity = WARNING
watch — 48 changed lines
@@ -2,6 +2,13 @@
 
 [[ "$TRACE" ]] && set -x
 
+DEBUG=${DEBUG:-"Unknown Error"}
+
+function log {
+  now=`date +"%Y-%m-%d %T"`
+  echo "[${now}] $1"
+}
+
 function usage {
   cat <<-EOF
 	Usage: $PROGNAME [OPTIONS] <local-path> <remote-path>
@@ -14,7 +21,7 @@ EOF
 }
 
 function error_exit {
-  echo "${1:-"Unknown Error"}" 1>&2
+  log "${1:-"Unknown Error"}" 1>&2
   exit 1
 }
 
@@ -37,8 +44,10 @@ while true; do
 done
 
 PROGNAME=$0
+S3_PROG=/s5cmd
 LOCAL=$1
 REMOTE=$2
+WATCH_EVENTS=modify
 
 function restore {
   if [ "$(ls -A $LOCAL)" ]; then
@@ -47,40 +56,49 @@ function restore {
     fi
   fi
 
-  echo "restoring $REMOTE => $LOCAL"
-  if ! aws s3 sync "$REMOTE" "$LOCAL"; then
+  log "restoring $REMOTE => $LOCAL"
+  if ! $S3_PROG sync "$REMOTE/*" "$LOCAL"; then
     error_exit "restore failed"
   fi
 }
 
 function backup {
-  echo "backup $LOCAL => $REMOTE"
-  if ! aws s3 sync "$LOCAL" "$REMOTE" --delete; then
-    echo "backup failed" 1>&2
+  log "backup $LOCAL => $REMOTE"
+  if ! $S3_PROG sync "$LOCAL" "$REMOTE" $S3_SYNC_FLAGS; then
+    log "backup failed" 1>&2
     return 1
   fi
 }
 
 function final_backup {
-  echo "backup $LOCAL => $REMOTE"
-  while ! aws s3 sync "$LOCAL" "$REMOTE" --delete; do
-    echo "backup failed, will retry" 1>&2
+  log "backup $LOCAL => $REMOTE"
+  while ! $S3_PROG sync "$LOCAL" "$REMOTE" $S3_SYNC_FLAGS; do
+    log "backup failed, will retry" 1>&2
     sleep 1
   done
   exit 0
 }
 
 function idle {
-  echo "ready"
+  log "ready"
+  log "RESTORE_INTERVAL: ${RESTORE_INTERVAL}"
   while true; do
-    sleep ${BACKUP_INTERVAL:-42} &
-    wait $!
-    [ -n "$BACKUP_INTERVAL" ] && backup
+    restore
+    if [[ -v RESTORE_INTERVAL ]]; then
+      backup
+      timeout ${RESTORE_INTERVAL} inotifywait -m -e modify -e move -e create -e delete --timefmt '%Y-%m-%d %H:%M:%S' --format '%T %f' $LOCAL | while read date time file; do log "The file '$file' appeared in directory '$path' via '$action'"; backup; done
+    else
+      log "Without restore interval"
+      inotifywait -m -e modify -e move -e create -e delete --timefmt '%Y-%m-%d %H:%M:%S' --format '%T %f' $LOCAL |
+      while read date time file
+      do
+        log "The file '$file' appeared in directory '$path' via '$action'"
+        backup
+      done
+    fi
   done
 }
 
-restore
+
 
 trap final_backup SIGHUP SIGINT SIGTERM
 trap "backup; idle" USR1
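The reworked idle loop re-runs restore on every pass and, when RESTORE_INTERVAL is set, bounds each inotifywait session with timeout so the directory is periodically re-restored from the bucket; without it, inotifywait watches indefinitely and every event simply triggers a backup. A hedged sketch of how this might be exercised (the interval value, volume and container names are assumptions):

```bash
# Hedged sketch: periodic restore every 300 seconds plus event-driven backups.
docker run -d --name s3-watch \
  -e RESTORE_INTERVAL=300 \
  -e AWS_ACCESS_KEY_ID=... -e AWS_SECRET_ACCESS_KEY=... \
  -v s3data:/data \
  registry.weko.io/simonc/docker-s3-volume-watch /data s3://mybucket/someprefix

# The USR1 trap is kept: force an immediate backup, then keep watching.
docker kill -s USR1 s3-watch
```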