Compare commits

..

7 Commits

SHA1        Message                           CI (continuous-integration/drone/push)  Date
1e3600e140  feat: Add timezone                passing                                 2023-07-17 09:35:27 +02:00
67440a7f1f  feat: Add datetime on log         passing                                 2023-07-16 10:50:30 +02:00
1f29978152  feat: Add multi arch              passing                                 2023-06-26 15:23:39 +02:00
b607230f64  feat: Add registry configuration  passing                                 2023-06-26 15:11:49 +02:00
a6071706c6  feat: Update repository name      failing                                 2023-06-26 14:57:45 +02:00
f501a379ff  feat: Add drone configuration     (none)                                  2023-06-26 14:56:05 +02:00
d65c9d32d0  wip                               (none)                                  2023-06-26 14:49:37 +02:00
7 changed files with 87 additions and 74 deletions

.drone.yml (new file, +37)

@@ -0,0 +1,37 @@
---
# drone encrypt ResiLien/docker-s3-volume-watch $REGISTRY_USERNAME
kind: secret
name: REGISTRY_USERNAME
data: nWD+Q9IZSvWw6aDlxBgjvaQqsyU9Muub4A9wJMIfZ8A18g==
---
# drone encrypt ResiLien/docker-s3-volume-watch $REGISTRY_PASSWORD
kind: secret
name: REGISTRY_PASSWORD
data: fshphgiLW3gyuwhKHgSe7m6Y/LXtse2VEYABvjWYCov8w85+CMa5hjkHSiO+dgRCMOC4+OE5ZlZjD/5hAURi6TsBTZw0h5fWsnEjMR6L
---
kind: pipeline
type: docker
name: amd64

platform:
  os: linux
  arch: amd64

steps:
  - name: docker
    image: thegeeklab/drone-docker-buildx
    privileged: true
    settings:
      registry: registry.weko.io
      username:
        from_secret: REGISTRY_USERNAME
      password:
        from_secret: REGISTRY_PASSWORD
      repo: registry.weko.io/simonc/docker-s3-volume-watch
      tags:
        - latest
      platforms:
        - linux/arm64
        - linux/amd64
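A note on the two secret documents above: per the inline comments, the data: values are ciphertexts produced by the standard Drone CLI, which seals a plaintext against the repository. Regenerating one for a different registry account would look like this (the plaintext value is illustrative):

    # prints a ciphertext to paste into the corresponding `data:` field
    drone encrypt ResiLien/docker-s3-volume-watch 'my-registry-username'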

.gitignore (vendored, -1)

@@ -1 +0,0 @@
-.vagrant

Dockerfile

@@ -1,11 +1,11 @@
-FROM alpine:3.10
-label maintainer="Elementar Sistemas <contato@elementarsistemas.com.br>"
+FROM peakcom/s5cmd:v2.1.0
+label maintainer="RésiLien <admin+docker@resilien.fr>"

-RUN apk --no-cache add bash py3-pip && pip3 install --no-cache-dir awscli
+RUN apk --no-cache add inotify-tools bash tzdata

 ADD watch /watch

 VOLUME /data

-ENV S3_SYNC_FLAGS "--delete"
-ENTRYPOINT [ "./watch" ]
-CMD ["/data"]
+ENV S3_SYNC_FLAGS ""
+ENTRYPOINT [ "/watch" ]
+CMD ["help"]

Makefile

@@ -1,9 +1,8 @@
-NAME = elementar/s3-volume
+NAME = simonc/s3-sync

 .PHONY: build release

 build:
 	docker build -t $(NAME):latest .

 release:
 	docker push $(NAME):latest
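The Makefile still produces a single-arch image; the multi-arch build ("feat: Add multi arch") happens in CI via drone-docker-buildx. A rough local equivalent, assuming a Docker Buildx builder is configured and you are logged in to the registry:

    docker buildx build \
      --platform linux/arm64,linux/amd64 \
      -t registry.weko.io/simonc/docker-s3-volume-watch:latest \
      --push .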

README.md

@@ -1,10 +1,14 @@
-# docker-s3-volume
+# docker-s3-volume-watch

-[![Docker Build Status](https://img.shields.io/docker/build/elementar/s3-volume?style=plastic)](https://hub.docker.com/r/elementar/s3-volume)
-[![Docker Layers Count](https://img.shields.io/microbadger/layers/elementar/s3-volume?style=plastic)](https://hub.docker.com/r/elementar/s3-volume)
-[![Docker Version](https://img.shields.io/docker/v/elementar/s3-volume?style=plastic)](https://hub.docker.com/r/elementar/s3-volume)
-[![Docker Pull Count](https://img.shields.io/docker/pulls/elementar/s3-volume?style=plastic)](https://hub.docker.com/r/elementar/s3-volume)
-[![Docker Stars](https://img.shields.io/docker/stars/elementar/s3-volume?style=plastic)](https://hub.docker.com/r/elementar/s3-volume)
+The idea is to keep a directory in sync with an S3 volume.
+
+How it works:
+- the command takes the source directory `/data/` and an S3 bucket `s3://bucket` as parameters
+- it synchronizes the bucket with the directory, then starts `inotify` to resynchronize the bucket whenever an event occurs in the directory (modification, creation, deletion)
+
+Inspiration: [docker-s3-volume](https://github.com/elementar/docker-s3-volume)
+
+## Original README

 Creates a Docker container that is restored and backed up to a directory on s3.
 You could use this to run short lived processes that work with and persist data to and from S3.
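The behaviour the README describes reduces to one blocking pipeline: an initial pull, then a push on every filesystem event. A stripped-down sketch of the principle (the real script, shown below, adds logging, retries, and signal handling; bucket name is illustrative):

    /s5cmd sync 's3://bucket/*' /data/
    inotifywait -m -e modify -e move -e create -e delete /data |
      while read -r _; do /s5cmd sync /data/ s3://bucket/; done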

s3cfg (deleted, -38)

@ -1,38 +0,0 @@
[default]
access_key =
bucket_location = US
cloudfront_host = cloudfront.amazonaws.com
cloudfront_resource = /2010-07-15/distribution
default_mime_type = binary/octet-stream
delete_removed = False
dry_run = False
encoding = UTF-8
encrypt = False
follow_symlinks = False
force = False
get_continue = False
gpg_command = /usr/local/bin/gpg
gpg_decrypt = %(gpg_command)s -d --verbose --no-use-agent --batch --yes --passphrase-fd %(passphrase_fd)s -o %(output_file)s %(input_file)s
gpg_encrypt = %(gpg_command)s -c --verbose --no-use-agent --batch --yes --passphrase-fd %(passphrase_fd)s -o %(output_file)s %(input_file)s
gpg_passphrase =
guess_mime_type = True
host_base = s3.amazonaws.com
host_bucket = %(bucket)s.s3.amazonaws.com
human_readable_sizes = False
list_md5 = False
log_target_prefix =
preserve_attrs = True
progress_meter = True
proxy_host =
proxy_port = 0
recursive = False
recv_chunk = 4096
reduced_redundancy = False
secret_key =
send_chunk = 4096
simpledb_host = sdb.amazonaws.com
skip_existing = False
socket_timeout = 300
urlencoding_mode = normal
use_https = False
verbosity = WARNING

watch (54 changed lines)

@@ -2,6 +2,13 @@
 [[ "$TRACE" ]] && set -x

+DEBUG=${DEBUG:-"Unknown Error"}
+
+function log {
+  now=`date +"%Y-%m-%d %T"`
+  echo "[${now}] $1"
+}
+
 function usage {
   cat <<-EOF
 	Usage: $PROGNAME [OPTIONS] <local-path> <remote-path>
@@ -14,7 +21,7 @@ EOF
 }

 function error_exit {
-  echo "${1:-"Unknown Error"}" 1>&2
+  log "${1:-"Unknown Error"}" 1>&2
   exit 1
 }
@@ -37,14 +44,10 @@ while true; do
 done

 PROGNAME=$0
+S3_PROG=/s5cmd
 LOCAL=$1
 REMOTE=$2

-if [ "$ENDPOINT_URL" ]; then
-  AWS="aws --endpoint-url $ENDPOINT_URL"
-else
-  AWS=aws
-fi
 WATCH_EVENTS=modify
 function restore {
   if [ "$(ls -A $LOCAL)" ]; then
@@ -53,40 +56,49 @@ function restore {
     fi
   fi

-  echo "restoring $REMOTE => $LOCAL"
-  if ! $AWS s3 sync "$REMOTE" "$LOCAL"; then
+  log "restoring $REMOTE => $LOCAL"
+  if ! $S3_PROG sync "$REMOTE/*" "$LOCAL"; then
     error_exit "restore failed"
   fi
 }

 function backup {
-  echo "backup $LOCAL => $REMOTE"
-  if ! $AWS s3 sync "$LOCAL" "$REMOTE" $S3_SYNC_FLAGS; then
-    echo "backup failed" 1>&2
+  log "backup $LOCAL => $REMOTE"
+  if ! $S3_PROG sync "$LOCAL" "$REMOTE" $S3_SYNC_FLAGS; then
+    log "backup failed" 1>&2
     return 1
   fi
 }

 function final_backup {
-  echo "backup $LOCAL => $REMOTE"
-  while ! $AWS s3 sync "$LOCAL" "$REMOTE" $S3_SYNC_FLAGS; do
-    echo "backup failed, will retry" 1>&2
+  log "backup $LOCAL => $REMOTE"
+  while ! $S3_PROG sync "$LOCAL" "$REMOTE" $S3_SYNC_FLAGS; do
+    log "backup failed, will retry" 1>&2
     sleep 1
   done
   exit 0
 }

 function idle {
-  echo "ready"
+  log "ready"
+  log "RESTORE_INTERVAL: ${RESTORE_INTERVAL}"
   while true; do
-    sleep ${BACKUP_INTERVAL:-42} &
-    wait $!
-    [ -n "$BACKUP_INTERVAL" ] && backup
+    restore
+    if [[ -v RESTORE_INTERVAL ]]; then
+      backup
+      timeout ${RESTORE_INTERVAL} inotifywait -m -e modify -e move -e create -e delete --timefmt '%Y-%m-%d %H:%M:%S' --format '%T %w %e %f' $LOCAL | while read date time dir event file; do log "The file '$file' appeared in directory '$dir' via '$event'"; backup; done
+    else
+      log "Without restore interval"
+      inotifywait -m -e modify -e move -e create -e delete --timefmt '%Y-%m-%d %H:%M:%S' --format '%T %w %e %f' $LOCAL |
+        while read date time dir event file
+        do
+          log "The file '$file' appeared in directory '$dir' via '$event'"
+          backup
+        done
+    fi
   done
 }

 restore

 trap final_backup SIGHUP SIGINT SIGTERM
 trap "backup; idle" USR1