From 6c37b92405f540d69ac16856aa231ad7d529cfe6 Mon Sep 17 00:00:00 2001 From: duanhongyi Date: Mon, 12 Nov 2018 14:48:41 +0000 Subject: [PATCH 1/6] fix(WARNING): MSG: WALE_S3_ENDPOINT defined, ignoring AWS_REGION --- Dockerfile | 3 ++- rootfs/bin/create_bucket | 4 ++-- rootfs/docker-entrypoint-initdb.d/001_setup_envdir.sh | 2 +- .../{patch_wal_e_s3.py => patch_boto_s3.py} | 2 +- rootfs/patcher-script.py | 10 ++++++---- 5 files changed, 12 insertions(+), 9 deletions(-) rename rootfs/patcher-script.d/{patch_wal_e_s3.py => patch_boto_s3.py} (82%) diff --git a/Dockerfile b/Dockerfile index 16ba718..ff83d63 100644 --- a/Dockerfile +++ b/Dockerfile @@ -36,7 +36,8 @@ RUN apt-get purge -y --auto-remove $BUILD_DEPS && \ COPY rootfs / ENV WALE_ENVDIR=/etc/wal-e.d/env RUN mkdir -p $WALE_ENVDIR -RUN python3 /patcher-script.py +RUN python3 /patcher-script.py /bin/create_bucket +RUN python3 /patcher-script.py /usr/local/bin/wal-e CMD ["/docker-entrypoint.sh", "postgres"] EXPOSE 5432 diff --git a/rootfs/bin/create_bucket b/rootfs/bin/create_bucket index 2d23f63..21377d9 100755 --- a/rootfs/bin/create_bucket +++ b/rootfs/bin/create_bucket @@ -20,7 +20,7 @@ def bucket_exists(conn, name): return True bucket_name = os.getenv('BUCKET_NAME') -region = os.getenv('AWS_REGION') +region = os.getenv('S3_REGION') if os.getenv('DATABASE_STORAGE') == "s3": conn = boto.s3.connect_to_region(region) @@ -85,6 +85,6 @@ else: port=int(os.getenv('S3_PORT')), calling_format=OrdinaryCallingFormat()) # HACK(bacongobbler): allow boto to connect to minio by changing the region name for s3v4 auth - conn.auth_region_name = os.getenv('AWS_REGION') + conn.auth_region_name = os.getenv('S3_REGION') if not bucket_exists(conn, bucket_name): conn.create_bucket(bucket_name) diff --git a/rootfs/docker-entrypoint-initdb.d/001_setup_envdir.sh b/rootfs/docker-entrypoint-initdb.d/001_setup_envdir.sh index 9c343e1..e2c00af 100755 --- a/rootfs/docker-entrypoint-initdb.d/001_setup_envdir.sh +++ b/rootfs/docker-entrypoint-initdb.d/001_setup_envdir.sh @@ -36,7 +36,7 @@ if [[ "$DATABASE_STORAGE" == "s3" || "$DATABASE_STORAGE" == "minio" ]]; then else echo "1" > AWS_INSTANCE_PROFILE fi - echo $AWS_REGION > AWS_REGION + echo $AWS_REGION > S3_REGION echo $BUCKET_NAME > BUCKET_NAME elif [ "$DATABASE_STORAGE" == "gcs" ]; then GOOGLE_APPLICATION_CREDENTIALS="/var/run/secrets/deis/objectstore/creds/key.json" diff --git a/rootfs/patcher-script.d/patch_wal_e_s3.py b/rootfs/patcher-script.d/patch_boto_s3.py similarity index 82% rename from rootfs/patcher-script.d/patch_wal_e_s3.py rename to rootfs/patcher-script.d/patch_boto_s3.py index 1cdd429..1ac58d3 100644 --- a/rootfs/patcher-script.d/patch_wal_e_s3.py +++ b/rootfs/patcher-script.d/patch_boto_s3.py @@ -4,7 +4,7 @@ def patch_wal_e_hmac_auth_v4_handler(): _init = HmacAuthV4Handler.__init__ def wrap_init(self, *args, **kwargs): _init(self, *args, **kwargs) - self.region_name = os.getenv('AWS_REGION', self.region_name) + self.region_name = os.getenv('S3_REGION', self.region_name) HmacAuthV4Handler.__init__ = wrap_init diff --git a/rootfs/patcher-script.py b/rootfs/patcher-script.py index 2f6a7cc..3e7e178 100644 --- a/rootfs/patcher-script.py +++ b/rootfs/patcher-script.py @@ -1,3 +1,5 @@ +import sys + patch_script = """ def run_patch_scripts(patch_script_path): @@ -15,18 +17,18 @@ def run_patch_scripts(patch_script_path): """ -def main(): +def main(patch_file): result_list = [] - with open("/usr/local/bin/wal-e", "r") as f: + with open(patch_file, "r") as f: has_patched = False for line in f: if not 
has_patched and line.startswith('import'): result_list.append(patch_script) has_patched = True result_list.append(line) - with open("/usr/local/bin/wal-e", "w") as f: + with open(patch_file, "w") as f: for line in result_list: f.write(line) if __name__ == '__main__': - main() + main(sys.argv[1]) From 247bc5c3217acd0d60ebabd8d9f1004bf72b5bcb Mon Sep 17 00:00:00 2001 From: duanhongyi Date: Mon, 12 Nov 2018 16:53:44 +0000 Subject: [PATCH 2/6] feat(minio): support the latest minio --- Dockerfile | 8 ++++-- rootfs/bin/create_bucket | 6 ++-- rootfs/patcher-script.d/patch_boto_s3.py | 7 ++--- rootfs/patcher-script.d/patch_wal_e_s3.py | 18 ++++++++++++ rootfs/patcher-script.py | 35 ++++++++++++++--------- 5 files changed, 51 insertions(+), 23 deletions(-) create mode 100644 rootfs/patcher-script.d/patch_wal_e_s3.py diff --git a/Dockerfile b/Dockerfile index ff83d63..e99e863 100644 --- a/Dockerfile +++ b/Dockerfile @@ -36,8 +36,12 @@ RUN apt-get purge -y --auto-remove $BUILD_DEPS && \ COPY rootfs / ENV WALE_ENVDIR=/etc/wal-e.d/env RUN mkdir -p $WALE_ENVDIR -RUN python3 /patcher-script.py /bin/create_bucket -RUN python3 /patcher-script.py /usr/local/bin/wal-e + +ARG PATCH_CMD="python3 /patcher-script.py" +RUN $PATCH_CMD file /bin/create_bucket /patcher-script.d/patch_boto_s3.py +RUN $PATCH_CMD file /usr/local/bin/wal-e /patcher-script.d/patch_boto_s3.py +RUN $PATCH_CMD module wal_e.worker.worker_util /patcher-script.d/patch_wal_e_s3.py + CMD ["/docker-entrypoint.sh", "postgres"] EXPOSE 5432 diff --git a/rootfs/bin/create_bucket b/rootfs/bin/create_bucket index 21377d9..e9caec9 100755 --- a/rootfs/bin/create_bucket +++ b/rootfs/bin/create_bucket @@ -76,9 +76,11 @@ elif os.getenv('DATABASE_STORAGE') == "swift": conn.put_container(os.getenv('BUCKET_NAME')) else: - botoconfig.add_section('s3') + if not botoconfig.has_section("s3"): + botoconfig.add_section('s3') botoconfig.set('s3', 'use-sigv4', 'True') - botoconfig.add_section('Boto') + if not botoconfig.has_section("Boto"): + botoconfig.add_section('Boto') botoconfig.set('Boto', 'is_secure', 'False') conn = S3Connection( host=os.getenv('S3_HOST'), diff --git a/rootfs/patcher-script.d/patch_boto_s3.py b/rootfs/patcher-script.d/patch_boto_s3.py index 1ac58d3..1ebb176 100644 --- a/rootfs/patcher-script.d/patch_boto_s3.py +++ b/rootfs/patcher-script.d/patch_boto_s3.py @@ -1,4 +1,4 @@ -def patch_wal_e_hmac_auth_v4_handler(): +def patch_boto_s3_hmac_auth_v4_handler(): import os from boto.auth import HmacAuthV4Handler _init = HmacAuthV4Handler.__init__ @@ -6,7 +6,4 @@ def wrap_init(self, *args, **kwargs): _init(self, *args, **kwargs) self.region_name = os.getenv('S3_REGION', self.region_name) HmacAuthV4Handler.__init__ = wrap_init - - -if __name__ == '__main__': - patch_wal_e_hmac_auth_v4_handler() +patch_boto_s3_hmac_auth_v4_handler() diff --git a/rootfs/patcher-script.d/patch_wal_e_s3.py b/rootfs/patcher-script.d/patch_wal_e_s3.py new file mode 100644 index 0000000..1b2d4ea --- /dev/null +++ b/rootfs/patcher-script.d/patch_wal_e_s3.py @@ -0,0 +1,18 @@ +def patch_uri_put_file(): + import os + from wal_e.blobstore import s3 + from wal_e.blobstore.s3 import s3_util + def wrap_uri_put_file(creds, uri, fp, content_type=None, conn=None): + assert fp.tell() == 0 + k = s3_util._uri_to_key(creds, uri, conn=conn) + if content_type is not None: + k.content_type = content_type + if os.getenv('DATABASE_STORAGE') == 's3': + encrypt_key=True + else: + encrypt_key=False + k.set_contents_from_file(fp, encrypt_key=encrypt_key) + return k + s3.uri_put_file = 
wrap_uri_put_file + s3_util.uri_put_file = wrap_uri_put_file +patch_uri_put_file() diff --git a/rootfs/patcher-script.py b/rootfs/patcher-script.py index 3e7e178..56f149e 100644 --- a/rootfs/patcher-script.py +++ b/rootfs/patcher-script.py @@ -1,34 +1,41 @@ import sys -patch_script = """ +patch_script_tmp = """ def run_patch_scripts(patch_script_path): - import os - for patch in os.listdir(patch_script_path): - full_patch_file = os.path.join(patch_script_path, patch) - if full_patch_file.endswith('.py') and os.path.isfile(full_patch_file): - with open(full_patch_file, 'r') as f: - try: - exec(f.read()) - except: - pass -run_patch_scripts('/patcher-script.d') + with open(patch_script_path, 'r') as f: + try: + exec(f.read()) + except: + pass +run_patch_scripts("%s") """ -def main(patch_file): +def main(patch_file, patch_script_file): result_list = [] + patch_script = patch_script_tmp % patch_script_file with open(patch_file, "r") as f: has_patched = False for line in f: - if not has_patched and line.startswith('import'): + if (line.startswith('import') or line.startswith('from')) \ + and not has_patched: result_list.append(patch_script) has_patched = True result_list.append(line) + if not has_patched: result_list.append(patch_script) with open(patch_file, "w") as f: for line in result_list: f.write(line) if __name__ == '__main__': - main(sys.argv[1]) + patch_type = sys.argv[1] + if patch_type == 'file': + patch_file = sys.argv[2] + elif patch_type == 'module': + module = __import__(sys.argv[2], fromlist=True) + patch_file = module.__file__ + patch_script_file = sys.argv[3] + main(patch_file, patch_script_file) + From 90431d8df4c990acebe959fde75e3c22119bc502 Mon Sep 17 00:00:00 2001 From: duanhongyi Date: Sat, 24 Nov 2018 23:56:05 +0800 Subject: [PATCH 3/6] feat(wal-e): add WALE_S3_SSE environment variable --- charts/database/templates/database-deployment.yaml | 4 ++++ rootfs/docker-entrypoint-initdb.d/001_setup_envdir.sh | 5 +++++ rootfs/patcher-script.d/patch_wal_e_s3.py | 6 +++--- 3 files changed, 12 insertions(+), 3 deletions(-) diff --git a/charts/database/templates/database-deployment.yaml b/charts/database/templates/database-deployment.yaml index 9b232f9..41f145a 100644 --- a/charts/database/templates/database-deployment.yaml +++ b/charts/database/templates/database-deployment.yaml @@ -41,6 +41,10 @@ spec: value: "{{.Values.global.storage}}" - name: PGCTLTIMEOUT value: "{{.Values.postgres.timeout}}" +{{- if eq .Values.global.storage "s3" }} + - name: S3_SSE + value: "{{.Values.s3.use_sse}}" +{{- end}} lifecycle: preStop: exec: diff --git a/rootfs/docker-entrypoint-initdb.d/001_setup_envdir.sh b/rootfs/docker-entrypoint-initdb.d/001_setup_envdir.sh index e2c00af..d18904f 100755 --- a/rootfs/docker-entrypoint-initdb.d/001_setup_envdir.sh +++ b/rootfs/docker-entrypoint-initdb.d/001_setup_envdir.sh @@ -17,6 +17,11 @@ if [[ "$DATABASE_STORAGE" == "s3" || "$DATABASE_STORAGE" == "minio" ]]; then else echo "https+path://s3-${AWS_REGION}.amazonaws.com:443" > WALE_S3_ENDPOINT fi + if [[ $S3_SSE ]]; then + echo $S3_SSE > WALE_S3_SSE + else + echo "false" > WALE_S3_SSE + fi else AWS_REGION="us-east-1" BUCKET_NAME="dbwal" diff --git a/rootfs/patcher-script.d/patch_wal_e_s3.py b/rootfs/patcher-script.d/patch_wal_e_s3.py index 1b2d4ea..fd248b3 100644 --- a/rootfs/patcher-script.d/patch_wal_e_s3.py +++ b/rootfs/patcher-script.d/patch_wal_e_s3.py @@ -7,10 +7,10 @@ def wrap_uri_put_file(creds, uri, fp, content_type=None, conn=None): k = s3_util._uri_to_key(creds, uri, conn=conn) if content_type is not 
None:
         k.content_type = content_type
+    encrypt_key = False
     if os.getenv('DATABASE_STORAGE') == 's3':
-        encrypt_key=True
-    else:
-        encrypt_key=False
+        if os.getenv('WALE_S3_SSE', 'false') == 'true':
+            encrypt_key = True
     k.set_contents_from_file(fp, encrypt_key=encrypt_key)
     return k
 s3.uri_put_file = wrap_uri_put_file

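How the pieces of PATCH 3 fit together: the chart exposes the SSE toggle (corrected to `.Values.global.s3.use_sse` in PATCH 6 below) as the `S3_SSE` container variable, `001_setup_envdir.sh` persists it into the wal-e envdir as `WALE_S3_SSE`, and the patched `wrap_uri_put_file` translates it into boto's `encrypt_key` flag. A minimal standalone sketch of that decision logic, assuming the envdir values are present in the process environment (which is how `envdir` launches wal-e):

```python
import os

def should_encrypt():
    # Mirrors the encrypt_key logic in patch_wal_e_s3.py (sketch only).
    # SSE is requested only for real S3; minio and swift never get
    # encrypt_key=True, and even on S3 the operator must opt in via
    # global.s3.use_sse -> S3_SSE -> WALE_S3_SSE.
    return (os.getenv('DATABASE_STORAGE') == 's3'
            and os.getenv('WALE_S3_SSE', 'false') == 'true')

os.environ.update(DATABASE_STORAGE='s3', WALE_S3_SSE='true')
assert should_encrypt()

os.environ['DATABASE_STORAGE'] = 'minio'  # WALE_S3_SSE is ignored off S3
assert not should_encrypt()
```
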
From 365c9438fa46d9f9ff4053253fea19526bf7f83b Mon Sep 17 00:00:00 2001
From: Richard Wossal
Date: Fri, 5 Oct 2018 15:56:05 +0200
Subject: [PATCH 4/6] feat(backup): honor s3 endpoint config value

---
 rootfs/bin/create_bucket | 34 +++++++++----------
 .../001_setup_envdir.sh | 20 +++++++----
 2 files changed, 29 insertions(+), 25 deletions(-)

diff --git a/rootfs/bin/create_bucket b/rootfs/bin/create_bucket
index c03dfe9..fb30af5 100755
--- a/rootfs/bin/create_bucket
+++ b/rootfs/bin/create_bucket
@@ -12,6 +12,7 @@ from oauth2client.service_account import ServiceAccountCredentials
 from gcloud.storage.client import Client
 from gcloud import exceptions
 from azure.storage.blob import BlobService
+from urllib.parse import urlparse

 def bucket_exists(conn, name):
     bucket = conn.lookup(name)
@@ -23,25 +24,22 @@ bucket_name = os.getenv('BUCKET_NAME')
 region = os.getenv('S3_REGION')

 if os.getenv('DATABASE_STORAGE') == "s3":
-    conn = boto.s3.connect_to_region(region)
+    if os.getenv('S3_ENDPOINT'):
+        endpoint = urlparse(os.getenv('S3_ENDPOINT'))
+        conn = boto.s3.connect_to_region(region,
+            host=endpoint.hostname,
+            port=endpoint.port,
+            path=endpoint.path,
+            calling_format=boto.s3.connection.OrdinaryCallingFormat())
+    else:
+        conn = boto.s3.connect_to_region(region)
+
     if not bucket_exists(conn, bucket_name):
-        try:
-            if region == "us-east-1":
-                # use "US Standard" region. workaround for https://github.com/boto/boto3/issues/125
-                conn.create_bucket(bucket_name)
-            else:
-                conn.create_bucket(bucket_name, location=region)
-            # NOTE(bacongobbler): for versions prior to v2.9.0, the bucket is created in the default region.
-            # if we got here, we need to propagate "us-east-1" into WALE_S3_ENDPOINT because the bucket
-            # exists in a different region and we cannot find it.
-            # TODO(bacongobbler): deprecate this once we drop support for v2.8.0 and lower
-        except S3CreateError as err:
-            if region != 'us-east-1':
-                print('Failed to create bucket in {}. We are now assuming that the bucket was created in us-east-1.'.format(region))
-                with open(os.path.join(os.environ['WALE_ENVDIR'], "WALE_S3_ENDPOINT"), "w+") as file:
-                    file.write('https+path://s3.amazonaws.com:443')
-            else:
-                raise
+        if region == "us-east-1":
+            # use "US Standard" region. workaround for https://github.com/boto/boto3/issues/125
+            conn.create_bucket(bucket_name)
+        else:
+            conn.create_bucket(bucket_name, location=region)

 elif os.getenv('DATABASE_STORAGE') == "gcs":
     scopes = ['https://www.googleapis.com/auth/devstorage.full_control']
diff --git a/rootfs/docker-entrypoint-initdb.d/001_setup_envdir.sh b/rootfs/docker-entrypoint-initdb.d/001_setup_envdir.sh
index d18904f..78890f7 100755
--- a/rootfs/docker-entrypoint-initdb.d/001_setup_envdir.sh
+++ b/rootfs/docker-entrypoint-initdb.d/001_setup_envdir.sh
@@ -7,15 +7,21 @@ if [[ "$DATABASE_STORAGE" == "s3" || "$DATABASE_STORAGE" == "minio" ]]; then
     AWS_SECRET_ACCESS_KEY=$(cat /var/run/secrets/deis/objectstore/creds/secretkey)
     if [[ "$DATABASE_STORAGE" == "s3" ]]; then
         AWS_REGION=$(cat /var/run/secrets/deis/objectstore/creds/region)
+        S3_ENDPOINT=$(cat /var/run/secrets/deis/objectstore/creds/endpoint)
         BUCKET_NAME=$(cat /var/run/secrets/deis/objectstore/creds/database-bucket)
-        # Convert $AWS_REGION into $WALE_S3_ENDPOINT to avoid "Connection reset by peer" from
-        # regions other than us-standard.
-        # See https://github.com/wal-e/wal-e/issues/167
-        # See https://github.com/boto/boto/issues/2207
-        if [[ "$AWS_REGION" == "us-east-1" ]]; then
-            echo "https+path://s3.amazonaws.com:443" > WALE_S3_ENDPOINT
+        if [[ "$S3_ENDPOINT" == "" ]]; then
+            # Convert $AWS_REGION into $WALE_S3_ENDPOINT to avoid "Connection reset by peer" from
+            # regions other than us-standard.
+            # See https://github.com/wal-e/wal-e/issues/167
+            # See https://github.com/boto/boto/issues/2207
+            if [[ "$AWS_REGION" == "us-east-1" ]]; then
+                echo "https+path://s3.amazonaws.com:443" > WALE_S3_ENDPOINT
+            else
+                echo "https+path://s3-${AWS_REGION}.amazonaws.com:443" > WALE_S3_ENDPOINT
+            fi
         else
-            echo "https+path://s3-${AWS_REGION}.amazonaws.com:443" > WALE_S3_ENDPOINT
+            echo "$S3_ENDPOINT" > S3_ENDPOINT
+            echo "$S3_ENDPOINT" | sed -E -e 's!http(s?)://!http\1+path://!' -e 's!/$!!' > WALE_S3_ENDPOINT
         fi
         if [[ $S3_SSE ]]; then
             echo $S3_SSE > WALE_S3_SSE

From 97cd04c6ccf69b05e893f672e763c4a11e70f9d5 Mon Sep 17 00:00:00 2001
From: Kingdon Barrett
Date: Sat, 24 Nov 2018 10:03:18 -0500
Subject: [PATCH 5/6] fix typo

---
 Makefile | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/Makefile b/Makefile
index 2a73900..75409f9 100644
--- a/Makefile
+++ b/Makefile
@@ -3,7 +3,7 @@
 #  - Docker image name
 #  - Kubernetes service, rc, pod, secret, volume names
 SHORT_NAME := postgres
-DEIS_REGISTY ?= ${DEV_REGISTRY}/
+DEIS_REGISTRY ?= ${DEV_REGISTRY}/
 IMAGE_PREFIX ?= deis

 include versioning.mk

From 66f3bd0203510507a041021b67bf86bd2a8c902b Mon Sep 17 00:00:00 2001
From: Kingdon Barrett
Date: Sat, 24 Nov 2018 13:36:52 -0500
Subject: [PATCH 6/6] trickle down from Values.global

Values in workflow/values.yaml only trickle down for use in dependent
charts when they are in the global section.
---
 charts/database/templates/database-deployment.yaml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/charts/database/templates/database-deployment.yaml b/charts/database/templates/database-deployment.yaml
index 41f145a..2322c16 100644
--- a/charts/database/templates/database-deployment.yaml
+++ b/charts/database/templates/database-deployment.yaml
@@ -43,7 +43,7 @@ spec:
         value: "{{.Values.postgres.timeout}}"
 {{- if eq .Values.global.storage "s3" }}
         - name: S3_SSE
-          value: "{{.Values.s3.use_sse}}"
+          value: "{{.Values.global.s3.use_sse}}"
 {{- end}}
         lifecycle:
           preStop:
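For reference, the shape of a patched target after PATCH 2: running `python3 /patcher-script.py file /usr/local/bin/wal-e /patcher-script.d/patch_boto_s3.py` splices a loader generated from `patch_script_tmp` in front of the target's first `import`/`from` line (or appends it when no import is found), so the patch script is `exec()`'d before any boto code runs. A sketch of the patched file's head; illustrative only, with the original wal-e contents elided:

```python
# Head of /usr/local/bin/wal-e after patching (sketch; the loader below is
# what patcher-script.py generates from patch_script_tmp).

def run_patch_scripts(patch_script_path):
    with open(patch_script_path, 'r') as f:
        try:
            # Executes patch_boto_s3.py, which wraps HmacAuthV4Handler.__init__
            # so the sigv4 region comes from S3_REGION.
            exec(f.read())
        except:
            pass
run_patch_scripts("/patcher-script.d/patch_boto_s3.py")

# ... the original wal-e console script continues here, starting with its
# first import ...
```

The `module` form used for `wal_e.worker.worker_util` works the same way; patcher-script.py just resolves the file to rewrite via `__import__(name, fromlist=True).__file__` instead of taking a path.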