From 1a715433b2c098db29a4970c77b86ab62c423490 Mon Sep 17 00:00:00 2001
From: Slava Maslennikov
Date: Fri, 9 Mar 2018 14:57:02 -0800
Subject: [PATCH 1/2] Allow buckets with periods in their names

...and sort python imports.

Fixes https://github.com/crohr/rpm-s3/issues/21
---
 bin/rpm-s3 | 21 ++++++++++++---------
 1 file changed, 12 insertions(+), 9 deletions(-)

diff --git a/bin/rpm-s3 b/bin/rpm-s3
index 54b88a8..75fa485 100755
--- a/bin/rpm-s3
+++ b/bin/rpm-s3
@@ -1,18 +1,19 @@
 #!/usr/bin/env python
 """CLI for serialising metadata updates on an s3-hosted yum repository.
 """
+import boto
+import boto.s3.connection
+import collections
+import logging
+import optparse
 import os
+import shutil
+import subprocess
 import sys
+import tempfile
 import time
 import urlparse
-import tempfile
-import shutil
-import optparse
-import logging
-import collections
 import yum
-import boto
-import subprocess
 
 lib_root = os.path.dirname(os.path.dirname(__file__))
 
@@ -111,11 +112,13 @@ def getclient(base, host_url):
         return boto.connect_s3(
             os.getenv('AWS_ACCESS_KEY'),
             os.getenv('AWS_SECRET_KEY'),
-            host=host_url
+            host=host_url,
+            calling_format=boto.s3.connection.OrdinaryCallingFormat()
         ).get_bucket(base.netloc)
     else:
         return boto.connect_s3(
-            host=host_url
+            host=host_url,
+            calling_format=boto.s3.connection.OrdinaryCallingFormat()
         ).get_bucket(base.netloc)
 
 

From 429845f1438ca09670989bc0a6b8cb13640c9f45 Mon Sep 17 00:00:00 2001
From: Slava Maslennikov
Date: Fri, 9 Mar 2018 15:00:07 -0800
Subject: [PATCH 2/2] Use standard AWS access key ENV VAR names

---
 README.md  | 10 +++++-----
 bin/rpm-s3 |  6 +++---
 2 files changed, 8 insertions(+), 8 deletions(-)

diff --git a/README.md b/README.md
index 85926d0..720a636 100644
--- a/README.md
+++ b/README.md
@@ -10,10 +10,10 @@ If you're looking for the same kind of tool, but for APT repositories, I can rec
 
 1. You have python installed (2.6+).
 
-1. You have your S3 credentials available in the `AWS_ACCESS_KEY` and `AWS_SECRET_KEY` environment variables:
+1. You have your S3 credentials available in the `AWS_ACCESS_KEY_ID` and `AWS_SECRET_ACCESS_KEY` environment variables:
 
-    export AWS_ACCESS_KEY="key"
-    export AWS_SECRET_KEY="secret"
+    export AWS_ACCESS_KEY_ID="key"
+    export AWS_SECRET_ACCESS_KEY="secret"
 
 ## Installation
 
@@ -31,7 +31,7 @@ Use the provided `/test/test.sh` script:
 
     vagrant up
     vagrant ssh
-    AWS_ACCESS_KEY=xx AWS_SECRET_KET=yy BUCKET=zz ./test/test.sh
+    AWS_ACCESS_KEY_ID=xx AWS_SECRET_ACCESS_KEY=yy BUCKET=zz ./test/test.sh
 
 Also:
 
@@ -58,7 +58,7 @@ Have a `~/.rpmmacros` file ready with the following content:
 
 Pass the `--sign` option to `rpm-s3`:
 
-    AWS_ACCESS_KEY="key" AWS_SECRET_KEY="secret" ./bin/rpm-s3 --sign my-app-1.0.0.x86_64.rpm
+    AWS_ACCESS_KEY_ID="key" AWS_SECRET_ACCESS_KEY="secret" ./bin/rpm-s3 --sign my-app-1.0.0.x86_64.rpm
 
 ### Import gpg key to install signed packages
 
diff --git a/bin/rpm-s3 b/bin/rpm-s3
index 75fa485..5f12ff2 100755
--- a/bin/rpm-s3
+++ b/bin/rpm-s3
@@ -108,10 +108,10 @@ class FileGrabber(object):
 
 
 def getclient(base, host_url):
-    if os.getenv('AWS_ACCESS_KEY'):
+    if os.getenv('AWS_ACCESS_KEY_ID'):
         return boto.connect_s3(
-            os.getenv('AWS_ACCESS_KEY'),
-            os.getenv('AWS_SECRET_KEY'),
+            os.getenv('AWS_ACCESS_KEY_ID'),
+            os.getenv('AWS_SECRET_ACCESS_KEY'),
             host=host_url,
             calling_format=boto.s3.connection.OrdinaryCallingFormat()
         ).get_bucket(base.netloc)
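
Background on why the first patch passes `calling_format` (commentary added here, not part of the patches themselves): boto's default `SubdomainCallingFormat` builds virtual-hosted URLs of the form `<bucket>.s3.amazonaws.com`. A bucket name that itself contains periods adds extra subdomain levels, so the hostname no longer matches Amazon's `*.s3.amazonaws.com` wildcard certificate and HTTPS validation fails. `OrdinaryCallingFormat` switches to path-style URLs (`s3.amazonaws.com/<bucket>`) and sidesteps the certificate mismatch. The sketch below shows the same pattern in isolation against the legacy boto 2 API that `bin/rpm-s3` uses; the bucket name `repo.example.com` is a hypothetical placeholder, not one used by the project.

    # Minimal sketch (boto 2): connect with path-style addressing so a bucket
    # whose name contains periods can be fetched over HTTPS without a
    # certificate error.
    import os

    import boto
    import boto.s3.connection

    conn = boto.connect_s3(
        os.getenv('AWS_ACCESS_KEY_ID'),
        os.getenv('AWS_SECRET_ACCESS_KEY'),
        # Path-style URLs: https://s3.amazonaws.com/<bucket>/<key>
        calling_format=boto.s3.connection.OrdinaryCallingFormat()
    )
    bucket = conn.get_bucket('repo.example.com')  # hypothetical bucket name
    print(bucket.name)

For buckets without periods in their names the default calling format also works; passing `OrdinaryCallingFormat` explicitly simply makes both cases behave the same.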