Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

Big refactoring code and tests #18

Closed
wants to merge 20 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
21 changes: 18 additions & 3 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -1,7 +1,22 @@
*.pyc

# coverage files
*.coverage
htmlcov
cover

# temporary files
*~

# mac os x
.DS_Store
*.egg

# build artifacts from python setup.py build or python setup.py install
build/
dist/
*.egg-info
dist
*egg*/
docs/_build

# idea project file
/.idea
_build
20 changes: 16 additions & 4 deletions .travis.yml
Original file line number Diff line number Diff line change
@@ -1,7 +1,19 @@
language: python
python:
- "2.6"
- "2.7"
# command to install dependencies, e.g. pip install -r requirements.txt --use-mirrors
install: pip install -r docs/requirements.txt --use-mirrors
# command to run tests, e.g. python setup.py test
script: nosetests
- "pypy"

install:
- pip install .
- pip install -r requirements.txt
- pip install -r requirements-test.txt

script:
- coverage run --source=flask_s3.py setup.py test
- coverage report -m

after_script:
coveralls --verbose


1 change: 1 addition & 0 deletions CONTRIBUTORS
Original file line number Diff line number Diff line change
Expand Up @@ -5,3 +5,4 @@ Contributors
* Rehan Dalal (rehandalal)
* Hannes Ljungberg (hannseman)
* Erik Taubeneck (eriktaubeneck)
* Mike Klimin (klinkin)
5 changes: 0 additions & 5 deletions docs/requirements.txt

This file was deleted.

4 changes: 3 additions & 1 deletion example/example/app.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,13 +8,15 @@

s3 = FlaskS3(app)


@app.route('/')
def index():
template_str = """{{ url_for('static', filename="foo.js") }}"""
return render_template_string(template_str)


def upload_all():
create_all(app, user='MY_AWS_ID', password='MY_AWS_SECRET')

if __name__ == '__main__':
app.run(debug=True)
app.run(debug=True)
129 changes: 86 additions & 43 deletions flask_s3.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,10 @@
# -*- coding: utf-8 -*-

__version__ = '0.1.6'
__author__ = 'Edward Robinson'
__license__ = 'WTFPL'


import os
import logging
from collections import defaultdict
Expand All @@ -7,6 +14,7 @@
from boto.s3.connection import S3Connection
from boto.exception import S3CreateError
from boto.s3.key import Key
from tqdm import tqdm

logger = logging.getLogger('flask_s3')

Expand All @@ -15,14 +23,14 @@ def url_for(endpoint, **values):
"""
Generates a URL to the given endpoint.

If the endpoint is for a static resource then an Amazon S3 URL is
If the endpoint is for a static resource then an Amazon S3 URL is
generated, otherwise the call is passed on to `flask.url_for`.

Because this function is set as a jinja environment variable when
`FlaskS3.init_app` is invoked, this function replaces
Because this function is set as a jinja environment variable when
`FlaskS3.init_app` is invoked, this function replaces
`flask.url_for` in templates automatically. It is unlikely that this
function will need to be directly called from within your
application code, unless you need to refer to static assets outside
function will need to be directly called from within your
application code, unless you need to refer to static assets outside
of your templates.
"""
app = current_app
Expand All @@ -33,18 +41,20 @@ def url_for(endpoint, **values):
scheme = 'http'
if app.config['S3_USE_HTTPS']:
scheme = 'https'
bucket_path = '%s.%s' % (app.config['S3_BUCKET_NAME'],
app.config['S3_BUCKET_DOMAIN'])
bucket_path = '{0}.{1}'.format(app.config['S3_BUCKET_NAME'],
app.config['S3_BUCKET_DOMAIN'])
if app.config['S3_CDN_DOMAIN']:
bucket_path = '%s' % app.config['S3_CDN_DOMAIN']
bucket_path = '{0}'.format(app.config['S3_CDN_DOMAIN'])
urls = app.url_map.bind(bucket_path, url_scheme=scheme)
return urls.build(endpoint, values=values, force_external=True)
return flask_url_for(endpoint, **values)


def _bp_static_url(blueprint):
""" builds the absolute url path for a blueprint's static folder """
u = u'%s%s' % (blueprint.url_prefix or '', blueprint.static_url_path or '')
u = u'{0}{1}'.format(
blueprint.url_prefix or '',
blueprint.static_url_path or '')
return u


Expand All @@ -57,13 +67,15 @@ def _gather_files(app, hidden):
dirs.extend([bp_details(x) for x in blueprints if x.static_folder])

valid_files = defaultdict(list)
for static_folder, static_url_loc in dirs:

for static_folder, static_url_loc in dirs:
if not os.path.isdir(static_folder):
logger.warning("WARNING - [%s does not exist]" % static_folder)
logger.warning(
"WARNING - [{0} does not exist]".format(static_folder))
else:
logger.debug("Checking static folder: %s" % static_folder)
logger.debug("Checking static folder: {0}".format(static_folder))
for root, _, files in os.walk(static_folder):
files = [os.path.join(root, x) \
files = [os.path.join(root, x)
for x in files if hidden or x[0] != '.']
if files:
valid_files[(static_folder, static_url_loc)].extend(files)
Expand All @@ -76,34 +88,43 @@ def _path_to_relative_url(path):


def _static_folder_path(static_url, static_folder, static_asset):
"""
Returns a path to a file based on the static folder, and not on the
"""
Returns a path to a file based on the static folder, and not on the
filesystem holding the file.

Returns a path relative to static_url for static_asset
"""
# first get the asset path relative to the static folder.
# static_asset is not simply a filename because it could be
# first get the asset path relative to the static folder.
# static_asset is not simply a filename because it could be
# sub-directory then file etc.
if not static_asset.startswith(static_folder):
raise ValueError("%s startic asset must be under %s static folder" %
(static_asset, static_folder))
raise ValueError(
"{0} startic asset must be under {1} static folder".format(static_asset,
static_folder))
rel_asset = static_asset[len(static_folder):]
# Now bolt the static url path and the relative asset location together
return u'%s/%s' % (static_url.rstrip('/'), rel_asset.lstrip('/'))
return u'{0}/{1}'.format(static_url.rstrip('/'), rel_asset.lstrip('/'))


def _write_files(app, static_url_loc, static_folder, files, bucket,
ex_keys=None):
""" Writes all the files inside a static folder to S3. """

if logger.level == logging.INFO:
files = tqdm(
files,
desc='Uploading from {0} to {1}'.format(
static_url_loc,
bucket.name))

for file_path in files:
asset_loc = _path_to_relative_url(file_path)
key_name = _static_folder_path(static_url_loc, static_folder,
asset_loc)
msg = "Uploading %s to %s as %s" % (file_path, bucket, key_name)
msg = "Uploading {0} to {1} as {2}".format(file_path, bucket, key_name)
logger.debug(msg)
if ex_keys and key_name in ex_keys:
logger.debug("%s excluded from upload" % key_name)
logger.debug("{0} excluded from upload".format(key_name))
else:
k = Key(bucket=bucket, name=key_name)
# Set custom headers
Expand All @@ -118,15 +139,33 @@ def _upload_files(app, files_, bucket):
_write_files(app, static_url, static_folder, names, bucket)


def _get_or_create_bucket(conn, bucket_name, location):
"""Helper for :function:`create_all`. Returns bucket object by name,
if not existing create and return it.

:internal:
"""
bucket = conn.lookup(bucket_name, validate=True)
if bucket:
return bucket
try:
bucket = conn.create_bucket(bucket_name, location=location)
bucket.make_public(recursive=False)
except S3CreateError as e:
raise e

return bucket


def create_all(app, user=None, password=None, bucket_name=None,
location='', include_hidden=False):
"""
Uploads of the static assets associated with a Flask application to
Uploads of the static assets associated with a Flask application to
Amazon S3.

All static assets are identified on the local filesystem, including
any static assets associated with *registered* blueprints. In turn,
each asset is uploaded to the bucket described by `bucket_name`. If
All static assets are identified on the local filesystem, including
any static assets associated with *registered* blueprints. In turn,
each asset is uploaded to the bucket described by `bucket_name`. If
the bucket does not exist then it is created.

Flask-S3 creates the same relative static asset folder structure on
Expand All @@ -143,22 +182,22 @@ def create_all(app, user=None, password=None, bucket_name=None,
:type user: `basestring` or None

:param password: an AWS Secret Access Key. You can find this key in
the Security Credentials section of your AWS
the Security Credentials section of your AWS
account.
:type password: `basestring` or None

:param bucket_name: the name of the bucket you wish to server your
static assets from. **Note**: while a valid
character, it is recommended that you do not
include periods in bucket_name if you wish to
static assets from. **Note**: while a valid
character, it is recommended that you do not
include periods in bucket_name if you wish to
serve over HTTPS. See Amazon's `bucket
restrictions`_ for more details.
:type bucket_name: `basestring` or None

:param location: the AWS region to host the bucket in; an empty
string indicates the default region should be used,
which is the US Standard region. Possible location
values include: `'DEFAULT'`, `'EU'`, `'USWest'`,
which is the US Standard region. Possible location
values include: `'DEFAULT'`, `'EU'`, `'USWest'`,
`'APSoutheast'`
:type location: `basestring` or None

Expand All @@ -178,38 +217,42 @@ def create_all(app, user=None, password=None, bucket_name=None,
bucket_name = app.config['S3_BUCKET_NAME']
if not bucket_name:
raise ValueError("No bucket name provided.")

# build list of static files
all_files = _gather_files(app, include_hidden)
logger.debug("All valid files: %s" % all_files)
conn = S3Connection(user, password) # connect to s3
for (static_folder, static_url_loc), files in all_files.iteritems():
logger.debug(
'{0} valid files in folder "{1}" with local url "{2}"'.format(len(files),
static_folder,
static_url_loc))
# connect to s3
conn = S3Connection(user, password)
# get_or_create bucket
try:
bucket = conn.create_bucket(bucket_name, location=location)
bucket.make_public(recursive=True)
except S3CreateError as e:
raise e
bucket = _get_or_create_bucket(conn, bucket_name, location)
_upload_files(app, all_files, bucket)


class FlaskS3(object):

"""
The FlaskS3 object allows your application to use Flask-S3.

When initialising a FlaskS3 object you may optionally provide your
:class:`flask.Flask` application object if it is ready. Otherwise,
When initialising a FlaskS3 object you may optionally provide your
:class:`flask.Flask` application object if it is ready. Otherwise,
you may provide it later by using the :meth:`init_app` method.

:param app: optional :class:`flask.Flask` application object
:type app: :class:`flask.Flask` or None
"""

def __init__(self, app=None):
if app is not None:
self.init_app(app)

def init_app(self, app):
"""
An alternative way to pass your :class:`flask.Flask` application
object to Flask-S3. :meth:`init_app` also takes care of some
object to Flask-S3. :meth:`init_app` also takes care of some
default `settings`_.

:param app: the :class:`flask.Flask` application object.
Expand Down
4 changes: 4 additions & 0 deletions requirements-test.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
mock
nose
coverage
coveralls
3 changes: 3 additions & 0 deletions requirements.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
flask>=0.9
boto>=2.5.2
tqdm==1.0
7 changes: 7 additions & 0 deletions setup.cfg
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
[nosetests]
nocapture = 1
with-coverage = 1
cover-package = flask_s3
cover-erase = 1
cover-tests = 1
cover-inclusive = 1
Loading