Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Cache busting #31

Open
wants to merge 2 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions docs/index.rst
Original file line number Diff line number Diff line change
Expand Up @@ -213,6 +213,10 @@ uploading assets to S3.
upload to S3. SHA-1 file hashes are used to compute
file changes. You can delete `.file-hashes` from
your S3 bucket to force all files to upload again.
`S3_CACHE_BUSTING` Append a query string with a file hash to all static URLs
to force a fetch when file contents change. This is
useful when used with long expiry headers while
still retaining the ability to change static files.
`S3_CACHE_CONTROL` **Deprecated**. Please use `S3_HEADERS` instead.
`S3_USE_CACHE_CONTROL` **Deprecated**. Please use `S3_HEADERS` instead.
=========================== ===================================================
Expand Down
37 changes: 34 additions & 3 deletions flask_s3.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
from collections import defaultdict

from flask import url_for as flask_url_for
from flask import current_app
from flask import current_app, request
from boto.s3.connection import S3Connection
from boto.s3 import connect_to_region
from boto.exception import S3CreateError, S3ResponseError
Expand All @@ -14,6 +14,9 @@
logger = logging.getLogger('flask_s3')


file_hashes = {}


def hash_file(filename):
"""
Generate a hash for the contents of a file
Expand Down Expand Up @@ -55,7 +58,34 @@ def url_for(endpoint, **values):
if app.config['S3_CDN_DOMAIN']:
bucket_path = '%s' % app.config['S3_CDN_DOMAIN']
urls = app.url_map.bind(bucket_path, url_scheme=scheme)
return urls.build(endpoint, values=values, force_external=True)
url = urls.build(endpoint, values=values, force_external=True)

if app.config.get('S3_CACHE_BUSTING', False):
# We maintain a dictionary of file hashes to use as query parameters
# after filenames to force a cache miss when the files contents have
# changed. These hashes are calculated once per server restart when
# the file is first fetched

if url not in file_hashes:
filename = values['filename']
blueprints = app.blueprints.values()

if endpoint == 'static':
filename = os.path.join(app.static_folder, filename)
else:
for blueprint in blueprints:
if endpoint == "{}.static".format(blueprint.name):
filename = os.path.join(blueprint.static_folder,
filename)

if os.path.exists(filename):
file_hashes[url] = hash_file(filename)

file_hash = file_hashes.get(url, None)
if file_hash and "?" not in url:
return "{}?{}".format(url, file_hash[-6:])

return url
return flask_url_for(endpoint, **values)


Expand Down Expand Up @@ -285,7 +315,8 @@ def init_app(self, app):
('S3_CDN_DOMAIN', ''),
('S3_USE_CACHE_CONTROL', False),
('S3_HEADERS', {}),
('S3_ONLY_MODIFIED', False)]
('S3_ONLY_MODIFIED', False),
('S3_CACHE_BUSTING', False)]

for k, v in defaults:
app.config.setdefault(k, v)
Expand Down
38 changes: 37 additions & 1 deletion tests/test_flask_static.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
import os

from mock import Mock, patch, call
from flask import Flask, render_template_string, Blueprint
from flask import Flask, render_template_string, Blueprint, url_for

import flask_s3
from flask_s3 import FlaskS3
Expand Down Expand Up @@ -124,6 +124,42 @@ def test_url_for_cdn_domain(self):
exp = 'https://foo.cloudfront.net/static/bah.js'
self.assertEquals(self.client_get(ufs).data, exp)

def test_url_with_hash(self):
    """Static URLs carry a content-hash query string when S3_CACHE_BUSTING
    is enabled, and that query string changes when the file contents change.
    """
    import shutil  # local import: keeps the temp-dir cleanup self-contained

    self.app.config['S3_CACHE_BUSTING'] = True

    static_folder = tempfile.mkdtemp()
    # Remove the temporary static folder even if an assertion below fails.
    self.addCleanup(shutil.rmtree, static_folder, True)
    self.app.static_folder = static_folder

    filename = os.path.join(static_folder, "foo.png")

    # Write random data into the file so it has a hash to append.
    with open(filename, 'wb') as f:
        f.write(os.urandom(1024))

    with self.app.test_request_context("/"):
        ufs = "{{url_for('static', filename='foo.png')}}"
        exp = "https://foo.s3.amazonaws.com/static/foo.png"
        url = self.client_get(ufs).data

        self.assertTrue(url.startswith(exp))
        # A query string (the hash suffix) must have been appended.
        self.assertTrue("?" in url)

        # Fetching the same unchanged file yields the same query string,
        # since the hash is cached in flask_s3.file_hashes.
        again = self.client_get(ufs).data
        self.assertEqual(again, url)

        # Change the contents of the file.
        with open(filename, 'wb') as f:
            f.write(os.urandom(1025))

        # Clear the hash cache (this would happen on a server restart).
        flask_s3.file_hashes = {}

        # The rebuilt URL carries a different hash, forcing a cache miss.
        changed = self.client_get(ufs).data
        self.assertNotEqual(url, changed)


class S3Tests(unittest.TestCase):
Expand Down