
Merge pull request #11890 from twbs/generic-caching

make S3 caching significantly more generic
Chris Rebert 2013-12-15 21:26:52 -08:00
commit 6bb73df50e
2 changed files with 52 additions and 32 deletions
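
This change turns the Travis S3 cache script from a node_modules-only tool into a generic one: instead of hardcoding package.json as the cache-key source and node_modules as the cached directory, the script now takes both as arguments and keys the S3 object on the SHA-256 hash of whichever dependencies file it is given. The before/after invocations, taken from the .travis.yml diff below:

    # old interface: package.json and node_modules are implicit
    ./test-infra/node_modules_cache.py download
    ./test-infra/node_modules_cache.py upload

    # new generic interface: <dependencies file> <directory>
    ./test-infra/node_modules_cache.py download package.json ./node_modules
    ./test-infra/node_modules_cache.py upload package.json ./node_modules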

.travis.yml

@@ -6,9 +6,9 @@ before_install:
 install:
   - if [ "$TWBS_TEST" = validate-html ]; then time gem install jekyll; fi
   - time npm install -g grunt-cli
-  - time ./test-infra/node_modules_cache.py download || time npm install
+  - time ./test-infra/node_modules_cache.py download package.json ./node_modules || time npm install
 after_script:
-  - if [ "$TWBS_TEST" = core ]; then time ./test-infra/node_modules_cache.py upload; fi
+  - if [ "$TWBS_TEST" = core ]; then time ./test-infra/node_modules_cache.py upload package.json ./node_modules; fi
 env:
   global:
     - SAUCE_USERNAME: bootstrap
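
Since the manifest/directory pair is now just arguments, the same script could cache other build artifacts too. A hypothetical example (not part of this commit; the Gemfile and vendor path are illustrative only), caching Ruby gems keyed on a Gemfile:

    # hypothetical: reuse the generic cache for bundler gems
    ./test-infra/node_modules_cache.py download Gemfile ./vendor/bundle || bundle install
    ./test-infra/node_modules_cache.py upload Gemfile ./vendor/bundle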

test-infra/node_modules_cache.py

@@ -3,7 +3,7 @@ from __future__ import absolute_import, unicode_literals, print_function, division

 from sys import argv
 from os import environ, stat, remove as _delete_file
-from os.path import isfile
+from os.path import isfile, dirname, basename, abspath
 from hashlib import sha256
 from subprocess import check_call as run
@@ -12,7 +12,6 @@ from boto.s3.key import Key
 from boto.exception import S3ResponseError

-NODE_MODULES_TARBALL = 'node_modules.tar.gz'
 NEED_TO_UPLOAD_MARKER = '.need-to-upload'
 BYTES_PER_MB = 1024 * 1024

 try:
@@ -25,7 +24,9 @@ def _sha256_of_file(filename):
     hasher = sha256()
     with open(filename, 'rb') as input_file:
         hasher.update(input_file.read())
-    return hasher.hexdigest()
+    file_hash = hasher.hexdigest()
+    print('sha256({}) = {}'.format(filename, file_hash))
+    return file_hash


 def _delete_file_quietly(filename):
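
_sha256_of_file now logs the digest itself, replacing the hardcoded sha256(package.json) = ... print that the __main__ block used to do (removed further down), so the log line stays correct for any dependencies file. A download run would now start roughly like this (sketch only; the digest is a truncated placeholder, not a real hash of Bootstrap's package.json):

    $ ./test-infra/node_modules_cache.py download package.json ./node_modules
    sha256(package.json) = 9f86d081884c7d65...
    Downloading node_modules tarball from S3...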
@@ -35,52 +36,71 @@ def _delete_file_quietly(filename):
         pass


-def _tarball_size():
-    kib = stat(NODE_MODULES_TARBALL).st_size // BYTES_PER_MB
+def _tarball_size(directory):
+    kib = stat(_tarball_filename_for(directory)).st_size // BYTES_PER_MB
     return "{} MiB".format(kib)


+def _tarball_filename_for(directory):
+    return abspath('./{}.tar.gz'.format(basename(directory)))
+
+
+def _create_tarball(directory):
+    print("Creating tarball of {}...".format(directory))
+    run(['tar', '-czf', _tarball_filename_for(directory), '-C', dirname(directory), basename(directory)])
+
+
+def _extract_tarball(directory):
+    print("Extracting tarball of {}...".format(directory))
+    run(['tar', '-xzf', _tarball_filename_for(directory), '-C', dirname(directory)])
+
+
+def download(directory):
+    _delete_file_quietly(NEED_TO_UPLOAD_MARKER)
+    try:
+        print("Downloading {} tarball from S3...".format(basename(directory)))
+        key.get_contents_to_filename(_tarball_filename_for(directory))
+    except S3ResponseError as err:
+        open(NEED_TO_UPLOAD_MARKER, 'a').close()
+        print(err)
+        raise SystemExit("Cached {} download failed!".format(basename(directory)))
+    print("Downloaded {}.".format(_tarball_size(directory)))
+    _extract_tarball(directory)
+    print("{} successfully installed from cache.".format(directory))
+
+
+def upload(directory):
+    _create_tarball(directory)
+    print("Uploading {} tarball to S3... ({})".format(basename(directory), _tarball_size(directory)))
+    key.set_contents_from_filename(_tarball_filename_for(directory))
+    print("{} cache successfully updated.".format(directory))
+    _delete_file_quietly(NEED_TO_UPLOAD_MARKER)
+
+
 if __name__ == '__main__':
     # Uses environment variables:
     # AWS_ACCESS_KEY_ID - AWS Access Key ID
     # AWS_SECRET_ACCESS_KEY - AWS Secret Access Key
     argv.pop(0)
-    if len(argv) != 1:
-        raise SystemExit("USAGE: node_modules_cache.py <download | upload>")
-    mode = argv.pop()
+    if len(argv) != 3:
+        raise SystemExit("USAGE: node_modules_cache.py <download | upload> <dependencies file> <directory>")
+    mode, dependencies_file, directory = argv

     conn = S3Connection()
     bucket = conn.lookup(BUCKET_NAME)
     if bucket is None:
         raise SystemExit("Could not access bucket!")

-    package_json_hash = _sha256_of_file('package.json')
-    print('sha256(package.json) = ' + package_json_hash)
+    dependencies_file_hash = _sha256_of_file(dependencies_file)

-    key = Key(bucket, package_json_hash)
+    key = Key(bucket, dependencies_file_hash)
     key.storage_class = 'REDUCED_REDUNDANCY'

     if mode == 'download':
-        _delete_file_quietly(NEED_TO_UPLOAD_MARKER)
-        try:
-            print("Downloading tarball from S3...")
-            key.get_contents_to_filename(NODE_MODULES_TARBALL)
-        except S3ResponseError as err:
-            open(NEED_TO_UPLOAD_MARKER, 'a').close()
-            print(err)
-            raise SystemExit("Cached node_modules download failed!")
-        print("Downloaded {}.".format(_tarball_size()))
-        print("Extracting tarball...")
-        run(['tar', 'xzf', NODE_MODULES_TARBALL])
-        print("node_modules successfully installed from cache.")
+        download(directory)
     elif mode == 'upload':
-        if isfile(NEED_TO_UPLOAD_MARKER):
-            print("Creating tarball...")
-            run(['tar', 'czf', NODE_MODULES_TARBALL, 'node_modules'])
-            print("Uploading tarball to S3... ({})".format(_tarball_size()))
-            key.set_contents_from_filename(NODE_MODULES_TARBALL)
-            print("node_modules cache successfully updated.")
-            _delete_file_quietly(NEED_TO_UPLOAD_MARKER)
+        if isfile(NEED_TO_UPLOAD_MARKER):  # FIXME
+            upload(directory)
         else:
             print("No need to upload anything.")
     else: