move some caching info+logic out of .travis.yml; fixes #13136

parent 1759569dbe
commit 718a6c5da7
.travis.yml
@@ -8,11 +8,11 @@ before_install:
   - if [ "$TWBS_TEST" = validate-html ]; then echo "ruby=$(basename $GEMDIR) jekyll=$JEKYLL_VERSION" > pseudo_Gemfile.lock; fi
 install:
   - time npm install -g grunt-cli
-  - ./test-infra/s3_cache.py download 'npm packages' test-infra/npm-shrinkwrap.canonical.json ./node_modules || time ./test-infra/uncached-npm-install.sh
-  - if [ "$TWBS_TEST" = validate-html ]; then ./test-infra/s3_cache.py download rubygems pseudo_Gemfile.lock $GEMDIR || time gem install -N jekyll -v $JEKYLL_VERSION; fi
+  - ./test-infra/s3_cache.py download npm-modules
+  - if [ "$TWBS_TEST" = validate-html ]; then ./test-infra/s3_cache.py download rubygems; fi
 after_script:
-  - if [ "$TWBS_TEST" = core ]; then ./test-infra/s3_cache.py upload 'npm packages' test-infra/npm-shrinkwrap.canonical.json ./node_modules; fi
-  - if [ "$TWBS_TEST" = validate-html ]; then ./test-infra/s3_cache.py upload rubygems pseudo_Gemfile.lock $GEMDIR; fi
+  - if [ "$TWBS_TEST" = core ]; then ./test-infra/s3_cache.py upload npm-modules; fi
+  - if [ "$TWBS_TEST" = validate-html ]; then ./test-infra/s3_cache.py upload rubygems; fi
 env:
   global:
     - JEKYLL_VERSION: 1.5.0
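The per-cache details that these .travis.yml commands used to spell out inline (dependencies file, cache directory, and fallback command) now live in the new test-infra/S3Cachefile.json shown below, so the Travis config only has to name the cache to download or upload.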
test-infra/S3Cachefile.json (new file, 12 lines)
@@ -0,0 +1,12 @@
+{
+    "npm-modules": {
+        "key": "./npm-shrinkwrap.canonical.json",
+        "cache": "../node_modules",
+        "generate": "./uncached-npm-install.sh"
+    },
+    "rubygems": {
+        "key": "../pseudo_Gemfile.lock",
+        "cache": "$GEMDIR",
+        "generate": "gem install -N jekyll -v $JEKYLL_VERSION"
+    }
+}
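Each entry describes one cache: "key" is the file whose contents identify the cache, "cache" is the directory that gets tarred up, and "generate" is the fallback command used when no cached tarball can be downloaded. A minimal sketch of how such an entry is resolved, mirroring the load()/expandvars() logic added to s3_cache.py below (an illustration, not the script itself):

    from json import load
    from os.path import expandvars

    with open('S3Cachefile.json', 'rt') as config_file:
        config = load(config_file)

    cache_info = config['rubygems']              # a cache name, as passed on the command line
    key_file = expandvars(cache_info['key'])     # e.g. ../pseudo_Gemfile.lock
    directory = expandvars(cache_info['cache'])  # "$GEMDIR" is expanded from the environment
    fallback_cmd = cache_info['generate']        # run only if the cached download fails
    print(key_file, directory, fallback_cmd)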
test-infra/s3_cache.py
@@ -2,10 +2,11 @@
 from __future__ import absolute_import, unicode_literals, print_function, division
 
 from sys import argv
-from os import environ, stat, remove as _delete_file
-from os.path import isfile, dirname, basename, abspath
+from os import environ, stat, chdir, remove as _delete_file
+from os.path import isfile, dirname, basename, abspath, realpath, expandvars
 from hashlib import sha256
 from subprocess import check_call as run
+from json import load
 from contextlib import contextmanager
 from datetime import datetime
 
@@ -14,12 +15,9 @@ from boto.s3.key import Key
 from boto.exception import S3ResponseError
 
 
+CONFIG_FILE = './S3Cachefile.json'
 NEED_TO_UPLOAD_MARKER = '.need-to-upload'
 BYTES_PER_MB = 1024 * 1024
-try:
-    BUCKET_NAME = environ['TWBS_S3_BUCKET']
-except KeyError:
-    raise SystemExit("TWBS_S3_BUCKET environment variable not set!")
 
 
 @contextmanager
@@ -71,24 +69,24 @@ def _extract_tarball(directory):
 def download(directory):
     _delete_file_quietly(NEED_TO_UPLOAD_MARKER)
     try:
-        print("Downloading {} tarball from S3...".format(friendly_name))
+        print("Downloading {} tarball from S3...".format(cache_name))
         with timer():
             key.get_contents_to_filename(_tarball_filename_for(directory))
     except S3ResponseError as err:
         open(NEED_TO_UPLOAD_MARKER, 'a').close()
         print(err)
-        raise SystemExit("Cached {} download failed!".format(friendly_name))
+        raise SystemExit("Cached {} download failed!".format(cache_name))
     print("Downloaded {}.".format(_tarball_size(directory)))
     _extract_tarball(directory)
-    print("{} successfully installed from cache.".format(friendly_name))
+    print("{} successfully installed from cache.".format(cache_name))
 
 
 def upload(directory):
     _create_tarball(directory)
-    print("Uploading {} tarball to S3... ({})".format(friendly_name, _tarball_size(directory)))
+    print("Uploading {} tarball to S3... ({})".format(cache_name, _tarball_size(directory)))
     with timer():
         key.set_contents_from_filename(_tarball_filename_for(directory))
-    print("{} cache successfully updated.".format(friendly_name))
+    print("{} cache successfully updated.".format(cache_name))
     _delete_file_quietly(NEED_TO_UPLOAD_MARKER)
 
 
@@ -97,26 +95,57 @@ if __name__ == '__main__':
     #   AWS_ACCESS_KEY_ID -- AWS Access Key ID
     #   AWS_SECRET_ACCESS_KEY -- AWS Secret Access Key
     argv.pop(0)
-    if len(argv) != 4:
-        raise SystemExit("USAGE: s3_cache.py <download | upload> <friendly name> <dependencies file> <directory>")
-    mode, friendly_name, dependencies_file, directory = argv
+    if len(argv) != 2:
+        raise SystemExit("USAGE: s3_cache.py <download | upload> <cache name>")
+    mode, cache_name = argv
+    script_dir = dirname(realpath(__file__))
+    chdir(script_dir)
+    try:
+        with open(CONFIG_FILE, 'rt') as config_file:
+            config = load(config_file)
+    except (IOError, OSError, ValueError) as config_err:
+        print(config_err)
+        raise SystemExit("Error when trying to load config from JSON file!")
 
-    conn = S3Connection()
-    bucket = conn.lookup(BUCKET_NAME)
-    if bucket is None:
-        raise SystemExit("Could not access bucket!")
+    try:
+        cache_info = config[cache_name]
+        key_file = expandvars(cache_info["key"])
+        fallback_cmd = cache_info["generate"]
+        directory = expandvars(cache_info["cache"])
+    except (TypeError, KeyError) as load_err:
+        print(load_err)
+        raise SystemExit("Config for cache named {!r} is missing or malformed!".format(cache_name))
 
-    dependencies_file_hash = _sha256_of_file(dependencies_file)
+    try:
+        try:
+            BUCKET_NAME = environ['TWBS_S3_BUCKET']
+        except KeyError:
+            raise SystemExit("TWBS_S3_BUCKET environment variable not set!")
 
-    key = Key(bucket, dependencies_file_hash)
-    key.storage_class = 'REDUCED_REDUNDANCY'
+        conn = S3Connection()
+        bucket = conn.lookup(BUCKET_NAME)
+        if bucket is None:
+            raise SystemExit("Could not access bucket!")
 
-    if mode == 'download':
-        download(directory)
-    elif mode == 'upload':
-        if isfile(NEED_TO_UPLOAD_MARKER):  # FIXME
-            upload(directory)
-        else:
-            print("No need to upload anything.")
-    else:
-        raise SystemExit("Unrecognized mode {!r}".format(mode))
+        key_file_hash = _sha256_of_file(key_file)
+
+        key = Key(bucket, key_file_hash)
+        key.storage_class = 'REDUCED_REDUNDANCY'
+
+        if mode == 'download':
+            download(directory)
+        elif mode == 'upload':
+            if isfile(NEED_TO_UPLOAD_MARKER):  # FIXME
+                upload(directory)
+            else:
+                print("No need to upload anything.")
+        else:
+            raise SystemExit("Unrecognized mode {!r}".format(mode))
+    except BaseException as exc:
+        if mode != 'download':
+            raise
+        print("Error!:", exc)
+        print("Unable to download from cache.")
+        print("Running fallback command to generate cache directory {!r}: {}".format(directory, fallback_cmd))
+        with timer():
+            run(fallback_cmd, shell=True)
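The S3 object a cache lives under is still named after a hash of the cache's key file, so the tarball is refreshed whenever that file changes; the only difference is that the key file now comes from S3Cachefile.json rather than the command line. A rough sketch of that keying (an illustration only; the real _sha256_of_file helper is not touched by this commit and may differ in detail):

    from hashlib import sha256

    def sha256_of_file(filename):
        # The hex digest becomes the S3 key, so editing the file (e.g.
        # npm-shrinkwrap.canonical.json) misses the cache and makes the
        # "generate" fallback run on the next build.
        hasher = sha256()
        with open(filename, 'rb') as input_file:
            hasher.update(input_file.read())
        return hasher.hexdigest()

    print(sha256_of_file('npm-shrinkwrap.canonical.json'))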
test-infra/uncached-npm-install.sh
@@ -1,5 +1,6 @@
 #!/bin/bash
 set -e
+cd .. # /bootstrap/
 cp test-infra/npm-shrinkwrap.canonical.json npm-shrinkwrap.json
 npm install
 rm npm-shrinkwrap.json
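The added cd .. appears to be needed because s3_cache.py now chdir()s into its own directory (test-infra/) before running a cache's "generate" command, so this fallback script starts in test-infra/ and has to step back up to the repository root before copying the shrinkwrap file and running npm install.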