#!/usr/bin/env python2.7
# pylint: disable=C0301
from __future__ import absolute_import, unicode_literals, print_function, division
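# Overview: caches a directory to/from an S3 bucket as a gzipped tarball.
# The S3 key is the SHA-256 hash of a per-cache "key" file, so a cached
# tarball is only reused while that file's contents remain unchanged.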

from sys import argv
from os import environ, stat, chdir, remove as _delete_file
from os.path import dirname, basename, abspath, realpath, expandvars
from hashlib import sha256
from subprocess import check_call as run
from json import load, dump as save
from contextlib import contextmanager
from datetime import datetime

from boto.s3.connection import S3Connection
from boto.s3.key import Key
from boto.exception import S3ResponseError


CONFIG_FILE = './S3Cachefile.json'
UPLOAD_TODO_FILE = './S3CacheTodo.json'
BYTES_PER_MB = 1024 * 1024
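# Illustrative S3Cachefile.json entry -- the three field names are the ones
# read in __main__ below; the example name and values are hypothetical:
#
#     {
#         "node_modules": {
#             "key": "$PWD/npm-shrinkwrap.json",
#             "generate": "npm install",
#             "cache": "$PWD/node_modules"
#         }
#     }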


@contextmanager
def timer():
    start = datetime.utcnow()
    yield
    end = datetime.utcnow()
    elapsed = end - start
    print("\tDone. Took", int(elapsed.total_seconds()), "second(s).")


@contextmanager
def todo_file(writeback=True):
    try:
        with open(UPLOAD_TODO_FILE, 'rt') as json_file:
            todo = load(json_file)
    except (IOError, OSError, ValueError):
        todo = {}

    yield todo

    if writeback:
        try:
            with open(UPLOAD_TODO_FILE, 'wt') as json_file:
                save(todo, json_file)
        except (OSError, IOError) as save_err:
            print("Error saving {}:".format(UPLOAD_TODO_FILE), save_err)


def _sha256_of_file(filename):
    hasher = sha256()
    with open(filename, 'rb') as input_file:
        hasher.update(input_file.read())
    file_hash = hasher.hexdigest()
    print('sha256({}) = {}'.format(filename, file_hash))
    return file_hash


def _delete_file_quietly(filename):
    try:
        _delete_file(filename)
    except (OSError, IOError):
        pass


def mark_needs_uploading(cache_name):
    with todo_file() as todo:
        todo[cache_name] = True


def mark_uploaded(cache_name):
    with todo_file() as todo:
        todo.pop(cache_name, None)


def need_to_upload(cache_name):
    with todo_file(writeback=False) as todo:
        return todo.get(cache_name, False)
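# Dirty-flag lifecycle (as wired up below): a failed download marks the cache
# as needing upload, a successful upload clears the flag, and 'upload' mode
# only uploads while the flag is set.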


def _tarball_size(directory):
    mib = stat(_tarball_filename_for(directory)).st_size // BYTES_PER_MB
    return "{} MiB".format(mib)


def _tarball_filename_for(directory):
    return abspath('./{}.tar.gz'.format(basename(directory)))
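# Example (hypothetical path): _tarball_filename_for('/home/u/node_modules')
# returns abspath('./node_modules.tar.gz'); the tarball lands in the current
# working directory, which __main__ sets to the script's own directory.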


def _create_tarball(directory):
    print("Creating tarball of {}...".format(directory))
    with timer():
        run(['tar', '-czf', _tarball_filename_for(directory), '-C', dirname(directory), basename(directory)])


def _extract_tarball(directory):
    print("Extracting tarball of {}...".format(directory))
    with timer():
        run(['tar', '-xzf', _tarball_filename_for(directory), '-C', dirname(directory)])


def download(directory):
    # cache_name and key are globals set in __main__ below.
    mark_uploaded(cache_name)  # reset the dirty flag; re-set on failure below
    try:
        print("Downloading {} tarball from S3...".format(cache_name))
        with timer():
            key.get_contents_to_filename(_tarball_filename_for(directory))
    except S3ResponseError:
        mark_needs_uploading(cache_name)
        raise SystemExit("Cached {} download failed!".format(cache_name))
    print("Downloaded {}.".format(_tarball_size(directory)))
    _extract_tarball(directory)
    print("{} successfully installed from cache.".format(cache_name))


def upload(directory):
    _create_tarball(directory)
    print("Uploading {} tarball to S3... ({})".format(cache_name, _tarball_size(directory)))
    with timer():
        key.set_contents_from_filename(_tarball_filename_for(directory))
    print("{} cache successfully updated.".format(cache_name))
    mark_uploaded(cache_name)


if __name__ == '__main__':
    # Uses environment variables:
    #   AWS_ACCESS_KEY_ID -- AWS Access Key ID
    #   AWS_SECRET_ACCESS_KEY -- AWS Secret Access Key
    argv.pop(0)  # drop the script's own name
    if len(argv) != 2:
        raise SystemExit("USAGE: s3_cache.py <download | upload> <cache name>")
    mode, cache_name = argv

    # Work relative to the script's directory so config and tarball paths resolve consistently.
    script_dir = dirname(realpath(__file__))
    chdir(script_dir)
    try:
        with open(CONFIG_FILE, 'rt') as config_file:
            config = load(config_file)
    except (IOError, OSError, ValueError) as config_err:
        print(config_err)
        raise SystemExit("Error when trying to load config from JSON file!")

    try:
        cache_info = config[cache_name]
        key_file = expandvars(cache_info["key"])
        fallback_cmd = cache_info["generate"]
        directory = expandvars(cache_info["cache"])
    except (TypeError, KeyError) as load_err:
        print(load_err)
        raise SystemExit("Config for cache named {!r} is missing or malformed!".format(cache_name))

    try:
        try:
            BUCKET_NAME = environ['TWBS_S3_BUCKET']
        except KeyError:
            raise SystemExit("TWBS_S3_BUCKET environment variable not set!")

        # boto picks up AWS credentials from the environment variables noted above.
        conn = S3Connection()
        bucket = conn.lookup(BUCKET_NAME)
        if bucket is None:
            raise SystemExit("Could not access bucket!")

        key_file_hash = _sha256_of_file(key_file)

        key = Key(bucket, key_file_hash)
        key.storage_class = 'REDUCED_REDUNDANCY'

        if mode == 'download':
            download(directory)
        elif mode == 'upload':
            if need_to_upload(cache_name):
                upload(directory)
            else:
                print("No need to upload anything.")
        else:
            raise SystemExit("Unrecognized mode {!r}".format(mode))
    except BaseException as exc:
        if mode != 'download':
            raise
        print("Error!:", exc)
        print("Unable to download from cache.")
        print("Running fallback command to generate cache directory {!r}: {}".format(directory, fallback_cmd))
        with timer():
            run(fallback_cmd, shell=True)
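# Example invocations (the cache name is hypothetical; it must match an entry
# in S3Cachefile.json):
#     ./s3_cache.py download node_modules
#     ./s3_cache.py upload node_modules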