- if [ "$TWBS_TEST" = validate-html ]; then echo "ruby=$(basename $(rvm gemdir)) jekyll=$JEKYLL_VERSION" > pseudo_Gemfile.lock; fi
install:
- time npm install -g grunt-cli
- - time ./test-infra/s3_cache.py download 'npm packages' test-infra/npm-shrinkwrap.canonical.json ./node_modules || time ./test-infra/uncached-npm-install.sh
- - if [ "$TWBS_TEST" = validate-html ]; then time ./test-infra/s3_cache.py download rubygems pseudo_Gemfile.lock $(rvm gemdir) || gem install -N jekyll -v $JEKYLL_VERSION; fi
+ - ./test-infra/s3_cache.py download 'npm packages' test-infra/npm-shrinkwrap.canonical.json ./node_modules || time ./test-infra/uncached-npm-install.sh
+ - if [ "$TWBS_TEST" = validate-html ]; then ./test-infra/s3_cache.py download rubygems pseudo_Gemfile.lock $(rvm gemdir) || time gem install -N jekyll -v $JEKYLL_VERSION; fi
after_script:
- - if [ "$TWBS_TEST" = core ]; then time ./test-infra/s3_cache.py upload 'npm packages' test-infra/npm-shrinkwrap.canonical.json ./node_modules; fi
- - if [ "$TWBS_TEST" = validate-html ]; then time ./test-infra/s3_cache.py upload rubygems pseudo_Gemfile.lock $(rvm gemdir); fi
+ - if [ "$TWBS_TEST" = core ]; then ./test-infra/s3_cache.py upload 'npm packages' test-infra/npm-shrinkwrap.canonical.json ./node_modules; fi
+ - if [ "$TWBS_TEST" = validate-html ]; then ./test-infra/s3_cache.py upload rubygems pseudo_Gemfile.lock $(rvm gemdir); fi
env:
global:
- JEKYLL_VERSION: 1.5.0
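
The second argument to `s3_cache.py` is a dependency-spec file (`npm-shrinkwrap.canonical.json` for the npm cache, the generated `pseudo_Gemfile.lock` for the rubygems cache), so a cached tarball is only reused while that file's contents stay the same. A minimal sketch of that idea, assuming the script derives the S3 object name from a hash of the spec file; the `cache_key` helper and the key layout are illustrative assumptions, not the actual implementation shown in the hunks below:

```python
# Illustrative sketch only: the helper name and key layout are assumptions,
# not the real s3_cache.py code.
from hashlib import sha256
from os.path import basename

def cache_key(dependencies_file, directory):
    """Name the S3 tarball after the spec file's content hash."""
    with open(dependencies_file, 'rb') as f:
        spec_hash = sha256(f.read()).hexdigest()
    # One tarball per (spec hash, cached directory) pair.
    return '{}/{}.tar.gz'.format(spec_hash, basename(directory))

# Recreate the pseudo_Gemfile.lock that before_install writes, then key on it.
with open('pseudo_Gemfile.lock', 'w') as f:
    f.write('ruby=ruby-1.9.3 jekyll=1.5.0\n')
print(cache_key('pseudo_Gemfile.lock', '/path/to/gemdir'))
```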
from os.path import isfile, dirname, basename, abspath
from hashlib import sha256
from subprocess import check_call as run
+from contextlib import contextmanager
+from datetime import datetime
from boto.s3.connection import S3Connection
from boto.s3.key import Key
raise SystemExit("TWBS_S3_BUCKET environment variable not set!")
+@contextmanager
+def timer():
+    start = datetime.utcnow()
+    yield
+    end = datetime.utcnow()
+    elapsed = end - start
+    print("\tDone. Took", int(elapsed.total_seconds()), "seconds.")
+
+
def _sha256_of_file(filename):
    hasher = sha256()
    with open(filename, 'rb') as input_file:
def _create_tarball(directory):
print("Creating tarball of {}...".format(directory))
- run(['tar', '-czf', _tarball_filename_for(directory), '-C', dirname(directory), basename(directory)])
+ with timer():
+ run(['tar', '-czf', _tarball_filename_for(directory), '-C', dirname(directory), basename(directory)])
def _extract_tarball(directory):
print("Extracting tarball of {}...".format(directory))
- run(['tar', '-xzf', _tarball_filename_for(directory), '-C', dirname(directory)])
+ with timer():
+ run(['tar', '-xzf', _tarball_filename_for(directory), '-C', dirname(directory)])
def download(directory):
    _delete_file_quietly(NEED_TO_UPLOAD_MARKER)
    try:
        print("Downloading {} tarball from S3...".format(friendly_name))
-        key.get_contents_to_filename(_tarball_filename_for(directory))
+        with timer():
+            key.get_contents_to_filename(_tarball_filename_for(directory))
    except S3ResponseError as err:
        open(NEED_TO_UPLOAD_MARKER, 'a').close()
        print(err)
def upload(directory):
    _create_tarball(directory)
    print("Uploading {} tarball to S3... ({})".format(friendly_name, _tarball_size(directory)))
-    key.set_contents_from_filename(_tarball_filename_for(directory))
+    with timer():
+        key.set_contents_from_filename(_tarball_filename_for(directory))
    print("{} cache successfully updated.".format(friendly_name))
    _delete_file_quietly(NEED_TO_UPLOAD_MARKER)
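
For reference, the added `timer()` context manager can be exercised on its own to see the log line it produces in place of the shell-level `time` prefix; the body below is copied from the diff, with a `sleep` standing in for a slow download or extract step:

```python
# Standalone copy of the timer() added above; the sleep is only illustrative.
from contextlib import contextmanager
from datetime import datetime
from time import sleep

@contextmanager
def timer():
    start = datetime.utcnow()
    yield
    end = datetime.utcnow()
    elapsed = end - start
    print("\tDone. Took", int(elapsed.total_seconds()), "seconds.")

with timer():
    sleep(2)  # stands in for key.get_contents_to_filename(...), tar, etc.
# Prints roughly: "	Done. Took 2 seconds."
```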