better way of tracking what caches need to be uploaded; fixes #13270
This commit is contained in:
parent
4c049e3600
commit
0b6f1906cd
|
@ -1,12 +1,13 @@
|
||||||
#!/usr/bin/env python2.7
|
#!/usr/bin/env python2.7
|
||||||
|
# pylint: disable=C0301
|
||||||
from __future__ import absolute_import, unicode_literals, print_function, division
|
from __future__ import absolute_import, unicode_literals, print_function, division
|
||||||
|
|
||||||
from sys import argv
|
from sys import argv
|
||||||
from os import environ, stat, chdir, remove as _delete_file
|
from os import environ, stat, chdir, remove as _delete_file
|
||||||
from os.path import isfile, dirname, basename, abspath, realpath, expandvars
|
from os.path import dirname, basename, abspath, realpath, expandvars
|
||||||
from hashlib import sha256
|
from hashlib import sha256
|
||||||
from subprocess import check_call as run
|
from subprocess import check_call as run
|
||||||
from json import load
|
from json import load, dump as save
|
||||||
from contextlib import contextmanager
|
from contextlib import contextmanager
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
|
|
||||||
|
@ -16,7 +17,7 @@ from boto.exception import S3ResponseError
|
||||||
|
|
||||||
|
|
||||||
CONFIG_FILE = './S3Cachefile.json'
|
CONFIG_FILE = './S3Cachefile.json'
|
||||||
NEED_TO_UPLOAD_MARKER = '.need-to-upload'
|
UPLOAD_TODO_FILE = './S3CacheTodo.json'
|
||||||
BYTES_PER_MB = 1024 * 1024
|
BYTES_PER_MB = 1024 * 1024
|
||||||
|
|
||||||
|
|
||||||
|
@ -29,6 +30,24 @@ def timer():
|
||||||
print("\tDone. Took", int(elapsed.total_seconds()), "second(s).")
|
print("\tDone. Took", int(elapsed.total_seconds()), "second(s).")
|
||||||
|
|
||||||
|
|
||||||
|
@contextmanager
def todo_file(writeback=True):
    """Context manager yielding the upload TODO dict loaded from UPLOAD_TODO_FILE.

    The dict maps cache names to True when they still need uploading.
    A missing, unreadable, or corrupt TODO file yields an empty dict.

    :param writeback: when True (the default), persist the (possibly
        mutated) dict back to UPLOAD_TODO_FILE on exit.
    """
    try:
        with open(UPLOAD_TODO_FILE, 'rt') as json_file:
            todo = load(json_file)
    except (IOError, OSError, ValueError):
        # No TODO file yet, or it is unreadable/corrupt: start fresh.
        todo = {}

    try:
        yield todo
    finally:
        # BUGFIX: without try/finally, an exception raised inside the
        # managed block skipped the writeback entirely and TODO updates
        # were silently lost.  Saving in `finally` guarantees persistence.
        if writeback:
            try:
                with open(UPLOAD_TODO_FILE, 'wt') as json_file:
                    save(todo, json_file)
            except (OSError, IOError) as save_err:
                print("Error saving {}:".format(UPLOAD_TODO_FILE), save_err)
|
||||||
|
|
||||||
|
|
||||||
def _sha256_of_file(filename):
|
def _sha256_of_file(filename):
|
||||||
hasher = sha256()
|
hasher = sha256()
|
||||||
with open(filename, 'rb') as input_file:
|
with open(filename, 'rb') as input_file:
|
||||||
|
@ -45,6 +64,21 @@ def _delete_file_quietly(filename):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
def mark_needs_uploading(cache_name):
    """Flag *cache_name* in the persistent TODO file as needing an S3 upload."""
    with todo_file() as pending:
        pending[cache_name] = True
|
||||||
|
|
||||||
|
|
||||||
|
def mark_uploaded(cache_name):
    """Remove *cache_name*'s pending-upload flag from the TODO file, if present."""
    with todo_file() as pending:
        if cache_name in pending:
            del pending[cache_name]
|
||||||
|
|
||||||
|
|
||||||
|
def need_to_upload(cache_name):
    """Return True when *cache_name* is flagged as still needing an upload.

    Opens the TODO file read-only (writeback=False) so this query never
    rewrites it.
    """
    with todo_file(writeback=False) as pending:
        flagged = pending.get(cache_name, False)
    return flagged
|
||||||
|
|
||||||
|
|
||||||
def _tarball_size(directory):
    """Return the size of *directory*'s cache tarball as a human-readable string.

    Uses integer division by BYTES_PER_MB (1024 * 1024), so anything under
    one mebibyte reports as "0 MiB".
    """
    # Renamed local from the misleading `kib`: the divisor is a mebibyte
    # and the result is printed with a "MiB" suffix.
    mib = stat(_tarball_filename_for(directory)).st_size // BYTES_PER_MB
    return "{} MiB".format(mib)
|
||||||
|
@ -67,14 +101,13 @@ def _extract_tarball(directory):
|
||||||
|
|
||||||
|
|
||||||
def download(directory):
    """Fetch the cache tarball for *directory* from S3 and unpack it.

    On S3 failure, flags the cache for re-upload and exits via SystemExit.
    NOTE(review): relies on module-level `cache_name` and `key` being set
    before this is called -- confirm against the __main__ block.
    """
    # Clear any stale pending-upload flag before attempting the download.
    mark_uploaded(cache_name)
    tarball = _tarball_filename_for(directory)
    try:
        print("Downloading {} tarball from S3...".format(cache_name))
        with timer():
            key.get_contents_to_filename(tarball)
    except S3ResponseError as err:
        # Download failed, so whatever gets built locally must be uploaded.
        mark_needs_uploading(cache_name)
        print(err)
        raise SystemExit("Cached {} download failed!".format(cache_name))
    print("Downloaded {}.".format(_tarball_size(directory)))
    _extract_tarball(directory)
|
||||||
|
@ -87,7 +120,7 @@ def upload(directory):
|
||||||
with timer():
|
with timer():
|
||||||
key.set_contents_from_filename(_tarball_filename_for(directory))
|
key.set_contents_from_filename(_tarball_filename_for(directory))
|
||||||
print("{} cache successfully updated.".format(cache_name))
|
print("{} cache successfully updated.".format(cache_name))
|
||||||
_delete_file_quietly(NEED_TO_UPLOAD_MARKER)
|
mark_uploaded(cache_name)
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
|
@ -135,7 +168,7 @@ if __name__ == '__main__':
|
||||||
if mode == 'download':
|
if mode == 'download':
|
||||||
download(directory)
|
download(directory)
|
||||||
elif mode == 'upload':
|
elif mode == 'upload':
|
||||||
if isfile(NEED_TO_UPLOAD_MARKER): # FIXME
|
if need_to_upload(cache_name):
|
||||||
upload(directory)
|
upload(directory)
|
||||||
else:
|
else:
|
||||||
print("No need to upload anything.")
|
print("No need to upload anything.")
|
||||||
|
|
Loading…
Reference in New Issue