pin awscli to 1.3.9, and then complexify the syncs to make them more likely to complete

Docker-DCO-1.1-Signed-off-by: SvenDowideit <SvenDowideit@home.org.au> (github: SvenDowideit)
SvenDowideit 2014-06-18 12:38:45 +10:00
parent f0ec901819
commit 6b352d1cfe
2 changed files with 41 additions and 8 deletions

View File

@@ -13,10 +13,8 @@ RUN pip install mkdocs
 #RUN easy_install -U setuptools
 #RUN pip install MarkdownTools2
-# this week I seem to need the latest dev release of awscli too
-# awscli 1.3.6 does --error-document correctly
-# https://github.com/aws/aws-cli/commit/edc2290e173dfaedc70b48cfa3624d58c533c6c3
-RUN pip install awscli
+# this version works, the current versions fail in different ways
+RUN pip install awscli==1.3.9
 # get my sitemap.xml branch of mkdocs and use that for now
 RUN git clone https://github.com/SvenDowideit/mkdocs &&\
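
As a sanity check on the pin, one can rebuild the docs image and ask the installed client for its version. The image tag below is hypothetical; this diff does not show how the image is named:

    # hypothetical tag; build from the directory containing this Dockerfile
    docker build -t docs-build .
    docker run --rm docs-build aws --version
    # expect output along the lines of: aws-cli/1.3.9 Python/2.7.x Linux/...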

View File

@@ -30,10 +30,10 @@ echo "cfg file: $AWS_CONFIG_FILE ; profile: $AWS_DEFAULT_PROFILE"
 setup_s3() {
     echo "Create $BUCKET"
     # Try creating the bucket. Ignore errors (it might already exist).
-    aws s3 mb s3://$BUCKET 2>/dev/null || true
+    aws s3 mb --profile $BUCKET s3://$BUCKET 2>/dev/null || true
     # Check access to the bucket.
     echo "test $BUCKET exists"
-    aws s3 ls s3://$BUCKET
+    aws s3 --profile $BUCKET ls s3://$BUCKET
     # Make the bucket accessible through website endpoints.
     echo "make $BUCKET accessible as a website"
     #aws s3 website s3://$BUCKET --index-document index.html --error-document jsearch/index.html
@@ -41,7 +41,7 @@ setup_s3() {
     echo
     echo $s3conf
     echo
-    aws s3api put-bucket-website --bucket $BUCKET --website-configuration "$s3conf"
+    aws s3api --profile $BUCKET put-bucket-website --bucket $BUCKET --website-configuration "$s3conf"
 }
 
 build_current_documentation() {
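
The value of $s3conf is defined outside these hunks, but the commented-out aws s3 website line above hints at its shape. A minimal sketch of an equivalent website configuration for put-bucket-website, assuming the same index and error documents as that comment (not the script's actual definition):

    s3conf='{"IndexDocument": {"Suffix": "index.html"}, "ErrorDocument": {"Key": "jsearch/index.html"}}'
    aws s3api --profile $BUCKET put-bucket-website --bucket $BUCKET --website-configuration "$s3conf"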
@@ -57,7 +57,42 @@ upload_current_documentation() {
     echo " to $dst"
     echo
     #s3cmd --recursive --follow-symlinks --preserve --acl-public sync "$src" "$dst"
-    aws s3 sync --cache-control "max-age=3600" --acl public-read --exclude "*.rej" --exclude "*.rst" --exclude "*.orig" --exclude "*.py" "$src" "$dst"
+    #aws s3 cp --profile $BUCKET --cache-control "max-age=3600" --acl public-read "site/search_content.json" "$dst"
+
+    # a really complicated way to send only the files we want
+    # if there are too many in any one set, aws s3 sync seems to fall over with 2 files to go
+    endings=( json html xml css js gif png JPG )
+    for i in ${endings[@]}; do
+        include=""
+        for j in ${endings[@]}; do
+            if [ "$i" != "$j" ];then
+                include="$include --exclude *.$j"
+            fi
+        done
+        echo "uploading *.$i"
+        run="aws s3 sync --profile $BUCKET --cache-control \"max-age=3600\" --acl public-read \
+            $include \
+            --exclude *.txt \
+            --exclude *.text* \
+            --exclude *Dockerfile \
+            --exclude *.DS_Store \
+            --exclude *.psd \
+            --exclude *.ai \
+            --exclude *.svg \
+            --exclude *.eot \
+            --exclude *.otf \
+            --exclude *.ttf \
+            --exclude *.woff \
+            --exclude *.rej \
+            --exclude *.rst \
+            --exclude *.orig \
+            --exclude *.py \
+            $src $dst"
+        echo "======================="
+        #echo "$run"
+        #echo "======================="
+        $run
+    done
 }
 
 setup_s3
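
One quirk of building $run as a string and expanding it unquoted: the escaped quotes around max-age=3600 survive word splitting as literal quote characters, and each --exclude pattern is exposed to glob expansion in the current directory. A minimal standalone sketch of the same one-extension-per-pass technique using a bash array, which sidesteps both; BUCKET, src and dst are assumed to be set as in the script above:

    #!/bin/bash
    # upload one file type per pass; the exclude patterns mirror the diff above
    endings=( json html xml css js gif png JPG )
    always_exclude=( '*.txt' '*.text*' '*Dockerfile' '*.DS_Store' '*.psd' '*.ai'
                     '*.svg' '*.eot' '*.otf' '*.ttf' '*.woff' '*.rej' '*.rst' '*.orig' '*.py' )
    for i in "${endings[@]}"; do
        args=( s3 sync --profile "$BUCKET" --cache-control "max-age=3600" --acl public-read )
        # exclude every other ending so only *.$i goes out on this pass
        for j in "${endings[@]}"; do
            [ "$i" != "$j" ] && args+=( --exclude "*.$j" )
        done
        for pat in "${always_exclude[@]}"; do
            args+=( --exclude "$pat" )
        done
        echo "uploading *.$i"
        aws "${args[@]}" "$src" "$dst"
    done

Because every element of the array is passed to aws as a single word, no pattern is ever re-split or globbed by the shell, which is the failure mode the string-based version has to dodge with backslash escapes.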