From de03f4797b614fb192a72c83812c2a04a1939c87 Mon Sep 17 00:00:00 2001
From: Nathan LeClaire
Date: Fri, 10 Apr 2015 10:57:43 -0700
Subject: [PATCH] Allow SEO crawling from docs site

Signed-off-by: Nathan LeClaire
Docker-DCO-1.1-Signed-off-by: Nathan LeClaire (github: nathanleclaire)
---
 docs/release.sh | 15 +++++++++++----
 1 file changed, 11 insertions(+), 4 deletions(-)

diff --git a/docs/release.sh b/docs/release.sh
index 09a85016c1..d01bc0293c 100755
--- a/docs/release.sh
+++ b/docs/release.sh
@@ -22,10 +22,17 @@ EOF
 }
 
 create_robots_txt() {
-	cat > ./sources/robots.txt <<'EOF'
-User-agent: *
-Disallow: /
-EOF
+	if [ "$AWS_S3_BUCKET" == "docs.docker.com" ]; then
+		cat > ./sources/robots.txt <<-'EOF'
+			User-agent: *
+			Allow: /
+		EOF
+	else
+		cat > ./sources/robots.txt <<-'EOF'
+			User-agent: *
+			Disallow: /
+		EOF
+	fi
 }
 
 setup_s3() {
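
Editor's note (not part of the patch): a minimal standalone sketch of the robots.txt
logic this diff adds, assuming AWS_S3_BUCKET is exported by the caller and a
./sources directory exists, as docs/release.sh does. Bucket name and paths mirror
the diff; the example invocation at the bottom is hypothetical.

    #!/usr/bin/env bash
    # Sketch of the conditional robots.txt generation from this patch.
    # NOTE: <<- strips leading *tabs* only, so the heredoc body and the
    # closing EOF below must be indented with tabs, as in the diff.

    create_robots_txt() {
    	if [ "$AWS_S3_BUCKET" == "docs.docker.com" ]; then
    		# Production docs bucket: allow search engines to crawl the site.
    		cat > ./sources/robots.txt <<-'EOF'
    			User-agent: *
    			Allow: /
    		EOF
    	else
    		# Any other (e.g. staging) bucket: keep crawlers out.
    		cat > ./sources/robots.txt <<-'EOF'
    			User-agent: *
    			Disallow: /
    		EOF
    	fi
    }

    # Hypothetical usage:
    mkdir -p ./sources
    AWS_S3_BUCKET="docs.docker.com" create_robots_txt
    cat ./sources/robots.txt   # prints "User-agent: *" then "Allow: /"

The effect of the change is that only the production docs.docker.com bucket is
opened to crawlers, while every other bucket keeps the previous Disallow-all
robots.txt and stays out of search indexes.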