Merge pull request #7651 from zmerlynn/upload_hash_in_util

Also push .sha1 for devel builds
Filipe Brandenburger 2015-05-06 09:07:39 -07:00
commit b7f9e2cea0
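
In short, the change records each tar's SHA-1 in a sibling `<tar>.sha1` file, pushes that file alongside the tar, and skips the upload on later runs when the recorded hash still matches. A minimal sketch of that skip logic, using a hypothetical `server.tar.gz` in the current directory and a placeholder bucket (the real paths and helpers come from `upload-server-tars` in the diff below):

```sh
#!/usr/bin/env bash
# Illustration only; the actual logic lives in the helpers added by this commit.
tar=server.tar.gz                                   # hypothetical local build artifact
hash=$(sha1sum "${tar}" | awk '{ print $1 }')       # same kind of hash the script records

if [[ -e "${tar}.sha1" && "$(cat "${tar}.sha1")" == "${hash}" ]]; then
  echo "+++ ${tar} already staged ('rm ${tar}.sha1' to force)"
else
  echo "${hash}" > "${tar}.sha1"
  # gsutil cp "${tar}" "${tar}.sha1" gs://<your-staging-bucket>/devel   # upload both files
fi
```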

@@ -134,6 +134,41 @@ function detect-project () {
   fi
 }
 
+function sha1sum-file() {
+  if which shasum >/dev/null 2>&1; then
+    shasum -a1 "$1" | awk '{ print $1 }'
+  else
+    sha1sum "$1" | awk '{ print $1 }'
+  fi
+}
+
+function already-staged() {
+  local -r file=$1
+  local -r newsum=$2
+
+  [[ -e "${file}.sha1" ]] || return 1
+
+  local oldsum
+  oldsum=$(cat "${file}.sha1")
+
+  [[ "${oldsum}" == "${newsum}" ]]
+}
+
+# Copy a release tar, if we don't already think it's staged in GCS
+function copy-if-not-staged() {
+  local -r staging_path=$1
+  local -r gs_url=$2
+  local -r tar=$3
+  local -r hash=$4
+
+  if already-staged "${tar}" "${hash}"; then
+    echo "+++ $(basename ${tar}) already staged ('rm ${tar}.sha1' to force)"
+  else
echo "${server_hash}" > "${tar}.sha1"
+    gsutil -m -q -h "Cache-Control:private, max-age=0" cp "${tar}" "${tar}.sha1" "${staging_path}"
+    gsutil -m acl ch -g all:R "${gs_url}" "${gs_url}.sha1" >/dev/null 2>&1
+  fi
+}
 
 # Take the local tar files and upload them to Google Storage. They will then be
 # downloaded by the master as part of the start up script for the master.
@@ -155,6 +190,7 @@ function upload-server-tars() {
   else
     project_hash=$(echo -n "$PROJECT" | md5sum | awk '{ print $1 }')
   fi
+
   # This requires 1 million projects before the probability of collision is 50%
   # that's probably good enough for now :P
   project_hash=${project_hash:0:10}
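
The collision remark in the hunk above is a standard birthday-bound estimate: truncating to 10 hex digits keeps 40 bits, i.e. 2^40 possible prefixes, and a ~50% chance of any two projects colliding needs roughly sqrt(2 * 2^40 * ln 2) projects, a bit over a million. A one-liner to check the arithmetic (not part of the change):

```sh
# Birthday bound for a 10-hex-digit (40-bit) hash prefix.
awk 'BEGIN { n = 2^40; printf "%.0f projects for ~50%% collision odds\n", sqrt(2 * n * log(2)) }'
# Prints roughly 1.2 million, so "1 million projects" is the right order of magnitude.
```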
@@ -169,13 +205,16 @@ function upload-server-tars() {
   local -r staging_path="${staging_bucket}/devel"
 
+  local server_hash
+  local salt_hash
+  server_hash=$(sha1sum-file "${SERVER_BINARY_TAR}")
+  salt_hash=$(sha1sum-file "${SALT_TAR}")
+
   echo "+++ Staging server tars to Google Storage: ${staging_path}"
   local server_binary_gs_url="${staging_path}/${SERVER_BINARY_TAR##*/}"
-  gsutil -q -h "Cache-Control:private, max-age=0" cp "${SERVER_BINARY_TAR}" "${server_binary_gs_url}"
-  gsutil acl ch -g all:R "${server_binary_gs_url}" >/dev/null 2>&1
   local salt_gs_url="${staging_path}/${SALT_TAR##*/}"
-  gsutil -q -h "Cache-Control:private, max-age=0" cp "${SALT_TAR}" "${salt_gs_url}"
-  gsutil acl ch -g all:R "${salt_gs_url}" >/dev/null 2>&1
+  copy-if-not-staged "${staging_path}" "${server_binary_gs_url}" "${SERVER_BINARY_TAR}" "${server_hash}"
+  copy-if-not-staged "${staging_path}" "${salt_gs_url}" "${SALT_TAR}" "${salt_hash}"
 
   # Convert from gs:// URL to an https:// URL
   SERVER_BINARY_TAR_URL="${server_binary_gs_url/gs:\/\//https://storage.googleapis.com/}"
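
The staged `.sha1` sits next to each tar in GCS, so whatever downloads the tar (the master's startup path, per the comment above) can verify it after fetching. That consumer is not part of this diff; the following is only a hypothetical check, with `SERVER_BINARY_TAR_URL` standing in for the converted https:// URL:

```sh
#!/usr/bin/env bash
# Hypothetical download-and-verify step; not shown in this commit.
url="${SERVER_BINARY_TAR_URL:?set to the https:// URL produced above}"
curl -fsSL -o server.tar.gz "${url}"
curl -fsSL -o server.tar.gz.sha1 "${url}.sha1"

expected=$(cat server.tar.gz.sha1)                    # the bare hash written next to the tar
actual=$(sha1sum server.tar.gz | awk '{ print $1 }')
if [[ "${expected}" == "${actual}" ]]; then
  echo "checksum OK"
else
  echo "checksum mismatch" >&2
  exit 1
fi
```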