Merge pull request #8958 from mbruzek/build-local

Use locally built kubernetes binaries in charms
Eric Tune 2015-06-01 09:23:58 -07:00
commit 84dfa9b1b8
4 changed files with 67 additions and 38 deletions

View File

@@ -7,7 +7,7 @@ kubernetes-local:
"gui-y": "0"
expose: true
options:
version: "v0.17.1"
version: "local"
docker:
charm: cs:trusty/docker
num_units: 2

View File

@@ -1,6 +1,6 @@
server {
listen %(api_bind_address)s:80;
location %(web_uri)s {
alias /opt/kubernetes/_output/local/bin/linux/amd64/;
alias %(alias)s;
}
}
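
The %(name)s placeholders above are Python string-formatting keys; the charm fills them from the template_data dict built by get_template_data() in the hooks (third file in this diff). A minimal rendering sketch, assuming plain %-interpolation and the version: "local" case; the bind address and charm directory below are illustrative values, not taken from this PR:

# Sketch only: render the nginx location template with %-formatting.
NGINX_TEMPLATE = """\
server {
    listen %(api_bind_address)s:80;
    location %(web_uri)s {
        alias %(alias)s;
    }
}
"""

template_data = {
    'api_bind_address': '10.0.3.1',  # illustrative unit address
    'web_uri': '/kubernetes/local/local/bin/linux/amd64/',  # from get_template_data()
    'alias': '/var/lib/juju/agents/unit-kubernetes-master-0/charm/files/output/',  # hypothetical charm_dir
}

print(NGINX_TEMPLATE % template_data)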

View File

@@ -59,46 +59,54 @@ def config_changed():
config = hookenv.config()
# Get the version of kubernetes to install.
version = config['version']
# Get the package architecture, rather than from the kernel (uname -m).
arch = subprocess.check_output(['dpkg', '--print-architecture']).strip()
kubernetes_dir = path('/opt/kubernetes')
if not kubernetes_dir.exists():
print('The source directory {0} does not exist'.format(kubernetes_dir))
print('Was the kubernetes code cloned during install?')
exit(1)
if version in ['source', 'head', 'master']:
if version == 'master':
# The 'master' branch of kubernetes is used when master is configured.
branch = 'master'
elif version == 'local':
# Check for kubernetes binaries in the local files/output directory.
branch = None
else:
# Create a branch to a tag.
# Create a branch to a tag to get the release version.
branch = 'tags/{0}'.format(version)
# Construct the path to the binaries using the arch.
output_path = kubernetes_dir / '_output/local/bin/linux' / arch
installer = KubernetesInstaller(arch, version, output_path)
# Get the package architecture, rather than arch from the kernel (uname -m).
arch = subprocess.check_output(['dpkg', '--print-architecture']).strip()
# Change to the kubernetes directory (git repository).
with kubernetes_dir:
# Create a command to get the current branch.
git_branch = 'git branch | grep "\*" | cut -d" " -f2'
current_branch = subprocess.check_output(git_branch, shell=True).strip()
print('Current branch: ', current_branch)
# Create the path to a file to indicate if the build was broken.
broken_build = charm_dir / '.broken_build'
# write out the .broken_build file while this block is executing.
with check_sentinel(broken_build) as last_build_failed:
print('Last build failed: ', last_build_failed)
# Rebuild if the current version is different or last build failed.
if current_branch != version or last_build_failed:
installer.build(branch)
if not output_path.exists():
broken_build.touch()
else:
print('Notifying minions of version ' + version)
# Notify the minions of a version change.
for r in hookenv.relation_ids('minions-api'):
hookenv.relation_set(r, version=version)
print('Done notifying minions of version ' + version)
if not branch:
output_path = charm_dir / 'files/output'
installer = KubernetesInstaller(arch, version, output_path)
else:
# Build the kubernetes binaries from source on the units.
kubernetes_dir = path('/opt/kubernetes')
# Construct the path to the binaries using the arch.
output_path = kubernetes_dir / '_output/local/bin/linux' / arch
installer = KubernetesInstaller(arch, version, output_path)
if not kubernetes_dir.exists():
print('The source directory {0} does not exist'.format(kubernetes_dir))
print('Was the kubernetes code cloned during install?')
exit(1)
# Change to the kubernetes directory (git repository).
with kubernetes_dir:
# Create a command to get the current branch.
git_branch = 'git branch | grep "\*" | cut -d" " -f2'
current_branch = subprocess.check_output(git_branch, shell=True).strip()
print('Current branch: ', current_branch)
# Create the path to a file to indicate if the build was broken.
broken_build = charm_dir / '.broken_build'
# write out the .broken_build file while this block is executing.
with check_sentinel(broken_build) as last_build_failed:
print('Last build failed: ', last_build_failed)
# Rebuild if current version is different or last build failed.
if current_branch != version or last_build_failed:
installer.build(branch)
if not output_path.isdir():
broken_build.touch()
# Create the symbolic links to the right directories.
installer.install()
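
The rewritten config_changed() now has three cases. A hypothetical helper (not part of the charm) that condenses the new selection logic, shown only to make the control flow easier to follow:

from path import path  # path.py, the same library the hooks use


def choose_install_source(version, arch, charm_dir):
    """Sketch: map the charm's 'version' option to (branch, output_path)."""
    if version == 'master':
        branch = 'master'                      # build the tip of master
    elif version == 'local':
        branch = None                          # skip the build entirely
    else:
        branch = 'tags/{0}'.format(version)    # build a release tag, e.g. v0.17.1
    if branch is None:
        # Pre-built binaries shipped with the charm by build-local
        # (see the shell functions in the last file of this diff).
        output_path = charm_dir / 'files/output'
    else:
        # Binaries produced by building the /opt/kubernetes checkout on the unit.
        output_path = path('/opt/kubernetes') / '_output/local/bin/linux' / arch
    return branch, output_path

With version: "local" this yields (None, <charm_dir>/files/output); with version: "v0.17.1" it yields ('tags/v0.17.1', /opt/kubernetes/_output/local/bin/linux/amd64).
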
@@ -141,6 +149,7 @@ def relation_changed():
# Send api endpoint to minions
notify_minions()
@hooks.hook('network-relation-changed')
def network_relation_changed():
relation_id = hookenv.relation_id()
@@ -156,11 +165,13 @@ def notify_minions():
hostname=hookenv.unit_private_ip(),
port=8080,
version=config['version'])
print("Notified minions of version " + config['version'])
def get_template_data():
rels = hookenv.relations()
config = hookenv.config()
version = config['version']
template_data = {}
template_data['etcd_servers'] = ",".join([
"http://%s:%s" % (s[0], s[1]) for s in sorted(
@@ -172,8 +183,14 @@ def get_template_data():
template_data['api_server_address'] = "http://%s:%s" % (
hookenv.unit_private_ip(), 8080)
arch = subprocess.check_output(['dpkg', '--print-architecture']).strip()
template_data['web_uri'] = "/kubernetes/%s/local/bin/linux/%s/" % (
config['version'], arch)
template_data['web_uri'] = "/kubernetes/%s/local/bin/linux/%s/" % (version,
arch)
if version == 'local':
template_data['alias'] = hookenv.charm_dir() + '/files/output/'
else:
directory = '/opt/kubernetes/_output/local/bin/linux/%s/' % arch
template_data['alias'] = directory
_encode(template_data)
return template_data
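
config_changed() earlier in this file wraps the rebuild in check_sentinel(broken_build), a helper not shown in this diff: the surrounding comments say it writes the .broken_build file while the block executes and yields whether the last build failed. One plausible minimal shape under those assumptions (the charm's actual helper may differ):

from contextlib import contextmanager


@contextmanager
def check_sentinel(filepath):
    # Sketch only: report whether the sentinel existed on entry (i.e. the
    # last build failed), keep it on disk while the block runs, and clear
    # it afterwards; if the block raises, the sentinel is left in place.
    last_build_failed = filepath.exists()
    filepath.touch()
    yield last_build_failed
    if filepath.exists():
        filepath.remove()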

View File

@@ -30,11 +30,21 @@ function verify-prereqs() {
gather_installation_reqs
}
function build-local() {
# Build the binaries locally that are used in the charms.
make all WHAT="cmd/kube-apiserver cmd/kubectl cmd/kube-controller-manager plugin/cmd/kube-scheduler cmd/kubelet cmd/kube-proxy"
OUTPUT_DIR=_output/local/bin/linux/amd64
mkdir -p cluster/juju/charms/trusty/kubernetes-master/files/output
# Copy the binary output to the charm directory.
cp -v $OUTPUT_DIR/* cluster/juju/charms/trusty/kubernetes-master/files/output
}
function get-password() {
echo "TODO: Assign username/password security"
}
function kube-up() {
build-local
if [[ -d "~/.juju/current-env" ]]; then
juju quickstart -i --no-browser
else
@@ -50,6 +60,8 @@ function kube-up() {
}
function kube-down() {
# Remove the binary files from the charm directory.
rm -rf cluster/juju/charms/trusty/kubernetes-master/files/output/
local jujuenv
jujuenv=$(cat ~/.juju/current-environment)
juju destroy-environment $jujuenv