mirror of
https://github.com/k3s-io/kubernetes.git
synced 2025-07-20 18:31:15 +00:00
Merge pull request #106190 from MikeSpreitzer/integration-scrape-etcd
Add periodic etcd scraping to integration tests
This commit is contained in:
commit
0abc054933
@ -91,7 +91,39 @@ kube::etcd::start() {
|
|||||||
curl -fs -X POST "${KUBE_INTEGRATION_ETCD_URL}/v3/kv/put" -d '{"key": "X3Rlc3Q=", "value": ""}'
|
curl -fs -X POST "${KUBE_INTEGRATION_ETCD_URL}/v3/kv/put" -d '{"key": "X3Rlc3Q=", "value": ""}'
|
||||||
}
|
}
|
||||||
|
|
||||||
|
# Start a background loop that scrapes etcd's /metrics page every 30s.
# Sets ETCD_SCRAPE_DIR (where scrapes land) and ETCD_SCRAPE_PID (the
# background subshell, to be killed by the caller at shutdown).
kube::etcd::start_scraping() {
  # Prefer the CI artifacts directory when it exists; otherwise fall
  # back to a fresh temp directory.
  if [[ -d "${ARTIFACTS:-}" ]]; then
    ETCD_SCRAPE_DIR="${ARTIFACTS}/etcd-scrapes"
  else
    ETCD_SCRAPE_DIR=$(mktemp -d -t test.XXXXXX)/etcd-scrapes
  fi
  kube::log::info "Periodically scraping etcd to ${ETCD_SCRAPE_DIR} ."
  mkdir -p "${ETCD_SCRAPE_DIR}"
  # Subshell so the whole loop can be killed via its single PID.
  (
    while sleep 30; do
      kube::etcd::scrape
    done
  ) &
  ETCD_SCRAPE_PID=$!
}
|
||||||
|
|
||||||
|
# Take one scrape of etcd's /metrics endpoint.
# Downloads into a temporary name ("next") first, then renames to
# <unix-seconds>.scrape, so readers never see a partially written scrape.
kube::etcd::scrape() {
  curl -s -S "${KUBE_INTEGRATION_ETCD_URL}/metrics" > "${ETCD_SCRAPE_DIR}/next" && mv "${ETCD_SCRAPE_DIR}/next" "${ETCD_SCRAPE_DIR}/$(date +%s).scrape"
}
|
||||||
|
|
||||||
|
|
||||||
kube::etcd::stop() {
|
kube::etcd::stop() {
|
||||||
|
if [[ -n "${ETCD_SCRAPE_PID:-}" ]] && [[ -n "${ETCD_SCRAPE_DIR:-}" ]] ; then
|
||||||
|
kill "${ETCD_SCRAPE_PID}" &>/dev/null || :
|
||||||
|
wait "${ETCD_SCRAPE_PID}" &>/dev/null || :
|
||||||
|
kube::etcd::scrape || :
|
||||||
|
(
|
||||||
|
# shellcheck disable=SC2015
|
||||||
|
cd "${ETCD_SCRAPE_DIR}"/.. && \
|
||||||
|
tar czf etcd-scrapes.tgz etcd-scrapes && \
|
||||||
|
rm -rf etcd-scrapes || :
|
||||||
|
)
|
||||||
|
fi
|
||||||
if [[ -n "${ETCD_PID-}" ]]; then
|
if [[ -n "${ETCD_PID-}" ]]; then
|
||||||
kill "${ETCD_PID}" &>/dev/null || :
|
kill "${ETCD_PID}" &>/dev/null || :
|
||||||
wait "${ETCD_PID}" &>/dev/null || :
|
wait "${ETCD_PID}" &>/dev/null || :
|
||||||
|
@ -64,6 +64,9 @@ runTests() {
|
|||||||
kube::log::status "Starting etcd instance"
|
kube::log::status "Starting etcd instance"
|
||||||
CLEANUP_REQUIRED=1
|
CLEANUP_REQUIRED=1
|
||||||
kube::etcd::start
|
kube::etcd::start
|
||||||
|
# shellcheck disable=SC2034
|
||||||
|
local ETCD_SCRAPE_PID # Set in kube::etcd::start_scraping, used in cleanup
|
||||||
|
kube::etcd::start_scraping
|
||||||
kube::log::status "Running integration test cases"
|
kube::log::status "Running integration test cases"
|
||||||
|
|
||||||
make -C "${KUBE_ROOT}" test \
|
make -C "${KUBE_ROOT}" test \
|
||||||
|
98
hack/run-prometheus-on-etcd-scrapes.sh
Executable file
98
hack/run-prometheus-on-etcd-scrapes.sh
Executable file
@ -0,0 +1,98 @@
|
|||||||
|
#!/usr/bin/env bash

# Copyright 2021 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Unpacks a tarfile of etcd scrapes and runs a simple web server exposing it
# and a Prometheus server scraping that simple web server.
# The simple web server listens on port 9091.
# The Prometheus server is run in a container and looks for the
# simple web server at the host's first global IPv4 address.

# Usage: $0 scrapes_tar_pathname
#
# Where scrapes_tar_pathname is a gzipped tar archive containing
# files whose name is of the form
# <timestamp>.scrape
# where <timestamp> is seconds since Jan 1, 1970 UTC.
# Each such file is taken to be a scrape that lacks timestamps,
# and the timestamp from the filename is multiplied by the necessary 1000
# and added to the data in that file.

# This requires a:
# - `docker run` command
# - an `ip` or `ifconfig` command that this script knows how to wrangle
# - an `nc` command that serve-prom-scrapes.sh knows how to wrangle

if (( $# != 1 )); then
  echo "Usage: $0 \$scrapes_tar_pathname" >&2
  exit 1
fi

scrapes_file="$1"

if ! [[ -r "$scrapes_file" ]]; then
  echo "$0: $scrapes_file is not a readable file" >&2
  exit 2
fi

SCRIPT_ROOT=$(dirname "${BASH_SOURCE[0]}")

CONFIG="/tmp/$(cd /tmp && mktemp config.XXXXXX)"
UNPACKDIR="/tmp/$(cd /tmp && mktemp -d unpack.XXXXXX)"
SERVER_PID=""

# Remove the generated config and unpacked scrapes, and stop the scrape
# web server, on any exit path.
cleanup_prom() {
  rm -f "$CONFIG"
  rm -rf "$UNPACKDIR"
  if [[ -n "$SERVER_PID" ]]; then
    kill "$SERVER_PID"
  fi
}

trap cleanup_prom EXIT

chmod +r "$CONFIG" "$UNPACKDIR"

tar xzf "$scrapes_file" -C "$UNPACKDIR"

# Find the host's first global IPv4 address; the Prometheus container
# reaches the host-side web server through it.
# `command -v` replaces the non-portable `which` (which is not guaranteed
# to exist or to set a useful exit status on all platforms).
if command -v ip > /dev/null; then
  IPADDR=$(ip addr show scope global up |
    grep -w inet | head -1 |
    awk '{ print $2 }' | awk -F/ '{ print $1 }')
else
  IPADDR=$(ifconfig | grep -w inet | grep -Fv 127.0.0. | head -1 |
    awk '{ print $2 }' | awk -F/ '{ print $1 }')
fi

echo
echo "Historic metrics will be at http://\${any_local_address}:9091/\${any_path}"
echo "Prometheus will listen on port 9090 and scrape historic metrics from http://${IPADDR}:9091/metrics"
sleep 1
echo

# Minimal Prometheus config: one job scraping the simple web server.
cat > "$CONFIG" <<EOF
global:
  scrape_interval: 30s

scrape_configs:

- job_name: local
  static_configs:
  - targets: ['${IPADDR}:9091']
EOF

"${SCRIPT_ROOT}/serve-prom-scrapes.sh" 9091 "$UNPACKDIR" &
SERVER_PID=$!
# Long retention so Prometheus keeps the (historic) timestamps we feed it.
docker run -p 9090:9090 -v "${CONFIG}:/config.yaml" prom/prometheus --config.file=/config.yaml --storage.tsdb.retention.time=3650d
|
67
hack/serve-prom-scrapes.sh
Executable file
67
hack/serve-prom-scrapes.sh
Executable file
@ -0,0 +1,67 @@
|
|||||||
|
#!/usr/bin/env bash

# Copyright 2021 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Serves a collection of scrape files up to Prometheus scraping.

# Usage: $0 port_num scrapes-dir
#
# Where scrapes-dir has descendant files whose name is of the form
# <timestamp>.scrape
# where <timestamp> is seconds since Jan 1, 1970 UTC.
# Each such file is taken to be a scrape that lacks timestamps,
# and the timestamp from the filename is multiplied by the necessary 1000
# and added to the data in that file.

# This requires an `nc` command that this script knows how to wrangle.

if (( $# != 2 )); then
  echo "Usage: $0 port_num scrapes_dir" >&2
  exit 1
fi

port_num="$1"
scrapes_dir="$2"
response_file="/tmp/$(cd /tmp && mktemp response.XXXXXX)"

# Remove the assembled HTTP response on any exit path.
cleanup_serve() {
  rm -rf "$response_file"
}

trap cleanup_serve EXIT

chmod +r "$response_file"
|
||||||
|
|
||||||
|
# Emit the scrape file $1 with the timestamp taken from its filename
# (seconds, converted to milliseconds by appending "000") appended to
# every non-comment line; lines starting with '#' pass through unchanged.
transform() {
  fname="$(basename "$1")"
  stamp_ms="${fname%.scrape}000"
  sed 's/^\([^#].*\)$/\1 '"${stamp_ms}/" "$1"
}
|
||||||
|
|
||||||
|
# Assemble the one canned HTTP response: a 200 header followed by every
# *.scrape file under $scrapes_dir, in sorted (= timestamp) order, each
# run through transform to stamp its lines.
find_and_transform() {
  echo -n $'HTTP/1.0 200 OK\r\nContent-Type: text/plain\r\n\r\n' > "$response_file"
  find "$scrapes_dir" -name "*.scrape" -print0 | sort -z | while read -d '' -r scrapename; do transform "$scrapename" >> "$response_file"; done
}

find_and_transform

# Some nc variants need -N to shut down the socket at EOF; probe the
# man page to decide whether this one supports it.
if man nc | grep -wq -e -N
then dashen=-N
else dashen=
fi

# Serve the canned response to every connection, forever.
# shellcheck disable=SC2086
while true; do nc -l $dashen 0.0.0.0 "$port_num" < "$response_file" > /dev/null; sleep 10; done
|
Loading…
Reference in New Issue
Block a user