Skip to content

Commit 0f86955

Browse files
kaisecheng and jsvd authored
buildkite serverless test (#15150)
This commit adds a Buildkite pipeline to test against the serverless endpoint daily. Tests cover: es-output, es-input, es-filter, central pipeline management, legacy monitoring, DLQ, integration-filter, the Kibana API, and metricbeat stack monitoring. Co-authored-by: João Duarte <[email protected]> Co-authored-by: João Duarte <[email protected]>
1 parent 2165d43 commit 0f86955

28 files changed

+733
-7
lines changed

.buildkite/scripts/setup_java.sh

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,10 @@
1+
#!/bin/bash
# Installs OpenJDK 17 on the Buildkite agent so the serverless test scripts
# can build and run Logstash.
set -euo pipefail

install_java() {
  # TODO: let's think about regularly creating a custom image for Logstash which may align on version.yml definitions
  # apt-get is the stable CLI for scripts (plain `apt` warns about an
  # unstable interface). openjdk-17-jdk already includes the JRE, but the
  # explicit jre package is kept for parity with the original provisioning.
  sudo apt-get update && sudo apt-get install -y openjdk-17-jdk openjdk-17-jre
}

install_java
Lines changed: 23 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,24 @@
1+
# Buildkite pipeline: daily integration tests against the serverless endpoint.
# Every step provisions Java first, then runs exactly one scoped test script.
agents:
  provider: "gcp"
  machineType: "n1-standard-4"
  image: family/core-ubuntu-2204
steps:
  - label: "DLQ rspec integration test"
    command: ./.buildkite/scripts/setup_java.sh && ./ci/serverless/dlq_rspec_tests.sh
  - label: "es-output test"
    command: ./.buildkite/scripts/setup_java.sh && ./ci/serverless/es_output_tests.sh
  - label: "es-input test"
    command: ./.buildkite/scripts/setup_java.sh && ./ci/serverless/es_input_tests.sh
  - label: "es-filter test"
    command: ./.buildkite/scripts/setup_java.sh && ./ci/serverless/es_filter_tests.sh
  - label: "elastic_integration filter test"
    command: ./.buildkite/scripts/setup_java.sh && ./ci/serverless/elastic_integration_filter_tests.sh
  - label: "central pipeline management test"
    command: ./.buildkite/scripts/setup_java.sh && ./ci/serverless/cpm_tests.sh
  - label: "Logstash legacy monitoring test"
    command: ./.buildkite/scripts/setup_java.sh && ./ci/serverless/monitoring_tests.sh
  - label: "Kibana API test"
    command: ./.buildkite/scripts/setup_java.sh && ./ci/serverless/kibana_api_tests.sh
  - label: "metricbeat stack monitoring test"
    command: ./.buildkite/scripts/setup_java.sh && ./ci/serverless/metricbeat_monitoring_tests.sh

.gitignore

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -62,4 +62,5 @@ lib/pluginmanager/plugin_aliases.yml
6262
logstash-core/src/main/resources/org/logstash/plugins/plugin_aliases.yml
6363
spec/unit/plugin_manager/plugin_aliases.yml
6464
logstash-core/src/test/resources/org/logstash/plugins/plugin_aliases.yml
65-
qa/integration/fixtures/logs_rollover/log4j2.properties
65+
qa/integration/fixtures/logs_rollover/log4j2.properties
66+
ci/serverless/config/*.log

ci/serverless/common.sh

Lines changed: 151 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,151 @@
1+
#!/usr/bin/env bash
# Shared helpers for the serverless integration test scripts (meant to be
# sourced, not executed directly).
set -ex

export CURRENT_DIR="$(dirname "$0")"
# Unique index name per CI build; falls back to epoch seconds for local runs.
# $(...) replaces the deprecated backtick command substitution.
export INDEX_NAME="serverless_it_${BUILDKITE_BUILD_NUMBER:-$(date +%s)}"
# store all error messages
# (no `export`: bash cannot export arrays, so the keyword was a no-op)
ERR_MSGS=()
# numeric values representing the results of the checks. 0: pass, >0: fail
CHECKS=()
10+
11+
setup_vault() {
  # Fetch serverless cluster credentials from Vault and export them.
  # xtrace is suspended while secrets are handled so they never hit the log.
  local vault_path=secret/ci/elastic-logstash/serverless-test
  set +x
  # Assignment is split from `export`: `export VAR=$(cmd)` always returns
  # export's own status (0), which would mask a failed `vault read` and let
  # the script continue with empty credentials despite `set -e`.
  ES_ENDPOINT=$(vault read -field=es_host "${vault_path}")
  ES_USER=$(vault read -field=es_user "${vault_path}")
  ES_PW=$(vault read -field=es_user_pw "${vault_path}")
  KB_ENDPOINT=$(vault read -field=kb_host "${vault_path}")
  export ES_ENDPOINT ES_USER ES_PW KB_ENDPOINT
  set -x
}
20+
21+
build_logstash() {
  # Full from-source build: bootstrap the environment, assemble the
  # distribution, and install the default plugin gems.
  ./gradlew clean bootstrap assemble installDefaultGems
}
24+
25+
index_test_data() {
  # Bulk-load the book fixture documents into the per-build test index.
  local bulk_url="$ES_ENDPOINT/$INDEX_NAME/_bulk"
  curl -X POST -u "$ES_USER:$ES_PW" "$bulk_url" \
    -H 'Content-Type: application/json' \
    --data-binary @"$CURRENT_DIR/test_data/book.json"
}
28+
29+
# $1: check function
run_cpm_logstash() {
  # Start Logstash under central pipeline management (settings from ./config),
  # wait until the monitoring API answers, run the check, then stop Logstash.
  # copy log4j so Logstash writes ./logs/logstash-plain.log for check_err_log
  cp "$CURRENT_DIR/../../config/log4j2.properties" "$CURRENT_DIR/config/log4j2.properties"

  # run logstash in the background; the binary path is quoted so a checkout
  # directory containing spaces does not word-split the command
  "$CURRENT_DIR/../../bin/logstash" --path.settings "$CURRENT_DIR/config" 2>/dev/null &
  export LS_PID=$!

  check_logstash_readiness

  "$1" # check function

  kill "$LS_PID" || true
}
44+
45+
# $1: pipeline file
# $2: check function
# run_logstash 001_es-output.conf check_es_output
run_logstash() {
  # Start Logstash with the given pipeline file in the background, wait for
  # the monitoring API, run the check function, then terminate the process.
  # Path quoted: an unquoted $CURRENT_DIR breaks on spaces (SC2086).
  "$CURRENT_DIR/../../bin/logstash" -f "$1" 2>/dev/null &
  export LS_PID=$!

  check_logstash_readiness

  "$2" # check function

  kill "$LS_PID" || true
}
58+
59+
60+
# $1: number of tries
# $n: check function with args - non-empty output means pass
# Retries the check once per second. The LAST line printed is the result
# code callers inspect: "0" on success, "1" on timeout.
count_down_check() {
  # `count` is local so repeated calls cannot clobber a caller's global.
  local count=$1
  while ! [[ $("${@:2}") ]] && [[ $count -gt 0 ]]; do
    count=$(( count - 1 ))
    sleep 1
  done

  [[ $count -eq 0 ]] && echo "1" && return

  echo "Passed check!"
  echo "0"
}
74+
75+
76+
check_logstash_readiness() {
  # Poll the Logstash monitoring API (localhost:9600) for up to 120 seconds;
  # abort the whole script if Logstash never becomes ready.
  curl_logstash() {
    [[ $(curl --silent localhost:9600) ]] && echo "0"
  }
  check_readiness() {
    count_down_check 120 curl_logstash
  }
  add_check check_readiness "Failed readiness check."

  # The readiness result was just appended, so it is the last CHECKS entry.
  if [[ "${CHECKS[-1]}" -eq "1" ]]; then
    exit 1
  fi

  echo "Logstash is Up !"
  return 0
}
90+
91+
# $1: jq filter
# $2: expected value
# check_logstash_api '.pipelines.main.plugins.outputs[0].documents.successes' '1'
check_logstash_api() {
  # Poll _node/stats for up to 30s until the metric selected by the jq
  # filter reaches at least the expected value. The inner helper resolves
  # its $1/$2 at call time from the args count_down_check passes through.
  curl_node_stats() {
    [[ $(curl --silent localhost:9600/_node/stats | jq "$1") -ge "$2" ]] && echo "0"
  }

  count_down_check 30 curl_node_stats "$1" "$2"
}
101+
102+
# add check result to CHECKS
# $1: check function - expected the last char of result to be 0 or positive number
# $2: err msg
add_check() {
  # Capture the check's stdout and keep only its final character — the
  # numeric pass/fail code the check functions emit as their last line.
  # `local` (was a global FEATURE_CHECK) keeps the temp value contained.
  local feature_check
  feature_check=$("$1")
  feature_check="${feature_check: -1}"

  # ERR_MSGS and CHECKS are parallel arrays consumed by print_result.
  ERR_MSGS+=("$2")
  CHECKS+=("$feature_check")
}
112+
113+
# check log if the line contains [ERROR] or [FATAL] and does not relate to "unreachable"
check_err_log() {
  # Count real error lines in the Logstash log; connection noise against the
  # serverless endpoint ("unreachable" / "Connection refused") is expected
  # and excluded. Variables are local so globals are not leaked.
  local log_file="$CURRENT_DIR/../../logs/logstash-plain.log"
  local log_check
  # grep -c exits non-zero when the count is 0, so guard for `set -e`.
  log_check=$(grep -E "\[ERROR\]|\[FATAL\]" "$log_file" | grep -cvE "unreachable|Connection refused") || true

  ERR_MSGS+=("Found error in log")
  CHECKS+=("$log_check")
}
121+
122+
# if CHECKS[i] is 1, print ERR_MSGS[i]
print_result() {
  # Walk the parallel arrays and report the message of every failed check.
  local idx
  for idx in "${!CHECKS[@]}"; do
    if [[ "${CHECKS[$idx]}" -gt 0 ]]; then
      echo "${ERR_MSGS[$idx]}"
    fi
  done
}
128+
129+
# exit 1 if one of the checks fails
exit_with_code() {
  # Any non-zero entry in CHECKS means at least one check failed.
  local result
  for result in "${CHECKS[@]}"; do
    if [[ $result -gt 0 ]]; then
      exit 1
    fi
  done

  exit 0
}
137+
138+
clean_up_and_get_result() {
  # Trap handler: stop any running Logstash, scan its log for real errors,
  # print every failed check's message, and exit with the aggregate status.
  if [[ -n "$LS_PID" ]]; then
    kill "$LS_PID" || true
  fi

  check_err_log
  print_result
  exit_with_code
}
145+
146+
# common setup
setup() {
  # One-stop initialization for every test script: pull Vault secrets,
  # build Logstash, and install the cleanup/report trap for all exit paths.
  setup_vault
  build_logstash
  trap clean_up_and_get_result INT TERM EXIT
}

ci/serverless/config/logstash.yml

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,10 @@
1+
xpack.management.enabled: true
2+
xpack.management.pipeline.id: ["gen_es"]
3+
xpack.management.elasticsearch.username: ${ES_USER}
4+
xpack.management.elasticsearch.password: ${ES_PW}
5+
xpack.management.elasticsearch.hosts: ["${ES_ENDPOINT}"]
6+
7+
xpack.monitoring.enabled: true
8+
xpack.monitoring.elasticsearch.username: ${ES_USER}
9+
xpack.monitoring.elasticsearch.password: ${ES_PW}
10+
xpack.monitoring.elasticsearch.hosts: ["${ES_ENDPOINT}"]

ci/serverless/cpm_tests.sh

Lines changed: 48 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,48 @@
1+
#!/usr/bin/env bash
# Central Pipeline Management test: index a pipeline definition into
# Elasticsearch, let Logstash fetch and run it, and verify the output count.
set -ex

# Quoted so a checkout path with spaces does not word-split (SC2086);
# $(dirname "$0") already yields a usable relative or absolute path.
source "$(dirname "$0")/common.sh"

export PIPELINE_NAME='gen_es'

# update pipeline and check response code
# $1: pipeline id, $2: JSON request body
index_pipeline() {
  RESP_CODE=$(curl -s -w "%{http_code}" -X PUT -u "$ES_USER:$ES_PW" "$ES_ENDPOINT/_logstash/pipeline/$1" -H 'Content-Type: application/json' -d "$2")
  if [[ $RESP_CODE -ge '400' ]]; then
    echo "failed to update pipeline for Central Pipeline Management. Got $RESP_CODE from Elasticsearch"
    exit 1
  fi
}

# index pipeline to serverless ES
index_cpm_pipelines() {
  index_pipeline "$PIPELINE_NAME" '{
  "pipeline": "input { generator { count => 100 } } output { elasticsearch { hosts => \"${ES_ENDPOINT}\" user => \"${ES_USER}\" password => \"${ES_PW}\" index=> \"${INDEX_NAME}\" } }",
  "last_modified": "2023-07-04T22:22:22.222Z",
  "pipeline_metadata": { "version": "1"},
  "username": "log.stash",
  "pipeline_settings": {"pipeline.batch.delay": "50"}
}'
}

check_es_output() {
  # The generator emits 100 events; all must reach the elasticsearch output.
  check_logstash_api '.pipelines.gen_es.plugins.outputs[0].documents.successes' '100'
}

check_plugin() {
  add_check check_es_output "Failed central pipeline management check."
}

delete_pipeline() {
  curl -u "$ES_USER:$ES_PW" -X DELETE "$ES_ENDPOINT/_logstash/pipeline/$PIPELINE_NAME" -H 'Content-Type: application/json';
}

# extend the common cleanup with pipeline deletion
cpm_clean_up_and_get_result() {
  delete_pipeline
  clean_up_and_get_result
}

setup
# replace the trap installed by setup so cleanup also removes the pipeline
trap cpm_clean_up_and_get_result INT TERM EXIT
index_cpm_pipelines
run_cpm_logstash check_plugin

ci/serverless/dlq_rspec_tests.sh

Lines changed: 22 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,22 @@
1+
#!/usr/bin/env bash
# DLQ rspec integration test run against the serverless Elasticsearch endpoint.
set -ex

vault_path=secret/ci/elastic-logstash/serverless-test

export JRUBY_OPTS="-J-Xmx1g"
export SERVERLESS=true
# Suspend xtrace while handling secrets so they never land in the CI log.
set +x
# Assignment split from `export`: `export VAR=$(cmd)` returns export's own
# status (0), which would mask a failed `vault read` and let the script
# continue with empty credentials despite `set -e`.
ES_ENDPOINT=$(vault read -field=es_host "${vault_path}")
ES_USER=$(vault read -field=es_user "${vault_path}")
ES_PW=$(vault read -field=es_user_pw "${vault_path}")
export ES_ENDPOINT ES_USER ES_PW
set -x

./gradlew clean bootstrap assemble installDefaultGems unpackTarDistribution
./gradlew :logstash-core:copyGemjar

export GEM_PATH=vendor/bundle/jruby/3.1.0
export GEM_HOME=vendor/bundle/jruby/3.1.0

vendor/jruby/bin/jruby -S bundle install --with development

vendor/jruby/bin/jruby -S bundle exec rspec -fd qa/integration/specs/dlq_spec.rb -e "using pipelines.yml"
Lines changed: 54 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,54 @@
1+
#!/usr/bin/env bash
# elastic_integration filter test: deploy an ingest pipeline and index
# template, run Logstash with the filter, and verify the processor ran
# exactly once per event (no double execution).
set -ex

# Quoted so a checkout path with spaces does not word-split (SC2086).
source "$(dirname "$0")/common.sh"

deploy_ingest_pipeline() {
  PIPELINE_RESP_CODE=$(curl -s -w "%{http_code}" -o /dev/null -X PUT -u "$ES_USER:$ES_PW" "$ES_ENDPOINT/_ingest/pipeline/integration-logstash_test.events-default" \
    -H 'Content-Type: application/json' \
    --data-binary @"$CURRENT_DIR/test_data/ingest_pipeline.json")

  TEMPLATE_RESP_CODE=$(curl -s -w "%{http_code}" -o /dev/null -X PUT -u "$ES_USER:$ES_PW" "$ES_ENDPOINT/_index_template/logs-serverless-default-template" \
    -H 'Content-Type: application/json' \
    --data-binary @"$CURRENT_DIR/test_data/index_template.json")

  # ingest pipeline is likely be there from the last run
  # failing to update pipeline does not stop the test.
  # Warn on stderr instead of appending to ERR_MSGS: ERR_MSGS is
  # index-aligned with CHECKS, and appending here without a matching CHECKS
  # entry would misalign every later failure report in print_result.
  if [[ $PIPELINE_RESP_CODE -ge '400' ]]; then
    echo "Failed to update ingest pipeline. Got $PIPELINE_RESP_CODE" >&2
  fi

  if [[ $TEMPLATE_RESP_CODE -ge '400' ]]; then
    echo "Failed to update index template. Got $TEMPLATE_RESP_CODE" >&2
  fi
}

# processor should append 'serverless' to message
check_integration_filter() {
  check_logstash_api '.pipelines.main.plugins.filters[] | select(.id == "mutate1") | .events.out' '1'
}

get_doc_msg_length() {
  curl -s -u "$ES_USER:$ES_PW" "$ES_ENDPOINT/logs-$INDEX_NAME.004-default/_search?size=1" | jq '.hits.hits[0]._source.message | length'
}

# ensure no double run of ingest pipeline
# message = ['ok', 'serverless*']
validate_ds_doc() {
  [[ $(get_doc_msg_length) -eq "2" ]] && echo "0"
}

check_doc_no_duplication() {
  count_down_check 20 validate_ds_doc
}

check_plugin() {
  add_check check_integration_filter "Failed ingest pipeline processor check."
  add_check check_doc_no_duplication "Failed ingest pipeline duplication check."
}

setup
# install plugin (path quoted against spaces in the checkout directory)
"$CURRENT_DIR/../../bin/logstash-plugin" install logstash-filter-elastic_integration
deploy_ingest_pipeline
run_logstash "$CURRENT_DIR/pipeline/004_integration-filter.conf" check_plugin

ci/serverless/es_filter_tests.sh

Lines changed: 16 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,16 @@
1+
#!/usr/bin/env bash
# es-filter test: run a pipeline using the elasticsearch filter and verify
# the filter (id "ok") emitted at least one event.
set -ex

# Quoted so a checkout path with spaces does not word-split (SC2086).
source "$(dirname "$0")/common.sh"

check_es_filter() {
  check_logstash_api '.pipelines.main.plugins.filters[] | select(.id == "ok") | .events.out' '1'
}

check_plugin() {
  add_check check_es_filter "Failed es-filter check."
}

setup
index_test_data
run_logstash "$CURRENT_DIR/pipeline/002_es-filter.conf" check_plugin

ci/serverless/es_input_tests.sh

Lines changed: 16 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,16 @@
1+
#!/usr/bin/env bash
# es-input test: run a pipeline using the elasticsearch input and verify the
# input produced at least one event.
set -ex

# Quoted so a checkout path with spaces does not word-split (SC2086).
source "$(dirname "$0")/common.sh"

check_es_input() {
  check_logstash_api '.pipelines.main.plugins.inputs[0].events.out' '1'
}

check_plugin() {
  add_check check_es_input "Failed es-input check."
}

setup
index_test_data
run_logstash "$CURRENT_DIR/pipeline/003_es-input.conf" check_plugin

0 commit comments

Comments
 (0)