#!/usr/bin/env bash

usage() {
	cat <<EOF
Usage: $0 [OPTIONS] [TESTS]

Options:
  -A            Use "cat -A" to print test output (only some tests)
  -c            Run integration tests on a cluster
  -C            Clean up when done by removing test state directory (see -V)
  -D            Show diff between failed/expected test output (some tests only)
  -e            Exit on the first test failure
  -H            No headers - for running single test with other wrapper
  -I <count>    Iterate tests <count> times, exiting on failure (implies -e, -N)
  -l <count>    Use <count> daemons for local daemon integration tests
  -L            Print daemon logs on test failure (only some tests)
  -N            Don't print summary of test results after running all tests
  -q            Quiet - don't show tests being run (still displays summary)
  -S <lib>      Use socket wrapper library <lib> for local integration tests
  -T <seconds>  Timeout each test after <seconds> seconds (default: 3600)
  -v            Verbose - print test output for non-failures (only some tests)
  -V <dir>      Use <dir> as test state directory
  -x            Trace this script with the -x option
  -X            Trace certain scripts run by tests using -x (only some tests)
EOF
	exit 1
}

# Print a message and exit.
die ()
{
	echo "$1" >&2 ; exit "${2:-1}"
}

######################################################################
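
# Defaults for command-line options. The CTDB_TEST_* variables are
# exported so that the tests, run as child processes, can see them.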
with_summary=true
quiet=false
exit_on_fail=false
max_iterations=1
no_header=false
test_state_dir=""
cleanup=false
test_time_limit=3600
export CTDB_TEST_VERBOSE=false
export CTDB_TEST_COMMAND_TRACE=false
export CTDB_TEST_CAT_RESULTS_OPTS=""
export CTDB_TEST_DIFF_RESULTS=false
export CTDB_TEST_PRINT_LOGS_ON_ERROR=false
export CTDB_TEST_LOCAL_DAEMONS=3
export CTDB_TEST_SWRAP_SO_PATH=""

while getopts "AcCDehHI:l:LNqS:T:vV:xX?" opt ; do
	case "$opt" in
	A) CTDB_TEST_CAT_RESULTS_OPTS="-A" ;;
	c) CTDB_TEST_LOCAL_DAEMONS="" ;;
	C) cleanup=true ;;
	D) CTDB_TEST_DIFF_RESULTS=true ;;
	e) exit_on_fail=true ;;
	H) no_header=true ;;
	I) max_iterations="$OPTARG" ; exit_on_fail=true ; with_summary=false ;;
	l) CTDB_TEST_LOCAL_DAEMONS="$OPTARG" ;;
	L) CTDB_TEST_PRINT_LOGS_ON_ERROR=true ;;
	N) with_summary=false ;;
	q) quiet=true ;;
	S) CTDB_TEST_SWRAP_SO_PATH="$OPTARG" ;;
	T) test_time_limit="$OPTARG" ;;
	v) CTDB_TEST_VERBOSE=true ;;
	V) test_state_dir="$OPTARG" ;;
	x) set -x ;;
	X) CTDB_TEST_COMMAND_TRACE=true ;;
	\?|h) usage ;;
	esac
done
shift $((OPTIND - 1))

case $(basename "$0") in
*run_cluster_tests*)
	# Running on a cluster... same as -c
	CTDB_TEST_LOCAL_DAEMONS=""
	;;
esac

if $quiet ; then
	show_progress() { cat >/dev/null ; }
else
	show_progress() { cat ; }
fi

######################################################################
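
# Per-test banner helpers, used unless -H (no headers) was given.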
test_header ()
{
	local name="$1"

	echo "--==--==--==--==--==--==--==--==--==--==--==--==--==--==--==--==--==--==--"
	echo "Running test $name ($(date '+%T'))"
	echo "--==--==--==--==--==--==--==--==--==--==--==--==--==--==--==--==--==--==--"
}

test_footer ()
{
	local f="$1"
	local status="$2"
	local interp="$3"
	local duration="$4"
	local statstr=""

	if [ "$status" -eq 0 ] ; then
		statstr=""
	else
		statstr=" (status $status)"
	fi
	echo "=========================================================================="
	echo "TEST ${interp}: ${f}${statstr} (duration: ${duration}s)"
	echo "=========================================================================="
}
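
# Run a single test executable under the -T time limit and classify its
# exit status: 0 pass, 77 skip, 99 error (e.g. test not executable),
# 124 timed out, anything else a failure. Counters and the summary file
# are updated accordingly.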
ctdb_test_run ()
{
	local f="$1"

	$no_header || test_header "$f"

	local status=0
	local start_time
	start_time=$(date '+%s')
	if [ -x "$f" ] ; then
		timeout "$test_time_limit" "$f" </dev/null | show_progress
		status=$?
	else
		echo "TEST IS NOT EXECUTABLE"
		status=99
	fi
	local duration=$(($(date +%s) - start_time))

	tests_total=$((tests_total + 1))

	local interp
	case "$status" in
	0)
		interp="PASSED"
		tests_passed=$((tests_passed + 1))
		;;
	77)
		interp="SKIPPED"
		tests_skipped=$((tests_skipped + 1))
		;;
	99)
		interp="ERROR"
		tests_failed=$((tests_failed + 1))
		;;
	124)
		interp="TIMEDOUT"
		tests_failed=$((tests_failed + 1))
		;;
	*)
		interp="FAILED"
		tests_failed=$((tests_failed + 1))
		;;
	esac

	$no_header || test_footer "$f" "$status" "$interp" "$duration"

	if $with_summary ; then
		local t
		if [ $status -eq 0 ] ; then
			t=" ${interp}"
		else
			t="*${interp}*"
		fi
		printf '%-10s %s\n' "$t" "$f" >>"$summary_file"
	fi

	# Skipped tests should not cause failure
	case "$status" in
	77)
		status=0
		;;
	esac

	return $status
}

######################################################################
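
# Result counters, updated by ctdb_test_run() above.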
tests_total=0
tests_passed=0
tests_skipped=0
tests_failed=0

if ! type mktemp >/dev/null 2>&1 ; then
	# Not perfect, but it will do...
	mktemp ()
	{
		local dir=false
		if [ "$1" = "-d" ] ; then
			dir=true
		fi
		local t="${TMPDIR:-/tmp}/tmp.$$.$RANDOM"
		(
			umask 077
			if $dir ; then
				mkdir "$t"
			else
				: >"$t"
			fi
		)
		echo "$t"
	}
fi

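# Without pipefail the status captured in ctdb_test_run() would be that
# of show_progress (always 0) rather than that of the test itself.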
set -o pipefail
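
# Run a single test file, setting up CTDB_TEST_SUITE_DIR and a fresh
# CTDB_TEST_TMP_DIR for it first.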
run_one_test ()
{
	local f="$1"

	CTDB_TEST_SUITE_DIR=$(dirname "$f")
	export CTDB_TEST_SUITE_DIR
	# This expands the most probable problem cases like "." and "..".
	if [ "$(dirname "$CTDB_TEST_SUITE_DIR")" = "." ] ; then
		CTDB_TEST_SUITE_DIR=$(cd "$CTDB_TEST_SUITE_DIR" && pwd)
	fi

	# Set CTDB_TEST_TMP_DIR
	#
	# Determine the relative test suite subdirectory. The top-level
	# test directory needs to be a prefix of the test suite directory,
	# so make absolute versions of both.
	local test_dir test_suite_dir reldir
	test_dir=$(cd "$CTDB_TEST_DIR" && pwd)
	test_suite_dir=$(cd "$CTDB_TEST_SUITE_DIR" && pwd)
	reldir="${test_suite_dir#"${test_dir}"/}"

	export CTDB_TEST_TMP_DIR="${test_state_dir}/${reldir}"
	rm -rf "$CTDB_TEST_TMP_DIR"
	mkdir -p "$CTDB_TEST_TMP_DIR"

	ctdb_test_run "$f"

	status=$?
}
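
# Expand and run the given test arguments: README files are skipped,
# directories are expanded into their test suites or *.sh subtests, and
# plain files are run directly.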
run_tests ()
{
	local f

	for f ; do
		case "$f" in
		*/README|*/README.md)
			continue
			;;
		esac

		if [ ! -e "$f" ] ; then
			# Can't find it? Check relative to CTDB_TEST_DIR.
			# Strip off current directory from beginning,
			# if there, just to make paths more friendly.
			f="${CTDB_TEST_DIR#"${PWD}"/}/${f}"
		fi

		if [ -d "$f" ] ; then
			local test_dir dir reldir subtests

			test_dir=$(cd "$CTDB_TEST_DIR" && pwd)
			dir=$(cd "$f" && pwd)
			reldir="${dir#"${test_dir}"/}"

			case "$reldir" in
			*/*/*)
				die "test \"$f\" is not recognised"
				;;
			*/*)
				# This is a test suite
				subtests=$(echo "${f%/}/"*".sh")
				if [ "$subtests" = "${f%/}/*.sh" ] ; then
					# Probably empty directory
					die "test \"$f\" is not recognised"
				fi
				;;
			CLUSTER|INTEGRATION|UNIT)
				# A collection of test suites
				subtests=$(echo "${f%/}/"*)
				;;
			*)
				die "test \"$f\" is not recognised"
			esac

			# Recurse - word-splitting wanted
			# shellcheck disable=SC2086
			run_tests $subtests
		elif [ -f "$f" ] ; then
			run_one_test "$f"
		else
			# Time to give up
			die "test \"$f\" is not recognised"
		fi

		if $exit_on_fail && [ $status -ne 0 ] ; then
			return $status
		fi
	done
}

export CTDB_TEST_MODE="yes"
# Following 2 lines may be modified by installation script
CTDB_TESTS_ARE_INSTALLED=false
CTDB_TEST_DIR=$(dirname "$0")
export CTDB_TESTS_ARE_INSTALLED CTDB_TEST_DIR
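
# Pick a test state directory if -V was not given: a temporary directory
# for installed tests, otherwise <test-dir>/var.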
if [ -z "$test_state_dir" ] ; then
	if $CTDB_TESTS_ARE_INSTALLED ; then
		test_state_dir=$(mktemp -d)
	else
		test_state_dir="${CTDB_TEST_DIR}/var"
	fi
fi
mkdir -p "$test_state_dir"
summary_file="${test_state_dir}/.summary"
: >"$summary_file"
export TEST_SCRIPTS_DIR="${CTDB_TEST_DIR}/scripts"
# If no tests specified then run some defaults
if [ -z "$1" ] ; then
	if [ -n "$CTDB_TEST_LOCAL_DAEMONS" ] ; then
		set -- UNIT INTEGRATION
	else
		set -- INTEGRATION CLUSTER
	fi
fi

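# Remove the test state directory at exit if -C was given.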
do_cleanup ()
{
	if $cleanup ; then
		echo "Removing test state directory: ${test_state_dir}"
		rm -rf "$test_state_dir"
	else
		echo "Not cleaning up test state directory: ${test_state_dir}"
	fi
}

trap "do_cleanup ; exit 130" SIGINT
trap "do_cleanup ; exit 143" SIGTERM

iterations=0
# Special case: -I 0 means iterate forever (until failure)
while [ "$max_iterations" -eq 0 ] || [ $iterations -lt "$max_iterations" ] ; do
	iterations=$((iterations + 1))

	if [ "$max_iterations" -ne 1 ] ; then
		echo
		echo "##################################################"
		echo "ITERATION ${iterations}"
		echo "##################################################"
		echo
	fi

	run_tests "$@"
	status=$?

	if [ $status -ne 0 ] ; then
		break
	fi
done

if $with_summary ; then
	if [ $status -eq 0 ] || ! $exit_on_fail ; then
		echo
		cat "$summary_file"
		echo
		tests_run=$((tests_total - tests_skipped))
		printf '%d/%d tests passed' $tests_passed $tests_run
		if [ $tests_skipped -gt 0 ] ; then
			printf ' (%d skipped)' $tests_skipped
		fi
		printf '\n'
	fi
fi

rm -f "$summary_file"
echo
do_cleanup

if $no_header || $exit_on_fail ; then
	exit $status
elif [ $tests_failed -gt 0 ] ; then
	exit 1
else
	exit 0
fi