#!/usr/bin/env bash

# Copyright (C) 2015, Arpinum
#
# shebang-unit is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# shebang-unit is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# shebang-unit. If not, see http://www.gnu.org/licenses/lgpl.html.

# shebang-unit all in one source file

configuration__load() {
  # yes/no representation used with shebang-unit parameters to activate
  # stuff like colors
  SBU_YES="yes"
  SBU_NO="no"

  # Colors for outputs
  SBU_GREEN_COLOR_CODE="\\033[1;32m"
  SBU_RED_COLOR_CODE="\\033[1;31m"
  SBU_YELLOW_COLOR_CODE="\\033[1;33m"
  SBU_DEFAULT_COLOR_CODE="\\e[0m"

  # Functions coding conventions
  SBU_GLOBAL_SETUP_FUNCTION_NAME="global_setup"
  SBU_GLOBAL_TEARDOWN_FUNCTION_NAME="global_teardown"
  SBU_SETUP_FUNCTION_NAME="setup"
  SBU_TEARDOWN_FUNCTION_NAME="teardown"
  SBU_FUNCTION_DECLARATION_REGEX="^[ ]*\(function\)\{0,1\}[ ]*\([A-Za-z0-9_-]\{1,\}\)[ ]*\(([ ]*)\)\{0,1\}[ ]*{"
  SBU_PRIVATE_FUNCTION_NAME_REGEX="^_.*"

  # Default configuration that can be modified with shebang-unit parameters
  # For more information see shebang-unit usage
  SBU_TEST_FILE_PATTERN="*_test.sh"
  SBU_TEST_FUNCTION_PATTERN="*"
  SBU_USE_COLORS="${SBU_YES}"
  SBU_RANDOM_RUN="${SBU_NO}"
  SBU_REPORTERS="simple"
  SBU_JUNIT_REPORTER_OUTPUT_FILE="./junit_report.xml"

  # Internal constants
  SBU_SUCCESS_STATUS_CODE=0
  SBU_FAILURE_STATUS_CODE=1
  SBU_VALUE_SEPARATOR=","
  SBU_TEMP_DIR="/tmp/.shebang-unit"
  SBU_LAST_ASSERTION_MSG_KEY="last_assertion_message"
  SBU_NO_RUN="${SBU_NO}"
  SBU_STANDARD_FD=42
  SBU_ERROR_FD=43
}

assertion__equal() {
  if [[ "$1" != "$2" ]]; then
    _assertion__failed "Actual: <$2>, expected: <$1>."
  fi
}

assertion__different() {
  if [[ "$1" == "$2" ]]; then
    _assertion__failed "Both values are: <$1>."
  fi
}

assertion__string_contains() {
  if ! system__string_contains "$1" "$2"; then
    _assertion__failed "String: <$1> does not contain: <$2>."
  fi
}

assertion__string_does_not_contain() {
  if system__string_contains "$1" "$2"; then
    _assertion__failed "String: <$1> contains: <$2>."
  fi
}

assertion__string_empty() {
  if [[ -n "$1" ]]; then
    _assertion__failed "String: <$1> is not empty."
  fi
}

assertion__string_not_empty() {
  if [[ -z "$1" ]]; then
    _assertion__failed "The string is empty."
  fi
}

assertion__array_contains() {
  local element=$1
  shift 1
  if ! array__contains "${element}" "$@"; then
    local array_as_string="$(system__pretty_print_array "$@")"
    _assertion__failed \
      "Array: <${array_as_string}> does not contain: <${element}>."
  fi
}

assertion__array_does_not_contain() {
  local element=$1
  shift 1
  if array__contains "${element}" "$@"; then
    local array_as_string="$(system__pretty_print_array "$@")"
    _assertion__failed \
      "Array: <${array_as_string}> contains: <${element}>."
  fi
}

assertion__status_code_is_success() {
  if (( $1 != ${SBU_SUCCESS_STATUS_CODE} )); then
    _assertion__failed \
      "Status code is failure instead of success." "$2"
  fi
}

assertion__status_code_is_failure() {
  if (( $1 == ${SBU_SUCCESS_STATUS_CODE} )); then
    _assertion__failed \
      "Status code is success instead of failure." "$2"
  fi
}

assertion__successful() {
  "$@"
  if (( $? != ${SBU_SUCCESS_STATUS_CODE} )); then
    _assertion__failed "Command is failing instead of successful."
  fi
}

assertion__failing() {
  "$@"
  if (( $? == ${SBU_SUCCESS_STATUS_CODE} )); then
    _assertion__failed "Command is successful instead of failing."
  fi
}

_assertion__failed() {
  local message_to_use="$(_assertion__get_assertion_message_to_use "$1" "$2")"
  system__print_line "Assertion failed. ${message_to_use}"
  exit ${SBU_FAILURE_STATUS_CODE}
}

_assertion__get_assertion_message_to_use() {
  local message=$1
  local custom_message=$2
  if [[ -n "${custom_message}" ]]; then
    system__print "${message} ${custom_message}"
  else
    system__print "${message}"
  fi
}

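# Illustrative usage sketch (not part of shebang-unit): a hypothetical test
# file matching SBU_TEST_FILE_PATTERN, e.g. calculator_test.sh, could use the
# assertions above like this (all names below are assumptions for the example):
#
#   setup() {
#     result=$(( 1 + 2 ))
#   }
#
#   addition_should_give_three() {
#     assertion__equal 3 "${result}"
#   }
#
#   missing_file_should_make_test_command_fail() {
#     assertion__failing test -f "/does/not/exist"
#   }
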
mock__make_function_do_nothing() {
  mock__make_function_call "$1" ":"
}

mock__make_function_prints() {
  local function=$1
  local text=$2
  eval "${function}() { printf "${text}"; }"
}

mock__make_function_call() {
  local function_to_mock=$1
  local function_to_call=$2
  shift 2
  eval "${function_to_mock}() { ${function_to_call} \"\$@\"; }"
}

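# Illustrative usage sketch (not part of shebang-unit): inside a hypothetical
# test, collaborators can be replaced before exercising the code under test.
# The mocked function names below are assumptions made for the example:
#
#   notification_should_not_really_call_curl() {
#     mock__make_function_do_nothing "curl"
#     mock__make_function_prints "get_current_user" "alice"
#     mock__make_function_call "send_notification" "fake_send_notification"
#     ...
#   }
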
runner__run_all_test_files() {
  SBU_BASE_TEST_DIRECTORY=$1
  reporter__test_files_start_running
  timer__store_current_time "global_time"
  results__test_files_start_running
  _runner__run_all_test_files_with_pattern_in_directory "$1"
  reporter__test_files_end_running "$(timer__get_time_elapsed "global_time")"
  runner__tests_are_successful
}

_runner__run_all_test_files_with_pattern_in_directory() {
  local file
  local files
  array__from_lines files <<< "$(_runner__get_test_files_in_directory "$1")"
  for file in "${files[@]}"; do
    file_runner__run_test_file "${file}"
  done
}

_runner__get_test_files_in_directory() {
  local files
  array__from_lines files <<< "$(find "$1" -name "${SBU_TEST_FILE_PATTERN}" | sort)"
  if [[ "${SBU_RANDOM_RUN}" == "${SBU_YES}" ]]; then
    array__from_lines files <<< "$(system__randomize_array "${files[@]}")"
  fi
  array__print "${files[@]}"
}

runner__tests_are_successful() {
  (( $(results__get_failing_tests_count) == 0 \
    && $(results__get_skipped_tests_count) == 0 ))
}

file_runner__run_test_file() {
  local file=$1
  local public_functions=($(parser__get_public_functions_in_file "${file}"))
  local test_functions=($(_file_runner__get_test_functions))
  reporter__test_file_starts_running "${file}" "${#test_functions[@]}"
  ( source "${file}"
    _file_runner__run_global_setup_if_exists \
      && _file_runner__call_all_tests
    _file_runner__run_global_teardown_if_exists )
  _file_runner__check_if_global_setup_has_exited
  reporter__test_file_ends_running
}

_file_runner__run_all_tests_if_global_setup_is_successful() {
  _file_runner__call_all_tests
}

_file_runner__call_all_tests() {
  local i
  for (( i=0; i < ${#test_functions[@]}; i++ )); do
    test_runner__run_test "${test_functions[${i}]}" "${public_functions[@]}"
  done
}

_file_runner__skip_all_tests() {
  local i
  for (( i=0; i < ${#test_functions[@]}; i++ )); do
    test_runner__skip_test "${test_functions[${i}]}" "${public_functions[@]}"
  done
}

_file_runner__get_test_functions() {
  local result=()
  local test_function
  for test_function in "${public_functions[@]}"; do
    if _file_runner__function_is_a_test "${test_function}" \
      && [[ "${test_function}" == ${SBU_TEST_FUNCTION_PATTERN} ]]; then
      result+=("${test_function}")
    fi
  done
  _file_runner__get_randomized_test_functions_if_needed "${result[@]}"
}

_file_runner__get_randomized_test_functions_if_needed() {
  if [[ "${SBU_RANDOM_RUN}" == "${SBU_YES}" ]]; then
    system__randomize_array "$@"
  else
    array__print "$@"
  fi
}

_file_runner__run_global_setup_if_exists() {
  database__put "sbu_current_global_setup_has_failed" "${SBU_YES}"
  _file_runner__call_function_if_exists "${SBU_GLOBAL_SETUP_FUNCTION_NAME}" \
    && database__put "sbu_current_global_setup_has_failed" "${SBU_NO}"
}

_file_runner__run_global_teardown_if_exists() {
  _file_runner__call_function_if_exists "${SBU_GLOBAL_TEARDOWN_FUNCTION_NAME}"
}

_file_runner__function_is_a_test() {
  ! array__contains "$1" \
    "${SBU_GLOBAL_SETUP_FUNCTION_NAME}" \
    "${SBU_GLOBAL_TEARDOWN_FUNCTION_NAME}" \
    "${SBU_SETUP_FUNCTION_NAME}" \
    "${SBU_TEARDOWN_FUNCTION_NAME}"
}

_file_runner__call_function_if_exists() {
  local function=$1
  shift 1
  if array__contains "${function}" "${public_functions[@]}"; then
    "${function}"
  fi
}

_file_runner__check_if_global_setup_has_exited() {
  local has_exited="$(database__get "sbu_current_global_setup_has_failed")"
  if [[ "${has_exited}" == "${SBU_YES}" ]]; then
    _file_runner__handle_failure_in_global_setup
  fi
}

_file_runner__handle_failure_in_global_setup() {
  reporter__global_setup_has_failed
  _file_runner__skip_all_tests
}

parser__get_public_functions_in_file() {
  _parser__find_functions_in_file "$1" \
    | _parser__filter_private_functions \
    | awk '{ print $1 }'
}

_parser__find_functions_in_file() {
  grep -o "${SBU_FUNCTION_DECLARATION_REGEX}" "$1" \
    | _parser__get_function_name_from_declaration
}

_parser__filter_private_functions() {
  grep -v "${SBU_PRIVATE_FUNCTION_NAME_REGEX}"
}

_parser__get_function_name_from_declaration() {
  sed "s/${SBU_FUNCTION_DECLARATION_REGEX}/\2/"
}

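# For reference, SBU_FUNCTION_DECLARATION_REGEX matches the usual Bash
# function declaration forms (illustrative, not exhaustive):
#   my_function() {
#   function my_function {
#   function my_function () {
# Functions whose names start with "_" are considered private and are
# filtered out of the public list.
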
timer__store_current_time() {
  local id=$1
  database__put "sbu_beginning_date_$1" "$(system__get_date_in_seconds)"
}

timer__get_time_elapsed() {
  local id=$1
  local beginning_date="$(database__get "sbu_beginning_date_$1")"
  local ending_date="$(system__get_date_in_seconds)"

  [[ -n "${beginning_date}" ]] \
    && system__print "$(( ending_date - beginning_date ))" \
    || system__print "0"
}

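# Illustrative usage sketch (the identifier "my_measure" is an assumption):
#   timer__store_current_time "my_measure"
#   run_something_long
#   timer__get_time_elapsed "my_measure"   # prints elapsed whole seconds
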
results__test_files_start_running() {
  database__put "sbu_successful_tests_count" "0"
  database__put "sbu_failing_tests_count" "0"
  database__put "sbu_skipped_tests_count" "0"
}

results__get_successful_tests_count() {
  _results__get_tests_count_of_type "successful"
}

results__increment_successful_tests() {
  _results__increment_tests_of_type "successful"
}

results__get_failing_tests_count() {
  _results__get_tests_count_of_type "failing"
}

results__increment_failing_tests() {
  _results__increment_tests_of_type "failing"
}

results__get_skipped_tests_count() {
  _results__get_tests_count_of_type "skipped"
}

results__increment_skipped_tests() {
  _results__increment_tests_of_type "skipped"
}

results__get_total_tests_count() {
  local successes="$(results__get_successful_tests_count)"
  local failures="$(results__get_failing_tests_count)"
  local skipped="$(results__get_skipped_tests_count)"
  system__print "$(( successes + failures + skipped ))"
}

_results__get_tests_count_of_type() {
  local type=$1
  database__get "sbu_${type}_tests_count"
}

_results__increment_tests_of_type() {
  local type=$1
  local count="$(results__get_${type}_tests_count)"
  database__put "sbu_${type}_tests_count" "$(( count + 1 ))"
}

test_runner__run_test() {
  local test_function=$1
  shift 1
  reporter__test_starts_running "${test_function}"
  timer__store_current_time "test_time"
  (
    _test_runner__call_setup_if_exists "$@" \
      && _test_runner__call_test_function "${test_function}"
    local setup_and_test_code=$?
    _test_runner__call_teardown_if_exists "$@"
    (( $? == ${SBU_SUCCESS_STATUS_CODE} \
      && ${setup_and_test_code} == ${SBU_SUCCESS_STATUS_CODE} ))
  )
  _test_runner__parse_test_function_result $?
  reporter__test_ends_running "$(timer__get_time_elapsed "test_time")"
}

_test_runner__call_test_function() {
  ( "$1" >&${SBU_STANDARD_FD} 2>&${SBU_ERROR_FD} )
}

_test_runner__call_setup_if_exists() {
  _test_runner__call_function_if_exists "${SBU_SETUP_FUNCTION_NAME}" "$@"
}

_test_runner__call_teardown_if_exists() {
  _test_runner__call_function_if_exists "${SBU_TEARDOWN_FUNCTION_NAME}" "$@"
}

_test_runner__parse_test_function_result() {
  if (( $1 == ${SBU_SUCCESS_STATUS_CODE} )); then
    results__increment_successful_tests
    reporter__test_has_succeeded
  else
    results__increment_failing_tests
    reporter__test_has_failed
  fi
}

_test_runner__call_function_if_exists() {
  local function=$1
  shift 1
  if array__contains "${function}" "$@"; then
    "${function}"
  fi
}

test_runner__skip_test() {
  local test_function=$1
  reporter__test_starts_running "${test_function}"
  results__increment_skipped_tests
  reporter__test_is_skipped "${test_function}"
  reporter__test_ends_running 0
}

reporter__test_files_start_running() {
  _reporter__initialise_file_descriptors
  reporter__for_each_reporter \
    _reporter__call_function "test_files_start_running" "$@"
}

_reporter__initialise_file_descriptors() {
  eval "exec ${SBU_STANDARD_FD}>&1"
  eval "exec ${SBU_ERROR_FD}>&2"
}

reporter__global_setup_has_failed() {
  reporter__for_each_reporter \
    _reporter__call_function "global_setup_has_failed" "$@"
}

reporter__test_file_starts_running() {
  reporter__for_each_reporter \
    _reporter__call_function "test_file_starts_running" "$@"
}

reporter__test_starts_running() {
  reporter__for_each_reporter \
    _reporter__call_function "test_starts_running" "$@"
}

reporter__test_has_succeeded() {
  reporter__for_each_reporter \
    _reporter__call_function "test_has_succeeded" "$@"
}

reporter__test_has_failed() {
  reporter__for_each_reporter \
    _reporter__call_function "test_has_failed" "$@"
}

reporter__test_is_skipped() {
  reporter__for_each_reporter \
    _reporter__call_function "test_is_skipped" "$@"
}

reporter__test_ends_running() {
  reporter__for_each_reporter \
    _reporter__call_function "test_ends_running" "$@"
}

reporter__test_file_ends_running() {
  reporter__for_each_reporter \
    _reporter__call_function "test_file_ends_running" "$@"
}

reporter__test_files_end_running() {
  reporter__for_each_reporter \
    _reporter__call_function "test_files_end_running" "$@"
  _reporter__release_file_descriptors
}

_reporter__release_file_descriptors() {
  eval "exec 1>&${SBU_STANDARD_FD} ${SBU_STANDARD_FD}>&-"
  eval "exec 2>&${SBU_ERROR_FD} ${SBU_ERROR_FD}>&-"
}

_reporter__call_function() {
  local function=$1
  shift 1
  "${reporter}_reporter__${function}" "$@"
}

reporter__for_each_reporter() {
  local reporter
  for reporter in ${SBU_REPORTERS//${SBU_VALUE_SEPARATOR}/ }; do
    "$@"
  done
}

reporter__print_with_color() {
  system__print_with_color "$@" >&${SBU_STANDARD_FD}
}

reporter__print_line() {
  system__print_line "$@" >&${SBU_STANDARD_FD}
}

reporter__print_line_with_color() {
  system__print_line_with_color "$@" >&${SBU_STANDARD_FD}
}

reporter__print_new_line() {
  system__print_new_line >&${SBU_STANDARD_FD}
}

reporter__get_color_code_for_tests_result() {
  local color_code=${SBU_GREEN_COLOR_CODE}
  if ! runner__tests_are_successful; then
    color_code=${SBU_RED_COLOR_CODE}
  fi
  system__print "${color_code}"
}

reporter__get_test_file_relative_name() {
  system__print "${1#${SBU_BASE_TEST_DIRECTORY}\/}"
}

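# A reporter named <name> is dispatched to as "<name>_reporter__<event>" by
# _reporter__call_function, so a reporter only has to define the callbacks
# used above. Minimal sketch of a hypothetical extra reporter (note that
# _main__fail_if_reporter_unknown currently only accepts simple, dots and
# junit, so this is illustrative only):
#
#   quiet_reporter__test_files_start_running() { :; }
#   quiet_reporter__test_file_starts_running() { :; }
#   quiet_reporter__global_setup_has_failed() { :; }
#   quiet_reporter__test_starts_running() { :; }
#   quiet_reporter__test_has_succeeded() { :; }
#   quiet_reporter__test_has_failed() { reporter__print_line "failure"; }
#   quiet_reporter__test_is_skipped() { :; }
#   quiet_reporter__test_ends_running() { :; }
#   quiet_reporter__test_file_ends_running() { :; }
#   quiet_reporter__test_files_end_running() { :; }
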
simple_reporter__test_files_start_running() {
  :
}

simple_reporter__test_file_starts_running() {
  local relative_name="$(reporter__get_test_file_relative_name "$1")"
  reporter__print_line "[File] ${relative_name}"
}

simple_reporter__global_setup_has_failed() {
  reporter__print_line_with_color \
    "Global setup has failed" ${SBU_YELLOW_COLOR_CODE}
}

simple_reporter__test_starts_running() {
  reporter__print_line "[Test] $1"
}

simple_reporter__test_has_succeeded() {
  reporter__print_line_with_color "OK" ${SBU_GREEN_COLOR_CODE}
}

simple_reporter__test_has_failed() {
  reporter__print_line_with_color "KO" ${SBU_RED_COLOR_CODE}
}

simple_reporter__test_is_skipped() {
  reporter__print_line_with_color "Skipped" ${SBU_YELLOW_COLOR_CODE}
}

simple_reporter__test_ends_running() {
  :
}

simple_reporter__test_file_ends_running() {
  reporter__print_new_line
}

simple_reporter__test_files_end_running() {
  local time="in $1s"
  reporter__print_line "[Results]"
  local color="$(reporter__get_color_code_for_tests_result)"
  local total_count="$(_simple_reporter__get_total_count_message)"
  local failures_count="$(_simple_reporter__get_failures_count_message)"
  local skipped_count="$(results__get_skipped_tests_count) skipped"
  local message="${total_count}, ${failures_count}, ${skipped_count} ${time}"
  reporter__print_line_with_color "${message}" "${color}"
}

_simple_reporter__get_total_count_message() {
  local count="$(results__get_total_tests_count)"
  system__print "${count} test$(_simple_reporter__get_agreement ${count})"
}

_simple_reporter__get_failures_count_message() {
  local count="$(results__get_failing_tests_count)"
  system__print "${count} failure$(_simple_reporter__get_agreement ${count})"
}

_simple_reporter__get_agreement() {
  (( $1 > 1 )) \
    && system__print "s" \
    || system__print ""
}

dots_reporter__test_files_start_running() {
  exec 1>/dev/null
  exec 2>/dev/null
}

dots_reporter__test_file_starts_running() {
  :
}

dots_reporter__global_setup_has_failed() {
  :
}

dots_reporter__test_starts_running() {
  :
}

dots_reporter__test_has_succeeded() {
  reporter__print_with_color "." ${SBU_GREEN_COLOR_CODE}
}

dots_reporter__test_has_failed() {
  reporter__print_with_color "F" ${SBU_RED_COLOR_CODE}
}

dots_reporter__test_is_skipped() {
  reporter__print_with_color "S" ${SBU_YELLOW_COLOR_CODE}
}

dots_reporter__test_ends_running() {
  :
}

dots_reporter__test_file_ends_running() {
  :
}

dots_reporter__test_files_end_running() {
  local color="$(reporter__get_color_code_for_tests_result)"
  local text="$(runner__tests_are_successful \
    && system__print "OK" \
    || system__print "KO")"
  reporter__print_line_with_color "${text}" "${color}"
}

junit_reporter__test_files_start_running() {
  _junit_reporter__initialise_report_with \
    "<?xml version=\"1.0\" encoding=\"UTF-8\" ?>"
  _junit_reporter__write_line_to_report "<testsuites>"
}

junit_reporter__test_file_starts_running() {
  local file_name=$1
  local test_count=$2
  local suite_name="$(_junit_reporter__get_suite_name "${file_name}")"
  database__put "sbu_current_suite_name" "${suite_name}"
  _junit_reporter__write_line_to_report \
    " <testsuite name=\"${suite_name}\" tests=\"${test_count}\">"
  _junit_reporter__delete_all_outputs_lines "suite"
  _junit_reporter__redirect_outputs_to_database "suite"
}

junit_reporter__global_setup_has_failed() {
  :
}

junit_reporter__test_starts_running() {
  local suite_name="$(database__get "sbu_current_suite_name")"
  local test_name="$(xml__encode_text "$1")"
  _junit_reporter__write_line_to_report \
    " <testcase name=\"${test_name}\" classname=\"${suite_name}\" \
time=\"\${sbu_current_test_time}\">"
  _junit_reporter__delete_all_outputs_lines "test"
  _junit_reporter__redirect_outputs_to_database "test"
}

junit_reporter__test_has_succeeded() {
  :
}

junit_reporter__test_has_failed() {
  _junit_reporter__write_line_to_report " <failure>"
  _junit_reporter__write_line_to_report " </failure>"
}

junit_reporter__test_is_skipped() {
  _junit_reporter__write_line_to_report " <skipped>"
  _junit_reporter__write_line_to_report " </skipped>"
}

junit_reporter__test_ends_running() {
  _junit_reporter__redirect_outputs_to_database "suite"
  _junit_reporter__write_time_in_current_test_case_tag_in_report "$1"
  _junit_reporter__flush_all_outputs_to_report_if_any "test"
  _junit_reporter__write_line_to_report " </testcase>"
}

_junit_reporter__write_time_in_current_test_case_tag_in_report() {
  local test_time=$1
  local report_content=$(cat "${SBU_JUNIT_REPORTER_OUTPUT_FILE}")
  local content_with_time="$(system__substitute_variable \
    "${report_content}" "sbu_current_test_time" "${test_time}")"
  system__print_line \
    "${content_with_time}" > "${SBU_JUNIT_REPORTER_OUTPUT_FILE}"
}

junit_reporter__test_file_ends_running() {
  _junit_reporter__flush_all_outputs_to_report_if_any "suite"
  _junit_reporter__write_line_to_report " </testsuite>"
  database__put "sbu_current_suite_name" ""
}

junit_reporter__test_files_end_running() {
  _junit_reporter__write_line_to_report "</testsuites>"
}

_junit_reporter__get_suite_name() {
  local relative_name="$(reporter__get_test_file_relative_name "$1")"
  local dots_replaced_by_underscores="${relative_name//./_}"
  local slashes_replaced_by_dots="${dots_replaced_by_underscores//\//.}"
  xml__encode_text "${slashes_replaced_by_dots}"
}

_junit_reporter__initialise_report_with() {
  system__print_line "$1" > "${SBU_JUNIT_REPORTER_OUTPUT_FILE}"
}

_junit_reporter__write_line_to_report() {
  system__print_line "$1" >> "${SBU_JUNIT_REPORTER_OUTPUT_FILE}"
}

_junit_reporter__redirect_outputs_to_database() {
  local scope=$1
  exec 1>>\
    "$(database__get_descriptor "sbu_current_${scope}_standard_outputs_lines")"
  exec 2>>\
    "$(database__get_descriptor "sbu_current_${scope}_error_outputs_lines")"
}

_junit_reporter__delete_all_outputs_lines() {
  database__put "sbu_current_$1_standard_outputs_lines"
  database__put "sbu_current_$1_error_outputs_lines"
}

_junit_reporter__flush_all_outputs_to_report_if_any() {
  _junit_reporter__flush_outputs_to_report_if_any "$1" "standard"
  _junit_reporter__flush_outputs_to_report_if_any "$1" "error"
}

_junit_reporter__flush_outputs_to_report_if_any() {
  local scope=$1
  local outputs_type=$2
  local key="sbu_current_${scope}_${outputs_type}_outputs_lines"
  local outputs="$(database__get "${key}")"
  if [[ -n "${outputs}" ]]; then
    _junit_reporter__write_outputs_to_report \
      "${scope}" "${outputs_type}" "${outputs}"
    database__put "${key}" ""
  fi
}

_junit_reporter__write_outputs_to_report() {
  local scope=$1
  local outputs_type=$2
  local outputs=$3
  local tag="$(_junit_reporter__get_tag_for_outputs_type "${outputs_type}")"
  local indentation="$(_junit_reporter__get_indentation_for_scope "${scope}")"
  _junit_reporter__write_line_to_report "${indentation}<${tag}>"
  _junit_reporter__write_line_to_report "$(xml__encode_text "${outputs}")"
  _junit_reporter__write_line_to_report "${indentation}</${tag}>"
}

_junit_reporter__get_tag_for_outputs_type() {
  [[ "$1" == "standard" ]] \
    && system__print "system-out" \
    || system__print "system-err"
}

_junit_reporter__get_indentation_for_scope() {
  [[ "$1" == "suite" ]] \
    && system__print " " \
    || system__print " "
}

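# Rough shape of the report written by the functions above (illustrative only;
# the actual indentation comes from _junit_reporter__get_indentation_for_scope
# and from the writer functions):
#   <?xml version="1.0" encoding="UTF-8" ?>
#   <testsuites>
#    <testsuite name="some_test_sh" tests="1">
#    <testcase name="a_test" classname="some_test_sh" time="0">
#    </testcase>
#    </testsuite>
#   </testsuites>
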
xml__encode_text() {
  local encoded=${1//\&/\&amp\;}
  encoded=${encoded//\</\&lt\;}
  encoded=${encoded//\>/\&gt\;}
  encoded=${encoded//\"/\&quot\;}
  encoded=${encoded//\'/\&apos\;}
  system__print "${encoded}"
}

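# Example: xml__encode_text 'a < b & "c"' prints: a &lt; b &amp; &quot;c&quot;
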
database__initialise() {
  _SBU_DB_TOKEN="$(system__random)"
  _database__ensure_directory_exists
}

database__release() {
  rm -rf "$(_database__get_dir)"
}

database__put() {
  _database__ensure_directory_exists
  system__print "$2" > "$(_database__get_dir)/$1"
}

database__post() {
  _database__ensure_directory_exists
  system__print "$2" >> "$(_database__get_dir)/$1"
}

database__post_line() {
  _database__ensure_directory_exists
  system__print_line "$2" >> "$(_database__get_dir)/$1"
}

database__put_variable() {
  _database__ensure_directory_exists
  database__put "$1" "${!1}"
}

database__get() {
  [[ -e "$(_database__get_dir)/$1" ]] && cat "$(_database__get_dir)/$1"
}

database__get_descriptor() {
  system__print "$(_database__get_dir)/$1"
}

_database__ensure_directory_exists() {
  mkdir -p "$(_database__get_dir)"
}

_database__get_dir() {
  system__print "${SBU_TEMP_DIR}/database/${_SBU_DB_TOKEN}"
}

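# Illustrative usage sketch (the key names are assumptions for the example):
#   database__initialise
#   database__put "answer" "42"
#   database__get "answer"              # prints 42
#   database__post_line "log" "a line"  # appends a line to the "log" entry
#   database__release
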
system__get_string_or_default() {
  [[ -n "$1" ]] \
    && system__print "$1" \
    || system__print "$2"
}

system__get_date_in_seconds() {
  date +%s
}

system__print_line_with_color() {
  system__print_with_color "$@"
  system__print_new_line
}

system__print_with_color() {
  if [[ "${SBU_USE_COLORS}" == "${SBU_YES}" ]]; then
    printf "$2$1${SBU_DEFAULT_COLOR_CODE}"
  else
    system__print "$1"
  fi
}

system__print_line() {
  system__print "$1"
  system__print_new_line
}

system__print() {
  printf "%s" "$1"
}

system__print_new_line() {
  printf "\n"
}

array__contains() {
  local value=$1
  shift 1
  local i
  for (( i=1; i <= $#; i++ )); do
    if [[ "${!i}" == "${value}" ]]; then
      return ${SBU_SUCCESS_STATUS_CODE}
    fi
  done
  return ${SBU_FAILURE_STATUS_CODE}
}

array__from_lines() {
  local IFS=$'\n'
  eval "$1=(\$(</dev/stdin))"
}

array__print() {
  local element
  for element in "$@"; do
    system__print_line "${element}"
  done
}

system__pretty_print_array() {
  local array_as_string=""
  local i
  for (( i=1; i <= $#; i++ )); do
    array_as_string+="${!i}, "
  done
  array_as_string=${array_as_string/%, /}
  printf "[%s]" "${array_as_string}"
}

system__string_contains() {
  [[ "$1" == *"$2"* ]]
}

system__randomize_array() {
  local copy=("$@")
  while (( ${#copy[@]} > 0 )); do
    local random_index=$(( $(system__random) % ${#copy[@]} ))
    system__print_line "${copy[${random_index}]}"
    unset copy[${random_index}]
    copy=("${copy[@]}")
  done
}

system__random() {
  system__print "${RANDOM}"
}

system__substitute_variable() {
  local string=$1
  local key="\$\{$2\}"
  local value=$3
  printf "%s" "${string//${key}/${value}}"
}

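# Illustrative usage sketch of the helpers above (the values are assumptions):
#   array__from_lines fruits <<< "$(printf "apple\nbanana")"
#   array__contains "apple" "${fruits[@]}"           # returns 0
#   system__pretty_print_array "${fruits[@]}"        # prints [apple, banana]
#   system__substitute_variable 'hi ${name}' "name" "world"  # prints hi world
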
main__main() {
  configuration__load
  _main__initialise
  local parsed_arguments=0
  _main__parse_arguments "$@"
  shift ${parsed_arguments}
  _main__assert_only_one_argument_left $#
  _main__assert_reporters_are_known
  SBU_BASE_TEST_DIRECTORY=$1

  if [[ "${SBU_NO_RUN}" != "${SBU_YES}" ]]; then
    runner__run_all_test_files "$1"
    return $?
  fi
}

_main__initialise() {
  database__initialise
  trap _main__release EXIT
}

_main__release() {
  database__release
}

_main__parse_arguments() {
  local argument
  for argument in "$@"; do
    case "${argument}" in
      -a|--api-cheat-sheet)
        _main__print_api_cheat_sheet_and_exit
        ;;
      -c=*|--colors=*)
        SBU_USE_COLORS="${argument#*=}"
        (( parsed_arguments++ ))
        ;;
      -d=*|--random-run=*)
        SBU_RANDOM_RUN="${argument#*=}"
        (( parsed_arguments++ ))
        ;;
      -h|--help)
        _main__print_full_usage
        exit ${SBU_SUCCESS_STATUS_CODE}
        ;;
      -f=*|--file-pattern=*)
        SBU_TEST_FILE_PATTERN="${argument#*=}"
        (( parsed_arguments++ ))
        ;;
      --no-run)
        SBU_NO_RUN="${SBU_YES}"
        (( parsed_arguments++ ))
        ;;
      -o=*|--output-file=*)
        SBU_JUNIT_REPORTER_OUTPUT_FILE="${argument#*=}"
        (( parsed_arguments++ ))
        ;;
      -t=*|--test-pattern=*)
        SBU_TEST_FUNCTION_PATTERN="${argument#*=}"
        (( parsed_arguments++ ))
        ;;
      -r=*|--reporters=*)
        SBU_REPORTERS="${argument#*=}"
        (( parsed_arguments++ ))
        ;;
      -*|--*)
        _main__print_illegal_option "${argument}"
        _main__print_usage_and_exit_with_code ${SBU_FAILURE_STATUS_CODE}
        ;;
    esac
  done
}

_main__assert_reporters_are_known() {
  reporter__for_each_reporter _main__fail_if_reporter_unknown
}

_main__fail_if_reporter_unknown() {
  if ! array__contains "${reporter}" "simple" "dots" "junit"; then
    system__print_line \
      "$(_main__get_script_name): unknown reporter <${reporter}>"
    exit ${SBU_FAILURE_STATUS_CODE}
  fi
}

_main__print_illegal_option() {
  local option="${1%=*}"
  option="${option#-}"
  option="${option#-}"
  system__print_line "$(_main__get_script_name): illegal option -- ${option}"
}

_main__assert_only_one_argument_left() {
  if (( $1 > 1 )); then
    system__print_line "$(_main__get_script_name): only one path is allowed"
    _main__print_usage_and_exit_with_code ${SBU_FAILURE_STATUS_CODE}
  fi
}

_main__get_script_name() {
  basename "${BASH_SOURCE[0]}"
}

_main__print_usage_and_exit_with_code() {
  _main__print_usage
  exit $1
}

_main__print_full_usage() {
  _main__print_usage
  local script="$(_main__get_script_name)"
  system__print_new_line
  system__print_line "\
[options]
  -a, --api-cheat-sheet
    print the api cheat sheet (assertions, special functions, mocks)
  -c, --colors=${SBU_YES} or ${SBU_NO}
    print tests output with colors or not
  -d, --random-run=${SBU_YES} or ${SBU_NO}
    run test files and functions in random order or not
  -f, --file-pattern=<pattern>
    pattern to filter test files
  -h, --help
    print usage
  -o, --output-file=<file>
    output file for JUnit reporter
  -r, --reporters=<reporter1,reporter2>
    comma-separated reporters (simple, dots or junit)
  -t, --test-pattern=<pattern>
    pattern to filter test functions in files

[examples]
  ${script} .
    run all tests in current directory
  ${script} -f=*test.sh sources/test
    run all test files ending with test.sh in sources/test"
}

_main__print_usage() {
  system__print_line "\
usage: $(_main__get_script_name) [options] path
run all tests in path"
}

_main__print_api_cheat_sheet_and_exit() {
  system__print_line "\
[assertions]
  assertion__equal (value, other)
    -> assert that <value> is equal to <other>
  assertion__different (value, other)
    -> assert that <value> is different from <other>
  assertion__string_contains (string, substring)
    -> assert that <string> contains <substring>
  assertion__string_does_not_contain (string, substring)
    -> assert that <string> does not contain <substring>
  assertion__string_empty (string)
    -> assert that <string> is empty
  assertion__string_not_empty (string)
    -> assert that <string> is not empty
  assertion__array_contains (element, array[0], array[1], ...)
    -> assert that the <array> contains the <element>
  assertion__array_does_not_contain (element, array elements...)
    -> assert that the <array> does not contain the <element>
  assertion__successful (command)
    -> assert that the <command> is successful
  assertion__failing (command)
    -> assert that the <command> is failing
  assertion__status_code_is_success (code)
    -> assert that the status <code> is 0
  assertion__status_code_is_failure (code)
    -> assert that the status <code> is not 0

[special functions]
  ${SBU_GLOBAL_SETUP_FUNCTION_NAME}
    -> Executed before all tests in a file
  ${SBU_GLOBAL_TEARDOWN_FUNCTION_NAME}
    -> Executed after all tests in a file
  ${SBU_SETUP_FUNCTION_NAME}
    -> Executed before each test in a file
  ${SBU_TEARDOWN_FUNCTION_NAME}
    -> Executed after each test in a file

[mocks]
  mock__make_function_do_nothing (function_to_mock)
    -> make function do nothing
  mock__make_function_prints (function_to_mock, message)
    -> make function print a message
  mock__make_function_call (function_to_mock, function_to_call)
    -> make function call another function"
  exit ${SBU_SUCCESS_STATUS_CODE}
}

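# Example invocations (illustrative; the "tests" directory is an assumption):
#   shebang-unit tests
#   shebang-unit --reporters=simple,junit --output-file=report.xml tests
#   shebang-unit --test-pattern=*addition* --random-run=yes tests
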
main__main "$@"