diff --git a/.travis.yml b/.travis.yml
index 578bac4cc..46b08b43f 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,38 +1,128 @@
-sudo: required
-dist: trusty
-language: python
-python: "2.7"
+sudo: false
-addons:
- hosts:
- - node1
+git:
+ depth: 5
env:
- - SITE=cluster.yml ANSIBLE_VERSION=2.0.0
+ global:
+ GCE_USER=travis
+ SSH_USER=$GCE_USER
+ TEST_ID=$TRAVIS_JOB_NUMBER
+ CONTAINER_ENGINE=docker
+ PRIVATE_KEY=$GCE_PRIVATE_KEY
+ ANSIBLE_KEEP_REMOTE_FILES=1
+ matrix:
+ # Debian Jessie
+ - >-
+ KUBE_NETWORK_PLUGIN=flannel
+ CLOUD_IMAGE=debian-8
+ CLOUD_REGION=europe-west1-b
+ - >-
+ KUBE_NETWORK_PLUGIN=calico
+ CLOUD_IMAGE=debian-8
+ CLOUD_REGION=europe-west1-b
-install:
+ # CentOS 7
+ - >-
+ KUBE_NETWORK_PLUGIN=flannel
+ CLOUD_IMAGE=centos-7-sudo
+ CLOUD_REGION=us-central1-c
+
+ # - >-
+ # KUBE_NETWORK_PLUGIN=calico
+ # CLOUD_IMAGE=centos-7-sudo
+ # CLOUD_REGION=us-central1-c
+
+
+ # # Redhat 7
+ # - >-
+ # KUBE_NETWORK_PLUGIN=flannel
+ # CLOUD_IMAGE=rhel-7-sudo
+ # CLOUD_REGION=us-east1-d
+ # SSH_ARGS='--ssh-extra-args="-t"'
+ # - >-
+ # KUBE_NETWORK_PLUGIN=calico
+ # CLOUD_IMAGE=rhel-7-sudo
+ # CLOUD_REGION=us-east1-d
+
+ # Ubuntu 14.04
+ - >-
+ KUBE_NETWORK_PLUGIN=flannel
+ CLOUD_IMAGE=ubuntu-1404-trusty
+ CLOUD_REGION=europe-west1-c
+ - >-
+ KUBE_NETWORK_PLUGIN=calico
+ CLOUD_IMAGE=ubuntu-1404-trusty
+ CLOUD_REGION=europe-west1-c
+
+ # Ubuntu 15.10
+ - >-
+ KUBE_NETWORK_PLUGIN=flannel
+ CLOUD_IMAGE=ubuntu-1510-wily
+ CLOUD_REGION=us-central1-a
+ - >-
+ KUBE_NETWORK_PLUGIN=calico
+ CLOUD_IMAGE=ubuntu-1510-wily
+ CLOUD_REGION=us-central1-a
+
+
+matrix:
+ allow_failures:
+ - env: KUBE_NETWORK_PLUGIN=flannel CLOUD_IMAGE=centos-7-sudo CLOUD_REGION=us-central1-c
+
+before_install:
# Install Ansible.
- - sudo -H pip install ansible==${ANSIBLE_VERSION}
- - sudo -H pip install netaddr
+ - pip install --user boto -U
+ - pip install --user ansible
+ - pip install --user netaddr
+ - pip install --user apache-libcloud
cache:
- directories:
- - $HOME/releases
+ - directories:
- $HOME/.cache/pip
+ - $HOME/.local
before_script:
- - export PATH=$PATH:/usr/local/bin
+ - echo "RUN $TRAVIS_JOB_NUMBER $KUBE_NETWORK_PLUGIN $CONTAINER_ENGINE "
+ - mkdir -p $HOME/.ssh
+ - echo $PRIVATE_KEY | base64 -d > $HOME/.ssh/id_rsa
+ - echo $GCE_PEM_FILE | base64 -d > $HOME/.ssh/gce
+ - chmod 400 $HOME/.ssh/id_rsa
+ - chmod 755 $HOME/.local/bin/ansible-playbook
+ - $HOME/.local/bin/ansible-playbook --version
+ - cp tests/ansible.cfg .
+# - "echo $HOME/.local/bin/ansible-playbook -i inventory.ini -u $SSH_USER -e ansible_ssh_user=$SSH_USER $SSH_ARGS -b --become-user=root -e '{\"cloud_provider\": true}' $LOG_LEVEL -e kube_network_plugin=${KUBE_NETWORK_PLUGIN} setup-kubernetes/cluster.yml"
script:
- # Check the role/playbook's syntax.
- - "sudo -H ansible-playbook -i inventory/local-tests.cfg $SITE --syntax-check"
-
- # Run the role/playbook with ansible-playbook.
- - "sudo -H ansible-playbook -i inventory/local-tests.cfg $SITE --connection=local"
-
- # Run the role/playbook again, checking to make sure it's idempotent.
- >
- sudo -H ansible-playbook -i inventory/local-tests.cfg $SITE --connection=local
- | tee /dev/stderr | grep -q 'changed=0.*failed=0'
- && (echo 'Idempotence test: pass' && exit 0)
- || (echo 'Idempotence test: fail' && exit 1)
+ $HOME/.local/bin/ansible-playbook tests/cloud_playbooks/create-gce.yml -i tests/local_inventory/hosts.cfg -c local $LOG_LEVEL
+ -e test_id=${TEST_ID}
+ -e kube_network_plugin=${KUBE_NETWORK_PLUGIN}
+ -e gce_project_id=${GCE_PROJECT_ID}
+ -e gce_service_account_email=${GCE_ACCOUNT}
+ -e gce_pem_file=${HOME}/.ssh/gce
+ -e cloud_image=${CLOUD_IMAGE}
+ -e inventory_path=${PWD}/inventory/inventory.ini
+ -e cloud_region=${CLOUD_REGION}
+
+ # Create cluster
+ - "$HOME/.local/bin/ansible-playbook -i inventory/inventory.ini -u $SSH_USER -e ansible_ssh_user=$SSH_USER $SSH_ARGS -b --become-user=root -e '{\"cloud_provider\": true}' $LOG_LEVEL -e kube_network_plugin=${KUBE_NETWORK_PLUGIN} cluster.yml"
+ # Test cases
+ ## Test Master API
+ - $HOME/.local/bin/ansible-playbook -i inventory/inventory.ini tests/testcases/010_check-apiserver.yml $LOG_LEVEL
+ ## Create a pod
+ - $HOME/.local/bin/ansible-playbook -i inventory/inventory.ini -u $SSH_USER -e ansible_ssh_user=$SSH_USER $SSH_ARGS -b --become-user=root tests/testcases/020_check-create-pod.yml $LOG_LEVEL
+ ## Ping between 2 pods
+ - $HOME/.local/bin/ansible-playbook -i inventory/inventory.ini -u $SSH_USER -e ansible_ssh_user=$SSH_USER $SSH_ARGS -b --become-user=root tests/testcases/030_check-network.yml $LOG_LEVEL
+
+after_script:
+ - >
+ $HOME/.local/bin/ansible-playbook -i inventory/inventory.ini tests/cloud_playbooks/delete-gce.yml -c local $LOG_LEVEL
+ -e test_id=${TEST_ID}
+ -e kube_network_plugin=${KUBE_NETWORK_PLUGIN}
+ -e gce_project_id=${GCE_PROJECT_ID}
+ -e gce_service_account_email=${GCE_ACCOUNT}
+ -e gce_pem_file=${HOME}/.ssh/gce
+ -e cloud_image=${CLOUD_IMAGE}
+ -e inventory_path=${PWD}/inventory/inventory.ini
+ -e cloud_region=${CLOUD_REGION}
diff --git a/ansible.cfg b/ansible.cfg
new file mode 100644
index 000000000..2be6f4d02
--- /dev/null
+++ b/ansible.cfg
@@ -0,0 +1,4 @@
+[ssh_connection]
+pipelining=True
+[defaults]
+host_key_checking=False
diff --git a/tests/README.md b/tests/README.md
new file mode 100644
index 000000000..24b98197f
--- /dev/null
+++ b/tests/README.md
@@ -0,0 +1,33 @@
+# k8s-integration-tests
+
+*Work In Progress*
+
+## Test environment variables
+
+### Common
+
+Variable | Description | Required | Default
+--------------------- | -------------------------------------- | ---------- | --------
+`TEST_ID` | A unique execution ID for this test | Yes |
+`KUBE_NETWORK_PLUGIN` | The network plugin (calico or flannel) | Yes |
+`PRIVATE_KEY_FILE` | The path to the SSH private key file | No |
+
+### AWS Tests
+
+Variable | Description | Required | Default
+--------------------- | ----------------------------------------------- | ---------- | ---------
+`AWS_ACCESS_KEY` | The Amazon Access Key ID | Yes |
+`AWS_SECRET_KEY` | The Amazon Secret Access Key | Yes |
+`AWS_AMI_ID` | The AMI ID to deploy | Yes |
+`AWS_KEY_PAIR_NAME` | The name of the EC2 key pair to use | Yes |
+`AWS_SECURITY_GROUP` | The EC2 Security Group to use | No | default
+`AWS_REGION` | The EC2 region | No | eu-west-1
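+
+For example, provisioning the AWS test nodes locally (outside Travis) might look like the sketch below; it mirrors the invocation in `tests/scripts/ansibl8s_test.sh`, pointed at the playbook and local inventory added under `tests/`, with all values coming from the variables above:
+
+```bash
+cd tests
+ansible-playbook cloud_playbooks/create-aws.yml -i local_inventory/hosts.cfg \
+  -e test_id=${TEST_ID} \
+  -e kube_network_plugin=${KUBE_NETWORK_PLUGIN} \
+  -e aws_access_key=${AWS_ACCESS_KEY} \
+  -e aws_secret_key=${AWS_SECRET_KEY} \
+  -e aws_ami_id=${AWS_AMI_ID} \
+  -e key_name=${AWS_KEY_PAIR_NAME} \
+  -e aws_security_group=${AWS_SECURITY_GROUP} \
+  -e aws_region=${AWS_REGION} \
+  -e inventory_path=${PWD}/inventory.ini
+```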
+
+#### Using a private SSH key
+
+##### GCE service account key
+
+```bash
+openssl pkcs12 -in gce-secure.p12 -passin pass:notasecret -nodes -nocerts | openssl rsa -out gce-secure.pem
+cat gce-secure.pem | base64 -w0 > GCE_PEM_FILE
+```
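+
+The Travis build reverses this encoding in `before_script` (see `.travis.yml` in this change) before running any playbook:
+
+```bash
+# from .travis.yml before_script: restore the SSH key and the GCE PEM file from env vars
+mkdir -p $HOME/.ssh
+echo $PRIVATE_KEY | base64 -d > $HOME/.ssh/id_rsa
+echo $GCE_PEM_FILE | base64 -d > $HOME/.ssh/gce
+chmod 400 $HOME/.ssh/id_rsa
+```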
diff --git a/tests/ansible.cfg b/tests/ansible.cfg
new file mode 100644
index 000000000..2be6f4d02
--- /dev/null
+++ b/tests/ansible.cfg
@@ -0,0 +1,4 @@
+[ssh_connection]
+pipelining=True
+[defaults]
+host_key_checking=False
diff --git a/tests/cloud_playbooks/create-aws.yml b/tests/cloud_playbooks/create-aws.yml
new file mode 100644
index 000000000..b0749ab5b
--- /dev/null
+++ b/tests/cloud_playbooks/create-aws.yml
@@ -0,0 +1,33 @@
+---
+- hosts: localhost
+ sudo: False
+ gather_facts: False
+
+ tasks:
+ - name: Provision a set of instances
+ ec2:
+ key_name: "{{ aws.key_name }}"
+ aws_access_key: "{{ aws.access_key }}"
+ aws_secret_key: "{{ aws.secret_key }}"
+ region: "{{ aws.region }}"
+ group_id: "{{ aws.group}}"
+ instance_type: "{{ aws.instance_type}}"
+ image: "{{ aws.ami_id }}"
+ wait: true
+ count: "{{ aws.count }}"
+ instance_tags: "{{ aws.tags }}"
+ register: ec2
+
+ - name: Template the inventory
+ template:
+ src: ../templates/inventory.ini.j2
+ dest: "{{ inventory_path }}"
+
+ - name: Wait until SSH is available
+ local_action:
+ module: wait_for
+ host: "{{ item.public_ip }}"
+ port: 22
+ timeout: 300
+ state: started
+ with_items: ec2.instances
diff --git a/tests/cloud_playbooks/create-gce.yml b/tests/cloud_playbooks/create-gce.yml
new file mode 100644
index 000000000..18b5f51a3
--- /dev/null
+++ b/tests/cloud_playbooks/create-gce.yml
@@ -0,0 +1,33 @@
+---
+- hosts: localhost
+ sudo: False
+ gather_facts: no
+ vars:
+ cloud_machine_type: g1-small
+
+ tasks:
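+ # test_id comes from TRAVIS_JOB_NUMBER (e.g. "123.4"); GCE instance names may not contain dots, so they are replaced with dashes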
+ - name: replace_test_id
+ set_fact:
+ test_name: "{{test_id |regex_replace('\\.', '-')}}"
+
+ - name: Create gce instances
+ gce:
+ instance_names: "k8s-{{test_name}}-1,k8s-{{test_name}}-2,k8s-{{test_name}}-3"
+ machine_type: "{{ cloud_machine_type }}"
+ image: "{{ cloud_image }}"
+ service_account_email: "{{ gce_service_account_email }}"
+ pem_file: "{{ gce_pem_file }}"
+ project_id: "{{ gce_project_id }}"
+ zone: "{{cloud_region}}"
+ metadata: '{"test_id": "{{test_id}}", "network": "{{kube_network_plugin}}"}'
+ tags: "build-{{test_name}},{{kube_network_plugin}}"
+ register: gce
+
+ - name: Template the inventory
+ template:
+ src: ../templates/inventory-gce.j2
+ dest: "{{ inventory_path }}"
+
+ - name: Wait for SSH to come up
+ wait_for: host={{item.public_ip}} port=22 delay=10 timeout=180 state=started
+ with_items: gce.instance_data
diff --git a/tests/cloud_playbooks/delete-aws.yml b/tests/cloud_playbooks/delete-aws.yml
new file mode 100644
index 000000000..3b44f3091
--- /dev/null
+++ b/tests/cloud_playbooks/delete-aws.yml
@@ -0,0 +1,15 @@
+---
+- hosts: kube-node
+ sudo: False
+
+ tasks:
+ - name: Gather EC2 facts
+ action: ec2_facts
+
+ - name: Terminate EC2 instances
+ local_action:
+ module: ec2
+ state: absent
+ instance_ids: "{{ ansible_ec2_instance_id }}"
+ region: "{{ ansible_ec2_placement_region }}"
+ wait: True
diff --git a/tests/cloud_playbooks/delete-gce.yml b/tests/cloud_playbooks/delete-gce.yml
new file mode 100644
index 000000000..d42c6cc91
--- /dev/null
+++ b/tests/cloud_playbooks/delete-gce.yml
@@ -0,0 +1,24 @@
+---
+- hosts: localhost
+ sudo: False
+ gather_facts: no
+ vars:
+ cloud_machine_type: f1-micro
+
+ tasks:
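+ # Instances are looked up by the same names that create-gce.yml derived from test_id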
+ - name: replace_test_id
+ set_fact:
+ test_name: "{{test_id |regex_replace('\\.', '-')}}"
+
+ - name: delete gce instances
+ gce:
+ instance_names: "k8s-{{test_name}}-1,k8s-{{test_name}}-2,k8s-{{test_name}}-3"
+ machine_type: "{{ cloud_machine_type }}"
+ image: "{{ cloud_image }}"
+ service_account_email: "{{ gce_service_account_email }}"
+ pem_file: "{{ gce_pem_file }}"
+ project_id: "{{ gce_project_id }}"
+ zone: "{{cloud_region | default('europe-west1-b')}}"
+ metadata: '{"test_id": "{{test_id}}", "network": "{{kube_network_plugin}}"}'
+ state: 'absent'
+ register: gce
diff --git a/tests/local_inventory/host_vars/localhost b/tests/local_inventory/host_vars/localhost
new file mode 100644
index 000000000..695c0ecbf
--- /dev/null
+++ b/tests/local_inventory/host_vars/localhost
@@ -0,0 +1,12 @@
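+# Defaults for the local test runner: the aws_* extra-vars passed on the command line are mapped into the "aws" dict consumed by cloud_playbooks/create-aws.yml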
+aws:
+ key_name: "{{ key_name | default('ansibl8s') }}"
+ access_key: "{{ aws_access_key }}"
+ secret_key: "{{ aws_secret_key }}"
+ region: "{{ aws_region | default('eu-west-1') }}" # default to eu-west-1
+ group: "{{ aws_security_group | default ('default')}}"
+ instance_type: t2.micro
+ ami_id: "{{ aws_ami_id | default('ami-02724d1f') }}" # default to Debian Jessie
+ count: 3
+ tags:
+ test_id: "{{ test_id }}"
+ network_plugin: "{{ kube_network_plugin }}"
diff --git a/tests/local_inventory/hosts.cfg b/tests/local_inventory/hosts.cfg
new file mode 100644
index 000000000..a3550c31b
--- /dev/null
+++ b/tests/local_inventory/hosts.cfg
@@ -0,0 +1 @@
+localhost ansible_connection=local
\ No newline at end of file
diff --git a/tests/run-tests.sh b/tests/run-tests.sh
new file mode 100755
index 000000000..c20438e10
--- /dev/null
+++ b/tests/run-tests.sh
@@ -0,0 +1,8 @@
+#! /bin/bash
+
+# curl -# -C - -o shebang-unit https://raw.github.com/arpinum-oss/shebang-unit/master/releases/shebang-unit
+# chmod +x shebang-unit
+
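+# Run the *_test.sh files with shebang-unit (simple + junit reporters) and write a timestamped JUnit report into tests-results/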
+now=$(date +"%Y%m%d%H%M%S")
+mkdir -p ${PWD}/tests-results
+./shebang-unit --reporters=simple,junit --output-file=${PWD}/tests-results/junit_report-${now}.xml tests
diff --git a/tests/scripts/ansibl8s_test.sh b/tests/scripts/ansibl8s_test.sh
new file mode 100644
index 000000000..53f8f2d7d
--- /dev/null
+++ b/tests/scripts/ansibl8s_test.sh
@@ -0,0 +1,52 @@
+#! /bin/bash
+
+global_setup() {
+ git clone https://github.com/ansibl8s/setup-kubernetes.git setup-kubernetes
+ private_key=""
+ if [ ! -z ${PRIVATE_KEY_FILE} ]
+ then
+ private_key="--private-key=${PRIVATE_KEY_FILE}"
+ fi
+ ansible-playbook create.yml -i hosts -u admin -s \
+ -e test_id=${TEST_ID} \
+ -e kube_network_plugin=${KUBE_NETWORK_PLUGIN} \
+ -e aws_access_key=${AWS_ACCESS_KEY} \
+ -e aws_secret_key=${AWS_SECRET_KEY} \
+ -e aws_ami_id=${AWS_AMI_ID} \
+ -e aws_security_group=${AWS_SECURITY_GROUP} \
+ -e key_name=${AWS_KEY_PAIR_NAME} \
+ -e inventory_path=${PWD}/inventory.ini \
+ -e aws_region=${AWS_REGION}
+}
+
+global_teardown() {
+ if [ -f inventory.ini ];
+ then
+ ansible-playbook -i inventory.ini -u admin delete.yml
+ fi
+ rm -rf ${PWD}/setup-kubernetes
+}
+
+should_deploy_cluster() {
+ ansible-playbook -i inventory.ini -s ${private_key} -e kube_network_plugin=${KUBE_NETWORK_PLUGIN} setup-kubernetes/cluster.yml
+
+ assertion__status_code_is_success $?
+}
+
+should_api_server_respond() {
+ ansible-playbook -i inventory.ini ${private_key} testcases/check-apiserver.yml
+
+ assertion__status_code_is_success $?
+}
+
+should_create_pod() {
+ ansible-playbook -i inventory.ini -s ${private_key} testcases/check-create-pod.yml -vv
+
+ assertion__status_code_is_success $?
+}
+
+should_pod_be_in_expected_subnet() {
+ ansible-playbook -i inventory.ini -s ${private_key} testcases/check-network.yml -vv
+
+ assertion__status_code_is_success $?
+}
diff --git a/tests/shebang-unit b/tests/shebang-unit
new file mode 100755
index 000000000..6f9a2bc6c
--- /dev/null
+++ b/tests/shebang-unit
@@ -0,0 +1,1146 @@
+#!/usr/bin/env bash
+
+# Copyright (C) 2015, Arpinum
+#
+# shebang-unit is free software: you can redistribute it and/or modify it under
+# the terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# shebang-unit is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
+# details.
+#
+# You should have received a copy of the GNU General Public License along with
+# shebang-unit. If not, see http://www.gnu.org/licenses/lgpl.html.
+
+# shebang-unit all in one source file
+
+
+configuration__load() {
+ # yes/no representation used with shebang-unit parameters to activate
+ # stuff like colors
+ SBU_YES="yes"
+ SBU_NO="no"
+
+ # Colors for outputs
+ SBU_GREEN_COLOR_CODE="\\033[1;32m"
+ SBU_RED_COLOR_CODE="\\033[1;31m"
+ SBU_YELLOW_COLOR_CODE="\\033[1;33m"
+ SBU_DEFAULT_COLOR_CODE="\\e[0m"
+
+ # Functions coding conventions
+ SBU_GLOBAL_SETUP_FUNCTION_NAME="global_setup"
+ SBU_GLOBAL_TEARDOWN_FUNCTION_NAME="global_teardown"
+ SBU_SETUP_FUNCTION_NAME="setup"
+ SBU_TEARDOWN_FUNCTION_NAME="teardown"
+ SBU_FUNCTION_DECLARATION_REGEX="^[ ]*\(function\)\{0,1\}[ ]*\([A-Za-z0-9_-]\{1,\}\)[ ]*\(([ ]*)\)\{0,1\}[ ]*{"
+ SBU_PRIVATE_FUNCTION_NAME_REGEX="^_.*"
+
+ # Default configuration that can be modified with shebang-unit parameters
+ # For more information see shebang-unit usages
+ SBU_TEST_FILE_PATTERN="*_test.sh"
+ SBU_TEST_FUNCTION_PATTERN="*"
+ SBU_USE_COLORS="${SBU_YES}"
+ SBU_RANDOM_RUN="${SBU_NO}"
+ SBU_REPORTERS="simple"
+ SBU_JUNIT_REPORTER_OUTPUT_FILE="./junit_report.xml"
+
+ # Internal constants
+ SBU_SUCCESS_STATUS_CODE=0
+ SBU_FAILURE_STATUS_CODE=1
+ SBU_VALUE_SEPARATOR=","
+ SBU_TEMP_DIR="/tmp/.shebang-unit"
+ SBU_LAST_ASSERTION_MSG_KEY="last_assertion_message"
+ SBU_NO_RUN="${SBU_NO}"
+ SBU_STANDARD_FD=42
+ SBU_ERROR_FD=43
+}
+
+
+assertion__equal() {
+ if [[ "$1" != "$2" ]]; then
+ _assertion__failed "Actual: <$2>, expected: <$1>."
+ fi
+}
+
+assertion__different() {
+ if [[ "$1" == "$2" ]]; then
+ _assertion__failed "Both values are: <$1>."
+ fi
+}
+
+assertion__string_contains() {
+ if ! system__string_contains "$1" "$2"; then
+ _assertion__failed "String: <$1> does not contain: <$2>."
+ fi
+}
+
+assertion__string_does_not_contain() {
+ if system__string_contains "$1" "$2"; then
+ _assertion__failed "String: <$1> contains: <$2>."
+ fi
+}
+
+assertion__string_empty() {
+ if [[ -n "$1" ]]; then
+ _assertion__failed "String: <$1> is not empty."
+ fi
+}
+
+assertion__string_not_empty() {
+ if [[ -z "$1" ]]; then
+ _assertion__failed "The string is empty."
+ fi
+}
+
+assertion__array_contains() {
+ local element=$1
+ shift 1
+ if ! array__contains "${element}" "$@"; then
+ local array_as_string="$(system__pretty_print_array "$@")"
+ _assertion__failed \
+ "Array: <${array_as_string}> does not contain: <${element}>."
+ fi
+}
+
+assertion__array_does_not_contain() {
+ local element=$1
+ shift 1
+ if array__contains "${element}" "$@"; then
+ local array_as_string="$(system__pretty_print_array "$@")"
+ _assertion__failed \
+ "Array: <${array_as_string}> contains: <${element}>."
+ fi
+}
+
+assertion__status_code_is_success() {
+ if (( $1 != ${SBU_SUCCESS_STATUS_CODE} )); then
+ _assertion__failed \
+ "Status code is failure instead of success." "$2"
+ fi
+}
+
+assertion__status_code_is_failure() {
+ if (( $1 == ${SBU_SUCCESS_STATUS_CODE} )); then
+ _assertion__failed \
+ "Status code is success instead of failure." "$2"
+ fi
+}
+
+assertion__successful() {
+ "$@"
+ if (( $? != ${SBU_SUCCESS_STATUS_CODE} )); then
+ _assertion__failed "Command is failing instead of successful."
+ fi
+}
+
+assertion__failing() {
+ "$@"
+ if (( $? == ${SBU_SUCCESS_STATUS_CODE} )); then
+ _assertion__failed "Command is successful instead of failing."
+ fi
+}
+
+_assertion__failed() {
+ local message_to_use="$(_assertion__get_assertion_message_to_use "$1" "$2")"
+ system__print_line "Assertion failed. ${message_to_use}"
+ exit ${SBU_FAILURE_STATUS_CODE}
+}
+
+_assertion__get_assertion_message_to_use() {
+ local message=$1
+ local custom_messsage=$2
+ if [[ -n "${custom_messsage}" ]]; then
+ system__print "${message} ${custom_messsage}"
+ else
+ system__print "${message}"
+ fi
+}
+
+
+mock__make_function_do_nothing() {
+ mock__make_function_call "$1" ":"
+}
+
+mock__make_function_prints() {
+ local function=$1
+ local text=$2
+ eval "${function}() { printf "${text}"; }"
+}
+
+mock__make_function_call() {
+ local function_to_mock=$1
+ local function_to_call=$2
+ shift 2
+ eval "${function_to_mock}() { ${function_to_call} \"\$@\"; }"
+}
+
+
+runner__run_all_test_files() {
+ SBU_BASE_TEST_DIRECTORY=$1
+ reporter__test_files_start_running
+ timer__store_current_time "global_time"
+ results__test_files_start_running
+ _runner__run_all_test_files_with_pattern_in_directory "$1"
+ reporter__test_files_end_running "$(timer__get_time_elapsed "global_time")"
+ runner__tests_are_successful
+}
+
+_runner__run_all_test_files_with_pattern_in_directory() {
+ local file
+ local files
+ array__from_lines files <<< "$(_runner__get_test_files_in_directory "$1")"
+ for file in "${files[@]}"; do
+ file_runner__run_test_file "${file}"
+ done
+}
+
+_runner__get_test_files_in_directory() {
+ local files
+ array__from_lines files <<< "$(find "$1" -name "${SBU_TEST_FILE_PATTERN}" | sort)"
+ if [[ "${SBU_RANDOM_RUN}" == "${SBU_YES}" ]]; then
+ array__from_lines files <<< "$(system__randomize_array "${files[@]}")"
+ fi
+ array__print "${files[@]}"
+}
+
+runner__tests_are_successful() {
+ (( $(results__get_failing_tests_count) == 0 \
+ && $(results__get_skipped_tests_count) == 0 ))
+}
+
+
+file_runner__run_test_file() {
+ local file=$1
+ local public_functions=($(parser__get_public_functions_in_file "${file}"))
+ local test_functions=($(_file_runner__get_test_functions))
+ reporter__test_file_starts_running "${file}" "${#test_functions[@]}"
+ ( source "${file}"
+ _file_runner__run_global_setup_if_exists \
+ && _file_runner__call_all_tests
+ _file_runner__run_global_teardown_if_exists )
+ _file_runner__check_if_global_setup_has_exited
+ reporter__test_file_ends_running
+}
+
+_file_runner__run_all_tests_if_global_setup_is_successful() {
+ _file_runner__call_all_tests
+}
+
+_file_runner__call_all_tests() {
+ local i
+ for (( i=0; i < ${#test_functions[@]}; i++ )); do
+ test_runner__run_test "${test_functions[${i}]}" "${public_functions[@]}"
+ done
+}
+
+_file_runner__skip_all_tests() {
+ local i
+ for (( i=0; i < ${#test_functions[@]}; i++ )); do
+ test_runner__skip_test "${test_functions[${i}]}" "${public_functions[@]}"
+ done
+}
+
+_file_runner__get_test_functions() {
+ local result=()
+ local test_function
+ for test_function in "${public_functions[@]}"; do
+ if _file_runner__function_is_a_test "${test_function}"\
+ && [[ "${test_function}" == ${SBU_TEST_FUNCTION_PATTERN} ]]; then
+ result+=("${test_function}")
+ fi
+ done
+ _file_runner__get_randomized_test_functions_if_needed "${result[@]}"
+}
+
+_file_runner__get_randomized_test_functions_if_needed() {
+ if [[ "${SBU_RANDOM_RUN}" == "${SBU_YES}" ]]; then
+ system__randomize_array "$@"
+ else
+ array__print "$@"
+ fi
+}
+
+_file_runner__run_global_setup_if_exists() {
+ database__put "sbu_current_global_setup_has_failed" "${SBU_YES}"
+ _file_runner__call_function_if_exists "${SBU_GLOBAL_SETUP_FUNCTION_NAME}" \
+ && database__put "sbu_current_global_setup_has_failed" "${SBU_NO}"
+}
+
+_file_runner__run_global_teardown_if_exists() {
+ _file_runner__call_function_if_exists "${SBU_GLOBAL_TEARDOWN_FUNCTION_NAME}"
+}
+
+_file_runner__function_is_a_test() {
+ ! array__contains "$1" \
+ "${SBU_GLOBAL_SETUP_FUNCTION_NAME}" \
+ "${SBU_GLOBAL_TEARDOWN_FUNCTION_NAME}" \
+ "${SBU_SETUP_FUNCTION_NAME}" \
+ "${SBU_TEARDOWN_FUNCTION_NAME}"
+}
+
+_file_runner__call_function_if_exists() {
+ local function=$1
+ shift 1
+ if array__contains "${function}" "${public_functions[@]}"; then
+ "${function}"
+ fi
+}
+
+_file_runner__check_if_global_setup_has_exited() {
+ local has_exited="$(database__get "sbu_current_global_setup_has_failed")"
+ if [[ "${has_exited}" == "${SBU_YES}" ]]; then
+ _file_runner__handle_failure_in_global_setup
+ fi
+}
+
+_file_runner__handle_failure_in_global_setup() {
+ reporter__global_setup_has_failed
+ _file_runner__skip_all_tests
+}
+
+
+parser__get_public_functions_in_file() {
+ _parser__find_functions_in_file "$1" \
+ | _parser__filter_private_functions \
+ | awk '{ print $1 }'
+}
+
+_parser__find_functions_in_file() {
+ grep -o "${SBU_FUNCTION_DECLARATION_REGEX}" "$1" \
+ | _parser__get_function_name_from_declaration
+}
+
+_parser__filter_private_functions() {
+ grep -v "${SBU_PRIVATE_FUNCTION_NAME_REGEX}"
+}
+
+_parser__get_function_name_from_declaration() {
+ sed "s/${SBU_FUNCTION_DECLARATION_REGEX}/\2/"
+}
+
+
+timer__store_current_time() {
+ local id=$1
+ database__put "sbu_beginning_date_$1" "$(system__get_date_in_seconds)"
+}
+
+timer__get_time_elapsed() {
+ local id=$1
+ local beginning_date="$(database__get "sbu_beginning_date_$1")"
+ local ending_date="$(system__get_date_in_seconds)"
+
+ [[ -n "${beginning_date}" ]] \
+ && system__print "$(( ending_date - beginning_date ))" \
+ || system__print "0"
+}
+
+
+results__test_files_start_running() {
+ database__put "sbu_successful_tests_count" "0"
+ database__put "sbu_failing_tests_count" "0"
+ database__put "sbu_skipped_tests_count" "0"
+}
+
+results__get_successful_tests_count() {
+ _results__get_tests_count_of_type "successful"
+}
+
+results__increment_successful_tests() {
+ _results__increment_tests_of_type "successful"
+}
+
+results__get_failing_tests_count() {
+ _results__get_tests_count_of_type "failing"
+}
+
+results__increment_failing_tests() {
+ _results__increment_tests_of_type "failing"
+}
+
+results__get_skipped_tests_count() {
+ _results__get_tests_count_of_type "skipped"
+}
+
+results__increment_skipped_tests() {
+ _results__increment_tests_of_type "skipped"
+}
+
+results__get_total_tests_count() {
+ local successes="$(results__get_successful_tests_count)"
+ local failures="$(results__get_failing_tests_count)"
+ local skipped="$(results__get_skipped_tests_count)"
+ system__print "$(( successes + failures + skipped ))"
+}
+
+_results__get_tests_count_of_type() {
+ local type=$1
+ database__get "sbu_${type}_tests_count"
+}
+
+_results__increment_tests_of_type() {
+ local type=$1
+ local count="$(results__get_${type}_tests_count)"
+ database__put "sbu_${type}_tests_count" "$(( count + 1 ))"
+}
+
+
+test_runner__run_test() {
+ local test_function=$1
+ shift 1
+ reporter__test_starts_running "${test_function}"
+ timer__store_current_time "test_time"
+ (
+ _test_runner__call_setup_if_exists "$@" \
+ && _test_runner__call_test_fonction "${test_function}"
+ local setup_and_test_code=$?
+ _test_runner__call_teardown_if_exists "$@"
+ (( $? == ${SBU_SUCCESS_STATUS_CODE} \
+ && ${setup_and_test_code} == ${SBU_SUCCESS_STATUS_CODE} ))
+ )
+ _test_runner__parse_test_function_result $?
+ reporter__test_ends_running "$(timer__get_time_elapsed "test_time")"
+}
+
+_test_runner__call_test_fonction() {
+ ( "$1" >&${SBU_STANDARD_FD} 2>&${SBU_ERROR_FD} )
+}
+
+_test_runner__call_setup_if_exists() {
+ _test_runner__call_function_if_exits "${SBU_SETUP_FUNCTION_NAME}" "$@"
+}
+
+_test_runner__call_teardown_if_exists() {
+ _test_runner__call_function_if_exits "${SBU_TEARDOWN_FUNCTION_NAME}" "$@"
+}
+
+_test_runner__parse_test_function_result() {
+ if (( $1 == ${SBU_SUCCESS_STATUS_CODE} )); then
+ results__increment_successful_tests
+ reporter__test_has_succeeded
+ else
+ results__increment_failing_tests
+ reporter__test_has_failed
+ fi
+}
+
+_test_runner__call_function_if_exits() {
+ local function=$1
+ shift 1
+ if array__contains "${function}" "$@"; then
+ "${function}"
+ fi
+}
+
+test_runner__skip_test() {
+ local test_function=$1
+ reporter__test_starts_running "${test_function}"
+ results__increment_skipped_tests
+ reporter__test_is_skipped "${test_function}"
+ reporter__test_ends_running 0
+}
+
+
+reporter__test_files_start_running() {
+ _reporter__initialise_file_descriptors
+ reporter__for_each_reporter \
+ _reporter__call_function "test_files_start_running" "$@"
+}
+
+_reporter__initialise_file_descriptors() {
+ eval "exec ${SBU_STANDARD_FD}>&1"
+ eval "exec ${SBU_ERROR_FD}>&2"
+}
+
+reporter__global_setup_has_failed() {
+ reporter__for_each_reporter \
+ _reporter__call_function "global_setup_has_failed" "$@"
+}
+
+reporter__test_file_starts_running() {
+ reporter__for_each_reporter \
+ _reporter__call_function "test_file_starts_running" "$@"
+}
+
+reporter__test_starts_running() {
+ reporter__for_each_reporter \
+ _reporter__call_function "test_starts_running" "$@"
+}
+
+reporter__test_has_succeeded() {
+ reporter__for_each_reporter \
+ _reporter__call_function "test_has_succeeded" "$@"
+}
+
+reporter__test_has_failed() {
+ reporter__for_each_reporter \
+ _reporter__call_function "test_has_failed" "$@"
+}
+
+reporter__test_is_skipped() {
+ reporter__for_each_reporter \
+ _reporter__call_function "test_is_skipped" "$@"
+}
+
+reporter__test_ends_running() {
+ reporter__for_each_reporter \
+ _reporter__call_function "test_ends_running" "$@"
+}
+
+reporter__test_file_ends_running() {
+ reporter__for_each_reporter \
+ _reporter__call_function "test_file_ends_running" "$@"
+}
+
+reporter__test_files_end_running() {
+ reporter__for_each_reporter \
+ _reporter__call_function "test_files_end_running" "$@"
+ _reporter__release_file_descriptors
+}
+
+_reporter__release_file_descriptors() {
+ eval "exec 1>&${SBU_STANDARD_FD} ${SBU_STANDARD_FD}>&-"
+ eval "exec 2>&${SBU_ERROR_FD} ${SBU_ERROR_FD}>&-"
+}
+
+_reporter__call_function() {
+ local function=$1
+ shift 1
+ "${reporter}_reporter__${function}" "$@"
+}
+
+reporter__for_each_reporter() {
+ local reporter
+ for reporter in ${SBU_REPORTERS//${SBU_VALUE_SEPARATOR}/ }; do
+ "$@"
+ done
+}
+
+reporter__print_with_color() {
+ system__print_with_color "$@" >&${SBU_STANDARD_FD}
+}
+
+reporter__print_line() {
+ system__print_line "$@" >&${SBU_STANDARD_FD}
+}
+
+reporter__print_line_with_color() {
+ system__print_line_with_color "$@" >&${SBU_STANDARD_FD}
+}
+
+reporter__print_new_line() {
+ system__print_new_line >&${SBU_STANDARD_FD}
+}
+
+reporter__get_color_code_for_tests_result() {
+ local color_code=${SBU_GREEN_COLOR_CODE}
+ if ! runner__tests_are_successful; then
+ color_code=${SBU_RED_COLOR_CODE}
+ fi
+ system__print "${color_code}"
+}
+
+reporter__get_test_file_relative_name() {
+ system__print "${1#${SBU_BASE_TEST_DIRECTORY}\/}"
+}
+
+
+simple_reporter__test_files_start_running() {
+ :
+}
+
+simple_reporter__test_file_starts_running() {
+ local relative_name="$(reporter__get_test_file_relative_name "$1")"
+ reporter__print_line "[File] ${relative_name}"
+}
+
+simple_reporter__global_setup_has_failed() {
+ reporter__print_line_with_color \
+ "Global setup has failed" ${SBU_YELLOW_COLOR_CODE}
+}
+
+simple_reporter__test_starts_running() {
+ reporter__print_line "[Test] $1"
+}
+
+simple_reporter__test_has_succeeded() {
+ reporter__print_line_with_color "OK" ${SBU_GREEN_COLOR_CODE}
+}
+
+simple_reporter__test_has_failed() {
+ reporter__print_line_with_color "KO" ${SBU_RED_COLOR_CODE}
+}
+
+simple_reporter__test_is_skipped() {
+ reporter__print_line_with_color "Skipped" ${SBU_YELLOW_COLOR_CODE}
+}
+
+simple_reporter__test_ends_running() {
+ :
+}
+
+simple_reporter__test_file_ends_running() {
+ reporter__print_new_line
+}
+
+simple_reporter__test_files_end_running() {
+ local time="in $1s"
+ reporter__print_line "[Results]"
+ local color="$(reporter__get_color_code_for_tests_result)"
+ local total_count="$(_simple_reporter__get_total_count_message)"
+ local failures_count="$(_simple_reporter__get_failures_count_message)"
+ local skipped_count="$(results__get_skipped_tests_count) skipped"
+ local message="${total_count}, ${failures_count}, ${skipped_count} ${time}"
+ reporter__print_line_with_color "${message}" "${color}"
+}
+
+_simple_reporter__get_total_count_message() {
+ local count="$(results__get_total_tests_count)"
+ system__print "${count} test$(_simple_reporter__get_agreement ${count})"
+}
+
+_simple_reporter__get_failures_count_message() {
+ local count="$(results__get_failing_tests_count)"
+ system__print "${count} failure$(_simple_reporter__get_agreement ${count})"
+}
+
+_simple_reporter__get_agreement() {
+ (( $1 > 1 )) \
+ && system__print "s" \
+ || system__print ""
+}
+
+
+dots_reporter__test_files_start_running() {
+ exec 1>/dev/null
+ exec 2>/dev/null
+}
+
+dots_reporter__test_file_starts_running() {
+ :
+}
+
+dots_reporter__global_setup_has_failed() {
+ :
+}
+
+dots_reporter__test_starts_running() {
+ :
+}
+
+dots_reporter__test_has_succeeded() {
+ reporter__print_with_color "." ${SBU_GREEN_COLOR_CODE}
+}
+
+dots_reporter__test_has_failed() {
+ reporter__print_with_color "F" ${SBU_RED_COLOR_CODE}
+}
+
+dots_reporter__test_is_skipped() {
+ reporter__print_with_color "S" ${SBU_YELLOW_COLOR_CODE}
+}
+
+dots_reporter__test_ends_running() {
+ :
+}
+
+dots_reporter__test_file_ends_running() {
+ :
+}
+
+dots_reporter__test_files_end_running() {
+ local color="$(reporter__get_color_code_for_tests_result)"
+ local texte="$(runner__tests_are_successful \
+ && system__print "OK" \
+ || system__print "KO")"
+ reporter__print_line_with_color "${texte}" "${color}"
+}
+
+
+junit_reporter__test_files_start_running() {
+ _junit_reporter__initialise_report_with \
+ ""
+ _junit_reporter__write_line_to_report ""
+}
+
+junit_reporter__test_file_starts_running() {
+ local file_name=$1
+ local test_count=$2
+ local suite_name="$(_junit_reporter__get_suite_name "${file_name}")"
+ database__put "sbu_current_suite_name" "${suite_name}"
+ _junit_reporter__write_line_to_report \
+ "  <testsuite name=\"${suite_name}\" tests=\"${test_count}\">"
+ _junit_reporter__delete_all_outputs_lines "suite"
+ _junit_reporter__redirect_outputs_to_database "suite"
+}
+
+junit_reporter__global_setup_has_failed() {
+ :
+}
+
+junit_reporter__test_starts_running() {
+ local suite_name="$(database__get "sbu_current_suite_name")"
+ local test_name="$(xml__encode_text "$1")"
+ _junit_reporter__write_line_to_report \
+ "    <testcase classname=\"${suite_name}\" name=\"${test_name}\" time=\"\${sbu_current_test_time}\">"
+ _junit_reporter__delete_all_outputs_lines "test"
+ _junit_reporter__redirect_outputs_to_database "test"
+}
+
+junit_reporter__test_has_succeeded() {
+ :
+}
+
+junit_reporter__test_has_failed() {
+ _junit_reporter__write_line_to_report " "
+ _junit_reporter__write_line_to_report " "
+}
+
+junit_reporter__test_is_skipped() {
+ _junit_reporter__write_line_to_report " "
+ _junit_reporter__write_line_to_report " "
+}
+
+junit_reporter__test_ends_running() {
+ _junit_reporter__redirect_outputs_to_database "suite"
+ _junit_reporter__write_time_in_current_test_case_tag_in_report "$1"
+ _junit_reporter__flush_all_outputs_to_report_if_any "test"
+ _junit_reporter__write_line_to_report " "
+}
+
+_junit_reporter__write_time_in_current_test_case_tag_in_report() {
+ local test_time=$1
+ local report_content=$(cat "${SBU_JUNIT_REPORTER_OUTPUT_FILE}")
+ local content_with_time="$(system__substitute_variable \
+ "${report_content}" "sbu_current_test_time" "${test_time}")"
+ system__print_line \
+ "${content_with_time}" > "${SBU_JUNIT_REPORTER_OUTPUT_FILE}"
+}
+
+junit_reporter__test_file_ends_running() {
+ _junit_reporter__flush_all_outputs_to_report_if_any "suite"
+ _junit_reporter__write_line_to_report " "
+ database__put "sbu_current_suite_name" ""
+}
+
+junit_reporter__test_files_end_running() {
+ _junit_reporter__write_line_to_report ""
+}
+
+_junit_reporter__get_suite_name() {
+ local relative_name="$(reporter__get_test_file_relative_name "$1")"
+ local dots_replaced_by_underscores="${relative_name//./_}"
+ local slashes_replaced_by_dots="${dots_replaced_by_underscores//\//.}"
+ xml__encode_text "${slashes_replaced_by_dots}"
+}
+
+_junit_reporter__initialise_report_with() {
+ system__print_line "$1" > "${SBU_JUNIT_REPORTER_OUTPUT_FILE}"
+}
+
+_junit_reporter__write_line_to_report() {
+ system__print_line "$1" >> "${SBU_JUNIT_REPORTER_OUTPUT_FILE}"
+}
+
+_junit_reporter__redirect_outputs_to_database() {
+ local scope=$1
+ exec 1>>\
+ "$(database__get_descriptor "sbu_current_${scope}_standard_ouputs_lines")"
+ exec 2>>\
+ "$(database__get_descriptor "sbu_current_${scope}_error_ouputs_lines")"
+}
+
+_junit_reporter__delete_all_outputs_lines() {
+ database__put "sbu_current_$1_standard_ouputs_lines"
+ database__put "sbu_current_$1_error_ouputs_lines"
+}
+
+_junit_reporter__flush_all_outputs_to_report_if_any() {
+ _junit_reporter__flush_outputs_to_report_if_any "$1" "standard"
+ _junit_reporter__flush_outputs_to_report_if_any "$1" "error"
+}
+
+_junit_reporter__flush_outputs_to_report_if_any() {
+ local scope=$1
+ local outputs_type=$2
+ local key="sbu_current_${scope}_${outputs_type}_ouputs_lines"
+ local outputs="$(database__get "${key}")"
+ if [[ -n "${outputs}" ]]; then
+ _junit_reporter__write_outputs_to_report \
+ "${scope}" "${outputs_type}" "${outputs}"
+ database__put "${key}" ""
+ fi
+}
+
+_junit_reporter__write_outputs_to_report() {
+ local scope=$1
+ local outputs_type=$2
+ local outputs=$3
+ local tag="$(_junit_reporter__get_tag_for_outputs_type "${outputs_type}")"
+ local indentation="$(_junit_reporter__get_indentation_for_scope "${scope}")"
+ _junit_reporter__write_line_to_report "${indentation}<${tag}>"
+ _junit_reporter__write_line_to_report "$(xml__encode_text "${outputs}")"
+ _junit_reporter__write_line_to_report "${indentation}${tag}>"
+}
+
+_junit_reporter__get_tag_for_outputs_type() {
+ [[ "$1" == "standard" ]] \
+ && system__print "system-out" \
+ || system__print "system-err"
+}
+
+_junit_reporter__get_indentation_for_scope() {
+ [[ "$1" == "suite" ]] \
+ && system__print " " \
+ || system__print " "
+}
+
+
+xml__encode_text() {
+ local encoded=${1//\&/\&amp\;}
+ encoded=${encoded//\</\&lt\;}
+ encoded=${encoded//\>/\&gt\;}
+ encoded=${encoded//\"/\&quot\;}
+ encoded=${encoded//\'/\&apos\;}
+ system__print "${encoded}"
+}
+
+
+database__initialise() {
+ _SBU_DB_TOKEN="$(system__random)"
+ _database__ensure_directory_exists
+}
+
+database__release() {
+ rm -rf "$(_database__get_dir)"
+}
+
+database__put() {
+ _database__ensure_directory_exists
+ system__print "$2" > "$(_database__get_dir)/$1"
+}
+
+database__post() {
+ _database__ensure_directory_exists
+ system__print "$2" >> "$(_database__get_dir)/$1"
+}
+
+database__post_line() {
+ _database__ensure_directory_exists
+ system__print_line "$2" >> "$(_database__get_dir)/$1"
+}
+
+database__put_variable() {
+ _database__ensure_directory_exists
+ database__put "$1" "${!1}"
+}
+
+database__get() {
+ [[ -e "$(_database__get_dir)/$1" ]] && cat "$(_database__get_dir)/$1"
+}
+
+database__get_descriptor() {
+ system__print "$(_database__get_dir)/$1"
+}
+
+_database__ensure_directory_exists() {
+ mkdir -p "$(_database__get_dir)"
+}
+
+_database__get_dir() {
+ system__print "${SBU_TEMP_DIR}/database/${_SBU_DB_TOKEN}"
+}
+
+
+system__get_string_or_default() {
+ [[ -n "$1" ]] \
+ && system__print "$1" \
+ || system__print "$2"
+}
+
+system__get_date_in_seconds() {
+ date +%s
+}
+
+system__print_line_with_color() {
+ system__print_with_color "$@"
+ system__print_new_line
+}
+
+system__print_with_color() {
+ if [[ "${SBU_USE_COLORS}" == "${SBU_YES}" ]]; then
+ printf "$2$1${SBU_DEFAULT_COLOR_CODE}"
+ else
+ system__print "$1"
+ fi
+}
+
+system__print_line() {
+ system__print "$1"
+ system__print_new_line
+}
+
+system__print() {
+ printf "%s" "$1"
+}
+
+system__print_new_line() {
+ printf "\n"
+}
+
+array__contains() {
+ local value=$1
+ shift 1
+ local i
+ for (( i=1; i <= $#; i++ )); do
+ if [[ "${!i}" == "${value}" ]]; then
+ return ${SBU_SUCCESS_STATUS_CODE}
+ fi
+ done
+ return ${SBU_FAILURE_STATUS_CODE}
+}
+
+array__from_lines() {
+ local IFS=$'\n'
+ eval "$1=(\$(</dev/stdin))"
+}
+
+system__randomize_array() {
+ local copy=("$@")
+ while (( ${#copy[@]} > 0 )); do
+ local random_index=$(( $(system__random) % ${#copy[@]} ))
+ system__print_line "${copy[${random_index}]}"
+ unset copy[${random_index}]
+ copy=("${copy[@]}")
+ done
+}
+
+system__random() {
+ system__print "${RANDOM}"
+}
+
+system__substitute_variable() {
+ local string=$1
+ local key="\$\{$2\}"
+ local value=$3
+ printf "%s" "${string//${key}/${value}}"
+}
+
+
+main__main() {
+ configuration__load
+ _main__initialise
+ local parsed_arguments=0
+ _main__parse_arguments "$@"
+ shift ${parsed_arguments}
+ _main__assert_only_one_argument_left $#
+ _main__assert_reporters_are_known
+ SBU_BASE_TEST_DIRECTORY=$1
+
+ if [[ "${SBU_NO_RUN}" != "${SBU_YES}" ]]; then
+ runner__run_all_test_files "$1"
+ return $?
+ fi
+}
+
+_main__initialise() {
+ database__initialise
+ trap _main__release EXIT
+}
+
+_main__release() {
+ database__release
+}
+
+_main__parse_arguments() {
+ local argument
+ for argument in "$@"; do
+ case "${argument}" in
+ -a|--api-cheat-sheet)
+ _main__print_api_cheat_sheet_and_exit
+ ;;
+ -c=*|--colors=*)
+ SBU_USE_COLORS="${argument#*=}"
+ (( parsed_arguments++ ))
+ ;;
+ -d=*|--random-run=*)
+ SBU_RANDOM_RUN="${argument#*=}"
+ (( parsed_arguments++ ))
+ ;;
+ -h|--help)
+ _main__print_full_usage
+ exit ${SBU_SUCCESS_STATUS_CODE}
+ ;;
+ -f=*|--file-pattern=*)
+ SBU_TEST_FILE_PATTERN="${argument#*=}"
+ (( parsed_arguments++ ))
+ ;;
+ --no-run)
+ SBU_NO_RUN="${SBU_YES}"
+ (( parsed_arguments++ ))
+ ;;
+ -o=*|--output-file=*)
+ SBU_JUNIT_REPORTER_OUTPUT_FILE="${argument#*=}"
+ (( parsed_arguments++ ))
+ ;;
+ -t=*|--test-pattern=*)
+ SBU_TEST_FUNCTION_PATTERN="${argument#*=}"
+ (( parsed_arguments++ ))
+ ;;
+ -r=*|--reporters=*)
+ SBU_REPORTERS="${argument#*=}"
+ (( parsed_arguments++ ))
+ ;;
+ -*|--*)
+ _main__print_illegal_option "${argument}"
+ _main__print_usage_and_exit_with_code ${SBU_FAILURE_STATUS_CODE}
+ ;;
+ esac
+ done
+}
+
+_main__assert_reporters_are_known() {
+ reporter__for_each_reporter _main__fail_if_reporter_unknown
+}
+
+_main__fail_if_reporter_unknown() {
+ if ! array__contains "${reporter}" "simple" "dots" "junit"; then
+ system__print_line \
+ "$(_main__get_script_name): unknown reporter <${reporter}>"
+ exit ${SBU_FAILURE_STATUS_CODE}
+ fi
+}
+
+_main__print_illegal_option() {
+ local option="${1%=*}"
+ option="${option#-}"
+ option="${option#-}"
+ system__print_line "$(_main__get_script_name): illegal option -- ${option}"
+}
+
+_main__assert_only_one_argument_left() {
+ if (( $1 > 1 )); then
+ system__print_line "$(_main__get_script_name): only one path is allowed"
+ _main__print_usage_and_exit_with_code ${SBU_FAILURE_STATUS_CODE}
+ fi
+}
+
+_main__get_script_name() {
+ basename "${BASH_SOURCE[0]}"
+}
+
+_main__print_usage_and_exit_with_code() {
+ _main__print_usage
+ exit $1
+}
+
+_main__print_full_usage() {
+ _main__print_usage
+ local script="$(_main__get_script_name)"
+ system__print_new_line
+ system__print_line "\
+[options]
+ -a, --api-cheat-sheet
+ print api cheat sheet like assertions
+ -c, --colors=${SBU_YES} or ${SBU_NO}
+ tests output with colors or no
+ -d, --random-run=${SBU_YES} or ${SBU_NO}
+ tests files and functions randomly run or no
+ -f, --file-pattern=<pattern>
+ pattern to filter test files
+ -h, --help
+ print usage
+ -o, --output-file=<file>
+ output file for JUnit reporter
+ -r, --reporters=<reporters>
+ comma-separated reporters (simple, dots or junit)
+ -t, --test-pattern=<pattern>
+ pattern to filter test functions in files
+
+[examples]
+ ${script} .
+ run all tests in current directory
+ ${script} -f=*test.sh sources/test
+ run all test files ending with test.sh in sources/test"
+}
+
+_main__print_usage() {
+ system__print_line "\
+usage: $(_main__get_script_name) [options] path
+ run all tests in path"
+}
+
+_main__print_api_cheat_sheet_and_exit() {
+ system__print_line "\
+[assertions]
+ assertion__equal (value, other)
+ -> assert that <value> is equal to <other>
+ assertion__different (value, other)
+ -> assert that <value> is different from <other>
+ assertion__string_contains (string, substring)
+ -> assert that <string> contains <substring>
+ assertion__string_does_not_contain (string, substring)
+ -> assert that <string> does not contain <substring>
+ assertion__string_empty (string)
+ -> assert that <string> is empty
+ assertion__string_not_empty (string)
+ -> assert that <string> is not empty
+ assertion__array_contains (element, array[0], array[1], ...)
+ -> assert that the <array> contains the <element>
+ assertion__array_does_not_contain (element, array elements...)
+ -> assert that the <array> does not contain the <element>
+ assertion__successful (command)
+ -> assert that the <command> is successful
+ assertion__failing (command)
+ -> assert that the <command> is failing
+ assertion__status_code_is_success (code)
+ -> assert that the status <code> is 0
+ assertion__status_code_is_failure (code)
+ -> assert that the status <code> is not 0
+
+[special functions]
+ ${SBU_GLOBAL_SETUP_FUNCTION_NAME}
+ -> Executed before all tests in a file
+ ${SBU_GLOBAL_TEARDOWN_FUNCTION_NAME}
+ -> Executed after all tests in a file
+ ${SBU_SETUP_FUNCTION_NAME}
+ -> Executed before each test in a file
+ ${SBU_TEARDOWN_FUNCTION_NAME}
+ -> Executed after each test in a file
+
+[mocks]
+ mock__make_function_do_nothing (function_to_mock)
+ -> make function do nothing
+ mock__make_function_prints (function_to_mock, message)
+ -> make function prints a message
+ mock__make_function_call (function_to_mock, function_to_call)
+ -> make function call another function"
+ exit ${SBU_SUCCESS_STATUS_CODE}
+}
+
+
+main__main "$@"
diff --git a/tests/templates/inventory-gce.j2 b/tests/templates/inventory-gce.j2
new file mode 100644
index 000000000..72ad469de
--- /dev/null
+++ b/tests/templates/inventory-gce.j2
@@ -0,0 +1,20 @@
+node1 ansible_ssh_host={{gce.instance_data[0].public_ip}}
+node2 ansible_ssh_host={{gce.instance_data[1].public_ip}}
+node3 ansible_ssh_host={{gce.instance_data[2].public_ip}}
+
+[kube-master]
+node1
+node2
+
+[kube-node]
+node1
+node2
+node3
+
+[etcd]
+node1
+node2
+
+[k8s-cluster:children]
+kube-node
+kube-master
diff --git a/tests/templates/inventory.ini.j2 b/tests/templates/inventory.ini.j2
new file mode 100644
index 000000000..d6358efb9
--- /dev/null
+++ b/tests/templates/inventory.ini.j2
@@ -0,0 +1,20 @@
+node1 ansible_ssh_host={{ec2.instances[0].public_ip}} ansible_ssh_user=admin
+node2 ansible_ssh_host={{ec2.instances[1].public_ip}} ansible_ssh_user=admin
+node3 ansible_ssh_host={{ec2.instances[2].public_ip}} ansible_ssh_user=admin
+
+[kube-master]
+node1
+node2
+
+[kube-node]
+node1
+node2
+node3
+
+[etcd]
+node1
+node2
+
+[k8s-cluster:children]
+kube-node
+kube-master
diff --git a/tests/testcases/010_check-apiserver.yml b/tests/testcases/010_check-apiserver.yml
new file mode 100644
index 000000000..7beed06fb
--- /dev/null
+++ b/tests/testcases/010_check-apiserver.yml
@@ -0,0 +1,12 @@
+---
+- hosts: kube-master
+
+ tasks:
+ - name: Check the API servers are responding
+ local_action:
+ module: uri
+ url: https://{{ansible_ssh_host}}/api/v1
+ user: kube
+ password: changeme
+ validate_certs: no
+ status_code: 200
diff --git a/tests/testcases/020_check-create-pod.yml b/tests/testcases/020_check-create-pod.yml
new file mode 100644
index 000000000..219fbb985
--- /dev/null
+++ b/tests/testcases/020_check-create-pod.yml
@@ -0,0 +1,13 @@
+---
+- hosts: node1
+
+ tasks:
+ - name: Run a replica controller composed of 2 pods
+ shell: "kubectl run test --image=busybox --replicas=2 --command -- tail -f /dev/null"
+
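+ # Poll kubectl until both replicas report phase "Running" (up to 24 retries, 5 seconds apart)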
+ - name: Pods are running
+ shell: "kubectl get pods --no-headers -o json"
+ register: run_pods_log
+ until: (run_pods_log.stdout | from_json)['items'] | map(attribute = 'status.phase') | join(',') == "Running,Running"
+ retries: 24
+ delay: 5
diff --git a/tests/testcases/030_check-network.yml b/tests/testcases/030_check-network.yml
new file mode 100644
index 000000000..596ce0ce1
--- /dev/null
+++ b/tests/testcases/030_check-network.yml
@@ -0,0 +1,21 @@
+---
+- hosts: node1
+
+ tasks:
+
+ - name: Get pod names
+ shell: "kubectl get pods -o json"
+ register: pods
+
+ - set_fact:
+ pod_names: "{{ (pods.stdout | from_json)['items'] | map(attribute = 'metadata.name') | list }}"
+ pod_ips: "{{ (pods.stdout | from_json)['items'] | map(attribute = 'status.podIP') | list }}"
+
+ - name: Check that pod IPs are in the correct network
+ assert:
+ that: item | ipaddr(kube_pods_subnet)
+ with_items: pod_ips
+
+
+ - name: Ping between pods is working
+ shell: "kubectl exec {{pod_names[0]}} -- ping -c 4 {{ pod_ips[1] }}"