Fix uploading CI logs to GCS

* Use gsutil to configure the logs bucket's lifecycle, which the
  gc_storage module does not support yet
  (see https://cloud.google.com/storage/docs/gsutil_install);
  a usage sketch follows this list.
* Generate unique bucket names, extended with the build's OS type info as well.
* Ignore boto-related errors for the gc_storage module.
* Use no_log where needed to suppress noise/secrets in the output.
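
For reference, a minimal sketch of the manual equivalent of the lifecycle
step, assuming gsutil is installed and authenticated, and that a bucket
named "my-ci-logs" exists (the name is illustrative; the playbook below
generates its own):

    # Apply the lifecycle config added by this commit, which deletes
    # objects older than 2 days, then read it back to verify.
    gsutil lifecycle set gcs_life.json gs://my-ci-logs
    gsutil lifecycle get gs://my-ci-logs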

Signed-off-by: Bogdan Dobrelya <bdobrelia@mirantis.com>
Bogdan Dobrelya 2016-11-07 16:00:49 +01:00
parent 6b0d26ddf0
commit d197130148
4 changed files with 60 additions and 23 deletions

@@ -149,13 +149,16 @@ script:
   - $HOME/.local/bin/ansible-playbook -i inventory/inventory.ini -u $SSH_USER -e ansible_ssh_user=$SSH_USER $SSH_ARGS -b --become-user=root tests/testcases/030_check-network.yml $LOG_LEVEL
 after_failure:
-  - $HOME/.local/bin/ansible-playbook -i inventory/inventory.ini -u $SSH_USER -e ansible_ssh_user=$SSH_USER $SSH_ARGS -b --become-user=root scripts/collect-info.yaml >/dev/null
+  - $HOME/.local/bin/ansible-playbook -i inventory/inventory.ini -u $SSH_USER -e ansible_ssh_user=$SSH_USER $SSH_ARGS -b --become-user=root scripts/collect-info.yaml
   - >
     $HOME/.local/bin/ansible-playbook tests/cloud_playbooks/upload-logs-gcs.yml -i "localhost," -c local
     -e test_id=${TEST_ID}
     -e kube_network_plugin=${KUBE_NETWORK_PLUGIN}
     -e gs_key=${GS_ACCESS_KEY_ID}
     -e gs_skey=${GS_SECRET_ACCESS_KEY}
+    -e ostype=${CLOUD_IMAGE}
+    -e commit=${TRAVIS_COMMIT}
+    -e pr=${TRAVIS_PULL_REQUEST}
 after_script:
   - >

@@ -45,6 +45,7 @@
       register: output
       ignore_errors: true
       with_items: "{{commands}}"
+      no_log: True
     - debug: var=item
       with_items: "{{output.results}}"

@@ -0,0 +1,9 @@
+{
+  "rule":
+  [
+    {
+      "action": {"type": "Delete"},
+      "condition": {"age": 2}
+    }
+  ]
+}

@@ -3,41 +3,65 @@
   become: false
   gather_facts: no
   vars:
     expire: 72000
   tasks:
+    - name: Generate uniq bucket name prefix
+      shell: date +%s | sha256sum | base64 | head -c 32
+      register: out
     - name: replace_test_id
       set_fact:
-        test_name: "{{ test_id | regex_replace('\\.', '-') }}"
+        test_name: "kargo-{{ commit }}-{{ pr }}-{{ out.stdout|lower }}-{{ test_id | regex_replace('\\.', '-') }}"
     - name: Create a bucket
       gc_storage:
         bucket: "{{ test_name }}"
         mode: create
         expiration: "{{ expire }}"
         permission: private
-        gs_access_key: gs_key
-        gs_secret_key: gs_skey
+        gs_access_key: "{{ gs_key }}"
+        gs_secret_key: "{{ gs_skey }}"
+      no_log: True
+    - name: Download gsutil cp installer
+      get_url:
+        url: https://dl.google.com/dl/cloudsdk/channels/rapid/install_google_cloud_sdk.bash
+        dest: /tmp/gcp-installer.sh
+    - name: Get gsutil tool
+      script: /tmp/gcp-installer.sh
+      environment:
+        CLOUDSDK_CORE_DISABLE_PROMPTS: 1
+      no_log: True
+    - name: Create a lifecycle template for the bucket
+      file: src=gcs_life.json path=/tmp/gcs_life.json
+    - name: Hack the boto config for GCS access keys
+      lineinfile:
+        dest: .boto
+        line: "gs_access_key_id = {{ gs_key }}"
+        regexp: "^#gs_access_key_id = .*$"
+      no_log: True
+    - name: Hack the boto config for GCS secret access keys
+      lineinfile:
+        dest: .boto
+        line: "gs_secret_access_key = {{ gs_skey }}"
+        regexp: "^#gs_secret_access_key = .*$"
+      no_log: True
+    - name: Apply the lifecycle rules
+      shell: bash google-cloud-sdk/bin/gsutil lifecycle set /tmp/gcs_life.json gs://{{ test_name }}
+      environment:
+        BOTO_CONFIG: .boto
     - name: Upload collected diagnostic info
       gc_storage:
         bucket: "{{ test_name }}"
         mode: put
         permission: private
         expiration: "{{ expire }}"
-        object: "build-{{ test_name }}-{{ kube_network_plugin }}-logs.tar.gz"
+        object: "build-{{ ostype }}-{{ kube_network_plugin }}-logs.tar.gz"
         src: logs.tar.gz
-        gs_access_key: gs_key
-        gs_secret_key: gs_skey
-    - name: Get a link
-      gc_storage:
-        bucket: "{{ test_name }}"
-        object: "build-{{ test_name }}-{{ kube_network_plugin }}-logs.tar.gz"
-        mode: get_url
-        register: url
-        gs_access_key: gs_key
-        gs_secret_key: gs_skey
-    - debug: msg="Download URL {{get_url}}"
+        headers: '{"Content-Encoding": "x-gzip"}'
+        gs_access_key: "{{ gs_key }}"
+        gs_secret_key: "{{ gs_skey }}"
+      ignore_errors: true