Fix uploading CI logs to GCS
* Use gsutil to configure the logs bucket's lifecycle, which is not in the gc_storage module yet. (See https://cloud.google.com/storage/docs/gsutil_install). * Generate unique bucket names extended with the build's OS type info as well. * Ignore boto related errors for the gc_storage module. * Use no_log when needed to suppress noise/secrets in output Signed-off-by: Bogdan Dobrelya <bdobrelia@mirantis.com>
This commit is contained in:
parent
6b0d26ddf0
commit
d197130148
4 changed files with 60 additions and 23 deletions
|
@ -149,13 +149,16 @@ script:
|
||||||
- $HOME/.local/bin/ansible-playbook -i inventory/inventory.ini -u $SSH_USER -e ansible_ssh_user=$SSH_USER $SSH_ARGS -b --become-user=root tests/testcases/030_check-network.yml $LOG_LEVEL
|
- $HOME/.local/bin/ansible-playbook -i inventory/inventory.ini -u $SSH_USER -e ansible_ssh_user=$SSH_USER $SSH_ARGS -b --become-user=root tests/testcases/030_check-network.yml $LOG_LEVEL
|
||||||
|
|
||||||
after_failure:
|
after_failure:
|
||||||
- $HOME/.local/bin/ansible-playbook -i inventory/inventory.ini -u $SSH_USER -e ansible_ssh_user=$SSH_USER $SSH_ARGS -b --become-user=root scripts/collect-info.yaml >/dev/null
|
- $HOME/.local/bin/ansible-playbook -i inventory/inventory.ini -u $SSH_USER -e ansible_ssh_user=$SSH_USER $SSH_ARGS -b --become-user=root scripts/collect-info.yaml
|
||||||
- >
|
- >
|
||||||
$HOME/.local/bin/ansible-playbook tests/cloud_playbooks/upload-logs-gcs.yml -i "localhost," -c local
|
$HOME/.local/bin/ansible-playbook tests/cloud_playbooks/upload-logs-gcs.yml -i "localhost," -c local
|
||||||
-e test_id=${TEST_ID}
|
-e test_id=${TEST_ID}
|
||||||
-e kube_network_plugin=${KUBE_NETWORK_PLUGIN}
|
-e kube_network_plugin=${KUBE_NETWORK_PLUGIN}
|
||||||
-e gs_key=${GS_ACCESS_KEY_ID}
|
-e gs_key=${GS_ACCESS_KEY_ID}
|
||||||
-e gs_skey=${GS_SECRET_ACCESS_KEY}
|
-e gs_skey=${GS_SECRET_ACCESS_KEY}
|
||||||
|
-e ostype=${CLOUD_IMAGE}
|
||||||
|
-e commit=${TRAVIS_COMMIT}
|
||||||
|
-e pr=${TRAVIS_PULL_REQUEST}
|
||||||
|
|
||||||
after_script:
|
after_script:
|
||||||
- >
|
- >
|
||||||
|
|
|
@ -45,6 +45,7 @@
|
||||||
register: output
|
register: output
|
||||||
ignore_errors: true
|
ignore_errors: true
|
||||||
with_items: "{{commands}}"
|
with_items: "{{commands}}"
|
||||||
|
no_log: True
|
||||||
|
|
||||||
- debug: var=item
|
- debug: var=item
|
||||||
with_items: "{{output.results}}"
|
with_items: "{{output.results}}"
|
||||||
|
|
9
tests/cloud_playbooks/files/gcs_life.json
Normal file
9
tests/cloud_playbooks/files/gcs_life.json
Normal file
|
@ -0,0 +1,9 @@
|
||||||
|
{
|
||||||
|
"rule":
|
||||||
|
[
|
||||||
|
{
|
||||||
|
"action": {"type": "Delete"},
|
||||||
|
"condition": {"age": 2}
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
|
@ -3,41 +3,65 @@
|
||||||
become: false
|
become: false
|
||||||
gather_facts: no
|
gather_facts: no
|
||||||
|
|
||||||
vars:
|
|
||||||
expire: 72000
|
|
||||||
|
|
||||||
tasks:
|
tasks:
|
||||||
|
- name: Generate uniq bucket name prefix
|
||||||
|
shell: date +%s | sha256sum | base64 | head -c 32
|
||||||
|
register: out
|
||||||
|
|
||||||
- name: replace_test_id
|
- name: replace_test_id
|
||||||
set_fact:
|
set_fact:
|
||||||
test_name: "{{ test_id | regex_replace('\\.', '-') }}"
|
test_name: "kargo-{{ commit }}-{{ pr }}-{{ out.stdout|lower }}-{{ test_id | regex_replace('\\.', '-') }}"
|
||||||
|
|
||||||
- name: Create a bucket
|
- name: Create a bucket
|
||||||
gc_storage:
|
gc_storage:
|
||||||
bucket: "{{ test_name }}"
|
bucket: "{{ test_name }}"
|
||||||
mode: create
|
mode: create
|
||||||
expiration: "{{ expire }}"
|
|
||||||
permission: private
|
permission: private
|
||||||
gs_access_key: gs_key
|
gs_access_key: "{{ gs_key }}"
|
||||||
gs_secret_key: gs_skey
|
gs_secret_key: "{{ gs_skey }}"
|
||||||
|
no_log: True
|
||||||
|
|
||||||
|
- name: Download gsutil cp installer
|
||||||
|
get_url:
|
||||||
|
url: https://dl.google.com/dl/cloudsdk/channels/rapid/install_google_cloud_sdk.bash
|
||||||
|
dest: /tmp/gcp-installer.sh
|
||||||
|
|
||||||
|
- name: Get gsutil tool
|
||||||
|
script: /tmp/gcp-installer.sh
|
||||||
|
environment:
|
||||||
|
CLOUDSDK_CORE_DISABLE_PROMPTS: 1
|
||||||
|
no_log: True
|
||||||
|
|
||||||
|
- name: Create a lifecycle template for the bucket
|
||||||
|
file: src=gcs_life.json path=/tmp/gcs_life.json
|
||||||
|
|
||||||
|
- name: Hack the boto config for GCS access keys
|
||||||
|
lineinfile:
|
||||||
|
dest: .boto
|
||||||
|
line: "gs_access_key_id = {{ gs_key }}"
|
||||||
|
regexp: "^#gs_access_key_id = .*$"
|
||||||
|
no_log: True
|
||||||
|
|
||||||
|
- name: Hack the boto config for GCS secret access keys
|
||||||
|
lineinfile:
|
||||||
|
dest: .boto
|
||||||
|
line: "gs_secret_access_key = {{ gs_skey }}"
|
||||||
|
regexp: "^#gs_secret_access_key = .*$"
|
||||||
|
no_log: True
|
||||||
|
|
||||||
|
- name: Apply the lifecycle rules
|
||||||
|
shell: bash google-cloud-sdk/bin/gsutil lifecycle set /tmp/gcs_life.json gs://{{ test_name }}
|
||||||
|
environment:
|
||||||
|
BOTO_CONFIG: .boto
|
||||||
|
|
||||||
- name: Upload collected diagnostic info
|
- name: Upload collected diagnostic info
|
||||||
gc_storage:
|
gc_storage:
|
||||||
bucket: "{{ test_name }}"
|
bucket: "{{ test_name }}"
|
||||||
mode: put
|
mode: put
|
||||||
permission: private
|
permission: private
|
||||||
expiration: "{{ expire }}"
|
object: "build-{{ ostype }}-{{ kube_network_plugin }}-logs.tar.gz"
|
||||||
object: "build-{{ test_name }}-{{ kube_network_plugin }}-logs.tar.gz"
|
|
||||||
src: logs.tar.gz
|
src: logs.tar.gz
|
||||||
gs_access_key: gs_key
|
headers: '{"Content-Encoding": "x-gzip"}'
|
||||||
gs_secret_key: gs_skey
|
gs_access_key: "{{ gs_key }}"
|
||||||
|
gs_secret_key: "{{ gs_skey }}"
|
||||||
- name: Get a link
|
ignore_errors: true
|
||||||
gc_storage:
|
|
||||||
bucket: "{{ test_name }}"
|
|
||||||
object: "build-{{ test_name }}-{{ kube_network_plugin }}-logs.tar.gz"
|
|
||||||
mode: get_url
|
|
||||||
register: url
|
|
||||||
gs_access_key: gs_key
|
|
||||||
gs_secret_key: gs_skey
|
|
||||||
|
|
||||||
- debug: msg="Download URL {{get_url}}"
|
|
||||||
|
|
Loading…
Reference in a new issue