dc-deployments-automation/roles/bitbucket_dataset_restore/tasks/main.yml
---
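# Restore a Bitbucket dataset: download an archive into the shared home's
# migration/import directory, start an import via the migration REST API,
# and poll until the import reports COMPLETED.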
- name: Force all notified handlers to run at this point, not waiting for normal sync points
  ansible.builtin.meta: flush_handlers
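
# Wait for Bitbucket to be listening and for the shared-home import directory
# to exist before the dataset is downloaded into it.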
- name: Wait for port 7990 to be up
  ansible.builtin.wait_for:
    port: 7990
    delay: 60

- name: Wait for the shared-home import directory to become available
  ansible.builtin.wait_for:
    path: "{{ atl_product_home_shared }}/data/migration/import"
    delay: 60

- name: Download the Bitbucket dataset archive from S3
  ansible.builtin.get_url:
    url: "{{ atl_bitbucket_dataset_url }}"
    dest: "{{ atl_product_home_shared }}/data/migration/import"
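
# 'creates' makes this task skip the API call when lock.file already exists,
# so a re-run of the role does not start a second import.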
- name: Invoke Import API
  ansible.builtin.uri:
    url: "{{ atl_bitbucket_baseurl }}/rest/api/1.0/migration/imports"
    user: admin
    password: "{{ atl_bitbucket_admin_password }}"
    method: POST
    follow_redirects: yes
    force_basic_auth: yes
    creates: "{{ atl_product_home_shared }}/data/migration/import/lock.file"
    body:
      archivePath: "{{ atl_bitbucket_dataset_url | basename }}"
    body_format: json
    return_content: yes
  register: output
  until: output.status == 200
  retries: 6
  delay: 15
  failed_when: output is defined and output.json is defined and output.json.state != 'INITIALISING'
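
# Poll the import job until it reports COMPLETED (up to 30 retries, 10s apart).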
- name: Get import status
  ansible.builtin.uri:
    url: "{{ atl_bitbucket_baseurl }}/rest/api/1.0/migration/imports/{{ output.json.id }}"
    user: admin
    password: "{{ atl_bitbucket_admin_password }}"
    method: GET
    force_basic_auth: yes
    body_format: json
    return_content: yes
  register: import_status
  until: import_status is defined and import_status.json is defined and import_status.json.state == 'COMPLETED'
  retries: 30
  delay: 10
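
# Touch the lock file checked by the 'creates' guard above, marking the import
# as done for subsequent runs.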
- name: Create lock file
  ansible.builtin.file:
    path: "{{ atl_product_home_shared }}/data/migration/import/lock.file"
    state: touch
  when: import_status.json.state == 'COMPLETED'