
Merge pull request #2 from sitewards/ADHOC-add-azure-blob-support
Adhoc add azure blob support
aboritskiy authored Aug 26, 2019
2 parents 9c9bddd + defeed7 commit d16cb8b
Showing 7 changed files with 94 additions and 30 deletions.
14 changes: 14 additions & 0 deletions defaults/main.yml
@@ -2,6 +2,11 @@
 ## Where values are commented out, it means that you must populate them in your playbooks. They are necessarily
 ## secret, or specific to the playbook.

+## The name of the database storage provider (AWS S3 or Azure Blob Storage)
+## (Required)
+import_db_storage_provider: "aws-s3"
+#import_db_storage_provider: "azure-blob"
+
 ## The S3 bucket that contains the database dump to download.
 ## (Required)
 # import_db_s3_bucket:
@@ -14,6 +19,12 @@
 ## (Required)
 # import_db_s3_region:

+## The name of the Azure Blob container from which database snapshots are fetched
+#import_db_azure_container_name: "sql-dumps"
+
+## The name of the Azure Blob object, i.e. the filename of the database snapshot
+#import_db_azure_object_name:
+
 ## The database to import into
 ## (Required)
 # import_db_database_name:
@@ -31,3 +42,6 @@ import_db_aws_secret_access_key: "{{ lookup('env', 'AWS_SECRET_ACCESS_KEY') }}"
 import_db_post_import_sql_queries: []
 ## The query that should be executed
 # - query: ""
+
+## Temporary path for downloaded files while they are prepared for import
+import_db_tmp_path: "/tmp/import_db"
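
Taken together, the new defaults let a playbook opt into Azure by overriding a single variable. A minimal sketch of a play using the azure-blob provider follows; the host group, role name, and all values here are illustrative assumptions, not part of this commit:

---
# Hypothetical playbook; host group, role name, and values are assumptions.
- hosts: "databases"
  roles:
    - role: "import-db"
      import_db_storage_provider: "azure-blob"
      import_db_azure_container_name: "sql-dumps"
      import_db_azure_object_name: "db_dump.sql.gz"
      import_db_database_name: "application"
      import_db_root_password: "{{ vault_mysql_root_password }}"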
12 changes: 3 additions & 9 deletions tasks/dependencies.yml
@@ -1,5 +1,5 @@
 ---
-- name: "Install tools required to fetch/unpack content from AWS"
+- name: "Install tools required to fetch/unpack content from the storage provider"
   package:
     name: "{{ item }}"
     state: "present"
@@ -8,11 +8,5 @@
   become: "True"
   become_user: "root"

-- name: "Install the required python packages"
-  pip:
-    name: "{{ item }}"
-    state: "latest"
-  become: "True"
-  become_user: "root"
-  with_items:
-    - "boto3"
+- name: "Install dependencies for the corresponding storage provider"
+  include: "dependencies/{{ import_db_storage_provider }}.yml"
9 changes: 9 additions & 0 deletions tasks/dependencies/aws-s3.yml
@@ -0,0 +1,9 @@
+---
+- name: "Install the required python packages for AWS"
+  pip:
+    name: "{{ item }}"
+    state: "latest"
+  become: "True"
+  become_user: "root"
+  with_items:
+    - "boto3"
11 changes: 11 additions & 0 deletions tasks/dependencies/azure-blob.yml
@@ -0,0 +1,11 @@
+---
+- name: "Install the required python packages for Azure"
+  pip:
+    name: "{{ item }}"
+    state: "latest"
+  become: "True"
+  become_user: "root"
+  with_items:
+    - packaging
+    - msrestazure
+    - ansible[azure]
14 changes: 14 additions & 0 deletions tasks/download-from/aws-s3.yml
@@ -0,0 +1,14 @@
+---
+# Note: The AWS keys default to environment lookups (see defaults/main.yml). Set them in the session:
+# $ export AWS_ACCESS_KEY_ID="YOUR_ACCESS_KEY"
+# $ export AWS_SECRET_ACCESS_KEY="YOUR_SECRET_KEY"
+- name: "Download the copy of the DB from AWS S3"
+  aws_s3:
+    bucket: "{{ import_db_s3_bucket }}"
+    object: "{{ import_db_s3_object_name }}"
+    dest: "{{ import_db_tmp_path }}/db_dump.sql.gz"
+    region: "{{ import_db_s3_region }}"
+    aws_access_key: "{{ import_db_aws_access_key_id }}"
+    aws_secret_key: "{{ import_db_aws_secret_access_key }}"
+    mode: "get"
+  register: "import_db_downloaded_archive"
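
One behavioural note: in get mode, aws_s3 re-downloads the object on every run by default, so the registered changed flag is nearly always true. A hedged tweak relying on the module's overwrite option (an assumption about the installed Ansible version, not part of this commit) skips the download when the local file already matches the remote checksum:

- name: "Download the copy of the DB from AWS S3"
  aws_s3:
    bucket: "{{ import_db_s3_bucket }}"
    object: "{{ import_db_s3_object_name }}"
    dest: "{{ import_db_tmp_path }}/db_dump.sql.gz"
    region: "{{ import_db_s3_region }}"
    mode: "get"
    overwrite: "different"  # only fetch when checksums differ
  register: "import_db_downloaded_archive"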
28 changes: 28 additions & 0 deletions tasks/download-from/azure-blob.yml
@@ -0,0 +1,28 @@
+---
+#- name: "Download the copy of the DB from Azure Storage"
+#  azure_rm_storageblob:
+#    resource_group: "{{ lookup('env','AZURE_PROD_RESOURCE_GROUP') }}"
+#    storage_account_name: "{{ lookup('env','AZURE_STORAGE_ACCOUNT') }}"
+#    container: "{{ import_db_azure_container_name }}"
+#    blob: "{{ import_db_azure_object_name }}"
+#    dest: "{{ import_db_tmp_path }}/db_dump.sql.gz"
+#  environment:
+#    AZURE_CLIENT_ID: "{{ lookup('env','AZURE_CLIENT_ID') }}"
+#    AZURE_SECRET: "{{ lookup('env','AZURE_SECRET') }}"
+#    AZURE_TENANT: "{{ lookup('env','AZURE_TENANT') }}"
+#    AZURE_SUBSCRIPTION_ID: "{{ lookup('env','AZURE_SUBSCRIPTION_ID') }}"
+#    AZURE_STORAGE_ACCOUNT: "{{ lookup('env','AZURE_STORAGE_ACCOUNT') }}"
+#    AZURE_STORAGE_KEY: "{{ lookup('env','AZURE_STORAGE_KEY') }}"
+#  register: "import_db_downloaded_archive"
+#
+# This expects to authenticate based on the invoker's Azure credentials, supplied through environment variables.
+- name: "Download the copy of the DB from Azure Storage"
+  shell: |
+    AZURE_CLIENT_ID="{{ lookup('env','AZURE_CLIENT_ID') }}" \
+    AZURE_SECRET="{{ lookup('env','AZURE_SECRET') }}" \
+    AZURE_TENANT="{{ lookup('env','AZURE_TENANT') }}" \
+    AZURE_SUBSCRIPTION_ID="{{ lookup('env','AZURE_SUBSCRIPTION_ID') }}" \
+    AZURE_STORAGE_ACCOUNT="{{ lookup('env','AZURE_STORAGE_ACCOUNT') }}" \
+    AZURE_STORAGE_KEY="{{ lookup('env','AZURE_STORAGE_KEY') }}" \
+    az storage blob download --container-name="{{ import_db_azure_container_name }}" --file="{{ import_db_tmp_path }}/db_dump.sql.gz" --name="{{ import_db_azure_object_name }}"
+  register: "import_db_downloaded_archive"
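
Two hedged refinements to this shell task, neither part of the commit: the inline VAR=... prefixes can move to Ansible's task-level environment keyword (as the commented-out module attempt above already does), and args: creates lets the task skip when the archive is already on disk, keeping the downstream changed guards meaningful. The sketch assumes AZURE_STORAGE_ACCOUNT and AZURE_STORAGE_KEY alone are sufficient for az storage commands:

- name: "Download the copy of the DB from Azure Storage"
  shell: >
    az storage blob download
    --container-name="{{ import_db_azure_container_name }}"
    --file="{{ import_db_tmp_path }}/db_dump.sql.gz"
    --name="{{ import_db_azure_object_name }}"
  args:
    creates: "{{ import_db_tmp_path }}/db_dump.sql.gz"  # skip if already downloaded
  environment:
    AZURE_STORAGE_ACCOUNT: "{{ lookup('env','AZURE_STORAGE_ACCOUNT') }}"
    AZURE_STORAGE_KEY: "{{ lookup('env','AZURE_STORAGE_KEY') }}"
  register: "import_db_downloaded_archive"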
36 changes: 15 additions & 21 deletions tasks/import.yml
@@ -1,41 +1,35 @@
 ---
-# Note: The AWS keys are deliberately omitted for this task. They should be set in the session:
-# $ export AWS_ACCESS_KEY_ID="YOUR_ACCESS_KEY"
-# $ export AWS_SECRET_ACCESS_KEY="YOUR_SECRET_KEY"
-- name: "Download the copy of the DB from S3"
-  aws_s3:
-    bucket: "{{ import_db_s3_bucket }}"
-    object: "{{ import_db_s3_object_name }}"
-    dest: "/tmp/db_dump.sql.gz"
-    region: "{{ import_db_s3_region }}"
-    aws_access_key: "{{ import_db_aws_access_key_id }}"
-    aws_secret_key: "{{ import_db_aws_secret_access_key }}"
-    mode: "get"
-  register: "s3db"
+- name: "Make sure the temp folder exists"
+  file:
+    path: "{{ import_db_tmp_path }}"
+    state: "directory"
+
+- name: "Download the copy of the DB snapshot from the storage provider"
+  include: "download-from/{{ import_db_storage_provider }}.yml"

 # Note: Ansible does not support plain .gz files as part of its "unarchive" module, as they are not strictly speaking
 # archives. See
 # - https://github.com/ansible/ansible-modules-extras/pull/1301
 # - https://github.com/ansible/ansible-modules-core/issues/1035
 - name: "Unpack the DB"
-  command: "gzip -d /tmp/db_dump.sql.gz"
+  command: "gzip -d {{ import_db_tmp_path }}/db_dump.sql.gz"
   args:
-    creates: "/tmp/db_dump.sql"
-  when: "s3db.changed"
+    creates: "{{ import_db_tmp_path }}/db_dump.sql"
+  when: "import_db_downloaded_archive.changed"

 - name: "Delete the old database"
   mysql_db:
     name: "{{ import_db_database_name }}"
     state: "absent"
     login_password: "{{ import_db_root_password }}"
     login_user: "root"
-  when: "s3db.changed"
+  when: "import_db_downloaded_archive.changed"

 - name: "Restore the database"
   mysql_db:
     name: "{{ import_db_database_name }}"
     state: "import"
-    target: "/tmp/db_dump.sql"
+    target: "{{ import_db_tmp_path }}/db_dump.sql"
     login_password: "{{ import_db_root_password }}"
     login_user: "root"

@@ -44,6 +38,6 @@
     path: "{{ item }}"
     state: "absent"
   with_items:
-    - "/tmp/db_dump.sql"
-    - "/tmp/db_dump.sql.gz"
-  when: "s3db.changed"
+    - "{{ import_db_tmp_path }}/db_dump.sql"
+    - "{{ import_db_tmp_path }}/db_dump.sql.gz"
+    - "{{ import_db_tmp_path }}"
