Skip to content

Commit

Permalink
ADHOC feat (Azure): add Azure Blob support
Browse files Browse the repository at this point in the history
Adding support for downloading actual DB snapshots from Azure Blob Storage.

For an unknown reason, downloading with the Ansible modules is not working
properly; at the same time, the equivalent shell command operates correctly.

AWS S3 functionality should work like before with this change, this commit is
not supposed to change AWS related behavior.
  • Loading branch information
Anton Boritskiy committed Aug 22, 2019
1 parent 0dafcfc commit defeed7
Show file tree
Hide file tree
Showing 7 changed files with 78 additions and 22 deletions.
11 changes: 11 additions & 0 deletions defaults/main.yml
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,11 @@
## Where values are commented out, it means that you must populate them in your playbooks. They are necessarily
## secret, or specific to the playbook.

## The name of the database storage provider (AWS S3 or Azure Blob Storage)
## (Required)
import_db_storage_provider: "aws-s3"
#import_db_storage_provider: "azure-blob"

## The S3 bucket that contains the database dump to download.
## (Required)
# import_db_s3_bucket:
Expand All @@ -14,6 +19,12 @@
## (Required)
# import_db_s3_region:

## The name of the Azure Blob container that database snapshots are downloaded from
#import_db_azure_container_name: "sql-dumps"

## The name of the Azure Blob object, i.e. the filename of the database snapshot
#import_db_azure_object_name:

## The database to import into
## (Required)
# import_db_database_name:
Expand Down
12 changes: 3 additions & 9 deletions tasks/dependencies.yml
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
---
- name: "Install tools required to fetch/unpack content from AWS"
- name: "Install tools required to fetch/unpack content from storage provider"
package:
name: "{{ item }}"
state: "present"
Expand All @@ -8,11 +8,5 @@
become: "True"
become_user: "root"

- name: "Install the required python packages"
pip:
name: "{{ item }}"
state: "latest"
become: "True"
become_user: "root"
with_items:
- "boto3"
- name: "Install dependencies for corresponding storage provider"
include: "dependencies/{{ import_db_storage_provider }}.yml"
9 changes: 9 additions & 0 deletions tasks/dependencies/aws-s3.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
---
# Install the Python SDK that the `aws_s3` Ansible module depends on.
# The pip module accepts a list for `name` natively, so no `with_items`
# loop is needed (looping pip is slower and deprecated style).
- name: "Install the required python packages for AWS"
  pip:
    name:
      - "boto3"
    state: "latest"
  become: true
  become_user: "root"
11 changes: 11 additions & 0 deletions tasks/dependencies/azure-blob.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
---
# Install the Python packages required to talk to Azure (used by the
# `az` CLI invocation in tasks/download-from/azure-blob.yml).
# Items are quoted — `ansible[azure]` contains brackets, and quoting
# keeps this file consistent with tasks/dependencies/aws-s3.yml.
# The pip module accepts a list for `name` natively, so no `with_items`
# loop is needed.
- name: "Install the required python packages for Azure"
  pip:
    name:
      - "packaging"
      - "msrestazure"
      - "ansible[azure]"
    state: "latest"
  become: true
  become_user: "root"
14 changes: 14 additions & 0 deletions tasks/download-from/aws-s3.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
---
# Downloads the DB dump from S3 using the credentials passed explicitly
# below via role variables. NOTE(review): the previous header claimed the
# AWS keys were "deliberately omitted" and read from the session, which
# contradicted the `aws_access_key`/`aws_secret_key` parameters actually
# passed to the module — the comment has been corrected to match the code.
# Presumably `import_db_aws_access_key_id` / `import_db_aws_secret_access_key`
# are populated from the environment by the playbook — confirm against callers.
- name: "Download the copy of the DB from AWS S3"
  aws_s3:
    bucket: "{{ import_db_s3_bucket }}"
    object: "{{ import_db_s3_object_name }}"
    dest: "{{ import_db_tmp_path }}/db_dump.sql.gz"
    region: "{{ import_db_s3_region }}"
    aws_access_key: "{{ import_db_aws_access_key_id }}"
    aws_secret_key: "{{ import_db_aws_secret_access_key }}"
    mode: "get"
  register: "import_db_downloaded_archive"
28 changes: 28 additions & 0 deletions tasks/download-from/azure-blob.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
---
# The azure_rm_storageblob module did not download the blob correctly
# (reason unknown), so the `az` CLI is shelled out to instead. The module
# based attempt is kept below for reference until the cause is found:
#
#- name: "Download the copy of the DB from Azure Storage"
#  azure_rm_storageblob:
#    resource_group: "{{ lookup('env','AZURE_PROD_RESOURCE_GROUP') }}"
#    storage_account_name: "{{ lookup('env','AZURE_STORAGE_ACCOUNT') }}"
#    container: "{{ import_db_azure_container_name }}"
#    blob: "{{ import_db_azure_object_name }}"
#    dest: "{{ import_db_tmp_path }}/db_dump.sql.gz"
#  environment:
#    AZURE_CLIENT_ID: "{{ lookup('env','AZURE_CLIENT_ID') }}"
#    AZURE_SECRET: "{{ lookup('env','AZURE_SECRET') }}"
#    AZURE_TENANT: "{{ lookup('env','AZURE_TENANT') }}"
#    AZURE_SUBSCRIPTION_ID: "{{ lookup('env','AZURE_SUBSCRIPTION_ID') }}"
#    AZURE_STORAGE_ACCOUNT: "{{ lookup('env','AZURE_STORAGE_ACCOUNT') }}"
#    AZURE_STORAGE_KEY: "{{ lookup('env','AZURE_STORAGE_KEY') }}"
#  register: "import_db_downloaded_archive"

# Authenticates from the invoker's Azure credentials, supplied through
# environment variables in the controlling shell. The credentials are passed
# via the task-level `environment:` keyword rather than inlined into the
# shell command line, so secrets (AZURE_SECRET, AZURE_STORAGE_KEY) do not
# appear in Ansible's logged command string; `no_log` additionally keeps
# them out of verbose output. The templated --file/--name arguments are
# quoted so paths containing spaces do not word-split.
- name: "Download the copy of the DB from Azure Storage"
  shell: >-
    az storage blob download
    --container-name="{{ import_db_azure_container_name }}"
    --file="{{ import_db_tmp_path }}/db_dump.sql.gz"
    --name="{{ import_db_azure_object_name }}"
  environment:
    AZURE_CLIENT_ID: "{{ lookup('env','AZURE_CLIENT_ID') }}"
    AZURE_SECRET: "{{ lookup('env','AZURE_SECRET') }}"
    AZURE_TENANT: "{{ lookup('env','AZURE_TENANT') }}"
    AZURE_SUBSCRIPTION_ID: "{{ lookup('env','AZURE_SUBSCRIPTION_ID') }}"
    AZURE_STORAGE_ACCOUNT: "{{ lookup('env','AZURE_STORAGE_ACCOUNT') }}"
    AZURE_STORAGE_KEY: "{{ lookup('env','AZURE_STORAGE_KEY') }}"
  no_log: true
  register: "import_db_downloaded_archive"
15 changes: 2 additions & 13 deletions tasks/import.yml
Original file line number Diff line number Diff line change
Expand Up @@ -4,19 +4,8 @@
path: "{{ import_db_tmp_path }}"
state: "directory"

# Note: The AWS keys are deliberately omitted for this task. They should be set in the session:
# $ export AWS_ACCESS_KEY_ID="YOUR_ACCESS_KEY"
# $ export AWS_SECRET_ACCESS_KEY="YOUR_SECRET_KEY"
- name: "Download the copy of the DB from S3"
aws_s3:
bucket: "{{ import_db_s3_bucket }}"
object: "{{ import_db_s3_object_name }}"
dest: "{{ import_db_tmp_path }}/db_dump.sql.gz"
region: "{{ import_db_s3_region }}"
aws_access_key: "{{ import_db_aws_access_key_id }}"
aws_secret_key: "{{ import_db_aws_secret_access_key }}"
mode: "get"
register: "import_db_downloaded_archive"
- name: "Download the copy of the DB snapshot from the storage provider"
include: "download-from/{{ import_db_storage_provider }}.yml"

# Note: Ansible does not support plain .gz files as part of its "unarchive" module, as they are not strictly speaking
# archives. See
Expand Down

0 comments on commit defeed7

Please sign in to comment.