From 0dafcfc94a1445726800bfb7bdd4c92f835c96d1 Mon Sep 17 00:00:00 2001 From: Anton Boritskiy Date: Thu, 22 Aug 2019 16:27:14 +0100 Subject: [PATCH 1/2] ADHOC chore (Azure): refactoring as preparation removing AWS specific variable naming, cause future work will add Azure as provider here. Also adding a more specific tmp folder to have better control on what does the role create and where. --- defaults/main.yml | 3 +++ tasks/import.yml | 25 +++++++++++++++---------- 2 files changed, 18 insertions(+), 10 deletions(-) diff --git a/defaults/main.yml b/defaults/main.yml index dc0f5ab..56465cd 100644 --- a/defaults/main.yml +++ b/defaults/main.yml @@ -31,3 +31,6 @@ import_db_aws_secret_access_key: "{{ lookup('env', 'AWS_SECRET_ACCESS_KEY') }}" import_db_post_import_sql_queries: [] ## The query that should be executed # - query: "" + +## Tmp path to put downloaded files while preparing them for import +import_db_tmp_path: "/tmp/import_db" diff --git a/tasks/import.yml b/tasks/import.yml index 692ff32..4e54e7b 100644 --- a/tasks/import.yml +++ b/tasks/import.yml @@ -1,4 +1,9 @@ --- +- name: "Make sure temp folder exists" + file: + path: "{{ import_db_tmp_path }}" + state: "directory" + # Note: The AWS keys are deliberately omitted for this task. They should be set in the session: # $ export AWS_ACCESS_KEY_ID="YOUR_ACCESS_KEY" # $ export AWS_SECRET_ACCESS_KEY="YOUR_SECRET_KEY" @@ -6,22 +11,22 @@ aws_s3: bucket: "{{ import_db_s3_bucket }}" object: "{{ import_db_s3_object_name }}" - dest: "/tmp/db_dump.sql.gz" + dest: "{{ import_db_tmp_path }}/db_dump.sql.gz" region: "{{ import_db_s3_region }}" aws_access_key: "{{ import_db_aws_access_key_id }}" aws_secret_key: "{{ import_db_aws_secret_access_key }}" mode: "get" - register: "s3db" + register: "import_db_downloaded_archive" # Note: Ansible does not support plain .gz files as part of its "unarchive" module, as they are not strictly speaking # archives. 
See # - https://github.com/ansible/ansible-modules-extras/pull/1301 # - https://github.com/ansible/ansible-modules-core/issues/1035 - name: "Unpack the DB" - command: "gzip -d /tmp/db_dump.sql.gz" + command: "gzip -d {{ import_db_tmp_path }}/db_dump.sql.gz" args: - creates: "/tmp/db_dump.sql" - when: "s3db.changed" + creates: "{{ import_db_tmp_path }}/db_dump.sql" + when: "import_db_downloaded_archive.changed" - name: "Delete the old database" mysql_db: @@ -29,13 +34,13 @@ state: "absent" login_password: "{{ import_db_root_password }}" login_user: "root" - when: "s3db.changed" + when: "import_db_downloaded_archive.changed" - name: "Restore the database" mysql_db: name: "{{ import_db_database_name }}" state: "import" - target: "/tmp/db_dump.sql" + target: "{{ import_db_tmp_path }}/db_dump.sql" login_password: "{{ import_db_root_password }}" login_user: "root" @@ -44,6 +49,6 @@ path: "{{ item }}" state: "absent" with_items: - - "/tmp/db_dump.sql" - - "/tmp/db_dump.sql.gz" - when: "s3db.changed" + - "{{ import_db_tmp_path }}/db_dump.sql" + - "{{ import_db_tmp_path }}/db_dump.sql.gz" + - "{{ import_db_tmp_path }}" From defeed7aa6379fe74e2ee3f6bc6088f4d09cf197 Mon Sep 17 00:00:00 2001 From: Anton Boritskiy Date: Thu, 22 Aug 2019 17:31:40 +0100 Subject: [PATCH 2/2] ADHOC feat (Azure): add Azure Blob support adding a support to download actual DB snapshots from Azure blobs. For unknown reason download with ansible modules is not working properly, at the same time shell command seem to be operating correctly. AWS S3 functionality should work like before with this change, this commit is not supposed to change AWS related behavior. 
--- defaults/main.yml | 11 +++++++++++ tasks/dependencies.yml | 12 +++--------- tasks/dependencies/aws-s3.yml | 9 +++++++++ tasks/dependencies/azure-blob.yml | 11 +++++++++++ tasks/download-from/aws-s3.yml | 14 ++++++++++++++ tasks/download-from/azure-blob.yml | 28 ++++++++++++++++++++++++++++ tasks/import.yml | 15 ++------------- 7 files changed, 78 insertions(+), 22 deletions(-) create mode 100644 tasks/dependencies/aws-s3.yml create mode 100644 tasks/dependencies/azure-blob.yml create mode 100644 tasks/download-from/aws-s3.yml create mode 100644 tasks/download-from/azure-blob.yml diff --git a/defaults/main.yml b/defaults/main.yml index 56465cd..9b3aea1 100644 --- a/defaults/main.yml +++ b/defaults/main.yml @@ -2,6 +2,11 @@ ## Where values are commented out, it means that you must populate them in your playbooks. They are necessarily ## secret, or specific to the playbook. +## The name of the database storage provider (AWS S3 or Azure Blob Storage) +## (required) +import_db_storage_provider: "aws-s3" +#import_db_storage_provider: "azure-blob" + ## The S3 bucket that contains the database dump to download. ## (Required) # import_db_s3_bucket: @@ -14,6 +19,12 @@ ## (Required) # import_db_s3_region: +## The name of Azure Blob container that is used to get database snapshots from +#import_db_azure_container_name: "sql-dumps" + +## The name of Azure Blob object, i.e. 
filename of database snapshot +#import_db_azure_object_name: + ## The database to import into ## (Required) # import_db_database_name: diff --git a/tasks/dependencies.yml b/tasks/dependencies.yml index c3e4d7f..f8df89c 100644 --- a/tasks/dependencies.yml +++ b/tasks/dependencies.yml @@ -1,5 +1,5 @@ --- -- name: "Install tools required to fetch/unpack content from AWS" +- name: "Install tools required to fetch/unpack content from storage provider" package: name: "{{ item }}" state: "present" @@ -8,11 +8,5 @@ become: "True" become_user: "root" -- name: "Install the required python packages" - pip: - name: "{{ item }}" - state: "latest" - become: "True" - become_user: "root" - with_items: - - "boto3" +- name: "Install dependencies for corresponding storage provider" + include: "dependencies/{{ import_db_storage_provider }}.yml" diff --git a/tasks/dependencies/aws-s3.yml b/tasks/dependencies/aws-s3.yml new file mode 100644 index 0000000..6b7310c --- /dev/null +++ b/tasks/dependencies/aws-s3.yml @@ -0,0 +1,9 @@ +--- +- name: "Install the required python packages for AWS" + pip: + name: "{{ item }}" + state: "latest" + become: "True" + become_user: "root" + with_items: + - "boto3" diff --git a/tasks/dependencies/azure-blob.yml b/tasks/dependencies/azure-blob.yml new file mode 100644 index 0000000..12044e7 --- /dev/null +++ b/tasks/dependencies/azure-blob.yml @@ -0,0 +1,11 @@ +--- +- name: "Install the required python packages for Azure" + pip: + name: "{{ item }}" + state: "latest" + become: "True" + become_user: "root" + with_items: + - packaging + - msrestazure + - ansible[azure] diff --git a/tasks/download-from/aws-s3.yml b/tasks/download-from/aws-s3.yml new file mode 100644 index 0000000..b095f34 --- /dev/null +++ b/tasks/download-from/aws-s3.yml @@ -0,0 +1,14 @@ +--- +# Note: The AWS keys are deliberately omitted for this task. 
They should be set in the session: +# $ export AWS_ACCESS_KEY_ID="YOUR_ACCESS_KEY" +# $ export AWS_SECRET_ACCESS_KEY="YOUR_SECRET_KEY" +- name: "Download the copy of the DB from AWS S3" + aws_s3: + bucket: "{{ import_db_s3_bucket }}" + object: "{{ import_db_s3_object_name }}" + dest: "{{ import_db_tmp_path }}/db_dump.sql.gz" + region: "{{ import_db_s3_region }}" + aws_access_key: "{{ import_db_aws_access_key_id }}" + aws_secret_key: "{{ import_db_aws_secret_access_key }}" + mode: "get" + register: "import_db_downloaded_archive" diff --git a/tasks/download-from/azure-blob.yml b/tasks/download-from/azure-blob.yml new file mode 100644 index 0000000..587dbac --- /dev/null +++ b/tasks/download-from/azure-blob.yml @@ -0,0 +1,28 @@ +--- +#- name: "Download the copy of the DB from Azure Storage" +# azure_rm_storageblob: +# resource_group: "{{ lookup('env','AZURE_PROD_RESOURCE_GROUP') }}" +# storage_account_name: "{{ lookup('env','AZURE_STORAGE_ACCOUNT') }}" +# container: "{{ import_db_azure_container_name }}" +# blob: "{{ import_db_azure_object_name }}" +# dest: "{{ import_db_tmp_path }}/db_dump.sql.gz" +# environment: +# AZURE_CLIENT_ID: "{{ lookup('env','AZURE_CLIENT_ID') }}" +# AZURE_SECRET: "{{ lookup('env','AZURE_SECRET') }}" +# AZURE_TENANT: "{{ lookup('env','AZURE_TENANT') }}" +# AZURE_SUBSCRIPTION_ID: "{{ lookup('env','AZURE_SUBSCRIPTION_ID') }}" +# AZURE_STORAGE_ACCOUNT: "{{ lookup('env','AZURE_STORAGE_ACCOUNT') }}" +# AZURE_STORAGE_KEY: "{{ lookup('env','AZURE_STORAGE_KEY') }}" +# register: "import_db_downloaded_archive" +# +# This expects to authenticate based on the invokers Azure credentials, supplied through environment variables +- name: "Download the copy of the DB from Azure Storage" + shell: | + AZURE_CLIENT_ID="{{ lookup('env','AZURE_CLIENT_ID') }}" \ + AZURE_SECRET="{{ lookup('env','AZURE_SECRET') }}" \ + AZURE_TENANT="{{ lookup('env','AZURE_TENANT') }}" \ + AZURE_SUBSCRIPTION_ID="{{ lookup('env','AZURE_SUBSCRIPTION_ID') }}" \ + 
AZURE_STORAGE_ACCOUNT="{{ lookup('env','AZURE_STORAGE_ACCOUNT') }}" \ + AZURE_STORAGE_KEY="{{ lookup('env','AZURE_STORAGE_KEY') }}" \ + az storage blob download --container-name="{{ import_db_azure_container_name }}" --file="{{ import_db_tmp_path }}/db_dump.sql.gz" --name="{{ import_db_azure_object_name }}" + register: "import_db_downloaded_archive" diff --git a/tasks/import.yml b/tasks/import.yml index 4e54e7b..1479555 100644 --- a/tasks/import.yml +++ b/tasks/import.yml @@ -4,19 +4,8 @@ path: "{{ import_db_tmp_path }}" state: "directory" -# Note: The AWS keys are deliberately omitted for this task. They should be set in the session: -# $ export AWS_ACCESS_KEY_ID="YOUR_ACCESS_KEY" -# $ export AWS_SECRET_ACCESS_KEY="YOUR_SECRET_KEY" -- name: "Download the copy of the DB from S3" - aws_s3: - bucket: "{{ import_db_s3_bucket }}" - object: "{{ import_db_s3_object_name }}" - dest: "{{ import_db_tmp_path }}/db_dump.sql.gz" - region: "{{ import_db_s3_region }}" - aws_access_key: "{{ import_db_aws_access_key_id }}" - aws_secret_key: "{{ import_db_aws_secret_access_key }}" - mode: "get" - register: "import_db_downloaded_archive" +- name: "Download the copy of the DB snapshot from the storage provider" + include: "download-from/{{ import_db_storage_provider }}.yml" # Note: Ansible does not support plain .gz files as part of its "unarchive" module, as they are not strictly speaking # archives. See