diff --git a/defaults/main.yml b/defaults/main.yml
index dc0f5ab..9b3aea1 100644
--- a/defaults/main.yml
+++ b/defaults/main.yml
@@ -2,6 +2,11 @@
 ## Where values are commented out, it means that you must populate them in your playbooks. They are necessarily
 ## secret, or specific to the playbook.
 
+## The name of the database storage provider (AWS S3 or Azure Blob Storage)
+## (Required)
+import_db_storage_provider: "aws-s3"
+#import_db_storage_provider: "azure-blob"
+
 ## The S3 bucket that contains the database dump to download.
 ## (Required)
 # import_db_s3_bucket:
@@ -14,6 +19,12 @@
 ## (Required)
 # import_db_s3_region:
 
+## The name of the Azure Blob container that database snapshots are fetched from
+#import_db_azure_container_name: "sql-dumps"
+
+## The name of the Azure Blob object, i.e. the filename of the database snapshot
+#import_db_azure_object_name:
+
 ## The database to import into
 ## (Required)
 # import_db_database_name:
@@ -31,3 +42,6 @@ import_db_aws_secret_access_key: "{{ lookup('env', 'AWS_SECRET_ACCESS_KEY') }}"
 import_db_post_import_sql_queries: []
 ## The query that should be executed
 # - query: ""
+
+## Temporary path used to stage downloaded files while preparing them for import
+import_db_tmp_path: "/tmp/import_db"
diff --git a/tasks/dependencies.yml b/tasks/dependencies.yml
index c3e4d7f..f8df89c 100644
--- a/tasks/dependencies.yml
+++ b/tasks/dependencies.yml
@@ -1,5 +1,5 @@
 ---
-- name: "Install tools required to fetch/unpack content from AWS"
+- name: "Install tools required to fetch/unpack content from the storage provider"
   package:
     name: "{{ item }}"
     state: "present"
@@ -8,11 +8,5 @@
   become: "True"
   become_user: "root"
 
-- name: "Install the required python packages"
-  pip:
-    name: "{{ item }}"
-    state: "latest"
-  become: "True"
-  become_user: "root"
-  with_items:
-    - "boto3"
+- name: "Install dependencies for the corresponding storage provider"
+  include: "dependencies/{{ import_db_storage_provider }}.yml"
diff --git a/tasks/dependencies/aws-s3.yml b/tasks/dependencies/aws-s3.yml
new file mode 100644
index 0000000..6b7310c
--- /dev/null
+++ b/tasks/dependencies/aws-s3.yml
@@ -0,0 +1,9 @@
+---
+- name: "Install the required python packages for AWS"
+  pip:
+    name: "{{ item }}"
+    state: "latest"
+  become: "True"
+  become_user: "root"
+  with_items:
+    - "boto3"
diff --git a/tasks/dependencies/azure-blob.yml b/tasks/dependencies/azure-blob.yml
new file mode 100644
index 0000000..12044e7
--- /dev/null
+++ b/tasks/dependencies/azure-blob.yml
@@ -0,0 +1,11 @@
+---
+- name: "Install the required python packages for Azure"
+  pip:
+    name: "{{ item }}"
+    state: "latest"
+  become: "True"
+  become_user: "root"
+  with_items:
+    - "packaging"
+    - "msrestazure"
+    - "ansible[azure]"
diff --git a/tasks/download-from/aws-s3.yml b/tasks/download-from/aws-s3.yml
new file mode 100644
index 0000000..b095f34
--- /dev/null
+++ b/tasks/download-from/aws-s3.yml
@@ -0,0 +1,14 @@
+---
+# Note: The AWS keys are deliberately not hardcoded for this task; by default they are read from the session environment:
+# $ export AWS_ACCESS_KEY_ID="YOUR_ACCESS_KEY"
+# $ export AWS_SECRET_ACCESS_KEY="YOUR_SECRET_KEY"
+- name: "Download the copy of the DB from AWS S3"
+  aws_s3:
+    bucket: "{{ import_db_s3_bucket }}"
+    object: "{{ import_db_s3_object_name }}"
+    dest: "{{ import_db_tmp_path }}/db_dump.sql.gz"
+    region: "{{ import_db_s3_region }}"
+    aws_access_key: "{{ import_db_aws_access_key_id }}"
+    aws_secret_key: "{{ import_db_aws_secret_access_key }}"
+    mode: "get"
+  register: "import_db_downloaded_archive"
diff --git a/tasks/download-from/azure-blob.yml b/tasks/download-from/azure-blob.yml
new file mode 100644
index 0000000..587dbac
--- /dev/null
+++ b/tasks/download-from/azure-blob.yml
@@ -0,0 +1,30 @@
+---
+# An alternative using the azure_rm_storageblob module, kept commented out in favor of the az CLI below:
+#- name: "Download the copy of the DB from Azure Storage"
+#  azure_rm_storageblob:
+#    resource_group: "{{ lookup('env','AZURE_PROD_RESOURCE_GROUP') }}"
+#    storage_account_name: "{{ lookup('env','AZURE_STORAGE_ACCOUNT') }}"
+#    container: "{{ import_db_azure_container_name }}"
+#    blob: "{{ import_db_azure_object_name }}"
+#    dest: "{{ import_db_tmp_path }}/db_dump.sql.gz"
+#  environment:
+#    AZURE_CLIENT_ID: "{{ lookup('env','AZURE_CLIENT_ID') }}"
+#    AZURE_SECRET: "{{ lookup('env','AZURE_SECRET') }}"
+#    AZURE_TENANT: "{{ lookup('env','AZURE_TENANT') }}"
+#    AZURE_SUBSCRIPTION_ID: "{{ lookup('env','AZURE_SUBSCRIPTION_ID') }}"
+#    AZURE_STORAGE_ACCOUNT: "{{ lookup('env','AZURE_STORAGE_ACCOUNT') }}"
+#    AZURE_STORAGE_KEY: "{{ lookup('env','AZURE_STORAGE_KEY') }}"
+#  register: "import_db_downloaded_archive"
+#
+# This authenticates using the invoker's Azure credentials, supplied through environment variables.
+# NB: a shell task always reports "changed", so the downstream unpack/import tasks will always run for this provider.
+- name: "Download the copy of the DB from Azure Storage"
+  shell: |
+    AZURE_CLIENT_ID="{{ lookup('env','AZURE_CLIENT_ID') }}" \
+    AZURE_SECRET="{{ lookup('env','AZURE_SECRET') }}" \
+    AZURE_TENANT="{{ lookup('env','AZURE_TENANT') }}" \
+    AZURE_SUBSCRIPTION_ID="{{ lookup('env','AZURE_SUBSCRIPTION_ID') }}" \
+    AZURE_STORAGE_ACCOUNT="{{ lookup('env','AZURE_STORAGE_ACCOUNT') }}" \
+    AZURE_STORAGE_KEY="{{ lookup('env','AZURE_STORAGE_KEY') }}" \
+    az storage blob download --container-name="{{ import_db_azure_container_name }}" --file="{{ import_db_tmp_path }}/db_dump.sql.gz" --name="{{ import_db_azure_object_name }}"
+  register: "import_db_downloaded_archive"
diff --git a/tasks/import.yml b/tasks/import.yml
index 692ff32..1479555 100644
--- a/tasks/import.yml
+++ b/tasks/import.yml
@@ -1,27 +1,21 @@
 ---
-# Note: The AWS keys are deliberately omitted for this task. They should be set in the session:
-# $ export AWS_ACCESS_KEY_ID="YOUR_ACCESS_KEY"
-# $ export AWS_SECRET_ACCESS_KEY="YOUR_SECRET_KEY"
-- name: "Download the copy of the DB from S3"
-  aws_s3:
-    bucket: "{{ import_db_s3_bucket }}"
-    object: "{{ import_db_s3_object_name }}"
-    dest: "/tmp/db_dump.sql.gz"
-    region: "{{ import_db_s3_region }}"
-    aws_access_key: "{{ import_db_aws_access_key_id }}"
-    aws_secret_key: "{{ import_db_aws_secret_access_key }}"
-    mode: "get"
-  register: "s3db"
+- name: "Ensure the temp directory exists"
+  file:
+    path: "{{ import_db_tmp_path }}"
+    state: "directory"
+
+- name: "Download the copy of the DB snapshot from the storage provider"
+  include: "download-from/{{ import_db_storage_provider }}.yml"
 
 # Note: Ansible does not support plain .gz files as part of its "unarchive" module, as they are not strictly speaking
 # archives. See
 # - https://github.com/ansible/ansible-modules-extras/pull/1301
 # - https://github.com/ansible/ansible-modules-core/issues/1035
 - name: "Unpack the DB"
-  command: "gzip -d /tmp/db_dump.sql.gz"
+  command: "gzip -d {{ import_db_tmp_path }}/db_dump.sql.gz"
   args:
-    creates: "/tmp/db_dump.sql"
-  when: "s3db.changed"
+    creates: "{{ import_db_tmp_path }}/db_dump.sql"
+  when: "import_db_downloaded_archive.changed"
 
 - name: "Delete the old database"
   mysql_db:
@@ -29,13 +23,13 @@
     state: "absent"
     login_password: "{{ import_db_root_password }}"
     login_user: "root"
-  when: "s3db.changed"
+  when: "import_db_downloaded_archive.changed"
 
 - name: "Restore the database"
   mysql_db:
     name: "{{ import_db_database_name }}"
     state: "import"
-    target: "/tmp/db_dump.sql"
+    target: "{{ import_db_tmp_path }}/db_dump.sql"
     login_password: "{{ import_db_root_password }}"
     login_user: "root"
 
@@ -44,6 +38,6 @@
     path: "{{ item }}"
     state: "absent"
   with_items:
-    - "/tmp/db_dump.sql"
-    - "/tmp/db_dump.sql.gz"
-  when: "s3db.changed"
+    - "{{ import_db_tmp_path }}/db_dump.sql"
+    - "{{ import_db_tmp_path }}/db_dump.sql.gz"
+    - "{{ import_db_tmp_path }}"
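
For reference, a minimal sketch of a play that exercises the new azure-blob code path. The role name "import-db", the host group, the database name, and the vault variable are hypothetical placeholders; the remaining variable names come from defaults/main.yml above:

    - hosts: "db-servers"                  # hypothetical host group
      roles:
        - role: "import-db"                # hypothetical role name
          import_db_storage_provider: "azure-blob"
          import_db_azure_container_name: "sql-dumps"
          import_db_azure_object_name: "db_dump.sql.gz"
          import_db_database_name: "app_db"                            # hypothetical
          import_db_root_password: "{{ vault_mysql_root_password }}"   # hypothetical vault variable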
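Analogous to the AWS note in tasks/download-from/aws-s3.yml, a sketch of the session environment the azure-blob download task expects before invoking Ansible; the values are placeholders and "playbook.yml" is a hypothetical entry point:

    $ export AZURE_CLIENT_ID="YOUR_CLIENT_ID"
    $ export AZURE_SECRET="YOUR_CLIENT_SECRET"
    $ export AZURE_TENANT="YOUR_TENANT_ID"
    $ export AZURE_SUBSCRIPTION_ID="YOUR_SUBSCRIPTION_ID"
    $ export AZURE_STORAGE_ACCOUNT="YOUR_STORAGE_ACCOUNT"
    $ export AZURE_STORAGE_KEY="YOUR_STORAGE_KEY"
    $ ansible-playbook playbook.yml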