diff --git a/.github/workflows/warehouse-publish-docker-containers.yml b/.github/workflows/warehouse-publish-docker-containers.yml
index e1c694b04..0281fe764 100644
--- a/.github/workflows/warehouse-publish-docker-containers.yml
+++ b/.github/workflows/warehouse-publish-docker-containers.yml
@@ -1,4 +1,4 @@
-name: warehouse-publish-cloudquery-plugins
+name: warehouse-publish-docker-containers
 env:
   X_GITHUB_GRAPHQL_API: ${{ vars.X_GITHUB_GRAPHQL_API }}
   X_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
@@ -18,6 +18,8 @@ jobs:
 
     permissions:
       packages: write
+      contents: 'read'
+      id-token: 'write'
 
     steps:
       - name: Checkout code
@@ -37,4 +39,25 @@ jobs:
 
       - name: Package and publish other docker containers
         run: bash .github/scripts/publish-docker-containers.sh
-
\ No newline at end of file
+
+      # The remaining steps currently kill pods in our Kubernetes cluster
+      # because we haven't dealt with using strict versions on the cluster yet.
+      # This ensures the pods are up to date. This is a hack for now.
+      - uses: 'google-github-actions/auth@v2'
+        with:
+          credentials_json: '${{ secrets.GOOGLE_CREDENTIALS_JSON }}'
+          create_credentials_file: true
+
+      - name: 'Set up Cloud SDK'
+        uses: 'google-github-actions/setup-gcloud@v2'
+        with:
+          version: '>= 363.0.0'
+
+      # Get the GKE credentials so we can deploy to the cluster
+      - uses: google-github-actions/get-gke-credentials@db150f2cc60d1716e61922b832eae71d2a45938f
+        with:
+          cluster_name: ${{ secrets.GKE_CLUSTER_NAME }}
+          location: ${{ secrets.GKE_CLUSTER_REGION }}
+
+      - name: Delete pod for dagster
+        run: kubectl delete pods --namespace production-dagster -l app.kubernetes.io/instance=production-dagster,component=user-deployments
diff --git a/docker/images/dagster-dask/Dockerfile b/docker/images/dagster-dask/Dockerfile
index 63c545081..1534def4e 100644
--- a/docker/images/dagster-dask/Dockerfile
+++ b/docker/images/dagster-dask/Dockerfile
@@ -1,5 +1,7 @@
 FROM ubuntu:jammy
 
+ARG GCLOUD_VERSION=478.0.0
+
 ENV DEBIAN_FRONTEND=noninteractive
 RUN apt-get update && \
     apt-get install -y software-properties-common && \
@@ -10,6 +12,12 @@ RUN apt-get install -y curl && \
     curl -o get-pip.py https://bootstrap.pypa.io/get-pip.py && \
     python3.12 get-pip.py
 RUN pip3.12 install poetry
+RUN curl -o gcloud.tar.gz https://dl.google.com/dl/cloudsdk/channels/rapid/downloads/google-cloud-cli-${GCLOUD_VERSION}-linux-x86_64.tar.gz && \
+    tar xvf gcloud.tar.gz && \
+    bash ./google-cloud-sdk/install.sh && \
+    rm gcloud.tar.gz && true
+ENV PATH $PATH:/google-cloud-sdk/bin
+
 ENV DAGSTER_DBT_PARSE_PROJECT_ON_LOAD=1
 ENV DAGSTER_DBT_GENERATE_AND_AUTH_GCP=1
 
diff --git a/warehouse/dbt/macros/models/contract_invocation_events_with_l1.sql b/warehouse/dbt/macros/models/contract_invocation_events_with_l1.sql
index 440b511ee..eda34fe3b 100644
--- a/warehouse/dbt/macros/models/contract_invocation_events_with_l1.sql
+++ b/warehouse/dbt/macros/models/contract_invocation_events_with_l1.sql
@@ -4,35 +4,23 @@
 
 with blockchain_artifacts as (
   select
-    artifact_source_id,
-    MAX_BY(artifact_type, artifact_rank) as artifact_type
-  from (
-    select
-      LOWER(artifact_source_id) as artifact_source_id,
-      artifact_type,
-      case
-        when artifact_type = 'SAFE' then 5
-        when artifact_type = 'FACTORY' then 4
-        when artifact_type = 'CONTRACT' then 3
-        when artifact_type = 'DEPLOYER' then 2
-        when artifact_type = 'EOA' then 1
-        else 0
-      end as artifact_rank
-    from {{ ref('int_all_artifacts') }}
-    where artifact_source in ("{{ upper_network_name }}", "ANY_EVM")
-  )
-  group by artifact_source_id
+    artifact_id,
+    LOWER(artifact_source_id) as artifact_source_id,
+  from {{ ref('int_all_artifacts') }}
+  where UPPER(artifact_source) in ("{{ upper_network_name }}", "ANY_EVM")
 ),
 
 all_transactions as (
   select -- noqa: ST06
     TIMESTAMP_TRUNC(transactions.block_timestamp, day) as `time`,
-    LOWER(transactions.to_address) as to_name,
-    COALESCE(to_artifacts.artifact_type, "CONTRACT") as to_type,
-    LOWER(transactions.to_address) as to_source_id,
-    LOWER(transactions.from_address) as from_name,
-    COALESCE(from_artifacts.artifact_type, "EOA") as from_type,
-    LOWER(transactions.from_address) as from_source_id,
+    COALESCE(to_artifacts.artifact_id, {{ oso_id("'%s'" % upper_network_name, "transactions.to_address") }}) as to_artifact_id,
+    LOWER(transactions.to_address) as to_artifact_name,
+    "CONTRACT" as to_artifact_type,
+    LOWER(transactions.to_address) as to_artifact_source_id,
+    COALESCE(to_artifacts.artifact_id, {{ oso_id("'%s'" % upper_network_name, "transactions.from_address") }}) as from_artifact_id,
+    LOWER(transactions.from_address) as from_artifact_name,
+    "EOA" as from_artifact_type,
+    LOWER(transactions.from_address) as from_artifact_source_id,
     transactions.receipt_status,
     (
       transactions.receipt_gas_used
@@ -51,27 +39,29 @@ all_transactions as (
 contract_invocations as (
   select
     time,
-    to_name,
-    to_type,
-    to_source_id,
-    from_name,
-    from_type,
-    from_source_id,
+    to_artifact_id,
+    to_artifact_name,
+    to_artifact_type,
+    to_artifact_source_id,
+    from_artifact_id,
+    from_artifact_name,
+    from_artifact_type,
+    from_artifact_source_id,
     "{{ upper_network_name }}" as event_source,
-    "{{ lower_network_name }}" as to_namespace,
-    "{{ lower_network_name }}" as from_namespace,
     SUM(l2_gas_fee) as total_l2_gas_used,
     COUNT(*) as total_count,
     SUM(case when receipt_status = 1 then 1 else 0 end) as success_count
   from all_transactions
   group by
    time,
-    to_name,
-    to_type,
-    to_source_id,
-    from_name,
-    from_type,
-    from_source_id
+    to_artifact_id,
+    to_artifact_name,
+    to_artifact_type,
+    to_artifact_source_id,
+    from_artifact_id,
+    from_artifact_name,
+    from_artifact_type,
+    from_artifact_source_id
 ),
 
 all_events as (
@@ -79,14 +69,14 @@ all_events as (
     time,
     'CONTRACT_INVOCATION_DAILY_L2_GAS_USED' as event_type,
     event_source,
-    to_name,
-    to_namespace,
-    to_type,
-    to_source_id,
-    from_name,
-    from_namespace,
-    from_type,
-    from_source_id,
+    to_artifact_id,
+    to_artifact_name,
+    to_artifact_type,
+    to_artifact_source_id,
+    from_artifact_id,
+    from_artifact_name,
+    from_artifact_type,
+    from_artifact_source_id,
     total_l2_gas_used as amount
   from contract_invocations
   union all
@@ -94,14 +84,14 @@ all_events as (
     time,
     'CONTRACT_INVOCATION_DAILY_COUNT' as event_type,
     event_source,
-    to_name,
-    to_namespace,
-    to_type,
-    to_source_id,
-    from_name,
-    from_namespace,
-    from_type,
-    from_source_id,
+    to_artifact_id,
+    to_artifact_name,
+    to_artifact_type,
+    to_artifact_source_id,
+    from_artifact_id,
+    from_artifact_name,
+    from_artifact_type,
+    from_artifact_source_id,
     total_count as amount
   from contract_invocations
   union all
@@ -109,14 +99,14 @@ all_events as (
     time,
     'CONTRACT_INVOCATION_SUCCESS_DAILY_COUNT' as event_type,
     event_source,
-    to_name,
-    to_namespace,
-    to_type,
-    to_source_id,
-    from_name,
-    from_namespace,
-    from_type,
-    from_source_id,
+    to_artifact_id,
+    to_artifact_name,
+    to_artifact_type,
+    to_artifact_source_id,
+    from_artifact_id,
+    from_artifact_name,
+    from_artifact_type,
+    from_artifact_source_id,
     success_count as amount
   from contract_invocations
 )
@@ -125,15 +115,17 @@ select
   time,
   event_type,
   event_source,
-  to_name,
-  to_namespace,
-  to_type,
-  to_source_id,
-  from_name,
-  from_namespace,
-  from_type,
-  from_source_id,
+  to_artifact_id,
+  to_artifact_name,
+  "{{ lower_network_name }}" as to_artifact_namespace,
+  to_artifact_type,
+  to_artifact_source_id,
+  from_artifact_id,
+  from_artifact_name,
+  "{{ lower_network_name }}" as from_artifact_namespace,
+  from_artifact_type,
+  from_artifact_source_id,
   amount,
-  {{ oso_id('event_source', 'time', 'to_name', 'from_name') }} as event_source_id
+  {{ oso_id('event_source', 'time', 'to_artifact_source_id', 'from_artifact_source_id') }} as event_source_id
 from all_events
 {% endmacro %}
diff --git a/warehouse/dbt/macros/models/filtered_blockchain_events.sql b/warehouse/dbt/macros/models/filtered_blockchain_events.sql
index 6544a8bb1..9be7a7901 100644
--- a/warehouse/dbt/macros/models/filtered_blockchain_events.sql
+++ b/warehouse/dbt/macros/models/filtered_blockchain_events.sql
@@ -2,7 +2,7 @@
 with known_addresses as (
   select distinct `artifact_source_id` as `address`
   from {{ ref("int_all_artifacts") }}
-  where LOWER(artifact_source) = LOWER('{{ artifact_source }}')
+  where UPPER(artifact_source) in (UPPER('{{ artifact_source }}'), 'ANY_EVM')
 ),
 known_to as (
   select events.* from {{ oso_source(source_name, source_table)}} as events
diff --git a/warehouse/dbt/models/intermediate/blockchain/int_optimism_contract_invocation_events.sql b/warehouse/dbt/models/intermediate/blockchain/int_optimism_contract_invocation_events.sql
index deba53768..601eb7849 100644
--- a/warehouse/dbt/models/intermediate/blockchain/int_optimism_contract_invocation_events.sql
+++ b/warehouse/dbt/models/intermediate/blockchain/int_optimism_contract_invocation_events.sql
@@ -16,139 +16,4 @@
 {% else %}
   {% set start = "'1970-01-01'" %}
 {% endif %}
-
-with blockchain_artifacts as (
-  select
-    artifact_source_id,
-    MAX_BY(artifact_type, artifact_rank) as artifact_type
-  from (
-    select
-      artifact_type,
-      LOWER(artifact_source_id) as artifact_source_id,
-      case
-        when artifact_type = 'SAFE' then 5
-        when artifact_type = 'FACTORY' then 4
-        when artifact_type = 'CONTRACT' then 3
-        when artifact_type = 'DEPLOYER' then 2
-        when artifact_type = 'EOA' then 1
-        else 0
-      end as artifact_rank
-    from {{ ref('int_all_artifacts') }}
-    where UPPER(artifact_source) in ('OPTIMISM', 'ANY_EVM')
-  )
-  group by artifact_source_id
-),
-
-all_transactions as (
-  select -- noqa: ST06
-    TIMESTAMP_TRUNC(transactions.block_timestamp, day) as `time`,
-    LOWER(transactions.to_address) as to_name,
-    COALESCE(to_artifacts.artifact_type, 'CONTRACT') as to_type,
-    LOWER(transactions.to_address) as to_source_id,
-    LOWER(transactions.from_address) as from_name,
-    COALESCE(from_artifacts.artifact_type, 'EOA') as from_type,
-    LOWER(transactions.from_address) as from_source_id,
-    transactions.receipt_status,
-    (
-      transactions.receipt_gas_used
-      * transactions.receipt_effective_gas_price
-    ) as l2_gas_fee
-  from {{ ref('int_optimism_transactions') }} as transactions
-  left join blockchain_artifacts as to_artifacts
-    on LOWER(transactions.to_address) = to_artifacts.artifact_source_id
-  left join blockchain_artifacts as from_artifacts
-    on LOWER(transactions.from_address) = from_artifacts.artifact_source_id
-  where
-    transactions.input != '0x'
-    and transactions.block_timestamp >= {{ start }}
-),
-
-contract_invocations as (
-  select
-    time,
-    to_name,
-    to_type,
-    to_source_id,
-    from_name,
-    from_type,
-    from_source_id,
-    'OPTIMISM' as event_source,
-    'optimism' as to_namespace,
-    'optimism' as from_namespace,
-    SUM(l2_gas_fee) as total_l2_gas_used,
-    COUNT(*) as total_count,
-    SUM(case when receipt_status = 1 then 1 else 0 end) as success_count
-  from all_transactions
-  group by
-    time,
-    to_name,
-    to_type,
-    to_source_id,
-    from_name,
-    from_type,
-    from_source_id
-),
-
-all_events as (
-  select
-    time,
-    'CONTRACT_INVOCATION_DAILY_L2_GAS_USED' as event_type,
-    event_source,
-    to_name,
-    to_namespace,
-    to_type,
-    to_source_id,
-    from_name,
-    from_namespace,
-    from_type,
-    from_source_id,
-    total_l2_gas_used as amount
-  from contract_invocations
-  union all
-  select
-    time,
-    'CONTRACT_INVOCATION_DAILY_COUNT' as event_type,
-    event_source,
-    to_name,
-    to_namespace,
-    to_type,
-    to_source_id,
-    from_name,
-    from_namespace,
-    from_type,
-    from_source_id,
-    total_count as amount
-  from contract_invocations
-  union all
-  select
-    time,
-    'CONTRACT_INVOCATION_SUCCESS_DAILY_COUNT' as event_type,
-    event_source,
-    to_name,
-    to_namespace,
-    to_type,
-    to_source_id,
-    from_name,
-    from_namespace,
-    from_type,
-    from_source_id,
-    success_count as amount
-  from contract_invocations
-)
-
-select
-  time,
-  event_type,
-  event_source,
-  to_name,
-  to_namespace,
-  to_type,
-  to_source_id,
-  from_name,
-  from_namespace,
-  from_type,
-  from_source_id,
-  amount,
-  {{ oso_id('event_source', 'time', 'to_name', 'from_name') }}
-    as event_source_id
-from all_events
+{{ contract_invocation_events_with_l1("optimism", start) }}
diff --git a/warehouse/dbt/models/intermediate/events/int_events.sql b/warehouse/dbt/models/intermediate/events/int_events.sql
index 38f61f1e5..1c0836b84 100644
--- a/warehouse/dbt/models/intermediate/events/int_events.sql
+++ b/warehouse/dbt/models/intermediate/events/int_events.sql
@@ -43,7 +43,7 @@ with github_commits as (
     SPLIT(REPLACE(repository_name, "@", ""), "/")[SAFE_OFFSET(0)]
       as to_namespace,
     "REPOSITORY" as to_type,
-    CAST(repository_id as STRING) as to_source_id,
+    CAST(repository_id as STRING) as to_artifact_source_id,
     COALESCE(actor_login, author_email) as from_name,
     COALESCE(actor_login, author_email) as from_namespace,
     case
@@ -53,7 +53,7 @@ with github_commits as (
     case
      when actor_login is not null then CAST(actor_id as STRING)
      else author_email
-    end as from_source_id,
+    end as from_artifact_source_id,
     CAST(1 as FLOAT64) as amount
   from {{ ref('stg_github__distinct_commits_resolved_mergebot') }}
 ),
@@ -69,11 +69,11 @@ github_issues as (
     SPLIT(REPLACE(repository_name, "@", ""), "/")[SAFE_OFFSET(0)]
      as to_namespace,
     "REPOSITORY" as to_type,
-    CAST(repository_id as STRING) as to_source_id,
+    CAST(repository_id as STRING) as to_artifact_source_id,
     actor_login as from_name,
     actor_login as from_namespace,
     "GIT_USER" as from_type,
-    CAST(actor_id as STRING) as from_source_id,
+    CAST(actor_id as STRING) as from_artifact_source_id,
     CAST(1 as FLOAT64) as amount
   from {{ ref('stg_github__issues') }}
 ),
@@ -89,11 +89,11 @@ github_pull_requests as (
     SPLIT(REPLACE(repository_name, "@", ""), "/")[SAFE_OFFSET(0)]
      as to_namespace,
     "REPOSITORY" as to_type,
-    CAST(repository_id as STRING) as to_source_id,
+    CAST(repository_id as STRING) as to_artifact_source_id,
     actor_login as from_name,
     actor_login as from_namespace,
     "GIT_USER" as from_type,
-    CAST(actor_id as STRING) as from_source_id,
+    CAST(actor_id as STRING) as from_artifact_source_id,
     CAST(1 as FLOAT64) as amount
   from {{ ref('stg_github__pull_requests') }}
 ),
@@ -109,11 +109,11 @@ github_pull_request_merge_events as (
     SPLIT(REPLACE(repository_name, "@", ""), "/")[SAFE_OFFSET(0)]
      as to_namespace,
     "REPOSITORY" as to_type,
-    CAST(repository_id as STRING) as to_source_id,
+    CAST(repository_id as STRING) as to_artifact_source_id,
     actor_login as from_name,
     actor_login as from_namespace,
     "GIT_USER" as from_type,
-    CAST(actor_id as STRING) as from_source_id,
+    CAST(actor_id as STRING) as from_artifact_source_id,
     CAST(1 as FLOAT64) as amount
   from {{ ref('stg_github__pull_request_merge_events') }}
 ),
@@ -129,11 +129,11 @@ github_stars_and_forks as (
     SPLIT(REPLACE(repository_name, "@", ""), "/")[SAFE_OFFSET(0)]
      as to_namespace,
     "REPOSITORY" as to_type,
-    CAST(repository_id as STRING) as to_source_id,
+    CAST(repository_id as STRING) as to_artifact_source_id,
     actor_login as from_name,
     actor_login as from_namespace,
     "GIT_USER" as from_type,
-    CAST(actor_id as STRING) as from_source_id,
+    CAST(actor_id as STRING) as from_artifact_source_id,
     CAST(1 as FLOAT64) as amount
   from {{ ref('stg_github__stars_and_forks') }}
 ),
@@ -144,14 +144,16 @@ all_events as (
     event_type,
     event_source_id,
     event_source,
-    to_name,
-    to_namespace,
-    to_type,
-    to_source_id,
-    from_name,
-    from_namespace,
-    from_type,
-    from_source_id,
+    to_artifact_id,
+    to_artifact_name,
+    to_artifact_namespace,
+    to_artifact_type,
+    to_artifact_source_id,
+    from_artifact_id,
+    from_artifact_name,
+    from_artifact_namespace,
+    from_artifact_type,
+    from_artifact_source_id,
     amount
   from (
     select * from {{ ref('int_optimism_contract_invocation_events') }}
@@ -172,14 +174,16 @@ all_events as (
     event_type,
     event_source_id,
     event_source,
-    to_name,
-    to_namespace,
-    to_type,
-    to_source_id,
-    from_name,
-    from_namespace,
-    from_type,
-    from_source_id,
+    {{ oso_id("event_source", "to_artifact_source_id") }} as to_artifact_id,
+    to_name as to_artifact_name,
+    to_namespace as to_artifact_namespace,
+    to_type as to_artifact_type,
+    to_artifact_source_id,
+    {{ oso_id("event_source", "from_artifact_source_id") }} as from_artifact_id,
+    from_name as from_artifact_name,
+    from_namespace as from_artifact_namespace,
+    from_type as from_artifact_type,
+    from_artifact_source_id,
     amount
   from (
     select * from github_commits
@@ -196,16 +200,18 @@ all_events as (
 
 select
   time,
+  to_artifact_id,
+  from_artifact_id,
   UPPER(event_type) as event_type,
   CAST(event_source_id as STRING) as event_source_id,
   UPPER(event_source) as event_source,
-  LOWER(to_name) as to_artifact_name,
-  LOWER(to_namespace) as to_artifact_namespace,
-  UPPER(to_type) as to_artifact_type,
-  LOWER(to_source_id) as to_artifact_source_id,
-  LOWER(from_name) as from_artifact_name,
-  LOWER(from_namespace) as from_artifact_namespace,
-  UPPER(from_type) as from_artifact_type,
-  LOWER(from_source_id) as from_artifact_source_id,
+  LOWER(to_artifact_name) as to_artifact_name,
+  LOWER(to_artifact_namespace) as to_artifact_namespace,
+  UPPER(to_artifact_type) as to_artifact_type,
+  LOWER(to_artifact_source_id) as to_artifact_source_id,
+  LOWER(from_artifact_name) as from_artifact_name,
+  LOWER(from_artifact_namespace) as from_artifact_namespace,
+  UPPER(from_artifact_type) as from_artifact_type,
+  LOWER(from_artifact_source_id) as from_artifact_source_id,
   CAST(amount as FLOAT64) as amount
 from all_events
diff --git a/warehouse/dbt/models/intermediate/events/int_events_with_artifact_id.sql b/warehouse/dbt/models/intermediate/events/int_events_with_artifact_id.sql
index 71e2afcee..615757482 100644
--- a/warehouse/dbt/models/intermediate/events/int_events_with_artifact_id.sql
+++ b/warehouse/dbt/models/intermediate/events/int_events_with_artifact_id.sql
@@ -3,20 +3,5 @@
     materialized='ephemeral',
   )
 #}
-select
-  time,
-  event_type,
-  event_source_id,
-  event_source,
-  to_artifact_name,
-  to_artifact_namespace,
-  to_artifact_type,
-  to_artifact_source_id,
-  {{ oso_id("event_source", "to_artifact_source_id") }} as to_artifact_id,
-  from_artifact_name,
-  from_artifact_namespace,
-  from_artifact_type,
-  from_artifact_source_id,
-  {{ oso_id("event_source", "from_artifact_source_id") }} as from_artifact_id,
-  amount
+select *
 from {{ ref('int_events') }}
diff --git a/warehouse/oso_dagster/assets.py b/warehouse/oso_dagster/assets.py
index b9895dd60..509673590 100644
--- a/warehouse/oso_dagster/assets.py
+++ b/warehouse/oso_dagster/assets.py
@@ -4,7 +4,7 @@
 from dagster_dbt import DbtCliResource, dbt_assets, DagsterDbtTranslator
 from google.cloud.bigquery.schema import SchemaField
 
-from .constants import main_dbt_manifests, main_dbt_project_dir
+from .constants import main_dbt_manifests, main_dbt_project_dir, dbt_profiles_dir
 from .factories.goldsky import (
     GoldskyConfig,
     goldsky_asset,
@@ -54,6 +54,7 @@ def dbt_assets_from_manifests_map(
     internal_map = {}
     assets: List[AssetsDefinition] = []
     for target, manifest_path in manifests.items():
+        print(f"Target[{target}] using profiles dir {dbt_profiles_dir}")
         translator = CustomDagsterDbtTranslator(["dbt", target], internal_map)
 
 
@@ -63,7 +64,12 @@ def dbt_assets_from_manifests_map(
             dagster_dbt_translator=translator,
         )
         def _generated_dbt_assets(context: AssetExecutionContext, **kwargs):
-            dbt = DbtCliResource(project_dir=os.fspath(project_dir), target=target)
+            print(f"using profiles dir {dbt_profiles_dir}")
+            dbt = DbtCliResource(
+                project_dir=os.fspath(project_dir),
+                target=target,
+                profiles_dir=dbt_profiles_dir,
+            )
             yield from dbt.cli(["build"], context=context).stream()
 
         assets.append(_generated_dbt_assets)
@@ -71,13 +77,6 @@ def _generated_dbt_assets(context: AssetExecutionContext, **kwargs):
     return assets
 
 
-# @dbt_assets(
-#     manifest=production_dbt_manifest_path,
-#     dagster_dbt_translator=CustomDagsterDbtTranslator("oso"),
-# )
-# def production_dbt_assets(context: AssetExecutionContext, main_dbt: DbtCliResource):
-#     yield from main_dbt.cli(["build"], context=context).stream()
-
 all_dbt_assets = dbt_assets_from_manifests_map(
     main_dbt_project_dir,
     main_dbt_manifests,
@@ -89,14 +88,6 @@ def _generated_dbt_assets(context: AssetExecutionContext, **kwargs):
 )
 
 
-# @dbt_assets(
-#     manifest=source_dbt_manifest_path,
-#     dagster_dbt_translator=CustomDagsterDbtTranslator("sources"),
-# )
-# def source_dbt_assets(context: AssetExecutionContext, source_dbt: DbtCliResource):
-#     yield from source_dbt.cli(["build"], context=context).stream()
-
-
 base_blocks = goldsky_asset(
     GoldskyConfig(
         key_prefix="base",
diff --git a/warehouse/oso_dagster/constants.py b/warehouse/oso_dagster/constants.py
index fbf1b6615..792057f8d 100644
--- a/warehouse/oso_dagster/constants.py
+++ b/warehouse/oso_dagster/constants.py
@@ -23,8 +23,7 @@
       job_execution_time_seconds: 300
       job_retries: 1
       location: US
-      method: service-account
-      keyfile: %(service_account_path)s
+      method: oauth
       project: %(project_id)s
       threads: 32
     base_playground:
@@ -33,8 +32,7 @@
       job_execution_time_seconds: 300
      job_retries: 1
       location: US
-      method: service-account
-      keyfile: %(service_account_path)s
+      method: oauth
       project: %(project_id)s
       threads: 32
     playground:
@@ -43,15 +41,19 @@
       job_execution_time_seconds: 300
       job_retries: 1
       location: US
-      method: service-account
-      keyfile: %(service_account_path)s
+      method: oauth
       project: %(project_id)s
       threads: 32
 """
 
 
+def get_profiles_dir():
+    return os.environ.get("DBT_PROFILES_DIR", os.path.expanduser("~/.dbt"))
+
+
 def generate_profile_and_auth():
-    profiles_path = os.path.expanduser("~/.dbt/profiles.yml")
+    profiles_path = os.path.join(get_profiles_dir(), "profiles.yml")
+    Path(os.path.dirname(profiles_path)).mkdir(parents=True, exist_ok=True)
 
     service_account_path = os.path.expanduser("~/service-account.json")
 
@@ -59,17 +61,12 @@ def generate_profile_and_auth():
 
     print(f"writing dbt profile to {profiles_path}")
 
-    token_url = "http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/default/token"
-    r = requests.get(
-        token_url, allow_redirects=True, headers={"Metadata-Flavor": "Google"}
-    )
-    open(service_account_path, "wb").write(r.content)
     project_id_url = (
         "http://metadata.google.internal/computeMetadata/v1/project/project-id"
    )
     project_id = requests.get(
         project_id_url, allow_redirects=True, headers={"Metadata-Flavor": "Google"}
-    ).content
+    ).content.decode("utf-8")
     with open(profiles_path, "w") as f:
         f.write(
             generated_profiles_yml
@@ -111,4 +108,5 @@ def load_dbt_manifests(targets: List[str]) -> Dict[str, str]:
     return manifests
 
 
+dbt_profiles_dir = get_profiles_dir()
 main_dbt_manifests = load_dbt_manifests(["production", "base_playground", "playground"])
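
Note on the constants.py change above: with the `method: service-account` / `keyfile:` pair replaced by `method: oauth`, the generated profiles.yml no longer points at a downloaded service-account key; dbt's BigQuery adapter instead relies on Application Default Credentials (the GKE workload identity in the cluster, or the credentials file written by google-github-actions/auth in CI), which is why the metadata-server token download was removed. A rough sketch of one rendered target under the new template is shown below; the profile name, the `target:` key, and the `type: bigquery` line are assumptions (they are not visible in the hunk), and the project id is a placeholder for the value fetched from the metadata server:

    oso:                                # assumed profile name
      target: production
      outputs:
        production:
          type: bigquery                # implied by the BigQuery adapter, not shown in the hunk
          method: oauth
          project: example-gcp-project  # rendered from %(project_id)s
          location: US
          threads: 32
          job_execution_time_seconds: 300
          job_retries: 1

Because get_profiles_dir() honours DBT_PROFILES_DIR, pointing that variable at a writable directory (and passing the same value to DbtCliResource via profiles_dir, as the assets.py change does) keeps Dagster and ad-hoc dbt runs reading the same generated profile.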