Update Hasura to use dbt metadata (#1188)
* Call dbt from inside the hasura table meta generator

* Update workflows
ravenac95 authored Apr 5, 2024
1 parent 210509a commit 040467b
Showing 10 changed files with 134 additions and 38 deletions.
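The core of this change is in apps/hasura/src/genTables.ts: instead of globbing warehouse/dbt/models/marts/ for .sql files, the generator now shells out to `dbt ls` and keeps only the mart models whose `config.meta.sync_to_cloudsql` flag is true. A minimal sketch of how a model would opt in via a dbt properties file (the model name and file layout here are illustrative, not taken from this commit):

```yaml
# models/marts/schema.yml (illustrative example)
models:
  - name: repos_by_project
    config:
      meta:
        # Only models with this flag set are picked up by the Hasura table generator.
        sync_to_cloudsql: true
```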
14 changes: 14 additions & 0 deletions .github/workflows/deploy-hasura.yml
@@ -4,6 +4,7 @@ env:
HASURA_GRAPHQL_ADMIN_SECRET: ${{ secrets.HASURA_GRAPHQL_ADMIN_SECRET }}
HASURA_GRAPHQL_ENDPOINT: ${{ vars.HASURA_GRAPHQL_ENDPOINT }}
HASURA_GRAPHQL_DATABASE_URL: ${{ secrets.HASURA_GRAPHQL_DATABASE_URL }}
GOOGLE_CREDENTIALS_JSON: ${{ vars.GOOGLE_TEST_DUMMY_CREDENTIALS_JSON }}

# Trigger the workflow when:
on:
@@ -27,18 +28,31 @@ jobs:
uses: actions/checkout@v3
with:
fetch-depth: 1

- name: Setup pnpm
uses: pnpm/action-setup@v2
with:
version: 8
run_install: |
- recursive: true
args: [--frozen-lockfile, --strict-peer-dependencies]
- name: Set up Node.js 20
uses: actions/setup-node@v3
with:
cache: "pnpm"
node-version: "20.x"

- name: Login to google
uses: 'google-github-actions/auth@v2'
with:
credentials_json: '${{ secrets.GOOGLE_BQ_ADMIN_CREDENTIALS_JSON }}'
create_credentials_file: true

- name: Setup dbt profile
run: |
bash .github/scripts/create-dbt-profile.sh ${GOOGLE_APPLICATION_CREDENTIALS}
- name: Build
run: pnpm build:hasura
- name: Deploy
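The new "Setup dbt profile" step exists because the table generator now invokes dbt, which needs a profile pointing at BigQuery. create-dbt-profile.sh itself is not part of this diff; the profile it writes would plausibly look something like the sketch below (profile, target, project, and dataset names are assumptions):

```yaml
# profiles.yml (illustrative only -- not the actual output of create-dbt-profile.sh)
default:
  target: production
  outputs:
    production:
      type: bigquery
      method: service-account
      # the GOOGLE_APPLICATION_CREDENTIALS file written by the auth step
      keyfile: /path/to/credentials.json
      project: example-gcp-project
      dataset: marts
      threads: 4
```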
7 changes: 7 additions & 0 deletions .github/workflows/refresh-test-credentials.yml
@@ -63,6 +63,13 @@ jobs:
cd ops/external-prs &&
bash scripts/rotate-service-account.sh [email protected] bigquery-admin.json &&
pnpm tools refresh-gcp-credentials ${{ github.repository }} external-prs-app bigquery-admin.json GOOGLE_BQ_ADMIN_CREDENTIALS_JSON
- name: Refresh credentials for the bigquery-admin user on the deploy environment
shell: bash
run: |
cd ops/external-prs &&
bash scripts/rotate-service-account.sh [email protected] bigquery-admin.json &&
pnpm tools refresh-gcp-credentials ${{ github.repository }} deploy bigquery-admin.json GOOGLE_BQ_ADMIN_CREDENTIALS_JSON
rebuild-docker-public-vars:
name: rebuild-docker-public-vars
@@ -1,5 +1,5 @@
table:
-  name: github_metrics_by_project
+  name: deployers_by_project
schema: public
select_permissions:
- role: anonymous
@@ -1,5 +1,5 @@
table:
-  name: github_metrics_by_collection
+  name: event_totals_by_project
schema: public
select_permissions:
- role: anonymous
@@ -1,5 +1,5 @@
table:
-  name: onchain_metrics_by_project_arbitrum
+  name: pm_dev_months
schema: public
select_permissions:
- role: anonymous
@@ -1,5 +1,5 @@
table:
-  name: onchain_metrics_by_collection_arbitrum
+  name: pm_new_contribs
schema: public
select_permissions:
- role: anonymous
@@ -0,0 +1,22 @@
table:
name: repos_by_project
schema: public
select_permissions:
- role: anonymous
permission:
columns: "*"
filter: {}
allow_aggregations: false
comment: ""
- role: user
permission:
columns: "*"
filter: {}
allow_aggregations: false
comment: ""
- role: developer
permission:
columns: "*"
filter: {}
allow_aggregations: true
comment: ""
49 changes: 27 additions & 22 deletions apps/hasura/metadata/databases/cloudsql/tables/tables.yaml
@@ -1,31 +1,36 @@
- "!include onchain_metrics_by_collection.yaml"
- "!include onchain_metrics_by_project.yaml"
- "!include artifacts.yaml"
- "!include artifacts_by_project.yaml"
- "!include code_metrics_by_collection.yaml"
- "!include code_metrics_by_project.yaml"
- "!include collections.yaml"
- "!include deployers_by_project.yaml"
- "!include event_totals_by_project.yaml"
- "!include event_types.yaml"
- "!include first_contribution_to_project.yaml"
- "!include last_contribution_to_project.yaml"
- "!include users_monthly_to_project.yaml"
- "!include events_weekly_from_artifact.yaml"
- "!include events_weekly_from_collection.yaml"
- "!include events_weekly_from_project.yaml"
- "!include events_weekly_to_artifact.yaml"
- "!include events_weekly_to_collection.yaml"
- "!include events_weekly_to_project.yaml"
- "!include events_monthly_from_artifact.yaml"
- "!include events_monthly_from_collection.yaml"
- "!include events_monthly_from_project.yaml"
- "!include events_monthly_to_artifact.yaml"
- "!include events_monthly_to_collection.yaml"
- "!include events_monthly_to_project.yaml"
- "!include events_daily_from_artifact.yaml"
- "!include events_daily_from_collection.yaml"
- "!include events_daily_from_project.yaml"
- "!include events_daily_to_artifact.yaml"
- "!include events_daily_to_collection.yaml"
- "!include events_daily_to_project.yaml"
- "!include artifacts.yaml"
- "!include artifacts_by_project.yaml"
- "!include collections.yaml"
- "!include events_monthly_from_artifact.yaml"
- "!include events_monthly_from_collection.yaml"
- "!include events_monthly_from_project.yaml"
- "!include events_monthly_to_artifact.yaml"
- "!include events_monthly_to_collection.yaml"
- "!include events_monthly_to_project.yaml"
- "!include events_weekly_from_artifact.yaml"
- "!include events_weekly_from_collection.yaml"
- "!include events_weekly_from_project.yaml"
- "!include events_weekly_to_artifact.yaml"
- "!include events_weekly_to_collection.yaml"
- "!include events_weekly_to_project.yaml"
- "!include first_contribution_to_project.yaml"
- "!include last_contribution_to_project.yaml"
- "!include onchain_metrics_by_collection.yaml"
- "!include onchain_metrics_by_project.yaml"
- "!include pm_dev_months.yaml"
- "!include pm_new_contribs.yaml"
- "!include projects.yaml"
- "!include projects_by_collection.yaml"
- "!include code_metrics_by_collection.yaml"
- "!include code_metrics_by_project.yaml"
- "!include repos_by_project.yaml"
- "!include users_monthly_to_project.yaml"
4 changes: 2 additions & 2 deletions apps/hasura/package.json
@@ -17,15 +17,15 @@
"node": ">=20"
},
"scripts": {
"build": "tsc && pnpm metadata:genTables",
"build": "tsc",
"lint": "tsc --noEmit && pnpm lint:eslint && pnpm lint:prettier",
"lint:eslint": "eslint --ignore-path ../../.gitignore --max-warnings 0 .",
"lint:prettier": "prettier --ignore-path ../../.gitignore --log-level warn --check **/*.{js,jsx,ts,tsx,sol,md,json}",
"metadata:genTables": "node --loader ts-node/esm src/genTables.ts",
"metadata:pull": "hasura metadata export",
"metadata:reload": "hasura metadata reload",
"metadata:apply": "hasura metadata apply",
"deploy": "pnpm metadata:reload && pnpm metadata:apply"
"deploy": "pnpm metadata:genTables && pnpm metadata:reload && pnpm metadata:apply"
},
"keywords": [],
"devDependencies": {
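With metadata:genTables moved from the build script into deploy, the table metadata is regenerated at deploy time. Since genTables.ts shells out to the repo's .venv/bin/dbt and throws if DBT_TARGET is unset, a local deploy would look roughly like this (the target name is an assumption; the repo's Python venv must have dbt installed):

```sh
# Illustrative local run, from a repo checkout
cd apps/hasura
DBT_TARGET=production pnpm run deploy   # genTables -> metadata:reload -> metadata:apply
```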
68 changes: 58 additions & 10 deletions apps/hasura/src/genTables.ts
@@ -2,16 +2,15 @@ import path from "node:path";
import fs from "node:fs/promises";
import { fileURLToPath } from "node:url";
import * as yaml from "yaml";
import { exec } from "node:child_process";
import * as util from "util";

const execPromise = util.promisify(exec);

const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
// YAML file extension
const EXTENSION = ".yaml";
// Recursively scan this directory for database tables
const modelDir = path.resolve(
__dirname,
"../../../warehouse/dbt/models/marts/",
);
// Where to store all table configs
const tablesDir = path.resolve(
__dirname,
@@ -79,14 +78,63 @@ const createConfig = (name: string): TableConfig => ({
],
});

type ModelConfig = {
name: string;
config: {
meta: {
sync_to_cloudsql: boolean;
};
};
};

async function main(): Promise<void> {
console.log(`Generating tables from ${modelDir}`);
const target = process.env.DBT_TARGET;
if (!target) {
throw new Error("specify a DBT_TARGET");
}
console.log(`Generating tables from dbt`);

// FIXME... this isn't very portable
// Run dbt to get the json
const repoRoot = path.resolve("../../");
const modelsList = await execPromise(
`${repoRoot}/.venv/bin/dbt ls -q --output json --select marts.* --target ${target} --resource-type model`,
{
cwd: repoRoot,
},
);

const modelsConfigRaw = modelsList.stdout.split("\n");
const modelConfigs: ModelConfig[] = [];
for (const raw of modelsConfigRaw) {
const trimmed = raw.trim();
if (trimmed === "") {
continue;
}
const modelConfig = JSON.parse(trimmed) as {
name: string;
config: {
meta: {
sync_to_cloudsql: boolean;
};
};
};
modelConfigs.push(modelConfig);
}
const filteredConfigs = modelConfigs.filter((c) => {
return c.config.meta.sync_to_cloudsql === true;
});

const tableNames = filteredConfigs.map((c) => {
return c.name;
});

// Recursively scan all files in the model directory
const allFiles = await fs.readdir(modelDir, { recursive: true });
//const allFiles = await fs.readdir(modelDir, { recursive: true });
// Get the basename as the table name
const tableNames = allFiles
.filter((f) => f.endsWith(".sql"))
.map((f) => path.basename(f, ".sql"));
// const tableNames = allFiles
// .filter((f) => f.endsWith(".sql"))
// .map((f) => path.basename(f, ".sql"));
console.log("Tables:");
console.log(tableNames);
// Write the list of tables
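For reference, `dbt ls -q --output json` emits one JSON object per selected model per line, and the script parses each non-empty line and reads just two fields. Trimmed to those fields (real dbt output carries many more keys, and the model name here is only an example), a line looks roughly like:

```json
{"name": "repos_by_project", "config": {"meta": {"sync_to_cloudsql": true}}}
```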
