Fix ci default #1076

Merged · 8 commits · Mar 19, 2024

10 changes: 8 additions & 2 deletions .github/workflows/ci-default.yml
@@ -61,6 +61,7 @@ jobs:
ref: ${{ github.event.pull_request.head.sha }}
# Fetch all history so gitlint can check the relevant commits.
fetch-depth: "0"

- name: Setup pnpm
uses: pnpm/action-setup@v2
with:
@@ -86,10 +87,10 @@ jobs:
with:
version: nightly-87bc53fc6c874bd4c92d97ed180b949e3a36d78c

- name: Authenticate to google with a NOOP user
- name: Authenticate to google with a test-dummy user
uses: 'google-github-actions/auth@v2'
with:
credentials_json: '${{ secrets.GOOGLE_NOOP_CREDENTIALS_JSON }}'
credentials_json: '${{ vars.GOOGLE_TEST_DUMMY_CREDENTIALS_JSON }}'
create_credentials_file: true

- name: Setup dbt profile
@@ -100,6 +101,11 @@
run: |
bash .github/scripts/run-supabase-local.sh apps/frontend

- name: Check if the Algolia API key is empty and set it to a dummy value if it is
shell: bash
run: |
echo "NEXT_PUBLIC_ALGOLIA_API_KEY=${NEXT_PUBLIC_ALGOLIA_API_KEY:-xxxx}" >> "$GITHUB_ENV"

- name: Build
run: |
pnpm build
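For context, the new Algolia step relies on bash's default-value parameter expansion: `${VAR:-fallback}` expands to the value of `VAR` when it is set and non-empty, and to `fallback` otherwise, so an unset `NEXT_PUBLIC_ALGOLIA_API_KEY` gets the placeholder `xxxx` written to `$GITHUB_ENV`. A minimal sketch of the behaviour:

```bash
#!/usr/bin/env bash
# Unset (or empty) variable falls back to the placeholder.
unset NEXT_PUBLIC_ALGOLIA_API_KEY
echo "${NEXT_PUBLIC_ALGOLIA_API_KEY:-xxxx}"   # -> xxxx

# A configured key passes through unchanged.
NEXT_PUBLIC_ALGOLIA_API_KEY="real-key"
echo "${NEXT_PUBLIC_ALGOLIA_API_KEY:-xxxx}"   # -> real-key
```
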
64 changes: 64 additions & 0 deletions .github/workflows/refresh-test-credentials.yml
@@ -0,0 +1,64 @@
# We need to push credentials into GitHub that have no actual permissions for
# anything. Just in case, those credentials also expire every hour: we do this
# by having a GCP project where service account keys expire every hour. This
# allows CI to run jobs that need a GCP service account but don't actually
# need any access, so those checks can run for contributors without write
# access to the repo.

# This workflow runs every 30 minutes so that the credentials are refreshed
# well before they expire, even if an individual run fails.
name: refresh-test-credentials
env:
BIGQUERY_DATASET_ID: ${{ vars.BIGQUERY_DATASET_ID }}

# This workflow only runs on a schedule and via manual dispatch.
on:
# Allows you to run this workflow manually from the Actions tab
workflow_dispatch:
schedule:

# Schedule every 30 mins
- cron: '*/30 * * * *'

jobs:
refresh-test-credentials:
name: refresh-test-credentials
environment: ops
runs-on: ubuntu-latest

steps:
- name: Checkout code
uses: actions/checkout@v3
with:
fetch-depth: 1

- name: 'Set up Cloud SDK'
uses: 'google-github-actions/setup-gcloud@v2'
with:
version: '>= 363.0.0'

- name: Authenticate to google with an ops user
uses: 'google-github-actions/auth@v2'
with:
credentials_json: '${{ secrets.GOOGLE_OPS_CREDENTIALS_JSON }}'
create_credentials_file: true

- name: Setup external pr tools
uses: ./.github/workflows/setup-external-pr-tools

# These credentials are not supposed to be secrets
- name: Refresh credentials for the oso-test-dummy user on the testing environment
shell: bash
run: |
cd ops/external-prs &&
gcloud iam service-accounts keys create dummy.json --iam-account=oso-test-dummy@oso-pull-requests.iam.gserviceaccount.com &&
pnpm tools refresh-gcp-credentials --secret=false ${{ github.repository }} testing dummy.json GOOGLE_TEST_DUMMY_CREDENTIALS_JSON

# These credentials are intended to be secret
- name: Refresh credentials for the bigquery-admin user on the external-prs-app environment
shell: bash
run: |
cd ops/external-prs &&
gcloud iam service-accounts keys create bigquery-admin.json --iam-account=bigquery-admin@oso-pull-requests.iam.gserviceaccount.com &&
pnpm tools refresh-gcp-credentials ${{ github.repository }} external-prs-app bigquery-admin.json GOOGLE_BQ_ADMIN_CREDENTIALS_JSON
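As a quick sanity check that the one-hour key expiry is actually being applied, the keys on the dummy service account can be listed with gcloud (a sketch; it assumes you are authenticated with access to the oso-pull-requests project):

```bash
# Each listed key should expire roughly one hour after it was created,
# matching the org policy configured in ops/tf-modules/test-project.
gcloud iam service-accounts keys list \
  --iam-account=oso-test-dummy@oso-pull-requests.iam.gserviceaccount.com \
  --project=oso-pull-requests
```
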
2 changes: 2 additions & 0 deletions ops/external-prs/package.json
@@ -30,8 +30,10 @@
"typescript": "^5.3.3"
},
"dependencies": {
"@types/libsodium-wrappers": "^0.7.13",
"@types/yargs": "^17.0.32",
"chalk": "^5.3.0",
"libsodium-wrappers": "^0.7.13",
"octokit": "^3.1.0",
"winston": "^3.11.0",
"yaml": "^2.3.1",
122 changes: 116 additions & 6 deletions ops/external-prs/src/cli.ts
@@ -3,6 +3,7 @@ import { Argv, ArgumentsCamelCase } from "yargs";
import { hideBin } from "yargs/helpers";
import { App, Octokit } from "octokit";
import * as fsPromise from "fs/promises";
import _sodium from "libsodium-wrappers";

import { logger } from "./utils/logger.js";
import { handleError } from "./utils/error.js";
@@ -60,6 +61,13 @@ interface InitializePRCheck extends BaseArgs {
sha: string;
}

interface RefreshGCPCredentials extends BaseArgs {
environment: string;
credsPath: string;
secret: boolean;
name: string;
}

interface TestDeployArgs extends BaseArgs {}

interface TestDeploySetupArgs extends TestDeployArgs {}
@@ -122,6 +130,13 @@ async function parseDeployComment(args: ParseCommentArgs) {
owner: args.repo.owner,
comment_id: args.comment,
});
// Only act on deploy comments from repo owners, collaborators, or members.
if (
["OWNER", "COLLABORATOR", "MEMBER"].indexOf(
comment.data.author_association,
) === -1
) {
process.exit(1);
}
const body = comment.data.body || "";
const match = body.match(/\/deploy\s+([0-9a-f]{6,40})/);
if (!match) {
@@ -145,6 +160,86 @@
});
}

async function fileToBase64(filePath: string): Promise<string> {
try {
const fileBuffer = await fsPromise.readFile(filePath);
const base64String = fileBuffer.toString("base64");
return base64String;
} catch (error) {
logger.error("Error reading file:", error);
throw error;
}
}

async function refreshCredentials(args: RefreshGCPCredentials) {
logger.info({
message: "setting up credentials",
environment: args.environment,
name: args.name,
});

const app = args.app;

const octo = await getOctokitFor(app, args.repo);
if (!octo) {
throw new Error("No repo found");
}

const repo = await octo.rest.repos.get({
repo: args.repo.name,
owner: args.repo.owner,
});

const creds = await fileToBase64(args.credsPath);

if (args.secret) {
// The github secret must use libsodium's crypto_box_seal for the
// `encrypted_value`
await _sodium.ready;

const pkey = await octo.rest.actions.getEnvironmentPublicKey({
repository_id: repo.data.id,
environment_name: args.environment,
});

const messageBytes = Buffer.from(creds);
const keyBytes = Buffer.from(pkey.data.key, "base64");
const encryptedBytes = _sodium.crypto_box_seal(messageBytes, keyBytes);
const ciphertext = Buffer.from(encryptedBytes).toString("base64");

await octo.rest.actions.createOrUpdateEnvironmentSecret({
repository_id: repo.data.id,
environment_name: args.environment,
secret_name: args.name,
encrypted_value: ciphertext,
key_id: pkey.data.key_id,
});
} else {
try {
const currentVar = await octo.rest.actions.getEnvironmentVariable({
repository_id: repo.data.id,
environment_name: args.environment,
name: args.name,
});
if (currentVar) {
await octo.rest.actions.deleteEnvironmentVariable({
repository_id: repo.data.id,
environment_name: args.environment,
name: args.name,
});
}
} catch (e) {
logger.info("no existing var found");
}
await octo.rest.actions.createEnvironmentVariable({
repository_id: repo.data.id,
environment_name: args.environment,
name: args.name,
value: creds,
});
}
}

async function testDeploySetup(_args: TestDeployArgs) {
console.log("setup");
// This should create a new public dataset inside a "testing" project
@@ -190,12 +285,6 @@ function testDeployGroup(group: Argv) {
.demandCommand();
}

/**
* When adding a new fetcher, please remember to add it to both this registry and yargs
*/
export const FETCHER_REGISTRY = [
//NpmDownloadsInterface,
];
const cli = yargs(hideBin(process.argv))
.env("PR_TOOLS")
.positional("repo", {
@@ -260,6 +349,27 @@ const cli = yargs(hideBin(process.argv))
},
(args) => handleError(parseDeployComment(args)),
)
.command<RefreshGCPCredentials>(
"refresh-gcp-credentials <repo> <environment> <creds-path> <name>",
"Refresh creds",
(yags) => {
yags.positional("environment", {
type: "string",
});
yags.positional("creds-path", {
type: "string",
});
yags.positional("name", {
type: "string",
});
yags.option("secret", {
type: "boolean",
default: true,
});
yags.boolean("secret");
},
(args) => handleError(refreshCredentials(args)),
)
.command<TestDeployArgs>(
"test-deploy",
"Test deployment commands",
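For reference, the new yargs command takes the repository, environment, credentials path, and secret/variable name as positionals, with `--secret` defaulting to true. A hedged usage sketch (the repository name below is a placeholder, and the CLI expects its GitHub App settings via `PR_TOOLS_`-prefixed environment variables because of `.env("PR_TOOLS")`):

```bash
# Store the credentials file as an encrypted environment secret (the default).
pnpm tools refresh-gcp-credentials my-org/my-repo external-prs-app bigquery-admin.json GOOGLE_BQ_ADMIN_CREDENTIALS_JSON

# Store it as a plain (non-secret) environment variable instead.
pnpm tools refresh-gcp-credentials --secret=false my-org/my-repo testing dummy.json GOOGLE_TEST_DUMMY_CREDENTIALS_JSON
```
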
3 changes: 3 additions & 0 deletions ops/tf-modules/test-project/README.md
@@ -0,0 +1,3 @@
# test-project

Sets up a test project that has short TTLs for service accounts.
71 changes: 71 additions & 0 deletions ops/tf-modules/test-project/main.tf
@@ -0,0 +1,71 @@
terraform {
required_providers {
google-beta = {
version = "~> 5.19.0"
}
google = {
version = "~> 5.21.0"
}
}
}

resource "google_org_policy_policy" "short_iam_ttl" {
provider = google
name = "projects/${google_project.project.name}/policies/iam.serviceAccountKeyExpiryHours"
parent = "projects/${google_project.project.name}"

spec {
#reset = true
inherit_from_parent = false
rules {
values {
allowed_values = ["1h"]
}
}
}
}

resource "google_project" "project" {
project_id = var.project_name
name = var.project_name
org_id = var.organization_id
}

##
# Dummy service account
#
# This is used to create a service account that has no permissions at all. This
# is necessary for things like sqlfluff and dbt on the ci-default pipeline
##
resource "google_service_account" "dummy_sa" {
project = google_project.project.name
account_id = "oso-test-dummy"
display_name = "Dummy account for test pipelines"
}

##
# BigQuery admin
#
# A BigQuery admin user that can create datasets
##
resource "google_service_account" "bq_admin" {
project = google_project.project.name
account_id = "bigquery-admin"
display_name = "BigQuery admin for the test account"
}

resource "google_project_iam_member" "bq_admin_binding" {
project = google_project.project.id
role = "roles/bigquery.admin"

member = "serviceAccount:${google_service_account.bq_admin.email}"
}

resource "google_project_iam_member" "admins" {
project = google_project.project.id
role = "roles/owner"

for_each = toset(var.admin_principals)

member = "serviceAccount:${each.key}"
}
14 changes: 14 additions & 0 deletions ops/tf-modules/test-project/variables.tf
@@ -0,0 +1,14 @@
variable "project_name" {
type = string
description = "The name to use for the project"
}

variable "organization_id" {
type = string
description = "The org id"
}

variable "admin_principals" {
type = list(string)
description = "A list of gcp princpals that have admin privileges on this project"
}
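
A hypothetical instantiation of the new module, for illustration only (the module label, source path, org id, and admin principals below are placeholders, not values taken from this PR):

```hcl
module "test_project" {
  source = "../../tf-modules/test-project"

  project_name     = "oso-pull-requests"
  organization_id  = "123456789012"
  admin_principals = ["ops-admin@example-project.iam.gserviceaccount.com"]
}
```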