Merge pull request #11 from kddejong/feature/schema/releases
Feature/schema/releases
kddejong authored Aug 21, 2024
2 parents 5cd17ab + 2606bea commit eeeb6cc
Showing 8 changed files with 341 additions and 12 deletions.
31 changes: 31 additions & 0 deletions .github/workflows/cd-release.yaml
@@ -0,0 +1,31 @@
name: "[CD] Create release"
on:
push:
tags:
- "v1*"

jobs:
build:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: "3.x"
- name: Build
run: |
pip install -e .
scripts/release/generator.py
scripts/release/changelog.py --version ${{ github.ref_name }}
- name: Release
uses: softprops/action-gh-release@v2
if: startsWith(github.ref, 'refs/tags/v1')
with:
name: Release ${{ github.ref_name }}
token: ${{ secrets.GH_PAT }}
body_path: ${{ github.workspace }}/build/CHANGELOG.txt
files: |
build/schemas-cfnlint.zip
build/schemas-draft7.zip
2 changes: 1 addition & 1 deletion .github/workflows/ci-pr-coverage.yaml
@@ -31,7 +31,7 @@ jobs:
ref: ${{ github.event.workflow_run.head_sha }}
path: repo_clone
- name: Upload coverage report
- if: '!cancelled()'
+ if: "!cancelled()"
uses: codecov/codecov-action@v3
with:
override_commit: ${{ github.event.workflow_run.head_sha }}
2 changes: 1 addition & 1 deletion .github/workflows/ci-pr.yaml
@@ -8,7 +8,7 @@ jobs:
strategy:
matrix:
os: [ubuntu-latest, macos-latest, windows-latest]
python: [ "3.8", "3.9", "3.10", "3.11", "3.12" ]
python: ["3.8", "3.9", "3.10", "3.11", "3.12"]

steps:
- uses: actions/checkout@v4
9 changes: 4 additions & 5 deletions .github/workflows/maintenance-v1.yaml
@@ -1,20 +1,19 @@
name: Automated Maintenance v1
on:
schedule:
- - cron: '0 0,6,12,18 * * *'
+ - cron: "0 0,6,12,18 * * *"
workflow_dispatch: # Enables on-demand/manual triggering: https://docs.github.com/en/free-pro-team@latest/actions/managing-workflow-runs/manually-running-a-workflow
jobs:
job:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with:
- ref: 'main'
+ ref: "main"
- uses: actions/setup-python@v5
with:
python-version: 3
-      -
-        id: maintenance
+      - id: maintenance
run: |
latest_sam_cli=`curl -s https://api.github.com/repos/aws/aws-sam-cli/releases/latest | jq -r .tag_name | cut -c 2-`
latest=`curl "https://pypi.org/pypi/aws-sam-cli/$latest_sam_cli/json" -s | jq -r '.info.requires_dist[] | select(contains("aws-sam-translator"))' | cut -c 21-`
@@ -44,4 +43,4 @@ jobs:
Automated changes by [create-pull-request](https://github.com/peter-evans/create-pull-request) GitHub action
delete-branch: true
- title: chore(schemas) - Update CloudFormation schemas to ${{ steps.maintenance.outputs.date }}
+ title: Update CloudFormation schemas to ${{ steps.maintenance.outputs.date }}
95 changes: 95 additions & 0 deletions scripts/release/_translator.py
@@ -0,0 +1,95 @@
"""
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
SPDX-License-Identifier: MIT-0
"""

from __future__ import annotations

# Translate cfn-lint unique keywords into json schema keywords
import logging
from collections import deque
from typing import Any, Iterator

from cfnlint.schema import PROVIDER_SCHEMA_MANAGER

logger = logging.getLogger(__name__)


def required_xor(properties: list[str]) -> dict[str, list[Any]]:
    return {"oneOf": [{"required": [p]} for p in properties]}


def dependent_excluded(properties: dict[str, list[str]]) -> dict[str, Any]:
    dependencies: dict[str, Any] = {"dependencies": {}}
    for prop, exclusions in properties.items():
        dependencies["dependencies"][prop] = {"not": {"anyOf": []}}
        for exclusion in exclusions:
            dependencies["dependencies"][prop]["not"]["anyOf"].append(
                {"required": [exclusion]}  # draft-7 "required" takes an array of names
            )

    return dependencies


_keywords = {
    "requiredXor": required_xor,
    "dependentExcluded": dependent_excluded,
}


def _find_keywords(schema: Any) -> Iterator[deque[str | int]]:
    if isinstance(schema, list):
        for i, item in enumerate(schema):
            for path in _find_keywords(item):
                path.appendleft(i)
                yield path
    elif isinstance(schema, dict):
        for key, value in schema.items():
            if key in _keywords:
                yield deque([key, value])
            else:
                for path in _find_keywords(value):
                    path.appendleft(key)
                    yield path


def translator(resource_type: str, region: str):
    keywords = list(
        _find_keywords(
            PROVIDER_SCHEMA_MANAGER.get_resource_schema(
                region=region, resource_type=resource_type
            ).schema
        )
    )

    for keyword in keywords:
        value = keyword.pop()
        key = keyword.pop()
        if not keyword:
            path = ""
        else:
            path = f"/{'/'.join(str(k) for k in keyword)}"

        patch = [
            {
                "op": "add",
                "path": f"{path}/allOf",
                "value": [],
            }
        ]

        logger.info(f"Patch {resource_type} add allOf for {key}")
        PROVIDER_SCHEMA_MANAGER._schemas[region][resource_type].patch(patches=patch)

        patch = [
            {
                "op": "remove",
                "path": f"{path}/{key}",
            },
            {"op": "add", "path": f"{path}/allOf/-", "value": _keywords[key](value)},  # type: ignore
        ]

        logger.info(f"Patch {resource_type} replace for {key}")
        PROVIDER_SCHEMA_MANAGER._schemas[region][resource_type].patch(patches=patch)
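The net effect of this translator is easiest to see on a small example. Below is a minimal standalone sketch (no cfnlint imports; the schema content is illustrative rather than taken from a real resource) of the rewrite it applies: the cfn-lint-only keyword is removed and its draft-7 equivalent is appended to an allOf at the same path.

# Minimal standalone sketch of the translation: drop the cfn-lint keyword and
# append its draft-7 equivalent to an "allOf" at the same location.
toy_schema = {
    "properties": {
        "Subnets": {"type": "array"},
        "SubnetMappings": {"type": "array"},
    },
    "requiredXor": ["Subnets", "SubnetMappings"],
}

# draft-7 form of requiredXor: exactly one of the listed properties may be set
one_of = {"oneOf": [{"required": [p]} for p in toy_schema.pop("requiredXor")]}
toy_schema.setdefault("allOf", []).append(one_of)

print(toy_schema["allOf"])
# [{'oneOf': [{'required': ['Subnets']}, {'required': ['SubnetMappings']}]}]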
32 changes: 32 additions & 0 deletions scripts/release/changelog.py
@@ -0,0 +1,32 @@
#!/usr/bin/env python
"""
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
SPDX-License-Identifier: MIT-0
"""

import argparse
from pathlib import Path

parser = argparse.ArgumentParser()
parser.add_argument("--version")
args = parser.parse_args()

with open("CHANGELOG.md", "r") as f:
text = f.read()

output = []

for line in text.splitlines():

if line.startswith("### "):
if args.version == line[3:].strip():
found = True
elif found:
break
else:
if found:
output.append(line)

build_dir = Path("build")
with open(build_dir / "CHANGELOG.md", "w") as f:
f.write("\n".join(output))
176 changes: 176 additions & 0 deletions scripts/release/generator.py
@@ -0,0 +1,176 @@
#!/usr/bin/env python
"""
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
SPDX-License-Identifier: MIT-0
"""
import logging
import tarfile
from collections import deque
from pathlib import Path

import _translator

from cfnlint.helpers import REGIONS, ToPy, format_json_string, load_plugins
from cfnlint.schema import PROVIDER_SCHEMA_MANAGER

logging.basicConfig(
    level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s"
)
logger = logging.getLogger(__name__)


def _get_schema_path(schema, path):
    s = schema.schema
    schema_path = deque([])
    while path:
        key = path.popleft()
        if key == "*":
            schema_path.append("items")
            s = s["items"]
        else:
            s = s["properties"][key]
            schema_path.extend(["properties", key])

        pointer = s.get("$ref")
        if pointer:
            _, s = schema.resolver.resolve(pointer)
            schema_path = deque(pointer.split("/")[1:])

    return schema_path


def _build_patch(path, patch):
    if not path:
        path_str = "/allOf"
    else:
        path_str = f"/{'/'.join(path)}/allOf"

    return (
        [
            {
                "op": "add",
                "path": path_str,
                "value": [],
            }
        ],
        [
            {
                "op": "add",
                "path": f"{path_str}/-",
                "value": patch,
            }
        ],
    )


schemas = {}

##########################
#
# Build the definitive list of all resource types across all regions
#
###########################

for region in ["us-east-1"] + list((set(REGIONS) - set(["us-east-1"]))):
    for resource_type in PROVIDER_SCHEMA_MANAGER.get_resource_types(region):
        if resource_type in ["AWS::CDK::Metadata", "Module"]:
            continue
        if resource_type not in schemas:
            schemas[resource_type] = region


##########################
#
# Merge in rule schemas into the resource schemas
#
###########################

rules_folder = Path("src") / "cfnlint" / "rules"

rules = load_plugins(
    rules_folder,
    name="CfnLintJsonSchema",
    modules=(
        "cfnlint.rules.jsonschema.CfnLintJsonSchema",
        "cfnlint.rules.jsonschema.CfnLintJsonSchema.CfnLintJsonSchema",
    ),
)

for rule in rules:
    # skip region-specific rules; compare by the base class's qualified name,
    # since a class object never equals a string
    base = rule.__class__.__base__
    if f"{base.__module__}.{base.__name__}" == (
        "cfnlint.rules.jsonschema."
        "CfnLintJsonSchemaRegional.CfnLintJsonSchemaRegional"
    ):
        continue
    if not rule.id or rule.schema == {}:
        continue

    for keyword in rule.keywords:
        if not keyword.startswith("Resources/"):
            continue
        path = deque(keyword.split("/"))

        if len(path) < 3:
            continue

        path.popleft()
        resource_type = path.popleft()
        resource_properties = path.popleft()
        # only patch keywords that point at the Properties of a known resource type
        if resource_type not in schemas or resource_properties != "Properties":
            continue

        schema_path = _get_schema_path(
            PROVIDER_SCHEMA_MANAGER.get_resource_schema(
                schemas[resource_type], resource_type
            ),
            path,
        )
        all_of_patch, schema_patch = _build_patch(schema_path, rule.schema)

        PROVIDER_SCHEMA_MANAGER._schemas[schemas[resource_type]][resource_type].patch(
            patches=all_of_patch
        )
        PROVIDER_SCHEMA_MANAGER._schemas[schemas[resource_type]][resource_type].patch(
            patches=schema_patch
        )

        logger.info(f"Patch {rule.id} for {resource_type} in {schemas[resource_type]}")


build_dir = Path("build")
schemas_dir = build_dir / "schemas"
schemas_cfnlint_dir = schemas_dir / "cfnlint"
schemas_cfnlint_dir.mkdir(parents=True, exist_ok=True)

schemas_draft7_dir = schemas_dir / "draft7"
schemas_draft7_dir.mkdir(parents=True, exist_ok=True)

for resource_type, region in schemas.items():
    rt_py = ToPy(resource_type)

    with open(schemas_cfnlint_dir / f"{rt_py.py}.json", "w") as f:
        f.write(
            format_json_string(
                PROVIDER_SCHEMA_MANAGER.get_resource_schema(
                    region, resource_type
                ).schema
            )
        )

    _translator.translator(resource_type, region)

    with open(schemas_draft7_dir / f"{rt_py.py}.json", "w") as f:
        f.write(
            format_json_string(
                PROVIDER_SCHEMA_MANAGER.get_resource_schema(
                    region, resource_type
                ).schema
            )
        )

logger.info("Create schema package")
# "w:gz" produces gzip-compressed tarballs, despite the .zip file extension
with tarfile.open(build_dir / "schemas-cfnlint.zip", "w:gz") as tar:
    tar.add(schemas_cfnlint_dir, arcname="schemas")

with tarfile.open(build_dir / "schemas-draft7.zip", "w:gz") as tar:
    tar.add(schemas_draft7_dir, arcname="schemas")
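The two patches produced by _build_patch are applied back to back: the first creates an empty allOf at the target path, the second appends the rule's schema to it. A condensed standalone sketch of the helper, with a hypothetical path and rule schema, shows the shape of both JSON-patch documents:

# Condensed copy of the helper; the path and rule schema are hypothetical.
def build_patch(path, patch):
    path_str = f"/{'/'.join(path)}/allOf" if path else "/allOf"
    return (
        [{"op": "add", "path": path_str, "value": []}],             # create empty allOf
        [{"op": "add", "path": f"{path_str}/-", "value": patch}],   # append rule schema
    )

all_of_patch, schema_patch = build_patch(["properties", "Subnets"], {"minItems": 2})
print(all_of_patch)
# [{'op': 'add', 'path': '/properties/Subnets/allOf', 'value': []}]
print(schema_patch)
# [{'op': 'add', 'path': '/properties/Subnets/allOf/-', 'value': {'minItems': 2}}]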
@@ -26,10 +26,6 @@
"Subnets": {
"minItems": 2
}
- },
- "requiredXor": [
- "Subnets",
- "SubnetMappings"
- ]
+ }
}
}
