diff --git a/.github/workflows/fossa.yml b/.github/workflows/fossa.yml index 75de2e8c..81e3aae4 100644 --- a/.github/workflows/fossa.yml +++ b/.github/workflows/fossa.yml @@ -1,184 +1,173 @@ -# Name of the GitHub Action workflow -name: FOSSA Report Generation +name: Enterprise- FOSSA Report Generation -# Event triggers for the workflow on: - workflow_call: + workflow_dispatch: inputs: - org: - required: false + version_number_for_report_generation: type: string - description: 'Organization name: liquibase or datical' - workflow_dispatch: - -# Define the jobs in the workflow + description: 'Supply the DaticalDb-installer version variable which is used during its report generation to be stored in the s3 bucket. eg 8.7.352' + required: false + jobs: - fossa-scan: - # Specifies the type of runner to use + wait-for-fossa-report-generation: runs-on: ubuntu-latest - permissions: - contents: read - packages: write - # Sequence of steps that make up a single job + strategy: + matrix: + repo: [ + { name: "DaticalDB-installer", ref: "master",owner: "Datical" }, + {name: "ephemeral-database", ref: "master",owner: "liquibase"}, + { name: "drivers", ref: "master",owner: "Datical" }, + {name: "protoclub", ref: "develop",owner: "Datical"}, + { name: "datical-sqlparser", ref: "master",owner: "Datical" }, + { name: "storedlogic", ref: "master",owner: "Datical" }, + { name: "AppDBA", ref: "master",owner: "Datical" }, + { name: "liquibase-bundle", ref: "master",owner: "Datical" }, + { name: "liquibase", ref: "ddb",owner: "Datical" }, + { name: "ephemeral-database", ref: "master",owner: "Liquibase" } + ] + + name: "${{ matrix.repo.name }} - Fossa Report" steps: - # Checkout the code to the GitHub runner - - name: Checkout Code - uses: actions/checkout@v4 + - name: Set workflow inputs + run: | + if [[ "${{ matrix.repo.name }}" ]]; then + echo "WORKFLOW_INPUTS={ \"version_number_for_report_generation\": \"${{ github.event.inputs.version_number_for_report_generation }}\" }" >> $GITHUB_ENV 
+ else + echo "WORKFLOW_INPUTS={}" >> $GITHUB_ENV + fi - - name: Set up JDK for Datical - if: inputs.org == 'datical' - uses: actions/setup-java@v4 + - name: Dispatch an action and get the run ID + uses: codex-/return-dispatch@v1 + id: return_dispatch + continue-on-error: true with: - distribution: 'temurin' - java-version: '11' - cache: 'maven' - server-id: datical - server-username: REPO_MAVEN_USER - server-password: REPO_MAVEN_PASSWORD - - - name: Log inputs + token: ${{ secrets.FOSSA_TRIGGER_REPORT_GENERATION }} + ref: ${{ matrix.repo.ref }} + repo: ${{ matrix.repo.name }} + owner: ${{ matrix.repo.owner }} + workflow: fossa.yml + workflow_inputs: ${{ env.WORKFLOW_INPUTS }} + + - name: Retry fetching run ID (max 7 attempts with 5 seconds delay) run: | - echo "Org: ${{ inputs.org }}" + retries=7 + delay=5 # Delay of 5 seconds between retries + for i in $(seq 1 $retries); do + run_id="${{ steps.return_dispatch.outputs.run_id }}" + if [ -n "$run_id" ]; then + echo "Found run ID: $run_id" + echo "run_id=$run_id" >> $GITHUB_ENV + break + else + echo "Run ID not found, retrying in $delay seconds..." + fi + + if [ $i -eq $retries ]; then + echo "Failed to get run ID after $retries attempts." + exit 1 + fi + + # Wait before retrying + sleep $delay + done + shell: bash - - name: Set up JDK for Liquibase - if: inputs.org == 'liquibase' - uses: actions/setup-java@v4 + - name: Await Run ID ${{ steps.return_dispatch.outputs.run_id }} + uses: Codex-/await-remote-run@v1 with: - distribution: 'temurin' - java-version: '17' + token: ${{ secrets.FOSSA_TRIGGER_REPORT_GENERATION }} + run_id: ${{ steps.return_dispatch.outputs.run_id }} + repo: ${{ matrix.repo.name }} + owner: ${{ matrix.repo.owner }} + run_timeout_seconds: 420 # 7 minutes Time until giving up on the run + poll_interval_ms: 120000 # 2 minutes Frequency to poll the run for a status. 
+ - #look for dependencies in maven - - name: maven-settings-xml-action for Liquibase - if: inputs.org == 'liquibase' - uses: whelk-io/maven-settings-xml-action@v22 + combine-fossa-reports: + runs-on: ubuntu-latest + needs: wait-for-fossa-report-generation + steps: + - name: Checkout code + uses: actions/checkout@v4 with: - repositories: | - [ - { - "id": "liquibase", - "url": "https://maven.pkg.github.com/liquibase/liquibase", - "releases": { - "enabled": "false" - }, - "snapshots": { - "enabled": "true", - "updatePolicy": "always" - } - }, - { - "id": "liquibase-pro", - "url": "https://maven.pkg.github.com/liquibase/liquibase-pro", - "releases": { - "enabled": "false" - }, - "snapshots": { - "enabled": "true", - "updatePolicy": "always" - } - } - ] - servers: | - [ - { - "id": "liquibase-pro", - "username": "liquibot", - "password": "${{ secrets.LIQUIBOT_PAT }}" - }, - { - "id": "liquibase", - "username": "liquibot", - "password": "${{ secrets.LIQUIBOT_PAT }}" - } - ] - - - name: run FOSSA CLI - uses: fossas/fossa-action@main + repository: liquibase/build-logic + ref: DAT-18919 + path: build-logic + + - name: Set up AWS credentials + uses: aws-actions/configure-aws-credentials@v4 with: - api-key: ${{ secrets.FOSSA_API_KEY }} - branch: ${{ github.ref }} + aws-access-key-id: ${{ secrets.LIQUIBASEORIGIN_ACCESS_KEY_ID }} + aws-secret-access-key: ${{ secrets.LIQUIBASEORIGIN_SECRET_ACCESS_KEY }} + aws-region: us-east-1 - - name: Get the commit sha - id: get_commit_sha + - name: Download reports from S3 and Rearrange CSV files run: | - commit_sha=`(git rev-parse HEAD)` - echo "commit_sha=${commit_sha}" >> $GITHUB_OUTPUT - - - name: Get repository name - id: get_repo_name - run: echo "repo_name=${{ github.event.repository.name }}" >> $GITHUB_OUTPUT - - # https://docs.fossa.com/docs/download-fossa-project-attribution-reports - # 7retries×30seconds=210seconds - - name: Datical- Set the dependency metadata information - if: inputs.org == 'datical' - run: | - mkdir -p 
/home/runner/work/${{ steps.get_repo_name.outputs.repo_name }}/fossa_reports - - max_retries=8 - retry_delay=30 - attempt=0 - success=0 - - while [ $attempt -lt $max_retries ]; do - curl --location 'https://app.fossa.com/api/revisions/custom%2B40163%2Fgithub.com%2FDatical%2F${{ steps.get_repo_name.outputs.repo_name }}%24${{ steps.get_commit_sha.outputs.commit_sha }}/attribution/download?format=CSV&includeDirectDependencies=true&includeDeepDependencies=true&download=true' \ - --header 'Authorization: Bearer ${{ secrets.FOSSA_COMBINED_REPORT }}' \ - -o /home/runner/work/${{ steps.get_repo_name.outputs.repo_name }}/fossa_reports/${{ steps.get_repo_name.outputs.repo_name }}.csv && success=1 && break - - echo "Curl failed, retrying in $retry_delay seconds..." - attempt=$((attempt + 1)) - sleep $retry_delay - done - - if [ $success -ne 1 ]; then - echo "Failed to download the report after $max_retries attempts" - exit 1 - fi - - ls -l /home/runner/work/${{ steps.get_repo_name.outputs.repo_name }}/fossa_reports - cat /home/runner/work/${{ steps.get_repo_name.outputs.repo_name }}/fossa_reports/${{ steps.get_repo_name.outputs.repo_name }}.csv || echo "File is empty or not found" - - # 7retries×30seconds=210seconds - - name: Liquibase- Set the dependency metadata information - if: inputs.org == 'liquibase' - run: | - mkdir -p /home/runner/work/${{ steps.get_repo_name.outputs.repo_name }}/fossa_reports - - max_retries=8 - retry_delay=30 - attempt=0 - success=0 - - while [ $attempt -lt $max_retries ]; do - curl --location 'https://app.fossa.com/api/revisions/custom%2B40163%2Fgithub.com%2Fliquibase%2F${{ steps.get_repo_name.outputs.repo_name }}%24${{ steps.get_commit_sha.outputs.commit_sha }}/attribution/download?format=CSV&includeDirectDependencies=true&includeDeepDependencies=true&download=true' \ - --header 'Authorization: Bearer ${{ secrets.FOSSA_COMBINED_REPORT }}' \ - -o /home/runner/work/${{ steps.get_repo_name.outputs.repo_name }}/fossa_reports/${{ 
steps.get_repo_name.outputs.repo_name }}.csv && success=1 && break - - echo "Curl failed, retrying in $retry_delay seconds..." - attempt=$((attempt + 1)) - sleep $retry_delay - done - - if [ $success -ne 1 ]; then - echo "Failed to download the report after $max_retries attempts" - exit 1 - fi - - ls -l /home/runner/work/${{ steps.get_repo_name.outputs.repo_name }}/fossa_reports - cat /home/runner/work/${{ steps.get_repo_name.outputs.repo_name }}/fossa_reports/${{ steps.get_repo_name.outputs.repo_name }}.csv || echo "File is empty or not found" - - # Upload report to S3 - - name: Upload report to S3 - if: always() - run: | - aws s3 cp /home/runner/work/${{ steps.get_repo_name.outputs.repo_name }}/fossa_reports/${{ steps.get_repo_name.outputs.repo_name }}.csv s3://liquibaseorg-origin/enterprise_fossa_report/ - env: - AWS_ACCESS_KEY_ID: ${{ secrets.LIQUIBASEORIGIN_ACCESS_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.LIQUIBASEORIGIN_SECRET_ACCESS_KEY }} - AWS_DEFAULT_REGION: us-east-1 - - - name : Upload to build page - uses: actions/upload-artifact@v3 - with: - name: fossa-reports - path: | - /home/runner/work/${{ steps.get_repo_name.outputs.repo_name }}/fossa_reports/${{ steps.get_repo_name.outputs.repo_name }}.csv + # Create a directory to store downloaded reports from S3 + mkdir -p /home/runner/work/enterprise/fossa_reports_s3 + + # Download all files from the specified S3 bucket to the created directory + aws s3 cp --recursive s3://liquibaseorg-origin/enterprise_fossa_report/${{ github.event.inputs.version_number_for_report_generation }}/raw_reports /home/runner/work/enterprise/fossa_reports_s3/ + + # List the contents of the directory to confirm successful download + ls -l /home/runner/work/enterprise/fossa_reports_s3 + + # Define an array of CSV file names + csv_files=("DaticalDB-installer" "drivers" "protoclub" "datical-sqlparser" "storedlogic" "AppDBA" "liquibase-bundle" "liquibase") + + # Loop through each CSV file and remove headers again for combine 
report generation + for file in "${csv_files[@]}"; do + tail -n +2 /home/runner/work/enterprise/fossa_reports_s3/${file}.csv >> /home/runner/work/enterprise/fossa_reports_s3/${file}_no_header.csv + done + + # Concatenate all CSV files without headers, sort, and remove duplicates + cat /home/runner/work/enterprise/fossa_reports_s3/*_no_header.csv | sort | uniq > /home/runner/work/enterprise/fossa_reports_s3/enterprise_unique.csv + + # Add a header to the final CSV file, placing it above the sorted and unique data + echo 'Title,Version,Declared License,Package Homepage' | cat - /home/runner/work/enterprise/fossa_reports_s3/enterprise_unique.csv > temp && mv temp /home/runner/work/enterprise/fossa_reports_s3/enterprise_unique.csv + + ls -l $GITHUB_WORKSPACE + + # Read ignored dependencies from a file + ignoredLibsFile=$(cat $GITHUB_WORKSPACE/build-logic/.github/workflows/ignore_dependencies_fossa.txt) + + # Split the ignored dependencies into an array + IFS=',' read -r -a ignoredLibs <<< "$ignoredLibsFile" + + # Create a temporary file + tempfile=$(mktemp) + + # Build the grep command to filter out ignored dependencies + grepCmd="grep -iv" + for lib in "${ignoredLibs[@]}"; do + grepCmd="$grepCmd -e \"$lib\"" + done + + # Process the FOSSA report to remove ignored dependencies + cat /home/runner/work/enterprise/fossa_reports_s3/enterprise_unique.csv | eval $grepCmd > enterprise_report.csv + + + - name: Upload CSV to Artifacts + uses: actions/upload-artifact@v3 + with: + name: enterprise_report + path: enterprise_report.csv + + - name: Upload merged CSV to S3 + if: always() + run: aws s3 cp enterprise_report.csv s3://liquibaseorg-origin/enterprise_fossa_report/${{ inputs.version_number_for_report_generation }}/enterprise_report_${{ inputs.version_number_for_report_generation }}.csv + + + trigger-datical-service: + runs-on: ubuntu-latest + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Dispatch an 
action for datical-service + uses: peter-evans/repository-dispatch@v3 + with: + token: ${{ secrets.FOSSA_TRIGGER_REPORT_GENERATION }} + repository: Datical/datical-service + event-type: trigger-fossa-report-generation + client-payload: '{"ref": "master", "version_number_for_report_generation": "${{ github.event.inputs.version_number_for_report_generation }}"}' \ No newline at end of file diff --git a/.github/workflows/generate-upload-fossa-report.yml b/.github/workflows/generate-upload-fossa-report.yml new file mode 100644 index 00000000..6e2a97fc --- /dev/null +++ b/.github/workflows/generate-upload-fossa-report.yml @@ -0,0 +1,80 @@ +name: Generate and upload Fossa Report to s3 + +on: + workflow_call: + inputs: + version_number_for_report_generation: + type: string + required: true + +jobs: + fossa-scan: + runs-on: ubuntu-latest + env: + FOSSA_API_KEY: ${{ secrets.FOSSA_API_KEY }} + permissions: + contents: read + packages: write + steps: + - name: Checkout Code + uses: actions/checkout@v4 + with: + ref: ${{ github.ref }} + + - name: Get the commit sha + id: get_commit_sha + run: | + commit_sha=`(git rev-parse HEAD)` + echo "commit_sha=${commit_sha}" >> $GITHUB_OUTPUT + + - name: Get repository name + id: get_repo_name + run: echo "repo_name=${{ github.event.repository.name }}" >> $GITHUB_OUTPUT + + - name: Setup Fossa CLI + run: | + curl -H 'Cache-Control: no-cache' https://raw.githubusercontent.com/fossas/fossa-cli/master/install-latest.sh | bash + export FOSSA_API_KEY="${{ secrets.FOSSA_API_KEY }}" + + - name: Analyze project + run: fossa analyze + + - name: Generate report + run: | + # Run JSON report + fossa report attribution --format json > fossa.json + + csv_filename="${{ steps.get_repo_name.outputs.repo_name }}.csv" + + # Extract fields from the JSON and create a CSV report. 
+ echo "Title,Version,Declared License,Package Homepage" > $csv_filename + + jq -r ' + (.directDependencies + .deepDependencies)[] | + [ + .title, + .version, + (.licenses | map(.name) | join(";")), + .projectUrl + ] | + @csv + ' fossa.json >> $csv_filename + + - name: Upload report to S3 + if: always() + run: | + csv_filename="${{ steps.get_repo_name.outputs.repo_name }}.csv" + aws s3 cp $csv_filename s3://liquibaseorg-origin/enterprise_fossa_report/${{ github.event.inputs.version_number_for_report_generation }}/raw_reports/ + env: + AWS_ACCESS_KEY_ID: ${{ secrets.LIQUIBASEORIGIN_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.LIQUIBASEORIGIN_SECRET_ACCESS_KEY }} + AWS_DEFAULT_REGION: us-east-1 + + - name: Upload to build page + if: always() + uses: actions/upload-artifact@v3 + with: + name: fossa-reports + path: | + ${{ steps.get_repo_name.outputs.repo_name }}.csv + diff --git a/.github/workflows/ignore_dependencies_fossa.txt b/.github/workflows/ignore_dependencies_fossa.txt new file mode 100644 index 00000000..4fef7e89 --- /dev/null +++ b/.github/workflows/ignore_dependencies_fossa.txt @@ -0,0 +1 @@ +JUnit Jupiter API,JUnit Jupiter Engine,JUnit Platform Commons,JUnit Platform Engine API,JUnit Platform Launcher,JUnit,Hamcrest Core,Deep Dependencies,Provder \ No newline at end of file diff --git a/.github/workflows/package.yml b/.github/workflows/package.yml index 67279738..b86cb7af 100644 --- a/.github/workflows/package.yml +++ b/.github/workflows/package.yml @@ -228,15 +228,13 @@ jobs: echo "pr_title: $pr_title" # Set the environment variable based on whether the PR exists if [ -z "$pr_title" ]; then - PR_EXISTS=false + echo "PR_EXISTS=false" >> $GITHUB_ENV else - PR_EXISTS=true + echo "PR_EXISTS=true" >> $GITHUB_ENV fi - echo PR_EXISTS=$PR_EXISTS - echo PR_EXISTS=$PR_EXISTS >> GITHUB_OUTPUT - name: Update Homebrew formula for ${{ inputs.artifactId }} - if: ${{ steps.check-brew-pr.outputs.PR_EXISTS == false && inputs.dry_run == false }} 
+ if: ${{ env.PR_EXISTS == 'false' && inputs.dry_run == false }} uses: mislav/bump-homebrew-formula-action@v3 with: formula-name: liquibase diff --git a/README.md b/README.md index ac773fda..73887ff9 100644 --- a/README.md +++ b/README.md @@ -453,3 +453,16 @@ Here the modules we want to generate and aggregate test reports must be specifie ### Releasing a new version of build-logic When you want to release new version of `build-logic`, it is important to update all the occurrences of previous version eg: `v0.7.8` with the new version eg : `v0.7.8` in all the files. As, the code for the new version internally refers to the old version. + + +### Fossa Report Generation for Enterprise + +1. AWS s3 bucket under `liquibase-prod` `s3://liquibaseorg-origin/enterprise_fossa_report/` +2. Manually run the workflow under `enterprise-fossa-trigger-report-generation.yml` + - this workflow triggers a run in the specified repository matrix + - individual repositories call the workflow `generate-upload-fossa-report.yml` +3. `generate-upload-fossa-report.yml` + - the individual reports are uploaded under `raw_reports` + - the combined report is called `enterprise_report_version_number_for_report_generation` which is uploaded under `version_number_for_report_generation` + - the report for `datical-service` is uploaded under version_number_for_report_generation +4. You might need to do some manipulation of the columns as sometimes they are empty. Just the way Fossa populates them!