diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index 8eb486d..b911a25 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -7,14 +7,14 @@ on:
     types: [run-all-tool-tests-command]
 env:
   GALAXY_FORK: galaxyproject
-  GALAXY_BRANCH: release_21.09
+  GALAXY_BRANCH: release_23.1
   GALAXY_COPERNICUS_CDSAPIRC_KEY: ${{ secrets.GALAXY_COPERNICUS_CDSAPIRC_KEY }}
   COPERNICUS_CDSAPIRC_KEY_FILE: ${{ secrets.COPERNICUS_CDSAPIRC_KEY_FILE }}
   MAX_CHUNKS: 40
 jobs:
   setup:
     name: Setup cache and determine changed repositories
-    if: github.repository_owner == 'NordicESMhub'
+    if: ${{ github.repository_owner == 'NordicESMhub' }}
     runs-on: ubuntu-latest
     outputs:
       galaxy-head-sha: ${{ steps.get-galaxy-sha.outputs.galaxy-head-sha }}
@@ -29,7 +29,7 @@
     steps:
       - name: Add reaction
         if: ${{ github.event.client_payload.slash_command.command == 'run-all-tool-tests' }}
-        uses: peter-evans/create-or-update-comment@v1
+        uses: peter-evans/create-or-update-comment@v2
         with:
           token: ${{ secrets.PAT }}
           repository: ${{ github.event.client_payload.github.payload.repository.full_name }}
@@ -39,17 +39,17 @@
         id: get-fork-branch
         run: |
           TMP="${{ github.event.client_payload.slash_command.args.named.fork }}"
-          echo "::set-output name=fork::${TMP:-$GALAXY_FORK}"
+          echo "fork=${TMP:-$GALAXY_FORK}" >> $GITHUB_OUTPUT
           TMP="${{ github.event.client_payload.slash_command.args.named.branch }}"
-          echo "::set-output name=branch::${TMP:-$GALAXY_BRANCH}"
+          echo "branch=${TMP:-$GALAXY_BRANCH}" >> $GITHUB_OUTPUT
       - name: Determine latest commit in the Galaxy repo
         id: get-galaxy-sha
-        run: echo "::set-output name=galaxy-head-sha::$(git ls-remote https://github.com/${{ steps.get-fork-branch.outputs.fork }}/galaxy refs/heads/${{ steps.get-fork-branch.outputs.branch }} | cut -f1)"
-      - uses: actions/setup-python@v1
+        run: echo "galaxy-head-sha=$(git ls-remote https://github.com/${{ steps.get-fork-branch.outputs.fork }}/galaxy refs/heads/${{ steps.get-fork-branch.outputs.branch }} | cut -f1)" >> $GITHUB_OUTPUT
+      - uses: actions/setup-python@v4
         with:
           python-version: ${{ matrix.python-version }}
       - name: Cache .cache/pip
-        uses: actions/cache@v2
+        uses: actions/cache@v3
         id: cache-pip
         with:
           path: ~/.cache/pip
@@ -58,7 +58,7 @@
       # are not available as wheels, pip will build a wheel for them, which can be cached.
       - name: Install wheel
         run: pip install wheel
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3
         with:
           fetch-depth: 1
       - name: Fake a Planemo run to update cache and determine commit range, repositories, and chunks
@@ -76,6 +76,42 @@
         run: |
           echo 'Using ${{ steps.discover.outputs.chunk-count }} chunks (${{ steps.discover.outputs.chunk-list }})'

+  lint:
+    name: Check for missing containers
+    needs: setup
+    if: ${{ needs.setup.outputs.repository-list != '' || needs.setup.outputs.tool-list != '' }}
+    runs-on: ubuntu-latest
+    strategy:
+      fail-fast: false
+      matrix:
+        python-version: ['3.7']
+    steps:
+      - uses: actions/checkout@v3
+        with:
+          fetch-depth: 1
+      - uses: actions/setup-python@v4
+        with:
+          python-version: ${{ matrix.python-version }}
+      - name: Cache .cache/pip
+        uses: actions/cache@v3
+        id: cache-pip
+        with:
+          path: ~/.cache/pip
+          key: pip_cache_py_${{ matrix.python-version }}_gxy_${{ needs.setup.outputs.galaxy-head-sha }}
+      - name: Planemo lint
+        uses: galaxyproject/planemo-ci-action@v1
+        id: lint
+        with:
+          mode: lint
+          repository-list: ${{ needs.setup.outputs.repository-list }}
+          tool-list: ${{ needs.setup.outputs.tool-list }}
+          additional-planemo-options: --biocontainers -s tests,output,inputs,help,general,command,citations,tool_xsd
+      - uses: actions/upload-artifact@v3
+        if: ${{ failure() }}
+        with:
+          name: 'Tool linting output'
+          path: lint_report.txt
+
   test:
     name: Test tools
     # This job runs on Linux
@@ -99,18 +135,23 @@
     steps:
       # checkout the repository
       # and use it as the current working directory
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3
         with:
           fetch-depth: 1
-      - uses: actions/setup-python@v1
+      - uses: actions/setup-python@v4
         with:
           python-version: ${{ matrix.python-version }}
       - name: Cache .cache/pip
-        uses: actions/cache@v2
+        uses: actions/cache@v3
         id: cache-pip
         with:
           path: ~/.cache/pip
           key: pip_cache_py_${{ matrix.python-version }}_gxy_${{ needs.setup.outputs.galaxy-head-sha }}
+      - name: Get number of CPU cores
+        uses: SimenB/github-actions-cpu-cores@v1
+        id: cpu-cores
+      - name: Clean dotnet folder for space
+        run: rm -Rf /usr/share/dotnet
       - name: Create Copernicus Climate Change Config file
         uses: jwsi/secret-parser@v1
         with:
@@ -141,7 +182,10 @@
           galaxy-branch: ${{ needs.setup.outputs.branch }}
           chunk: ${{ matrix.chunk }}
           chunk-count: ${{ needs.setup.outputs.chunk-count }}
-      - uses: actions/upload-artifact@v2
+          galaxy-slots: ${{ steps.cpu-cores.outputs.count }}
+          # Limit each test to 15 minutes
+          test_timeout: 900
+      - uses: actions/upload-artifact@v3
         with:
           name: 'Tool test output ${{ matrix.chunk }}'
           path: upload
@@ -160,14 +204,14 @@
     # This job runs on Linux
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/download-artifact@v2
+      - uses: actions/download-artifact@v3
         with:
           path: artifacts
-      - uses: actions/setup-python@v1
+      - uses: actions/setup-python@v4
         with:
           python-version: ${{ matrix.python-version }}
       - name: Cache .cache/pip
-        uses: actions/cache@v2
+        uses: actions/cache@v3
         id: cache-pip
         with:
           path: ~/.cache/pip
@@ -178,18 +222,18 @@
         with:
           mode: combine
           html-report: true
-      - uses: actions/upload-artifact@v2
+      - uses: actions/upload-artifact@v3
         with:
           name: 'All tool test results'
           path: upload

       - name: Create URL to the run output
         if: ${{ github.event.client_payload.slash_command.command == 'run-all-tool-tests' }}
         id: vars
-        run: echo "::set-output name=run-url::https://github.com/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID"
+        run: echo "run-url=https://github.com/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" >> $GITHUB_OUTPUT
       - name: Create comment
         if: ${{ github.event.client_payload.slash_command.command == 'run-all-tool-tests' }}
-        uses: peter-evans/create-or-update-comment@v1
+        uses: peter-evans/create-or-update-comment@v2
         with:
           token: ${{ secrets.PAT }}
           repository: ${{ github.event.client_payload.github.payload.repository.full_name }}
diff --git a/.github/workflows/pr.yaml b/.github/workflows/pr.yaml
index 776998d..0ae4e97 100644
--- a/.github/workflows/pr.yaml
+++ b/.github/workflows/pr.yaml
@@ -1,11 +1,32 @@
 name: Galaxy Tool Linting and Tests for push and PR
-on: [push, pull_request]
+on:
+  pull_request:
+    paths-ignore:
+      - '.github/**'
+      - 'deprecated/**'
+      - 'docs/**'
+      - '*'
+  push:
+    branches:
+      - main
+      - master
+    paths-ignore:
+      - '.github/**'
+      - 'deprecated/**'
+      - 'docs/**'
+      - '*'
 env:
   GALAXY_FORK: galaxyproject
-  GALAXY_BRANCH: release_21.09
+  GALAXY_BRANCH: release_23.1
   GALAXY_COPERNICUS_CDSAPIRC_KEY: ${{ secrets.GALAXY_COPERNICUS_CDSAPIRC_KEY }}
   COPERNICUS_CDSAPIRC_KEY_FILE: ${{ secrets.COPERNICUS_CDSAPIRC_KEY_FILE }}
   MAX_CHUNKS: 4
+  MAX_FILE_SIZE: 1M
+concurrency:
+  # Group runs by PR, but keep runs on the default branch separate
+  # because we do not want to cancel ToolShed uploads
+  group: pr-${{ (github.ref == 'refs/heads/master' || github.ref == 'refs/heads/main') && github.run_number || github.ref }}
+  cancel-in-progress: true
 jobs:
   # the setup job does two things:
   # 1. cache the pip cache and .planemo
@@ -23,6 +44,7 @@
       tool-list: ${{ steps.discover.outputs.tool-list }}
       chunk-count: ${{ steps.discover.outputs.chunk-count }}
       chunk-list: ${{ steps.discover.outputs.chunk-list }}
+      commit-range: ${{ steps.discover.outputs.commit-range }}
     strategy:
       matrix:
         python-version: ['3.7']
@@ -38,18 +60,18 @@
           echo 'event.after: ${{ github.event.after }}'
       - name: Determine latest commit in the Galaxy repo
         id: get-galaxy-sha
-        run: echo "::set-output name=galaxy-head-sha::$(git ls-remote https://github.com/${{ env.GALAXY_FORK }}/galaxy refs/heads/${{ env.GALAXY_BRANCH }} | cut -f1)"
-      - uses: actions/setup-python@v1
+        run: echo "galaxy-head-sha=$(git ls-remote https://github.com/${{ env.GALAXY_FORK }}/galaxy refs/heads/${{ env.GALAXY_BRANCH }} | cut -f1)" >> $GITHUB_OUTPUT
+      - uses: actions/setup-python@v4
         with:
           python-version: ${{ matrix.python-version }}
       - name: Cache .cache/pip
-        uses: actions/cache@v2
+        uses: actions/cache@v3
         id: cache-pip
         with:
           path: ~/.cache/pip
           key: pip_cache_py_${{ matrix.python-version }}_gxy_${{ steps.get-galaxy-sha.outputs.galaxy-head-sha }}
       - name: Cache .planemo
-        uses: actions/cache@v2
+        uses: actions/cache@v3
         id: cache-planemo
         with:
           path: ~/.planemo
@@ -60,7 +82,7 @@
         run: pip install wheel
       - name: Install flake8
         run: pip install flake8 flake8-import-order
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3
         with:
           fetch-depth: 0
       - name: Fake a Planemo run to update cache and determine commit range, repositories, and chunks
@@ -93,28 +115,36 @@
       matrix:
         python-version: ['3.7']
     steps:
-      # checkout the repository
-      # and use it as the current working directory
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3
         with:
           fetch-depth: 1
-      - uses: actions/setup-python@v1
+      - uses: actions/setup-python@v4
         with:
           python-version: ${{ matrix.python-version }}
       - name: Cache .cache/pip
-        uses: actions/cache@v2
+        uses: actions/cache@v3
        id: cache-pip
        with:
          path: ~/.cache/pip
          key: pip_cache_py_${{ matrix.python-version }}_gxy_${{ needs.setup.outputs.galaxy-head-sha }}
+
+      - name: Set fail level for pull request
+        if: ${{ github.event_name == 'pull_request' }}
+        run:
+          echo "FAIL_LEVEL=warn" >> "$GITHUB_ENV"
+      - name: Set fail level for merge
+        if: ${{ github.event_name != 'pull_request' }}
+        run:
+          echo "FAIL_LEVEL=error" >> "$GITHUB_ENV"
       - name: Planemo lint
         uses: galaxyproject/planemo-ci-action@v1
         id: lint
         with:
           mode: lint
+          fail-level: ${{ env.FAIL_LEVEL }}
           repository-list: ${{ needs.setup.outputs.repository-list }}
           tool-list: ${{ needs.setup.outputs.tool-list }}
-      - uses: actions/upload-artifact@v2
+      - uses: actions/upload-artifact@v3
         if: ${{ failure() }}
         with:
           name: 'Tool linting output'
@@ -124,21 +154,21 @@
   flake8:
     name: Lint Python scripts
     needs: setup
-    if: ${{ needs.setup.outputs.repository-list != '' }}
+    if: ${{ github.event_name == 'pull_request' && needs.setup.outputs.repository-list != '' }}
     runs-on: ubuntu-latest
     strategy:
       fail-fast: false
       matrix:
         python-version: ['3.7']
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3
         with:
           fetch-depth: 1
-      - uses: actions/setup-python@v1
+      - uses: actions/setup-python@v4
         with:
           python-version: ${{ matrix.python-version }}
       - name: Cache .cache/pip
-        uses: actions/cache@v2
+        uses: actions/cache@v3
         id: cache-pip
         with:
           path: ~/.cache/pip
@@ -147,7 +177,7 @@
         run: pip install flake8 flake8-import-order
       - name: Flake8
         run: echo '${{ needs.setup.outputs.repository-list }}' | xargs -d '\n' flake8 --output-file pylint_report.txt --tee
-      - uses: actions/upload-artifact@v2
+      - uses: actions/upload-artifact@v3
         if: ${{ failure() }}
         with:
           name: 'Python linting output'
@@ -156,21 +186,21 @@
   lintr:
     name: Lint R scripts
     needs: setup
-    if: ${{ needs.setup.outputs.repository-list != '' }}
+    if: ${{ github.event_name == 'pull_request' && needs.setup.outputs.repository-list != '' }}
     runs-on: ${{ matrix.os }}
     strategy:
       matrix:
         os: [ubuntu-20.04]
-        r-version: ['4.0.1']
+        r-version: ['release']
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3
         with:
           fetch-depth: 1
-      - uses: r-lib/actions/setup-r@master
+      - uses: r-lib/actions/setup-r@v2
         with:
           r-version: ${{ matrix.r-version }}
       - name: Cache R packages
-        uses: actions/cache@v2
+        uses: actions/cache@v3
         with:
           path: ${{ env.R_LIBS_USER }}
           key: r_cache_${{ matrix.os }}_${{ matrix.r-version }}
@@ -186,7 +216,7 @@
       - name: lintr
         run: |
           library(lintr)
-          linters <- with_defaults(line_length_linter = NULL, cyclocomp_linter = NULL, object_usage_linter = NULL)
+          linters <- linters_with_defaults(line_length_linter = NULL, cyclocomp_linter = NULL, object_usage_linter = NULL, object_name_linter = NULL)
           con <- file("repository_list.txt", "r")
           status <- 0
           while (TRUE) {
@@ -199,19 +229,46 @@
               status <- 1
               for (l in lnt) {
                 rel_path <- paste(repo, l$filename, sep="/")
-                write(paste(paste(rel_path, l$line_number, l$column_number, sep=":"), l$message), stderr())
-                write(paste(paste(rel_path, l$line_number, l$column_number, sep=":"), l$message), "rlint_report.txt", append=TRUE)
+                write(paste(paste(rel_path, l$line_number, l$column_number, sep=":"), l$message, paste("(", l$line, ")")), stderr())
+                write(paste(paste(rel_path, l$line_number, l$column_number, sep=":"), l$message, paste("(", l$line, ")")), "rlint_report.txt", append=TRUE)
               }
             }
           }
           quit(status = status)
         shell: Rscript {0}
-      - uses: actions/upload-artifact@v2
+      - uses: actions/upload-artifact@v3
         if: ${{ failure() }}
         with:
           name: 'R linting output'
           path: rlint_report.txt

+  file_sizes:
+    name: Check file sizes
+    needs: setup
+    if: ${{ github.event_name == 'pull_request' && needs.setup.outputs.repository-list != '' }}
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v3
+        with:
+          fetch-depth: 0
+      - name: Check file sizes
+        run: |
+          touch file_size_report.txt
+          git diff --diff-filter=d --name-only ${{ needs.setup.outputs.commit-range }} > git.diff
+          while read line; do
+            find "$line" -type f -size +${{ env.MAX_FILE_SIZE }} >> file_size_report.txt
+          done < git.diff
+          if [[ -s file_size_report.txt ]]; then
+            echo "Files larger than ${{ env.MAX_FILE_SIZE }} found"
+            cat file_size_report.txt
+            exit 1
+          fi
+      - uses: actions/upload-artifact@v3
+        if: ${{ failure() }}
+        with:
+          name: 'File size report'
+          path: file_size_report.txt
+
   # Planemo test the changed repositories, each chunk creates an artifact
   # containing HTML and JSON reports for the executed tests
   test:
@@ -234,26 +291,30 @@
         ports:
           - 5432:5432
     steps:
-      # checkout the repository
-      # and use it as the current working directory
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3
         with:
           fetch-depth: 1
-      - uses: actions/setup-python@v1
+      - uses: actions/setup-python@v4
         with:
           python-version: ${{ matrix.python-version }}
       - name: Cache .cache/pip
-        uses: actions/cache@v2
+        uses: actions/cache@v3
         id: cache-pip
         with:
           path: ~/.cache/pip
           key: pip_cache_py_${{ matrix.python-version }}_gxy_${{ needs.setup.outputs.galaxy-head-sha }}
       - name: Cache .planemo
-        uses: actions/cache@v2
+        uses: actions/cache@v3
         id: cache-planemo
         with:
           path: ~/.planemo
           key: planemo_cache_py_${{ matrix.python-version }}_gxy_${{ needs.setup.outputs.galaxy-head-sha }}
+      - name: Get number of CPU cores
+        uses: SimenB/github-actions-cpu-cores@v1
+        id: cpu-cores
+      - name: Clean dotnet folder for space
+        run: rm -Rf /usr/share/dotnet
+
       - name: Create Copernicus Climate Change Config file
         uses: jwsi/secret-parser@v1
         with:
@@ -284,7 +345,10 @@
           galaxy-branch: ${{ env.GALAXY_BRANCH }}
           chunk: ${{ matrix.chunk }}
           chunk-count: ${{ needs.setup.outputs.chunk-count }}
-      - uses: actions/upload-artifact@v2
+          galaxy-slots: ${{ steps.cpu-cores.outputs.count }}
+          # Limit each test to 30 minutes
+          test_timeout: 1800
+      - uses: actions/upload-artifact@v3
         with:
           name: 'Tool test output ${{ matrix.chunk }}'
           path: upload
@@ -297,20 +361,20 @@
   combine_outputs:
     name: Combine chunked test results
     needs: [setup, test]
-    if: ${{ needs.setup.outputs.repository-list != '' }}
+    if: ${{ always() && needs.setup.outputs.repository-list != '' }}
     runs-on: ubuntu-latest
     strategy:
       matrix:
         python-version: ['3.7']
     steps:
-      - uses: actions/download-artifact@v2
+      - uses: actions/download-artifact@v3
         with:
           path: artifacts
-      - uses: actions/setup-python@v1
+      - uses: actions/setup-python@v4
         with:
           python-version: ${{ matrix.python-version }}
       - name: Cache .cache/pip
-        uses: actions/cache@v2
+        uses: actions/cache@v3
         id: cache-pip
         with:
           path: ~/.cache/pip
@@ -321,7 +385,7 @@
         with:
           mode: combine
           html-report: true
-      - uses: actions/upload-artifact@v2
+      - uses: actions/upload-artifact@v3
         with:
           name: 'All tool test results'
           path: upload
@@ -330,25 +394,31 @@
         id: check
         with:
           mode: check
+      - name: Check if all test chunks succeeded
+        run: |
+          NFILES=$(ls artifacts/ | grep "Tool test output" | wc -l)
+          if [[ "${{ needs.setup.outputs.chunk-count }}" != "$NFILES" ]]; then
+            exit 1
+          fi

   # deploy the tools to the toolsheds (first TTS for testing)
   deploy:
     name: Deploy
-    needs: [setup, lint, flake8, lintr, combine_outputs]
+    needs: [setup, lint, combine_outputs]
     if: ${{ (github.ref == 'refs/heads/master' || github.ref == 'refs/heads/main' ) && github.repository_owner == 'NordicESMHub' }}
     runs-on: ubuntu-latest
     strategy:
       matrix:
         python-version: ['3.7']
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3
         with:
           fetch-depth: 1
-      - uses: actions/setup-python@v1
+      - uses: actions/setup-python@v4
         with:
           python-version: ${{ matrix.python-version }}
       - name: Cache .cache/pip
-        uses: actions/cache@v2
+        uses: actions/cache@v3
         id: cache-pip
         with:
           path: ~/.cache/pip
@@ -369,9 +439,31 @@
           shed-target: toolshed
           shed-key: ${{ secrets.TS_API_KEY }}

+  deploy-report:
+    name: Report deploy status
+    needs: [deploy]
+    if: ${{ always() && needs.deploy.result != 'success' && (github.ref == 'refs/heads/master' || github.ref == 'refs/heads/main' ) && github.repository_owner == 'galaxyproject' }}
+    runs-on: ubuntu-latest
+    steps:
+      # report to the PR if deployment failed
+      - name: Get PR object
+        uses: 8BitJonny/gh-get-current-pr@2.2.0
+        id: getpr
+        with:
+          sha: ${{ github.event.after }}
+      - name: Create comment
+        uses: peter-evans/create-or-update-comment@v2
+        with:
+          token: ${{ secrets.PAT }}
+          issue-number: ${{ steps.getpr.outputs.number }}
+          body: |
+            Attention: deployment ${{ needs.deploy.result }}!
+
+            https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}
+
   determine-success:
     name: Check workflow success
-    needs: [setup, lint, flake8, lintr, combine_outputs]
+    needs: [setup, lint, flake8, lintr, file_sizes, combine_outputs]
     if: ${{ always() && github.ref != 'refs/heads/master' && github.ref != 'refs/heads/main' }}
     runs-on: ubuntu-latest
     steps:
@@ -384,6 +476,9 @@
       - name: Indicate R script lint status
         if: ${{ needs.lintr.result != 'success' && needs.lintr.result != 'skipped' }}
         run: exit 1
+      - name: Indicate file size check status
+        if: ${{ needs.file_sizes.result != 'success' && needs.file_sizes.result != 'skipped' }}
+        run: exit 1
       - name: Check tool test status
         if: ${{ needs.combine_outputs.result != 'success' && needs.combine_outputs.result != 'skipped' }}
         run: exit 1
diff --git a/.github/workflows/pr_without_tool_change.yaml b/.github/workflows/pr_without_tool_change.yaml
new file mode 100644
index 0000000..4e9cad6
--- /dev/null
+++ b/.github/workflows/pr_without_tool_change.yaml
@@ -0,0 +1,17 @@
+name: Galaxy Tool Linting and Tests for push and PR
+# Fallback workflow that provides a succeeding "Check workflow success" job
+# as this is a requirement for being able to merge a PR
+# see https://docs.github.com/en/repositories/configuring-branches-and-merges-in-your-repository/defining-the-mergeability-of-pull-requests/troubleshooting-required-status-checks#handling-skipped-but-required-checks
+on:
+  pull_request:
+concurrency:
+  # Group runs by PR, but keep runs on the default branch separate
+  # because we do not want to cancel ToolShed uploads
+  group: pr-${{ (github.ref == 'refs/heads/master' || github.ref == 'refs/heads/main') && github.run_number || github.ref }}
+  cancel-in-progress: true
+jobs:
+  determine-success:
+    name: Check workflow success
+    runs-on: ubuntu-latest
+    steps:
+      - run: 'echo "No tool tests required for this PR"'