feat: run linters in parallel (#5177)

This commit is contained in:
Marco Ferrari 2024-01-30 20:24:55 +01:00 committed by GitHub
parent 0578ab8daf
commit 99e41ce451
19 changed files with 852 additions and 884 deletions

View file

@ -1,5 +1,9 @@
---
# Options reference: https://www.checkov.io/2.Basics/CLI%20Command%20Reference.html
directory:
- test/linters/checkov/bad
quiet: false
...

View file

@ -0,0 +1,8 @@
---
# Options reference: https://www.checkov.io/2.Basics/CLI%20Command%20Reference.html
directory:
- test/linters/checkov/good
quiet: false
...

View file

@ -0,0 +1,13 @@
title = "gitleaks config"
[extend]
# useDefault will extend the base configuration with the default gitleaks config:
# https://github.com/zricethezav/gitleaks/blob/master/config/gitleaks.toml
useDefault = true
[allowlist]
description = "Allow secrets in test files"
paths = [
'''.*/test/linters/gitleaks/bad/.*'''
]

View file

@ -110,6 +110,7 @@ jobs:
VALIDATE_ALL_CODEBASE: false
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
DEFAULT_BRANCH: main
GITLEAKS_CONFIG_FILE: .gitleaks-ignore-tests.toml
RENOVATE_SHAREABLE_CONFIG_PRESET_FILE_NAMES: "default.json,hoge.json"
TYPESCRIPT_STANDARD_TSCONFIG_FILE: ".github/linters/tsconfig.json"

.gitignore (vendored): 3 changes
View file

@ -77,6 +77,9 @@ super-linter.report
# Code coverage data for tests
.coverage
# Terraform workspace
.terraform
# Test reports
test/reports

View file

@ -1,9 +0,0 @@
.github-personal-access-token:github-fine-grained-pat:1
/github/workspace/.github-personal-access-token:github-fine-grained-pat:1
/github/workspace/test/linters/gitleaks/bad/gitleaks_bad_01.txt:aws-access-token:1
/github/workspace/test/linters/gitleaks/bad/gitleaks_bad_01.txt:generic-api-key:2
/tmp/lint/.github-personal-access-token:github-fine-grained-pat:1
/tmp/lint/test/linters/gitleaks/bad/gitleaks_bad_01.txt:aws-access-token:1
/tmp/lint/test/linters/gitleaks/bad/gitleaks_bad_01.txt:generic-api-key:2
test/linters/gitleaks/bad/gitleaks_bad_01.txt:aws-access-token:1
test/linters/gitleaks/bad/gitleaks_bad_01.txt:generic-api-key:2

View file

@ -134,6 +134,7 @@ RUN apk add --no-cache \
nodejs-current \
openjdk17-jre \
openssh-client \
parallel \
perl \
php82 \
php82-ctype \

View file

@ -164,6 +164,7 @@ test-git-flags: ## Run super-linter with different git-related flags
-e ACTIONS_RUNNER_DEBUG=true \
-e ERROR_ON_MISSING_EXEC_BIT=true \
-e ENABLE_GITHUB_ACTIONS_GROUP_TITLE=true \
-e FILTER_REGEX_EXCLUDE=".*/test/linters/.*" \
-e DEFAULT_BRANCH=main \
-e IGNORE_GENERATED_FILES=true \
-e IGNORE_GITIGNORED_FILES=true \
@ -178,6 +179,8 @@ lint-codebase: ## Lint the entire codebase
-e ACTIONS_RUNNER_DEBUG=true \
-e DEFAULT_BRANCH=main \
-e ENABLE_GITHUB_ACTIONS_GROUP_TITLE=true \
-e FILTER_REGEX_EXCLUDE=".*/test/linters/.*" \
-e GITLEAKS_CONFIG_FILE=".gitleaks-ignore-tests.toml" \
-e RENOVATE_SHAREABLE_CONFIG_PRESET_FILE_NAMES="default.json,hoge.json" \
-e VALIDATE_ALL_CODEBASE=true \
-v "$(CURDIR):/tmp/lint" \
@ -195,6 +198,7 @@ lint-subset-files-enable-only-one-type: ## Lint a small subset of files in the c
-e ACTIONS_RUNNER_DEBUG=true \
-e DEFAULT_BRANCH=main \
-e ENABLE_GITHUB_ACTIONS_GROUP_TITLE=true \
-e FILTER_REGEX_EXCLUDE=".*/test/linters/.*" \
-e VALIDATE_ALL_CODEBASE=true \
-e VALIDATE_MARKDOWN=true \
-v "$(CURDIR):/tmp/lint" \
@ -207,6 +211,7 @@ lint-subset-files-enable-expensive-io-checks: ## Lint a small subset of files in
-e ACTIONS_RUNNER_DEBUG=true \
-e DEFAULT_BRANCH=main \
-e ENABLE_GITHUB_ACTIONS_GROUP_TITLE=true \
-e FILTER_REGEX_EXCLUDE=".*/test/linters/.*" \
-e VALIDATE_ALL_CODEBASE=true \
-e VALIDATE_ARM=true \
-e VALIDATE_CLOUDFORMATION=true \
@ -272,19 +277,19 @@ test-custom-ssl-cert: ## Test the configuration of a custom SSL/TLS certificate
$(SUPER_LINTER_TEST_CONTAINER_URL)
.phony: test-linters
test-linters: ## Run the linters test suite
docker run \
-e ACTIONS_RUNNER_DEBUG=true \
-e CHECKOV_FILE_NAME=".checkov-test-linters.yaml" \
-e DEFAULT_BRANCH=main \
-e ENABLE_GITHUB_ACTIONS_GROUP_TITLE=true \
-e JSCPD_CONFIG_FILE=".jscpd-test-linters.json" \
-e RENOVATE_SHAREABLE_CONFIG_PRESET_FILE_NAMES="default.json,hoge.json" \
-e RUN_LOCAL=true \
-e TEST_CASE_RUN=true \
-e TYPESCRIPT_STANDARD_TSCONFIG_FILE=".github/linters/tsconfig.json" \
-v "$(CURDIR):/tmp/lint" \
$(SUPER_LINTER_TEST_CONTAINER_URL)
test-linters: test-linters-expect-success test-linters-expect-failure ## Run the linters test suite
.phony: test-linters-expect-success
test-linters-expect-success: ## Run the linters test suite expecting successes
$(CURDIR)/test/run-super-linter-tests.sh \
$(SUPER_LINTER_TEST_CONTAINER_URL) \
"run_test_cases_expect_success"
.phony: test-linters-expect-failure
test-linters-expect-failure: ## Run the linters test suite expecting failures
$(CURDIR)/test/run-super-linter-tests.sh \
$(SUPER_LINTER_TEST_CONTAINER_URL) \
"run_test_cases_expect_failure"
.phony: build-dev-container-image
build-dev-container-image: ## Build commit linter container image

View file

@ -90,6 +90,7 @@ new tool, it should include:
- Update the orchestration scripts to run the new tool:
- `lib/linter.sh`
- `lib/functions/linterCommands.sh`
- Provide the logic to populate the list of files or directories to examine: `lib/buildFileList.sh` (see the sketch below)
- If necessary, provide elaborate logic to detect if the tool should examine a file or a directory: `lib/detectFiles.sh`
- If the tool needs to take into account special cases:
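
As a hypothetical illustration of the touch points above, a new tool named `example-lint` for `.example` files would typically get a command definition in `lib/functions/linterCommands.sh` and a branch in `BuildFileArrays` (in `lib/functions/buildFileList.sh`) that routes matching files to it. The variable and file-array names below follow the conventions visible elsewhere in this changeset and are illustrative only:

```bash
# lib/functions/linterCommands.sh (hypothetical entry)
LINTER_COMMANDS_ARRAY_EXAMPLE=(example-lint --config "${EXAMPLE_LINTER_RULES}")

# lib/functions/buildFileList.sh, inside BuildFileArrays (hypothetical branch)
if [ "${FILE_TYPE}" == "example" ]; then
  echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-EXAMPLE"
fi
```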

View file

@ -52,10 +52,6 @@ This section helps you migrate from super-linter `v5` to `v6`.
- If you defined secret patterns in `.gitleaks.toml`, Gitleaks may report errors
about that file. If this happens, you can
[configure Gitleaks to ignore that file](https://github.com/gitleaks/gitleaks/tree/master?tab=readme-ov-file#gitleaksignore).
- Gitleaks doesn't consider the `FILTER_REGEX_EXCLUDE`, `FILTER_REGEX_INCLUDE`,
`IGNORE_GENERATED_FILES`, `IGNORE_GITIGNORED_FILES` variables. For more
information about how to ignore files with Gitleaks, see
[the Gitleaks documentation](https://github.com/gitleaks/gitleaks/tree/master?tab=readme-ov-file#gitleaksignore).
### Jscpd

View file

@ -13,7 +13,6 @@ function GenerateFileDiff() {
if [ "${GITHUB_EVENT_NAME:-}" == "push" ]; then
RunFileDiffCommand "${DIFF_TREE_CMD}"
if [ ${#RAW_FILE_ARRAY[@]} -eq 0 ]; then
debug "----------------------------------------------"
debug "Generating the file array with diff-tree produced [0] items, trying with git diff against the default branch..."
RunFileDiffCommand "${DIFF_GIT_DEFAULT_BRANCH_CMD}"
fi
@ -49,20 +48,13 @@ function BuildFileList() {
debug "TEST_CASE_RUN: ${TEST_CASE_RUN}"
if [ "${VALIDATE_ALL_CODEBASE}" == "false" ] && [ "${TEST_CASE_RUN}" != "true" ]; then
debug "----------------------------------------------"
debug "Build the list of all changed files"
GenerateFileDiff
else
WORKSPACE_PATH="${GITHUB_WORKSPACE}"
if [ "${TEST_CASE_RUN}" == "true" ]; then
WORKSPACE_PATH="${GITHUB_WORKSPACE}/${TEST_CASE_FOLDER}"
fi
if [ "${USE_FIND_ALGORITHM}" == 'true' ]; then
debug "----------------------------------------------"
debug "Populating the file list with all the files in the ${WORKSPACE_PATH} workspace using FIND algorithm"
if ! mapfile -t RAW_FILE_ARRAY < <(find "${WORKSPACE_PATH}" \
debug "Populating the file list with all the files in the ${GITHUB_WORKSPACE} workspace using FIND algorithm"
if ! mapfile -t RAW_FILE_ARRAY < <(find "${GITHUB_WORKSPACE}" \
-not \( -path '*/\.git' -prune \) \
-not \( -path '*/\.pytest_cache' -prune \) \
-not \( -path '*/\.rbenv' -prune \) \
@ -87,8 +79,7 @@ function BuildFileList() {
fi
else
debug "----------------------------------------------"
DIFF_GIT_VALIDATE_ALL_CODEBASE="git -C \"${WORKSPACE_PATH}\" ls-tree -r --name-only HEAD | xargs -I % sh -c \"echo ${WORKSPACE_PATH}/%\" 2>&1"
DIFF_GIT_VALIDATE_ALL_CODEBASE="git -C \"${GITHUB_WORKSPACE}\" ls-tree -r --name-only HEAD | xargs -I % sh -c \"echo ${GITHUB_WORKSPACE}/%\" 2>&1"
debug "Populating the file list with: ${DIFF_GIT_VALIDATE_ALL_CODEBASE}"
if ! mapfile -t RAW_FILE_ARRAY < <(eval "set -eo pipefail; ${DIFF_GIT_VALIDATE_ALL_CODEBASE}; set +eo pipefail"); then
fatal "Failed to get a list of changed files. USE_FIND_ALGORITHM: ${USE_FIND_ALGORITHM}"
@ -105,77 +96,108 @@ function BuildFileList() {
####################################################
# Configure linters that scan the entire workspace #
####################################################
debug "Checking if we are in test mode before configuring the list of directories to lint"
debug "Checking if we are in test mode before configuring the list of directories to lint. TEST_CASE_RUN: ${TEST_CASE_RUN}"
if [ "${TEST_CASE_RUN}" == "true" ]; then
debug "We are running in test mode."
debug "Adding test case directories to the list of directories to analyze with ansible-lint."
DEFAULT_ANSIBLE_TEST_CASE_DIRECTORY="${GITHUB_WORKSPACE}/${TEST_CASE_FOLDER}/ansible"
debug "DEFAULT_ANSIBLE_TEST_CASE_DIRECTORY: ${DEFAULT_ANSIBLE_TEST_CASE_DIRECTORY}"
FILE_ARRAY_ANSIBLE+=("${DEFAULT_ANSIBLE_TEST_CASE_DIRECTORY}/bad")
FILE_ARRAY_ANSIBLE+=("${DEFAULT_ANSIBLE_TEST_CASE_DIRECTORY}/good")
debug "Adding test case directories to the list of directories to analyze with Checkov."
DEFAULT_CHECKOV_TEST_CASE_DIRECTORY="${GITHUB_WORKSPACE}/${TEST_CASE_FOLDER}/checkov"
debug "DEFAULT_CHECKOV_TEST_CASE_DIRECTORY: ${DEFAULT_CHECKOV_TEST_CASE_DIRECTORY}"
FILE_ARRAY_CHECKOV+=("${DEFAULT_CHECKOV_TEST_CASE_DIRECTORY}/bad")
FILE_ARRAY_CHECKOV+=("${DEFAULT_CHECKOV_TEST_CASE_DIRECTORY}/good")
debug "Adding test case directories to the list of directories to analyze with Gitleaks."
DEFAULT_GITLEAKS_TEST_CASE_DIRECTORY="${GITHUB_WORKSPACE}/${TEST_CASE_FOLDER}/gitleaks"
debug "DEFAULT_GITLEAKS_TEST_CASE_DIRECTORY: ${DEFAULT_GITLEAKS_TEST_CASE_DIRECTORY}"
FILE_ARRAY_GITLEAKS+=("${DEFAULT_GITLEAKS_TEST_CASE_DIRECTORY}/bad")
FILE_ARRAY_GITLEAKS+=("${DEFAULT_GITLEAKS_TEST_CASE_DIRECTORY}/good")
debug "Adding test case directories to the list of directories to analyze with Checkov."
debug "Adding test case directories to the list of directories to analyze with JSCPD."
DEFAULT_JSCPD_TEST_CASE_DIRECTORY="${GITHUB_WORKSPACE}/${TEST_CASE_FOLDER}/jscpd"
# We need this for parallel
export DEFAULT_JSCPD_TEST_CASE_DIRECTORY
debug "DEFAULT_JSCPD_TEST_CASE_DIRECTORY: ${DEFAULT_JSCPD_TEST_CASE_DIRECTORY}"
FILE_ARRAY_JSCPD+=("${DEFAULT_JSCPD_TEST_CASE_DIRECTORY}/bad")
FILE_ARRAY_JSCPD+=("${DEFAULT_JSCPD_TEST_CASE_DIRECTORY}/good")
else
debug "We are not running in test mode (${TEST_CASE_RUN})."
RAW_FILE_ARRAY+=("${DEFAULT_JSCPD_TEST_CASE_DIRECTORY}/bad")
RAW_FILE_ARRAY+=("${DEFAULT_JSCPD_TEST_CASE_DIRECTORY}/good")
fi
debug "Add GITHUB_WORKSPACE (${GITHUB_WORKSPACE}) to the list of files to lint because we might need it for linters that lint the whole workspace"
RAW_FILE_ARRAY+=("${GITHUB_WORKSPACE}")
if [ -d "${ANSIBLE_DIRECTORY}" ]; then
debug "Adding ANSIBLE_DIRECTORY (${ANSIBLE_DIRECTORY}) to the list of files and directories to lint."
FILE_ARRAY_ANSIBLE+=("${ANSIBLE_DIRECTORY}")
RAW_FILE_ARRAY+=("${ANSIBLE_DIRECTORY}")
else
debug "ANSIBLE_DIRECTORY (${ANSIBLE_DIRECTORY}) does NOT exist."
fi
if CheckovConfigurationFileContainsDirectoryOption "${CHECKOV_LINTER_RULES}"; then
debug "No need to configure the directories to check for Checkov."
local PARALLEL_RESULTS_FILE_PATH
PARALLEL_RESULTS_FILE_PATH="/tmp/super-linter-parallel-results-build-file-list.json"
debug "PARALLEL_RESULTS_FILE_PATH when building the file list: ${PARALLEL_RESULTS_FILE_PATH}"
local -a PARALLEL_COMMAND
PARALLEL_COMMAND=(parallel --will-cite --keep-order --max-procs "$(($(nproc) * 1))" --results "${PARALLEL_RESULTS_FILE_PATH}" --xargs)
if [ "${LOG_DEBUG}" == "true" ]; then
debug "LOG_DEBUG is enabled. Enable verbose ouput for parallel"
PARALLEL_COMMAND+=(--verbose)
fi
# Max number of files to categorize per process
PARALLEL_COMMAND+=(--max-lines 10)
PARALLEL_COMMAND+=("BuildFileArrays")
debug "PARALLEL_COMMAND to build the list of files and directories to lint: ${PARALLEL_COMMAND[*]}"
FILE_ARRAYS_DIRECTORY_PATH="$(mktemp -d)"
export FILE_ARRAYS_DIRECTORY_PATH
debug "Created FILE_ARRAYS_DIRECTORY_PATH: ${FILE_ARRAYS_DIRECTORY_PATH}"
info "Building the list of files and directories to check"
PARALLEL_COMMAND_OUTPUT=$(printf "%s\n" "${RAW_FILE_ARRAY[@]}" | "${PARALLEL_COMMAND[@]}" 2>&1)
PARALLEL_COMMAND_RETURN_CODE=$?
debug "PARALLEL_COMMAND_OUTPUT to build the file list (exit code: ${PARALLEL_COMMAND_RETURN_CODE}):\n${PARALLEL_COMMAND_OUTPUT}"
debug "Parallel output file (${PARALLEL_RESULTS_FILE_PATH}) contents when building the file list:\n$(cat "${PARALLEL_RESULTS_FILE_PATH}")"
local RESULTS_OBJECT
RESULTS_OBJECT=
if ! RESULTS_OBJECT=$(jq -n '[inputs]' "${PARALLEL_RESULTS_FILE_PATH}"); then
fatal "Error loading results when building the file list: ${RESULTS_OBJECT}"
fi
debug "RESULTS_OBJECT for ${FILE_TYPE}:\n${RESULTS_OBJECT}"
local STDOUT_BUILD_FILE_LIST
# Get raw output so we can strip quotes from the data we load
if ! STDOUT_BUILD_FILE_LIST="$(jq --raw-output '.[].Stdout' <<<"${RESULTS_OBJECT}")"; then
fatal "Error when loading stdout when building the file list: ${STDOUT_BUILD_FILE_LIST}"
fi
if [ -n "${STDOUT_BUILD_FILE_LIST}" ]; then
info "Command output when building the file list:\n------\n${STDOUT_BUILD_FILE_LIST}\n------"
else
debug "Adding ${GITHUB_WORKSPACE} to the list of directories to analyze with Checkov."
FILE_ARRAY_CHECKOV+=("${GITHUB_WORKSPACE}")
debug "Stdout when building the file list is empty"
fi
debug "Adding ${GITHUB_WORKSPACE} to the list of directories to analyze with Gitleaks."
FILE_ARRAY_GITLEAKS+=("${GITHUB_WORKSPACE}")
debug "Adding ${GITHUB_WORKSPACE} to the list of directories to analyze with JSCPD."
FILE_ARRAY_JSCPD+=("${GITHUB_WORKSPACE}")
local STDERR_BUILD_FILE_LIST
if ! STDERR_BUILD_FILE_LIST="$(jq --raw-output '.[].Stderr' <<<"${RESULTS_OBJECT}")"; then
fatal "Error when loading stderr when building the file list:\n${STDERR_BUILD_FILE_LIST}"
fi
if CheckovConfigurationFileContainsDirectoryOption "${CHECKOV_LINTER_RULES}"; then
debug "No need to configure the directories to check for Checkov."
if [ -n "${STDERR_BUILD_FILE_LIST}" ]; then
info "Command output when building the file list:\n------\n${STDERR_BUILD_FILE_LIST}\n------"
else
debug "Checking if we are in test mode before configuring the list of directories to lint with Checkov"
if [ "${TEST_CASE_RUN}" == "true" ]; then
debug "We are running in test mode. Adding test case directories to the list of directories to analyze with Checkov."
FILE_ARRAY_CHECKOV+=("${DEFAULT_CHECKOV_TEST_CASE_DIRECTORY}/bad")
FILE_ARRAY_CHECKOV+=("${DEFAULT_CHECKOV_TEST_CASE_DIRECTORY}/good")
else
debug "We are not running in test mode (${TEST_CASE_RUN}). Adding ${GITHUB_WORKSPACE} to the list of directories to analyze with Checkov."
FILE_ARRAY_CHECKOV+=("${GITHUB_WORKSPACE}")
fi
debug "Stderr when building the file list is empty"
fi
################################################
# Iterate through the array of all files found #
################################################
info "---------------------------------"
info "------ File list to check: ------"
info "---------------------------------"
if [[ ${PARALLEL_COMMAND_RETURN_CODE} -ne 0 ]]; then
fatal "Error when building the list of files and directories to lint."
fi
################
# Footer print #
################
info "Successfully gathered list of files..."
}
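# A minimal, self-contained sketch of the parallel-plus-jq pattern that BuildFileList
# uses above. The names in this sketch (CategorizeItems, EXAMPLE_*) are hypothetical
# and only illustrate the mechanism: feed newline-separated items to GNU parallel,
# record one JSON object per job with --results, then aggregate Stdout and Stderr with jq.
CategorizeItems() { printf 'categorized: %s\n' "$@"; }
# Export the function so the child shells that parallel spawns can see it
# (this assumes the child shell is bash, as it is in the Super-Linter container)
export -f CategorizeItems
EXAMPLE_ITEMS=("a.sh" "b.py" "c.tf")
EXAMPLE_RESULTS_FILE="/tmp/example-parallel-results.json"
printf "%s\n" "${EXAMPLE_ITEMS[@]}" |
  parallel --will-cite --keep-order --xargs --max-lines 10 \
    --results "${EXAMPLE_RESULTS_FILE}" CategorizeItems
# Each record in the results file has Stdout and Stderr fields for one job
jq --raw-output -n '[inputs] | .[].Stdout' "${EXAMPLE_RESULTS_FILE}"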
BuildFileArrays() {
local -a RAW_FILE_ARRAY
RAW_FILE_ARRAY=("$@")
debug "Categorizing the following files: ${RAW_FILE_ARRAY[*]}"
debug "FILTER_REGEX_INCLUDE: ${FILTER_REGEX_INCLUDE}, FILTER_REGEX_EXCLUDE: ${FILTER_REGEX_EXCLUDE}"
ValidateBooleanVariable "IGNORE_GENERATED_FILES" "${IGNORE_GENERATED_FILES}"
ValidateBooleanVariable "IGNORE_GITIGNORED_FILES" "${IGNORE_GITIGNORED_FILES}"
for FILE in "${RAW_FILE_ARRAY[@]}"; do
# Get the file extension
FILE_TYPE="$(GetFileExtension "$FILE")"
@ -187,23 +209,35 @@ function BuildFileList() {
debug "FILE: ${FILE}, FILE_TYPE: ${FILE_TYPE}, BASE_FILE: ${BASE_FILE}, FILE_DIR_NAME: ${FILE_DIR_NAME}"
if [ ! -f "${FILE}" ]; then
if [ ! -e "${FILE}" ]; then
# File not found in workspace
warn "File:{$FILE} existed in commit data, but not found on file system, skipping..."
warn "{$FILE} exists in commit data, but not found on file system, skipping..."
continue
fi
########################################################
# Don't include test cases if not running in test mode #
########################################################
if [[ ${FILE} == *"${TEST_CASE_FOLDER}"* ]] && [ "${TEST_CASE_RUN}" != "true" ]; then
debug "TEST_CASE_RUN (${TEST_CASE_RUN}) is not true. Skipping ${FILE}..."
# Handle the corner cases of linters that are expected to lint the whole codebase,
# but for which we don't have a variable to explicitly set the directory
# to lint.
if [[ "${FILE}" == "${GITHUB_WORKSPACE}" ]]; then
debug "${FILE} matches with ${GITHUB_WORKSPACE}. Adding it to the list of directories to lint for linters that are expected to lint the whole codebase"
if CheckovConfigurationFileContainsDirectoryOption "${CHECKOV_LINTER_RULES}"; then
debug "No need to configure the directories to check for Checkov because its configuration file contains the list of directories to analyze."
debug "Add the Checkov configuration file path to the list of items to check to consume as output later."
echo "${CHECKOV_LINTER_RULES}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-CHECKOV"
else
debug "Adding ${GITHUB_WORKSPACE} to the list of directories to analyze with Checkov."
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-CHECKOV"
fi
# JSCPD test cases are handled below because we first need to exclude non-relevant test cases
if [[ "${TEST_CASE_RUN}" == "false" ]]; then
debug "Add ${FILE} to the list of items to lint with JSCPD"
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-JSCPD"
fi
# No need to process this item further
continue
##################################################
# Include test cases if not running in test mode #
##################################################
elif [[ ${FILE} != *"${TEST_CASE_FOLDER}"* ]] && [ "${TEST_CASE_RUN}" == "true" ]; then
debug "TEST_CASE_RUN (${TEST_CASE_RUN}) is true. Skipping ${FILE}..."
fi
###############################################
@ -238,20 +272,40 @@ function BuildFileList() {
continue
fi
# Editorconfig-checker should check every file
FILE_ARRAY_EDITORCONFIG+=("${FILE}")
# These linters check every file
local EDITORCONFIG_FILE_PATH
EDITORCONFIG_FILE_PATH="${GITHUB_WORKSPACE}/.editorconfig"
if [ -e "${EDITORCONFIG_FILE_PATH}" ]; then
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-EDITORCONFIG"
else
debug "Don't include ${FILE} in the list of files to lint with editorconfig-checker because the workspace doesn't contain an EditorConfig file: ${EDITORCONFIG_FILE_PATH}"
fi
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-GITLEAKS"
if [[ ("${FILE}" =~ .*${ANSIBLE_DIRECTORY}.*) ]] && [[ -d "${FILE}" ]]; then
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-ANSIBLE"
fi
# Handle JSCPD test cases
# At this point, we already processed the options to include or exclude files, so we
# excluded test cases that are not relevant
if [[ "${TEST_CASE_RUN}" == "true" ]] && [[ "${FILE}" =~ .*${DEFAULT_JSCPD_TEST_CASE_DIRECTORY}.* ]] && [[ -d "${FILE}" ]]; then
debug "${FILE} is a test case for JSCPD. Adding it to the list of items to lint with JSCPD"
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-JSCPD"
fi
# See https://docs.renovatebot.com/configuration-options/
if [[ "${BASE_FILE}" =~ renovate.json5? ]] ||
[ "${BASE_FILE}" == ".renovaterc" ] || [[ "${BASE_FILE}" =~ .renovaterc.json5? ]]; then
FILE_ARRAY_RENOVATE+=("${FILE}")
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-RENOVATE"
fi
# See https://docs.renovatebot.com/config-presets/
IFS="," read -r -a RENOVATE_SHAREABLE_CONFIG_PRESET_FILE_NAMES_ARRAY <<<"${RENOVATE_SHAREABLE_CONFIG_PRESET_FILE_NAMES}"
for file_name in "${RENOVATE_SHAREABLE_CONFIG_PRESET_FILE_NAMES_ARRAY[@]}"; do
if [ "${BASE_FILE}" == "${file_name}" ]; then
FILE_ARRAY_RENOVATE+=("${FILE}")
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-RENOVATE"
break
fi
done
@ -268,205 +322,99 @@ function BuildFileList() {
else
debug "Considering ${FILE_DIR_NAME} as a Go module."
fi
FILE_ARRAY_GO_MODULES+=("${FILE_DIR_NAME}")
echo "${FILE_DIR_NAME}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-GO_MODULES"
fi
#######################
# Get the shell files #
#######################
if IsValidShellScript "${FILE}"; then
FILE_ARRAY_BASH+=("${FILE}")
FILE_ARRAY_BASH_EXEC+=("${FILE}")
FILE_ARRAY_SHELL_SHFMT+=("${FILE}")
#########################
# Get the CLOJURE files #
#########################
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-BASH"
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-BASH_EXEC"
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-SHELL_SHFMT"
elif [ "${FILE_TYPE}" == "clj" ] || [ "${FILE_TYPE}" == "cljs" ] ||
[ "${FILE_TYPE}" == "cljc" ] || [ "${FILE_TYPE}" == "edn" ]; then
FILE_ARRAY_CLOJURE+=("${FILE}")
#####################
# Get the C++ files #
#####################
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-CLOJURE"
elif [ "${FILE_TYPE}" == "cpp" ] || [ "${FILE_TYPE}" == "h" ] ||
[ "${FILE_TYPE}" == "cc" ] || [ "${FILE_TYPE}" == "hpp" ] ||
[ "${FILE_TYPE}" == "cxx" ] || [ "${FILE_TYPE}" == "cu" ] ||
[ "${FILE_TYPE}" == "hxx" ] || [ "${FILE_TYPE}" == "c++" ] ||
[ "${FILE_TYPE}" == "hh" ] || [ "${FILE_TYPE}" == "h++" ] ||
[ "${FILE_TYPE}" == "cuh" ] || [ "${FILE_TYPE}" == "c" ]; then
FILE_ARRAY_CPP+=("${FILE}")
FILE_ARRAY_CLANG_FORMAT+=("${FILE}")
########################
# Get the COFFEE files #
########################
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-CPP"
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-CLANG_FORMAT"
elif [ "${FILE_TYPE}" == "coffee" ]; then
FILE_ARRAY_COFFEESCRIPT+=("${FILE}")
########################
# Get the CSHARP files #
########################
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-COFFEESCRIPT"
elif [ "${FILE_TYPE}" == "cs" ]; then
FILE_ARRAY_CSHARP+=("${FILE}")
#####################
# Get the CSS files #
#####################
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-CSHARP"
elif [ "${FILE_TYPE}" == "css" ] || [ "${FILE_TYPE}" == "scss" ] ||
[ "${FILE_TYPE}" == "sass" ]; then
FILE_ARRAY_CSS+=("${FILE}")
######################
# Get the DART files #
######################
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-CSS"
elif [ "${FILE_TYPE}" == "dart" ]; then
FILE_ARRAY_DART+=("${FILE}")
########################
# Get the DOCKER files #
########################
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-DART"
# Use BASE_FILE here because FILE_TYPE is not reliable when there is no file extension
elif [[ "${FILE_TYPE}" != "tap" ]] && [[ "${FILE_TYPE}" != "yml" ]] &&
[[ "${FILE_TYPE}" != "yaml" ]] && [[ "${FILE_TYPE}" != "json" ]] &&
[[ "${FILE_TYPE}" != "xml" ]] &&
[[ "${BASE_FILE}" =~ ^(.+\.)?(contain|dock)erfile$ ]]; then
FILE_ARRAY_DOCKERFILE_HADOLINT+=("${FILE}")
#####################
# Get the ENV files #
#####################
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-DOCKERFILE_HADOLINT"
elif [ "${FILE_TYPE}" == "env" ] || [[ "${BASE_FILE}" == *".env."* ]]; then
FILE_ARRAY_ENV+=("${FILE}")
#########################
# Get the Gherkin files #
#########################
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-ENV"
elif [ "${FILE_TYPE}" == "feature" ]; then
FILE_ARRAY_GHERKIN+=("${FILE}")
########################
# Get the Golang files #
########################
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-GHERKIN"
elif [ "${FILE_TYPE}" == "go" ]; then
FILE_ARRAY_GO+=("${FILE}")
########################
# Get the GROOVY files #
########################
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-GO"
# Use BASE_FILE here because FILE_TYPE is not reliable when there is no file extension
elif [ "$FILE_TYPE" == "groovy" ] || [ "$FILE_TYPE" == "jenkinsfile" ] ||
[ "$FILE_TYPE" == "gradle" ] || [ "$FILE_TYPE" == "nf" ] ||
[[ "$BASE_FILE" =~ .*jenkinsfile.* ]]; then
FILE_ARRAY_GROOVY+=("$FILE")
######################
# Get the HTML files #
######################
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-GROOVY"
elif [ "${FILE_TYPE}" == "html" ]; then
FILE_ARRAY_HTML+=("${FILE}")
######################
# Get the Java files #
######################
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-HTML"
elif [ "${FILE_TYPE}" == "java" ]; then
FILE_ARRAY_JAVA+=("${FILE}")
FILE_ARRAY_GOOGLE_JAVA_FORMAT+=("${FILE}")
############################
# Get the JavaScript files #
############################
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-JAVA"
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-GOOGLE_JAVA_FORMAT"
elif [ "${FILE_TYPE}" == "js" ]; then
FILE_ARRAY_JAVASCRIPT_ES+=("${FILE}")
FILE_ARRAY_JAVASCRIPT_STANDARD+=("${FILE}")
FILE_ARRAY_JAVASCRIPT_PRETTIER+=("${FILE}")
#######################
# Get the JSONC files #
#######################
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-JAVASCRIPT_ES"
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-JAVASCRIPT_STANDARD"
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-JAVASCRIPT_PRETTIER"
elif [ "$FILE_TYPE" == "jsonc" ] || [ "$FILE_TYPE" == "json5" ]; then
FILE_ARRAY_JSONC+=("${FILE}")
######################
# Get the JSON files #
######################
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-JSONC"
elif [ "${FILE_TYPE}" == "json" ]; then
FILE_ARRAY_JSON+=("${FILE}")
############################
# Check if file is OpenAPI #
############################
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-JSON"
if DetectOpenAPIFile "${FILE}"; then
FILE_ARRAY_OPENAPI+=("${FILE}")
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-OPENAPI"
fi
########################
# Check if file is ARM #
########################
if DetectARMFile "${FILE}"; then
FILE_ARRAY_ARM+=("${FILE}")
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-ARM"
fi
#####################################
# Check if the file is CFN template #
#####################################
if DetectCloudFormationFile "${FILE}"; then
FILE_ARRAY_CLOUDFORMATION+=("${FILE}")
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-CLOUDFORMATION"
fi
############################################
# Check if the file is AWS States Language #
############################################
if DetectAWSStatesFIle "${FILE}"; then
FILE_ARRAY_STATES+=("${FILE}")
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-STATES"
fi
#####################
# Get the JSX files #
#####################
elif [ "${FILE_TYPE}" == "jsx" ]; then
FILE_ARRAY_JSX+=("${FILE}")
########################
# Get the KOTLIN files #
########################
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-JSX"
elif [ "${FILE_TYPE}" == "kt" ] || [ "${FILE_TYPE}" == "kts" ]; then
FILE_ARRAY_KOTLIN+=("${FILE}")
#####################
# Get the LUA files #
#####################
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-KOTLIN"
elif [ "$FILE_TYPE" == "lua" ]; then
FILE_ARRAY_LUA+=("$FILE")
#######################
# Get the LaTeX files #
#######################
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-LUA"
elif [ "${FILE_TYPE}" == "tex" ]; then
FILE_ARRAY_LATEX+=("${FILE}")
##########################
# Get the MARKDOWN files #
##########################
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-LATEX"
elif [ "${FILE_TYPE}" == "md" ]; then
FILE_ARRAY_MARKDOWN+=("${FILE}")
FILE_ARRAY_NATURAL_LANGUAGE+=("${FILE}")
######################
# Get the PHP files #
######################
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-MARKDOWN"
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-NATURAL_LANGUAGE"
elif [ "${FILE_TYPE}" == "php" ]; then
FILE_ARRAY_PHP_BUILTIN+=("${FILE}")
FILE_ARRAY_PHP_PHPCS+=("${FILE}")
FILE_ARRAY_PHP_PHPSTAN+=("${FILE}")
FILE_ARRAY_PHP_PSALM+=("${FILE}")
######################
# Get the PERL files #
######################
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-PHP_BUILTIN"
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-PHP_PHPCS"
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-PHP_PHPSTAN"
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-PHP_PSALM"
elif [ "${FILE_TYPE}" == "pl" ] || [ "${FILE_TYPE}" == "pm" ] ||
[ "${FILE_TYPE}" == "t" ]; then
FILE_ARRAY_PERL+=("${FILE}")
############################
# Get the Powershell files #
############################
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-PERL"
elif [ "${FILE_TYPE}" == "ps1" ] ||
[ "${FILE_TYPE}" == "psm1" ] ||
[ "${FILE_TYPE}" == "psd1" ] ||
@ -474,179 +422,83 @@ function BuildFileList() {
[ "${FILE_TYPE}" == "pssc" ] ||
[ "${FILE_TYPE}" == "psrc" ] ||
[ "${FILE_TYPE}" == "cdxml" ]; then
FILE_ARRAY_POWERSHELL+=("${FILE}")
#################################
# Get the PROTOCOL BUFFER files #
#################################
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-POWERSHELL"
elif [ "${FILE_TYPE}" == "proto" ]; then
FILE_ARRAY_PROTOBUF+=("${FILE}")
########################
# Get the PYTHON files #
########################
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-PROTOBUF"
elif [ "${FILE_TYPE}" == "py" ]; then
FILE_ARRAY_PYTHON_BLACK+=("${FILE}")
FILE_ARRAY_PYTHON_FLAKE8+=("${FILE}")
FILE_ARRAY_PYTHON_ISORT+=("${FILE}")
FILE_ARRAY_PYTHON_PYLINT+=("${FILE}")
FILE_ARRAY_PYTHON_MYPY+=("${FILE}")
######################
# Get the RAKU files #
######################
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-PYTHON_BLACK"
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-PYTHON_FLAKE8"
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-PYTHON_ISORT"
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-PYTHON_PYLINT"
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-PYTHON_MYPY"
elif [ "${FILE_TYPE}" == "raku" ] || [ "${FILE_TYPE}" == "rakumod" ] ||
[ "${FILE_TYPE}" == "rakutest" ] || [ "${FILE_TYPE}" == "pm6" ] ||
[ "${FILE_TYPE}" == "pl6" ] || [ "${FILE_TYPE}" == "p6" ]; then
FILE_ARRAY_RAKU+=("${FILE}")
####################
# Get the R files #
####################
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-RAKU"
elif [ "${FILE_TYPE}" == "r" ] || [ "${FILE_TYPE}" == "rmd" ]; then
FILE_ARRAY_R+=("${FILE}")
######################
# Get the RUBY files #
######################
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-R"
elif [ "${FILE_TYPE}" == "rb" ]; then
FILE_ARRAY_RUBY+=("${FILE}")
######################
# Get the RUST files #
######################
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-RUBY"
elif [ "${FILE_TYPE}" == "rs" ]; then
FILE_ARRAY_RUST_2015+=("${FILE}")
FILE_ARRAY_RUST_2018+=("${FILE}")
FILE_ARRAY_RUST_2021+=("${FILE}")
#######################
# Get the RUST crates #
#######################
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-RUST_2015"
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-RUST_2018"
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-RUST_2021"
elif [ "${BASE_FILE}" == "cargo.toml" ]; then
###############################################
# Append the crate manifest file to the array #
###############################################
FILE_ARRAY_RUST_CLIPPY+=("${FILE}")
###########################
# Get the SCALA files #
###########################
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-RUST_CLIPPY"
elif [ "${FILE_TYPE}" == "scala" ] || [ "${FILE_TYPE}" == "sc" ] || [ "${BASE_FILE}" == "??????" ]; then
FILE_ARRAY_SCALAFMT+=("${FILE}")
###########################
# Get the SNAKEMAKE files #
###########################
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-SCALAFMT"
elif [ "${FILE_TYPE}" == "smk" ] || [ "${BASE_FILE}" == "snakefile" ]; then
FILE_ARRAY_SNAKEMAKE_LINT+=("${FILE}")
FILE_ARRAY_SNAKEMAKE_SNAKEFMT+=("${FILE}")
#####################
# Get the SQL files #
#####################
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-SNAKEMAKE_LINT"
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-SNAKEMAKE_SNAKEFMT"
elif [ "${FILE_TYPE}" == "sql" ]; then
FILE_ARRAY_SQL+=("${FILE}")
FILE_ARRAY_SQLFLUFF+=("${FILE}")
###########################
# Get the Terraform files #
###########################
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-SQL"
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-SQLFLUFF"
elif [ "${FILE_TYPE}" == "tf" ]; then
FILE_ARRAY_TERRAFORM_TFLINT+=("${FILE}")
FILE_ARRAY_TERRAFORM_TERRASCAN+=("${FILE}")
FILE_ARRAY_TERRAFORM_FMT+=("${FILE}")
############################
# Get the Terragrunt files #
############################
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-TERRAFORM_TFLINT"
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-TERRAFORM_TERRASCAN"
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-TERRAFORM_FMT"
elif [ "${FILE_TYPE}" == "hcl" ] &&
[[ ${FILE} != *".tflint.hcl"* ]] &&
[[ ${FILE} != *".pkr.hcl"* ]] &&
[[ ${FILE} != *"docker-bake.hcl"* ]] &&
[[ ${FILE} != *"docker-bake.override.hcl"* ]]; then
FILE_ARRAY_TERRAGRUNT+=("${FILE}")
############################
# Get the TypeScript files #
############################
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-TERRAGRUNT"
elif [ "${FILE_TYPE}" == "ts" ]; then
FILE_ARRAY_TYPESCRIPT_ES+=("${FILE}")
FILE_ARRAY_TYPESCRIPT_STANDARD+=("${FILE}")
FILE_ARRAY_TYPESCRIPT_PRETTIER+=("${FILE}")
#####################
# Get the TSX files #
#####################
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-TYPESCRIPT_ES"
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-TYPESCRIPT_STANDARD"
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-TYPESCRIPT_PRETTIER"
elif [ "${FILE_TYPE}" == "tsx" ]; then
FILE_ARRAY_TSX+=("${FILE}")
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-TSX"
elif [ "${FILE_TYPE}" == "txt" ]; then
FILE_ARRAY_NATURAL_LANGUAGE+=("${FILE}")
#####################
# Get the XML files #
#####################
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-TXT"
elif [ "${FILE_TYPE}" == "xml" ]; then
FILE_ARRAY_XML+=("${FILE}")
################################
# Get the CLOUDFORMATION files #
################################
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-XML"
elif [ "${FILE_TYPE}" == "yml" ] || [ "${FILE_TYPE}" == "yaml" ]; then
FILE_ARRAY_YAML+=("${FILE}")
###################################
# Check if file is GitHub Actions #
###################################
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-YAML"
if DetectActions "${FILE}"; then
FILE_ARRAY_GITHUB_ACTIONS+=("${FILE}")
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-GITHUB_ACTIONS"
fi
#####################################
# Check if the file is CFN template #
#####################################
if DetectCloudFormationFile "${FILE}"; then
FILE_ARRAY_CLOUDFORMATION+=("${FILE}")
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-CLOUDFORMATION"
fi
############################
# Check if file is OpenAPI #
############################
if DetectOpenAPIFile "${FILE}"; then
FILE_ARRAY_OPENAPI+=("${FILE}")
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-OPENAPI"
fi
########################################
# Check if the file is Tekton template #
########################################
if DetectTektonFile "${FILE}"; then
FILE_ARRAY_TEKTON+=("${FILE}")
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-TEKTON"
fi
############################################
# Check if the file is Kubernetes template #
############################################
if DetectKubernetesFile "${FILE}"; then
FILE_ARRAY_KUBERNETES_KUBECONFORM+=("${FILE}")
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-KUBERNETES_KUBECONFORM"
fi
########################################################################
# We have something that we need to try to check file type another way #
########################################################################
else
##############################################
# Use file to see if we can parse what it is #
##############################################
CheckFileType "${FILE}"
fi
##########################################
# Print line break after each file debug #
##########################################
debug ""
done
################
# Footer print #
################
info "----------------------------------------------"
info "Successfully gathered list of files..."
}
# We need this for parallel
export -f BuildFileArrays
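# A small, hypothetical illustration of why this export is needed: GNU parallel runs
# every job in a child shell, so bash functions are only visible there when exported
# with `export -f`, and plain variables only when exported with `export` (this assumes
# the child shell is bash, as it is in the Super-Linter container).
ExampleWorker() { printf 'worker saw %s in %s\n' "${1}" "${EXAMPLE_SHARED_DIR}"; }
export -f ExampleWorker
EXAMPLE_SHARED_DIR="/tmp/example-shared"
export EXAMPLE_SHARED_DIR
printf "%s\n" one two three | parallel --will-cite ExampleWorker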

View file

@ -139,21 +139,6 @@ DetectAWSStatesFIle() {
return 1
}
CheckInArray() {
NEEDLE="$1" # Language we need to match
######################################
# Check if Language was in the array #
######################################
for LANG in "${UNIQUE_LINTED_ARRAY[@]}"; do
if [[ "${LANG}" == "${NEEDLE}" ]]; then
return 0
fi
done
return 1
}
function GetFileType() {
# Need to run the file through the 'file' exec to help determine
# The type of file being parsed
@ -168,21 +153,23 @@ function CheckFileType() {
# Need to run the file through the 'file' exec to help determine
# The type of file being parsed
local FILE
FILE="$1"
local GET_FILE_TYPE_CMD
GET_FILE_TYPE_CMD="$(GetFileType "$FILE")"
local FILE_TYPE_MESSAGE
if [[ ${GET_FILE_TYPE_CMD} == *"Ruby script"* ]]; then
FILE_TYPE_MESSAGE="Found Ruby script without extension (${FILE}). Rename the file with proper extension for Ruby files."
FILE_ARRAY_RUBY+=("${FILE}")
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-RUBY"
elif [[ ${GET_FILE_TYPE_CMD} == *"Python script"* ]]; then
FILE_TYPE_MESSAGE="Found Python script without extension (${FILE}). Rename the file with proper extension for Python files."
FILE_ARRAY_PYTHON+=("${FILE}")
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-PYTHON"
elif [[ ${GET_FILE_TYPE_CMD} == *"Perl script"* ]]; then
FILE_TYPE_MESSAGE="Found Perl script without extension (${FILE}). Rename the file with proper extension for Perl files."
FILE_ARRAY_PERL+=("${FILE}")
echo "${FILE}" >>"${FILE_ARRAYS_DIRECTORY_PATH}/file-array-PERL"
else
FILE_TYPE_MESSAGE="Failed to get file type for: ${FILE}"
fi
@ -266,11 +253,30 @@ function IsGenerated() {
fi
}
# We need these functions when building the file list with parallel
export -f CheckFileType
export -f DetectActions
export -f DetectARMFile
export -f DetectAWSStatesFIle
export -f DetectCloudFormationFile
export -f DetectKubernetesFile
export -f DetectOpenAPIFile
export -f DetectTektonFile
export -f GetFileExtension
export -f GetFileType
export -f IsValidShellScript
export -f IsGenerated
function RunAdditionalInstalls() {
if [ -z "${FILE_ARRAYS_DIRECTORY_PATH}" ] || [ ! -d "${FILE_ARRAYS_DIRECTORY_PATH}" ]; then
fatal "FILE_ARRAYS_DIRECTORY_PATH (set to ${FILE_ARRAYS_DIRECTORY_PATH}) is empty or doesn't exist"
fi
##################################
# Run installs for Psalm and PHP #
##################################
if [ "${VALIDATE_PHP_PSALM}" == "true" ] && [ "${#FILE_ARRAY_PHP_PSALM[@]}" -ne 0 ]; then
if [ "${VALIDATE_PHP_PSALM}" == "true" ] && [ -e "${FILE_ARRAYS_DIRECTORY_PATH}/file-array-PHP_PSALM" ]; then
# Found PHP files and we're validating them, so we need to run composer install
info "Found PHP files to validate, and [VALIDATE_PHP_PSALM] is set to true, need to run composer install"
info "looking for composer.json in the user's repository..."
@ -311,13 +317,13 @@ function RunAdditionalInstalls() {
###############################
# Run installs for R language #
###############################
if [ "${VALIDATE_R}" == "true" ] && [ "${#FILE_ARRAY_R[@]}" -ne 0 ]; then
if [ "${VALIDATE_R}" == "true" ] && [ -e "${FILE_ARRAYS_DIRECTORY_PATH}/file-array-R" ]; then
info "Detected R Language files to lint."
info "Trying to install the R package inside:[${WORKSPACE_PATH}]"
info "Trying to install the R package inside:[${GITHUB_WORKSPACE}]"
#########################
# Run the build command #
#########################
BUILD_CMD=$(R CMD build "${WORKSPACE_PATH}" 2>&1)
BUILD_CMD=$(R CMD build "${GITHUB_WORKSPACE}" 2>&1)
##############
# Error code #
@ -329,19 +335,19 @@ function RunAdditionalInstalls() {
##############################
if [ "${ERROR_CODE}" -ne 0 ]; then
# Error
warn "ERROR! Failed to run:[R CMD build] at location:[${WORKSPACE_PATH}]"
warn "ERROR! Failed to run:[R CMD build] at location:[${GITHUB_WORKSPACE}]"
warn "BUILD_CMD:[${BUILD_CMD}]"
else
# Get the build package
BUILD_PKG=$(
cd "${WORKSPACE_PATH}" || exit 0
cd "${GITHUB_WORKSPACE}" || exit 0
echo *.tar.gz 2>&1
)
##############################
# Install the build packages #
##############################
INSTALL_CMD=$(
cd "${WORKSPACE_PATH}" || exit 0
cd "${GITHUB_WORKSPACE}" || exit 0
R -e "remotes::install_local('.', dependencies=T)" 2>&1
)
@ -358,19 +364,26 @@ function RunAdditionalInstalls() {
warn "ERROR: Failed to install the build package at:[${BUILD_PKG}]"
fi
fi
if [ ! -f "${R_RULES_FILE_PATH_IN_ROOT}" ]; then
info "No .lintr configuration file found, using defaults."
cp "$R_LINTER_RULES" "$GITHUB_WORKSPACE"
# shellcheck disable=SC2034
SUPER_LINTER_COPIED_R_LINTER_RULES_FILE="true"
fi
fi
####################################
# Run installs for TFLINT language #
####################################
if [ "${VALIDATE_TERRAFORM_TFLINT}" == "true" ] && [ "${#FILE_ARRAY_TERRAFORM_TFLINT[@]}" -ne 0 ]; then
if [ "${VALIDATE_TERRAFORM_TFLINT}" == "true" ] && [ -e "${FILE_ARRAYS_DIRECTORY_PATH}/file-array-TERRAFORM_TFLINT" ]; then
info "Detected TFLint Language files to lint."
info "Trying to install the TFLint init inside:[${WORKSPACE_PATH}]"
info "Trying to install the TFLint init inside:[${GITHUB_WORKSPACE}]"
#########################
# Run the build command #
#########################
BUILD_CMD=$(
cd "${WORKSPACE_PATH}" || exit 0
cd "${GITHUB_WORKSPACE}" || exit 0
tflint --init -c "${TERRAFORM_TFLINT_LINTER_RULES}" 2>&1
)
@ -388,5 +401,44 @@ function RunAdditionalInstalls() {
info "Successfully initialized tflint with the ${TERRAFORM_TFLINT_LINTER_RULES} config file"
debug "Tflint output: ${BUILD_CMD}"
fi
# Array to track directories where tflint was run
local -A TFLINT_SEEN_DIRS
TFLINT_SEEN_DIRS=()
for FILE in "${FILE_ARRAY_TERRAFORM_TFLINT[@]}"; do
local DIR_NAME
DIR_NAME=$(dirname "${FILE}" 2>&1)
debug "DIR_NAME for ${FILE}: ${DIR_NAME}"
# Check the cache to see if we've already prepped this directory for tflint
if [[ ! -v "TFLINT_SEEN_DIRS[${DIR_NAME}]" ]]; then
debug "Configuring Terraform data directory for ${DIR_NAME}"
# Define the path to an empty Terraform data directory
# (def: https://developer.hashicorp.com/terraform/cli/config/environment-variables#tf_data_dir)
# in case the user has a Terraform data directory already, and we don't
# want to modify it.
# TFlint considers this variable as well.
# Ref: https://github.com/terraform-linters/tflint/blob/master/docs/user-guide/compatibility.md#environment-variables
TF_DATA_DIR="/tmp/.terraform-${TERRAFORM_TFLINT}-${DIR_NAME}"
# Fetch Terraform modules
debug "Fetch Terraform modules for ${FILE} in ${DIR_NAME} in ${TF_DATA_DIR}"
local FETCH_TERRAFORM_MODULES_CMD
if ! FETCH_TERRAFORM_MODULES_CMD="$(terraform get)"; then
fatal "Error when fetching Terraform modules while linting ${FILE}. Command output: ${FETCH_TERRAFORM_MODULES_CMD}"
fi
debug "Fetch Terraform modules command for ${FILE} output: ${FETCH_TERRAFORM_MODULES_CMD}"
# Let the cache know we've seen this before
# Set the value to an arbitrary non-empty string.
TFLINT_SEEN_DIRS[${DIR_NAME}]="false"
else
debug "Skip fetching Terraform modules for ${FILE} because we already did that for ${DIR_NAME}"
fi
done
fi
# Check if there's local configuration for the Raku linter
if [ -e "${GITHUB_WORKSPACE}/META6.json" ]; then
cd "${GITHUB_WORKSPACE}" && zef install --deps-only --/test .
fi
}

lib/functions/linterCommands.sh (new executable file): 124 changes
View file

@ -0,0 +1,124 @@
#!/usr/bin/env bash
##########################
# Define linter commands #
##########################
# If the command doesn't reference the input argument ({}), parallel appends a default {} at the end of the command.
# In a few cases, such as ANSIBLE and GO_MODULES, we consume the input before running the command
# because we need the input to set the working directory, but we don't need it appended
# at the end of the command. Setting -n 0 would not help in this case, because the input
# would not be passed to the --workdir option either.
# shellcheck disable=SC2034 # Variable is referenced in other scripts
LINTER_COMMANDS_ARRAY_ANSIBLE=(ansible-lint -c "${ANSIBLE_LINTER_RULES}" "&& echo \"Linted: {}\"")
LINTER_COMMANDS_ARRAY_ARM=(pwsh -NoProfile -NoLogo -Command "\"Import-Module ${ARM_TTK_PSD1} ; \\\${config} = \\\$(Import-PowerShellDataFile -Path ${ARM_LINTER_RULES}) ; Test-AzTemplate @config -TemplatePath '{}'; if (\\\${Error}.Count) { exit 1 }\"")
LINTER_COMMANDS_ARRAY_BASH=(shellcheck --color --external-sources)
if [ -n "${BASH_SEVERITY}" ]; then
LINTER_COMMANDS_ARRAY_BASH+=(--severity="${BASH_SEVERITY}")
fi
LINTER_COMMANDS_ARRAY_BASH_EXEC=(bash-exec)
LINTER_COMMANDS_ARRAY_CHECKOV=(checkov --config-file "${CHECKOV_LINTER_RULES}")
if CheckovConfigurationFileContainsDirectoryOption "${CHECKOV_LINTER_RULES}"; then
# Consume the input as we do with ANSIBLE
debug "Consume the input of the Checkov command because we don't need to add it as an argument."
LINTER_COMMANDS_ARRAY_CHECKOV+=("&& echo \"Got the list of directories to lint from the configuration file: {}\"")
else
debug "Adding the '--directory' option to the Checkov command."
LINTER_COMMANDS_ARRAY_CHECKOV+=(--directory)
fi
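# A hypothetical illustration of the replacement strings used by the commands in this file:
# when a command doesn't reference '{}', parallel appends the input item at the end of the
# command line; '{/}' expands to the basename of the input and '{//}' to its directory.
printf "%s\n" /tmp/lint/example/main.tf |
  parallel --will-cite echo "dir: {//} base: {/} full: {}"
# Commands such as ANSIBLE and GO_MODULES only need the input as a working directory, so
# they consume '{}' in a trailing echo to keep parallel from appending it again:
printf "%s\n" /tmp/lint/example |
  parallel --will-cite "true && echo \"Linted: {}\""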
LINTER_COMMANDS_ARRAY_CLANG_FORMAT=(clang-format --Werror --dry-run)
LINTER_COMMANDS_ARRAY_CLOJURE=(clj-kondo --config "${CLOJURE_LINTER_RULES}" --lint)
LINTER_COMMANDS_ARRAY_CLOUDFORMATION=(cfn-lint --config-file "${CLOUDFORMATION_LINTER_RULES}")
LINTER_COMMANDS_ARRAY_COFFEESCRIPT=(coffeelint -f "${COFFEESCRIPT_LINTER_RULES}")
LINTER_COMMANDS_ARRAY_CPP=(cpplint)
LINTER_COMMANDS_ARRAY_CSHARP=(dotnet format whitespace --folder --verify-no-changes --exclude / --include "{/}")
LINTER_COMMANDS_ARRAY_CSS=(stylelint --config "${CSS_LINTER_RULES}")
LINTER_COMMANDS_ARRAY_DART=(dart analyze --fatal-infos --fatal-warnings)
LINTER_COMMANDS_ARRAY_DOCKERFILE_HADOLINT=(hadolint -c "${DOCKERFILE_HADOLINT_LINTER_RULES}")
LINTER_COMMANDS_ARRAY_EDITORCONFIG=(editorconfig-checker -config "${EDITORCONFIG_LINTER_RULES}")
LINTER_COMMANDS_ARRAY_ENV=(dotenv-linter)
LINTER_COMMANDS_ARRAY_GITHUB_ACTIONS=(actionlint -config-file "${GITHUB_ACTIONS_LINTER_RULES}")
if [ "${GITHUB_ACTIONS_COMMAND_ARGS}" != "null" ] && [ -n "${GITHUB_ACTIONS_COMMAND_ARGS}" ]; then
LINTER_COMMANDS_ARRAY_GITHUB_ACTIONS+=("${GITHUB_ACTIONS_COMMAND_ARGS}")
fi
LINTER_COMMANDS_ARRAY_GITLEAKS=(gitleaks detect --no-banner --no-git --redact --config "${GITLEAKS_LINTER_RULES}" --verbose --source)
LINTER_COMMANDS_ARRAY_GHERKIN=(gherkin-lint -c "${GHERKIN_LINTER_RULES}")
LINTER_COMMANDS_ARRAY_GO=(golangci-lint run -c "${GO_LINTER_RULES}" --fast)
# Consume the input as we do with ANSIBLE
LINTER_COMMANDS_ARRAY_GO_MODULES=(golangci-lint run --allow-parallel-runners -c "${GO_LINTER_RULES}" "&& echo \"Linted: {}\"")
LINTER_COMMANDS_ARRAY_GOOGLE_JAVA_FORMAT=(java -jar /usr/bin/google-java-format --dry-run --set-exit-if-changed)
LINTER_COMMANDS_ARRAY_GROOVY=(npm-groovy-lint -c "${GROOVY_LINTER_RULES}" --failon warning --no-insight)
LINTER_COMMANDS_ARRAY_HTML=(htmlhint --config "${HTML_LINTER_RULES}")
LINTER_COMMANDS_ARRAY_JAVA=(java -jar /usr/bin/checkstyle -c "${JAVA_LINTER_RULES}")
LINTER_COMMANDS_ARRAY_JAVASCRIPT_ES=(eslint --no-eslintrc -c "${JAVASCRIPT_ES_LINTER_RULES}")
LINTER_COMMANDS_ARRAY_JAVASCRIPT_STANDARD=(standard "${JAVASCRIPT_STANDARD_LINTER_RULES}")
LINTER_COMMANDS_ARRAY_JAVASCRIPT_PRETTIER=(prettier --check)
LINTER_COMMANDS_ARRAY_JSCPD=(jscpd --config "${JSCPD_LINTER_RULES}")
LINTER_COMMANDS_ARRAY_JSON=(eslint --no-eslintrc -c "${JAVASCRIPT_ES_LINTER_RULES}" --ext '.json')
LINTER_COMMANDS_ARRAY_JSONC=(eslint --no-eslintrc -c "${JAVASCRIPT_ES_LINTER_RULES}" --ext '.json5,.jsonc')
LINTER_COMMANDS_ARRAY_JSX=(eslint --no-eslintrc -c "${JSX_LINTER_RULES}")
LINTER_COMMANDS_ARRAY_KOTLIN=(ktlint "{/}")
LINTER_COMMANDS_ARRAY_KUBERNETES_KUBECONFORM=(kubeconform -strict)
if [ "${KUBERNETES_KUBECONFORM_OPTIONS}" != "null" ] && [ -n "${KUBERNETES_KUBECONFORM_OPTIONS}" ]; then
LINTER_COMMANDS_ARRAY_KUBERNETES_KUBECONFORM+=("${KUBERNETES_KUBECONFORM_OPTIONS}")
fi
LINTER_COMMANDS_ARRAY_LATEX=(chktex -q -l "${LATEX_LINTER_RULES}")
LINTER_COMMANDS_ARRAY_LUA=(luacheck --config "${LUA_LINTER_RULES}")
LINTER_COMMANDS_ARRAY_MARKDOWN=(markdownlint -c "${MARKDOWN_LINTER_RULES}")
if [ -n "${MARKDOWN_CUSTOM_RULE_GLOBS}" ]; then
IFS="," read -r -a MARKDOWN_CUSTOM_RULE_GLOBS_ARRAY <<<"${MARKDOWN_CUSTOM_RULE_GLOBS}"
for glob in "${MARKDOWN_CUSTOM_RULE_GLOBS_ARRAY[@]}"; do
if [ -z "${LINTER_RULES_PATH}" ]; then
LINTER_COMMANDS_ARRAY_MARKDOWN+=(-r "${GITHUB_WORKSPACE}/${glob}")
else
LINTER_COMMANDS_ARRAY_MARKDOWN+=(-r "${GITHUB_WORKSPACE}/${LINTER_RULES_PATH}/${glob}")
fi
done
fi
LINTER_COMMANDS_ARRAY_NATURAL_LANGUAGE=(textlint -c "${NATURAL_LANGUAGE_LINTER_RULES}")
LINTER_COMMANDS_ARRAY_OPENAPI=(spectral lint -r "${OPENAPI_LINTER_RULES}" -D)
LINTER_COMMANDS_ARRAY_PERL=(perlcritic)
if [ "${PERL_PERLCRITIC_OPTIONS}" != "null" ] && [ -n "${PERL_PERLCRITIC_OPTIONS}" ]; then
LINTER_COMMANDS_ARRAY_PERL+=("${PERL_PERLCRITIC_OPTIONS}")
fi
LINTER_COMMANDS_ARRAY_PHP_BUILTIN=(php -l -c "${PHP_BUILTIN_LINTER_RULES}")
LINTER_COMMANDS_ARRAY_PHP_PHPCS=(phpcs --standard="${PHP_PHPCS_LINTER_RULES}")
LINTER_COMMANDS_ARRAY_PHP_PHPSTAN=(phpstan analyse --no-progress --no-ansi --memory-limit 1G -c "${PHP_PHPSTAN_LINTER_RULES}")
LINTER_COMMANDS_ARRAY_PHP_PSALM=(psalm --config="${PHP_PSALM_LINTER_RULES}")
LINTER_COMMANDS_ARRAY_POWERSHELL=(pwsh -NoProfile -NoLogo -Command "\"Invoke-ScriptAnalyzer -EnableExit -Settings ${POWERSHELL_LINTER_RULES} -Path '{}'; if (\\\${Error}.Count) { exit 1 }\"")
LINTER_COMMANDS_ARRAY_PROTOBUF=(protolint lint --config_path "${PROTOBUF_LINTER_RULES}")
LINTER_COMMANDS_ARRAY_PYTHON_BLACK=(black --config "${PYTHON_BLACK_LINTER_RULES}" --diff --check)
LINTER_COMMANDS_ARRAY_PYTHON_PYLINT=(pylint --rcfile "${PYTHON_PYLINT_LINTER_RULES}")
LINTER_COMMANDS_ARRAY_PYTHON_FLAKE8=(flake8 --config="${PYTHON_FLAKE8_LINTER_RULES}")
LINTER_COMMANDS_ARRAY_PYTHON_ISORT=(isort --check --diff --sp "${PYTHON_ISORT_LINTER_RULES}")
LINTER_COMMANDS_ARRAY_PYTHON_MYPY=(mypy --config-file "${PYTHON_MYPY_LINTER_RULES}" --install-types --non-interactive)
LINTER_COMMANDS_ARRAY_R=(R --slave -e "\"lints <- lintr::lint('{}');print(lints);errors <- purrr::keep(lints, ~ .\\\$type == 'error');quit(save = 'no', status = if (length(errors) > 0) 1 else 0)\"")
LINTER_COMMANDS_ARRAY_RAKU=(raku)
LINTER_COMMANDS_ARRAY_RENOVATE=(renovate-config-validator --strict)
LINTER_COMMANDS_ARRAY_RUBY=(rubocop -c "${RUBY_LINTER_RULES}" --force-exclusion --ignore-unrecognized-cops)
LINTER_COMMANDS_ARRAY_RUST_2015=(rustfmt --check --edition 2015)
LINTER_COMMANDS_ARRAY_RUST_2018=(rustfmt --check --edition 2018)
LINTER_COMMANDS_ARRAY_RUST_2021=(rustfmt --check --edition 2021)
LINTER_COMMANDS_ARRAY_RUST_CLIPPY=(clippy)
LINTER_COMMANDS_ARRAY_SCALAFMT=(scalafmt --config "${SCALAFMT_LINTER_RULES}" --test)
LINTER_COMMANDS_ARRAY_SHELL_SHFMT=(shfmt -d)
LINTER_COMMANDS_ARRAY_SNAKEMAKE_LINT=(snakemake --lint -s)
LINTER_COMMANDS_ARRAY_SNAKEMAKE_SNAKEFMT=(snakefmt --config "${SNAKEMAKE_SNAKEFMT_LINTER_RULES}" --check --compact-diff)
LINTER_COMMANDS_ARRAY_STATES=(asl-validator --json-path)
LINTER_COMMANDS_ARRAY_SQL=(sql-lint --config "${SQL_LINTER_RULES}")
LINTER_COMMANDS_ARRAY_SQLFLUFF=(sqlfluff lint --config "${SQLFLUFF_LINTER_RULES}")
LINTER_COMMANDS_ARRAY_TEKTON=(tekton-lint)
LINTER_COMMANDS_ARRAY_TERRAFORM_FMT=(terraform fmt -check -diff)
LINTER_COMMANDS_ARRAY_TERRAFORM_TFLINT=("TF_DATA_DIR=\"/tmp/.terraform-TERRAFORM_TFLINT-{//}\"" tflint -c "${TERRAFORM_TFLINT_LINTER_RULES}" "--filter=\"{/}\"")
LINTER_COMMANDS_ARRAY_TERRAFORM_TERRASCAN=(terrascan scan -i terraform -t all -c "${TERRAFORM_TERRASCAN_LINTER_RULES}" -f)
LINTER_COMMANDS_ARRAY_TERRAGRUNT=(terragrunt hclfmt --terragrunt-check --terragrunt-log-level error --terragrunt-hclfmt-file)
LINTER_COMMANDS_ARRAY_TSX=(eslint --no-eslintrc -c "${TSX_LINTER_RULES}")
LINTER_COMMANDS_ARRAY_TYPESCRIPT_ES=(eslint --no-eslintrc -c "${TYPESCRIPT_ES_LINTER_RULES}")
LINTER_COMMANDS_ARRAY_TYPESCRIPT_STANDARD=(ts-standard --parser @typescript-eslint/parser --plugin @typescript-eslint/eslint-plugin --project "${TYPESCRIPT_STANDARD_TSCONFIG_FILE}")
LINTER_COMMANDS_ARRAY_TYPESCRIPT_PRETTIER=(prettier --check)
LINTER_COMMANDS_ARRAY_XML=(xmllint)
LINTER_COMMANDS_ARRAY_YAML=(yamllint -c "${YAML_LINTER_RULES}" -f parsable)
if [ "${YAML_ERROR_ON_WARNING}" == 'true' ]; then
LINTER_COMMANDS_ARRAY_YAML+=(--strict)
fi

View file

@ -98,8 +98,11 @@ fatal() {
# shellcheck disable=SC2034 # Variable is referenced in other files
SUPER_LINTER_INITIALIZATION_LOG_GROUP_TITLE="Super-Linter initialization"
export SUPER_LINTER_INITIALIZATION_LOG_GROUP_TITLE
GITHUB_ACTIONS_LOG_GROUP_MARKER_START="start"
export GITHUB_ACTIONS_LOG_GROUP_MARKER_START
GITHUB_ACTIONS_LOG_GROUP_MARKER_END="end"
export GITHUB_ACTIONS_LOG_GROUP_MARKER_END
writeGitHubActionsLogGroupMarker() {
local LOG_GROUP_MARKER_MODE="${1}"
@ -139,3 +142,16 @@ startGitHubActionsLogGroup() {
endGitHubActionsLogGroup() {
writeGitHubActionsLogGroupMarker "${GITHUB_ACTIONS_LOG_GROUP_MARKER_END}" "${1}"
}
# We need these functions to be available when using parallel to run subprocesses
export -f debug
export -f endGitHubActionsLogGroup
export -f error
export -f fatal
export -f info
export -f log
export -f notice
export -f startGitHubActionsLogGroup
export -f trace
export -f warn
export -f writeGitHubActionsLogGroupMarker

View file

@ -13,11 +13,11 @@ function ValidateBooleanConfigurationVariables() {
ValidateBooleanVariable "SSH_SETUP_GITHUB" "${SSH_SETUP_GITHUB}"
ValidateBooleanVariable "SUPPRESS_FILE_TYPE_WARN" "${SUPPRESS_FILE_TYPE_WARN}"
ValidateBooleanVariable "SUPPRESS_POSSUM" "${SUPPRESS_POSSUM}"
ValidateBooleanVariable "USE_FIND_ALGORITHM" "${USE_FIND_ALGORITHM}"
ValidateBooleanVariable "TEST_CASE_RUN" "${TEST_CASE_RUN}"
ValidateBooleanVariable "USE_FIND_ALGORITHM" "${USE_FIND_ALGORITHM}"
ValidateBooleanVariable "VALIDATE_ALL_CODEBASE" "${VALIDATE_ALL_CODEBASE}"
ValidateBooleanVariable "YAML_ERROR_ON_WARNING" "${YAML_ERROR_ON_WARNING}"
ValidateBooleanVariable "WRITE_LINTER_VERSIONS_FILE" "${WRITE_LINTER_VERSIONS_FILE}"
ValidateBooleanVariable "YAML_ERROR_ON_WARNING" "${YAML_ERROR_ON_WARNING}"
}
function ValidateGitHubWorkspace() {
@ -100,14 +100,6 @@ function GetValidationInfo() {
VALIDATE_LANGUAGE="VALIDATE_${LANGUAGE}"
if [[ ${!VALIDATE_LANGUAGE} == "true" ]]; then
debug "- Validating [${LANGUAGE}] files in code base..."
debug "Defining variables for ${LANGUAGE} linter..."
ERRORS_VARIABLE_NAME="ERRORS_FOUND_${LANGUAGE}"
debug "Setting ${ERRORS_VARIABLE_NAME} variable value to 0..."
eval "${ERRORS_VARIABLE_NAME}=0"
debug "Exporting ${ERRORS_VARIABLE_NAME} variable..."
eval "export ${ERRORS_VARIABLE_NAME}"
else
debug "- Excluding [$LANGUAGE] files in code base..."
fi
@ -139,10 +131,6 @@ function GetValidationInfo() {
ANSIBLE_DIRECTORY="${TEMP_ANSIBLE_DIRECTORY}"
debug "Setting Ansible directory to: ${ANSIBLE_DIRECTORY}"
fi
debug "Runner: $(id -un 2>/dev/null)"
debug "ENV:"
debug "$(printenv | sort)"
}
function CheckIfGitBranchExists() {
@ -170,6 +158,7 @@ function ValidateBooleanVariable() {
debug "${VAR_NAME} has a valid boolean string value: ${VAR_VALUE}"
fi
}
export -f ValidateBooleanVariable
function ValidateLocalGitRepository() {
debug "Check if ${GITHUB_WORKSPACE} is a Git repository"
@ -251,6 +240,10 @@ function CheckovConfigurationFileContainsDirectoryOption() {
local CONFIGURATION_OPTION_KEY="directory:"
debug "Checking if ${CHECKOV_LINTER_RULES_PATH} contains a '${CONFIGURATION_OPTION_KEY}' configuration option"
if [ ! -e "${CHECKOV_LINTER_RULES_PATH}" ]; then
fatal "${CHECKOV_LINTER_RULES_PATH} doesn't exist. Cannot check if it contains a '${CONFIGURATION_OPTION_KEY}' configuration option"
fi
if grep -q "${CONFIGURATION_OPTION_KEY}" "${CHECKOV_LINTER_RULES_PATH}"; then
debug "${CHECKOV_LINTER_RULES_PATH} contains a '${CONFIGURATION_OPTION_KEY}' statement"
return 0
@ -259,6 +252,7 @@ function CheckovConfigurationFileContainsDirectoryOption() {
return 1
fi
}
export -f CheckovConfigurationFileContainsDirectoryOption
function WarnIfVariableIsSet() {
local INPUT_VARIABLE="${1}"

View file

@ -1,213 +1,218 @@
#!/usr/bin/env bash
function LintCodebase() {
local FILE_TYPE
FILE_TYPE="${1}" && shift
LINTER_NAME="${1}" && shift
LINTER_COMMAND="${1}" && shift
FILTER_REGEX_INCLUDE="${1}" && shift
FILTER_REGEX_EXCLUDE="${1}" && shift
local TEST_CASE_RUN
TEST_CASE_RUN="${1}" && shift
FILE_ARRAY=("$@")
# Array to track directories where tflint was run
declare -A TFLINT_SEEN_DIRS
declare -n VALIDATE_LANGUAGE
VALIDATE_LANGUAGE="VALIDATE_${FILE_TYPE}"
# To count how many files were checked for a given FILE_TYPE
INDEX=0
# To check how many "bad" and "good" test cases we ran
BAD_TEST_CASES_COUNT=0
GOOD_TEST_CASES_COUNT=0
WORKSPACE_PATH="${GITHUB_WORKSPACE}"
if [ "${TEST_CASE_RUN}" == "true" ]; then
WORKSPACE_PATH="${GITHUB_WORKSPACE}/${TEST_CASE_FOLDER}"
if [[ "${VALIDATE_LANGUAGE}" == "false" ]]; then
if [[ "${TEST_CASE_RUN}" == "false" ]]; then
debug "Skip validation of ${FILE_TYPE} because VALIDATE_LANGUAGE is ${VALIDATE_LANGUAGE}"
unset -n VALIDATE_LANGUAGE
return 0
else
fatal "Don't disable any validation when running in test mode. VALIDATE_${FILE_TYPE} is set to: ${VALIDATE_LANGUAGE}. Set it to: true"
fi
fi
debug "Workspace path: ${WORKSPACE_PATH}"
info ""
info "----------------------------------------------"
info "----------------------------------------------"
debug "Running LintCodebase. FILE_TYPE: ${FILE_TYPE}. Linter name: ${LINTER_NAME}, linter command: ${LINTER_COMMAND}, TEST_CASE_RUN: ${TEST_CASE_RUN}, FILTER_REGEX_INCLUDE: ${FILTER_REGEX_INCLUDE}, FILTER_REGEX_EXCLUDE: ${FILTER_REGEX_EXCLUDE}, files to lint: ${FILE_ARRAY[*]}"
info "Linting ${FILE_TYPE} files..."
info "----------------------------------------------"
info "----------------------------------------------"
debug "Running LintCodebase. FILE_TYPE: ${FILE_TYPE}. TEST_CASE_RUN: ${TEST_CASE_RUN}"
for FILE in "${FILE_ARRAY[@]}"; do
info "Checking file: ${FILE}"
debug "VALIDATE_LANGUAGE for ${FILE_TYPE}: ${VALIDATE_LANGUAGE}..."
ValidateBooleanVariable "TEST_CASE_RUN" "${TEST_CASE_RUN}"
ValidateBooleanVariable "VALIDATE_${FILE_TYPE}" "${VALIDATE_LANGUAGE}"
unset -n VALIDATE_LANGUAGE
debug "Populating file array for ${FILE_TYPE}"
local -n FILE_ARRAY="FILE_ARRAY_${FILE_TYPE}"
local FILE_ARRAY_LANGUAGE_PATH="${FILE_ARRAYS_DIRECTORY_PATH}/file-array-${FILE_TYPE}"
if [[ -e "${FILE_ARRAY_LANGUAGE_PATH}" ]]; then
while read -r FILE; do
if [[ "${TEST_CASE_RUN}" == "true" ]]; then
debug "Ensure that the list files to check for ${FILE_TYPE} doesn't include test cases for other languages"
# Folder for specific tests. By convention, the last part of the path is the lowercased FILE_TYPE
local TEST_CASE_DIRECTORY
TEST_CASE_DIRECTORY="${TEST_CASE_FOLDER}/${FILE_TYPE,,}/"
debug "TEST_CASE_DIRECTORY for ${FILE}: ${TEST_CASE_DIRECTORY}"
TEST_CASE_DIRECTORY="${FILE_TYPE,,}"
# We use configuration files to pass the list of files to lint to checkov
# Their name includes "checkov", which is equal to FILE_TYPE for Checkov.
# In this case, we don't add a trailing slash so we don't fail validation.
if [[ "${FILE_TYPE}" != "CHECKOV" ]]; then
TEST_CASE_DIRECTORY="${TEST_CASE_DIRECTORY}/"
debug "Adding a traling slash to the test case directory for ${FILE_TYPE}: ${TEST_CASE_DIRECTORY}"
fi
debug "TEST_CASE_DIRECTORY for ${FILE_TYPE}: ${TEST_CASE_DIRECTORY}"
if [[ ${FILE} != *"${TEST_CASE_DIRECTORY}"* ]]; then
debug "Skipping ${FILE} because it's not in the test case directory for ${FILE_TYPE}..."
debug "Excluding ${FILE} because it's not in the test case directory for ${FILE_TYPE}..."
continue
fi
fi
local FILE_NAME
FILE_NAME=$(basename "${FILE}" 2>&1)
debug "FILE_NAME for ${FILE}: ${FILE_NAME}"
local DIR_NAME
DIR_NAME=$(dirname "${FILE}" 2>&1)
debug "DIR_NAME for ${FILE}: ${DIR_NAME}"
(("INDEX++"))
LINTED_LANGUAGES_ARRAY+=("${FILE_TYPE}")
local LINT_CMD
LINT_CMD=''
if [[ ${FILE_TYPE} == "POWERSHELL" ]] || [[ ${FILE_TYPE} == "ARM" ]]; then
# Need to run PowerShell commands using pwsh -c, also exit with exit code from inner subshell
LINT_CMD=$(
cd "${WORKSPACE_PATH}" || exit
pwsh -NoProfile -NoLogo -Command "${LINTER_COMMAND} \"${FILE}\"; if (\${Error}.Count) { exit 1 }"
exit $? 2>&1
)
elif [[ ${FILE_TYPE} == "R" ]]; then
local r_dir
if [ ! -f "${DIR_NAME}/.lintr" ]; then
r_dir="${WORKSPACE_PATH}"
else
r_dir="${DIR_NAME}"
debug "Including ${FILE} because it's a test case for ${FILE_TYPE}"
fi
LINT_CMD=$(
cd "$r_dir" || exit
R --slave -e "lints <- lintr::lint('$FILE');print(lints);errors <- purrr::keep(lints, ~ .\$type == 'error');quit(save = 'no', status = if (length(errors) > 0) 1 else 0)" 2>&1
)
elif [[ ${FILE_TYPE} == "CSHARP" ]]; then
# Because the C# linter writes to tty and not stdout
LINT_CMD=$(
cd "${DIR_NAME}" || exit
${LINTER_COMMAND} "${FILE_NAME}" | tee /dev/tty2 2>&1
exit "${PIPESTATUS[0]}"
)
fi
FILE_ARRAY+=("${FILE}")
done <"${FILE_ARRAY_LANGUAGE_PATH}"
else
debug "${FILE_ARRAY_LANGUAGE_PATH} doesn't exist. Skip loading the list of files and directories to lint for ${FILE_TYPE}"
fi
if [[ "${#FILE_ARRAY[@]}" -eq 0 ]]; then
if [[ "${TEST_CASE_RUN}" == "false" ]]; then
debug "There are no items to lint for ${FILE_TYPE}"
unset -n FILE_ARRAY
return 0
else
fatal "Cannot find any tests for ${FILE_TYPE}"
fi
else
debug "There are ${#FILE_ARRAY[@]} items to lint for ${FILE_TYPE}: ${FILE_ARRAY[*]}"
fi
startGitHubActionsLogGroup "${FILE_TYPE}"
info "Linting ${FILE_TYPE} items..."
local PARALLEL_RESULTS_FILE_PATH
PARALLEL_RESULTS_FILE_PATH="/tmp/super-linter-worker-results-${FILE_TYPE}.json"
debug "PARALLEL_RESULTS_FILE_PATH for ${FILE_TYPE}: ${PARALLEL_RESULTS_FILE_PATH}"
local -a PARALLEL_COMMAND
PARALLEL_COMMAND=(parallel --will-cite --keep-order --max-procs "$(($(nproc) * 1))" --xargs --results "${PARALLEL_RESULTS_FILE_PATH}")
if [ "${LOG_DEBUG}" == "true" ]; then
debug "LOG_DEBUG is enabled. Enable verbose ouput for parallel"
PARALLEL_COMMAND+=(--verbose)
fi
debug "PARALLEL_COMMAND for ${FILE_TYPE}: ${PARALLEL_COMMAND[*]}"
# The following linters only support linting one file at a time, so we cannot
# pass more than one file per invocation
if [[ "${FILE_TYPE}" == "ANSIBLE" ]] ||
[[ "${FILE_TYPE}" == "ARM" ]] ||
[[ "${FILE_TYPE}" == "BASH_EXEC" ]] ||
[[ "${FILE_TYPE}" == "CLOJURE" ]] ||
[[ "${FILE_TYPE}" == "CSHARP" ]] ||
[[ "${FILE_TYPE}" == "GITLEAKS" ]] ||
[[ "${FILE_TYPE}" == "GO_MODULES" ]] ||
[[ "${FILE_TYPE}" == "JSCPD" ]] ||
[[ "${FILE_TYPE}" == "KOTLIN" ]] ||
[[ "${FILE_TYPE}" == "SQL" ]] ||
[[ "${FILE_TYPE}" == "SQLFLUFF" ]] ||
[[ "${FILE_TYPE}" == "CHECKOV" ]] ||
[[ "${FILE_TYPE}" == "POWERSHELL" ]] ||
[[ "${FILE_TYPE}" == "R" ]] ||
[[ "${FILE_TYPE}" == "RUST_CLIPPY" ]] ||
[[ "${FILE_TYPE}" == "SNAKEMAKE_LINT" ]] ||
[[ "${FILE_TYPE}" == "STATES" ]] ||
[[ "${FILE_TYPE}" == "TERRAFORM_TFLINT" ]] ||
[[ "${FILE_TYPE}" == "TERRAFORM_TERRASCAN" ]] ||
[[ "${FILE_TYPE}" == "TERRAGRUNT" ]]; then
debug "${FILE_TYPE} doesn't support linting files in batches. Configure the linter to run over the files to lint one by one"
PARALLEL_COMMAND+=(--max-lines 1)
fi
debug "PARALLEL_COMMAND for ${FILE_TYPE} after updating the number of files to lint per process: ${PARALLEL_COMMAND[*]}"
local LINTER_WORKING_DIRECTORY
LINTER_WORKING_DIRECTORY="${GITHUB_WORKSPACE}"
# GNU parallel parameter expansion:
# - {} input item
# - {/} basename of the input line
# - {//} dirname of the input line
if [[ ${FILE_TYPE} == "CSHARP" ]] ||
[[ (${FILE_TYPE} == "R" && -f "$(dirname "${FILE}")/.lintr") ]] ||
[[ ${FILE_TYPE} == "KOTLIN" ]] ||
[[ ${FILE_TYPE} == "TERRAFORM_TFLINT" ]]; then
LINTER_WORKING_DIRECTORY="{//}"
elif [[ ${FILE_TYPE} == "ANSIBLE" ]] ||
[[ ${FILE_TYPE} == "GO_MODULES" ]]; then
debug "Linting ${FILE_TYPE}. Changing the working directory to ${FILE} before running the linter."
# Because it expects that the working directory is a Go module (GO_MODULES) or
# because we want to enable ansible-lint autodetection mode.
# Ref: https://ansible-lint.readthedocs.io/usage
LINT_CMD=$(
cd "${FILE}" || exit 1
${LINTER_COMMAND} 2>&1
)
elif [[ ${FILE_TYPE} == "KOTLIN" ]]; then
# Because it needs to change directory to where the file to lint is
LINT_CMD=$(
cd "${DIR_NAME}" || exit
${LINTER_COMMAND} "${FILE_NAME}" 2>&1
)
elif [[ ${FILE_TYPE} == "TERRAFORM_TFLINT" ]]; then
# Check the cache to see if we've already prepped this directory for tflint
if [[ ! -v "TFLINT_SEEN_DIRS[${DIR_NAME}]" ]]; then
debug "Configuring Terraform data directory for ${DIR_NAME}"
# Define the path to an empty Terraform data directory
# (def: https://developer.hashicorp.com/terraform/cli/config/environment-variables#tf_data_dir)
# in case the user has a Terraform data directory already, and we don't
# want to modify it.
# TFlint considers this variable as well.
# Ref: https://github.com/terraform-linters/tflint/blob/master/docs/user-guide/compatibility.md#environment-variables
local TF_DATA_DIR
TF_DATA_DIR="/tmp/.terraform-${FILE_TYPE}-${DIR_NAME}"
export TF_DATA_DIR
# Let the cache know we've seen this before
# Set the value to an arbitrary non-empty string.
# Fetch Terraform modules
debug "Fetch Terraform modules for ${DIR_NAME} in ${TF_DATA_DIR}"
local FETCH_TERRAFORM_MODULES_CMD
FETCH_TERRAFORM_MODULES_CMD="$(terraform get)"
ERROR_CODE=$?
debug "Fetch Terraform modules. Exit code: ${ERROR_CODE}. Command output: ${FETCH_TERRAFORM_MODULES_CMD}"
if [ ${ERROR_CODE} -ne 0 ]; then
fatal "Error when fetching Terraform modules while linting ${FILE}"
fi
TFLINT_SEEN_DIRS[${DIR_NAME}]="false"
LINTER_WORKING_DIRECTORY="{}"
fi
# Because it needs to change the directory to where the file to lint is
LINT_CMD=$(
cd "${DIR_NAME}" || exit
${LINTER_COMMAND} --filter="${FILE_NAME}" 2>&1
)
debug "LINTER_WORKING_DIRECTORY for ${FILE_TYPE}: ${LINTER_WORKING_DIRECTORY}"
PARALLEL_COMMAND+=(--workdir "${LINTER_WORKING_DIRECTORY}")
debug "PARALLEL_COMMAND for ${FILE_TYPE} after updating the working directory: ${PARALLEL_COMMAND[*]}"
# shellcheck source=/dev/null
source /action/lib/functions/linterCommands.sh
local -n LINTER_COMMAND_ARRAY
LINTER_COMMAND_ARRAY="LINTER_COMMANDS_ARRAY_${FILE_TYPE}"
if [ ${#LINTER_COMMAND_ARRAY[@]} -eq 0 ]; then
fatal "LINTER_COMMAND_ARRAY for ${FILE_TYPE} is empty."
else
LINT_CMD=$(
cd "${WORKSPACE_PATH}" || exit
${LINTER_COMMAND} "${FILE}" 2>&1
)
debug "LINTER_COMMAND_ARRAY for ${FILE_TYPE} has ${#LINTER_COMMAND_ARRAY[@]} elements: ${LINTER_COMMAND_ARRAY[*]}"
fi
ERROR_CODE=$?
PARALLEL_COMMAND+=("${LINTER_COMMAND_ARRAY[@]}")
debug "PARALLEL_COMMAND for ${FILE_TYPE} after LINTER_COMMAND_ARRAY concatenation: ${PARALLEL_COMMAND[*]}"
local FILE_STATUS
# Assume that the file should pass linting checks
FILE_STATUS="good"
unset -n LINTER_COMMAND_ARRAY
if [[ "${TEST_CASE_RUN}" == "true" ]] && [[ ${FILE} == *"bad"* ]]; then
FILE_STATUS="bad"
debug "We are running in test mode. Updating the expected FILE_STATUS for ${FILE} to: ${FILE_STATUS}"
fi
local PARALLEL_COMMAND_OUTPUT
local PARALLEL_COMMAND_RETURN_CODE
PARALLEL_COMMAND_OUTPUT=$(printf "%s\n" "${FILE_ARRAY[@]}" | "${PARALLEL_COMMAND[@]}" 2>&1)
# Don't check for errors on this return code because commands can fail if linters report errors
PARALLEL_COMMAND_RETURN_CODE=$?
debug "PARALLEL_COMMAND_OUTPUT for ${FILE_TYPE} (exit code: ${PARALLEL_COMMAND_RETURN_CODE}): ${PARALLEL_COMMAND_OUTPUT}"
debug "Parallel output file (${PARALLEL_RESULTS_FILE_PATH}) contents for ${FILE_TYPE}:\n$(cat "${PARALLEL_RESULTS_FILE_PATH}")"
debug "Results for ${FILE}. Exit code: ${ERROR_CODE}. Command output:\n------\n${LINT_CMD}\n------"
echo ${PARALLEL_COMMAND_RETURN_CODE} >"/tmp/super-linter-parallel-command-exit-code-${FILE_TYPE}"
########################################
# File status = good, this should pass #
########################################
if [[ ${FILE_STATUS} == "good" ]]; then
(("GOOD_TEST_CASES_COUNT++"))
if [ ${ERROR_CODE} -ne 0 ]; then
error "Found errors when linting ${FILE_NAME}. Exit code: ${ERROR_CODE}. Command output:\n------\n${LINT_CMD}\n------"
(("ERRORS_FOUND_${FILE_TYPE}++"))
if [ ${PARALLEL_COMMAND_RETURN_CODE} -ne 0 ]; then
error "Found errors when linting ${FILE_TYPE}. Exit code: ${PARALLEL_COMMAND_RETURN_CODE}."
else
notice "${FILE} was linted successfully"
if [ -n "${LINT_CMD}" ]; then
info "Command output for ${FILE_NAME}:\n------\n${LINT_CMD}\n------"
notice "${FILE_TYPE} linted successfully"
fi
local RESULTS_OBJECT
RESULTS_OBJECT=
if ! RESULTS_OBJECT=$(jq -n '[inputs]' "${PARALLEL_RESULTS_FILE_PATH}"); then
fatal "Error loading results for ${FILE_TYPE}: ${RESULTS_OBJECT}"
fi
#######################################
# File status = bad, this should fail #
#######################################
debug "RESULTS_OBJECT for ${FILE_TYPE}:\n${RESULTS_OBJECT}"
# To count how many files were checked for a given FILE_TYPE
local INDEX
INDEX=0
if ! ((INDEX = $(jq '[.[] | .V | length] | add' <<<"${RESULTS_OBJECT}"))); then
fatal "Error when setting INDEX for ${FILE_TYPE}: ${INDEX}"
fi
debug "Set INDEX for ${FILE_TYPE} to: ${INDEX}"
local STDOUT_LINTER
# Get raw output so we can strip quotes from the data we load
if ! STDOUT_LINTER="$(jq --raw-output '.[].Stdout' <<<"${RESULTS_OBJECT}")"; then
fatal "Error when loading stdout for ${FILE_TYPE}:\n${STDOUT_LINTER}"
fi
if [ -n "${STDOUT_LINTER}" ]; then
info "Command output for ${FILE_TYPE}:\n------\n${STDOUT_LINTER}\n------"
else
if [[ "${TEST_CASE_RUN}" == "false" ]]; then
fatal "All files are supposed to pass linting checks when not running in test mode."
debug "Stdout for ${FILE_TYPE} is empty"
fi
(("BAD_TEST_CASES_COUNT++"))
local STDERR_LINTER
if ! STDERR_LINTER="$(jq --raw-output '.[].Stderr' <<<"${RESULTS_OBJECT}")"; then
fatal "Error when loading stderr for ${FILE_TYPE}:\n${STDERR_LINTER}"
fi
if [ ${ERROR_CODE} -eq 0 ]; then
error "${FILE} should have failed test case."
(("ERRORS_FOUND_${FILE_TYPE}++"))
if [ -n "${STDERR_LINTER}" ]; then
info "Command output for ${FILE_TYPE}:\n------\n${STDERR_LINTER}\n------"
else
notice "${FILE} failed the test case as expected"
fi
fi
done
if [ "${TEST_CASE_RUN}" = "true" ]; then
debug "${LINTER_NAME} test suite has ${INDEX} test(s), of which ${BAD_TEST_CASES_COUNT} 'bad' (expected to fail), ${GOOD_TEST_CASES_COUNT} 'good' (expected to pass)."
# Check if we ran at least one test
if [ "${INDEX}" -eq 0 ]; then
fatal "Failed to find any tests ran for: ${LINTER_NAME}. Check that tests exist for linter: ${LINTER_NAME}"
debug "Stderr for ${FILE_TYPE} is empty"
fi
# Check if we ran at least one 'bad' test
if [ "${BAD_TEST_CASES_COUNT}" -eq 0 ]; then
fatal "Failed to find any tests that are expected to fail for: ${LINTER_NAME}. Check that tests that are expected to fail exist for linter: ${LINTER_NAME}"
fi
unset -n FILE_ARRAY
# Check if we ran at least one 'good' test
if [ "${GOOD_TEST_CASES_COUNT}" -eq 0 ]; then
fatal "Failed to find any tests that are expected to pass for: ${LINTER_NAME}. Check that tests that are expected to pass exist for linter: ${LINTER_NAME}"
fi
fi
endGitHubActionsLogGroup "${FILE_TYPE}"
}
# We need this for parallel
export -f LintCodebase

View file

@ -77,6 +77,7 @@ else
fi
# Let users configure GitHub Actions log markers regardless of running locally or not
ENABLE_GITHUB_ACTIONS_GROUP_TITLE="${ENABLE_GITHUB_ACTIONS_GROUP_TITLE:-"${DEFAULT_ENABLE_GITHUB_ACTIONS_GROUP_TITLE}"}"
export ENABLE_GITHUB_ACTIONS_GROUP_TITLE
startGitHubActionsLogGroup "${SUPER_LINTER_INITIALIZATION_LOG_GROUP_TITLE}"
@ -88,10 +89,12 @@ DISABLE_ERRORS="${DISABLE_ERRORS:-"false"}"
declare -l IGNORE_GENERATED_FILES
# Do not ignore generated files by default for backwards compatibility
IGNORE_GENERATED_FILES="${IGNORE_GENERATED_FILES:-false}"
export IGNORE_GENERATED_FILES
# We want a lowercase value
declare -l IGNORE_GITIGNORED_FILES
IGNORE_GITIGNORED_FILES="${IGNORE_GITIGNORED_FILES:-false}"
export IGNORE_GITIGNORED_FILES
# We want a lowercase value
declare -l MULTI_STATUS
@ -151,7 +154,6 @@ GITHUB_API_URL="${GITHUB_API_URL%/}"
GITHUB_SERVER_URL="${GITHUB_DOMAIN:-"https://github.com"}"
# Extract domain name from URL
GITHUB_SERVER_URL=$(echo "$GITHUB_SERVER_URL" | cut -d '/' -f 3)
LINTED_LANGUAGES_ARRAY=() # Will be filled at run time with all languages that were linted
LINTER_RULES_PATH="${LINTER_RULES_PATH:-.github/linters}" # Linter rules directory
# shellcheck disable=SC2034 # Variable is referenced in other scripts
RAW_FILE_ARRAY=() # Array of all files that were changed
@ -610,6 +612,9 @@ GetGitHubVars() {
else
debug "Skip GITHUB_TOKEN, GITHUB_REPOSITORY, and GITHUB_RUN_ID validation because we don't need these variables for GitHub Actions status reports. MULTI_STATUS: ${MULTI_STATUS}"
fi
# We need this for parallel
export GITHUB_WORKSPACE
}
################################################################################
#### Function CallStatusAPI ####################################################
@ -666,84 +671,62 @@ CallStatusAPI() {
fi
fi
}
################################################################################
#### Function Footer ###########################################################
Footer() {
info "----------------------------------------------"
info "----------------------------------------------"
info "The script has completed"
info "----------------------------------------------"
info "----------------------------------------------"
####################################################
# Need to clean up the language array of duplicates #
####################################################
mapfile -t UNIQUE_LINTED_ARRAY < <(for LANG in "${LINTED_LANGUAGES_ARRAY[@]}"; do echo "${LANG}"; done | sort -u)
export UNIQUE_LINTED_ARRAY # Workaround SC2034
local ANY_LINTER_SUCCESS
ANY_LINTER_SUCCESS="false"
local SUPER_LINTER_EXIT_CODE
SUPER_LINTER_EXIT_CODE=0
##############################
# Prints for errors if found #
##############################
for LANGUAGE in "${LANGUAGE_ARRAY[@]}"; do
###########################
# Build the error counter #
###########################
ERROR_COUNTER="ERRORS_FOUND_${LANGUAGE}"
# This used to be the count of errors found for a given LANGUAGE, but since
# we switched to running linters against a batch of files, it may not
# represent the actual number of files that didn't pass the validation,
# but a number that's less than that because of how GNU parallel returns
# exit codes.
# Ref: https://www.gnu.org/software/parallel/parallel.html#exit-status
ERROR_COUNTER_FILE_PATH="/tmp/super-linter-parallel-command-exit-code-${LANGUAGE}"
if [ ! -f "${ERROR_COUNTER_FILE_PATH}" ]; then
debug "Error counter ${ERROR_COUNTER_FILE_PATH} doesn't exist"
else
ERROR_COUNTER=$(<"${ERROR_COUNTER_FILE_PATH}")
debug "ERROR_COUNTER for ${LANGUAGE}: ${ERROR_COUNTER}"
##################
# Print if not 0 #
##################
if [[ ${!ERROR_COUNTER} -ne 0 ]]; then
# We found errors in the language
###################
# Print the goods #
###################
error "ERRORS FOUND in ${LANGUAGE}:[${!ERROR_COUNTER}]"
#########################################
# Create status API for Failed language #
#########################################
if [[ ${ERROR_COUNTER} -ne 0 ]]; then
error "Errors found in ${LANGUAGE}"
CallStatusAPI "${LANGUAGE}" "error"
######################################
# Check if we validated the language #
######################################
elif [[ ${!ERROR_COUNTER} -eq 0 ]]; then
if CheckInArray "${LANGUAGE}"; then
# No errors found when linting the language
SUPER_LINTER_EXIT_CODE=1
debug "Setting super-linter exit code to ${SUPER_LINTER_EXIT_CODE} because there were errors for ${LANGUAGE}"
elif [[ ${ERROR_COUNTER} -eq 0 ]]; then
notice "Successfully linted ${LANGUAGE}"
CallStatusAPI "${LANGUAGE}" "success"
ANY_LINTER_SUCCESS="true"
debug "Set ANY_LINTER_SUCCESS to ${ANY_LINTER_SUCCESS} because ${LANGUAGE} reported a success"
fi
fi
done
##################################
# Exit with 0 if errors disabled #
##################################
if [[ "${ANY_LINTER_SUCCESS}" == "true" ]] && [[ ${SUPER_LINTER_EXIT_CODE} -ne 0 ]]; then
SUPER_LINTER_EXIT_CODE=2
debug "There was at least one linter that reported a success. Setting the super-linter exit code to: ${SUPER_LINTER_EXIT_CODE}"
fi
if [ "${DISABLE_ERRORS}" == "true" ]; then
warn "Exiting with exit code:[0] as:[DISABLE_ERRORS] was set to:[${DISABLE_ERRORS}]"
exit 0
warn "The super-linter exit code is ${SUPER_LINTER_EXIT_CODE}. Forcibly setting it to 0 because DISABLE_ERRORS is set to: ${DISABLE_ERRORS}"
SUPER_LINTER_EXIT_CODE=0
fi
###############################
# Exit with 1 if errors found #
###############################
# Loop through all languages
for LANGUAGE in "${LANGUAGE_ARRAY[@]}"; do
# build the variable
ERRORS_FOUND_LANGUAGE="ERRORS_FOUND_${LANGUAGE}"
# Check if error was found
if [[ ${!ERRORS_FOUND_LANGUAGE} -ne 0 ]]; then
# Failed exit
fatal "Exiting with errors found!"
if [[ ${SUPER_LINTER_EXIT_CODE} -eq 0 ]]; then
notice "All files and directories linted successfully"
else
error "Super-linter detected linting errors"
fi
done
########################
# Footer prints Exit 0 #
########################
notice "All file(s) linted successfully with no errors detected"
info "----------------------------------------------"
# Successful exit
exit 0
exit ${SUPER_LINTER_EXIT_CODE}
}
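# Illustrative sketch of the exit-code contract the Footer relies on (the
# language key is an example): GNU parallel exits with 0 when every job
# succeeded, with the number of failed jobs when 1-100 jobs failed, and with
# 101 when more than 100 failed, so the per-language file written by the worker
# only needs a zero/non-zero check here.
#
#   ERROR_COUNTER_FILE_PATH="/tmp/super-linter-parallel-command-exit-code-YAML"
#   if [[ -f "${ERROR_COUNTER_FILE_PATH}" ]] && [[ "$(<"${ERROR_COUNTER_FILE_PATH}")" -ne 0 ]]; then
#     echo "Errors found when linting YAML"
#   fi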
################################################################################
#### Function UpdateLoopsForImage ##############################################
@ -892,133 +875,6 @@ done
# Load rules for special cases
GetStandardRules "javascript"
##########################
# Define linter commands #
##########################
declare -A LINTER_COMMANDS_ARRAY
LINTER_COMMANDS_ARRAY['ANSIBLE']="ansible-lint -c ${ANSIBLE_LINTER_RULES}"
LINTER_COMMANDS_ARRAY['ARM']="Import-Module ${ARM_TTK_PSD1} ; \${config} = \$(Import-PowerShellDataFile -Path ${ARM_LINTER_RULES}) ; Test-AzTemplate @config -TemplatePath"
if [ -z "${BASH_SEVERITY}" ]; then
LINTER_COMMANDS_ARRAY['BASH']="shellcheck --color --external-sources"
else
LINTER_COMMANDS_ARRAY['BASH']="shellcheck --color --external-sources --severity=${BASH_SEVERITY}"
fi
LINTER_COMMANDS_ARRAY['BASH_EXEC']="bash-exec"
LINTER_COMMANDS_ARRAY['CHECKOV']="checkov --config-file ${CHECKOV_LINTER_RULES}"
if CheckovConfigurationFileContainsDirectoryOption "${CHECKOV_LINTER_RULES}"; then
debug "No need to update the Checkov command."
else
debug "Adding the '--directory' option to the Checkov command."
LINTER_COMMANDS_ARRAY['CHECKOV']="${LINTER_COMMANDS_ARRAY['CHECKOV']} --directory"
fi
LINTER_COMMANDS_ARRAY['CLANG_FORMAT']="clang-format --Werror --dry-run"
LINTER_COMMANDS_ARRAY['CLOJURE']="clj-kondo --config ${CLOJURE_LINTER_RULES} --lint"
LINTER_COMMANDS_ARRAY['CLOUDFORMATION']="cfn-lint --config-file ${CLOUDFORMATION_LINTER_RULES}"
LINTER_COMMANDS_ARRAY['COFFEESCRIPT']="coffeelint -f ${COFFEESCRIPT_LINTER_RULES}"
LINTER_COMMANDS_ARRAY['CPP']="cpplint"
LINTER_COMMANDS_ARRAY['CSHARP']="dotnet format whitespace --folder --verify-no-changes --exclude / --include"
LINTER_COMMANDS_ARRAY['CSS']="stylelint --config ${CSS_LINTER_RULES}"
LINTER_COMMANDS_ARRAY['DART']="dart analyze --fatal-infos --fatal-warnings"
LINTER_COMMANDS_ARRAY['DOCKERFILE_HADOLINT']="hadolint -c ${DOCKERFILE_HADOLINT_LINTER_RULES}"
LINTER_COMMANDS_ARRAY['EDITORCONFIG']="editorconfig-checker -config ${EDITORCONFIG_LINTER_RULES}"
LINTER_COMMANDS_ARRAY['ENV']="dotenv-linter"
if [ "${GITHUB_ACTIONS_COMMAND_ARGS}" = "null" ]; then
LINTER_COMMANDS_ARRAY['GITHUB_ACTIONS']="actionlint -config-file ${GITHUB_ACTIONS_LINTER_RULES}"
else
LINTER_COMMANDS_ARRAY['GITHUB_ACTIONS']="actionlint -config-file ${GITHUB_ACTIONS_LINTER_RULES} ${GITHUB_ACTIONS_COMMAND_ARGS}"
fi
LINTER_COMMANDS_ARRAY['GITLEAKS']="gitleaks detect --no-banner --no-git --redact --config ${GITLEAKS_LINTER_RULES} --verbose --source"
LINTER_COMMANDS_ARRAY['GHERKIN']="gherkin-lint -c ${GHERKIN_LINTER_RULES}"
LINTER_COMMANDS_ARRAY['GO_MODULES']="golangci-lint run -c ${GO_LINTER_RULES}"
LINTER_COMMANDS_ARRAY['GO']="${LINTER_COMMANDS_ARRAY['GO_MODULES']} --fast"
LINTER_COMMANDS_ARRAY['GOOGLE_JAVA_FORMAT']="java -jar /usr/bin/google-java-format --dry-run --set-exit-if-changed"
LINTER_COMMANDS_ARRAY['GROOVY']="npm-groovy-lint -c ${GROOVY_LINTER_RULES} --failon warning --no-insight"
LINTER_COMMANDS_ARRAY['HTML']="htmlhint --config ${HTML_LINTER_RULES}"
LINTER_COMMANDS_ARRAY['JAVA']="java -jar /usr/bin/checkstyle -c ${JAVA_LINTER_RULES}"
LINTER_COMMANDS_ARRAY['JAVASCRIPT_ES']="eslint --no-eslintrc -c ${JAVASCRIPT_ES_LINTER_RULES}"
LINTER_COMMANDS_ARRAY['JAVASCRIPT_STANDARD']="standard ${JAVASCRIPT_STANDARD_LINTER_RULES}"
LINTER_COMMANDS_ARRAY['JAVASCRIPT_PRETTIER']="prettier --check"
LINTER_COMMANDS_ARRAY['JSCPD']="jscpd --config ${JSCPD_LINTER_RULES}"
LINTER_COMMANDS_ARRAY['JSON']="eslint --no-eslintrc -c ${JAVASCRIPT_ES_LINTER_RULES} --ext .json"
LINTER_COMMANDS_ARRAY['JSONC']="eslint --no-eslintrc -c ${JAVASCRIPT_ES_LINTER_RULES} --ext .json5,.jsonc"
LINTER_COMMANDS_ARRAY['JSX']="eslint --no-eslintrc -c ${JSX_LINTER_RULES}"
LINTER_COMMANDS_ARRAY['KOTLIN']="ktlint"
if [ "${KUBERNETES_KUBECONFORM_OPTIONS}" == "null" ] || [ -z "${KUBERNETES_KUBECONFORM_OPTIONS}" ]; then
LINTER_COMMANDS_ARRAY['KUBERNETES_KUBECONFORM']="kubeconform -strict"
else
LINTER_COMMANDS_ARRAY['KUBERNETES_KUBECONFORM']="kubeconform -strict ${KUBERNETES_KUBECONFORM_OPTIONS}"
fi
LINTER_COMMANDS_ARRAY['LATEX']="chktex -q -l ${LATEX_LINTER_RULES}"
LINTER_COMMANDS_ARRAY['LUA']="luacheck --config ${LUA_LINTER_RULES}"
LINTER_COMMANDS_ARRAY['MARKDOWN']="markdownlint -c ${MARKDOWN_LINTER_RULES}"
if [ -n "${MARKDOWN_CUSTOM_RULE_GLOBS}" ]; then
IFS="," read -r -a MARKDOWN_CUSTOM_RULE_GLOBS_ARRAY <<<"${MARKDOWN_CUSTOM_RULE_GLOBS}"
for glob in "${MARKDOWN_CUSTOM_RULE_GLOBS_ARRAY[@]}"; do
if [ -z "${LINTER_RULES_PATH}" ]; then
LINTER_COMMANDS_ARRAY['MARKDOWN']="${LINTER_COMMANDS_ARRAY['MARKDOWN']} -r ${GITHUB_WORKSPACE}/${glob}"
else
LINTER_COMMANDS_ARRAY['MARKDOWN']="${LINTER_COMMANDS_ARRAY['MARKDOWN']} -r ${GITHUB_WORKSPACE}/${LINTER_RULES_PATH}/${glob}"
fi
done
fi
LINTER_COMMANDS_ARRAY['NATURAL_LANGUAGE']="textlint -c ${NATURAL_LANGUAGE_LINTER_RULES}"
LINTER_COMMANDS_ARRAY['OPENAPI']="spectral lint -r ${OPENAPI_LINTER_RULES} -D"
if [ "${PERL_PERLCRITIC_OPTIONS}" == "null" ] || [ -z "${PERL_PERLCRITIC_OPTIONS}" ]; then
LINTER_COMMANDS_ARRAY['PERL']="perlcritic"
else
LINTER_COMMANDS_ARRAY['PERL']="perlcritic ${PERL_PERLCRITIC_OPTIONS}"
fi
LINTER_COMMANDS_ARRAY['PHP_BUILTIN']="php -l -c ${PHP_BUILTIN_LINTER_RULES}"
LINTER_COMMANDS_ARRAY['PHP_PHPCS']="phpcs --standard=${PHP_PHPCS_LINTER_RULES}"
LINTER_COMMANDS_ARRAY['PHP_PHPSTAN']="phpstan analyse --no-progress --no-ansi --memory-limit 1G -c ${PHP_PHPSTAN_LINTER_RULES}"
LINTER_COMMANDS_ARRAY['PHP_PSALM']="psalm --config=${PHP_PSALM_LINTER_RULES}"
LINTER_COMMANDS_ARRAY['POWERSHELL']="Invoke-ScriptAnalyzer -EnableExit -Settings ${POWERSHELL_LINTER_RULES} -Path"
LINTER_COMMANDS_ARRAY['PROTOBUF']="protolint lint --config_path ${PROTOBUF_LINTER_RULES}"
LINTER_COMMANDS_ARRAY['PYTHON_BLACK']="black --config ${PYTHON_BLACK_LINTER_RULES} --diff --check"
LINTER_COMMANDS_ARRAY['PYTHON_PYLINT']="pylint --rcfile ${PYTHON_PYLINT_LINTER_RULES}"
LINTER_COMMANDS_ARRAY['PYTHON_FLAKE8']="flake8 --config=${PYTHON_FLAKE8_LINTER_RULES}"
LINTER_COMMANDS_ARRAY['PYTHON_ISORT']="isort --check --diff --sp ${PYTHON_ISORT_LINTER_RULES}"
LINTER_COMMANDS_ARRAY['PYTHON_MYPY']="mypy --config-file ${PYTHON_MYPY_LINTER_RULES} --install-types --non-interactive"
LINTER_COMMANDS_ARRAY['R']="lintr"
LINTER_COMMANDS_ARRAY['RAKU']="raku"
LINTER_COMMANDS_ARRAY['RENOVATE']="renovate-config-validator --strict"
LINTER_COMMANDS_ARRAY['RUBY']="rubocop -c ${RUBY_LINTER_RULES} --force-exclusion --ignore-unrecognized-cops"
LINTER_COMMANDS_ARRAY['RUST_2015']="rustfmt --check --edition 2015"
LINTER_COMMANDS_ARRAY['RUST_2018']="rustfmt --check --edition 2018"
LINTER_COMMANDS_ARRAY['RUST_2021']="rustfmt --check --edition 2021"
LINTER_COMMANDS_ARRAY['RUST_CLIPPY']="clippy"
LINTER_COMMANDS_ARRAY['SCALAFMT']="scalafmt --config ${SCALAFMT_LINTER_RULES} --test"
LINTER_COMMANDS_ARRAY['SHELL_SHFMT']="shfmt -d"
LINTER_COMMANDS_ARRAY['SNAKEMAKE_LINT']="snakemake --lint -s"
LINTER_COMMANDS_ARRAY['SNAKEMAKE_SNAKEFMT']="snakefmt --config ${SNAKEMAKE_SNAKEFMT_LINTER_RULES} --check --compact-diff"
LINTER_COMMANDS_ARRAY['STATES']="asl-validator --json-path"
LINTER_COMMANDS_ARRAY['SQL']="sql-lint --config ${SQL_LINTER_RULES}"
LINTER_COMMANDS_ARRAY['SQLFLUFF']="sqlfluff lint --config ${SQLFLUFF_LINTER_RULES}"
LINTER_COMMANDS_ARRAY['TEKTON']="tekton-lint"
LINTER_COMMANDS_ARRAY['TERRAFORM_FMT']="terraform fmt -check -diff"
LINTER_COMMANDS_ARRAY['TERRAFORM_TFLINT']="tflint -c ${TERRAFORM_TFLINT_LINTER_RULES}"
LINTER_COMMANDS_ARRAY['TERRAFORM_TERRASCAN']="terrascan scan -i terraform -t all -c ${TERRAFORM_TERRASCAN_LINTER_RULES} -f"
LINTER_COMMANDS_ARRAY['TERRAGRUNT']="terragrunt hclfmt --terragrunt-check --terragrunt-log-level error --terragrunt-hclfmt-file"
LINTER_COMMANDS_ARRAY['TSX']="eslint --no-eslintrc -c ${TSX_LINTER_RULES}"
LINTER_COMMANDS_ARRAY['TYPESCRIPT_ES']="eslint --no-eslintrc -c ${TYPESCRIPT_ES_LINTER_RULES}"
LINTER_COMMANDS_ARRAY['TYPESCRIPT_STANDARD']="ts-standard --parser @typescript-eslint/parser --plugin @typescript-eslint/eslint-plugin --project ${TYPESCRIPT_STANDARD_TSCONFIG_FILE}"
LINTER_COMMANDS_ARRAY['TYPESCRIPT_PRETTIER']="prettier --check"
LINTER_COMMANDS_ARRAY['XML']="xmllint"
if [ "${YAML_ERROR_ON_WARNING}" == 'false' ]; then
LINTER_COMMANDS_ARRAY['YAML']="yamllint -c ${YAML_LINTER_RULES} -f parsable"
else
LINTER_COMMANDS_ARRAY['YAML']="yamllint --strict -c ${YAML_LINTER_RULES} -f parsable"
fi
debug "--- Linter commands ---"
debug "-----------------------"
for i in "${!LINTER_COMMANDS_ARRAY[@]}"; do
debug "Linter key: $i, command: ${LINTER_COMMANDS_ARRAY[$i]}"
done
debug "---------------------------------------------"
#################################
# Check for SSL cert and update #
#################################
@ -1034,77 +890,74 @@ BuildFileList "${VALIDATE_ALL_CODEBASE}" "${TEST_CASE_RUN}"
#####################################
RunAdditionalInstalls
###############
# Run linters #
###############
EDITORCONFIG_FILE_PATH="${GITHUB_WORKSPACE}"/.editorconfig
####################################
# Print ENV before running linters #
####################################
debug "--- ENV (before running linters) ---"
debug "------------------------------------"
debug "ENV:"
debug "$(printenv | sort)"
debug "$(printenv)"
debug "------------------------------------"
endGitHubActionsLogGroup "${SUPER_LINTER_INITIALIZATION_LOG_GROUP_TITLE}"
for LANGUAGE in "${LANGUAGE_ARRAY[@]}"; do
startGitHubActionsLogGroup "${LANGUAGE}"
debug "Running linter for the ${LANGUAGE} language..."
VALIDATE_LANGUAGE_VARIABLE_NAME="VALIDATE_${LANGUAGE}"
debug "Setting VALIDATE_LANGUAGE_VARIABLE_NAME to ${VALIDATE_LANGUAGE_VARIABLE_NAME}..."
VALIDATE_LANGUAGE_VARIABLE_VALUE="${!VALIDATE_LANGUAGE_VARIABLE_NAME}"
debug "Setting VALIDATE_LANGUAGE_VARIABLE_VALUE to ${VALIDATE_LANGUAGE_VARIABLE_VALUE}..."
###############
# Run linters #
###############
declare PARALLEL_RESULTS_FILE_PATH
PARALLEL_RESULTS_FILE_PATH="/tmp/super-linter-results.json"
debug "PARALLEL_RESULTS_FILE_PATH: ${PARALLEL_RESULTS_FILE_PATH}"
if [ "${VALIDATE_LANGUAGE_VARIABLE_VALUE}" = "true" ]; then
# Check if we need an .editorconfig file
# shellcheck disable=SC2153
if [ "${LANGUAGE}" = "EDITORCONFIG" ] || [ "${LANGUAGE}" = "SHELL_SHFMT" ]; then
if [ -e "${EDITORCONFIG_FILE_PATH}" ]; then
debug "Found an EditorConfig file at ${EDITORCONFIG_FILE_PATH}"
declare -a PARALLEL_COMMAND
PARALLEL_COMMAND=(parallel --will-cite --keep-order --max-procs "$(($(nproc) * 1))" --xargs --results "${PARALLEL_RESULTS_FILE_PATH}")
# Run one LANGUAGE per process. Each of these processes will run more processes in parallel if supported
PARALLEL_COMMAND+=(--max-lines 1)
if [ "${LOG_DEBUG}" == "true" ]; then
debug "LOG_DEBUG is enabled. Enable verbose ouput for parallel"
PARALLEL_COMMAND+=(--verbose)
fi
PARALLEL_COMMAND+=("LintCodebase" "{}" "\"${TEST_CASE_RUN}\"")
debug "PARALLEL_COMMAND: ${PARALLEL_COMMAND[*]}"
PARALLEL_COMMAND_OUTPUT=$(printf "%s\n" "${LANGUAGE_ARRAY[@]}" | "${PARALLEL_COMMAND[@]}" 2>&1)
PARALLEL_COMMAND_RETURN_CODE=$?
debug "PARALLEL_COMMAND_OUTPUT when running linters (exit code: ${PARALLEL_COMMAND_RETURN_CODE}):\n${PARALLEL_COMMAND_OUTPUT}"
debug "Parallel output file (${PARALLEL_RESULTS_FILE_PATH}) contents when running linters:\n$(cat "${PARALLEL_RESULTS_FILE_PATH}")"
RESULTS_OBJECT=
if ! RESULTS_OBJECT=$(jq -n '[inputs]' "${PARALLEL_RESULTS_FILE_PATH}"); then
fatal "Error loading results when building the file list: ${RESULTS_OBJECT}"
fi
debug "RESULTS_OBJECT when running linters:\n${RESULTS_OBJECT}"
# Get raw output so we can strip quotes from the data we load
if ! STDOUT_LINTERS="$(jq --raw-output '.[].Stdout' <<<"${RESULTS_OBJECT}")"; then
fatal "Error when loading stdout when running linters:\n${STDOUT_LINTERS}"
fi
if [ -n "${STDOUT_LINTERS}" ]; then
info "Command output when running linters:\n------\n${STDOUT_LINTERS}\n------"
else
debug "No .editorconfig found at: $EDITORCONFIG_FILE_PATH. Skipping ${LANGUAGE} linting..."
continue
fi
elif [ "${LANGUAGE}" = "R" ] && [ ! -f "${R_RULES_FILE_PATH_IN_ROOT}" ] && ((${#FILE_ARRAY_R[@]})); then
info "No .lintr configuration file found, using defaults."
cp "$R_LINTER_RULES" "$GITHUB_WORKSPACE"
# shellcheck disable=SC2034
SUPER_LINTER_COPIED_R_LINTER_RULES_FILE="true"
# Check if there's local configuration for the Raku linter
elif [ "${LANGUAGE}" = "RAKU" ] && [ -e "${GITHUB_WORKSPACE}/META6.json" ]; then
cd "${GITHUB_WORKSPACE}" && zef install --deps-only --/test .
debug "Stdout when running linters is empty"
fi
LINTER_NAME="${LINTER_NAMES_ARRAY["${LANGUAGE}"]}"
if [ -z "${LINTER_NAME}" ]; then
fatal "Cannot find the linter name for ${LANGUAGE} language."
if ! STDERR_LINTERS="$(jq --raw-output '.[].Stderr' <<<"${RESULTS_OBJECT}")"; then
fatal "Error when loading stderr for ${FILE_TYPE}:\n${STDERR_LINTERS}"
fi
if [ -n "${STDERR_LINTERS}" ]; then
info "Command output for ${FILE_TYPE}:\n------\n${STDERR_LINTERS}\n------"
else
debug "Setting LINTER_NAME to ${LINTER_NAME}..."
debug "Stderr when running linters is empty"
fi
LINTER_COMMAND="${LINTER_COMMANDS_ARRAY["${LANGUAGE}"]}"
if [ -z "${LINTER_COMMAND}" ]; then
fatal "Cannot find the linter command for ${LANGUAGE} language."
else
debug "Setting LINTER_COMMAND to ${LINTER_COMMAND}..."
if [[ ${PARALLEL_COMMAND_RETURN_CODE} -ne 0 ]]; then
fatal "Error when running linters. Exit code: ${PARALLEL_COMMAND_RETURN_CODE}"
fi
FILE_ARRAY_VARIABLE_NAME="FILE_ARRAY_${LANGUAGE}"
debug "Setting FILE_ARRAY_VARIABLE_NAME to ${FILE_ARRAY_VARIABLE_NAME}..."
# shellcheck disable=SC2125
LANGUAGE_FILE_ARRAY="${FILE_ARRAY_VARIABLE_NAME}"[@]
debug "${FILE_ARRAY_VARIABLE_NAME} file array contents: ${!LANGUAGE_FILE_ARRAY}"
debug "Invoking ${LINTER_NAME} linter. TEST_CASE_RUN: ${TEST_CASE_RUN}"
LintCodebase "${LANGUAGE}" "${LINTER_NAME}" "${LINTER_COMMAND}" "${FILTER_REGEX_INCLUDE}" "${FILTER_REGEX_EXCLUDE}" "${TEST_CASE_RUN}" "${!LANGUAGE_FILE_ARRAY}"
fi
endGitHubActionsLogGroup "${LANGUAGE}"
done
##########
# Footer #
##########

View file

@ -436,6 +436,7 @@ control "super-linter-validate-files" do
"/action/lib/functions/buildFileList.sh",
"/action/lib/functions/detectFiles.sh",
"/action/lib/functions/githubEvent.sh",
"/action/lib/functions/linterCommands.sh",
"/action/lib/functions/linterRules.sh",
"/action/lib/functions/linterVersions.sh",
"/action/lib/functions/linterVersions.txt",

48
test/run-super-linter-tests.sh Executable file
View file

@ -0,0 +1,48 @@
#!/usr/bin/env bash
set -o errexit
set -o nounset
set -o pipefail
SUPER_LINTER_TEST_CONTAINER_URL="${1}"
TEST_FUNCTION_NAME="${2}"
COMMAND_TO_RUN=(docker run -e ACTIONS_RUNNER_DEBUG=true -e DEFAULT_BRANCH=main -e ENABLE_GITHUB_ACTIONS_GROUP_TITLE=true -e JSCPD_CONFIG_FILE=".jscpd-test-linters.json" -e RENOVATE_SHAREABLE_CONFIG_PRESET_FILE_NAMES="default.json,hoge.json" -e RUN_LOCAL=true -e TEST_CASE_RUN=true -e TYPESCRIPT_STANDARD_TSCONFIG_FILE=".github/linters/tsconfig.json" -v "$(pwd):/tmp/lint")
run_test_cases_expect_failure() {
COMMAND_TO_RUN+=(-e ANSIBLE_DIRECTORY="/test/linters/ansible/bad" -e CHECKOV_FILE_NAME=".checkov-test-linters-failure.yaml" -e FILTER_REGEX_INCLUDE=".*bad.*")
EXPECTED_EXIT_CODE=1
}
run_test_cases_expect_success() {
COMMAND_TO_RUN+=(-e ANSIBLE_DIRECTORY="/test/linters/ansible/good" -e CHECKOV_FILE_NAME=".checkov-test-linters-success.yaml" -e FILTER_REGEX_INCLUDE=".*good.*")
}
# Run the test setup function
${TEST_FUNCTION_NAME}
COMMAND_TO_RUN+=("${SUPER_LINTER_TEST_CONTAINER_URL}")
declare -i EXPECTED_EXIT_CODE
EXPECTED_EXIT_CODE=${EXPECTED_EXIT_CODE:-0}
if [ ${EXPECTED_EXIT_CODE} -ne 0 ]; then
echo "Disable failures on error because the expected exit code is ${EXPECTED_EXIT_CODE}"
set +o errexit
fi
echo "Command to run: ${COMMAND_TO_RUN[*]}"
"${COMMAND_TO_RUN[@]}"
SUPER_LINTER_EXIT_CODE=$?
# Enable the errexit option in case we disabled it
set -o errexit
echo "Super-linter exit code: ${SUPER_LINTER_EXIT_CODE}"
if [ ${SUPER_LINTER_EXIT_CODE} -ne ${EXPECTED_EXIT_CODE} ]; then
echo "Super-linter exited with an unexpected code: ${SUPER_LINTER_EXIT_CODE}"
exit 1
else
echo "Super-linter exited with the expected code: ${SUPER_LINTER_EXIT_CODE}"
fi
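# Example invocation (the container image reference is an example):
#   test/run-super-linter-tests.sh ghcr.io/super-linter/super-linter:latest run_test_cases_expect_failure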