Merge branch 'support-for-phpcs-and-psalm' of https://github.com/filips123/super-linter into support-for-phpcs-and-psalm
commit 364f31f2e3
35 changed files with 2019 additions and 1574 deletions

@@ -13,6 +13,12 @@
# - System with Docker installed
# - Global variables met

#########################
# Source Function Files #
#########################
# shellcheck source=/dev/null
source ../lib/log.sh # Source the function script(s)

###########
# Globals #
###########
@@ -29,11 +35,9 @@ DOCKERFILE_PATH="${DOCKERFILE_PATH}" # Path to the Dockerfile to be uploaded
################################################################################
#### Function Header ###########################################################
Header() {
echo ""
echo "-------------------------------------------------------"
echo "----- GitHub Actions remove image from DockerHub ------"
echo "-------------------------------------------------------"
echo ""
info "-------------------------------------------------------"
info "----- GitHub Actions remove image from DockerHub ------"
info "-------------------------------------------------------"
}
################################################################################
#### Function ValidateInput ####################################################
@@ -42,21 +46,18 @@ ValidateInput() {
################
# Print header #
################
echo ""
echo "----------------------------------------------"
echo "Gathering variables..."
echo "----------------------------------------------"
echo ""
info "----------------------------------------------"
info "Gathering variables..."
info "----------------------------------------------"

############################
# Validate GITHUB_WORKSPACE #
############################
if [ -z "${GITHUB_WORKSPACE}" ]; then
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to get [GITHUB_WORKSPACE]!${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${GITHUB_WORKSPACE}]${NC}"
exit 1
error "Failed to get [GITHUB_WORKSPACE]!"
fatal "[${GITHUB_WORKSPACE}]"
else
echo "Successfully found:[GITHUB_WORKSPACE], value:[${GITHUB_WORKSPACE}]"
info "Successfully found:[GITHUB_WORKSPACE], value:[${GITHUB_WORKSPACE}]"
fi

#######################
@@ -64,15 +65,14 @@ ValidateInput() {
#######################
if [ -z "${IMAGE_REPO}" ]; then
# No repo was pulled
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to get [IMAGE_REPO]!${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${IMAGE_REPO}]${NC}"
exit 1
error "Failed to get [IMAGE_REPO]!"
fatal "[${IMAGE_REPO}]"
elif [[ ${IMAGE_REPO} == "github/super-linter" ]]; then
# Found our main repo
echo "Successfully found:[IMAGE_REPO], value:[${IMAGE_REPO}]"
info "Successfully found:[IMAGE_REPO], value:[${IMAGE_REPO}]"
else
# This is a fork and we cant pull vars or any info
echo -e "${NC}${F[Y]}WARN!${NC} No image to cleanup as this is a forked branch, and not being built with current automation!${NC}"
warn "No image to cleanup as this is a forked branch, and not being built with current automation!"
exit 0
fi

@@ -80,33 +80,30 @@ ValidateInput() {
# Validate IMAGE_VERSION #
##########################
if [ -z "${IMAGE_VERSION}" ]; then
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to get [IMAGE_VERSION]!${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${IMAGE_VERSION}]${NC}"
exit 1
error "Failed to get [IMAGE_VERSION]!"
fatal "[${IMAGE_VERSION}]"
else
echo "Successfully found:[IMAGE_VERSION], value:[${IMAGE_VERSION}]"
info "Successfully found:[IMAGE_VERSION], value:[${IMAGE_VERSION}]"
fi

############################
# Validate DOCKER_USERNAME #
############################
if [ -z "${DOCKER_USERNAME}" ]; then
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to get [DOCKER_USERNAME]!${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${DOCKER_USERNAME}]${NC}"
exit 1
error "Failed to get [DOCKER_USERNAME]!"
fatal "[${DOCKER_USERNAME}]"
else
echo "Successfully found:[DOCKER_USERNAME], value:[${DOCKER_USERNAME}]"
info "Successfully found:[DOCKER_USERNAME], value:[${DOCKER_USERNAME}]"
fi

############################
# Validate DOCKER_PASSWORD #
############################
if [ -z "${DOCKER_PASSWORD}" ]; then
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to get [DOCKER_PASSWORD]!${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${DOCKER_PASSWORD}]${NC}"
exit 1
error "Failed to get [DOCKER_PASSWORD]!"
fatal "[${DOCKER_PASSWORD}]"
else
echo "Successfully found:[DOCKER_PASSWORD], value:[********]"
info "Successfully found:[DOCKER_PASSWORD], value:[********]"
fi

##################################################
@@ -121,10 +118,9 @@ ValidateInput() {
#############################################
# Image is 'latest' and we will not destroy #
#############################################
echo "Image Tag is set to:[latest]..."
echo "We will never destroy latest..."
echo "Bye!"
exit 1
error "Image Tag is set to:[latest]..."
error "We will never destroy latest..."
fatal "Bye!"
fi
}
################################################################################
@@ -133,11 +129,9 @@ LoginToDocker() {
################
# Print header #
################
echo ""
echo "----------------------------------------------"
echo "Login to DockerHub..."
echo "----------------------------------------------"
echo ""
info "----------------------------------------------"
info "Login to DockerHub..."
info "----------------------------------------------"

######################
# Login to DockerHub #
@@ -154,12 +148,11 @@ LoginToDocker() {
##############################
if [ ${ERROR_CODE} -ne 0 ]; then
# ERROR
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to authenticate to DockerHub!${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${LOGIN_CMD}]${NC}"
exit 1
error "Failed to authenticate to DockerHub!"
fatal "[${LOGIN_CMD}]"
else
# SUCCESS
echo "Successfully authenticated to DockerHub!"
info "Successfully authenticated to DockerHub!"
fi
}
################################################################################
@@ -168,11 +161,9 @@ RemoveImage() {
################
# Print header #
################
echo ""
echo "----------------------------------------------"
echo "Removing the DockerFile image:[${IMAGE_REPO}:${IMAGE_VERSION}]"
echo "----------------------------------------------"
echo ""
info "----------------------------------------------"
info "Removing the DockerFile image:[${IMAGE_REPO}:${IMAGE_VERSION}]"
info "----------------------------------------------"

#####################################
# Create Token to auth to DockerHub #
@@ -193,12 +184,11 @@ RemoveImage() {
##############################
if [ ${ERROR_CODE} -ne 0 ]; then
# ERROR
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to gain token from DockerHub!${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${TOKEN}]${NC}"
exit 1
error "Failed to gain token from DockerHub!"
fatal "[${TOKEN}]"
else
# SUCCESS
echo "Successfully gained auth token from DockerHub!"
info "Successfully gained auth token from DockerHub!"
fi

#################################
@@ -218,22 +208,19 @@ RemoveImage() {
##############################
if [ ${ERROR_CODE} -ne 0 ]; then
# ERROR
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to remove tag from DockerHub!${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${REMOVE_CMD}]${NC}"
exit 1
error "Failed to remove tag from DockerHub!"
fatal "[${REMOVE_CMD}]"
else
# SUCCESS
echo "Successfully [removed] Docker image tag:[${IMAGE_VERSION}] from DockerHub!"
info "Successfully [removed] Docker image tag:[${IMAGE_VERSION}] from DockerHub!"
fi
}
################################################################################
#### Function Footer ###########################################################
Footer() {
echo ""
echo "-------------------------------------------------------"
echo "The step has completed"
echo "-------------------------------------------------------"
echo ""
info "-------------------------------------------------------"
info "The step has completed"
info "-------------------------------------------------------"
}
################################################################################
################################## MAIN ########################################
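The hunks above replace the colorized `echo -e` blocks with the leveled helpers (`info`, `warn`, `error`, `fatal`) that the script now sources from `../lib/log.sh`. A minimal sketch of the conversion pattern, with stub definitions standing in for the real helpers (assumption: the actual functions in `lib/log.sh` also write to the log file, and `fatal` exits non-zero):

```bash
#!/usr/bin/env bash
# Stub log helpers for illustration only; the real ones live in lib/log.sh.
info()  { echo "[INFO] $*"; }
error() { echo "[ERROR] $*" >&2; }
fatal() { error "$*"; exit 1; }  # assumption: fatal logs the message and exits 1

# Old style (removed by this commit):
#   echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to get [GITHUB_WORKSPACE]!${NC}"
#   exit 1
# New style (added by this commit):
if [ -z "${GITHUB_WORKSPACE}" ]; then
  error "Failed to get [GITHUB_WORKSPACE]!"
  fatal "[${GITHUB_WORKSPACE}]"
else
  info "Successfully found:[GITHUB_WORKSPACE], value:[${GITHUB_WORKSPACE}]"
fi
```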

@@ -28,17 +28,17 @@ CheckGHEPid()
##################################
if [ ${PID_CHECK} -gt ${PID_CHECK_LIMIT} ]; then
# Over the limit, move on
echo "We have checked the pid ${PID_CHECK} times, moving on..."
info "We have checked the pid ${PID_CHECK} times, moving on..."
else
################################################
# Check to see if the PID is alive and running #
################################################
if [ ! -f "${GHE_CONFIG_PID}" ]; then
# File not found
echo "We're good to move forward, no .pid file found at:[${GHE_CONFIG_PID}]"
info "We're good to move forward, no .pid file found at:[${GHE_CONFIG_PID}]"
else
# Found the pid running, need to sleep
echo "Current PID found, sleeping ${SLEEP_SECONDS} seconds before next check..."
info "Current PID found, sleeping ${SLEEP_SECONDS} seconds before next check..."
################
# Sleep it off #
################
@@ -53,9 +53,9 @@ CheckGHEPid()
# Check the shell for errors #
##############################
if [ ${ERROR_CODE} -ne 0 ]; then
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to sleep!${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${SLEEP_CMD}]${NC}"
echo "Will try to call apply as last effort..."
error "Failed to sleep!"
error "[${SLEEP_CMD}]"
info "Will try to call apply as last effort..."
####################################
# Call config apply as last effort #
####################################
@@ -82,7 +82,7 @@ CheckGHEProcess()
##################################
if [ ${PROCESS_CHECK} -gt ${PROCESS_CHECK_LIMIT} ]; then
# Over the limit, move on
echo "We have checked the process ${PROCESS_CHECK} times, moving on..."
info "We have checked the process ${PROCESS_CHECK} times, moving on..."
else
####################################################
# Check to see if the process is alive and running #
@@ -99,10 +99,10 @@ CheckGHEProcess()
##############################
if [ ${ERROR_CODE} -ne 0 ]; then
# No process running on the system
echo "Were good to move forward, no process like:[${GHE_APPLY_COMMAND}] running currently on the system"
info "Were good to move forward, no process like:[${GHE_APPLY_COMMAND}] running currently on the system"
else
# Found the process running, need to sleep
echo "Current process alive:[${CHECK_PROCESS_CMD}], sleeping ${SLEEP_SECONDS} seconds before next check..."
info "Current process alive:[${CHECK_PROCESS_CMD}], sleeping ${SLEEP_SECONDS} seconds before next check..."
################
# Sleep it off #
################
@@ -117,9 +117,9 @@ CheckGHEProcess()
# Check the shell for errors #
##############################
if [ ${ERROR_CODE} -ne 0 ]; then
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to sleep!${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${SLEEP_CMD}]${NC}"
echo "Will try to call apply as last effort..."
error "Failed to sleep!"
error "[${SLEEP_CMD}]"
info "Will try to call apply as last effort..."
####################################
# Call config apply as last effort #
####################################
@@ -144,7 +144,7 @@ RunConfigApply()
##########
# Header #
##########
echo "Running ${GHE_APPLY_COMMAND} to the server..."
info "Running ${GHE_APPLY_COMMAND} to the server..."

##############################################
# Run the command to apply changes to server #
@@ -161,12 +161,11 @@ RunConfigApply()
##############################
if [ ${ERROR_CODE} -ne 0 ]; then
# Errors
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to run config apply command!${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${APPLY_CMD}]${NC}"
exit 1
error "Failed to run config apply command!"
fatal "[${APPLY_CMD}]"
else
# Success
echo -e "${NC}${F[B]}Successfully ran ${F[C]}${GHE_APPLY_COMMAND}${NC}"
info "Successfully ran ${F[C]}${GHE_APPLY_COMMAND}"
fi
}
################################################################################

13 .automation/test/lua/README.md Normal file
@@ -0,0 +1,13 @@
# Lua Test Cases
This folder holds the test cases for **Lua**.

## Additional Docs
No Additional information is needed for this test case.

## Good Test Cases
The test cases denoted: `LANGUAGE_good_FILE.EXTENSION` are all valid, and should pass successfully when linted.
- **Note:** They are linted utilizing the default linter rules.

## Bad Test Cases
The test cases denoted: `LANGUAGE_bad_FILE.EXTENSION` are **NOT** valid, and should trigger errors when linted.
- **Note:** They are linted utilizing the default linter rules.

9 .automation/test/lua/lua_bad_1.lua Normal file
@@ -0,0 +1,9 @@
package.loaded[...] = {}

local function helper(...)
-- NYI
end

function embrace(opt)
local opt = opt or "default"
return hepler(opt.."?")

12 .automation/test/lua/lua_good_1.lua Normal file
@@ -0,0 +1,12 @@
local embracer = {}

local function helper()
-- NYI wontfix
end

function embracer.embrace(opt)
opt = opt or "default"
return helper(opt.."?")
end

return embracer
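With the two Lua fixtures above in place, the bad file should fail and the good file should pass when checked locally. A quick way to confirm that, assuming `luacheck` is installed and using the `--std max` setting from the `.luacheckrc` files added later in this commit:

```bash
# Expected to report problems (file is cut off before the closing `end`
# of embrace() and calls the misspelled global `hepler`)
luacheck --std max .automation/test/lua/lua_bad_1.lua

# Expected to pass cleanly
luacheck --std max .automation/test/lua/lua_good_1.lua
```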

11 .automation/test/php/reports/expected-PHP.tap Normal file
@@ -0,0 +1,11 @@
TAP version 13
1..3
not ok 1 - php_bad_1.php
---
message: PHP Parse error syntax error, unexpected 'pe98y' (T_STRING) in /tmp/lint/.automation/test/php/php_bad_1.php on line 3\nErrors parsing /tmp/lint/.automation/test/php/php_bad_1.php\n
...
not ok 2 - php_bad_2.php
---
message: PHP Parse error syntax error, unexpected end of file, expecting variable (T_VARIABLE) or ${ (T_DOLLAR_OPEN_CURLY_BRACES) or {$ (T_CURLY_OPEN) in /tmp/lint/.automation/test/php/php_bad_2.php on line 5\nErrors parsing /tmp/lint/.automation/test/php/php_bad_2.php\n
...
ok 3 - php_good_1.php

11 .automation/test/php/reports/expected-PHP_PHPSTAN.tap Normal file
@@ -0,0 +1,11 @@
TAP version 13
1..3
not ok 1 - php_bad_1.php
---
message: ------ ---------------------------------------------- \n Line php_bad_1.php \n ------ ---------------------------------------------- \n 3 Invalid numeric literal on line 3 \n 3 Invalid numeric literal on line 3 \n 3 Syntax error, unexpected '=' on line 3 \n 3 Syntax error, unexpected T_LNUMBER on line 3 \n 3 Syntax error, unexpected T_STRING on line 3 \n 3 Syntax error, unexpected T_STRING on line 3 \n ------ ---------------------------------------------- \n\n [ERROR] Found 6 errors \n
...
not ok 2 - php_bad_2.php
---
message: ------ ------------------------------------------------------- \n Line php_bad_2.php \n ------ ------------------------------------------------------- \n 5 Syntax error, unexpected EOF, expecting T_VARIABLE or \n T_DOLLAR_OPEN_CURLY_BRACES or T_CURLY_OPEN on line 5 \n ------ ------------------------------------------------------- \n\n [ERROR] Found 1 error \n
...
ok 3 - php_good_1.php
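The two `expected-*.tap` files above are TAP v13 fixtures, presumably used by the test automation to compare against the reports a run produces. A rough local check might look like the following; note that the generated report path is a hypothetical placeholder used only for illustration:

```bash
# NOTE: super-linter.report/super-linter-PHP.tap is a hypothetical output path;
# substitute the TAP report file your own run writes into OUTPUT_FOLDER.
diff .automation/test/php/reports/expected-PHP.tap super-linter.report/super-linter-PHP.tap
```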

@@ -23,6 +23,7 @@ headers = {
'Content-Type': 'application/json'
}


def create_label(repo_id, label):
"""
Create label in the supplied repo.
@@ -52,6 +53,7 @@ def create_label(repo_id, label):

return response


def get_labels(owner, repo):
"""
Gets a list of labels from the supplied repo.
@@ -83,6 +85,7 @@ def get_labels(owner, repo):
'[ERROR] getting issue labels. Status Code: {status_code} - Message: {result}'.format(
status_code=status_code, result=result["message"]))


def delete_label(label_id):
"""
Delete the specified label
@@ -106,6 +109,7 @@ def delete_label(label_id):

return result


@click.command()
@click.option('--dry', is_flag=True)
@click.argument('source_repo')
@@ -149,6 +153,7 @@ def copy_labels(source_repo, target_repo, dry):

print('Done')


if __name__ == "__main__":
# Pylint doesn't know that @click.command takes care of injecting the
# function parameters. Disabling Pylint error.
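The Python hunks above appear to only add blank lines before each top-level definition, which matches the two-blank-line spacing flake8 enforces (pycodestyle E302) now that flake8 is part of the toolchain. A minimal sketch of running it with the repository config added in this commit, assuming flake8 is on PATH:

```bash
# Use the .flake8 added under .github/linters (max-line-length = 120)
flake8 --config .github/linters/.flake8 .automation/
```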

@@ -1,6 +1,6 @@
# Yml Test Cases
# Yaml Test Cases

This folder holds the test cases for **Yml**.
This folder holds the test cases for **Yaml**.

## Additional Docs

@@ -14,6 +14,12 @@
# - System with Docker installed
# - Global variables met

#########################
# Source Function Files #
#########################
# shellcheck source=/dev/null
source ../lib/log.sh # Source the function script(s)

###########
# Globals #
###########
@@ -35,11 +41,9 @@ UPDATE_MAJOR_TAG=0 # Flag to deploy the major tag version as
################################################################################
#### Function Header ###########################################################
Header() {
echo ""
echo "-------------------------------------------------------"
echo "---- GitHub Actions Upload image to [${REGISTRY}] ----"
echo "-------------------------------------------------------"
echo ""
info "-------------------------------------------------------"
info "---- GitHub Actions Upload image to [${REGISTRY}] ----"
info "-------------------------------------------------------"
}
################################################################################
#### Function ValidateInput ####################################################
@@ -48,32 +52,28 @@ ValidateInput() {
################
# Print header #
################
echo ""
echo "----------------------------------------------"
echo "Gathering variables..."
echo "----------------------------------------------"
echo ""
info "----------------------------------------------"
info "Gathering variables..."
info "----------------------------------------------"

#############################
# Validate GITHUB_WORKSPACE #
#############################
if [ -z "${GITHUB_WORKSPACE}" ]; then
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to get [GITHUB_WORKSPACE]!${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${GITHUB_WORKSPACE}]${NC}"
exit 1
error "Failed to get [GITHUB_WORKSPACE]!"
fatal "[${GITHUB_WORKSPACE}]"
else
echo -e "${NC}${F[B]}Successfully found:${F[W]}[GITHUB_WORKSPACE]${F[B]}, value:${F[W]}[${GITHUB_WORKSPACE}]${NC}"
info "Successfully found:${F[W]}[GITHUB_WORKSPACE]${F[B]}, value:${F[W]}[${GITHUB_WORKSPACE}]"
fi

#####################
# Validate REGISTRY #
#####################
if [ -z "${REGISTRY}" ]; then
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to get [REGISTRY]!${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${REGISTRY}]${NC}"
exit 1
error "Failed to get [REGISTRY]!"
fatal "[${REGISTRY}]"
else
echo -e "${NC}${F[B]}Successfully found:${F[W]}[REGISTRY]${F[B]}, value:${F[W]}[${REGISTRY}]${NC}"
info "Successfully found:${F[W]}[REGISTRY]${F[B]}, value:${F[W]}[${REGISTRY}]"
fi

#####################################################
@@ -84,22 +84,20 @@ ValidateInput() {
# Validate GPR_USERNAME #
#########################
if [ -z "${GPR_USERNAME}" ]; then
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to get [GPR_USERNAME]!${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${GPR_USERNAME}]${NC}"
exit 1
error "Failed to get [GPR_USERNAME]!"
fatal "[${GPR_USERNAME}]"
else
echo -e "${NC}${F[B]}Successfully found:${F[W]}[GPR_USERNAME]${F[B]}, value:${F[W]}[${GPR_USERNAME}]${NC}"
info "Successfully found:${F[W]}[GPR_USERNAME]${F[B]}, value:${F[W]}[${GPR_USERNAME}]"
fi

######################
# Validate GPR_TOKEN #
######################
if [ -z "${GPR_TOKEN}" ]; then
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to get [GPR_TOKEN]!${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${GPR_TOKEN}]${NC}"
exit 1
error "Failed to get [GPR_TOKEN]!"
fatal "[${GPR_TOKEN}]"
else
echo -e "${NC}${F[B]}Successfully found:${F[W]}[GPR_TOKEN]${F[B]}, value:${F[W]}[********]${NC}"
info "Successfully found:${F[W]}[GPR_TOKEN]${F[B]}, value:${F[W]}[********]"
fi
########################################
# See if we need values for Ducker hub #
@@ -109,48 +107,44 @@ ValidateInput() {
# Validate DOCKER_USERNAME #
############################
if [ -z "${DOCKER_USERNAME}" ]; then
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to get [DOCKER_USERNAME]!${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${DOCKER_USERNAME}]${NC}"
exit 1
error "Failed to get [DOCKER_USERNAME]!"
fatal "[${DOCKER_USERNAME}]"
else
echo -e "${NC}${F[B]}Successfully found:${F[W]}[DOCKER_USERNAME]${F[B]}, value:${F[W]}[${DOCKER_USERNAME}]${NC}"
info "Successfully found:${F[W]}[DOCKER_USERNAME]${F[B]}, value:${F[W]}[${DOCKER_USERNAME}]"
fi

############################
# Validate DOCKER_PASSWORD #
############################
if [ -z "${DOCKER_PASSWORD}" ]; then
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to get [DOCKER_PASSWORD]!${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${DOCKER_PASSWORD}]${NC}"
exit 1
error "Failed to get [DOCKER_PASSWORD]!"
fatal "[${DOCKER_PASSWORD}]"
else
echo -e "${NC}${F[B]}Successfully found:${F[W]}[DOCKER_PASSWORD]${F[B]}, value:${F[B]}[********]${NC}"
info "Successfully found:${F[W]}[DOCKER_PASSWORD]${F[B]}, value:${F[B]}[********]"
fi
###########################################
# We were not passed a registry to update #
###########################################
else
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to find a valid registry!${NC}"
echo "Registry:[${REGISTRY}]"
exit 1
error "Failed to find a valid registry!"
fatal "Registry:[${REGISTRY}]"
fi

#######################
# Validate IMAGE_REPO #
#######################
if [ -z "${IMAGE_REPO}" ]; then
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to get [IMAGE_REPO]!${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${IMAGE_REPO}]${NC}"
exit 1
error "Failed to get [IMAGE_REPO]!"
fatal "[${IMAGE_REPO}]"
else
echo -e "${NC}${F[B]}Successfully found:${F[W]}[IMAGE_REPO]${F[B]}, value:${F[W]}[${IMAGE_REPO}]${NC}"
info "Successfully found:${F[W]}[IMAGE_REPO]${F[B]}, value:${F[W]}[${IMAGE_REPO}]"
###############################################
# Need to see if GPR registry and update name #
###############################################
if [[ ${REGISTRY} == "GPR" ]]; then
NAME="docker.pkg.github.com/${IMAGE_REPO}/super-linter"
IMAGE_REPO="${NAME}"
echo "Updated [IMAGE_REPO] to:[${IMAGE_REPO}] for GPR"
info "Updated [IMAGE_REPO] to:[${IMAGE_REPO}] for GPR"
fi
fi

@@ -158,8 +152,8 @@ ValidateInput() {
# Validate IMAGE_VERSION #
##########################
if [ -z "${IMAGE_VERSION}" ]; then
echo -e "${NC}${F[Y]}WARN!${NC} Failed to get [IMAGE_VERSION]!${NC}"
echo "Pulling from Branch Name..."
warn "Failed to get [IMAGE_VERSION]!"
info "Pulling from Branch Name..."
##############################
# Get the name of the branch #
##############################
@@ -174,9 +168,8 @@ ValidateInput() {
# Check the shell for errors #
##############################
if [ ${ERROR_CODE} -ne 0 ]; then
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to get branch name!${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${BRANCH_NAME}]${NC}"
exit 1
error "Failed to get branch name!"
fatal "[${BRANCH_NAME}]"
fi

##################################
@@ -188,9 +181,9 @@ ValidateInput() {
# Set the IMAGE_VERSION to the BRANCH_NAME #
############################################
IMAGE_VERSION="${BRANCH_NAME}"
echo "Tag:[${IMAGE_VERSION}]"
info "Tag:[${IMAGE_VERSION}]"
else
echo -e "${NC}${F[B]}Successfully found:${F[W]}[IMAGE_VERSION]${F[B]}, value:${F[W]}[${IMAGE_VERSION}]${NC}"
info "Successfully found:${F[W]}[IMAGE_VERSION]${F[B]}, value:${F[W]}[${IMAGE_VERSION}]"
fi

##################################
@@ -214,18 +207,17 @@ ValidateInput() {
###################################
UPDATE_MAJOR_TAG=1

echo "- Also deploying a major tag of:[${MAJOR_TAG}]"
info "- Also deploying a major tag of:[${MAJOR_TAG}]"
fi

############################
# Validate DOCKERFILE_PATH #
############################
if [ -z "${DOCKERFILE_PATH}" ]; then
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to get [DOCKERFILE_PATH]!${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${DOCKERFILE_PATH}]${NC}"
exit 1
error "Failed to get [DOCKERFILE_PATH]!"
fatal "[${DOCKERFILE_PATH}]"
else
echo -e "${NC}${F[B]}Successfully found:${F[W]}[DOCKERFILE_PATH]${F[B]}, value:${F[W]}[${DOCKERFILE_PATH}]${NC}"
info "Successfully found:${F[W]}[DOCKERFILE_PATH]${F[B]}, value:${F[W]}[${DOCKERFILE_PATH}]"
fi
}
################################################################################
@@ -242,11 +234,9 @@ Authenticate() {
################
# Print header #
################
echo ""
echo "----------------------------------------------"
echo "Login to ${NAME}..."
echo "----------------------------------------------"
echo ""
info "----------------------------------------------"
info "Login to ${NAME}..."
info "----------------------------------------------"

###################
# Auth to service #
@@ -263,12 +253,11 @@ Authenticate() {
##############################
if [ ${ERROR_CODE} -ne 0 ]; then
# ERROR
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to authenticate to ${NAME}!${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${LOGIN_CMD}]${NC}"
exit 1
error "Failed to authenticate to ${NAME}!"
fatal "[${LOGIN_CMD}]"
else
# SUCCESS
echo -e "${NC}${F[B]}Successfully authenticated to ${F[C]}${NAME}${F[B]}!${NC}"
info "Successfully authenticated to ${F[C]}${NAME}${F[B]}!"
fi
}
################################################################################
@@ -277,21 +266,18 @@ BuildImage() {
################
# Print header #
################
echo ""
echo "----------------------------------------------"
echo "Building the DockerFile image..."
echo "----------------------------------------------"
echo ""
info "----------------------------------------------"
info "Building the DockerFile image..."
info "----------------------------------------------"

################################
# Validate the DOCKERFILE_PATH #
################################
if [ ! -f "${DOCKERFILE_PATH}" ]; then
# No file found
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} failed to find Dockerfile at:[${DOCKERFILE_PATH}]${NC}"
echo "Please make sure you give full path!"
echo "Example:[/configs/Dockerfile] or [Dockerfile] if at root directory"
exit 1
error "failed to find Dockerfile at:[${DOCKERFILE_PATH}]"
error "Please make sure you give full path!"
fatal "Example:[/configs/Dockerfile] or [Dockerfile] if at root directory"
fi

###################
@@ -309,11 +295,10 @@ BuildImage() {
##############################
if [ ${ERROR_CODE} -ne 0 ]; then
# ERROR
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} failed to [build] Dockerfile!${NC}"
exit 1
fatal "failed to [build] Dockerfile!"
else
# SUCCESS
echo -e "${NC}${F[B]}Successfully Built image!${NC}"
info "Successfully Built image!"
fi

########################################################
@@ -333,11 +318,10 @@ BuildImage() {
##############################
if [ ${ERROR_CODE} -ne 0 ]; then
# ERROR
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} failed to [tag] Dockerfile!${NC}"
exit 1
fatal "failed to [tag] Dockerfile!"
else
# SUCCESS
echo -e "${NC}${F[B]}Successfully tagged image!${NC}"
info "Successfully tagged image!"
fi
fi
}
@@ -347,11 +331,9 @@ UploadImage() {
################
# Print header #
################
echo ""
echo "----------------------------------------------"
echo "Uploading the DockerFile image to ${REGISTRY}..."
echo "----------------------------------------------"
echo ""
info "----------------------------------------------"
info "Uploading the DockerFile image to ${REGISTRY}..."
info "----------------------------------------------"

############################################
# Upload the docker image that was created #
@@ -368,11 +350,10 @@ UploadImage() {
##############################
if [ ${ERROR_CODE} -ne 0 ]; then
# ERROR
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} failed to [upload] Dockerfile!${NC}"
exit 1
fatal "failed to [upload] Dockerfile!"
else
# SUCCESS
echo -e "${NC}${F[B]}Successfully Uploaded Docker image:${F[W]}[${IMAGE_VERSION}]${F[B]} to ${F[C]}${REGISTRY}${F[B]}!${NC}"
info "Successfully Uploaded Docker image:${F[W]}[${IMAGE_VERSION}]${F[B]} to ${F[C]}${REGISTRY}${F[B]}!"
fi

#########################
@@ -391,9 +372,8 @@ UploadImage() {
##############################
if [ ${ERROR_CODE} -ne 0 ]; then
# ERROR
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to get information about built Image!${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${GET_INFO_CMD}]${NC}"
exit 1
error "Failed to get information about built Image!"
fatal "[${GET_INFO_CMD}]"
else
################
# Get the data #
@@ -406,13 +386,13 @@ UploadImage() {
###################
# Print the goods #
###################
echo "----------------------------------------------"
echo "Docker Image Details:"
echo "Repository:[${REPO}]"
echo "Tag:[${TAG}]"
echo "Image_ID:[${IMAGE_ID}]"
echo "Size:[${SIZE}]"
echo "----------------------------------------------"
info "----------------------------------------------"
info "Docker Image Details:"
info "Repository:[${REPO}]"
info "Tag:[${TAG}]"
info "Image_ID:[${IMAGE_ID}]"
info "Size:[${SIZE}]"
info "----------------------------------------------"
fi

###############################################################
@@ -434,22 +414,19 @@ UploadImage() {
##############################
if [ ${ERROR_CODE} -ne 0 ]; then
# ERROR
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} failed to [upload] MAJOR_TAG:[${MAJOR_TAG}] Dockerfile!${NC}"
exit 1
fatal "failed to [upload] MAJOR_TAG:[${MAJOR_TAG}] Dockerfile!"
else
# SUCCESS
echo -e "${NC}${F[B]}Successfully Uploaded TAG:${F[W]}[${MAJOR_TAG}]${F[B]} of Docker image to ${F[C]}${REGISTRY}${F[B]}!${NC}"
info "Successfully Uploaded TAG:${F[W]}[${MAJOR_TAG}]${F[B]} of Docker image to ${F[C]}${REGISTRY}${F[B]}!"
fi
fi
}
################################################################################
#### Function Footer ###########################################################
Footer() {
echo ""
echo "-------------------------------------------------------"
echo "The step has completed"
echo "-------------------------------------------------------"
echo ""
info "-------------------------------------------------------"
info "The step has completed"
info "-------------------------------------------------------"
}
################################################################################
################################## MAIN ########################################
@@ -488,9 +465,8 @@ else
#########
# ERROR #
#########
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Registry not set correctly!${NC}"
echo "Registry:[${REGISTRY}]"
exit 1
error "Registry not set correctly!"
fatal "Registry:[${REGISTRY}]"
fi

####################

2 .github/linters/.flake8 vendored Normal file
@@ -0,0 +1,2 @@
[flake8]
max-line-length = 120

1 .github/linters/.luacheckrc vendored Normal file
@@ -0,0 +1 @@
--std max

2 .github/workflows/stale.yml vendored
@@ -29,7 +29,7 @@ jobs:
if: "github.event_name == 'schedule'"
steps:
- name: Mark issue stale
uses: actions/stale@v3.0.8
uses: actions/stale@v3.0.9
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
stale-issue-message: "This issue has been automatically marked as stale because it has not had recent activity.\nIt will be closed in 14 days if no further activity occurs.\nThank you for your contributions.\n\nIf you think this issue should stay open, please remove the `O: stale 🤖` label or comment on the issue."

55 Dockerfile
@@ -7,11 +7,11 @@
#########################################
# Get dependency images as build stages #
#########################################
FROM borkdude/clj-kondo:2020.06.21 as clj-kondo
FROM borkdude/clj-kondo:2020.07.29 as clj-kondo
FROM dotenvlinter/dotenv-linter:2.1.0 as dotenv-linter
FROM mstruebing/editorconfig-checker:2.1.0 as editorconfig-checker
FROM golangci/golangci-lint:v1.29.0 as golangci-lint
FROM yoheimuta/protolint:v0.25.1 as protolint
FROM yoheimuta/protolint:v0.26.0 as protolint
FROM koalaman/shellcheck:v0.7.1 as shellcheck
FROM wata727/tflint:0.18.0 as tflint

@@ -52,29 +52,23 @@ ARG GLIBC_VERSION='2.31-r0'
RUN apk add --update --no-cache \
ansible-lint \
bash \
coreutils \
curl \
gcc \
git git-lfs\
go \
icu-libs \
jq \
libxml2-utils \
libc-dev libxml2-utils \
make \
musl-dev \
npm nodejs-current \
openjdk8-jre \
perl \
php7 \
php7-phar \
php7-json \
php7-simplexml \
php7-xmlwriter \
php7-mbstring \
php7-tokenizer \
php7-ctype \
php7-curl \
php7-dom \
php7 php7-phar php7-json php7-mbstring php-xmlwriter \
php7-tokenizer php7-ctype php7-curl php7-dom \
py3-setuptools \
readline-dev \
ruby ruby-dev ruby-bundler ruby-rdoc \
gnupg

@@ -206,6 +200,35 @@ RUN wget https://storage.googleapis.com/dart-archive/channels/stable/release/${D
RUN echo "http://dl-cdn.alpinelinux.org/alpine/edge/testing/" >> /etc/apk/repositories \
&& apk add --update --no-cache rakudo zef

####################
# Install luacheck #
####################
RUN wget https://www.lua.org/ftp/lua-5.3.5.tar.gz -O - -q | tar -xzf - \
&& cd lua-5.3.5 \
&& make linux \
&& make install \
&& cd .. && rm -r lua-5.3.5/

RUN wget https://github.com/cvega/luarocks/archive/v3.3.1-super-linter.tar.gz -O - -q | tar -xzf - \
&& cd luarocks-3.3.1-super-linter \
&& ./configure --with-lua-include=/usr/local/include \
&& make \
&& make -b install \
&& cd .. && rm -r luarocks-3.3.1-super-linter/

RUN luarocks install luacheck

#############################
# Install Phive and PHPStan #
#############################
RUN wget -O phive.phar https://phar.io/releases/phive.phar \
&& wget -O phive.phar.asc https://phar.io/releases/phive.phar.asc \
&& gpg --keyserver pool.sks-keyservers.net --recv-keys 0x9D8A98B29B2D5D79 \
&& gpg --verify phive.phar.asc phive.phar \
&& chmod +x phive.phar \
&& mv phive.phar /usr/local/bin/phive \
&& yes | phive install -g phpstan

###########################################
# Load GitHub Env Vars for GitHub Actions #
###########################################
@@ -218,6 +241,8 @@ ENV ACTIONS_RUNNER_DEBUG=${ACTIONS_RUNNER_DEBUG} \
GITHUB_TOKEN=${GITHUB_TOKEN} \
GITHUB_WORKSPACE=${GITHUB_WORKSPACE} \
LINTER_RULES_PATH=${LINTER_RULES_PATH} \
LOG_FILE=${LOG_FILE} \
LOG_LEVEL=${LOG_LEVEL} \
OUTPUT_DETAILS=${OUTPUT_DETAILS} \
OUTPUT_FOLDER=${OUTPUT_FOLDER} \
OUTPUT_FORMAT=${OUTPUT_FORMAT} \
@@ -241,13 +266,17 @@ ENV ACTIONS_RUNNER_DEBUG=${ACTIONS_RUNNER_DEBUG} \
VALIDATE_JAVASCRIPT_STANDARD=${VALIDATE_JAVASCRIPT_STANDARD} \
VALIDATE_JSON=${VALIDATE_JSON} \
VALIDATE_KOTLIN=${VALIDATE_KOTLIN} \
VALIDATE_LUA=${VALIDATE_LUA} \
VALIDATE_MD=${VALIDATE_MD} \
VALIDATE_OPENAPI=${VALIDATE_OPENAPI} \
VALIDATE_PERL=${VALIDATE_PERL} \
VALIDATE_PHP=${VALIDATE_PHP} \
VALIDATE_PHP_PHPSTAN=${VALIDATE_PHP_PHPSTAN} \
VALIDATE_POWERSHELL=${VALIDATE_POWERSHELL} \
VALIDATE_PROTOBUF=${VALIDATE_PROTOBUF} \
VALIDATE_PYTHON=${VALIDATE_PYTHON} \
VALIDATE_PYTHON_PYLINT=${VALIDATE_PYTHON_PYLINT} \
VALIDATE_PYTHON_FLAKE8=${VALIDATE_PYTHON_FLAKE8} \
VALIDATE_RAKU=${VALIDATE_RAKU} \
VALIDATE_RUBY=${VALIDATE_RUBY} \
VALIDATE_STATES=${VALIDATE_STATES} \
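The Dockerfile hunks above add Lua and luacheck (built from source plus the forked luarocks), a Phive-installed PHPStan, and the new LOG_*/VALIDATE_* environment passthroughs. A quick smoke test of the added tools, assuming Docker is available locally (the `super-linter:local` tag is just an illustrative name, not something this commit defines):

```bash
docker build -t super-linter:local .
docker run --rm --entrypoint /bin/bash super-linter:local \
  -c 'luacheck --version && phpstan --version && php -v'
```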

16 README.md
@@ -42,7 +42,7 @@ The design of the **Super-Linter** is currently to allow linting to occur in **G
Developers on **GitHub** can call the **GitHub Action** to lint their code base with the following list of linters:

| _Language_ | _Linter_ |
| -------------------------------- | ------------------------------------------------------------------------------------ |
| -------------------------------- | -------------------------------------------------------------------------------------------------------- |
| **Ansible** | [ansible-lint](https://github.com/ansible/ansible-lint) |
| **Azure Resource Manager (ARM)** | [arm-ttk](https://github.com/azure/arm-ttk) |
| **AWS CloudFormation templates** | [cfn-lint](https://github.com/aws-cloudformation/cfn-python-lint/) |
@@ -59,13 +59,14 @@ Developers on **GitHub** can call the **GitHub Action** to lint their code base
| **JavaScript** | [eslint](https://eslint.org/) [standard js](https://standardjs.com/) |
| **JSON** | [jsonlint](https://github.com/zaach/jsonlint) |
| **Kotlin** | [ktlint](https://github.com/pinterest/ktlint) |
| **Lua** | [luacheck](https://github.com/luarocks/luacheck) |
| **Markdown** | [markdownlint](https://github.com/igorshubovych/markdownlint-cli#readme) |
| **OpenAPI** | [spectral](https://github.com/stoplightio/spectral) |
| **Perl** | [perl](https://pkgs.alpinelinux.org/package/edge/main/x86/perl) |
| **PHP** | [PHP](https://www.php.net/) |
| **PowerShell** | [PSScriptAnalyzer](https://github.com/PowerShell/Psscriptanalyzer) |
| **Protocol Buffers** | [protolint](https://github.com/yoheimuta/protolint) |
| **Python3** | [pylint](https://www.pylint.org/) |
| **Python3** | [pylint](https://www.pylint.org/) [flake8](https://flake8.pycqa.org/en/latest/) |
| **Raku** | [raku](https://raku.org) |
| **Ruby** | [RuboCop](https://github.com/rubocop-hq/rubocop) |
| **Shell** | [Shellcheck](https://github.com/koalaman/shellcheck) |
@@ -175,10 +176,14 @@ and won't run anything unexpected.
| **DISABLE_ERRORS** | `false` | Flag to have the linter complete with exit code 0 even if errors were detected. |
| **JAVASCRIPT_ES_CONFIG_FILE** | `.eslintrc.yml` | Filename for [eslint configuration](https://eslint.org/docs/user-guide/configuring#configuration-file-formats) (ex: `.eslintrc.yml`, `.eslintrc.json`) |
| **LINTER_RULES_PATH** | `.github/linters` | Directory for all linter configuration rules. |
| **LOG_FILE** | `super-linter.log` | The file name for outputting logs. All output is sent to the log file regardless of `LOG_LEVEL`. |
| **LOG_LEVEL** | `VERBOSE` | How much output the script will generate to the console. One of `VERBOSE`, `DEBUG` or `TRACE`. |
| **MULTI_STATUS** | `true` | A status API is made for each language that is linted to make visual parsing easier. |
| **OUTPUT_FORMAT** | `none` | The report format to be generated, besides the stdout one. Output format of tap is currently using v13 of the specification. Supported formats: tap |
| **OUTPUT_FOLDER** | `super-linter.report` | The location where the output reporting will be generated to. Output folder must not previously exist. |
| **OUTPUT_DETAILS** | `simpler` | What level of details to be reported. Supported formats: simpler or detailed. |
| **PYTHON_PYLINT_CONFIG_FILE** | `.python-lint` | Filename for [pylint configuration](http://pylint.pycqa.org/en/latest/user_guide/run.html?highlight=rcfile#command-line-options) (ex: `.python-lint`, `.pylintrc`) |
| **PYTHON_FLAKE8_CONFIG_FILE** | `.flake8` | Filename for [flake8 configuration](https://flake8.pycqa.org/en/latest/user/configuration.html) (ex: `.flake8`, `tox.ini`) |
| **RUBY_CONFIG_FILE** | `.ruby-lint.yml` | Filename for [rubocop configuration](https://docs.rubocop.org/rubocop/configuration.html) (ex: `.ruby-lint.yml`, `.rubocop.yml`) |
| **TYPESCRIPT_ES_CONFIG_FILE** | `.eslintrc.yml` | Filename for [eslint configuration](https://eslint.org/docs/user-guide/configuring#configuration-file-formats) (ex: `.eslintrc.yml`, `.eslintrc.json`) |
| **VALIDATE_ALL_CODEBASE** | `true` | Will parse the entire repository and find all files to validate across all types. **NOTE:** When set to `false`, only **new** or **edited** files will be parsed for validation. |
@@ -201,12 +206,16 @@ and won't run anything unexpected.
| **VALIDATE_JSON** | `true` | Flag to enable or disable the linting process of the JSON language. |
| **VALIDATE_JSX** | `true` | Flag to enable or disable the linting process for jsx files (Utilizing: eslint) |
| **VALIDATE_KOTLIN** | `true` | Flag to enable or disable the linting process of the Kotlin language. |
| **VALIDATE_LUA** | `true` | Flag to enable or disable the linting process of the language. |
| **VALIDATE_MD** | `true` | Flag to enable or disable the linting process of the Markdown language. |
| **VALIDATE_OPENAPI** | `true` | Flag to enable or disable the linting process of the OpenAPI language. |
| **VALIDATE_PERL** | `true` | Flag to enable or disable the linting process of the Perl language. |
| **VALIDATE_PHP** | `true` | Flag to enable or disable the linting process of the PHP language. |
| **VALIDATE_PHP_PHPSTAN** | `true` | Flag to enable or disable the linting process of the PHP language (Utilizing: PHPStan). |
| **VALIDATE_PROTOBUF** | `true` | Flag to enable or disable the linting process of the Protobuf language. |
| **VALIDATE_PYTHON** | `true` | Flag to enable or disable the linting process of the Python language. |
| **VALIDATE_PYTHON** | `true` | Flag to enable or disable the linting process of the Python language. (Utilizing: pylint) (keep for backward compatibility) |
| **VALIDATE_PYTHON_PYLINT** | `true` | Flag to enable or disable the linting process of the Python language. (Utilizing: pylint) |
| **VALIDATE_PYTHON_FLAKE8** | `true` | Flag to enable or disable the linting process of the Python language. (Utilizing: flake8) |
| **VALIDATE_POWERSHELL** | `true` | Flag to enable or disable the linting process of the Powershell language. |
| **VALIDATE_RAKU** | `true` | Flag to enable or disable the linting process of the Raku language. |
| **VALIDATE_RUBY** | `true` | Flag to enable or disable the linting process of the Ruby language. |
@@ -218,6 +227,7 @@ and won't run anything unexpected.
| **VALIDATE_TYPESCRIPT_STANDARD** | `true` | Flag to enable or disable the linting process of the Typescript language. (Utilizing: standard) |
| **VALIDATE_XML** | `true` | Flag to enable or disable the linting process of the XML language. |
| **VALIDATE_YAML** | `true` | Flag to enable or disable the linting process of the YAML language. |
| **YAML_CONFIG_FILE** | `.yaml-lint.yml` | Filename for [Yamllint configuration](https://yamllint.readthedocs.io/en/stable/configuration.html) (ex: `.yaml-lint.yml`, `.yamllint.yml`) |

### Template rules files
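The new README rows document the Lua, flake8, and PHPStan toggles alongside the existing VALIDATE_* flags. The same variables can be passed straight to the container for a local run; a sketch assuming the RUN_LOCAL workflow described elsewhere in the docs (codebase mounted at `/tmp/lint`, which is also the path the TAP fixtures above reference):

```bash
docker run --rm \
  -e RUN_LOCAL=true \
  -e VALIDATE_LUA=true \
  -e VALIDATE_PYTHON_FLAKE8=true \
  -e VALIDATE_PHP_PHPSTAN=true \
  -v "$PWD":/tmp/lint \
  github/super-linter
```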

2 TEMPLATES/.flake8 Normal file
@@ -0,0 +1,2 @@
[flake8]
max-line-length = 120

1 TEMPLATES/.luacheckrc Normal file
@@ -0,0 +1 @@
--std max

2 TEMPLATES/phpstan.neon Normal file
@@ -0,0 +1,2 @@
parameters:
level: 5

1 dependencies/Gemfile vendored
@@ -8,3 +8,4 @@ gem "rubocop", "~> 0.82.0"
gem "rubocop-github", "~> 0.16.0"
gem "rubocop-performance", "~>1.7.1"
gem "rubocop-rails", "~> 2.5"
gem "rubocop-rspec", "~> 1.41.0"

3 dependencies/Gemfile.lock vendored
@@ -37,6 +37,8 @@ GEM
activesupport (>= 4.2.0)
rack (>= 1.1)
rubocop (>= 0.82.0)
rubocop-rspec (1.41.0)
rubocop (>= 0.68.1)
ruby-progressbar (1.10.1)
thread_safe (0.3.6)
tzinfo (1.2.7)
@@ -52,6 +54,7 @@ DEPENDENCIES
rubocop-github (~> 0.16.0)
rubocop-performance (~> 1.7.1)
rubocop-rails (~> 2.5)
rubocop-rspec (~> 1.41.0)

BUNDLED WITH
2.1.4

1 dependencies/Pipfile vendored
@@ -11,6 +11,7 @@ pylint = "*"
yq = "*"
cfn-lint = "*"
terrascan = "*"
flake8 = "*"

[requires]
python_version = "3.8"

35 dependencies/Pipfile.lock generated vendored
@@ -1,7 +1,7 @@
{
"_meta": {
"hash": {
"sha256": "c62b95247cb67b10711da0012966cc031d5a9ec08885a45736c0ee77bba3a844"
"sha256": "f8ea3853c4bb4533103043533d08982436551678c827f59809c94bf0ee54a187"
},
"pipfile-spec": 6,
"requires": {
@@ -28,6 +28,7 @@
"sha256:2f4078c2a41bf377eea06d71c9d2ba4eb8f6b1af2135bec27bbbb7d8f12bb703",
"sha256:bc58d83eb610252fd8de6363e39d4f1d0619c894b0ed24603b881c02e64c7386"
],
"markers": "python_version >= '3.5'",
"version": "==2.4.2"
},
"attrs": {
@@ -35,6 +36,7 @@
"sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c",
"sha256:f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==19.3.0"
},
"aws-sam-translator": {
@@ -80,13 +82,23 @@
"sha256:9e4d7ecfc600058e07ba661411a2b7de2fd0fafa17d1a7f7361cd47b1175c827",
"sha256:a2aeea129088da402665e92e0b25b04b073c04b2dce4ab65caaa38b7ce2e1a99"
],
"markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2'",
"version": "==0.15.2"
},
"flake8": {
"hashes": [
"sha256:15e351d19611c887e482fb960eae4d44845013cc142d42896e9862f775d8cf5c",
"sha256:f04b9fcbac03b0a3e58c0ab3a0ecc462e023a9faf046d57794184028123aa208"
],
"index": "pypi",
"version": "==3.8.3"
},
"isort": {
"hashes": [
"sha256:54da7e92468955c4fceacd0c86bd0ec997b0e1ee80d97f67c35a78b719dccab1",
"sha256:6e811fcb295968434526407adb8796944f1988c5b65e8139058f2014cbe100fd"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==4.3.21"
},
"jmespath": {
@@ -94,6 +106,7 @@
"sha256:b85d0567b8666149a93172712e68920734333c0ce7e89b78b3e987f71e5ed4f9",
"sha256:cdf6525904cc597730141d61b36f2e4b8ecc257c420fa2f4549bac2c2d0cb72f"
],
"markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2'",
"version": "==0.10.0"
},
"jsonpatch": {
@@ -109,6 +122,7 @@
"sha256:c192ba86648e05fdae4f08a17ec25180a9aef5008d973407b581798a83975362",
"sha256:ff379fa021d1b81ab539f5ec467c7745beb1a5671463f9dcc2b2d458bd361c1e"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==2.0"
},
"jsonschema": {
@@ -148,6 +162,7 @@
"sha256:efa1909120ce98bbb3777e8b6f92237f5d5c8ea6758efea36a473e1d38f7d3e4",
"sha256:f3900e8a5de27447acbf900b4750b0ddfd7ec1ea7fbaf11dfa911141bc522af0"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==1.4.3"
},
"mccabe": {
@@ -172,6 +187,22 @@
],
"version": "==0.8.0"
},
"pycodestyle": {
"hashes": [
"sha256:2295e7b2f6b5bd100585ebcb1f616591b652db8a741695b3d8f5d28bdc934367",
"sha256:c58a7d2815e0e8d7972bf1803331fb0152f867bd89adf8a01dfd55085434192e"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==2.6.0"
},
"pyflakes": {
"hashes": [
"sha256:0d94e0e05a19e57a99444b6ddcf9a6eb2e5c68d3ca1e98e90707af8152c90a92",
"sha256:35b2d75ee967ea93b55750aa9edbbf72813e06a66ba54438df2cfac9e3c27fc8"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==2.2.0"
},
"pyhcl": {
"hashes": [
"sha256:2d9b9dcdf1023d812bfed561ba72c99104c5b3f52e558d595130a44ce081b003"
@@ -197,6 +228,7 @@
"sha256:73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c",
"sha256:75bb3f31ea686f1197762692a9ee6a7550b59fc6ca3a1f4b5d7e32fb98e2da2a"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2'",
"version": "==2.8.1"
},
"pyyaml": {
@@ -228,6 +260,7 @@
"sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259",
"sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2'",
"version": "==1.15.0"
},
"terrascan": {

258 dependencies/package-lock.json generated vendored
@@ -3,6 +3,20 @@
"requires": true,
"lockfileVersion": 1,
"dependencies": {
"@amplitude/node": {
"version": "0.3.3",
"resolved": "https://registry.npmjs.org/@amplitude/node/-/node-0.3.3.tgz",
"integrity": "sha512-Uzg4MRAuD053Ex67Iu2lm2GovnVte1uKI3q7CXlMCYZ9ylZmAkPbTnjg9OVyD4f+IiUfgK4p3bE7r9p7jqSDLA==",
"requires": {
"@amplitude/types": "^0.3.2",
"tslib": "^1.9.3"
}
},
"@amplitude/types": {
"version": "0.3.2",
"resolved": "https://registry.npmjs.org/@amplitude/types/-/types-0.3.2.tgz",
"integrity": "sha512-7+m7nhJMFGbpsppOUsCH8f4FOFyAxgKFuXkKknU/LP2CMYVjWEIoLTKKgaJPc2c8wXaK5KPXVetb8VeiGbuaGg=="
},
"@babel/code-frame": {
"version": "7.10.4",
"resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.10.4.tgz",
@@ -499,11 +513,11 @@
"integrity": "sha512-/tiJyrc0GPcsReHzgC0SXwOmoPjLqYe01W7dLYB0yasQXMbcRee+ZIk+g8MIQhoBS8fPoBQO3Y93+aeBrI93Ug=="
},
"@typescript-eslint/eslint-plugin": {
"version": "3.7.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-3.7.0.tgz",
"integrity": "sha512-4OEcPON3QIx0ntsuiuFP/TkldmBGXf0uKxPQlGtS/W2F3ndYm8Vgdpj/woPJkzUc65gd3iR+qi3K8SDQP/obFg==",
"version": "3.7.1",
"resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-3.7.1.tgz",
"integrity": "sha512-3DB9JDYkMrc8Au00rGFiJLK2Ja9CoMP6Ut0sHsXp3ZtSugjNxvSSHTnKLfo4o+QmjYBJqEznDqsG1zj4F2xnsg==",
"requires": {
"@typescript-eslint/experimental-utils": "3.7.0",
"@typescript-eslint/experimental-utils": "3.7.1",
"debug": "^4.1.1",
"functional-red-black-tree": "^1.0.1",
"regexpp": "^3.0.0",
@@ -512,41 +526,53 @@
}
},
"@typescript-eslint/experimental-utils": {
"version": "3.7.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/experimental-utils/-/experimental-utils-3.7.0.tgz",
"integrity": "sha512-xpfXXAfZqhhqs5RPQBfAFrWDHoNxD5+sVB5A46TF58Bq1hRfVROrWHcQHHUM9aCBdy9+cwATcvCbRg8aIRbaHQ==",
"version": "3.7.1",
"resolved": "https://registry.npmjs.org/@typescript-eslint/experimental-utils/-/experimental-utils-3.7.1.tgz",
"integrity": "sha512-TqE97pv7HrqWcGJbLbZt1v59tcqsSVpWTOf1AqrWK7n8nok2sGgVtYRuGXeNeLw3wXlLEbY1MKP3saB2HsO/Ng==",
"requires": {
"@types/json-schema": "^7.0.3",
"@typescript-eslint/types": "3.7.0",
"@typescript-eslint/typescript-estree": "3.7.0",
"@typescript-eslint/types": "3.7.1",
"@typescript-eslint/typescript-estree": "3.7.1",
"eslint-scope": "^5.0.0",
"eslint-utils": "^2.0.0"
}
},
"@typescript-eslint/parser": {
"version": "3.7.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-3.7.0.tgz",
"integrity": "sha512-2LZauVUt7jAWkcIW7djUc3kyW+fSarNEuM3RF2JdLHR9BfX/nDEnyA4/uWz0wseoWVZbDXDF7iF9Jc342flNqQ==",
"version": "3.7.1",
"resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-3.7.1.tgz",
"integrity": "sha512-W4QV/gXvfIsccN8225784LNOorcm7ch68Fi3V4Wg7gmkWSQRKevO4RrRqWo6N/Z/myK1QAiGgeaXN57m+R/8iQ==",
"requires": {
"@types/eslint-visitor-keys": "^1.0.0",
"@typescript-eslint/experimental-utils": "3.7.0",
"@typescript-eslint/types": "3.7.0",
"@typescript-eslint/typescript-estree": "3.7.0",
"@typescript-eslint/experimental-utils": "3.7.1",
"@typescript-eslint/types": "3.7.1",
"@typescript-eslint/typescript-estree": "3.7.1",
"eslint-visitor-keys": "^1.1.0"
},
"dependencies": {
"@typescript-eslint/experimental-utils": {
"version": "3.7.1",
"resolved": "https://registry.npmjs.org/@typescript-eslint/experimental-utils/-/experimental-utils-3.7.1.tgz",
"integrity": "sha512-TqE97pv7HrqWcGJbLbZt1v59tcqsSVpWTOf1AqrWK7n8nok2sGgVtYRuGXeNeLw3wXlLEbY1MKP3saB2HsO/Ng==",
"requires": {
"@types/json-schema": "^7.0.3",
"@typescript-eslint/types": "3.7.1",
"@typescript-eslint/typescript-estree": "3.7.1",
"eslint-scope": "^5.0.0",
"eslint-utils": "^2.0.0"
}
},
"@typescript-eslint/types": {
"version": "3.7.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-3.7.0.tgz",
"integrity": "sha512-reCaK+hyKkKF+itoylAnLzFeNYAEktB0XVfSQvf0gcVgpz1l49Lt6Vo9x4MVCCxiDydA0iLAjTF/ODH0pbfnpg=="
"version": "3.7.1",
"resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-3.7.1.tgz",
"integrity": "sha512-PZe8twm5Z4b61jt7GAQDor6KiMhgPgf4XmUb9zdrwTbgtC/Sj29gXP1dws9yEn4+aJeyXrjsD9XN7AWFhmnUfg=="
},
"@typescript-eslint/typescript-estree": {
"version": "3.7.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-3.7.0.tgz",
"integrity": "sha512-xr5oobkYRebejlACGr1TJ0Z/r0a2/HUf0SXqPvlgUMwiMqOCu/J+/Dr9U3T0IxpE5oLFSkqMx1FE/dKaZ8KsOQ==",
"version": "3.7.1",
"resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-3.7.1.tgz",
"integrity": "sha512-m97vNZkI08dunYOr2lVZOHoyfpqRs0KDpd6qkGaIcLGhQ2WPtgHOd/eVbsJZ0VYCQvupKrObAGTOvk3tfpybYA==",
"requires": {
"@typescript-eslint/types": "3.7.0",
|
||||
"@typescript-eslint/visitor-keys": "3.7.0",
|
||||
"@typescript-eslint/types": "3.7.1",
|
||||
"@typescript-eslint/visitor-keys": "3.7.1",
|
||||
"debug": "^4.1.1",
|
||||
"glob": "^7.1.6",
|
||||
"is-glob": "^4.0.1",
|
||||
|
@@ -556,9 +582,39 @@
|
|||
}
|
||||
},
|
||||
"@typescript-eslint/visitor-keys": {
|
||||
"version": "3.7.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-3.7.0.tgz",
|
||||
"integrity": "sha512-k5PiZdB4vklUpUX4NBncn5RBKty8G3ihTY+hqJsCdMuD0v4jofI5xuqwnVcWxfv6iTm2P/dfEa2wMUnsUY8ODw==",
|
||||
"version": "3.7.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-3.7.1.tgz",
|
||||
"integrity": "sha512-xn22sQbEya+Utj2IqJHGLA3i1jDzR43RzWupxojbSWnj3nnPLavaQmWe5utw03CwYao3r00qzXfgJMGNkrzrAA==",
|
||||
"requires": {
|
||||
"eslint-visitor-keys": "^1.1.0"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"@typescript-eslint/types": {
|
||||
"version": "3.7.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-3.7.1.tgz",
|
||||
"integrity": "sha512-PZe8twm5Z4b61jt7GAQDor6KiMhgPgf4XmUb9zdrwTbgtC/Sj29gXP1dws9yEn4+aJeyXrjsD9XN7AWFhmnUfg=="
|
||||
},
|
||||
"@typescript-eslint/typescript-estree": {
|
||||
"version": "3.7.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-3.7.1.tgz",
|
||||
"integrity": "sha512-m97vNZkI08dunYOr2lVZOHoyfpqRs0KDpd6qkGaIcLGhQ2WPtgHOd/eVbsJZ0VYCQvupKrObAGTOvk3tfpybYA==",
|
||||
"requires": {
|
||||
"@typescript-eslint/types": "3.7.1",
|
||||
"@typescript-eslint/visitor-keys": "3.7.1",
|
||||
"debug": "^4.1.1",
|
||||
"glob": "^7.1.6",
|
||||
"is-glob": "^4.0.1",
|
||||
"lodash": "^4.17.15",
|
||||
"semver": "^7.3.2",
|
||||
"tsutils": "^3.17.1"
|
||||
}
|
||||
},
|
||||
"@typescript-eslint/visitor-keys": {
|
||||
"version": "3.7.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-3.7.1.tgz",
|
||||
"integrity": "sha512-xn22sQbEya+Utj2IqJHGLA3i1jDzR43RzWupxojbSWnj3nnPLavaQmWe5utw03CwYao3r00qzXfgJMGNkrzrAA==",
|
||||
"requires": {
|
||||
"eslint-visitor-keys": "^1.1.0"
|
||||
}
|
||||
|
@@ -742,6 +798,14 @@
|
|||
"resolved": "https://registry.npmjs.org/aws4/-/aws4-1.10.0.tgz",
|
||||
"integrity": "sha512-3YDiu347mtVtjpyV3u5kVqQLP242c06zwDOgpeRnybmXlYYsLbtTrUBUm8i8srONt+FWobl5aibnU1030PeeuA=="
|
||||
},
|
||||
"axios": {
|
||||
"version": "0.19.2",
|
||||
"resolved": "https://registry.npmjs.org/axios/-/axios-0.19.2.tgz",
|
||||
"integrity": "sha512-fjgm5MvRHLhx+osE2xoekY70AhARk3a6hkN+3Io1jc00jtquGvxYlKlsFUhmUET0V5te6CcZI7lcv2Ym61mjHA==",
|
||||
"requires": {
|
||||
"follow-redirects": "1.5.10"
|
||||
}
|
||||
},
|
||||
"babel-eslint": {
|
||||
"version": "10.1.0",
|
||||
"resolved": "https://registry.npmjs.org/babel-eslint/-/babel-eslint-10.1.0.tgz",
|
||||
|
@@ -950,6 +1014,15 @@
|
|||
"restore-cursor": "^3.1.0"
|
||||
}
|
||||
},
|
||||
"cli-progress": {
|
||||
"version": "3.8.2",
|
||||
"resolved": "https://registry.npmjs.org/cli-progress/-/cli-progress-3.8.2.tgz",
|
||||
"integrity": "sha512-qRwBxLldMSfxB+YGFgNRaj5vyyHe1yMpVeDL79c+7puGujdKJHQHydgqXDcrkvQgJ5U/d3lpf6vffSoVVUftVQ==",
|
||||
"requires": {
|
||||
"colors": "^1.1.2",
|
||||
"string-width": "^4.2.0"
|
||||
}
|
||||
},
|
||||
"cli-width": {
|
||||
"version": "3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/cli-width/-/cli-width-3.0.0.tgz",
|
||||
|
@@ -1159,6 +1232,11 @@
|
|||
"resolved": "https://registry.npmjs.org/decimal.js/-/decimal.js-10.2.0.tgz",
|
||||
"integrity": "sha512-vDPw+rDgn3bZe1+F/pyEwb1oMG2XTlRVgAa6B4KccTEpYgF8w6eQllVbQcfIJnZyvzFtFpxnpGtx8dd7DJp/Rw=="
|
||||
},
|
||||
"decode-html": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/decode-html/-/decode-html-2.0.0.tgz",
|
||||
"integrity": "sha1-fQqIfORCgOYJeKcH67f4CB/WHqo="
|
||||
},
|
||||
"deep-extend": {
|
||||
"version": "0.5.1",
|
||||
"resolved": "https://registry.npmjs.org/deep-extend/-/deep-extend-0.5.1.tgz",
|
||||
|
@@ -1847,9 +1925,9 @@
|
|||
}
|
||||
},
|
||||
"eslint-plugin-jest": {
|
||||
"version": "23.18.0",
|
||||
"resolved": "https://registry.npmjs.org/eslint-plugin-jest/-/eslint-plugin-jest-23.18.0.tgz",
|
||||
"integrity": "sha512-wLPM/Rm1SGhxrFQ2TKM/BYsYPhn7ch6ZEK92S2o/vGkAAnDXM0I4nTIo745RIX+VlCRMFgBuJEax6XfTHMdeKg==",
|
||||
"version": "23.20.0",
|
||||
"resolved": "https://registry.npmjs.org/eslint-plugin-jest/-/eslint-plugin-jest-23.20.0.tgz",
|
||||
"integrity": "sha512-+6BGQt85OREevBDWCvhqj1yYA4+BFK4XnRZSGJionuEYmcglMZYLNNBBemwzbqUAckURaHdJSBcjHPyrtypZOw==",
|
||||
"requires": {
|
||||
"@typescript-eslint/experimental-utils": "^2.5.0"
|
||||
},
|
||||
|
@@ -2117,6 +2195,27 @@
|
|||
"to-regex-range": "^5.0.1"
|
||||
}
|
||||
},
|
||||
"find-java-home": {
|
||||
"version": "1.1.0",
|
||||
"resolved": "https://registry.npmjs.org/find-java-home/-/find-java-home-1.1.0.tgz",
|
||||
"integrity": "sha512-bSTCKNZ193UM/+ZZoNDzICAEHcVywovkhsWCkZALjCvRXQ+zXTe/XATrrP4CpxkaP6YFhQJOpyRpH0P2U/woDA==",
|
||||
"requires": {
|
||||
"which": "~1.0.5",
|
||||
"winreg": "~1.2.2"
|
||||
},
|
||||
"dependencies": {
|
||||
"which": {
|
||||
"version": "1.0.9",
|
||||
"resolved": "https://registry.npmjs.org/which/-/which-1.0.9.tgz",
|
||||
"integrity": "sha1-RgwdoPgQED0DIam2M6+eV15kSG8="
|
||||
}
|
||||
}
|
||||
},
|
||||
"find-package-json": {
|
||||
"version": "1.2.0",
|
||||
"resolved": "https://registry.npmjs.org/find-package-json/-/find-package-json-1.2.0.tgz",
|
||||
"integrity": "sha512-+SOGcLGYDJHtyqHd87ysBhmaeQ95oWspDKnMXBrnQ9Eq4OkLNqejgoaD8xVWu6GPa0B6roa6KinCMEMcVeqONw=="
|
||||
},
|
||||
"find-root": {
|
||||
"version": "1.1.0",
|
||||
"resolved": "https://registry.npmjs.org/find-root/-/find-root-1.1.0.tgz",
|
||||
|
@@ -2146,6 +2245,29 @@
|
|||
"resolved": "https://registry.npmjs.org/flatted/-/flatted-2.0.2.tgz",
|
||||
"integrity": "sha512-r5wGx7YeOwNWNlCA0wQ86zKyDLMQr+/RB8xy74M4hTphfmjlijTSSXGuH8rnvKZnfT9i+75zmd8jcKdMR4O6jA=="
|
||||
},
|
||||
"follow-redirects": {
|
||||
"version": "1.5.10",
|
||||
"resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.5.10.tgz",
|
||||
"integrity": "sha512-0V5l4Cizzvqt5D44aTXbFZz+FtyXV1vrDN6qrelxtfYQKW0KO0W2T/hkE8xvGa/540LkZlkaUjO4ailYTFtHVQ==",
|
||||
"requires": {
|
||||
"debug": "=3.1.0"
|
||||
},
|
||||
"dependencies": {
|
||||
"debug": {
|
||||
"version": "3.1.0",
|
||||
"resolved": "https://registry.npmjs.org/debug/-/debug-3.1.0.tgz",
|
||||
"integrity": "sha512-OX8XqP7/1a9cqkxYw2yXss15f26NKWBpDXQd0/uK/KPqdQhxbPa994hnzjcE2VqQpDslf55723cKPUOGSmMY3g==",
|
||||
"requires": {
|
||||
"ms": "2.0.0"
|
||||
}
|
||||
},
|
||||
"ms": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz",
|
||||
"integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g="
|
||||
}
|
||||
}
|
||||
},
|
||||
"forever-agent": {
|
||||
"version": "0.6.1",
|
||||
"resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz",
|
||||
|
@@ -2161,6 +2283,16 @@
|
|||
"mime-types": "^2.1.12"
|
||||
}
|
||||
},
|
||||
"fs-extra": {
|
||||
"version": "8.1.0",
|
||||
"resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-8.1.0.tgz",
|
||||
"integrity": "sha512-yhlQgA6mnOJUKOsRUFsgJdQCvkKhcz8tlZG5HBQfReYZy46OwLcY+Zia0mtdHsOo9y/hP+CxMN0TU9QxoOtG4g==",
|
||||
"requires": {
|
||||
"graceful-fs": "^4.2.0",
|
||||
"jsonfile": "^4.0.0",
|
||||
"universalify": "^0.1.0"
|
||||
}
|
||||
},
|
||||
"fs.realpath": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz",
|
||||
|
@@ -2934,6 +3066,14 @@
|
|||
"resolved": "https://registry.npmjs.org/jsonc-parser/-/jsonc-parser-2.2.1.tgz",
|
||||
"integrity": "sha512-o6/yDBYccGvTz1+QFevz6l6OBZ2+fMVu2JZ9CIhzsYRX4mjaK5IyX9eldUdCmga16zlgQxyrj5pt9kzuj2C02w=="
|
||||
},
|
||||
"jsonfile": {
|
||||
"version": "4.0.0",
|
||||
"resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-4.0.0.tgz",
|
||||
"integrity": "sha1-h3Gq4HmbZAdrdmQPygWPnBDjPss=",
|
||||
"requires": {
|
||||
"graceful-fs": "^4.1.6"
|
||||
}
|
||||
},
|
||||
"jsonlint": {
|
||||
"version": "1.6.3",
|
||||
"resolved": "https://registry.npmjs.org/jsonlint/-/jsonlint-1.6.3.tgz",
|
||||
|
@@ -3472,6 +3612,37 @@
|
|||
"resolved": "https://registry.npmjs.org/normalize-selector/-/normalize-selector-0.2.0.tgz",
|
||||
"integrity": "sha1-0LFF62kRicY6eNIB3E/bEpPvDAM="
|
||||
},
|
||||
"npm-groovy-lint": {
|
||||
"version": "5.7.0",
|
||||
"resolved": "https://registry.npmjs.org/npm-groovy-lint/-/npm-groovy-lint-5.7.0.tgz",
|
||||
"integrity": "sha512-Cq/ncgmNMI6USeazp2gingZj73ia6M+rpR5LfBK8ATSEdRZuIsnKTIGTuqMc/hrZyU4oMN0C/PYQj4DTO+TRFQ==",
|
||||
"requires": {
|
||||
"@amplitude/node": "^0.3.3",
|
||||
"ansi-colors": "^4.1.1",
|
||||
"axios": "^0.19.2",
|
||||
"cli-progress": "^3.6.0",
|
||||
"debug": "^4.1.1",
|
||||
"decode-html": "^2.0.0",
|
||||
"find-java-home": "^1.1.0",
|
||||
"find-package-json": "^1.2.0",
|
||||
"fs-extra": "^8.1.0",
|
||||
"glob": "^7.1.6",
|
||||
"import-fresh": "^3.2.1",
|
||||
"ip": "^1.1.5",
|
||||
"optionator": "^0.8.3",
|
||||
"semver": "^7.1.3",
|
||||
"strip-json-comments": "^3.0.1",
|
||||
"uuid": "^8.2.0",
|
||||
"xml2js": "^0.4.23"
|
||||
},
|
||||
"dependencies": {
|
||||
"uuid": {
|
||||
"version": "8.2.0",
|
||||
"resolved": "https://registry.npmjs.org/uuid/-/uuid-8.2.0.tgz",
|
||||
"integrity": "sha512-CYpGiFTUrmI6OBMkAdjSDM0k5h8SkkiTP4WAjQgDgNB1S3Ou9VBEvr6q0Kv2H1mMk7IWfxYGpMH5sd5AvcIV2Q=="
|
||||
}
|
||||
}
|
||||
},
|
||||
"num2fraction": {
|
||||
"version": "1.2.2",
|
||||
"resolved": "https://registry.npmjs.org/num2fraction/-/num2fraction-1.2.2.tgz",
|
||||
|
@@ -4445,6 +4616,11 @@
|
|||
"resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz",
|
||||
"integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg=="
|
||||
},
|
||||
"sax": {
|
||||
"version": "1.2.4",
|
||||
"resolved": "https://registry.npmjs.org/sax/-/sax-1.2.4.tgz",
|
||||
"integrity": "sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw=="
|
||||
},
|
||||
"semver": {
|
||||
"version": "7.3.2",
|
||||
"resolved": "https://registry.npmjs.org/semver/-/semver-7.3.2.tgz",
|
||||
|
@@ -5301,6 +5477,11 @@
|
|||
"unist-util-is": "^4.0.0"
|
||||
}
|
||||
},
|
||||
"universalify": {
|
||||
"version": "0.1.2",
|
||||
"resolved": "https://registry.npmjs.org/universalify/-/universalify-0.1.2.tgz",
|
||||
"integrity": "sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg=="
|
||||
},
|
||||
"unpipe": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz",
|
||||
|
@@ -5397,6 +5578,11 @@
|
|||
"resolved": "https://registry.npmjs.org/which-module/-/which-module-2.0.0.tgz",
|
||||
"integrity": "sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho="
|
||||
},
|
||||
"winreg": {
|
||||
"version": "1.2.4",
|
||||
"resolved": "https://registry.npmjs.org/winreg/-/winreg-1.2.4.tgz",
|
||||
"integrity": "sha1-ugZWKbepJRMOFXeRCM9UCZDpjRs="
|
||||
},
|
||||
"wolfy87-eventemitter": {
|
||||
"version": "5.2.9",
|
||||
"resolved": "https://registry.npmjs.org/wolfy87-eventemitter/-/wolfy87-eventemitter-5.2.9.tgz",
|
||||
|
@@ -5446,6 +5632,20 @@
|
|||
"resolved": "https://registry.npmjs.org/xml/-/xml-1.0.1.tgz",
|
||||
"integrity": "sha1-eLpyAgApxbyHuKgaPPzXS0ovweU="
|
||||
},
|
||||
"xml2js": {
|
||||
"version": "0.4.23",
|
||||
"resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.4.23.tgz",
|
||||
"integrity": "sha512-ySPiMjM0+pLDftHgXY4By0uswI3SPKLDw/i3UXbnO8M/p28zqexCUoPmQFrYD+/1BzhGJSs2i1ERWKJAtiLrug==",
|
||||
"requires": {
|
||||
"sax": ">=0.6.0",
|
||||
"xmlbuilder": "~11.0.0"
|
||||
}
|
||||
},
|
||||
"xmlbuilder": {
|
||||
"version": "11.0.1",
|
||||
"resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-11.0.1.tgz",
|
||||
"integrity": "sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA=="
|
||||
},
|
||||
"xregexp": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/xregexp/-/xregexp-2.0.0.tgz",
|
||||
|
dependencies/package.json (vendored): 6 changes
@@ -3,13 +3,13 @@
"dependencies": {
"@coffeelint/cli": "^3.2.10",
"@stoplight/spectral": "^5.4.0",
"@typescript-eslint/eslint-plugin": "^3.7.0",
"@typescript-eslint/parser": "^3.7.0",
"@typescript-eslint/eslint-plugin": "^3.7.1",
"@typescript-eslint/parser": "^3.7.1",
"babel-eslint": "^10.1.0",
"dockerfilelint": "^1.5.0",
"eslint": "^7.5.0",
"eslint-config-prettier": "^6.11.0",
"eslint-plugin-jest": "^23.18.0",
"eslint-plugin-jest": "^23.20.0",
"htmlhint": "^0.14.1",
"jsonlint": "^1.6.3",
"markdownlint-cli": "^0.23.2",
(File diff suppressed because it is too large)
@@ -16,11 +16,8 @@ function BuildFileList() {
################
# print header #
################
if [[ ${ACTIONS_RUNNER_DEBUG} == "true" ]]; then
echo ""
echo "----------------------------------------------"
echo "Pulling in code history and branches..."
fi
debug "----------------------------------------------"
debug "Pulling in code history and branches..."

#################################################################################
# Switch codebase back to the default branch to get a list of all files changed #
@@ -40,19 +37,15 @@ function BuildFileList() {
##############################
if [ ${ERROR_CODE} -ne 0 ]; then
# Error
echo "Failed to switch to ${DEFAULT_BRANCH} branch to get files changed!"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${SWITCH_CMD}]${NC}"
exit 1
info "Failed to switch to ${DEFAULT_BRANCH} branch to get files changed!"
fatal "[${SWITCH_CMD}]"
fi

################
# print header #
################
if [[ ${ACTIONS_RUNNER_DEBUG} == "true" ]]; then
echo ""
echo "----------------------------------------------"
echo "Generating Diff with:[git diff --name-only '${DEFAULT_BRANCH}..${GITHUB_SHA}' --diff-filter=d]"
fi
debug "----------------------------------------------"
debug "Generating Diff with:[git diff --name-only '${DEFAULT_BRANCH}..${GITHUB_SHA}' --diff-filter=d]"

#################################################
# Get the Array of files changed in the commits #
@@ -69,17 +62,15 @@ function BuildFileList() {
##############################
if [ ${ERROR_CODE} -ne 0 ]; then
# Error
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to gain a list of all files changed!${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${RAW_FILE_ARRAY[*]}]${NC}"
exit 1
error "Failed to gain a list of all files changed!"
fatal "[${RAW_FILE_ARRAY[*]}]"
fi

################################################
# Iterate through the array of all files found #
################################################
echo ""
echo "----------------------------------------------"
echo "Files that have been modified in the commit(s):"
info "----------------------------------------------"
info "Files that have been modified in the commit(s):"
for FILE in "${RAW_FILE_ARRAY[@]}"; do
###########################
# Get the files extension #
@@ -92,12 +83,12 @@ function BuildFileList() {
##############
# Print file #
##############
echo "File:[${FILE}], File_type:[${FILE_TYPE}]"
info "File:[${FILE}], File_type:[${FILE_TYPE}]"

#########
# DEBUG #
#########
#echo "FILE_TYPE:[${FILE_TYPE}]"
debug "FILE_TYPE:[${FILE_TYPE}]"

################################
# Get the CLOUDFORMATION files #
@@ -106,7 +97,7 @@ function BuildFileList() {
################################
# Append the file to the array #
################################
FILE_ARRAY_YML+=("${FILE}")
FILE_ARRAY_YAML+=("${FILE}")
##########################################################
# Set the READ_ONLY_CHANGE_FLAG since this could be exec #
##########################################################
@@ -221,9 +212,9 @@ function BuildFileList() {
######################
# Get the RAKU files #
######################
elif [ "${FILE_TYPE}" == "raku" ] || [ "${FILE_TYPE}" == "rakumod" ] \
|| [ "${FILE_TYPE}" == "rakutest" ] || [ "${FILE_TYPE}" == "pm6" ] \
|| [ "${FILE_TYPE}" == "pl6" ] || [ "${FILE_TYPE}" == "p6" ] ; then
elif [ "${FILE_TYPE}" == "raku" ] || [ "${FILE_TYPE}" == "rakumod" ] ||
[ "${FILE_TYPE}" == "rakutest" ] || [ "${FILE_TYPE}" == "pm6" ] ||
[ "${FILE_TYPE}" == "pl6" ] || [ "${FILE_TYPE}" == "p6" ]; then
################################
# Append the file to the array #
################################
@@ -241,6 +232,7 @@ function BuildFileList() {
################################
FILE_ARRAY_PHP_BUILTIN+=("${FILE}")
FILE_ARRAY_PHP_PHPCS+=("${FILE}")
FILE_ARRAY_PHP_PHPSTAN+=("${FILE}")
FILE_ARRAY_PHP_PSALM+=("${FILE}")
##########################################################
# Set the READ_ONLY_CHANGE_FLAG since this could be exec #
@@ -265,7 +257,8 @@ function BuildFileList() {
################################
# Append the file to the array #
################################
FILE_ARRAY_PYTHON+=("${FILE}")
FILE_ARRAY_PYTHON_PYLINT+=("${FILE}")
FILE_ARRAY_PYTHON_FLAKE8+=("${FILE}")
##########################################################
# Set the READ_ONLY_CHANGE_FLAG since this could be exec #
##########################################################
@@ -395,6 +388,15 @@ function BuildFileList() {
# Set the READ_ONLY_CHANGE_FLAG since this could be exec #
##########################################################
READ_ONLY_CHANGE_FLAG=1
elif [ "$FILE_TYPE" == "lua" ]; then
################################
# Append the file to the array #
################################
FILE_ARRAY_LUA+=("$FILE")
##########################################################
# Set the READ_ONLY_CHANGE_FLAG since this could be exec #
##########################################################
READ_ONLY_CHANGE_FLAG=1
############################
# Get the Protocol Buffers files #
############################
@@ -465,8 +467,8 @@ function BuildFileList() {
#######################
# It is a bash script #
#######################
echo -e "${NC}${F[Y]}WARN!${NC} Found bash script without extension:[.sh]${NC}"
echo "Please update file with proper extensions."
warn "Found bash script without extension:[.sh]"
info "Please update file with proper extensions."
################################
# Append the file to the array #
################################
@@ -479,8 +481,8 @@ function BuildFileList() {
#######################
# It is a Ruby script #
#######################
echo -e "${NC}${F[Y]}WARN!${NC} Found ruby script without extension:[.rb]${NC}"
echo "Please update file with proper extensions."
warn "Found ruby script without extension:[.rb]"
info "Please update file with proper extensions."
################################
# Append the file to the array #
################################
@@ -493,7 +495,7 @@ function BuildFileList() {
############################
# Extension was not found! #
############################
echo -e "${NC}${F[Y]} - WARN!${NC} Failed to get filetype for:[${FILE}]!${NC}"
warn "Failed to get filetype for:[${FILE}]!"
##########################################################
# Set the READ_ONLY_CHANGE_FLAG since this could be exec #
##########################################################
@@ -502,7 +504,7 @@ function BuildFileList() {
fi
done

echo ${READ_ONLY_CHANGE_FLAG} > /dev/null 2>&1 || true # Workaround SC2034
export READ_ONLY_CHANGE_FLAG # Workaround SC2034

#########################################
# Need to switch back to branch of code #
@@ -519,15 +521,13 @@ function BuildFileList() {
##############################
if [ ${ERROR_CODE} -ne 0 ]; then
# Error
echo "Failed to switch back to branch!"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${SWITCH2_CMD}]${NC}"
exit 1
error "Failed to switch back to branch!"
fatal "[${SWITCH2_CMD}]"
fi

################
# Footer print #
################
echo ""
echo "----------------------------------------------"
echo -e "${NC}${F[B]}Successfully gathered list of files...${NC}"
info "----------------------------------------------"
info "Successfully gathered list of files..."
}
lib/linter.sh: 359 changes
@@ -10,7 +10,7 @@
|
|||
# Source Function Files #
|
||||
#########################
|
||||
# shellcheck source=/dev/null
|
||||
source /action/lib/termColors.sh # Source the function script(s)
|
||||
source /action/lib/log.sh # Source the function script(s)
|
||||
# shellcheck source=/dev/null
|
||||
source /action/lib/buildFileList.sh # Source the function script(s)
|
||||
# shellcheck source=/dev/null
|
||||
|
@@ -23,6 +23,7 @@ source /action/lib/worker.sh # Source the function script(s)
|
|||
###########
|
||||
# Default Vars
|
||||
DEFAULT_RULES_LOCATION='/action/lib/.automation' # Default rules files location
|
||||
GITHUB_API_URL='https://api.github.com' # GitHub API root url
|
||||
# Ansible Vars
|
||||
ANSIBLE_FILE_NAME='.ansible-lint.yml' # Name of the file
|
||||
ANSIBLE_LINTER_RULES="${DEFAULT_RULES_LOCATION}/${ANSIBLE_FILE_NAME}" # Path to the Ansible lint rules
|
||||
|
@@ -62,6 +63,9 @@ JAVASCRIPT_LINTER_RULES="${DEFAULT_RULES_LOCATION}/${JAVASCRIPT_FILE_NAME}"
|
|||
JAVASCRIPT_STANDARD_LINTER_RULES='' # ENV string to pass when running js standard
|
||||
# Default linter path
|
||||
LINTER_RULES_PATH="${LINTER_RULES_PATH:-.github/linters}" # Linter Path Directory
|
||||
# Lua Vars
|
||||
LUA_FILE_NAME='.luacheckrc' # Name of the file
|
||||
LUA_LINTER_RULES="${DEFAULT_RULES_LOCATION}/${LUA_FILE_NAME}" # Path to the Lua lint rules
|
||||
# MD Vars
|
||||
MARKDOWN_FILE_NAME='.markdown-lint.yml' # Name of the file
|
||||
MARKDOWN_LINTER_RULES="${DEFAULT_RULES_LOCATION}/${MARKDOWN_FILE_NAME}" # Path to the markdown lint rules
|
||||
|
@@ -70,10 +74,22 @@ OPENAPI_FILE_NAME='.openapirc.yml'
OPENAPI_LINTER_RULES="${DEFAULT_RULES_LOCATION}/${OPENAPI_FILE_NAME}" # Path to the OpenAPI lint rules
# PHPCS Vars
PHP_PHPCS_FILE_NAME='phpcs.xml' # Name of the file
PHP_PHPCS_LINTER_RULES="${GITHUB_WORKSPACE}/${PHP_PHPCS_FILE_NAME}" # Path to the PHP CodeSniffer lint rules in the repository
if [ ! -f "$PHP_PHPCS_LINTER_RULES" ]; then
PHP_PHPCS_LINTER_RULES="${DEFAULT_RULES_LOCATION}/${PHP_PHPCS_FILE_NAME}" # Path to the PHP CodeSniffer lint rules
fi
# PHPStan Vars
PHP_PHPSTAN_FILE_NAME='phpstan.neon' # Name of the file
PHP_PHPSTAN_LINTER_RULES="${GITHUB_WORKSPACE}/${PHP_PHPSTAN_FILE_NAME}" # Path to the PHPStan lint rules in the repository
if [ ! -f "$PHP_PHPSTAN_LINTER_RULES" ]; then
PHP_PHPSTAN_LINTER_RULES="${DEFAULT_RULES_LOCATION}/${PHP_PHPSTAN_FILE_NAME}" # Path to the PHPStan lint rules
fi
# Psalm Vars
PHP_PSALM_FILE_NAME='psalm.xml' # Name of the file
PHP_PSALM_LINTER_RULES="${GITHUB_WORKSPACE}/${PHP_PSALM_FILE_NAME}" # Path to the Psalm lint rules in the repository
if [ ! -f "$PHP_PSALM_LINTER_RULES" ]; then
PHP_PSALM_LINTER_RULES="${DEFAULT_RULES_LOCATION}/${PHP_PSALM_FILE_NAME}" # Path to the Psalm lint rules
fi
# Powershell Vars
POWERSHELL_FILE_NAME='.powershell-psscriptanalyzer.psd1' # Name of the file
POWERSHELL_LINTER_RULES="${DEFAULT_RULES_LOCATION}/${POWERSHELL_FILE_NAME}" # Path to the Powershell lint rules
@@ -81,8 +97,10 @@ POWERSHELL_LINTER_RULES="${DEFAULT_RULES_LOCATION}/${POWERSHELL_FILE_NAME}"
|
|||
PROTOBUF_FILE_NAME='.protolintrc.yml' # Name of the file
|
||||
PROTOBUF_LINTER_RULES="${DEFAULT_RULES_LOCATION}/${PROTOBUF_FILE_NAME}" # Path to the Protocol Buffers lint rules
|
||||
# Python Vars
|
||||
PYTHON_FILE_NAME='.python-lint' # Name of the file
|
||||
PYTHON_LINTER_RULES="${DEFAULT_RULES_LOCATION}/${PYTHON_FILE_NAME}" # Path to the python lint rules
|
||||
PYTHON_PYLINT_FILE_NAME="${PYTHON_PYLINT_CONFIG_FILE:-.python-lint}" # Name of the file
|
||||
PYTHON_PYLINT_LINTER_RULES="${DEFAULT_RULES_LOCATION}/${PYTHON_PYLINT_FILE_NAME}" # Path to the python lint rules
|
||||
PYTHON_FLAKE8_FILE_NAME="${PYTHON_FLAKE8_CONFIG_FILE:-.flake8}" # Name of the file
|
||||
PYTHON_FLAKE8_LINTER_RULES="${DEFAULT_RULES_LOCATION}/${PYTHON_FLAKE8_FILE_NAME}" # Path to the python lint rules
|
||||
# Ruby Vars
|
||||
RUBY_FILE_NAME="${RUBY_CONFIG_FILE:-.ruby-lint.yml}" # Name of the file
|
||||
RUBY_LINTER_RULES="${DEFAULT_RULES_LOCATION}/${RUBY_FILE_NAME}" # Path to the ruby lint rules
|
||||
|
@@ -94,35 +112,33 @@ TYPESCRIPT_FILE_NAME="${TYPESCRIPT_ES_CONFIG_FILE:-.eslintrc.yml}"
|
|||
TYPESCRIPT_LINTER_RULES="${DEFAULT_RULES_LOCATION}/${TYPESCRIPT_FILE_NAME}" # Path to the Typescript lint rules
|
||||
TYPESCRIPT_STANDARD_LINTER_RULES='' # ENV string to pass when running js standard
|
||||
# YAML Vars
|
||||
YAML_FILE_NAME='.yaml-lint.yml' # Name of the file
|
||||
YAML_FILE_NAME="${YAML_CONFIG_FILE:-.yaml-lint.yml}" # Name of the file
|
||||
YAML_LINTER_RULES="${DEFAULT_RULES_LOCATION}/${YAML_FILE_NAME}" # Path to the yaml lint rules
|
||||
|
||||
#######################################
|
||||
# Linter array for information prints #
|
||||
#######################################
|
||||
LINTER_ARRAY=('ansible-lint' 'arm-ttk' 'asl-validator' 'cfn-lint' 'clj-kondo'
|
||||
'coffeelint' 'dart' 'dockerfilelint' 'dotenv-linter' 'eslint' 'golangci-lint'
|
||||
'htmlhint' 'jsonlint' 'ktlint' 'markdownlint' 'npm-groovy-lint' 'perl'
|
||||
'protolint' 'pwsh' 'pylint' 'raku' 'rubocop' 'shellcheck' 'spectral'
|
||||
'standard' 'stylelint' 'terrascan' 'tflint' 'xmllint' 'yamllint')
|
||||
|
||||
LINTER_ARRAY=('ansible-lint' 'arm-ttk' 'asl-validator' 'cfn-lint' 'clj-kondo' 'coffeelint'
|
||||
'dart' 'dockerfilelint' 'dotenv-linter' 'eslint' 'flake8' 'golangci-lint' 'htmlhint'
|
||||
'jsonlint' 'ktlint' 'lua' 'markdownlint' 'npm-groovy-lint' 'perl' 'protolint' 'pwsh'
|
||||
'pylint' 'raku' 'rubocop' 'shellcheck' 'spectral' 'standard' 'stylelint' 'terrascan'
|
||||
'tflint' 'xmllint' 'yamllint')
|
||||
|
||||
#############################
|
||||
# Language array for prints #
|
||||
#############################
|
||||
LANGUAGE_ARRAY=('ANSIBLE' 'ARM' 'BASH' 'CLOUDFORMATION' 'CLOJURE' 'COFFEESCRIPT'
|
||||
'CSS' 'DART' 'DOCKER' 'ENV' 'GO' 'GROOVY' 'HTML' 'JAVASCRIPT_ES'
|
||||
'JAVASCRIPT_STANDARD' 'JSON' 'JSX' 'KOTLIN' 'MARKDOWN' 'OPENAPI'
|
||||
'PERL' 'PHP_BUILTIN' 'PHP_PHPCS' 'PHP_PSALM' 'POWERSHELL' 'PROTOBUF' 'PYTHON'
|
||||
LANGUAGE_ARRAY=('ANSIBLE' 'ARM' 'BASH' 'CLOUDFORMATION' 'CLOJURE' 'COFFEESCRIPT' 'CSS'
|
||||
'DART' 'DOCKER' 'ENV' 'GO' 'GROOVY' 'HTML' 'JAVASCRIPT_ES' 'JAVASCRIPT_STANDARD'
|
||||
'JSON' 'JSX' 'KOTLIN' 'LUA' 'MARKDOWN' 'OPENAPI' 'PERL' 'PHP_BUILTIN' 'PHP_PHPCS'
|
||||
'PHP_PHPSTAN' 'PHP_PSALM' 'POWERSHELL' 'PROTOBUF' 'PYTHON_PYLINT' 'PYTHON_FLAKE8'
|
||||
'RAKU' 'RUBY' 'STATES' 'TERRAFORM' 'TERRAFORM_TERRASCAN' 'TSX' 'TYPESCRIPT_ES'
|
||||
'TYPESCRIPT_STANDARD' 'XML' 'YML')
|
||||
'TYPESCRIPT_STANDARD' 'XML' 'YAML')
|
||||
|
||||
############################################
|
||||
# Array for all languages that were linted #
|
||||
############################################
|
||||
LINTED_LANGUAGES_ARRAY=() # Will be filled at run time with all languages that were linted
|
||||
|
||||
|
||||
###################
|
||||
# GitHub ENV Vars #
|
||||
###################
|
||||
|
@@ -135,6 +151,8 @@ GITHUB_RUN_ID="${GITHUB_RUN_ID}" # GitHub RUn ID t
|
|||
GITHUB_SHA="${GITHUB_SHA}" # GitHub sha from the commit
|
||||
GITHUB_TOKEN="${GITHUB_TOKEN}" # GitHub Token passed from environment
|
||||
GITHUB_WORKSPACE="${GITHUB_WORKSPACE}" # Github Workspace
|
||||
LOG_FILE="${LOG_FILE:-super-linter.log}" # Default log file name (located in GITHUB_WORKSPACE folder)
|
||||
LOG_LEVEL="${LOG_LEVEL:-VERBOSE}" # Default log level (VERBOSE, DEBUG, TRACE)
|
||||
MULTI_STATUS="${MULTI_STATUS:-true}" # Multiple status are created for each check ran
|
||||
TEST_CASE_RUN="${TEST_CASE_RUN}" # Boolean to validate only test cases
|
||||
VALIDATE_ALL_CODEBASE="${VALIDATE_ALL_CODEBASE}" # Boolean to validate all files
|
||||
|
@@ -157,14 +175,17 @@ VALIDATE_JAVASCRIPT_STANDARD="${VALIDATE_JAVASCRIPT_STANDARD}" # Boolean to vali
|
|||
VALIDATE_JSON="${VALIDATE_JSON}" # Boolean to validate language
|
||||
VALIDATE_JSX="${VALIDATE_JSX}" # Boolean to validate language
|
||||
VALIDATE_KOTLIN="${VALIDATE_KOTLIN}" # Boolean to validate language
|
||||
VALIDATE_LUA="${VALIDATE_LUA}" # Boolean to validate language
|
||||
VALIDATE_MARKDOWN="${VALIDATE_MD:-}" # Boolean to validate language
|
||||
VALIDATE_OPENAPI="${VALIDATE_OPENAPI}" # Boolean to validate language
|
||||
VALIDATE_PERL="${VALIDATE_PERL}" # Boolean to validate language
|
||||
VALIDATE_PHP_BUILTIN="${VALIDATE_PHP_BUILTIN}" # Boolean to validate language
|
||||
VALIDATE_PHP_PHPCS="${VALIDATE_PHP_PHPCS}" # Boolean to validate language
|
||||
VALIDATE_PHP_PHPSTAN="${VALIDATE_PHP_PHPSTAN}" # Boolean to validate language
|
||||
VALIDATE_PHP_PSALM="${VALIDATE_PHP_PSALM}" # Boolean to validate language
|
||||
VALIDATE_POWERSHELL="${VALIDATE_POWERSHELL}" # Boolean to validate language
|
||||
VALIDATE_PYTHON="${VALIDATE_PYTHON}" # Boolean to validate language
|
||||
VALIDATE_PYTHON_PYLINT="${VALIDATE_PYTHON:-$VALIDATE_PYTHON_PYLINT}" # Boolean to validate language
|
||||
VALIDATE_PYTHON_FLAKE8="${VALIDATE_PYTHON_FLAKE8}" # Boolean to validate language
|
||||
VALIDATE_RAKU="${VALIDATE_RAKU}" # Boolean to validate language
|
||||
VALIDATE_RUBY="${VALIDATE_RUBY}" # Boolean to validate language
|
||||
VALIDATE_STATES="${VALIDATE_STATES}" # Boolean to validate language
|
||||
|
@@ -182,6 +203,20 @@ VALIDATE_YAML="${VALIDATE_YAML}" # Boolean to vali
|
|||
RUN_LOCAL="${RUN_LOCAL}" # Boolean to see if we are running locally
|
||||
ACTIONS_RUNNER_DEBUG="${ACTIONS_RUNNER_DEBUG:-false}" # Boolean to see even more info (debug)
|
||||
|
||||
############
|
||||
# Log Vars #
|
||||
############
|
||||
if [[ ${ACTIONS_RUNNER_DEBUG} == true ]]; then LOG_LEVEL="DEBUG"; fi
|
||||
# Boolean to see trace logs
|
||||
LOG_TRACE=$(if [[ ${LOG_LEVEL} == "TRACE" ]]; then echo "true"; fi)
|
||||
export LOG_TRACE
|
||||
# Boolean to see debug logs
|
||||
LOG_DEBUG=$(if [[ ${LOG_LEVEL} == "DEBUG" || ${LOG_LEVEL} == "TRACE" ]]; then echo "true"; fi)
|
||||
export LOG_DEBUG
|
||||
# Boolean to see verbose logs (info function)
|
||||
LOG_VERBOSE=$(if [[ ${LOG_LEVEL} == "VERBOSE" || ${LOG_LEVEL} == "DEBUG" || ${LOG_LEVEL} == "TRACE" ]]; then echo "true"; fi)
|
||||
export LOG_VERBOSE
|
||||
|
||||
################
|
||||
# Default Vars #
|
||||
################
|
||||
|
@@ -195,17 +230,17 @@ DEFAULT_IFS="${IFS}" # Get the Default IFS for
|
|||
# Default Vars that are called in Subs and need to be ignored #
|
||||
###############################################################
|
||||
DEFAULT_DISABLE_ERRORS='false' # Default to enabling errors
|
||||
echo "${DEFAULT_DISABLE_ERRORS}" > /dev/null 2>&1 || true # Workaround SC2034
|
||||
export DEFAULT_DISABLE_ERRORS # Workaround SC2034
|
||||
RAW_FILE_ARRAY=() # Array of all files that were changed
|
||||
echo "${RAW_FILE_ARRAY[*]}" > /dev/null 2>&1 || true # Workaround SC2034
|
||||
export RAW_FILE_ARRAY # Workaround SC2034
|
||||
READ_ONLY_CHANGE_FLAG=0 # Flag set to 1 if files changed are not txt or md
|
||||
echo "${READ_ONLY_CHANGE_FLAG}" > /dev/null 2>&1 || true # Workaround SC2034
|
||||
export READ_ONLY_CHANGE_FLAG # Workaround SC2034
|
||||
TEST_CASE_FOLDER='.automation/test' # Folder for test cases we should always ignore
|
||||
echo "${TEST_CASE_FOLDER}" > /dev/null 2>&1 || true # Workaround SC2034
|
||||
export TEST_CASE_FOLDER # Workaround SC2034
|
||||
DEFAULT_ANSIBLE_DIRECTORY="${GITHUB_WORKSPACE}/ansible" # Default Ansible Directory
|
||||
echo "${DEFAULT_ANSIBLE_DIRECTORY}" > /dev/null 2>&1 || true # Workaround SC2034
|
||||
export DEFAULT_ANSIBLE_DIRECTORY # Workaround SC2034
|
||||
WARNING_ARRAY_TEST=() # Array of warning linters that did not have an expected test result.
|
||||
echo "${WARNING_ARRAY_TEST[*]}" > /dev/null 2>&1 || true # Workaround SC2034
|
||||
export WARNING_ARRAY_TEST # Workaround SC2034
|
||||
|
||||
##############
|
||||
# Format #
|
||||
|
@@ -235,15 +270,18 @@ FILE_ARRAY_JAVASCRIPT_STANDARD=() # Array of files to check
|
|||
FILE_ARRAY_JSON=() # Array of files to check
|
||||
FILE_ARRAY_JSX=() # Array of files to check
|
||||
FILE_ARRAY_KOTLIN=() # Array of files to check
|
||||
FILE_ARRAY_LUA=() # Array of files to check
|
||||
FILE_ARRAY_MARKDOWN=() # Array of files to check
|
||||
FILE_ARRAY_OPENAPI=() # Array of files to check
|
||||
FILE_ARRAY_PERL=() # Array of files to check
|
||||
FILE_ARRAY_PHP_BUILTIN=() # Array of files to check
|
||||
FILE_ARRAY_PHP_PHPCS=() # Array of files to check
|
||||
FILE_ARRAY_PHP_PHPSTAN=() # Array of files to check
|
||||
FILE_ARRAY_PHP_PSALM=() # Array of files to check
|
||||
FILE_ARRAY_POWERSHELL=() # Array of files to check
|
||||
FILE_ARRAY_PROTOBUF=() # Array of files to check
|
||||
FILE_ARRAY_PYTHON=() # Array of files to check
|
||||
FILE_ARRAY_PYTHON_PYLINT=() # Array of files to check
|
||||
FILE_ARRAY_PYTHON_FLAKE8=() # Array of files to check
|
||||
FILE_ARRAY_RAKU=() # Array of files to check
|
||||
FILE_ARRAY_RUBY=() # Array of files to check
|
||||
FILE_ARRAY_STATES=() # Array of files to check
|
||||
|
@@ -252,7 +290,7 @@ FILE_ARRAY_TSX=() # Array of files to check
|
|||
FILE_ARRAY_TYPESCRIPT_ES=() # Array of files to check
|
||||
FILE_ARRAY_TYPESCRIPT_STANDARD=() # Array of files to check
|
||||
FILE_ARRAY_XML=() # Array of files to check
|
||||
FILE_ARRAY_YML=() # Array of files to check
|
||||
FILE_ARRAY_YAML=() # Array of files to check
|
||||
|
||||
############
|
||||
# Counters #
|
||||
|
@@ -293,6 +331,8 @@ ERRORS_FOUND_JSX=0 # Count of errors found
|
|||
export ERRORS_FOUND_JSX # Workaround SC2034
|
||||
ERRORS_FOUND_KOTLIN=0 # Count of errors found
|
||||
export ERRORS_FOUND_KOTLIN # Workaround SC2034
|
||||
ERRORS_FOUND_LUA=0 # Count of errors found
|
||||
export ERRORS_FOUND_LUA=0 # Workaround SC2034
|
||||
ERRORS_FOUND_MARKDOWN=0 # Count of errors found
|
||||
export ERRORS_FOUND_MARKDOWN # Workaround SC2034
|
||||
ERRORS_FOUND_OPENAPI=0 # Count of errors found
|
||||
|
@@ -303,14 +343,18 @@ ERRORS_FOUND_PHP_BUILTIN=0 # Count of errors found
|
|||
export ERRORS_FOUND_PHP_BUILTIN # Workaround SC2034
|
||||
ERRORS_FOUND_PHP_PHPCS=0 # Count of errors found
|
||||
export ERRORS_FOUND_PHP_PHPCS # Workaround SC2034
|
||||
ERRORS_FOUND_PHP_PHPSTAN=0 # Count of errors found
|
||||
export ERRORS_FOUND_PHP_PHPSTAN # Workaround SC2034
|
||||
ERRORS_FOUND_PHP_PSALM=0 # Count of errors found
|
||||
export ERRORS_FOUND_PHP_PSALM # Workaround SC2034
|
||||
ERRORS_FOUND_POWERSHELL=0 # Count of errors found
|
||||
export ERRORS_FOUND_POWERSHELL # Workaround SC2034
|
||||
ERRORS_FOUND_PROTOBUF=0 # Count of errors found
|
||||
export ERRORS_FOUND_PROTOBUF # Workaround SC2034
|
||||
ERRORS_FOUND_PYTHON=0 # Count of errors found
|
||||
export ERRORS_FOUND_PYTHON # Workaround SC2034
|
||||
ERRORS_FOUND_PYTHON_PYLINT=0 # Count of errors found
|
||||
export ERRORS_FOUND_PYTHON_PYLINT # Workaround SC2034
|
||||
ERRORS_FOUND_PYTHON_FLAKE8=0 # Count of errors found
|
||||
export ERRORS_FOUND_PYTHON_FLAKE8 # Workaround SC2034
|
||||
ERRORS_FOUND_RAKU=0 # Count of errors found
|
||||
export ERRORS_FOUND_RAKU # Workaround SC2034
|
||||
ERRORS_FOUND_RUBY=0 # Count of errors found
|
||||
|
@@ -329,8 +373,8 @@ ERRORS_FOUND_TYPESCRIPT_ES=0 # Count of errors found
|
|||
export ERRORS_FOUND_TYPESCRIPT_ES # Workaround SC2034
|
||||
ERRORS_FOUND_XML=0 # Count of errors found
|
||||
export ERRORS_FOUND_XML # Workaround SC2034
|
||||
ERRORS_FOUND_YML=0 # Count of errors found
|
||||
export ERRORS_FOUND_YML # Workaround SC2034
|
||||
ERRORS_FOUND_YAML=0 # Count of errors found
|
||||
export ERRORS_FOUND_YAML # Workaround SC2034
|
||||
|
||||
################################################################################
|
||||
########################## FUNCTIONS BELOW #####################################
|
||||
|
@@ -346,15 +390,13 @@ Header() {
|
|||
##########
|
||||
# Prints #
|
||||
##########
|
||||
echo ""
|
||||
echo "---------------------------------------------"
|
||||
echo "--- GitHub Actions Multi Language Linter ----"
|
||||
echo "---------------------------------------------"
|
||||
echo ""
|
||||
echo "---------------------------------------------"
|
||||
echo "The Super-Linter source code can be found at:"
|
||||
echo " - https://github.com/github/super-linter"
|
||||
echo "---------------------------------------------"
|
||||
info "---------------------------------------------"
|
||||
info "--- GitHub Actions Multi Language Linter ----"
|
||||
info "---------------------------------------------"
|
||||
info "---------------------------------------------"
|
||||
info "The Super-Linter source code can be found at:"
|
||||
info " - https://github.com/github/super-linter"
|
||||
info "---------------------------------------------"
|
||||
}
|
||||
################################################################################
|
||||
#### Function GetLinterVersions ################################################
|
||||
|
@@ -362,9 +404,8 @@ GetLinterVersions() {
|
|||
#########################
|
||||
# Print version headers #
|
||||
#########################
|
||||
echo ""
|
||||
echo "---------------------------------------------"
|
||||
echo "Linter Version Info:"
|
||||
debug "---------------------------------------------"
|
||||
debug "Linter Version Info:"
|
||||
|
||||
##########################################################
|
||||
# Go through the array of linters and print version info #
|
||||
|
@@ -373,10 +414,10 @@ GetLinterVersions() {
|
|||
####################
|
||||
# Get the versions #
|
||||
####################
|
||||
if [[ "${LINTER}" == "arm-ttk" ]]; then
|
||||
if [[ ${LINTER} == "arm-ttk" ]]; then
|
||||
# Need specific command for ARM
|
||||
mapfile -t GET_VERSION_CMD < <(grep -iE 'version' "${ARM_TTK_PSD1}" | xargs 2>&1)
|
||||
elif [[ "${LINTER}" == "protolint" ]]; then
|
||||
elif [[ ${LINTER} == "protolint" ]]; then
|
||||
# Need specific command for Protolint
|
||||
mapfile -t GET_VERSION_CMD < <(echo "--version not supported")
|
||||
else
|
||||
|
@@ -393,20 +434,19 @@ GetLinterVersions() {
|
|||
# Check the shell for errors #
|
||||
##############################
|
||||
if [ ${ERROR_CODE} -ne 0 ] || [ -z "${GET_VERSION_CMD[*]}" ]; then
|
||||
echo -e "${NC}[${LINTER}]: ${F[Y]}WARN!${NC} Failed to get version info for:${NC}"
|
||||
warn "[${LINTER}]: Failed to get version info for:"
|
||||
else
|
||||
##########################
|
||||
# Print the version info #
|
||||
##########################
|
||||
echo -e "${NC}${F[B]}Successfully found version for ${F[W]}[${LINTER}]${F[B]}: ${F[W]}${GET_VERSION_CMD[*]}${NC}"
|
||||
debug "Successfully found version for ${F[W]}[${LINTER}]${F[B]}: ${F[W]}${GET_VERSION_CMD[*]}"
|
||||
fi
|
||||
done
|
||||
|
||||
#########################
|
||||
# Print version footers #
|
||||
#########################
|
||||
echo "---------------------------------------------"
|
||||
echo ""
|
||||
debug "---------------------------------------------"
|
||||
}
|
||||
################################################################################
|
||||
#### Function GetLinterRules ###################################################
|
||||
|
@@ -428,8 +468,8 @@ GetLinterRules() {
|
|||
# Validate we have the linter rules #
|
||||
#####################################
|
||||
if [ -f "${GITHUB_WORKSPACE}/${LINTER_RULES_PATH}/${!LANGUAGE_FILE_NAME}" ]; then
|
||||
echo "----------------------------------------------"
|
||||
echo "User provided file:[${!LANGUAGE_FILE_NAME}], setting rules file..."
|
||||
info "----------------------------------------------"
|
||||
info "User provided file:[${!LANGUAGE_FILE_NAME}], setting rules file..."
|
||||
|
||||
########################################
|
||||
# Update the path to the file location #
|
||||
|
@@ -439,9 +479,7 @@ GetLinterRules() {
|
|||
########################################################
|
||||
# No user default provided, using the template default #
|
||||
########################################################
|
||||
if [[ ${ACTIONS_RUNNER_DEBUG} == "true" ]]; then
|
||||
echo " -> Codebase does NOT have file:[${LINTER_RULES_PATH}/${!LANGUAGE_FILE_NAME}], using Default rules at:[${!LANGUAGE_LINTER_RULES}]"
|
||||
fi
|
||||
debug " -> Codebase does NOT have file:[${LINTER_RULES_PATH}/${!LANGUAGE_FILE_NAME}], using Default rules at:[${!LANGUAGE_LINTER_RULES}]"
|
||||
fi
|
||||
}
|
||||
################################################################################
|
||||
|
@@ -481,9 +519,8 @@ GetStandardRules() {
|
|||
##############################
|
||||
if [ ${ERROR_CODE} -ne 0 ]; then
|
||||
# ERROR
|
||||
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to gain list of ENV vars to load!${NC}"
|
||||
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${GET_ENV_ARRAY[*]}]${NC}"
|
||||
exit 1
|
||||
error "Failed to gain list of ENV vars to load!"
|
||||
fatal "[${GET_ENV_ARRAY[*]}]"
|
||||
fi
|
||||
|
||||
##########################
|
||||
|
@@ -509,7 +546,7 @@ GetStandardRules() {
|
|||
# Get the env to add to string #
|
||||
################################
|
||||
ENV="$(echo "${ENV}" | cut -d'"' -f2)"
|
||||
# echo "ENV:[${ENV}]"
|
||||
debug "ENV:[${ENV}]"
|
||||
ENV_STRING+="--env ${ENV} "
|
||||
done
|
||||
|
||||
|
@@ -647,8 +684,8 @@ GetGitHubVars() {
|
|||
##########
|
||||
# Prints #
|
||||
##########
|
||||
echo "--------------------------------------------"
|
||||
echo "Gathering GitHub information..."
|
||||
info "--------------------------------------------"
|
||||
info "Gathering GitHub information..."
|
||||
|
||||
###############################
|
||||
# Get the Run test cases flag #
|
||||
|
@@ -687,8 +724,8 @@ GetGitHubVars() {
|
|||
##########################################
|
||||
# We are running locally for a debug run #
|
||||
##########################################
|
||||
echo "NOTE: ENV VAR [RUN_LOCAL] has been set to:[true]"
|
||||
echo "bypassing GitHub Actions variables..."
|
||||
info "NOTE: ENV VAR [RUN_LOCAL] has been set to:[true]"
|
||||
info "bypassing GitHub Actions variables..."
|
||||
|
||||
############################
|
||||
# Set the GITHUB_WORKSPACE #
|
||||
|
@@ -698,11 +735,10 @@ GetGitHubVars() {
|
|||
fi
|
||||
|
||||
if [ ! -d "${GITHUB_WORKSPACE}" ]; then
|
||||
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC} Provided volume is not a directory!${NC}"
|
||||
exit 1
|
||||
fatal "Provided volume is not a directory!"
|
||||
fi
|
||||
|
||||
echo "Linting all files in mapped directory:[${DEFAULT_WORKSPACE}]"
|
||||
info "Linting all files in mapped directory:[${DEFAULT_WORKSPACE}]"
|
||||
|
||||
# No need to touch or set the GITHUB_SHA
|
||||
# No need to touch or set the GITHUB_EVENT_PATH
|
||||
|
@@ -718,33 +754,30 @@ GetGitHubVars() {
|
|||
# Validate we have a value #
|
||||
############################
|
||||
if [ -z "${GITHUB_SHA}" ]; then
|
||||
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to get [GITHUB_SHA]!${NC}"
|
||||
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${GITHUB_SHA}]${NC}"
|
||||
exit 1
|
||||
error "Failed to get [GITHUB_SHA]!"
|
||||
fatal "[${GITHUB_SHA}]"
|
||||
else
|
||||
echo -e "${NC}${F[B]}Successfully found:${F[W]}[GITHUB_SHA]${F[B]}, value:${F[W]}[${GITHUB_SHA}]${NC}"
|
||||
info "Successfully found:${F[W]}[GITHUB_SHA]${F[B]}, value:${F[W]}[${GITHUB_SHA}]"
|
||||
fi
|
||||
|
||||
############################
|
||||
# Validate we have a value #
|
||||
############################
|
||||
if [ -z "${GITHUB_WORKSPACE}" ]; then
|
||||
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to get [GITHUB_WORKSPACE]!${NC}"
|
||||
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${GITHUB_WORKSPACE}]${NC}"
|
||||
exit 1
|
||||
error "Failed to get [GITHUB_WORKSPACE]!"
|
||||
fatal "[${GITHUB_WORKSPACE}]"
|
||||
else
|
||||
echo -e "${NC}${F[B]}Successfully found:${F[W]}[GITHUB_WORKSPACE]${F[B]}, value:${F[W]}[${GITHUB_WORKSPACE}]${NC}"
|
||||
info "Successfully found:${F[W]}[GITHUB_WORKSPACE]${F[B]}, value:${F[W]}[${GITHUB_WORKSPACE}]"
|
||||
fi
|
||||
|
||||
############################
|
||||
# Validate we have a value #
|
||||
############################
|
||||
if [ -z "${GITHUB_EVENT_PATH}" ]; then
|
||||
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to get [GITHUB_EVENT_PATH]!${NC}"
|
||||
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${GITHUB_EVENT_PATH}]${NC}"
|
||||
exit 1
|
||||
error "Failed to get [GITHUB_EVENT_PATH]!"
|
||||
fatal "[${GITHUB_EVENT_PATH}]"
|
||||
else
|
||||
echo -e "${NC}${F[B]}Successfully found:${F[W]}[GITHUB_EVENT_PATH]${F[B]}, value:${F[W]}[${GITHUB_EVENT_PATH}]${F[B]}${NC}"
|
||||
info "Successfully found:${F[W]}[GITHUB_EVENT_PATH]${F[B]}, value:${F[W]}[${GITHUB_EVENT_PATH}]${F[B]}"
|
||||
fi
|
||||
|
||||
##################################################
|
||||
|
@@ -760,11 +793,10 @@ GetGitHubVars() {
|
|||
# Validate we have a value #
|
||||
############################
|
||||
if [ -z "${GITHUB_ORG}" ]; then
|
||||
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to get [GITHUB_ORG]!${NC}"
|
||||
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${GITHUB_ORG}]${NC}"
|
||||
exit 1
|
||||
error "Failed to get [GITHUB_ORG]!"
|
||||
fatal "[${GITHUB_ORG}]"
|
||||
else
|
||||
echo -e "${NC}${F[B]}Successfully found:${F[W]}[GITHUB_ORG]${F[B]}, value:${F[W]}[${GITHUB_ORG}]${NC}"
|
||||
info "Successfully found:${F[W]}[GITHUB_ORG]${F[B]}, value:${F[W]}[${GITHUB_ORG}]"
|
||||
fi
|
||||
|
||||
#######################
|
||||
|
@@ -776,11 +808,10 @@ GetGitHubVars() {
|
|||
# Validate we have a value #
|
||||
############################
|
||||
if [ -z "${GITHUB_REPO}" ]; then
|
||||
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to get [GITHUB_REPO]!${NC}"
|
||||
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${GITHUB_REPO}]${NC}"
|
||||
exit 1
|
||||
error "Failed to get [GITHUB_REPO]!"
|
||||
fatal "[${GITHUB_REPO}]"
|
||||
else
|
||||
echo -e "${NC}${F[B]}Successfully found:${F[W]}[GITHUB_REPO]${F[B]}, value:${F[W]}[${GITHUB_REPO}]${NC}"
|
||||
info "Successfully found:${F[W]}[GITHUB_REPO]${F[B]}, value:${F[W]}[${GITHUB_REPO}]"
|
||||
fi
|
||||
fi
|
||||
|
||||
|
@@ -788,16 +819,16 @@ GetGitHubVars() {
|
|||
# Validate we have a value #
|
||||
############################
|
||||
if [ -z "${GITHUB_TOKEN}" ] && [[ ${RUN_LOCAL} == "false" ]]; then
|
||||
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to get [GITHUB_TOKEN]!${NC}"
|
||||
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${GITHUB_TOKEN}]${NC}"
|
||||
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Please set a [GITHUB_TOKEN] from the main workflow environment to take advantage of multiple status reports!${NC}"
|
||||
error "Failed to get [GITHUB_TOKEN]!"
|
||||
error "[${GITHUB_TOKEN}]"
|
||||
error "Please set a [GITHUB_TOKEN] from the main workflow environment to take advantage of multiple status reports!"
|
||||
|
||||
################################################################################
|
||||
# Need to set MULTI_STATUS to false as we cant hit API endpoints without token #
|
||||
################################################################################
|
||||
MULTI_STATUS='false'
|
||||
else
|
||||
echo -e "${NC}${F[B]}Successfully found:${F[W]}[GITHUB_TOKEN]${NC}"
|
||||
info "Successfully found:${F[W]}[GITHUB_TOKEN]"
|
||||
fi
|
||||
|
||||
###############################
|
||||
|
@@ -813,22 +844,20 @@ GetGitHubVars() {
|
|||
# Validate we have a value #
|
||||
############################
|
||||
if [ -z "${GITHUB_REPOSITORY}" ]; then
|
||||
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to get [GITHUB_REPOSITORY]!${NC}"
|
||||
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${GITHUB_REPOSITORY}]${NC}"
|
||||
exit 1
|
||||
error "Failed to get [GITHUB_REPOSITORY]!"
|
||||
fatal "[${GITHUB_REPOSITORY}]"
|
||||
else
|
||||
echo -e "${NC}${F[B]}Successfully found:${F[W]}[GITHUB_REPOSITORY]${F[B]}, value:${F[W]}[${GITHUB_REPOSITORY}]${NC}"
|
||||
info "Successfully found:${F[W]}[GITHUB_REPOSITORY]${F[B]}, value:${F[W]}[${GITHUB_REPOSITORY}]"
|
||||
fi
|
||||
|
||||
############################
|
||||
# Validate we have a value #
|
||||
############################
|
||||
if [ -z "${GITHUB_RUN_ID}" ]; then
|
||||
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to get [GITHUB_RUN_ID]!${NC}"
|
||||
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${GITHUB_RUN_ID}]${NC}"
|
||||
exit 1
|
||||
error "Failed to get [GITHUB_RUN_ID]!"
|
||||
fatal "[${GITHUB_RUN_ID}]"
|
||||
else
|
||||
echo -e "${NC}${F[B]}Successfully found:${F[W]}[GITHUB_RUN_ID]${F[B]}, value:${F[W]}[${GITHUB_RUN_ID}]${NC}"
|
||||
info "Successfully found:${F[W]}[GITHUB_RUN_ID]${F[B]}, value:${F[W]}[${GITHUB_RUN_ID}]"
|
||||
fi
|
||||
fi
|
||||
}
|
||||
|
@@ -840,16 +869,14 @@ function ValidatePowershellModules() {
|
|||
if [[ ${VALIDATE_PSSA_MODULE} == "PSScriptAnalyzer" ]]; then
|
||||
VALIDATE_PSSA_CMD=$(pwsh -c "(Get-Command Invoke-ScriptAnalyzer | Select-Object -First 1).Name" 2>&1)
|
||||
else
|
||||
# Failed to find module
|
||||
exit 1
|
||||
fatal "Failed to find module."
|
||||
fi
|
||||
|
||||
#########################################
|
||||
# validate we found the script analyzer #
|
||||
#########################################
|
||||
if [[ ${VALIDATE_PSSA_CMD} != "Invoke-ScriptAnalyzer" ]]; then
|
||||
# Failed to find module
|
||||
exit 1
|
||||
fatal "Failed to find module."
|
||||
fi
|
||||
|
||||
#######################
|
||||
|
@@ -862,15 +889,12 @@ function ValidatePowershellModules() {
|
|||
##############################
|
||||
if [ ${ERROR_CODE} -ne 0 ]; then
|
||||
# Failed
|
||||
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed find module [PSScriptAnalyzer] for [${LINTER_NAME}] in system!${NC}"
|
||||
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[PSSA_MODULE ${VALIDATE_PSSA_MODULE}] [PSSA_CMD ${VALIDATE_PSSA_CMD}]${NC}"
|
||||
exit 1
|
||||
error "Failed find module [PSScriptAnalyzer] for [${LINTER_NAME}] in system!"
|
||||
fatal "[PSSA_MODULE ${VALIDATE_PSSA_MODULE}] [PSSA_CMD ${VALIDATE_PSSA_CMD}]"
|
||||
else
|
||||
# Success
|
||||
if [[ ${ACTIONS_RUNNER_DEBUG} == "true" ]]; then
|
||||
echo -e "${NC}${F[B]}Successfully found module ${F[W]}[${VALIDATE_PSSA_MODULE}]${F[B]} in system${NC}"
|
||||
echo -e "${NC}${F[B]}Successfully found command ${F[W]}[${VALIDATE_PSSA_CMD}]${F[B]} in system${NC}"
|
||||
fi
|
||||
debug "Successfully found module ${F[W]}[${VALIDATE_PSSA_MODULE}]${F[B]} in system"
|
||||
debug "Successfully found command ${F[W]}[${VALIDATE_PSSA_CMD}]${F[B]} in system"
|
||||
fi
|
||||
}
|
||||
################################################################################
|
||||
|
@@ -923,8 +947,8 @@ CallStatusAPI() {
|
|||
##############################
|
||||
if [ "${ERROR_CODE}" -ne 0 ]; then
|
||||
# ERROR
|
||||
echo "ERROR! Failed to call GitHub Status API!"
|
||||
echo "ERROR:[${SEND_STATUS_CMD}]"
|
||||
info "ERROR! Failed to call GitHub Status API!"
|
||||
info "ERROR:[${SEND_STATUS_CMD}]"
|
||||
# Not going to fail the script on this yet...
|
||||
fi
|
||||
fi
|
||||
|
@@ -932,39 +956,35 @@ CallStatusAPI() {
|
|||
################################################################################
|
||||
#### Function Reports ##########################################################
|
||||
Reports() {
|
||||
echo ""
|
||||
echo "----------------------------------------------"
|
||||
echo "----------------------------------------------"
|
||||
echo "Generated reports:"
|
||||
echo "----------------------------------------------"
|
||||
echo "----------------------------------------------"
|
||||
echo ""
|
||||
info "----------------------------------------------"
|
||||
info "----------------------------------------------"
|
||||
info "Generated reports:"
|
||||
info "----------------------------------------------"
|
||||
info "----------------------------------------------"
|
||||
|
||||
###################################
|
||||
# Prints output report if enabled #
|
||||
###################################
|
||||
if [ -z "${FORMAT_REPORT}" ]; then
|
||||
echo "Reports generated in folder ${REPORT_OUTPUT_FOLDER}"
|
||||
info "Reports generated in folder ${REPORT_OUTPUT_FOLDER}"
|
||||
fi
|
||||
|
||||
################################
|
||||
# Prints for warnings if found #
|
||||
################################
|
||||
for TEST in "${WARNING_ARRAY_TEST[@]}"; do
|
||||
echo -e "${NC}${F[Y]}WARN!${NC} Expected file to compare with was not found for ${TEST}${NC}"
|
||||
warn "Expected file to compare with was not found for ${TEST}"
|
||||
done
|
||||
|
||||
}
|
||||
################################################################################
|
||||
#### Function Footer ###########################################################
|
||||
Footer() {
|
||||
echo ""
|
||||
echo "----------------------------------------------"
|
||||
echo "----------------------------------------------"
|
||||
echo "The script has completed"
|
||||
echo "----------------------------------------------"
|
||||
echo "----------------------------------------------"
|
||||
echo ""
|
||||
info "----------------------------------------------"
|
||||
info "----------------------------------------------"
|
||||
info "The script has completed"
|
||||
info "----------------------------------------------"
|
||||
info "----------------------------------------------"
|
||||
|
||||
####################################################
|
||||
# Need to clean up the lanuage array of duplicates #
|
||||
|
@ -983,12 +1003,12 @@ Footer() {
|
|||
##################
|
||||
# Print if not 0 #
|
||||
##################
|
||||
if [ "${!ERROR_COUNTER}" -ne 0 ]; then
|
||||
if [[ ${!ERROR_COUNTER} -ne 0 ]]; then
|
||||
# We found errors in the language
|
||||
###################
|
||||
# Print the goods #
|
||||
###################
|
||||
echo -e "${NC}${B[R]}${F[W]}ERRORS FOUND${NC} in ${LANGUAGE}:[${!ERROR_COUNTER}]${NC}"
|
||||
error "ERRORS FOUND${NC} in ${LANGUAGE}:[${!ERROR_COUNTER}]"
|
||||
|
||||
#########################################
|
||||
# Create status API for Failed language #
|
||||
|
@ -997,7 +1017,7 @@ Footer() {
|
|||
######################################
|
||||
# Check if we validated the langauge #
|
||||
######################################
|
||||
elif [ "${!ERROR_COUNTER}" -eq 0 ] && [[ "${UNIQUE_LINTED_ARRAY[*]}" =~ ${LANGUAGE} ]]; then
|
||||
elif [[ ${!ERROR_COUNTER} -eq 0 ]] && [[ ${UNIQUE_LINTED_ARRAY[*]} =~ ${LANGUAGE} ]]; then
|
||||
# No errors found when linting the language
|
||||
CallStatusAPI "${LANGUAGE}" "success"
|
||||
fi
|
||||
|
@ -1007,7 +1027,7 @@ Footer() {
|
|||
# Exit with 0 if errors disabled #
|
||||
##################################
|
||||
if [ "${DISABLE_ERRORS}" == "true" ]; then
|
||||
echo -e "${NC}${F[Y]}WARN!${NC} Exiting with exit code:[0] as:[DISABLE_ERRORS] was set to:[${DISABLE_ERRORS}]${NC}"
|
||||
warn "Exiting with exit code:[0] as:[DISABLE_ERRORS] was set to:[${DISABLE_ERRORS}]"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
|
@ -1019,24 +1039,33 @@ Footer() {
|
|||
# build the variable
|
||||
ERRORS_FOUND_LANGUAGE="ERRORS_FOUND_${LANGUAGE}"
|
||||
# Check if error was found
|
||||
if [ "${!ERRORS_FOUND_LANGUAGE}" -ne 0 ]; then
|
||||
if [[ ${!ERRORS_FOUND_LANGUAGE} -ne 0 ]]; then
|
||||
# Failed exit
|
||||
echo -e "${NC}${F[R]}Exiting with errors found!${NC}"
|
||||
exit 1
|
||||
fatal "Exiting with errors found!"
|
||||
fi
|
||||
done
|
||||
|
||||
########################
|
||||
# Footer prints Exit 0 #
|
||||
########################
|
||||
echo ""
|
||||
echo -e "${NC}${F[G]}All file(s) linted successfully with no errors detected${NC}"
|
||||
echo "----------------------------------------------"
|
||||
echo ""
|
||||
notice "All file(s) linted successfully with no errors detected"
|
||||
info "----------------------------------------------"
|
||||
# Successful exit
|
||||
exit 0
|
||||
}
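The per-language exit check above leans on bash indirect expansion: the counter name is assembled as a string (ERRORS_FOUND_${LANGUAGE}) and then dereferenced with ${!VAR}. A minimal standalone sketch of the same idiom, using a hypothetical ERRORS_FOUND_EXAMPLE counter rather than one of the script's real counters:

#!/usr/bin/env bash
ERRORS_FOUND_EXAMPLE=2                     # hypothetical counter, for illustration only
LANGUAGE="EXAMPLE"
ERROR_COUNTER="ERRORS_FOUND_${LANGUAGE}"   # build the variable name as a string
if [[ ${!ERROR_COUNTER} -ne 0 ]]; then     # ${!VAR} expands to the value of the named variable
  echo "ERRORS FOUND in ${LANGUAGE}:[${!ERROR_COUNTER}]"
fi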

################################################################################
#### Function Cleanup ##########################################################
cleanup() {
local -ri EXIT_CODE=$?

sh -c "cat ${LOG_TEMP} >> ${GITHUB_WORKSPACE}/${LOG_FILE}" || true

exit ${EXIT_CODE}
trap - 0 1 2 3 6 14 15
}
trap 'cleanup' 0 1 2 3 6 14 15
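The cleanup handler above is registered for the normal-exit pseudo-signal 0 as well as HUP, INT, QUIT, ABRT, ALRM and TERM, so the buffered log is flushed into the workspace log however the script ends. A minimal sketch of the same trap pattern, standalone and with hypothetical paths standing in for LOG_TEMP and ${GITHUB_WORKSPACE}/${LOG_FILE}; in this sketch the traps are cleared before re-exiting:

#!/usr/bin/env bash
TMP_LOG=$(mktemp)                          # hypothetical stand-in for LOG_TEMP
FINAL_LOG="/tmp/example-super-linter.log"  # hypothetical stand-in for ${GITHUB_WORKSPACE}/${LOG_FILE}
cleanup() {
  local -ri EXIT_CODE=$?                   # capture the exit status before anything else changes it
  cat "${TMP_LOG}" >> "${FINAL_LOG}" || true
  trap - 0 1 2 3 6 14 15                   # remove the traps so cleanup does not run twice
  exit "${EXIT_CODE}"                      # propagate the original exit status
}
trap 'cleanup' 0 1 2 3 6 14 15
echo "doing some work" >> "${TMP_LOG}"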

################################################################################
############################### MAIN ###########################################
################################################################################
@@ -1051,9 +1080,8 @@ Header
##############################################################
if [ -n "${OUTPUT_FORMAT}" ]; then
if [ -d "${REPORT_OUTPUT_FOLDER}" ]; then
echo "ERROR! Found ${REPORT_OUTPUT_FOLDER}"
echo "Please remove the folder and try again."
exit 1
error "ERROR! Found ${REPORT_OUTPUT_FOLDER}"
fatal "Please remove the folder and try again."
fi
fi

@@ -1094,30 +1122,29 @@ GetLinterRules "GROOVY"
GetLinterRules "HTML"
# Get JavaScript rules
GetLinterRules "JAVASCRIPT"
# Get LUA rules
GetLinterRules "LUA"
# Get Markdown rules
GetLinterRules "MARKDOWN"
# Get PowerShell rules
GetLinterRules "POWERSHELL"
# Get Python rules
GetLinterRules "PYTHON"
# Get Python pylint rules
GetLinterRules "PYTHON_PYLINT"
# Get Python flake8 rules
GetLinterRules "PYTHON_FLAKE8"
# Get Ruby rules
GetLinterRules "RUBY"
# Get Terraform rules
GetLinterRules "TERRAFORM"
# Get TypeScript rules
GetLinterRules "TYPESCRIPT"
# Get YML rules
# Get YAML rules
GetLinterRules "YAML"

#################################
# Check if were in verbose mode #
#################################
if [[ ${ACTIONS_RUNNER_DEBUG} == "true" ]]; then
##################################
# Get and print all version info #
##################################
GetLinterVersions
fi

###########################################
# Check to see if this is a test case run #
@@ -1411,6 +1438,17 @@ if [ "${VALIDATE_KOTLIN}" == "true" ]; then
LintCodebase "KOTLIN" "ktlint" "ktlint" ".*\.\(kt\|kts\)\$" "${FILE_ARRAY_KOTLIN[@]}"
fi

###############
# LUA LINTING #
###############
if [ "${VALIDATE_LUA}" == "true" ]; then
######################
# Lint the Lua files #
######################
# LintCodebase "FILE_TYPE" "LINTER_NAME" "LINTER_CMD" "FILE_TYPES_REGEX" "FILE_ARRAY"
LintCodebase "LUA" "lua" "luacheck --config ${LUA_LINTER_RULES}" ".*\.\(lua\)\$" "${FILE_ARRAY_LUA[@]}"
fi
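Every linting block in this section follows the same calling convention spelled out in the inline comment: LintCodebase "FILE_TYPE" "LINTER_NAME" "LINTER_CMD" "FILE_TYPES_REGEX" "FILE_ARRAY". As a hedged illustration only, wiring up an additional language would look like the block below; VALIDATE_FOO, foolint, FOO_LINTER_RULES and FILE_ARRAY_FOO are invented names, not variables or tools provided by this script:

###############
# FOO LINTING #
###############
if [ "${VALIDATE_FOO}" == "true" ]; then
  # LintCodebase "FILE_TYPE" "LINTER_NAME" "LINTER_CMD" "FILE_TYPES_REGEX" "FILE_ARRAY"
  LintCodebase "FOO" "foolint" "foolint --config ${FOO_LINTER_RULES}" ".*\.\(foo\)\$" "${FILE_ARRAY_FOO[@]}"
fi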

####################
# MARKDOWN LINTING #
####################
@@ -1483,6 +1521,14 @@ if [ "${VALIDATE_PHP_PHPCS}" == "true" ]; then
LintCodebase "PHP_PHPCS" "phpcs" "phpcs --standard=${PHP_PHPCS_LINTER_RULES}" ".*\.\(php\)\$" "${FILE_ARRAY_PHP_PHPCS[@]}"
fi

if [ "${VALIDATE_PHP_PHPSTAN}" == "true" ]; then
#######################
# Lint the PHP files using PHPStan #
#######################
# LintCodebase "FILE_TYPE" "LINTER_NAME" "LINTER_CMD" "FILE_TYPES_REGEX" "FILE_ARRAY"
LintCodebase "PHP_PHPSTAN" "phpstan" "phpstan analyse --no-progress --no-ansi -c ${PHPSTAN_LINTER_RULES}" ".*\.\(php\)\$" "${FILE_ARRAY_PHP_PHPSTAN[@]}"
fi

if [ "${VALIDATE_PHP_PSALM}" == "true" ]; then
##################################
# Lint the PHP files using Psalm #
@@ -1521,12 +1567,23 @@ fi
##################
# PYTHON LINTING #
##################
if [ "${VALIDATE_PYTHON}" == "true" ]; then
if [ "${VALIDATE_PYTHON_PYLINT}" == "true" ]; then
#########################
# Lint the python files #
#########################
# LintCodebase "FILE_TYPE" "LINTER_NAME" "LINTER_CMD" "FILE_TYPES_REGEX" "FILE_ARRAY"
LintCodebase "PYTHON" "pylint" "pylint --rcfile ${PYTHON_LINTER_RULES}" ".*\.\(py\)\$" "${FILE_ARRAY_PYTHON[@]}"
LintCodebase "PYTHON_PYLINT" "pylint" "pylint --rcfile ${PYTHON_PYLINT_LINTER_RULES}" ".*\.\(py\)\$" "${FILE_ARRAY_PYTHON_PYLINT[@]}"
fi

##################
# PYTHON LINTING #
##################
if [ "${VALIDATE_PYTHON_FLAKE8}" == "true" ]; then
#########################
# Lint the python files #
#########################
# LintCodebase "FILE_TYPE" "LINTER_NAME" "LINTER_CMD" "FILE_TYPES_REGEX" "FILE_ARRAY"
LintCodebase "PYTHON_FLAKE8" "flake8" "flake8 --config=${PYTHON_FLAKE8_LINTER_RULES}" ".*\.\(py\)\$" "${FILE_ARRAY_PYTHON_FLAKE8[@]}"
fi

################
@@ -1536,7 +1593,7 @@ if [ "${VALIDATE_RAKU}" == "true" ]; then
#######################
# Lint the raku files #
#######################
echo "${GITHUB_WORKSPACE}/META6.json"
info "${GITHUB_WORKSPACE}/META6.json"
if [ -e "${GITHUB_WORKSPACE}/META6.json" ]; then
cd "${GITHUB_WORKSPACE}" && zef install --deps-only --/test .
fi
@@ -1653,14 +1710,14 @@ if [ "${VALIDATE_XML}" == "true" ]; then
fi

###############
# YML LINTING #
# YAML LINTING #
###############
if [ "${VALIDATE_YAML}" == "true" ]; then
######################
# Lint the Yml Files #
######################
# LintCodebase "FILE_TYPE" "LINTER_NAME" "LINTER_CMD" "FILE_TYPES_REGEX" "FILE_ARRAY"
LintCodebase "YML" "yamllint" "yamllint -c ${YAML_LINTER_RULES}" ".*\.\(yml\|yaml\)\$" "${FILE_ARRAY_YML[@]}"
LintCodebase "YAML" "yamllint" "yamllint -c ${YAML_LINTER_RULES}" ".*\.\(yml\|yaml\)\$" "${FILE_ARRAY_YAML[@]}"
fi

###########

53 lib/log.sh Normal file
@@ -0,0 +1,53 @@
#!/usr/bin/env bash

declare -Agr B=(
[B]=$(echo -e "\e[44m")
[C]=$(echo -e "\e[46m")
[G]=$(echo -e "\e[42m")
[K]=$(echo -e "\e[40m")
[M]=$(echo -e "\e[45m")
[R]=$(echo -e "\e[41m")
[W]=$(echo -e "\e[47m")
[Y]=$(echo -e "\e[43m")
)
declare -Agr F=(
[B]=$(echo -e "\e[0;34m")
[C]=$(echo -e "\e[0;36m")
[G]=$(echo -e "\e[0;32m")
[K]=$(echo -e "\e[0;30m")
[M]=$(echo -e "\e[0;35m")
[R]=$(echo -e "\e[0;31m")
[W]=$(echo -e "\e[0;37m")
[Y]=$(echo -e "\e[0;33m")
)
readonly NC=$(echo -e "\e[0m")

export B
export F
export NC

# Log Functions
LOG_TEMP=$(mktemp) || echo "Failed to create temporary log file."
export LOG_TEMP
echo "super-linter Log" > "${LOG_TEMP}"
log() {
local TOTERM=${1:-}
local MESSAGE=${2:-}
echo -e "${MESSAGE:-}" | (
if [[ -n ${TOTERM} ]]; then
tee -a "${LOG_TEMP}" >&2
else
cat >> "${LOG_TEMP}" 2>&1
fi
)
}
trace() { log "${LOG_TRACE:-}" "${NC}$(date +"%F %T") ${F[B]}[TRACE ]${NC} $*${NC}"; }
debug() { log "${LOG_DEBUG:-}" "${NC}$(date +"%F %T") ${F[B]}[DEBUG ]${NC} $*${NC}"; }
info() { log "${LOG_VERBOSE:-}" "${NC}$(date +"%F %T") ${F[B]}[INFO ]${NC} $*${NC}"; }
notice() { log "true" "${NC}$(date +"%F %T") ${F[G]}[NOTICE]${NC} $*${NC}"; }
warn() { log "true" "${NC}$(date +"%F %T") ${F[Y]}[WARN ]${NC} $*${NC}"; }
error() { log "true" "${NC}$(date +"%F %T") ${F[R]}[ERROR ]${NC} $*${NC}"; }
fatal() {
log "true" "${NC}$(date +"%F %T") ${B[R]}${F[W]}[FATAL ]${NC} $*${NC}"
exit 1
}
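Taken together, lib/log.sh gives the rest of the scripts leveled logging: trace, debug and info only reach the terminal when LOG_TRACE, LOG_DEBUG or LOG_VERBOSE is set to a non-empty value, notice, warn and error always print, and fatal prints then exits 1; every message is appended to the temporary log file regardless of level. A small, hedged usage sketch (the source path is illustrative; real callers source the file relative to their own location):

#!/usr/bin/env bash
# shellcheck source=/dev/null
source lib/log.sh        # illustrative path

LOG_VERBOSE=true         # make info() visible on the terminal as well as in ${LOG_TEMP}
info "Starting example run"
debug "Only shown on the terminal when LOG_DEBUG is set"
warn "Always shown"
fatal "Prints, then stops the script with exit code 1"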

@@ -8,12 +8,12 @@ cat << EOF
@///////@///////////////@@@@ ( @,
@/(&/@//////////////////// @
@////////////////////////@@ @
@%////////(//////////%/////&@ @@ *,@
@@@@@/@/#/////(&////////////////// .@
*@@@@@. .%///(//@//////////////////&. .@@, @%
@@% .&@&&/@.@//&/////(////////// @@@@@@@@@ .. &@
@@% @@@@@ @&/////////////////# @/ V @@/ ,@@@ @
@@@% @@@@ .%@@@@//////#@ @ @@ @ .,.
@%////////(//////////%/////&@ @@ *,@ ______________
@@@@@/@/#/////(&////////////////// .@ / \\
*@@@@@. .%///(//@//////////////////&. .@@, @% / Don't mind me \\
@@% .&@&&/@.@//&/////(////////// @@@@@@@@@ .. &@ / I'm just looking \\
@@% @@@@@ @&/////////////////# @/ V @@/ ,@@@ @ < for some trash... |
@@@% @@@@ .%@@@@//////#@ @ @@ @ .,. \\__________________/
@@@/@( (@@@@% @/\ %
@@@@( . .@@/\ #
@ %@%

@@ -1,27 +0,0 @@
#!/usr/bin/env bash

declare -Agr B=(
[B]=$(echo -e "\e[44m")
[C]=$(echo -e "\e[46m")
[G]=$(echo -e "\e[42m")
[K]=$(echo -e "\e[40m")
[M]=$(echo -e "\e[45m")
[R]=$(echo -e "\e[41m")
[W]=$(echo -e "\e[47m")
[Y]=$(echo -e "\e[43m")
)
declare -Agr F=(
[B]=$(echo -e "\e[0;34m")
[C]=$(echo -e "\e[0;36m")
[G]=$(echo -e "\e[0;32m")
[K]=$(echo -e "\e[0;30m")
[M]=$(echo -e "\e[0;35m")
[R]=$(echo -e "\e[0;31m")
[W]=$(echo -e "\e[0;37m")
[Y]=$(echo -e "\e[0;33m")
)
readonly NC=$(echo -e "\e[0m")

export B
export F
export NC

@@ -13,9 +13,8 @@ function GetValidationInfo() {
############################################
# Print headers for user provided env vars #
############################################
echo ""
echo "--------------------------------------------"
echo "Gathering user validation information..."
info "--------------------------------------------"
info "Gathering user validation information..."

###########################################
# Skip validation if were running locally #
@@ -31,10 +30,10 @@ function GetValidationInfo() {
if [[ ${VALIDATE_ALL_CODEBASE} != "false" ]]; then
# Set to true
VALIDATE_ALL_CODEBASE="${DEFAULT_VALIDATE_ALL_CODEBASE}"
echo "- Validating ALL files in code base..."
info "- Validating ALL files in code base..."
else
# Its false
echo "- Only validating [new], or [edited] files in code base..."
info "- Only validating [new], or [edited] files in code base..."
fi
fi

@@ -46,7 +45,6 @@ function GetValidationInfo() {
################################
# Convert strings to lowercase #
################################

# Loop through all languages
for LANGUAGE in "${LANGUAGE_ARRAY[@]}"; do
# build the variable
@@ -55,7 +53,6 @@ function GetValidationInfo() {
eval "${VALIDATE_LANGUAGE}=${!VALIDATE_LANGUAGE,,}"
done
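The loop above normalizes each VALIDATE_<LANGUAGE> flag by combining indirect expansion with bash's ,, lowercase conversion and writing the result back through eval. A minimal standalone sketch of the idiom, with a hypothetical VALIDATE_EXAMPLE flag:

#!/usr/bin/env bash
VALIDATE_EXAMPLE="TRUE"                               # hypothetical user-supplied flag
VALIDATE_LANGUAGE="VALIDATE_EXAMPLE"                  # name of the variable to normalize
eval "${VALIDATE_LANGUAGE}=${!VALIDATE_LANGUAGE,,}"   # ${!VAR,,} -> lowercased value of the named variable
echo "${VALIDATE_EXAMPLE}"                            # prints: true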

################################################
# Determine if any linters were explicitly set #
################################################
@@ -71,7 +68,6 @@ function GetValidationInfo() {
fi
done

###################################################
# Validate if we should check individual lanuages #
###################################################
@@ -170,23 +166,19 @@ function GetValidationInfo() {
ACTIONS_RUNNER_DEBUG="true"
fi

###################
# Debug on runner #
###################
if [[ ${ACTIONS_RUNNER_DEBUG} == "true" ]]; then
###########################
# Print the validate info #
###########################
for LINE in "${PRINT_ARRAY[@]}"; do
echo "${LINE}"
debug "${LINE}"
done

echo "--- DEBUG INFO ---"
echo "---------------------------------------------"
debug "--- DEBUG INFO ---"
debug "---------------------------------------------"
RUNNER=$(whoami)
echo "Runner:[${RUNNER}]"
echo "ENV:"
printenv
echo "---------------------------------------------"
fi
debug "Runner:[${RUNNER}]"
PRINTENV=$(printenv)
debug "ENV:"
debug "${PRINTENV}"
debug "---------------------------------------------"
}

135 lib/worker.sh
@@ -49,14 +49,11 @@ function LintCodebase() {
##############################
if [ ${ERROR_CODE} -ne 0 ]; then
# Failed
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to find [${LINTER_NAME}] in system!${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${VALIDATE_INSTALL_CMD}]${NC}"
exit 1
error "Failed to find [${LINTER_NAME}] in system!"
fatal "[${VALIDATE_INSTALL_CMD}]"
else
# Success
if [[ ${ACTIONS_RUNNER_DEBUG} == "true" ]]; then
echo -e "${NC}${F[B]}Successfully found binary for ${F[W]}[${LINTER_NAME}]${F[B]} in system location: ${F[W]}[${VALIDATE_INSTALL_CMD}]${NC}"
fi
debug "Successfully found binary for ${F[W]}[${LINTER_NAME}]${F[B]} in system location: ${F[W]}[${VALIDATE_INSTALL_CMD}]"
fi

##########################
@@ -75,7 +72,7 @@ function LintCodebase() {
if [ ${#FILE_ARRAY[@]} -eq 0 ] && [ "${VALIDATE_ALL_CODEBASE}" == "false" ]; then
# No files found in commit and user has asked to not validate code base
SKIP_FLAG=1
# echo " - No files found in changeset to lint for language:[${FILE_TYPE}]"
debug " - No files found in changeset to lint for language:[${FILE_TYPE}]"
elif [ ${#FILE_ARRAY[@]} -ne 0 ]; then
# We have files added to array of files to check
LIST_FILES=("${FILE_ARRAY[@]}") # Copy the array into list
@@ -121,7 +118,7 @@ function LintCodebase() {
#########################
# Print the header info #
#########################
echo "${LINE}"
info "${LINE}"
done

########################################
@@ -166,8 +163,8 @@ function LintCodebase() {
##############
# File print #
##############
echo "---------------------------"
echo "File:[${FILE}]"
info "---------------------------"
info "File:[${FILE}]"

#################################
# Add the language to the array #
@@ -226,9 +223,9 @@ function LintCodebase() {
#########
# Error #
#########
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Found errors in [${LINTER_NAME}] linter!${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${LINT_CMD}]${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC} Linter CMD:[${LINTER_COMMAND} ${FILE}]${NC}"
error "Found errors in [${LINTER_NAME}] linter!"
error "[${LINT_CMD}]"
error "Linter CMD:[${LINTER_COMMAND} ${FILE}]"
# Increment the error count
(("ERRORS_FOUND_${FILE_TYPE}++"))

@@ -243,7 +240,7 @@ function LintCodebase() {
###########
# Success #
###########
echo -e "${NC}${F[B]} - File:${F[W]}[${FILE_NAME}]${F[B]} was linted with ${F[W]}[${LINTER_NAME}]${F[B]} successfully${NC}"
info " - File:${F[W]}[${FILE_NAME}]${F[B]} was linted with ${F[W]}[${LINTER_NAME}]${F[B]} successfully"

#######################################################
# Store the linting as a temporary file in TAP format #
@@ -279,13 +276,11 @@ function TestCodebase() {
################
# print header #
################
echo ""
echo "----------------------------------------------"
echo "----------------------------------------------"
echo "Testing Codebase [${FILE_TYPE}] files..."
echo "----------------------------------------------"
echo "----------------------------------------------"
echo ""
info "----------------------------------------------"
info "----------------------------------------------"
info "Testing Codebase [${FILE_TYPE}] files..."
info "----------------------------------------------"
info "----------------------------------------------"

#####################################
# Validate we have linter installed #
@@ -302,12 +297,11 @@ function TestCodebase() {
##############################
if [ ${ERROR_CODE} -ne 0 ]; then
# Failed
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to find [${LINTER_NAME}] in system!${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${VALIDATE_INSTALL_CMD}]${NC}"
exit 1
error "Failed to find [${LINTER_NAME}] in system!"
fatal "[${VALIDATE_INSTALL_CMD}]"
else
# Success
echo -e "${NC}${F[B]}Successfully found binary for ${F[W]}[${LINTER_NAME}]${F[B]} in system location: ${F[W]}[${VALIDATE_INSTALL_CMD}]${NC}"
info "Successfully found binary for ${F[W]}[${LINTER_NAME}]${F[B]} in system location: ${F[W]}[${VALIDATE_INSTALL_CMD}]"
fi

##########################
@@ -358,8 +352,8 @@ function TestCodebase() {
##############
# File print #
##############
echo "---------------------------"
echo "File:[${FILE}]"
info "---------------------------"
info "File:[${FILE}]"

########################
# Set the lint command #
@@ -454,16 +448,16 @@ function TestCodebase() {
#########
# Error #
#########
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Found errors in [${LINTER_NAME}] linter!${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${LINT_CMD}]${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC} Linter CMD:[${LINTER_COMMAND} ${FILE}]${NC}"
error "Found errors in [${LINTER_NAME}] linter!"
error "[${LINT_CMD}]"
error "Linter CMD:[${LINTER_COMMAND} ${FILE}]"
# Increment the error count
(("ERRORS_FOUND_${FILE_TYPE}++"))
else
###########
# Success #
###########
echo -e "${NC}${F[B]} - File:${F[W]}[${FILE_NAME}]${F[B]} was linted with ${F[W]}[${LINTER_NAME}]${F[B]} successfully${NC}"
info " - File:${F[W]}[${FILE_NAME}]${F[B]} was linted with ${F[W]}[${LINTER_NAME}]${F[B]} successfully"
fi
#######################################################
# Store the linting as a temporary file in TAP format #
@@ -482,18 +476,18 @@ function TestCodebase() {
#########
# Error #
#########
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Found errors in [${LINTER_NAME}] linter!${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} This file should have failed test case!${NC}"
echo -e "${NC}${B[R]}${F[W]}Command run:${NC}[\$${LINT_CMD}]${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${LINT_CMD}]${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC} Linter CMD:[${LINTER_COMMAND} ${FILE}]${NC}"
error "Found errors in [${LINTER_NAME}] linter!"
error "This file should have failed test case!"
error "Command run:${NC}[\$${LINT_CMD}]"
error "[${LINT_CMD}]"
error "Linter CMD:[${LINTER_COMMAND} ${FILE}]"
# Increment the error count
(("ERRORS_FOUND_${FILE_TYPE}++"))
else
###########
# Success #
###########
echo -e "${NC}${F[B]} - File:${F[W]}[${FILE_NAME}]${F[B]} failed test case with ${F[W]}[${LINTER_NAME}]${F[B]} successfully${NC}"
info " - File:${F[W]}[${FILE_NAME}]${F[B]} failed test case with ${F[W]}[${LINTER_NAME}]${F[B]} successfully"
fi
#######################################################
# Store the linting as a temporary file in TAP format #
@@ -523,17 +517,17 @@ function TestCodebase() {
#############################################
# We failed to compare the reporting output #
#############################################
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to assert TAP output:[${LINTER_NAME}]${NC}"!
echo "Please validate the asserts!"
error "Failed to assert TAP output:[${LINTER_NAME}]"!
info "Please validate the asserts!"
cat "${TMPFILE}"
exit 1
else
# Success
echo -e "${NC}${F[B]}Successfully validation in the expected TAP format for ${F[W]}[${LINTER_NAME}]${NC}"
info "Successfully validation in the expected TAP format for ${F[W]}[${LINTER_NAME}]"
fi
else
echo -e "${NC}${F[Y]}WARN!${NC} No TAP expected file found at:[${EXPECTED_FILE}]${NC}"
echo "skipping report assertions"
warn "No TAP expected file found at:[${EXPECTED_FILE}]"
info "skipping report assertions"
#####################################
# Append the file type to the array #
#####################################
@@ -548,9 +542,8 @@ function TestCodebase() {
#################################################
# We failed to find files and no tests were ran #
#################################################
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to find any tests ran for the Linter:[${LINTER_NAME}]${NC}"!
echo "Please validate logic or that tests exist!"
exit 1
error "Failed to find any tests ran for the Linter:[${LINTER_NAME}]"!
fatal "Please validate logic or that tests exist!"
fi
}
################################################################################
@@ -567,11 +560,9 @@ function RunTestCases() {
#################
# Header prints #
#################
echo ""
echo "----------------------------------------------"
echo "-------------- TEST CASE RUN -----------------"
echo "----------------------------------------------"
echo ""
info "----------------------------------------------"
info "-------------- TEST CASE RUN -----------------"
info "----------------------------------------------"

#######################
# Test case languages #
@@ -595,15 +586,18 @@ function RunTestCases() {
TestCodebase "JAVASCRIPT_STANDARD" "standard" "standard ${JAVASCRIPT_STANDARD_LINTER_RULES}" ".*\.\(js\)\$" "javascript"
TestCodebase "JSON" "jsonlint" "jsonlint" ".*\.\(json\)\$" "json"
TestCodebase "KOTLIN" "ktlint" "ktlint" ".*\.\(kt\|kts\)\$" "kotlin"
TestCodebase "LUA" "lua" "luacheck" ".*\.\(lua\)\$" "lua"
TestCodebase "MARKDOWN" "markdownlint" "markdownlint -c ${MARKDOWN_LINTER_RULES}" ".*\.\(md\)\$" "markdown"
TestCodebase "PERL" "perl" "perl -Mstrict -cw" ".*\.\(pl\)\$" "perl"
TestCodebase "PHP_BUILTIN" "php" "php -l" ".*\.\(php\)\$" "php"
TestCodebase "PHP_PHPCS" "phpcs" "phpcs --standard=${PHP_PHPCS_LINTER_RULES}" ".*\.\(php\)\$" "php"
TestCodebase "PHP_PHPSTAN" "phpstan" "phpstan analyse --no-progress --no-ansi -c ${PHPSTAN_LINTER_RULES}" ".*\.\(php\)\$" "php"
TestCodebase "PHP_PSALM" "psalm" "psalm --config=${PHP_PSALM_LINTER_RULES}" ".*\.\(php\)\$" "php"
TestCodebase "OPENAPI" "spectral" "spectral lint -r ${OPENAPI_LINTER_RULES}" ".*\.\(ymlopenapi\|jsonopenapi\)\$" "openapi"
TestCodebase "POWERSHELL" "pwsh" "Invoke-ScriptAnalyzer -EnableExit -Settings ${POWERSHELL_LINTER_RULES} -Path" ".*\.\(ps1\|psm1\|psd1\|ps1xml\|pssc\|psrc\|cdxml\)\$" "powershell"
TestCodebase "PROTOBUF" "protolint" "protolint lint --config_path ${PROTOBUF_LINTER_RULES}" ".*\.\(proto\)\$" "protobuf"
TestCodebase "PYTHON" "pylint" "pylint --rcfile ${PYTHON_LINTER_RULES}" ".*\.\(py\)\$" "python"
TestCodebase "PYTHON_PYLINT" "pylint" "pylint --rcfile ${PYTHON_PYLINT_LINTER_RULES}" ".*\.\(py\)\$" "python"
TestCodebase "PYTHON_FLAKE8" "flake8" "flake8 --config ${PYTHON_FLAKE8_LINTER_RULES}" ".*\.\(py\)\$" "python"
TestCodebase "RAKU" "raku" "raku -c" ".*\.\(raku\|rakumod\|rakutest\|pm6\|pl6\|p6\)\$" "raku"
TestCodebase "RUBY" "rubocop" "rubocop -c ${RUBY_LINTER_RULES}" ".*\.\(rb\)\$" "ruby"
TestCodebase "STATES" "asl-validator" "asl-validator --json-path" ".*\.\(json\)\$" "states"
@@ -612,7 +606,7 @@ function RunTestCases() {
TestCodebase "TYPESCRIPT_ES" "eslint" "eslint --no-eslintrc -c ${TYPESCRIPT_LINTER_RULES}" ".*\.\(ts\)\$" "typescript"
TestCodebase "TYPESCRIPT_STANDARD" "standard" "standard --parser @typescript-eslint/parser --plugin @typescript-eslint/eslint-plugin ${TYPESCRIPT_STANDARD_LINTER_RULES}" ".*\.\(ts\)\$" "typescript"
TestCodebase "XML" "xmllint" "xmllint" ".*\.\(xml\)\$" "xml"
TestCodebase "YML" "yamllint" "yamllint -c ${YAML_LINTER_RULES}" ".*\.\(yml\|yaml\)\$" "yml"
TestCodebase "YAML" "yamllint" "yamllint -c ${YAML_LINTER_RULES}" ".*\.\(yml\|yaml\)\$" "yaml"

#################
# Footer prints #
@@ -659,16 +653,12 @@ function LintAnsibleFiles() {
##############################
if [ ${ERROR_CODE} -ne 0 ]; then
# Failed
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to find ${LINTER_NAME} in system!${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${VALIDATE_INSTALL_CMD}]${NC}"
exit 1
error "Failed to find ${LINTER_NAME} in system!"
fatal "[${VALIDATE_INSTALL_CMD}]"
else
# Success
if [[ ${ACTIONS_RUNNER_DEBUG} == "true" ]]; then
# Success
echo -e "${NC}${F[B]}Successfully found binary in system${NC}"
echo "Location:[${VALIDATE_INSTALL_CMD}]"
fi
debug "Successfully found binary in system"
debug "Location:[${VALIDATE_INSTALL_CMD}]"
fi

##########################
@@ -703,7 +693,7 @@ function LintAnsibleFiles() {
###################################
# Send message that were skipping #
###################################
#echo "- Skipping Ansible lint run as file(s) that were modified were read only..."
debug "- Skipping Ansible lint run as file(s) that were modified were read only..."
############################
# Create flag to skip loop #
############################
@@ -718,7 +708,7 @@ function LintAnsibleFiles() {
#########################
# Print the header line #
#########################
echo "${LINE}"
info "${LINE}"
done
fi

@@ -758,8 +748,8 @@ function LintAnsibleFiles() {
##############
# File print #
##############
echo "---------------------------"
echo "File:[${FILE}]"
info "---------------------------"
info "File:[${FILE}]"

################################
# Lint the file with the rules #
@@ -778,8 +768,8 @@ function LintAnsibleFiles() {
#########
# Error #
#########
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Found errors in [${LINTER_NAME}] linter!${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${LINT_CMD}]${NC}"
error "Found errors in [${LINTER_NAME}] linter!"
error "[${LINT_CMD}]"
# Increment error count
((ERRORS_FOUND_ANSIBLE++))

@@ -795,7 +785,7 @@ function LintAnsibleFiles() {
###########
# Success #
###########
echo -e "${NC}${F[B]} - File:${F[W]}[${FILE_NAME}]${F[B]} was linted with ${F[W]}[${LINTER_NAME}]${F[B]} successfully${NC}"
info " - File:${F[W]}[${FILE_NAME}]${F[B]} was linted with ${F[W]}[${LINTER_NAME}]${F[B]} successfully"

#######################################################
# Store the linting as a temporary file in TAP format #
@@ -813,17 +803,12 @@ function LintAnsibleFiles() {
HeaderTap "${INDEX}" "${REPORT_OUTPUT_FILE}"
cat "${TMPFILE}" >> "${REPORT_OUTPUT_FILE}"
fi
else # No ansible directory found in path
###############################
# Check to see if debug is on #
###############################
if [[ ${ACTIONS_RUNNER_DEBUG} == "true" ]]; then
else
########################
# No Ansible dir found #
########################
echo -e "${NC}${F[Y]}WARN!${NC} No Ansible base directory found at:[${ANSIBLE_DIRECTORY}]${NC}"
echo "skipping ansible lint"
fi
warn "No Ansible base directory found at:[${ANSIBLE_DIRECTORY}]"
debug "skipping ansible lint"
fi
}
################################################################################