Merge branch 'support-for-phpcs-and-psalm' of https://github.com/filips123/super-linter into support-for-phpcs-and-psalm

Commit 364f31f2e3 by Filip Š, 2020-08-03 21:43:38 +02:00
35 changed files with 2019 additions and 1574 deletions

View file

@ -13,6 +13,12 @@
# - System with Docker installed
# - Global variables met
#########################
# Source Function Files #
#########################
# shellcheck source=/dev/null
source ../lib/log.sh # Source the function script(s)
###########
# Globals #
###########
@ -29,11 +35,9 @@ DOCKERFILE_PATH="${DOCKERFILE_PATH}" # Path to the Dockerfile to be uploaded
################################################################################
#### Function Header ###########################################################
Header() {
echo ""
echo "-------------------------------------------------------"
echo "----- GitHub Actions remove image from DockerHub ------"
echo "-------------------------------------------------------"
echo ""
info "-------------------------------------------------------"
info "----- GitHub Actions remove image from DockerHub ------"
info "-------------------------------------------------------"
}
################################################################################
#### Function ValidateInput ####################################################
@ -42,21 +46,18 @@ ValidateInput() {
################
# Print header #
################
echo ""
echo "----------------------------------------------"
echo "Gathering variables..."
echo "----------------------------------------------"
echo ""
info "----------------------------------------------"
info "Gathering variables..."
info "----------------------------------------------"
############################
# Validate GITHUB_WORKSPACE #
############################
if [ -z "${GITHUB_WORKSPACE}" ]; then
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to get [GITHUB_WORKSPACE]!${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${GITHUB_WORKSPACE}]${NC}"
exit 1
error "Failed to get [GITHUB_WORKSPACE]!"
fatal "[${GITHUB_WORKSPACE}]"
else
echo "Successfully found:[GITHUB_WORKSPACE], value:[${GITHUB_WORKSPACE}]"
info "Successfully found:[GITHUB_WORKSPACE], value:[${GITHUB_WORKSPACE}]"
fi
#######################
@ -64,15 +65,14 @@ ValidateInput() {
#######################
if [ -z "${IMAGE_REPO}" ]; then
# No repo was pulled
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to get [IMAGE_REPO]!${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${IMAGE_REPO}]${NC}"
exit 1
error "Failed to get [IMAGE_REPO]!"
fatal "[${IMAGE_REPO}]"
elif [[ ${IMAGE_REPO} == "github/super-linter" ]]; then
# Found our main repo
echo "Successfully found:[IMAGE_REPO], value:[${IMAGE_REPO}]"
info "Successfully found:[IMAGE_REPO], value:[${IMAGE_REPO}]"
else
# This is a fork and we cant pull vars or any info
echo -e "${NC}${F[Y]}WARN!${NC} No image to cleanup as this is a forked branch, and not being built with current automation!${NC}"
warn "No image to cleanup as this is a forked branch, and not being built with current automation!"
exit 0
fi
@ -80,33 +80,30 @@ ValidateInput() {
# Validate IMAGE_VERSION #
##########################
if [ -z "${IMAGE_VERSION}" ]; then
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to get [IMAGE_VERSION]!${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${IMAGE_VERSION}]${NC}"
exit 1
error "Failed to get [IMAGE_VERSION]!"
fatal "[${IMAGE_VERSION}]"
else
echo "Successfully found:[IMAGE_VERSION], value:[${IMAGE_VERSION}]"
info "Successfully found:[IMAGE_VERSION], value:[${IMAGE_VERSION}]"
fi
############################
# Validate DOCKER_USERNAME #
############################
if [ -z "${DOCKER_USERNAME}" ]; then
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to get [DOCKER_USERNAME]!${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${DOCKER_USERNAME}]${NC}"
exit 1
error "Failed to get [DOCKER_USERNAME]!"
fatal "[${DOCKER_USERNAME}]"
else
echo "Successfully found:[DOCKER_USERNAME], value:[${DOCKER_USERNAME}]"
info "Successfully found:[DOCKER_USERNAME], value:[${DOCKER_USERNAME}]"
fi
############################
# Validate DOCKER_PASSWORD #
############################
if [ -z "${DOCKER_PASSWORD}" ]; then
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to get [DOCKER_PASSWORD]!${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${DOCKER_PASSWORD}]${NC}"
exit 1
error "Failed to get [DOCKER_PASSWORD]!"
fatal "[${DOCKER_PASSWORD}]"
else
echo "Successfully found:[DOCKER_PASSWORD], value:[********]"
info "Successfully found:[DOCKER_PASSWORD], value:[********]"
fi
##################################################
@ -121,10 +118,9 @@ ValidateInput() {
#############################################
# Image is 'latest' and we will not destroy #
#############################################
echo "Image Tag is set to:[latest]..."
echo "We will never destroy latest..."
echo "Bye!"
exit 1
error "Image Tag is set to:[latest]..."
error "We will never destroy latest..."
fatal "Bye!"
fi
}
################################################################################
@ -133,11 +129,9 @@ LoginToDocker() {
################
# Print header #
################
echo ""
echo "----------------------------------------------"
echo "Login to DockerHub..."
echo "----------------------------------------------"
echo ""
info "----------------------------------------------"
info "Login to DockerHub..."
info "----------------------------------------------"
######################
# Login to DockerHub #
@ -154,12 +148,11 @@ LoginToDocker() {
##############################
if [ ${ERROR_CODE} -ne 0 ]; then
# ERROR
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to authenticate to DockerHub!${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${LOGIN_CMD}]${NC}"
exit 1
error "Failed to authenticate to DockerHub!"
fatal "[${LOGIN_CMD}]"
else
# SUCCESS
echo "Successfully authenticated to DockerHub!"
info "Successfully authenticated to DockerHub!"
fi
}
################################################################################
@ -168,11 +161,9 @@ RemoveImage() {
################
# Print header #
################
echo ""
echo "----------------------------------------------"
echo "Removing the DockerFile image:[${IMAGE_REPO}:${IMAGE_VERSION}]"
echo "----------------------------------------------"
echo ""
info "----------------------------------------------"
info "Removing the DockerFile image:[${IMAGE_REPO}:${IMAGE_VERSION}]"
info "----------------------------------------------"
#####################################
# Create Token to auth to DockerHub #
@ -193,12 +184,11 @@ RemoveImage() {
##############################
if [ ${ERROR_CODE} -ne 0 ]; then
# ERROR
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to gain token from DockerHub!${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${TOKEN}]${NC}"
exit 1
error "Failed to gain token from DockerHub!"
fatal "[${TOKEN}]"
else
# SUCCESS
echo "Successfully gained auth token from DockerHub!"
info "Successfully gained auth token from DockerHub!"
fi
#################################
@ -218,22 +208,19 @@ RemoveImage() {
##############################
if [ ${ERROR_CODE} -ne 0 ]; then
# ERROR
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to remove tag from DockerHub!${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${REMOVE_CMD}]${NC}"
exit 1
error "Failed to remove tag from DockerHub!"
fatal "[${REMOVE_CMD}]"
else
# SUCCESS
echo "Successfully [removed] Docker image tag:[${IMAGE_VERSION}] from DockerHub!"
info "Successfully [removed] Docker image tag:[${IMAGE_VERSION}] from DockerHub!"
fi
}
################################################################################
#### Function Footer ###########################################################
Footer() {
echo ""
echo "-------------------------------------------------------"
echo "The step has completed"
echo "-------------------------------------------------------"
echo ""
info "-------------------------------------------------------"
info "The step has completed"
info "-------------------------------------------------------"
}
################################################################################
################################## MAIN ########################################
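
Throughout this commit the raw `echo` calls in the automation scripts are replaced with `info`, `warn`, `error`, and `fatal` helpers sourced from `../lib/log.sh`, which is not shown in this diff. As a rough sketch only (the function names and the `exit 1` behavior are inferred from how the calls are used above; the `LOG_FILE` handling is an assumption), such a logging library could look like this:

```bash
#!/usr/bin/env bash
# Hypothetical sketch of lib/log.sh; the real implementation may differ.
# Inferred behavior: info/warn/error print leveled messages, fatal prints an
# error and exits non-zero (replacing the old `echo ... ; exit 1` pairs), and
# output is mirrored to ${LOG_FILE} when that variable is set.

log() {
  local LEVEL="$1"
  shift
  local MESSAGE="[${LEVEL}] $*"
  echo -e "${MESSAGE}"
  if [ -n "${LOG_FILE:-}" ]; then
    echo -e "${MESSAGE}" >>"${LOG_FILE}"
  fi
}

info()  { log "INFO" "$@"; }
warn()  { log "WARN" "$@"; }
error() { log "ERROR" "$@"; }
fatal() { log "FATAL" "$@"; exit 1; }
```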

View file

@ -28,17 +28,17 @@ CheckGHEPid()
##################################
if [ ${PID_CHECK} -gt ${PID_CHECK_LIMIT} ]; then
# Over the limit, move on
echo "We have checked the pid ${PID_CHECK} times, moving on..."
info "We have checked the pid ${PID_CHECK} times, moving on..."
else
################################################
# Check to see if the PID is alive and running #
################################################
if [ ! -f "${GHE_CONFIG_PID}" ]; then
# File not found
echo "We're good to move forward, no .pid file found at:[${GHE_CONFIG_PID}]"
info "We're good to move forward, no .pid file found at:[${GHE_CONFIG_PID}]"
else
# Found the pid running, need to sleep
echo "Current PID found, sleeping ${SLEEP_SECONDS} seconds before next check..."
info "Current PID found, sleeping ${SLEEP_SECONDS} seconds before next check..."
################
# Sleep it off #
################
@ -53,9 +53,9 @@ CheckGHEPid()
# Check the shell for errors #
##############################
if [ ${ERROR_CODE} -ne 0 ]; then
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to sleep!${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${SLEEP_CMD}]${NC}"
echo "Will try to call apply as last effort..."
error "Failed to sleep!"
error "[${SLEEP_CMD}]"
info "Will try to call apply as last effort..."
####################################
# Call config apply as last effort #
####################################
@ -82,7 +82,7 @@ CheckGHEProcess()
##################################
if [ ${PROCESS_CHECK} -gt ${PROCESS_CHECK_LIMIT} ]; then
# Over the limit, move on
echo "We have checked the process ${PROCESS_CHECK} times, moving on..."
info "We have checked the process ${PROCESS_CHECK} times, moving on..."
else
####################################################
# Check to see if the process is alive and running #
@ -99,10 +99,10 @@ CheckGHEProcess()
##############################
if [ ${ERROR_CODE} -ne 0 ]; then
# No process running on the system
echo "Were good to move forward, no process like:[${GHE_APPLY_COMMAND}] running currently on the system"
info "Were good to move forward, no process like:[${GHE_APPLY_COMMAND}] running currently on the system"
else
# Found the process running, need to sleep
echo "Current process alive:[${CHECK_PROCESS_CMD}], sleeping ${SLEEP_SECONDS} seconds before next check..."
info "Current process alive:[${CHECK_PROCESS_CMD}], sleeping ${SLEEP_SECONDS} seconds before next check..."
################
# Sleep it off #
################
@ -117,9 +117,9 @@ CheckGHEProcess()
# Check the shell for errors #
##############################
if [ ${ERROR_CODE} -ne 0 ]; then
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to sleep!${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${SLEEP_CMD}]${NC}"
echo "Will try to call apply as last effort..."
error "Failed to sleep!"
error "[${SLEEP_CMD}]"
info "Will try to call apply as last effort..."
####################################
# Call config apply as last effort #
####################################
@ -144,7 +144,7 @@ RunConfigApply()
##########
# Header #
##########
echo "Running ${GHE_APPLY_COMMAND} to the server..."
info "Running ${GHE_APPLY_COMMAND} to the server..."
##############################################
# Run the command to apply changes to server #
@ -161,12 +161,11 @@ RunConfigApply()
##############################
if [ ${ERROR_CODE} -ne 0 ]; then
# Errors
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to run config apply command!${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${APPLY_CMD}]${NC}"
exit 1
error "Failed to run config apply command!"
fatal "[${APPLY_CMD}]"
else
# Success
echo -e "${NC}${F[B]}Successfully ran ${F[C]}${GHE_APPLY_COMMAND}${NC}"
info "Successfully ran ${F[C]}${GHE_APPLY_COMMAND}"
fi
}
################################################################################

View file

@ -0,0 +1,13 @@
# Lua Test Cases
This folder holds the test cases for **Lua**.
## Additional Docs
No additional information is needed for this test case.
## Good Test Cases
The test cases denoted: `LANGUAGE_good_FILE.EXTENSION` are all valid, and should pass successfully when linted.
- **Note:** They are linted utilizing the default linter rules.
## Bad Test Cases
The test cases denoted: `LANGUAGE_bad_FILE.EXTENSION` are **NOT** valid, and should trigger errors when linted.
- **Note:** They are linted utilizing the default linter rules.
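
To reproduce what the action does against these fixtures by hand, luacheck can be run directly. A minimal sketch, assuming luacheck is installed (this commit's Dockerfile builds it via LuaRocks), that the default rules boil down to the `--std max` setting from `.luacheckrc`, and that the fixtures live under `.automation/test/lua/` like the other languages:

```bash
# Good fixtures should lint cleanly; bad fixtures should report warnings/errors.
luacheck --std max .automation/test/lua/lua_good_1.lua
luacheck --std max .automation/test/lua/lua_bad_1.lua
```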

View file

@ -0,0 +1,9 @@
package.loaded[...] = {}
local function helper(...)
-- NYI
end
function embrace(opt)
local opt = opt or "default"
return hepler(opt.."?")

View file

@ -0,0 +1,12 @@
local embracer = {}
local function helper()
-- NYI wontfix
end
function embracer.embrace(opt)
opt = opt or "default"
return helper(opt.."?")
end
return embracer

View file

@ -0,0 +1,11 @@
TAP version 13
1..3
not ok 1 - php_bad_1.php
---
message: PHP Parse error syntax error, unexpected 'pe98y' (T_STRING) in /tmp/lint/.automation/test/php/php_bad_1.php on line 3\nErrors parsing /tmp/lint/.automation/test/php/php_bad_1.php\n
...
not ok 2 - php_bad_2.php
---
message: PHP Parse error syntax error, unexpected end of file, expecting variable (T_VARIABLE) or ${ (T_DOLLAR_OPEN_CURLY_BRACES) or {$ (T_CURLY_OPEN) in /tmp/lint/.automation/test/php/php_bad_2.php on line 5\nErrors parsing /tmp/lint/.automation/test/php/php_bad_2.php\n
...
ok 3 - php_good_1.php

View file

@ -0,0 +1,11 @@
TAP version 13
1..3
not ok 1 - php_bad_1.php
---
message: ------ ---------------------------------------------- \n Line php_bad_1.php \n ------ ---------------------------------------------- \n 3 Invalid numeric literal on line 3 \n 3 Invalid numeric literal on line 3 \n 3 Syntax error, unexpected '=' on line 3 \n 3 Syntax error, unexpected T_LNUMBER on line 3 \n 3 Syntax error, unexpected T_STRING on line 3 \n 3 Syntax error, unexpected T_STRING on line 3 \n ------ ---------------------------------------------- \n\n [ERROR] Found 6 errors \n
...
not ok 2 - php_bad_2.php
---
message: ------ ------------------------------------------------------- \n Line php_bad_2.php \n ------ ------------------------------------------------------- \n 5 Syntax error, unexpected EOF, expecting T_VARIABLE or \n T_DOLLAR_OPEN_CURLY_BRACES or T_CURLY_OPEN on line 5 \n ------ ------------------------------------------------------- \n\n [ERROR] Found 1 error \n
...
ok 3 - php_good_1.php

View file

@ -3,9 +3,9 @@ from os import getenv, path
from pprint import pprint
import sys
import click # pylint: disable=import-error
from dotenv import load_dotenv # pylint: disable=import-error
import requests # pylint: disable=import-error
import click # pylint: disable=import-error
from dotenv import load_dotenv # pylint: disable=import-error
import requests # pylint: disable=import-error
env = load_dotenv()
api_url = getenv('API_URL', default='https://api.github.com/graphql')
@ -13,8 +13,8 @@ github_token = getenv("GITHUB_TOKEN", default=None)
if github_token is None:
sys.exit("GitHub Token is not set." +
"Please set the GITHUB_TOKEN env variable in your system or " +
"the .env file of your project.")
"Please set the GITHUB_TOKEN env variable in your system or " +
"the .env file of your project.")
client_id = getenv('CLIENT_ID', default='copy_labels.py')
headers = {
@ -23,6 +23,7 @@ headers = {
'Content-Type': 'application/json'
}
def create_label(repo_id, label):
"""
Create label in the supplied repo.
@ -52,6 +53,7 @@ def create_label(repo_id, label):
return response
def get_labels(owner, repo):
"""
Gets a list of labels from the supplied repo.
@ -62,7 +64,7 @@ def get_labels(owner, repo):
:return: A tuple with the GitHub id for the repository and a list of labels defined in the repository
"""
query_variables = { "owner": owner, "name": repo, }
query_variables = {"owner": owner, "name": repo, }
with open(path.join(path.dirname(__file__), 'queries/get_repo_data.gql'), 'r') as query_file:
query = "".join(query_file.readlines())
@ -83,6 +85,7 @@ def get_labels(owner, repo):
'[ERROR] getting issue labels. Status Code: {status_code} - Message: {result}'.format(
status_code=status_code, result=result["message"]))
def delete_label(label_id):
"""
Delete the specified label
@ -106,6 +109,7 @@ def delete_label(label_id):
return result
@click.command()
@click.option('--dry', is_flag=True)
@click.argument('source_repo')
@ -149,7 +153,8 @@ def copy_labels(source_repo, target_repo, dry):
print('Done')
if __name__ == "__main__":
# Pylint doesn't know that @click.command takes care of injecting the
# function parameters. Disabling Pylint error.
copy_labels() # pylint: disable=no-value-for-parameter
copy_labels() # pylint: disable=no-value-for-parameter

View file

@ -1,6 +1,6 @@
# Yml Test Cases
# Yaml Test Cases
This folder holds the test cases for **Yml**.
This folder holds the test cases for **Yaml**.
## Additional Docs

View file

@ -14,6 +14,12 @@
# - System with Docker installed
# - Global variables met
#########################
# Source Function Files #
#########################
# shellcheck source=/dev/null
source ../lib/log.sh # Source the function script(s)
###########
# Globals #
###########
@ -35,11 +41,9 @@ UPDATE_MAJOR_TAG=0 # Flag to deploy the major tag version as
################################################################################
#### Function Header ###########################################################
Header() {
echo ""
echo "-------------------------------------------------------"
echo "---- GitHub Actions Upload image to [${REGISTRY}] ----"
echo "-------------------------------------------------------"
echo ""
info "-------------------------------------------------------"
info "---- GitHub Actions Upload image to [${REGISTRY}] ----"
info "-------------------------------------------------------"
}
################################################################################
#### Function ValidateInput ####################################################
@ -48,32 +52,28 @@ ValidateInput() {
################
# Print header #
################
echo ""
echo "----------------------------------------------"
echo "Gathering variables..."
echo "----------------------------------------------"
echo ""
info "----------------------------------------------"
info "Gathering variables..."
info "----------------------------------------------"
#############################
# Validate GITHUB_WORKSPACE #
#############################
if [ -z "${GITHUB_WORKSPACE}" ]; then
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to get [GITHUB_WORKSPACE]!${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${GITHUB_WORKSPACE}]${NC}"
exit 1
error "Failed to get [GITHUB_WORKSPACE]!"
fatal "[${GITHUB_WORKSPACE}]"
else
echo -e "${NC}${F[B]}Successfully found:${F[W]}[GITHUB_WORKSPACE]${F[B]}, value:${F[W]}[${GITHUB_WORKSPACE}]${NC}"
info "Successfully found:${F[W]}[GITHUB_WORKSPACE]${F[B]}, value:${F[W]}[${GITHUB_WORKSPACE}]"
fi
#####################
# Validate REGISTRY #
#####################
if [ -z "${REGISTRY}" ]; then
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to get [REGISTRY]!${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${REGISTRY}]${NC}"
exit 1
error "Failed to get [REGISTRY]!"
fatal "[${REGISTRY}]"
else
echo -e "${NC}${F[B]}Successfully found:${F[W]}[REGISTRY]${F[B]}, value:${F[W]}[${REGISTRY}]${NC}"
info "Successfully found:${F[W]}[REGISTRY]${F[B]}, value:${F[W]}[${REGISTRY}]"
fi
#####################################################
@ -84,22 +84,20 @@ ValidateInput() {
# Validate GPR_USERNAME #
#########################
if [ -z "${GPR_USERNAME}" ]; then
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to get [GPR_USERNAME]!${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${GPR_USERNAME}]${NC}"
exit 1
error "Failed to get [GPR_USERNAME]!"
fatal "[${GPR_USERNAME}]"
else
echo -e "${NC}${F[B]}Successfully found:${F[W]}[GPR_USERNAME]${F[B]}, value:${F[W]}[${GPR_USERNAME}]${NC}"
info "Successfully found:${F[W]}[GPR_USERNAME]${F[B]}, value:${F[W]}[${GPR_USERNAME}]"
fi
######################
# Validate GPR_TOKEN #
######################
if [ -z "${GPR_TOKEN}" ]; then
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to get [GPR_TOKEN]!${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${GPR_TOKEN}]${NC}"
exit 1
error "Failed to get [GPR_TOKEN]!"
fatal "[${GPR_TOKEN}]"
else
echo -e "${NC}${F[B]}Successfully found:${F[W]}[GPR_TOKEN]${F[B]}, value:${F[W]}[********]${NC}"
info "Successfully found:${F[W]}[GPR_TOKEN]${F[B]}, value:${F[W]}[********]"
fi
########################################
# See if we need values for Docker Hub #
@ -109,48 +107,44 @@ ValidateInput() {
# Validate DOCKER_USERNAME #
############################
if [ -z "${DOCKER_USERNAME}" ]; then
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to get [DOCKER_USERNAME]!${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${DOCKER_USERNAME}]${NC}"
exit 1
error "Failed to get [DOCKER_USERNAME]!"
fatal "[${DOCKER_USERNAME}]"
else
echo -e "${NC}${F[B]}Successfully found:${F[W]}[DOCKER_USERNAME]${F[B]}, value:${F[W]}[${DOCKER_USERNAME}]${NC}"
info "Successfully found:${F[W]}[DOCKER_USERNAME]${F[B]}, value:${F[W]}[${DOCKER_USERNAME}]"
fi
############################
# Validate DOCKER_PASSWORD #
############################
if [ -z "${DOCKER_PASSWORD}" ]; then
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to get [DOCKER_PASSWORD]!${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${DOCKER_PASSWORD}]${NC}"
exit 1
error "Failed to get [DOCKER_PASSWORD]!"
fatal "[${DOCKER_PASSWORD}]"
else
echo -e "${NC}${F[B]}Successfully found:${F[W]}[DOCKER_PASSWORD]${F[B]}, value:${F[B]}[********]${NC}"
info "Successfully found:${F[W]}[DOCKER_PASSWORD]${F[B]}, value:${F[B]}[********]"
fi
###########################################
# We were not passed a registry to update #
###########################################
else
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to find a valid registry!${NC}"
echo "Registry:[${REGISTRY}]"
exit 1
error "Failed to find a valid registry!"
fatal "Registry:[${REGISTRY}]"
fi
#######################
# Validate IMAGE_REPO #
#######################
if [ -z "${IMAGE_REPO}" ]; then
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to get [IMAGE_REPO]!${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${IMAGE_REPO}]${NC}"
exit 1
error "Failed to get [IMAGE_REPO]!"
fatal "[${IMAGE_REPO}]"
else
echo -e "${NC}${F[B]}Successfully found:${F[W]}[IMAGE_REPO]${F[B]}, value:${F[W]}[${IMAGE_REPO}]${NC}"
info "Successfully found:${F[W]}[IMAGE_REPO]${F[B]}, value:${F[W]}[${IMAGE_REPO}]"
###############################################
# Need to see if GPR registry and update name #
###############################################
if [[ ${REGISTRY} == "GPR" ]]; then
NAME="docker.pkg.github.com/${IMAGE_REPO}/super-linter"
IMAGE_REPO="${NAME}"
echo "Updated [IMAGE_REPO] to:[${IMAGE_REPO}] for GPR"
info "Updated [IMAGE_REPO] to:[${IMAGE_REPO}] for GPR"
fi
fi
@ -158,8 +152,8 @@ ValidateInput() {
# Validate IMAGE_VERSION #
##########################
if [ -z "${IMAGE_VERSION}" ]; then
echo -e "${NC}${F[Y]}WARN!${NC} Failed to get [IMAGE_VERSION]!${NC}"
echo "Pulling from Branch Name..."
warn "Failed to get [IMAGE_VERSION]!"
info "Pulling from Branch Name..."
##############################
# Get the name of the branch #
##############################
@ -174,9 +168,8 @@ ValidateInput() {
# Check the shell for errors #
##############################
if [ ${ERROR_CODE} -ne 0 ]; then
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to get branch name!${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${BRANCH_NAME}]${NC}"
exit 1
error "Failed to get branch name!"
fatal "[${BRANCH_NAME}]"
fi
##################################
@ -188,9 +181,9 @@ ValidateInput() {
# Set the IMAGE_VERSION to the BRANCH_NAME #
############################################
IMAGE_VERSION="${BRANCH_NAME}"
echo "Tag:[${IMAGE_VERSION}]"
info "Tag:[${IMAGE_VERSION}]"
else
echo -e "${NC}${F[B]}Successfully found:${F[W]}[IMAGE_VERSION]${F[B]}, value:${F[W]}[${IMAGE_VERSION}]${NC}"
info "Successfully found:${F[W]}[IMAGE_VERSION]${F[B]}, value:${F[W]}[${IMAGE_VERSION}]"
fi
##################################
@ -214,18 +207,17 @@ ValidateInput() {
###################################
UPDATE_MAJOR_TAG=1
echo "- Also deploying a major tag of:[${MAJOR_TAG}]"
info "- Also deploying a major tag of:[${MAJOR_TAG}]"
fi
############################
# Validate DOCKERFILE_PATH #
############################
if [ -z "${DOCKERFILE_PATH}" ]; then
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to get [DOCKERFILE_PATH]!${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${DOCKERFILE_PATH}]${NC}"
exit 1
error "Failed to get [DOCKERFILE_PATH]!"
fatal "[${DOCKERFILE_PATH}]"
else
echo -e "${NC}${F[B]}Successfully found:${F[W]}[DOCKERFILE_PATH]${F[B]}, value:${F[W]}[${DOCKERFILE_PATH}]${NC}"
info "Successfully found:${F[W]}[DOCKERFILE_PATH]${F[B]}, value:${F[W]}[${DOCKERFILE_PATH}]"
fi
}
################################################################################
@ -242,11 +234,9 @@ Authenticate() {
################
# Print header #
################
echo ""
echo "----------------------------------------------"
echo "Login to ${NAME}..."
echo "----------------------------------------------"
echo ""
info "----------------------------------------------"
info "Login to ${NAME}..."
info "----------------------------------------------"
###################
# Auth to service #
@ -263,12 +253,11 @@ Authenticate() {
##############################
if [ ${ERROR_CODE} -ne 0 ]; then
# ERROR
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to authenticate to ${NAME}!${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${LOGIN_CMD}]${NC}"
exit 1
error "Failed to authenticate to ${NAME}!"
fatal "[${LOGIN_CMD}]"
else
# SUCCESS
echo -e "${NC}${F[B]}Successfully authenticated to ${F[C]}${NAME}${F[B]}!${NC}"
info "Successfully authenticated to ${F[C]}${NAME}${F[B]}!"
fi
}
################################################################################
@ -277,21 +266,18 @@ BuildImage() {
################
# Print header #
################
echo ""
echo "----------------------------------------------"
echo "Building the DockerFile image..."
echo "----------------------------------------------"
echo ""
info "----------------------------------------------"
info "Building the DockerFile image..."
info "----------------------------------------------"
################################
# Validate the DOCKERFILE_PATH #
################################
if [ ! -f "${DOCKERFILE_PATH}" ]; then
# No file found
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} failed to find Dockerfile at:[${DOCKERFILE_PATH}]${NC}"
echo "Please make sure you give full path!"
echo "Example:[/configs/Dockerfile] or [Dockerfile] if at root directory"
exit 1
error "failed to find Dockerfile at:[${DOCKERFILE_PATH}]"
error "Please make sure you give full path!"
fatal "Example:[/configs/Dockerfile] or [Dockerfile] if at root directory"
fi
###################
@ -309,11 +295,10 @@ BuildImage() {
##############################
if [ ${ERROR_CODE} -ne 0 ]; then
# ERROR
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} failed to [build] Dockerfile!${NC}"
exit 1
fatal "failed to [build] Dockerfile!"
else
# SUCCESS
echo -e "${NC}${F[B]}Successfully Built image!${NC}"
info "Successfully Built image!"
fi
########################################################
@ -333,11 +318,10 @@ BuildImage() {
##############################
if [ ${ERROR_CODE} -ne 0 ]; then
# ERROR
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} failed to [tag] Dockerfile!${NC}"
exit 1
fatal "failed to [tag] Dockerfile!"
else
# SUCCESS
echo -e "${NC}${F[B]}Successfully tagged image!${NC}"
info "Successfully tagged image!"
fi
fi
}
@ -347,11 +331,9 @@ UploadImage() {
################
# Print header #
################
echo ""
echo "----------------------------------------------"
echo "Uploading the DockerFile image to ${REGISTRY}..."
echo "----------------------------------------------"
echo ""
info "----------------------------------------------"
info "Uploading the DockerFile image to ${REGISTRY}..."
info "----------------------------------------------"
############################################
# Upload the docker image that was created #
@ -368,11 +350,10 @@ UploadImage() {
##############################
if [ ${ERROR_CODE} -ne 0 ]; then
# ERROR
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} failed to [upload] Dockerfile!${NC}"
exit 1
fatal "failed to [upload] Dockerfile!"
else
# SUCCESS
echo -e "${NC}${F[B]}Successfully Uploaded Docker image:${F[W]}[${IMAGE_VERSION}]${F[B]} to ${F[C]}${REGISTRY}${F[B]}!${NC}"
info "Successfully Uploaded Docker image:${F[W]}[${IMAGE_VERSION}]${F[B]} to ${F[C]}${REGISTRY}${F[B]}!"
fi
#########################
@ -391,9 +372,8 @@ UploadImage() {
##############################
if [ ${ERROR_CODE} -ne 0 ]; then
# ERROR
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to get information about built Image!${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${GET_INFO_CMD}]${NC}"
exit 1
error "Failed to get information about built Image!"
fatal "[${GET_INFO_CMD}]"
else
################
# Get the data #
@ -406,13 +386,13 @@ UploadImage() {
###################
# Print the goods #
###################
echo "----------------------------------------------"
echo "Docker Image Details:"
echo "Repository:[${REPO}]"
echo "Tag:[${TAG}]"
echo "Image_ID:[${IMAGE_ID}]"
echo "Size:[${SIZE}]"
echo "----------------------------------------------"
info "----------------------------------------------"
info "Docker Image Details:"
info "Repository:[${REPO}]"
info "Tag:[${TAG}]"
info "Image_ID:[${IMAGE_ID}]"
info "Size:[${SIZE}]"
info "----------------------------------------------"
fi
###############################################################
@ -434,22 +414,19 @@ UploadImage() {
##############################
if [ ${ERROR_CODE} -ne 0 ]; then
# ERROR
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} failed to [upload] MAJOR_TAG:[${MAJOR_TAG}] Dockerfile!${NC}"
exit 1
fatal "failed to [upload] MAJOR_TAG:[${MAJOR_TAG}] Dockerfile!"
else
# SUCCESS
echo -e "${NC}${F[B]}Successfully Uploaded TAG:${F[W]}[${MAJOR_TAG}]${F[B]} of Docker image to ${F[C]}${REGISTRY}${F[B]}!${NC}"
info "Successfully Uploaded TAG:${F[W]}[${MAJOR_TAG}]${F[B]} of Docker image to ${F[C]}${REGISTRY}${F[B]}!"
fi
fi
}
################################################################################
#### Function Footer ###########################################################
Footer() {
echo ""
echo "-------------------------------------------------------"
echo "The step has completed"
echo "-------------------------------------------------------"
echo ""
info "-------------------------------------------------------"
info "The step has completed"
info "-------------------------------------------------------"
}
################################################################################
################################## MAIN ########################################
@ -488,9 +465,8 @@ else
#########
# ERROR #
#########
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Registry not set correctly!${NC}"
echo "Registry:[${REGISTRY}]"
exit 1
error "Registry not set correctly!"
fatal "Registry:[${REGISTRY}]"
fi
####################

.github/linters/.flake8 vendored Normal file (2 changes)
View file

@ -0,0 +1,2 @@
[flake8]
max-line-length = 120
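
This config caps line length at 120 characters for flake8. A minimal sketch of exercising it by hand (the directory path is an assumption, and the exact invocation used by the linter script is not shown in this diff):

```bash
# Run flake8 with the repo's config file against the Python test fixtures.
flake8 --config .github/linters/.flake8 .automation/test/python/
```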

.github/linters/.luacheckrc vendored Normal file (1 change)
View file

@ -0,0 +1 @@
--std max

View file

@ -29,7 +29,7 @@ jobs:
if: "github.event_name == 'schedule'"
steps:
- name: Mark issue stale
uses: actions/stale@v3.0.8
uses: actions/stale@v3.0.9
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
stale-issue-message: "This issue has been automatically marked as stale because it has not had recent activity.\nIt will be closed in 14 days if no further activity occurs.\nThank you for your contributions.\n\nIf you think this issue should stay open, please remove the `O: stale 🤖` label or comment on the issue."

View file

@ -7,11 +7,11 @@
#########################################
# Get dependency images as build stages #
#########################################
FROM borkdude/clj-kondo:2020.06.21 as clj-kondo
FROM borkdude/clj-kondo:2020.07.29 as clj-kondo
FROM dotenvlinter/dotenv-linter:2.1.0 as dotenv-linter
FROM mstruebing/editorconfig-checker:2.1.0 as editorconfig-checker
FROM golangci/golangci-lint:v1.29.0 as golangci-lint
FROM yoheimuta/protolint:v0.25.1 as protolint
FROM yoheimuta/protolint:v0.26.0 as protolint
FROM koalaman/shellcheck:v0.7.1 as shellcheck
FROM wata727/tflint:0.18.0 as tflint
@ -52,29 +52,23 @@ ARG GLIBC_VERSION='2.31-r0'
RUN apk add --update --no-cache \
ansible-lint \
bash \
coreutils \
curl \
gcc \
git git-lfs\
go \
icu-libs \
jq \
libxml2-utils \
libc-dev libxml2-utils \
make \
musl-dev \
npm nodejs-current \
openjdk8-jre \
perl \
php7 \
php7-phar \
php7-json \
php7-simplexml \
php7-xmlwriter \
php7-mbstring \
php7-tokenizer \
php7-ctype \
php7-curl \
php7-dom \
php7 php7-phar php7-json php7-mbstring php-xmlwriter \
php7-tokenizer php7-ctype php7-curl php7-dom \
py3-setuptools \
readline-dev \
ruby ruby-dev ruby-bundler ruby-rdoc \
gnupg
@ -206,6 +200,35 @@ RUN wget https://storage.googleapis.com/dart-archive/channels/stable/release/${D
RUN echo "http://dl-cdn.alpinelinux.org/alpine/edge/testing/" >> /etc/apk/repositories \
&& apk add --update --no-cache rakudo zef
####################
# Install luacheck #
####################
RUN wget https://www.lua.org/ftp/lua-5.3.5.tar.gz -O - -q | tar -xzf - \
&& cd lua-5.3.5 \
&& make linux \
&& make install \
&& cd .. && rm -r lua-5.3.5/
RUN wget https://github.com/cvega/luarocks/archive/v3.3.1-super-linter.tar.gz -O - -q | tar -xzf - \
&& cd luarocks-3.3.1-super-linter \
&& ./configure --with-lua-include=/usr/local/include \
&& make \
&& make -b install \
&& cd .. && rm -r luarocks-3.3.1-super-linter/
RUN luarocks install luacheck
#############################
# Install Phive and PHPStan #
#############################
RUN wget -O phive.phar https://phar.io/releases/phive.phar \
&& wget -O phive.phar.asc https://phar.io/releases/phive.phar.asc \
&& gpg --keyserver pool.sks-keyservers.net --recv-keys 0x9D8A98B29B2D5D79 \
&& gpg --verify phive.phar.asc phive.phar \
&& chmod +x phive.phar \
&& mv phive.phar /usr/local/bin/phive \
&& yes | phive install -g phpstan
###########################################
# Load GitHub Env Vars for GitHub Actions #
###########################################
@ -218,6 +241,8 @@ ENV ACTIONS_RUNNER_DEBUG=${ACTIONS_RUNNER_DEBUG} \
GITHUB_TOKEN=${GITHUB_TOKEN} \
GITHUB_WORKSPACE=${GITHUB_WORKSPACE} \
LINTER_RULES_PATH=${LINTER_RULES_PATH} \
LOG_FILE=${LOG_FILE} \
LOG_LEVEL=${LOG_LEVEL} \
OUTPUT_DETAILS=${OUTPUT_DETAILS} \
OUTPUT_FOLDER=${OUTPUT_FOLDER} \
OUTPUT_FORMAT=${OUTPUT_FORMAT} \
@ -241,13 +266,17 @@ ENV ACTIONS_RUNNER_DEBUG=${ACTIONS_RUNNER_DEBUG} \
VALIDATE_JAVASCRIPT_STANDARD=${VALIDATE_JAVASCRIPT_STANDARD} \
VALIDATE_JSON=${VALIDATE_JSON} \
VALIDATE_KOTLIN=${VALIDATE_KOTLIN} \
VALIDATE_LUA=${VALIDATE_LUA} \
VALIDATE_MD=${VALIDATE_MD} \
VALIDATE_OPENAPI=${VALIDATE_OPENAPI} \
VALIDATE_PERL=${VALIDATE_PERL} \
VALIDATE_PHP=${VALIDATE_PHP} \
VALIDATE_PHP_PHPSTAN=${VALIDATE_PHP_PHPSTAN} \
VALIDATE_POWERSHELL=${VALIDATE_POWERSHELL} \
VALIDATE_PROTOBUF=${VALIDATE_PROTOBUF} \
VALIDATE_PYTHON=${VALIDATE_PYTHON} \
VALIDATE_PYTHON_PYLINT=${VALIDATE_PYTHON_PYLINT} \
VALIDATE_PYTHON_FLAKE8=${VALIDATE_PYTHON_FLAKE8} \
VALIDATE_RAKU=${VALIDATE_RAKU} \
VALIDATE_RUBY=${VALIDATE_RUBY} \
VALIDATE_STATES=${VALIDATE_STATES} \

View file

@ -41,38 +41,39 @@ The design of the **Super-Linter** is currently to allow linting to occur in **G
Developers on **GitHub** can call the **GitHub Action** to lint their code base with the following list of linters:
| _Language_ | _Linter_ |
| -------------------------------- | ------------------------------------------------------------------------------------ |
| **Ansible** | [ansible-lint](https://github.com/ansible/ansible-lint) |
| **Azure Resource Manager (ARM)** | [arm-ttk](https://github.com/azure/arm-ttk) |
| **AWS CloudFormation templates** | [cfn-lint](https://github.com/aws-cloudformation/cfn-python-lint/) |
| **CSS** | [stylelint](https://stylelint.io/) |
| **Clojure** | [clj-kondo](https://github.com/borkdude/clj-kondo) |
| **CoffeeScript** | [coffeelint](https://coffeelint.github.io/) |
| **Dart** | [dartanalyzer](https://dart.dev/guides/language/analysis-options) |
| **Dockerfile** | [dockerfilelint](https://github.com/replicatedhq/dockerfilelint.git) |
| **EDITORCONFIG** | [editorconfig-checker](https://github.com/editorconfig-checker/editorconfig-checker) |
| **ENV** | [dotenv-linter](https://github.com/dotenv-linter/dotenv-linter) |
| **Golang** | [golangci-lint](https://github.com/golangci/golangci-lint) |
| **Groovy** | [npm-groovy-lint](https://github.com/nvuillam/npm-groovy-lint) |
| **HTMLHint** | [HTMLHint](https://github.com/htmlhint/HTMLHint) |
| **JavaScript** | [eslint](https://eslint.org/) [standard js](https://standardjs.com/) |
| **JSON** | [jsonlint](https://github.com/zaach/jsonlint) |
| **Kotlin** | [ktlint](https://github.com/pinterest/ktlint) |
| **Markdown** | [markdownlint](https://github.com/igorshubovych/markdownlint-cli#readme) |
| **OpenAPI** | [spectral](https://github.com/stoplightio/spectral) |
| **Perl** | [perl](https://pkgs.alpinelinux.org/package/edge/main/x86/perl) |
| **PHP** | [PHP](https://www.php.net/) |
| **PowerShell** | [PSScriptAnalyzer](https://github.com/PowerShell/Psscriptanalyzer) |
| **Protocol Buffers** | [protolint](https://github.com/yoheimuta/protolint) |
| **Python3** | [pylint](https://www.pylint.org/) |
| **Raku** | [raku](https://raku.org) |
| **Ruby** | [RuboCop](https://github.com/rubocop-hq/rubocop) |
| **Shell** | [Shellcheck](https://github.com/koalaman/shellcheck) |
| **Terraform** | [tflint](https://github.com/terraform-linters/tflint) [terrascan](https://github.com/accurics/terrascan) |
| **TypeScript** | [eslint](https://eslint.org/) [standard js](https://standardjs.com/) |
| **XML** | [LibXML](http://xmlsoft.org/) |
| **YAML** | [YamlLint](https://github.com/adrienverge/yamllint) |
| _Language_ | _Linter_ |
| -------------------------------- | -------------------------------------------------------------------------------------------------------- |
| **Ansible** | [ansible-lint](https://github.com/ansible/ansible-lint) |
| **Azure Resource Manager (ARM)** | [arm-ttk](https://github.com/azure/arm-ttk) |
| **AWS CloudFormation templates** | [cfn-lint](https://github.com/aws-cloudformation/cfn-python-lint/) |
| **CSS** | [stylelint](https://stylelint.io/) |
| **Clojure** | [clj-kondo](https://github.com/borkdude/clj-kondo) |
| **CoffeeScript** | [coffeelint](https://coffeelint.github.io/) |
| **Dart** | [dartanalyzer](https://dart.dev/guides/language/analysis-options) |
| **Dockerfile** | [dockerfilelint](https://github.com/replicatedhq/dockerfilelint.git) |
| **EDITORCONFIG** | [editorconfig-checker](https://github.com/editorconfig-checker/editorconfig-checker) |
| **ENV** | [dotenv-linter](https://github.com/dotenv-linter/dotenv-linter) |
| **Golang** | [golangci-lint](https://github.com/golangci/golangci-lint) |
| **Groovy** | [npm-groovy-lint](https://github.com/nvuillam/npm-groovy-lint) |
| **HTMLHint** | [HTMLHint](https://github.com/htmlhint/HTMLHint) |
| **JavaScript** | [eslint](https://eslint.org/) [standard js](https://standardjs.com/) |
| **JSON** | [jsonlint](https://github.com/zaach/jsonlint) |
| **Kotlin** | [ktlint](https://github.com/pinterest/ktlint) |
| **Lua** | [luacheck](https://github.com/luarocks/luacheck) |
| **Markdown** | [markdownlint](https://github.com/igorshubovych/markdownlint-cli#readme) |
| **OpenAPI** | [spectral](https://github.com/stoplightio/spectral) |
| **Perl** | [perl](https://pkgs.alpinelinux.org/package/edge/main/x86/perl) |
| **PHP** | [PHP](https://www.php.net/) |
| **PowerShell** | [PSScriptAnalyzer](https://github.com/PowerShell/Psscriptanalyzer) |
| **Protocol Buffers** | [protolint](https://github.com/yoheimuta/protolint) |
| **Python3** | [pylint](https://www.pylint.org/) [flake8](https://flake8.pycqa.org/en/latest/) |
| **Raku** | [raku](https://raku.org) |
| **Ruby** | [RuboCop](https://github.com/rubocop-hq/rubocop) |
| **Shell** | [Shellcheck](https://github.com/koalaman/shellcheck) |
| **Terraform** | [tflint](https://github.com/terraform-linters/tflint) [terrascan](https://github.com/accurics/terrascan) |
| **TypeScript** | [eslint](https://eslint.org/) [standard js](https://standardjs.com/) |
| **XML** | [LibXML](http://xmlsoft.org/) |
| **YAML** | [YamlLint](https://github.com/adrienverge/yamllint) |
## How to use
@ -175,10 +176,14 @@ and won't run anything unexpected.
| **DISABLE_ERRORS** | `false` | Flag to have the linter complete with exit code 0 even if errors were detected. |
| **JAVASCRIPT_ES_CONFIG_FILE** | `.eslintrc.yml` | Filename for [eslint configuration](https://eslint.org/docs/user-guide/configuring#configuration-file-formats) (ex: `.eslintrc.yml`, `.eslintrc.json`) |
| **LINTER_RULES_PATH** | `.github/linters` | Directory for all linter configuration rules. |
| **LOG_FILE** | `super-linter.log` | The file name for outputting logs. All output is sent to the log file regardless of `LOG_LEVEL`. |
| **LOG_LEVEL** | `VERBOSE` | How much output the script will generate to the console. One of `VERBOSE`, `DEBUG` or `TRACE`. |
| **MULTI_STATUS** | `true` | A status API is made for each language that is linted to make visual parsing easier. |
| **OUTPUT_FORMAT** | `none` | The report format to be generated, besides the stdout one. Output format of tap is currently using v13 of the specification. Supported formats: tap |
| **OUTPUT_FOLDER** | `super-linter.report` | The location where the output reporting will be generated to. Output folder must not previously exist. |
| **OUTPUT_DETAILS** | `simpler` | What level of details to be reported. Supported formats: simpler or detailed. |
| **PYTHON_PYLINT_CONFIG_FILE** | `.python-lint` | Filename for [pylint configuration](http://pylint.pycqa.org/en/latest/user_guide/run.html?highlight=rcfile#command-line-options) (ex: `.python-lint`, `.pylintrc`) |
| **PYTHON_FLAKE8_CONFIG_FILE** | `.flake8` | Filename for [flake8 configuration](https://flake8.pycqa.org/en/latest/user/configuration.html) (ex: `.flake8`, `tox.ini`) |
| **RUBY_CONFIG_FILE** | `.ruby-lint.yml` | Filename for [rubocop configuration](https://docs.rubocop.org/rubocop/configuration.html) (ex: `.ruby-lint.yml`, `.rubocop.yml`) |
| **TYPESCRIPT_ES_CONFIG_FILE** | `.eslintrc.yml` | Filename for [eslint configuration](https://eslint.org/docs/user-guide/configuring#configuration-file-formats) (ex: `.eslintrc.yml`, `.eslintrc.json`) |
| **VALIDATE_ALL_CODEBASE** | `true` | Will parse the entire repository and find all files to validate across all types. **NOTE:** When set to `false`, only **new** or **edited** files will be parsed for validation. |
@ -201,23 +206,28 @@ and won't run anything unexpected.
| **VALIDATE_JSON** | `true` | Flag to enable or disable the linting process of the JSON language. |
| **VALIDATE_JSX** | `true` | Flag to enable or disable the linting process for jsx files (Utilizing: eslint) |
| **VALIDATE_KOTLIN** | `true` | Flag to enable or disable the linting process of the Kotlin language. |
| **VALIDATE_LUA** | `true` | Flag to enable or disable the linting process of the Lua language. |
| **VALIDATE_MD** | `true` | Flag to enable or disable the linting process of the Markdown language. |
| **VALIDATE_OPENAPI** | `true` | Flag to enable or disable the linting process of the OpenAPI language. |
| **VALIDATE_PERL** | `true` | Flag to enable or disable the linting process of the Perl language. |
| **VALIDATE_PHP** | `true` | Flag to enable or disable the linting process of the PHP language. |
| **VALIDATE_PHP_PHPSTAN** | `true` | Flag to enable or disable the linting process of the PHP language (Utilizing: PHPStan). |
| **VALIDATE_PROTOBUF** | `true` | Flag to enable or disable the linting process of the Protobuf language. |
| **VALIDATE_PYTHON** | `true` | Flag to enable or disable the linting process of the Python language. |
| **VALIDATE_PYTHON** | `true` | Flag to enable or disable the linting process of the Python language. (Utilizing: pylint) (keep for backward compatibility) |
| **VALIDATE_PYTHON_PYLINT** | `true` | Flag to enable or disable the linting process of the Python language. (Utilizing: pylint) |
| **VALIDATE_PYTHON_FLAKE8** | `true` | Flag to enable or disable the linting process of the Python language. (Utilizing: flake8) |
| **VALIDATE_POWERSHELL** | `true` | Flag to enable or disable the linting process of the Powershell language. |
| **VALIDATE_RAKU** | `true` | Flag to enable or disable the linting process of the Raku language. |
| **VALIDATE_RUBY** | `true` | Flag to enable or disable the linting process of the Ruby language. |
| **VALIDATE_STATES** | `true` | Flag to enable or disable the linting process for AWS States Language. |
| **VALIDATE_TERRAFORM** | `true` | Flag to enable or disable the linting process of the Terraform language. |
| **VALIDATE_TERRAFORM_TERRASCAN** | `false` | Flag to enable or disable the linting process of the Terraform language for security related issues. |
| **VALIDATE_TERRAFORM_TERRASCAN** | `false` | Flag to enable or disable the linting process of the Terraform language for security related issues. |
| **VALIDATE_TSX** | `true` | Flag to enable or disable the linting process for tsx files (Utilizing: eslint) |
| **VALIDATE_TYPESCRIPT_ES** | `true` | Flag to enable or disable the linting process of the Typescript language. (Utilizing: eslint) |
| **VALIDATE_TYPESCRIPT_STANDARD** | `true` | Flag to enable or disable the linting process of the Typescript language. (Utilizing: standard) |
| **VALIDATE_XML** | `true` | Flag to enable or disable the linting process of the XML language. |
| **VALIDATE_YAML** | `true` | Flag to enable or disable the linting process of the YAML language. |
| **YAML_CONFIG_FILE** | `.yaml-lint.yml` | Filename for [Yamllint configuration](https://yamllint.readthedocs.io/en/stable/configuration.html) (ex: `.yaml-lint.yml`, `.yamllint.yml`) |
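
A quick way to try the variables added in this commit (`LOG_LEVEL`, `LOG_FILE`, `VALIDATE_LUA`, `VALIDATE_PYTHON_FLAKE8`, `VALIDATE_PHP_PHPSTAN`) is a local run of the image. A sketch only; `RUN_LOCAL` and the `/tmp/lint` mount come from the local-run documentation rather than from this hunk:

```bash
# Lint the current directory locally with the new options enabled.
docker run \
  -e RUN_LOCAL=true \
  -e LOG_LEVEL=DEBUG \
  -e LOG_FILE=super-linter.log \
  -e VALIDATE_LUA=true \
  -e VALIDATE_PYTHON_FLAKE8=true \
  -e VALIDATE_PHP_PHPSTAN=true \
  -v "$PWD":/tmp/lint \
  github/super-linter
```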
### Template rules files

TEMPLATES/.flake8 Normal file (2 changes)
View file

@ -0,0 +1,2 @@
[flake8]
max-line-length = 120

TEMPLATES/.luacheckrc Normal file (1 change)
View file

@ -0,0 +1 @@
--std max

TEMPLATES/phpstan.neon Normal file (2 changes)
View file

@ -0,0 +1,2 @@
parameters:
level: 5
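
This template pins PHPStan to rule level 5; the Dockerfile earlier in this commit installs PHPStan globally through Phive. A minimal sketch of running it by hand against the PHP fixtures (the paths, and the assumption that the Phive global install puts `phpstan` on PATH, are mine rather than from the diff):

```bash
# Analyse the PHP test fixtures at the template's rule level.
phpstan analyse --no-progress -c TEMPLATES/phpstan.neon .automation/test/php/
```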

View file

@ -8,3 +8,4 @@ gem "rubocop", "~> 0.82.0"
gem "rubocop-github", "~> 0.16.0"
gem "rubocop-performance", "~>1.7.1"
gem "rubocop-rails", "~> 2.5"
gem "rubocop-rspec", "~> 1.41.0"

View file

@ -37,6 +37,8 @@ GEM
activesupport (>= 4.2.0)
rack (>= 1.1)
rubocop (>= 0.82.0)
rubocop-rspec (1.41.0)
rubocop (>= 0.68.1)
ruby-progressbar (1.10.1)
thread_safe (0.3.6)
tzinfo (1.2.7)
@ -52,6 +54,7 @@ DEPENDENCIES
rubocop-github (~> 0.16.0)
rubocop-performance (~> 1.7.1)
rubocop-rails (~> 2.5)
rubocop-rspec (~> 1.41.0)
BUNDLED WITH
2.1.4

View file

@ -11,6 +11,7 @@ pylint = "*"
yq = "*"
cfn-lint = "*"
terrascan = "*"
flake8 = "*"
[requires]
python_version = "3.8"

dependencies/Pipfile.lock generated vendored (35 changes)
View file

@ -1,7 +1,7 @@
{
"_meta": {
"hash": {
"sha256": "c62b95247cb67b10711da0012966cc031d5a9ec08885a45736c0ee77bba3a844"
"sha256": "f8ea3853c4bb4533103043533d08982436551678c827f59809c94bf0ee54a187"
},
"pipfile-spec": 6,
"requires": {
@ -28,6 +28,7 @@
"sha256:2f4078c2a41bf377eea06d71c9d2ba4eb8f6b1af2135bec27bbbb7d8f12bb703",
"sha256:bc58d83eb610252fd8de6363e39d4f1d0619c894b0ed24603b881c02e64c7386"
],
"markers": "python_version >= '3.5'",
"version": "==2.4.2"
},
"attrs": {
@ -35,6 +36,7 @@
"sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c",
"sha256:f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==19.3.0"
},
"aws-sam-translator": {
@ -80,13 +82,23 @@
"sha256:9e4d7ecfc600058e07ba661411a2b7de2fd0fafa17d1a7f7361cd47b1175c827",
"sha256:a2aeea129088da402665e92e0b25b04b073c04b2dce4ab65caaa38b7ce2e1a99"
],
"markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2'",
"version": "==0.15.2"
},
"flake8": {
"hashes": [
"sha256:15e351d19611c887e482fb960eae4d44845013cc142d42896e9862f775d8cf5c",
"sha256:f04b9fcbac03b0a3e58c0ab3a0ecc462e023a9faf046d57794184028123aa208"
],
"index": "pypi",
"version": "==3.8.3"
},
"isort": {
"hashes": [
"sha256:54da7e92468955c4fceacd0c86bd0ec997b0e1ee80d97f67c35a78b719dccab1",
"sha256:6e811fcb295968434526407adb8796944f1988c5b65e8139058f2014cbe100fd"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==4.3.21"
},
"jmespath": {
@ -94,6 +106,7 @@
"sha256:b85d0567b8666149a93172712e68920734333c0ce7e89b78b3e987f71e5ed4f9",
"sha256:cdf6525904cc597730141d61b36f2e4b8ecc257c420fa2f4549bac2c2d0cb72f"
],
"markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2'",
"version": "==0.10.0"
},
"jsonpatch": {
@ -109,6 +122,7 @@
"sha256:c192ba86648e05fdae4f08a17ec25180a9aef5008d973407b581798a83975362",
"sha256:ff379fa021d1b81ab539f5ec467c7745beb1a5671463f9dcc2b2d458bd361c1e"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==2.0"
},
"jsonschema": {
@ -148,6 +162,7 @@
"sha256:efa1909120ce98bbb3777e8b6f92237f5d5c8ea6758efea36a473e1d38f7d3e4",
"sha256:f3900e8a5de27447acbf900b4750b0ddfd7ec1ea7fbaf11dfa911141bc522af0"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==1.4.3"
},
"mccabe": {
@ -172,6 +187,22 @@
],
"version": "==0.8.0"
},
"pycodestyle": {
"hashes": [
"sha256:2295e7b2f6b5bd100585ebcb1f616591b652db8a741695b3d8f5d28bdc934367",
"sha256:c58a7d2815e0e8d7972bf1803331fb0152f867bd89adf8a01dfd55085434192e"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==2.6.0"
},
"pyflakes": {
"hashes": [
"sha256:0d94e0e05a19e57a99444b6ddcf9a6eb2e5c68d3ca1e98e90707af8152c90a92",
"sha256:35b2d75ee967ea93b55750aa9edbbf72813e06a66ba54438df2cfac9e3c27fc8"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==2.2.0"
},
"pyhcl": {
"hashes": [
"sha256:2d9b9dcdf1023d812bfed561ba72c99104c5b3f52e558d595130a44ce081b003"
@ -197,6 +228,7 @@
"sha256:73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c",
"sha256:75bb3f31ea686f1197762692a9ee6a7550b59fc6ca3a1f4b5d7e32fb98e2da2a"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2'",
"version": "==2.8.1"
},
"pyyaml": {
@ -228,6 +260,7 @@
"sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259",
"sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2'",
"version": "==1.15.0"
},
"terrascan": {

dependencies/package-lock.json generated vendored (258 changes)
View file

@ -3,6 +3,20 @@
"requires": true,
"lockfileVersion": 1,
"dependencies": {
"@amplitude/node": {
"version": "0.3.3",
"resolved": "https://registry.npmjs.org/@amplitude/node/-/node-0.3.3.tgz",
"integrity": "sha512-Uzg4MRAuD053Ex67Iu2lm2GovnVte1uKI3q7CXlMCYZ9ylZmAkPbTnjg9OVyD4f+IiUfgK4p3bE7r9p7jqSDLA==",
"requires": {
"@amplitude/types": "^0.3.2",
"tslib": "^1.9.3"
}
},
"@amplitude/types": {
"version": "0.3.2",
"resolved": "https://registry.npmjs.org/@amplitude/types/-/types-0.3.2.tgz",
"integrity": "sha512-7+m7nhJMFGbpsppOUsCH8f4FOFyAxgKFuXkKknU/LP2CMYVjWEIoLTKKgaJPc2c8wXaK5KPXVetb8VeiGbuaGg=="
},
"@babel/code-frame": {
"version": "7.10.4",
"resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.10.4.tgz",
@ -499,11 +513,11 @@
"integrity": "sha512-/tiJyrc0GPcsReHzgC0SXwOmoPjLqYe01W7dLYB0yasQXMbcRee+ZIk+g8MIQhoBS8fPoBQO3Y93+aeBrI93Ug=="
},
"@typescript-eslint/eslint-plugin": {
"version": "3.7.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-3.7.0.tgz",
"integrity": "sha512-4OEcPON3QIx0ntsuiuFP/TkldmBGXf0uKxPQlGtS/W2F3ndYm8Vgdpj/woPJkzUc65gd3iR+qi3K8SDQP/obFg==",
"version": "3.7.1",
"resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-3.7.1.tgz",
"integrity": "sha512-3DB9JDYkMrc8Au00rGFiJLK2Ja9CoMP6Ut0sHsXp3ZtSugjNxvSSHTnKLfo4o+QmjYBJqEznDqsG1zj4F2xnsg==",
"requires": {
"@typescript-eslint/experimental-utils": "3.7.0",
"@typescript-eslint/experimental-utils": "3.7.1",
"debug": "^4.1.1",
"functional-red-black-tree": "^1.0.1",
"regexpp": "^3.0.0",
@ -512,41 +526,83 @@
}
},
"@typescript-eslint/experimental-utils": {
"version": "3.7.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/experimental-utils/-/experimental-utils-3.7.0.tgz",
"integrity": "sha512-xpfXXAfZqhhqs5RPQBfAFrWDHoNxD5+sVB5A46TF58Bq1hRfVROrWHcQHHUM9aCBdy9+cwATcvCbRg8aIRbaHQ==",
"version": "3.7.1",
"resolved": "https://registry.npmjs.org/@typescript-eslint/experimental-utils/-/experimental-utils-3.7.1.tgz",
"integrity": "sha512-TqE97pv7HrqWcGJbLbZt1v59tcqsSVpWTOf1AqrWK7n8nok2sGgVtYRuGXeNeLw3wXlLEbY1MKP3saB2HsO/Ng==",
"requires": {
"@types/json-schema": "^7.0.3",
"@typescript-eslint/types": "3.7.0",
"@typescript-eslint/typescript-estree": "3.7.0",
"@typescript-eslint/types": "3.7.1",
"@typescript-eslint/typescript-estree": "3.7.1",
"eslint-scope": "^5.0.0",
"eslint-utils": "^2.0.0"
}
},
"@typescript-eslint/parser": {
"version": "3.7.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-3.7.0.tgz",
"integrity": "sha512-2LZauVUt7jAWkcIW7djUc3kyW+fSarNEuM3RF2JdLHR9BfX/nDEnyA4/uWz0wseoWVZbDXDF7iF9Jc342flNqQ==",
"version": "3.7.1",
"resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-3.7.1.tgz",
"integrity": "sha512-W4QV/gXvfIsccN8225784LNOorcm7ch68Fi3V4Wg7gmkWSQRKevO4RrRqWo6N/Z/myK1QAiGgeaXN57m+R/8iQ==",
"requires": {
"@types/eslint-visitor-keys": "^1.0.0",
"@typescript-eslint/experimental-utils": "3.7.0",
"@typescript-eslint/types": "3.7.0",
"@typescript-eslint/typescript-estree": "3.7.0",
"@typescript-eslint/experimental-utils": "3.7.1",
"@typescript-eslint/types": "3.7.1",
"@typescript-eslint/typescript-estree": "3.7.1",
"eslint-visitor-keys": "^1.1.0"
},
"dependencies": {
"@typescript-eslint/experimental-utils": {
"version": "3.7.1",
"resolved": "https://registry.npmjs.org/@typescript-eslint/experimental-utils/-/experimental-utils-3.7.1.tgz",
"integrity": "sha512-TqE97pv7HrqWcGJbLbZt1v59tcqsSVpWTOf1AqrWK7n8nok2sGgVtYRuGXeNeLw3wXlLEbY1MKP3saB2HsO/Ng==",
"requires": {
"@types/json-schema": "^7.0.3",
"@typescript-eslint/types": "3.7.1",
"@typescript-eslint/typescript-estree": "3.7.1",
"eslint-scope": "^5.0.0",
"eslint-utils": "^2.0.0"
}
},
"@typescript-eslint/types": {
"version": "3.7.1",
"resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-3.7.1.tgz",
"integrity": "sha512-PZe8twm5Z4b61jt7GAQDor6KiMhgPgf4XmUb9zdrwTbgtC/Sj29gXP1dws9yEn4+aJeyXrjsD9XN7AWFhmnUfg=="
},
"@typescript-eslint/typescript-estree": {
"version": "3.7.1",
"resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-3.7.1.tgz",
"integrity": "sha512-m97vNZkI08dunYOr2lVZOHoyfpqRs0KDpd6qkGaIcLGhQ2WPtgHOd/eVbsJZ0VYCQvupKrObAGTOvk3tfpybYA==",
"requires": {
"@typescript-eslint/types": "3.7.1",
"@typescript-eslint/visitor-keys": "3.7.1",
"debug": "^4.1.1",
"glob": "^7.1.6",
"is-glob": "^4.0.1",
"lodash": "^4.17.15",
"semver": "^7.3.2",
"tsutils": "^3.17.1"
}
},
"@typescript-eslint/visitor-keys": {
"version": "3.7.1",
"resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-3.7.1.tgz",
"integrity": "sha512-xn22sQbEya+Utj2IqJHGLA3i1jDzR43RzWupxojbSWnj3nnPLavaQmWe5utw03CwYao3r00qzXfgJMGNkrzrAA==",
"requires": {
"eslint-visitor-keys": "^1.1.0"
}
}
}
},
"@typescript-eslint/types": {
"version": "3.7.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-3.7.0.tgz",
"integrity": "sha512-reCaK+hyKkKF+itoylAnLzFeNYAEktB0XVfSQvf0gcVgpz1l49Lt6Vo9x4MVCCxiDydA0iLAjTF/ODH0pbfnpg=="
"version": "3.7.1",
"resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-3.7.1.tgz",
"integrity": "sha512-PZe8twm5Z4b61jt7GAQDor6KiMhgPgf4XmUb9zdrwTbgtC/Sj29gXP1dws9yEn4+aJeyXrjsD9XN7AWFhmnUfg=="
},
"@typescript-eslint/typescript-estree": {
"version": "3.7.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-3.7.0.tgz",
"integrity": "sha512-xr5oobkYRebejlACGr1TJ0Z/r0a2/HUf0SXqPvlgUMwiMqOCu/J+/Dr9U3T0IxpE5oLFSkqMx1FE/dKaZ8KsOQ==",
"version": "3.7.1",
"resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-3.7.1.tgz",
"integrity": "sha512-m97vNZkI08dunYOr2lVZOHoyfpqRs0KDpd6qkGaIcLGhQ2WPtgHOd/eVbsJZ0VYCQvupKrObAGTOvk3tfpybYA==",
"requires": {
"@typescript-eslint/types": "3.7.0",
"@typescript-eslint/visitor-keys": "3.7.0",
"@typescript-eslint/types": "3.7.1",
"@typescript-eslint/visitor-keys": "3.7.1",
"debug": "^4.1.1",
"glob": "^7.1.6",
"is-glob": "^4.0.1",
@ -556,9 +612,9 @@
}
},
"@typescript-eslint/visitor-keys": {
"version": "3.7.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-3.7.0.tgz",
"integrity": "sha512-k5PiZdB4vklUpUX4NBncn5RBKty8G3ihTY+hqJsCdMuD0v4jofI5xuqwnVcWxfv6iTm2P/dfEa2wMUnsUY8ODw==",
"version": "3.7.1",
"resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-3.7.1.tgz",
"integrity": "sha512-xn22sQbEya+Utj2IqJHGLA3i1jDzR43RzWupxojbSWnj3nnPLavaQmWe5utw03CwYao3r00qzXfgJMGNkrzrAA==",
"requires": {
"eslint-visitor-keys": "^1.1.0"
}
@ -742,6 +798,14 @@
"resolved": "https://registry.npmjs.org/aws4/-/aws4-1.10.0.tgz",
"integrity": "sha512-3YDiu347mtVtjpyV3u5kVqQLP242c06zwDOgpeRnybmXlYYsLbtTrUBUm8i8srONt+FWobl5aibnU1030PeeuA=="
},
"axios": {
"version": "0.19.2",
"resolved": "https://registry.npmjs.org/axios/-/axios-0.19.2.tgz",
"integrity": "sha512-fjgm5MvRHLhx+osE2xoekY70AhARk3a6hkN+3Io1jc00jtquGvxYlKlsFUhmUET0V5te6CcZI7lcv2Ym61mjHA==",
"requires": {
"follow-redirects": "1.5.10"
}
},
"babel-eslint": {
"version": "10.1.0",
"resolved": "https://registry.npmjs.org/babel-eslint/-/babel-eslint-10.1.0.tgz",
@ -950,6 +1014,15 @@
"restore-cursor": "^3.1.0"
}
},
"cli-progress": {
"version": "3.8.2",
"resolved": "https://registry.npmjs.org/cli-progress/-/cli-progress-3.8.2.tgz",
"integrity": "sha512-qRwBxLldMSfxB+YGFgNRaj5vyyHe1yMpVeDL79c+7puGujdKJHQHydgqXDcrkvQgJ5U/d3lpf6vffSoVVUftVQ==",
"requires": {
"colors": "^1.1.2",
"string-width": "^4.2.0"
}
},
"cli-width": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/cli-width/-/cli-width-3.0.0.tgz",
@ -1159,6 +1232,11 @@
"resolved": "https://registry.npmjs.org/decimal.js/-/decimal.js-10.2.0.tgz",
"integrity": "sha512-vDPw+rDgn3bZe1+F/pyEwb1oMG2XTlRVgAa6B4KccTEpYgF8w6eQllVbQcfIJnZyvzFtFpxnpGtx8dd7DJp/Rw=="
},
"decode-html": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/decode-html/-/decode-html-2.0.0.tgz",
"integrity": "sha1-fQqIfORCgOYJeKcH67f4CB/WHqo="
},
"deep-extend": {
"version": "0.5.1",
"resolved": "https://registry.npmjs.org/deep-extend/-/deep-extend-0.5.1.tgz",
@ -1847,9 +1925,9 @@
}
},
"eslint-plugin-jest": {
"version": "23.18.0",
"resolved": "https://registry.npmjs.org/eslint-plugin-jest/-/eslint-plugin-jest-23.18.0.tgz",
"integrity": "sha512-wLPM/Rm1SGhxrFQ2TKM/BYsYPhn7ch6ZEK92S2o/vGkAAnDXM0I4nTIo745RIX+VlCRMFgBuJEax6XfTHMdeKg==",
"version": "23.20.0",
"resolved": "https://registry.npmjs.org/eslint-plugin-jest/-/eslint-plugin-jest-23.20.0.tgz",
"integrity": "sha512-+6BGQt85OREevBDWCvhqj1yYA4+BFK4XnRZSGJionuEYmcglMZYLNNBBemwzbqUAckURaHdJSBcjHPyrtypZOw==",
"requires": {
"@typescript-eslint/experimental-utils": "^2.5.0"
},
@ -2117,6 +2195,27 @@
"to-regex-range": "^5.0.1"
}
},
"find-java-home": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/find-java-home/-/find-java-home-1.1.0.tgz",
"integrity": "sha512-bSTCKNZ193UM/+ZZoNDzICAEHcVywovkhsWCkZALjCvRXQ+zXTe/XATrrP4CpxkaP6YFhQJOpyRpH0P2U/woDA==",
"requires": {
"which": "~1.0.5",
"winreg": "~1.2.2"
},
"dependencies": {
"which": {
"version": "1.0.9",
"resolved": "https://registry.npmjs.org/which/-/which-1.0.9.tgz",
"integrity": "sha1-RgwdoPgQED0DIam2M6+eV15kSG8="
}
}
},
"find-package-json": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/find-package-json/-/find-package-json-1.2.0.tgz",
"integrity": "sha512-+SOGcLGYDJHtyqHd87ysBhmaeQ95oWspDKnMXBrnQ9Eq4OkLNqejgoaD8xVWu6GPa0B6roa6KinCMEMcVeqONw=="
},
"find-root": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/find-root/-/find-root-1.1.0.tgz",
@ -2146,6 +2245,29 @@
"resolved": "https://registry.npmjs.org/flatted/-/flatted-2.0.2.tgz",
"integrity": "sha512-r5wGx7YeOwNWNlCA0wQ86zKyDLMQr+/RB8xy74M4hTphfmjlijTSSXGuH8rnvKZnfT9i+75zmd8jcKdMR4O6jA=="
},
"follow-redirects": {
"version": "1.5.10",
"resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.5.10.tgz",
"integrity": "sha512-0V5l4Cizzvqt5D44aTXbFZz+FtyXV1vrDN6qrelxtfYQKW0KO0W2T/hkE8xvGa/540LkZlkaUjO4ailYTFtHVQ==",
"requires": {
"debug": "=3.1.0"
},
"dependencies": {
"debug": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/debug/-/debug-3.1.0.tgz",
"integrity": "sha512-OX8XqP7/1a9cqkxYw2yXss15f26NKWBpDXQd0/uK/KPqdQhxbPa994hnzjcE2VqQpDslf55723cKPUOGSmMY3g==",
"requires": {
"ms": "2.0.0"
}
},
"ms": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz",
"integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g="
}
}
},
"forever-agent": {
"version": "0.6.1",
"resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz",
@ -2161,6 +2283,16 @@
"mime-types": "^2.1.12"
}
},
"fs-extra": {
"version": "8.1.0",
"resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-8.1.0.tgz",
"integrity": "sha512-yhlQgA6mnOJUKOsRUFsgJdQCvkKhcz8tlZG5HBQfReYZy46OwLcY+Zia0mtdHsOo9y/hP+CxMN0TU9QxoOtG4g==",
"requires": {
"graceful-fs": "^4.2.0",
"jsonfile": "^4.0.0",
"universalify": "^0.1.0"
}
},
"fs.realpath": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz",
@ -2934,6 +3066,14 @@
"resolved": "https://registry.npmjs.org/jsonc-parser/-/jsonc-parser-2.2.1.tgz",
"integrity": "sha512-o6/yDBYccGvTz1+QFevz6l6OBZ2+fMVu2JZ9CIhzsYRX4mjaK5IyX9eldUdCmga16zlgQxyrj5pt9kzuj2C02w=="
},
"jsonfile": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-4.0.0.tgz",
"integrity": "sha1-h3Gq4HmbZAdrdmQPygWPnBDjPss=",
"requires": {
"graceful-fs": "^4.1.6"
}
},
"jsonlint": {
"version": "1.6.3",
"resolved": "https://registry.npmjs.org/jsonlint/-/jsonlint-1.6.3.tgz",
@ -3472,6 +3612,37 @@
"resolved": "https://registry.npmjs.org/normalize-selector/-/normalize-selector-0.2.0.tgz",
"integrity": "sha1-0LFF62kRicY6eNIB3E/bEpPvDAM="
},
"npm-groovy-lint": {
"version": "5.7.0",
"resolved": "https://registry.npmjs.org/npm-groovy-lint/-/npm-groovy-lint-5.7.0.tgz",
"integrity": "sha512-Cq/ncgmNMI6USeazp2gingZj73ia6M+rpR5LfBK8ATSEdRZuIsnKTIGTuqMc/hrZyU4oMN0C/PYQj4DTO+TRFQ==",
"requires": {
"@amplitude/node": "^0.3.3",
"ansi-colors": "^4.1.1",
"axios": "^0.19.2",
"cli-progress": "^3.6.0",
"debug": "^4.1.1",
"decode-html": "^2.0.0",
"find-java-home": "^1.1.0",
"find-package-json": "^1.2.0",
"fs-extra": "^8.1.0",
"glob": "^7.1.6",
"import-fresh": "^3.2.1",
"ip": "^1.1.5",
"optionator": "^0.8.3",
"semver": "^7.1.3",
"strip-json-comments": "^3.0.1",
"uuid": "^8.2.0",
"xml2js": "^0.4.23"
},
"dependencies": {
"uuid": {
"version": "8.2.0",
"resolved": "https://registry.npmjs.org/uuid/-/uuid-8.2.0.tgz",
"integrity": "sha512-CYpGiFTUrmI6OBMkAdjSDM0k5h8SkkiTP4WAjQgDgNB1S3Ou9VBEvr6q0Kv2H1mMk7IWfxYGpMH5sd5AvcIV2Q=="
}
}
},
"num2fraction": {
"version": "1.2.2",
"resolved": "https://registry.npmjs.org/num2fraction/-/num2fraction-1.2.2.tgz",
@ -4445,6 +4616,11 @@
"resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz",
"integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg=="
},
"sax": {
"version": "1.2.4",
"resolved": "https://registry.npmjs.org/sax/-/sax-1.2.4.tgz",
"integrity": "sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw=="
},
"semver": {
"version": "7.3.2",
"resolved": "https://registry.npmjs.org/semver/-/semver-7.3.2.tgz",
@ -5301,6 +5477,11 @@
"unist-util-is": "^4.0.0"
}
},
"universalify": {
"version": "0.1.2",
"resolved": "https://registry.npmjs.org/universalify/-/universalify-0.1.2.tgz",
"integrity": "sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg=="
},
"unpipe": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz",
@ -5397,6 +5578,11 @@
"resolved": "https://registry.npmjs.org/which-module/-/which-module-2.0.0.tgz",
"integrity": "sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho="
},
"winreg": {
"version": "1.2.4",
"resolved": "https://registry.npmjs.org/winreg/-/winreg-1.2.4.tgz",
"integrity": "sha1-ugZWKbepJRMOFXeRCM9UCZDpjRs="
},
"wolfy87-eventemitter": {
"version": "5.2.9",
"resolved": "https://registry.npmjs.org/wolfy87-eventemitter/-/wolfy87-eventemitter-5.2.9.tgz",
@ -5446,6 +5632,20 @@
"resolved": "https://registry.npmjs.org/xml/-/xml-1.0.1.tgz",
"integrity": "sha1-eLpyAgApxbyHuKgaPPzXS0ovweU="
},
"xml2js": {
"version": "0.4.23",
"resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.4.23.tgz",
"integrity": "sha512-ySPiMjM0+pLDftHgXY4By0uswI3SPKLDw/i3UXbnO8M/p28zqexCUoPmQFrYD+/1BzhGJSs2i1ERWKJAtiLrug==",
"requires": {
"sax": ">=0.6.0",
"xmlbuilder": "~11.0.0"
}
},
"xmlbuilder": {
"version": "11.0.1",
"resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-11.0.1.tgz",
"integrity": "sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA=="
},
"xregexp": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/xregexp/-/xregexp-2.0.0.tgz",

View file

@ -3,13 +3,13 @@
"dependencies": {
"@coffeelint/cli": "^3.2.10",
"@stoplight/spectral": "^5.4.0",
"@typescript-eslint/eslint-plugin": "^3.7.0",
"@typescript-eslint/parser": "^3.7.0",
"@typescript-eslint/eslint-plugin": "^3.7.1",
"@typescript-eslint/parser": "^3.7.1",
"babel-eslint": "^10.1.0",
"dockerfilelint": "^1.5.0",
"eslint": "^7.5.0",
"eslint-config-prettier": "^6.11.0",
"eslint-plugin-jest": "^23.18.0",
"eslint-plugin-jest": "^23.20.0",
"htmlhint": "^0.14.1",
"jsonlint": "^1.6.3",
"markdownlint-cli": "^0.23.2",

File diff suppressed because it is too large

View file

@ -16,11 +16,8 @@ function BuildFileList() {
################
# print header #
################
if [[ ${ACTIONS_RUNNER_DEBUG} == "true" ]]; then
echo ""
echo "----------------------------------------------"
echo "Pulling in code history and branches..."
fi
debug "----------------------------------------------"
debug "Pulling in code history and branches..."
#################################################################################
# Switch codebase back to the default branch to get a list of all files changed #
@ -40,19 +37,15 @@ function BuildFileList() {
##############################
if [ ${ERROR_CODE} -ne 0 ]; then
# Error
echo "Failed to switch to ${DEFAULT_BRANCH} branch to get files changed!"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${SWITCH_CMD}]${NC}"
exit 1
info "Failed to switch to ${DEFAULT_BRANCH} branch to get files changed!"
fatal "[${SWITCH_CMD}]"
fi
################
# print header #
################
if [[ ${ACTIONS_RUNNER_DEBUG} == "true" ]]; then
echo ""
echo "----------------------------------------------"
echo "Generating Diff with:[git diff --name-only '${DEFAULT_BRANCH}..${GITHUB_SHA}' --diff-filter=d]"
fi
debug "----------------------------------------------"
debug "Generating Diff with:[git diff --name-only '${DEFAULT_BRANCH}..${GITHUB_SHA}' --diff-filter=d]"
#################################################
# Get the Array of files changed in the commits #
@ -69,17 +62,15 @@ function BuildFileList() {
##############################
if [ ${ERROR_CODE} -ne 0 ]; then
# Error
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to gain a list of all files changed!${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${RAW_FILE_ARRAY[*]}]${NC}"
exit 1
error "Failed to gain a list of all files changed!"
fatal "[${RAW_FILE_ARRAY[*]}]"
fi
################################################
# Iterate through the array of all files found #
################################################
echo ""
echo "----------------------------------------------"
echo "Files that have been modified in the commit(s):"
info "----------------------------------------------"
info "Files that have been modified in the commit(s):"
for FILE in "${RAW_FILE_ARRAY[@]}"; do
###########################
# Get the files extension #
@ -92,12 +83,12 @@ function BuildFileList() {
##############
# Print file #
##############
echo "File:[${FILE}], File_type:[${FILE_TYPE}]"
info "File:[${FILE}], File_type:[${FILE_TYPE}]"
#########
# DEBUG #
#########
#echo "FILE_TYPE:[${FILE_TYPE}]"
debug "FILE_TYPE:[${FILE_TYPE}]"
################################
# Get the CLOUDFORMATION files #
@ -106,7 +97,7 @@ function BuildFileList() {
################################
# Append the file to the array #
################################
FILE_ARRAY_YML+=("${FILE}")
FILE_ARRAY_YAML+=("${FILE}")
##########################################################
# Set the READ_ONLY_CHANGE_FLAG since this could be exec #
##########################################################
@ -221,9 +212,9 @@ function BuildFileList() {
######################
# Get the RAKU files #
######################
elif [ "${FILE_TYPE}" == "raku" ] || [ "${FILE_TYPE}" == "rakumod" ] \
|| [ "${FILE_TYPE}" == "rakutest" ] || [ "${FILE_TYPE}" == "pm6" ] \
|| [ "${FILE_TYPE}" == "pl6" ] || [ "${FILE_TYPE}" == "p6" ] ; then
elif [ "${FILE_TYPE}" == "raku" ] || [ "${FILE_TYPE}" == "rakumod" ] ||
[ "${FILE_TYPE}" == "rakutest" ] || [ "${FILE_TYPE}" == "pm6" ] ||
[ "${FILE_TYPE}" == "pl6" ] || [ "${FILE_TYPE}" == "p6" ]; then
################################
# Append the file to the array #
################################
@ -241,6 +232,7 @@ function BuildFileList() {
################################
FILE_ARRAY_PHP_BUILTIN+=("${FILE}")
FILE_ARRAY_PHP_PHPCS+=("${FILE}")
FILE_ARRAY_PHP_PHPSTAN+=("${FILE}")
FILE_ARRAY_PHP_PSALM+=("${FILE}")
##########################################################
# Set the READ_ONLY_CHANGE_FLAG since this could be exec #
@ -265,7 +257,8 @@ function BuildFileList() {
################################
# Append the file to the array #
################################
FILE_ARRAY_PYTHON+=("${FILE}")
FILE_ARRAY_PYTHON_PYLINT+=("${FILE}")
FILE_ARRAY_PYTHON_FLAKE8+=("${FILE}")
##########################################################
# Set the READ_ONLY_CHANGE_FLAG since this could be exec #
##########################################################
@ -395,6 +388,15 @@ function BuildFileList() {
# Set the READ_ONLY_CHANGE_FLAG since this could be exec #
##########################################################
READ_ONLY_CHANGE_FLAG=1
elif [ "$FILE_TYPE" == "lua" ]; then
################################
# Append the file to the array #
################################
FILE_ARRAY_LUA+=("$FILE")
##########################################################
# Set the READ_ONLY_CHANGE_FLAG since this could be exec #
##########################################################
READ_ONLY_CHANGE_FLAG=1
############################
# Get the Protocol Buffers files #
############################
@ -465,8 +467,8 @@ function BuildFileList() {
#######################
# It is a bash script #
#######################
echo -e "${NC}${F[Y]}WARN!${NC} Found bash script without extension:[.sh]${NC}"
echo "Please update file with proper extensions."
warn "Found bash script without extension:[.sh]"
info "Please update file with proper extensions."
################################
# Append the file to the array #
################################
@ -479,8 +481,8 @@ function BuildFileList() {
#######################
# It is a Ruby script #
#######################
echo -e "${NC}${F[Y]}WARN!${NC} Found ruby script without extension:[.rb]${NC}"
echo "Please update file with proper extensions."
warn "Found ruby script without extension:[.rb]"
info "Please update file with proper extensions."
################################
# Append the file to the array #
################################
@ -493,7 +495,7 @@ function BuildFileList() {
############################
# Extension was not found! #
############################
echo -e "${NC}${F[Y]} - WARN!${NC} Failed to get filetype for:[${FILE}]!${NC}"
warn "Failed to get filetype for:[${FILE}]!"
##########################################################
# Set the READ_ONLY_CHANGE_FLAG since this could be exec #
##########################################################
@ -502,7 +504,7 @@ function BuildFileList() {
fi
done
echo ${READ_ONLY_CHANGE_FLAG} > /dev/null 2>&1 || true # Workaround SC2034
export READ_ONLY_CHANGE_FLAG # Workaround SC2034
#########################################
# Need to switch back to branch of code #
@ -519,15 +521,13 @@ function BuildFileList() {
##############################
if [ ${ERROR_CODE} -ne 0 ]; then
# Error
echo "Failed to switch back to branch!"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${SWITCH2_CMD}]${NC}"
exit 1
error "Failed to switch back to branch!"
fatal "[${SWITCH2_CMD}]"
fi
################
# Footer print #
################
echo ""
echo "----------------------------------------------"
echo -e "${NC}${F[B]}Successfully gathered list of files...${NC}"
info "----------------------------------------------"
info "Successfully gathered list of files..."
}
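The extension switch above now fans one changed file out to one array per linter (a PHP file feeds the built-in linter, phpcs, phpstan and psalm; a Python file feeds both pylint and flake8). A minimal sketch of that dispatch for a Python file; the file name and the extension-parsing one-liner are illustrative stand-ins for the elided logic earlier in the function:

FILE="src/example.py"                     # hypothetical changed file
FILE_TYPE="${FILE##*.}"                   # -> "py" (stand-in for the real extension parsing)
if [ "${FILE_TYPE}" == "py" ]; then
  FILE_ARRAY_PYTHON_PYLINT+=("${FILE}")   # queued for pylint
  FILE_ARRAY_PYTHON_FLAKE8+=("${FILE}")   # queued for flake8
  READ_ONLY_CHANGE_FLAG=1                 # flag that an executable file may have changed
fi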

File diff suppressed because it is too large

53 lib/log.sh Normal file
View file

@ -0,0 +1,53 @@
#!/usr/bin/env bash
declare -Agr B=(
[B]=$(echo -e "\e[44m")
[C]=$(echo -e "\e[46m")
[G]=$(echo -e "\e[42m")
[K]=$(echo -e "\e[40m")
[M]=$(echo -e "\e[45m")
[R]=$(echo -e "\e[41m")
[W]=$(echo -e "\e[47m")
[Y]=$(echo -e "\e[43m")
)
declare -Agr F=(
[B]=$(echo -e "\e[0;34m")
[C]=$(echo -e "\e[0;36m")
[G]=$(echo -e "\e[0;32m")
[K]=$(echo -e "\e[0;30m")
[M]=$(echo -e "\e[0;35m")
[R]=$(echo -e "\e[0;31m")
[W]=$(echo -e "\e[0;37m")
[Y]=$(echo -e "\e[0;33m")
)
readonly NC=$(echo -e "\e[0m")
export B
export F
export NC
# Log Functions
LOG_TEMP=$(mktemp) || echo "Failed to create temporary log file."
export LOG_TEMP
echo "super-linter Log" > "${LOG_TEMP}"
log() {
local TOTERM=${1:-}
local MESSAGE=${2:-}
echo -e "${MESSAGE:-}" | (
if [[ -n ${TOTERM} ]]; then
tee -a "${LOG_TEMP}" >&2
else
cat >> "${LOG_TEMP}" 2>&1
fi
)
}
trace() { log "${LOG_TRACE:-}" "${NC}$(date +"%F %T") ${F[B]}[TRACE ]${NC} $*${NC}"; }
debug() { log "${LOG_DEBUG:-}" "${NC}$(date +"%F %T") ${F[B]}[DEBUG ]${NC} $*${NC}"; }
info() { log "${LOG_VERBOSE:-}" "${NC}$(date +"%F %T") ${F[B]}[INFO ]${NC} $*${NC}"; }
notice() { log "true" "${NC}$(date +"%F %T") ${F[G]}[NOTICE]${NC} $*${NC}"; }
warn() { log "true" "${NC}$(date +"%F %T") ${F[Y]}[WARN ]${NC} $*${NC}"; }
error() { log "true" "${NC}$(date +"%F %T") ${F[R]}[ERROR ]${NC} $*${NC}"; }
fatal() {
log "true" "${NC}$(date +"%F %T") ${B[R]}${F[W]}[FATAL ]${NC} $*${NC}"
exit 1
}
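A minimal usage sketch for the new lib/log.sh: LOG_TRACE, LOG_DEBUG and LOG_VERBOSE gate whether trace/debug/info also reach the terminal, while notice/warn/error/fatal always do, and every message is appended to the ${LOG_TEMP} file. The source path and variable values below are illustrative assumptions:

#!/usr/bin/env bash
# shellcheck source=/dev/null
source ./lib/log.sh                        # path assumed; callers source it relative to their own location

export LOG_VERBOSE="true"                  # info() will also print to the terminal
export LOG_DEBUG=""                        # debug() only lands in "${LOG_TEMP}"

info "Gathering variables..."              # terminal + log file
debug "FILE_TYPE:[yaml]"                   # log file only
warn "Found bash script without extension:[.sh]"   # warn/error/notice are always shown
fatal "Failed to get [GITHUB_WORKSPACE]!"  # always shown, then exits 1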

View file

@ -8,12 +8,12 @@ cat << EOF
@///////@///////////////@@@@ ( @,
@/(&/@//////////////////// @
@////////////////////////@@ @
@%////////(//////////%/////&@ @@ *,@
@@@@@/@/#/////(&////////////////// .@
*@@@@@. .%///(//@//////////////////&. .@@, @%
@@% .&@&&/@.@//&/////(////////// @@@@@@@@@ .. &@
@@% @@@@@ @&/////////////////# @/ V @@/ ,@@@ @
@@@% @@@@ .%@@@@//////#@ @ @@ @ .,.
@%////////(//////////%/////&@ @@ *,@ ______________
@@@@@/@/#/////(&////////////////// .@ / \\
*@@@@@. .%///(//@//////////////////&. .@@, @% / Don't mind me \\
@@% .&@&&/@.@//&/////(////////// @@@@@@@@@ .. &@ / I'm just looking \\
@@% @@@@@ @&/////////////////# @/ V @@/ ,@@@ @ < for some trash... |
@@@% @@@@ .%@@@@//////#@ @ @@ @ .,. \\__________________/
@@@/@( (@@@@% @/\ %
@@@@( . .@@/\ #
@ %@%

View file

@ -1,27 +0,0 @@
#!/usr/bin/env bash
declare -Agr B=(
[B]=$(echo -e "\e[44m")
[C]=$(echo -e "\e[46m")
[G]=$(echo -e "\e[42m")
[K]=$(echo -e "\e[40m")
[M]=$(echo -e "\e[45m")
[R]=$(echo -e "\e[41m")
[W]=$(echo -e "\e[47m")
[Y]=$(echo -e "\e[43m")
)
declare -Agr F=(
[B]=$(echo -e "\e[0;34m")
[C]=$(echo -e "\e[0;36m")
[G]=$(echo -e "\e[0;32m")
[K]=$(echo -e "\e[0;30m")
[M]=$(echo -e "\e[0;35m")
[R]=$(echo -e "\e[0;31m")
[W]=$(echo -e "\e[0;37m")
[Y]=$(echo -e "\e[0;33m")
)
readonly NC=$(echo -e "\e[0m")
export B
export F
export NC

View file

@ -13,9 +13,8 @@ function GetValidationInfo() {
############################################
# Print headers for user provided env vars #
############################################
echo ""
echo "--------------------------------------------"
echo "Gathering user validation information..."
info "--------------------------------------------"
info "Gathering user validation information..."
###########################################
# Skip validation if we're running locally #
@ -31,10 +30,10 @@ function GetValidationInfo() {
if [[ ${VALIDATE_ALL_CODEBASE} != "false" ]]; then
# Set to true
VALIDATE_ALL_CODEBASE="${DEFAULT_VALIDATE_ALL_CODEBASE}"
echo "- Validating ALL files in code base..."
info "- Validating ALL files in code base..."
else
# Its false
echo "- Only validating [new], or [edited] files in code base..."
info "- Only validating [new], or [edited] files in code base..."
fi
fi
@ -46,7 +45,6 @@ function GetValidationInfo() {
################################
# Convert strings to lowercase #
################################
# Loop through all languages
for LANGUAGE in "${LANGUAGE_ARRAY[@]}"; do
# build the variable
@ -55,7 +53,6 @@ function GetValidationInfo() {
eval "${VALIDATE_LANGUAGE}=${!VALIDATE_LANGUAGE,,}"
done
################################################
# Determine if any linters were explicitly set #
################################################
@ -71,7 +68,6 @@ function GetValidationInfo() {
fi
done
###################################################
# Validate if we should check individual languages #
###################################################
@ -170,23 +166,19 @@ function GetValidationInfo() {
ACTIONS_RUNNER_DEBUG="true"
fi
###################
# Debug on runner #
###################
if [[ ${ACTIONS_RUNNER_DEBUG} == "true" ]]; then
###########################
# Print the validate info #
###########################
for LINE in "${PRINT_ARRAY[@]}"; do
echo "${LINE}"
done
###########################
# Print the validate info #
###########################
for LINE in "${PRINT_ARRAY[@]}"; do
debug "${LINE}"
done
echo "--- DEBUG INFO ---"
echo "---------------------------------------------"
RUNNER=$(whoami)
echo "Runner:[${RUNNER}]"
echo "ENV:"
printenv
echo "---------------------------------------------"
fi
debug "--- DEBUG INFO ---"
debug "---------------------------------------------"
RUNNER=$(whoami)
debug "Runner:[${RUNNER}]"
PRINTENV=$(printenv)
debug "ENV:"
debug "${PRINTENV}"
debug "---------------------------------------------"
}
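The lowercase-conversion loop above leans on bash indirect expansion combined with the ,, case operator: ${!VALIDATE_LANGUAGE,,} expands the value of the variable whose name is stored in VALIDATE_LANGUAGE and lowercases it in one step. A standalone sketch of the idiom with an illustrative variable (the construction of VALIDATE_LANGUAGE itself is elided in the hunk above, so the naming is assumed):

VALIDATE_YAML="True"                                  # value as a user might export it
LANGUAGE="YAML"
VALIDATE_LANGUAGE="VALIDATE_${LANGUAGE}"              # build the variable name (assumed naming)
eval "${VALIDATE_LANGUAGE}=${!VALIDATE_LANGUAGE,,}"   # indirect expansion + ,, lowercases the value
echo "${VALIDATE_YAML}"                               # -> true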

View file

@ -17,7 +17,7 @@ function LintCodebase() {
LINTER_NAME="${1}" && shift # Pull the variable and remove from array path (Example: jsonlint)
LINTER_COMMAND="${1}" && shift # Pull the variable and remove from array path (Example: jsonlint -c ConfigFile /path/to/file)
FILE_EXTENSIONS="${1}" && shift # Pull the variable and remove from array path (Example: *.json)
FILE_ARRAY=("$@") # Array of files to validate (Example: ${FILE_ARRAY_JSON})
FILE_ARRAY=("$@") # Array of files to validate (Example: ${FILE_ARRAY_JSON})
######################
# Create Print Array #
@ -49,14 +49,11 @@ function LintCodebase() {
##############################
if [ ${ERROR_CODE} -ne 0 ]; then
# Failed
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to find [${LINTER_NAME}] in system!${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${VALIDATE_INSTALL_CMD}]${NC}"
exit 1
error "Failed to find [${LINTER_NAME}] in system!"
fatal "[${VALIDATE_INSTALL_CMD}]"
else
# Success
if [[ ${ACTIONS_RUNNER_DEBUG} == "true" ]]; then
echo -e "${NC}${F[B]}Successfully found binary for ${F[W]}[${LINTER_NAME}]${F[B]} in system location: ${F[W]}[${VALIDATE_INSTALL_CMD}]${NC}"
fi
debug "Successfully found binary for ${F[W]}[${LINTER_NAME}]${F[B]} in system location: ${F[W]}[${VALIDATE_INSTALL_CMD}]"
fi
##########################
@ -75,7 +72,7 @@ function LintCodebase() {
if [ ${#FILE_ARRAY[@]} -eq 0 ] && [ "${VALIDATE_ALL_CODEBASE}" == "false" ]; then
# No files found in commit and user has asked to not validate code base
SKIP_FLAG=1
# echo " - No files found in changeset to lint for language:[${FILE_TYPE}]"
debug " - No files found in changeset to lint for language:[${FILE_TYPE}]"
elif [ ${#FILE_ARRAY[@]} -ne 0 ]; then
# We have files added to array of files to check
LIST_FILES=("${FILE_ARRAY[@]}") # Copy the array into list
@ -121,13 +118,13 @@ function LintCodebase() {
#########################
# Print the header info #
#########################
echo "${LINE}"
info "${LINE}"
done
########################################
# Prepare context if TAP format output #
########################################
if IsTAP ; then
if IsTAP; then
TMPFILE=$(mktemp -q "/tmp/super-linter-${FILE_TYPE}.XXXXXX")
INDEX=0
mkdir -p "${REPORT_OUTPUT_FOLDER}"
@ -166,8 +163,8 @@ function LintCodebase() {
##############
# File print #
##############
echo "---------------------------"
echo "File:[${FILE}]"
info "---------------------------"
info "File:[${FILE}]"
#################################
# Add the language to the array #
@ -226,16 +223,16 @@ function LintCodebase() {
#########
# Error #
#########
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Found errors in [${LINTER_NAME}] linter!${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${LINT_CMD}]${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC} Linter CMD:[${LINTER_COMMAND} ${FILE}]${NC}"
error "Found errors in [${LINTER_NAME}] linter!"
error "[${LINT_CMD}]"
error "Linter CMD:[${LINTER_COMMAND} ${FILE}]"
# Increment the error count
(("ERRORS_FOUND_${FILE_TYPE}++"))
#######################################################
# Store the linting as a temporary file in TAP format #
#######################################################
if IsTAP ; then
if IsTAP; then
NotOkTap "${INDEX}" "${FILE}" "${TMPFILE}"
AddDetailedMessageIfEnabled "${LINT_CMD}" "${TMPFILE}"
fi
@ -243,12 +240,12 @@ function LintCodebase() {
###########
# Success #
###########
echo -e "${NC}${F[B]} - File:${F[W]}[${FILE_NAME}]${F[B]} was linted with ${F[W]}[${LINTER_NAME}]${F[B]} successfully${NC}"
info " - File:${F[W]}[${FILE_NAME}]${F[B]} was linted with ${F[W]}[${LINTER_NAME}]${F[B]} successfully"
#######################################################
# Store the linting as a temporary file in TAP format #
#######################################################
if IsTAP ; then
if IsTAP; then
OkTap "${INDEX}" "${FILE}" "${TMPFILE}"
fi
fi
@ -257,7 +254,7 @@ function LintCodebase() {
#################################
# Generate report in TAP format #
#################################
if IsTAP && [ ${INDEX} -gt 0 ] ; then
if IsTAP && [ ${INDEX} -gt 0 ]; then
HeaderTap "${INDEX}" "${REPORT_OUTPUT_FILE}"
cat "${TMPFILE}" >> "${REPORT_OUTPUT_FILE}"
fi
@ -274,18 +271,16 @@ function TestCodebase() {
LINTER_COMMAND="${3}" # Pull the variable and remove from array path (Example: jsonlint -c ConfigFile /path/to/file)
FILE_EXTENSIONS="${4}" # Pull the variable and remove from array path (Example: *.json)
INDVIDUAL_TEST_FOLDER="${5}" # Folder for specific tests
TESTS_RAN=0 # Incremented when tests are run; this will help find failed finds
TESTS_RAN=0 # Incremented when tests are run; this will help find failed finds
################
# print header #
################
echo ""
echo "----------------------------------------------"
echo "----------------------------------------------"
echo "Testing Codebase [${FILE_TYPE}] files..."
echo "----------------------------------------------"
echo "----------------------------------------------"
echo ""
info "----------------------------------------------"
info "----------------------------------------------"
info "Testing Codebase [${FILE_TYPE}] files..."
info "----------------------------------------------"
info "----------------------------------------------"
#####################################
# Validate we have linter installed #
@ -302,12 +297,11 @@ function TestCodebase() {
##############################
if [ ${ERROR_CODE} -ne 0 ]; then
# Failed
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to find [${LINTER_NAME}] in system!${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${VALIDATE_INSTALL_CMD}]${NC}"
exit 1
error "Failed to find [${LINTER_NAME}] in system!"
fatal "[${VALIDATE_INSTALL_CMD}]"
else
# Success
echo -e "${NC}${F[B]}Successfully found binary for ${F[W]}[${LINTER_NAME}]${F[B]} in system location: ${F[W]}[${VALIDATE_INSTALL_CMD}]${NC}"
info "Successfully found binary for ${F[W]}[${LINTER_NAME}]${F[B]} in system location: ${F[W]}[${VALIDATE_INSTALL_CMD}]"
fi
##########################
@ -323,7 +317,7 @@ function TestCodebase() {
########################################
# Prepare context if TAP output format #
########################################
if IsTAP ; then
if IsTAP; then
TMPFILE=$(mktemp -q "/tmp/super-linter-${FILE_TYPE}.XXXXXX")
mkdir -p "${REPORT_OUTPUT_FOLDER}"
REPORT_OUTPUT_FILE="${REPORT_OUTPUT_FOLDER}/super-linter-${FILE_TYPE}.${OUTPUT_FORMAT}"
@ -358,8 +352,8 @@ function TestCodebase() {
##############
# File print #
##############
echo "---------------------------"
echo "File:[${FILE}]"
info "---------------------------"
info "File:[${FILE}]"
########################
# Set the lint command #
@ -454,21 +448,21 @@ function TestCodebase() {
#########
# Error #
#########
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Found errors in [${LINTER_NAME}] linter!${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${LINT_CMD}]${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC} Linter CMD:[${LINTER_COMMAND} ${FILE}]${NC}"
error "Found errors in [${LINTER_NAME}] linter!"
error "[${LINT_CMD}]"
error "Linter CMD:[${LINTER_COMMAND} ${FILE}]"
# Increment the error count
(("ERRORS_FOUND_${FILE_TYPE}++"))
else
###########
# Success #
###########
echo -e "${NC}${F[B]} - File:${F[W]}[${FILE_NAME}]${F[B]} was linted with ${F[W]}[${LINTER_NAME}]${F[B]} successfully${NC}"
info " - File:${F[W]}[${FILE_NAME}]${F[B]} was linted with ${F[W]}[${LINTER_NAME}]${F[B]} successfully"
fi
#######################################################
# Store the linting as a temporary file in TAP format #
#######################################################
if IsTAP ; then
if IsTAP; then
OkTap "${TESTS_RAN}" "${FILE_NAME}" "${TMPFILE}"
fi
else
@ -482,23 +476,23 @@ function TestCodebase() {
#########
# Error #
#########
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Found errors in [${LINTER_NAME}] linter!${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} This file should have failed test case!${NC}"
echo -e "${NC}${B[R]}${F[W]}Command run:${NC}[\$${LINT_CMD}]${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${LINT_CMD}]${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC} Linter CMD:[${LINTER_COMMAND} ${FILE}]${NC}"
error "Found errors in [${LINTER_NAME}] linter!"
error "This file should have failed test case!"
error "Command run:${NC}[\$${LINT_CMD}]"
error "[${LINT_CMD}]"
error "Linter CMD:[${LINTER_COMMAND} ${FILE}]"
# Increment the error count
(("ERRORS_FOUND_${FILE_TYPE}++"))
else
###########
# Success #
###########
echo -e "${NC}${F[B]} - File:${F[W]}[${FILE_NAME}]${F[B]} failed test case with ${F[W]}[${LINTER_NAME}]${F[B]} successfully${NC}"
info " - File:${F[W]}[${FILE_NAME}]${F[B]} failed test case with ${F[W]}[${LINTER_NAME}]${F[B]} successfully"
fi
#######################################################
# Store the linting as a temporary file in TAP format #
#######################################################
if IsTAP ; then
if IsTAP; then
NotOkTap "${TESTS_RAN}" "${FILE_NAME}" "${TMPFILE}"
AddDetailedMessageIfEnabled "${LINT_CMD}" "${TMPFILE}"
fi
@ -508,7 +502,7 @@ function TestCodebase() {
###########################################################################
# Generate report in TAP format and validate with the expected TAP output #
###########################################################################
if IsTAP && [ ${TESTS_RAN} -gt 0 ] ; then
if IsTAP && [ ${TESTS_RAN} -gt 0 ]; then
HeaderTap "${TESTS_RAN}" "${REPORT_OUTPUT_FILE}"
cat "${TMPFILE}" >> "${REPORT_OUTPUT_FILE}"
@ -516,24 +510,24 @@ function TestCodebase() {
# If expected TAP report exists then compare with the generated report #
########################################################################
EXPECTED_FILE="${GITHUB_WORKSPACE}/${TEST_CASE_FOLDER}/${INDVIDUAL_TEST_FOLDER}/reports/expected-${FILE_TYPE}.tap"
if [ -e "${EXPECTED_FILE}" ] ; then
if [ -e "${EXPECTED_FILE}" ]; then
TMPFILE=$(mktemp -q "/tmp/diff-${FILE_TYPE}.XXXXXX")
## Ignore whitespace and case differences
if ! diff -a -w -i "${EXPECTED_FILE}" "${REPORT_OUTPUT_FILE}" > "${TMPFILE}" 2>&1; then
#############################################
# We failed to compare the reporting output #
#############################################
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to assert TAP output:[${LINTER_NAME}]${NC}"!
echo "Please validate the asserts!"
error "Failed to assert TAP output:[${LINTER_NAME}]"!
info "Please validate the asserts!"
cat "${TMPFILE}"
exit 1
else
# Success
echo -e "${NC}${F[B]}Successfully validation in the expected TAP format for ${F[W]}[${LINTER_NAME}]${NC}"
info "Successfully validation in the expected TAP format for ${F[W]}[${LINTER_NAME}]"
fi
else
echo -e "${NC}${F[Y]}WARN!${NC} No TAP expected file found at:[${EXPECTED_FILE}]${NC}"
echo "skipping report assertions"
warn "No TAP expected file found at:[${EXPECTED_FILE}]"
info "skipping report assertions"
#####################################
# Append the file type to the array #
#####################################
@ -548,9 +542,8 @@ function TestCodebase() {
#################################################
# We failed to find files and no tests were ran #
#################################################
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to find any tests ran for the Linter:[${LINTER_NAME}]${NC}"!
echo "Please validate logic or that tests exist!"
exit 1
error "Failed to find any tests ran for the Linter:[${LINTER_NAME}]"!
fatal "Please validate logic or that tests exist!"
fi
}
################################################################################
@ -567,11 +560,9 @@ function RunTestCases() {
#################
# Header prints #
#################
echo ""
echo "----------------------------------------------"
echo "-------------- TEST CASE RUN -----------------"
echo "----------------------------------------------"
echo ""
info "----------------------------------------------"
info "-------------- TEST CASE RUN -----------------"
info "----------------------------------------------"
#######################
# Test case languages #
@ -595,15 +586,18 @@ function RunTestCases() {
TestCodebase "JAVASCRIPT_STANDARD" "standard" "standard ${JAVASCRIPT_STANDARD_LINTER_RULES}" ".*\.\(js\)\$" "javascript"
TestCodebase "JSON" "jsonlint" "jsonlint" ".*\.\(json\)\$" "json"
TestCodebase "KOTLIN" "ktlint" "ktlint" ".*\.\(kt\|kts\)\$" "kotlin"
TestCodebase "LUA" "lua" "luacheck" ".*\.\(lua\)\$" "lua"
TestCodebase "MARKDOWN" "markdownlint" "markdownlint -c ${MARKDOWN_LINTER_RULES}" ".*\.\(md\)\$" "markdown"
TestCodebase "PERL" "perl" "perl -Mstrict -cw" ".*\.\(pl\)\$" "perl"
TestCodebase "PHP_BUILTIN" "php" "php -l" ".*\.\(php\)\$" "php"
TestCodebase "PHP_PHPCS" "phpcs" "phpcs --standard=${PHP_PHPCS_LINTER_RULES}" ".*\.\(php\)\$" "php"
TestCodebase "PHP_PHPSTAN" "phpstan" "phpstan analyse --no-progress --no-ansi -c ${PHPSTAN_LINTER_RULES}" ".*\.\(php\)\$" "php"
TestCodebase "PHP_PSALM" "psalm" "psalm --config=${PHP_PSALM_LINTER_RULES}" ".*\.\(php\)\$" "php"
TestCodebase "OPENAPI" "spectral" "spectral lint -r ${OPENAPI_LINTER_RULES}" ".*\.\(ymlopenapi\|jsonopenapi\)\$" "openapi"
TestCodebase "POWERSHELL" "pwsh" "Invoke-ScriptAnalyzer -EnableExit -Settings ${POWERSHELL_LINTER_RULES} -Path" ".*\.\(ps1\|psm1\|psd1\|ps1xml\|pssc\|psrc\|cdxml\)\$" "powershell"
TestCodebase "PROTOBUF" "protolint" "protolint lint --config_path ${PROTOBUF_LINTER_RULES}" ".*\.\(proto\)\$" "protobuf"
TestCodebase "PYTHON" "pylint" "pylint --rcfile ${PYTHON_LINTER_RULES}" ".*\.\(py\)\$" "python"
TestCodebase "PYTHON_PYLINT" "pylint" "pylint --rcfile ${PYTHON_PYLINT_LINTER_RULES}" ".*\.\(py\)\$" "python"
TestCodebase "PYTHON_FLAKE8" "flake8" "flake8 --config ${PYTHON_FLAKE8_LINTER_RULES}" ".*\.\(py\)\$" "python"
TestCodebase "RAKU" "raku" "raku -c" ".*\.\(raku\|rakumod\|rakutest\|pm6\|pl6\|p6\)\$" "raku"
TestCodebase "RUBY" "rubocop" "rubocop -c ${RUBY_LINTER_RULES}" ".*\.\(rb\)\$" "ruby"
TestCodebase "STATES" "asl-validator" "asl-validator --json-path" ".*\.\(json\)\$" "states"
@ -612,7 +606,7 @@ function RunTestCases() {
TestCodebase "TYPESCRIPT_ES" "eslint" "eslint --no-eslintrc -c ${TYPESCRIPT_LINTER_RULES}" ".*\.\(ts\)\$" "typescript"
TestCodebase "TYPESCRIPT_STANDARD" "standard" "standard --parser @typescript-eslint/parser --plugin @typescript-eslint/eslint-plugin ${TYPESCRIPT_STANDARD_LINTER_RULES}" ".*\.\(ts\)\$" "typescript"
TestCodebase "XML" "xmllint" "xmllint" ".*\.\(xml\)\$" "xml"
TestCodebase "YML" "yamllint" "yamllint -c ${YAML_LINTER_RULES}" ".*\.\(yml\|yaml\)\$" "yml"
TestCodebase "YAML" "yamllint" "yamllint -c ${YAML_LINTER_RULES}" ".*\.\(yml\|yaml\)\$" "yaml"
#################
# Footer prints #
@ -659,16 +653,12 @@ function LintAnsibleFiles() {
##############################
if [ ${ERROR_CODE} -ne 0 ]; then
# Failed
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to find ${LINTER_NAME} in system!${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${VALIDATE_INSTALL_CMD}]${NC}"
exit 1
error "Failed to find ${LINTER_NAME} in system!"
fatal "[${VALIDATE_INSTALL_CMD}]"
else
# Success
if [[ ${ACTIONS_RUNNER_DEBUG} == "true" ]]; then
# Success
echo -e "${NC}${F[B]}Successfully found binary in system${NC}"
echo "Location:[${VALIDATE_INSTALL_CMD}]"
fi
debug "Successfully found binary in system"
debug "Location:[${VALIDATE_INSTALL_CMD}]"
fi
##########################
@ -703,7 +693,7 @@ function LintAnsibleFiles() {
###################################
# Send message that were skipping #
###################################
#echo "- Skipping Ansible lint run as file(s) that were modified were read only..."
debug "- Skipping Ansible lint run as file(s) that were modified were read only..."
############################
# Create flag to skip loop #
############################
@ -718,14 +708,14 @@ function LintAnsibleFiles() {
#########################
# Print the header line #
#########################
echo "${LINE}"
info "${LINE}"
done
fi
########################################
# Prepare context if TAP output format #
########################################
if IsTAP ; then
if IsTAP; then
TMPFILE=$(mktemp -q "/tmp/super-linter-${FILE_TYPE}.XXXXXX")
INDEX=0
mkdir -p "${REPORT_OUTPUT_FOLDER}"
@ -758,8 +748,8 @@ function LintAnsibleFiles() {
##############
# File print #
##############
echo "---------------------------"
echo "File:[${FILE}]"
info "---------------------------"
info "File:[${FILE}]"
################################
# Lint the file with the rules #
@ -778,15 +768,15 @@ function LintAnsibleFiles() {
#########
# Error #
#########
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Found errors in [${LINTER_NAME}] linter!${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${LINT_CMD}]${NC}"
error "Found errors in [${LINTER_NAME}] linter!"
error "[${LINT_CMD}]"
# Increment error count
((ERRORS_FOUND_ANSIBLE++))
#######################################################
# Store the linting as a temporary file in TAP format #
#######################################################
if IsTAP ; then
if IsTAP; then
NotOkTap "${INDEX}" "${FILE}" "${TMPFILE}"
AddDetailedMessageIfEnabled "${LINT_CMD}" "${TMPFILE}"
fi
@ -795,12 +785,12 @@ function LintAnsibleFiles() {
###########
# Success #
###########
echo -e "${NC}${F[B]} - File:${F[W]}[${FILE_NAME}]${F[B]} was linted with ${F[W]}[${LINTER_NAME}]${F[B]} successfully${NC}"
info " - File:${F[W]}[${FILE_NAME}]${F[B]} was linted with ${F[W]}[${LINTER_NAME}]${F[B]} successfully"
#######################################################
# Store the linting as a temporary file in TAP format #
#######################################################
if IsTAP ; then
if IsTAP; then
OkTap "${INDEX}" "${FILE}" "${TMPFILE}"
fi
fi
@ -809,27 +799,22 @@ function LintAnsibleFiles() {
#################################
# Generate report in TAP format #
#################################
if IsTAP && [ ${INDEX} -gt 0 ] ; then
if IsTAP && [ ${INDEX} -gt 0 ]; then
HeaderTap "${INDEX}" "${REPORT_OUTPUT_FILE}"
cat "${TMPFILE}" >> "${REPORT_OUTPUT_FILE}"
fi
else # No ansible directory found in path
###############################
# Check to see if debug is on #
###############################
if [[ ${ACTIONS_RUNNER_DEBUG} == "true" ]]; then
########################
# No Ansible dir found #
########################
echo -e "${NC}${F[Y]}WARN!${NC} No Ansible base directory found at:[${ANSIBLE_DIRECTORY}]${NC}"
echo "skipping ansible lint"
fi
else
########################
# No Ansible dir found #
########################
warn "No Ansible base directory found at:[${ANSIBLE_DIRECTORY}]"
debug "skipping ansible lint"
fi
}
################################################################################
#### Function IsTap ############################################################
function IsTAP() {
if [ "${OUTPUT_FORMAT}" == "tap" ] ; then
if [ "${OUTPUT_FORMAT}" == "tap" ]; then
return 0
else
return 1
@ -839,7 +824,7 @@ function IsTAP() {
#### Function TransformTAPDetails ##############################################
function TransformTAPDetails() {
DATA=${1}
if [ -n "${DATA}" ] && [ "${OUTPUT_DETAILS}" == "detailed" ] ; then
if [ -n "${DATA}" ] && [ "${OUTPUT_DETAILS}" == "detailed" ]; then
#########################################################
# Transform new lines to \\n, remove colours and colons #
#########################################################
@ -852,8 +837,8 @@ function HeaderTap() {
################
# Pull in Vars #
################
INDEX="${1}" # File being validated
OUTPUT_FILE="${2}" # Output location
INDEX="${1}" # File being validated
OUTPUT_FILE="${2}" # Output location
###################
# Print the goods #
@ -866,9 +851,9 @@ function OkTap() {
################
# Pull in Vars #
################
INDEX="${1}" # Location
FILE="${2}" # File being validated
TEMP_FILE="${3}" # Temp file location
INDEX="${1}" # Location
FILE="${2}" # File being validated
TEMP_FILE="${3}" # Temp file location
###################
# Print the goods #
@ -881,9 +866,9 @@ function NotOkTap() {
################
# Pull in Vars #
################
INDEX="${1}" # Location
FILE="${2}" # File being validated
TEMP_FILE="${3}" # Temp file location
INDEX="${1}" # Location
FILE="${2}" # File being validated
TEMP_FILE="${3}" # Temp file location
###################
# Print the goods #
@ -896,14 +881,14 @@ function AddDetailedMessageIfEnabled() {
################
# Pull in Vars #
################
LINT_CMD="${1}" # Linter command
TEMP_FILE="${2}" # Temp file
LINT_CMD="${1}" # Linter command
TEMP_FILE="${2}" # Temp file
####################
# Check the return #
####################
DETAILED_MSG=$(TransformTAPDetails "${LINT_CMD}")
if [ -n "${DETAILED_MSG}" ] ; then
if [ -n "${DETAILED_MSG}" ]; then
printf " ---\n message: %s\n ...\n" "${DETAILED_MSG}" >> "${TEMP_FILE}"
fi
}
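Taken together, HeaderTap, OkTap, NotOkTap and AddDetailedMessageIfEnabled assemble the per-language TAP report. A rough sketch of the file they produce when OUTPUT_FORMAT=tap and OUTPUT_DETAILS=detailed; the file names are illustrative and the exact header wording comes from printf calls elided above:

cat "${REPORT_OUTPUT_FOLDER}/super-linter-YAML.tap"
# TAP version 13                 <- plan written by HeaderTap
# 1..2
# ok 1 - yaml_good_1.yml         <- appended by OkTap
# not ok 2 - yaml_bad_1.yml      <- appended by NotOkTap
#  ---
#  message: <linter output, newlines flattened to \n by TransformTAPDetails>
#  ...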