Mirror of https://github.com/super-linter/super-linter.git (synced 2024-12-22 04:32:16 -05:00)

Commit 786c6fda03: fixed merge conflicts
74 changed files with 3998 additions and 3814 deletions
@@ -13,6 +12,12 @@
# - System with Docker installed
# - Global variables met

#########################
# Source Function Files #
#########################
# shellcheck source=/dev/null
source ../lib/log.sh # Source the function script(s)

###########
# Globals #
###########
@@ -29,11 +35,9 @@ DOCKERFILE_PATH="${DOCKERFILE_PATH}" # Path to the Dockerfile to be uploaded
################################################################################
#### Function Header ###########################################################
Header() {
echo ""
echo "-------------------------------------------------------"
echo "----- GitHub Actions remove image from DockerHub ------"
echo "-------------------------------------------------------"
echo ""
info "-------------------------------------------------------"
info "----- GitHub Actions remove image from DockerHub ------"
info "-------------------------------------------------------"
}
################################################################################
#### Function ValidateInput ####################################################
@@ -42,21 +46,18 @@ ValidateInput() {
################
# Print header #
################
echo ""
echo "----------------------------------------------"
echo "Gathering variables..."
echo "----------------------------------------------"
echo ""
info "----------------------------------------------"
info "Gathering variables..."
info "----------------------------------------------"

############################
# Validate GITHUB_WORKSPACE #
############################
if [ -z "${GITHUB_WORKSPACE}" ]; then
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to get [GITHUB_WORKSPACE]!${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${GITHUB_WORKSPACE}]${NC}"
exit 1
error "Failed to get [GITHUB_WORKSPACE]!"
fatal "[${GITHUB_WORKSPACE}]"
else
echo "Successfully found:[GITHUB_WORKSPACE], value:[${GITHUB_WORKSPACE}]"
info "Successfully found:[GITHUB_WORKSPACE], value:[${GITHUB_WORKSPACE}]"
fi

#######################
@@ -64,15 +65,14 @@ ValidateInput() {
#######################
if [ -z "${IMAGE_REPO}" ]; then
# No repo was pulled
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to get [IMAGE_REPO]!${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${IMAGE_REPO}]${NC}"
exit 1
error "Failed to get [IMAGE_REPO]!"
fatal "[${IMAGE_REPO}]"
elif [[ ${IMAGE_REPO} == "github/super-linter" ]]; then
# Found our main repo
echo "Successfully found:[IMAGE_REPO], value:[${IMAGE_REPO}]"
info "Successfully found:[IMAGE_REPO], value:[${IMAGE_REPO}]"
else
# This is a fork and we cant pull vars or any info
echo -e "${NC}${F[Y]}WARN!${NC} No image to cleanup as this is a forked branch, and not being built with current automation!${NC}"
warn "No image to cleanup as this is a forked branch, and not being built with current automation!"
exit 0
fi

@@ -80,33 +80,30 @@ ValidateInput() {
# Validate IMAGE_VERSION #
##########################
if [ -z "${IMAGE_VERSION}" ]; then
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to get [IMAGE_VERSION]!${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${IMAGE_VERSION}]${NC}"
exit 1
error "Failed to get [IMAGE_VERSION]!"
fatal "[${IMAGE_VERSION}]"
else
echo "Successfully found:[IMAGE_VERSION], value:[${IMAGE_VERSION}]"
info "Successfully found:[IMAGE_VERSION], value:[${IMAGE_VERSION}]"
fi

############################
# Validate DOCKER_USERNAME #
############################
if [ -z "${DOCKER_USERNAME}" ]; then
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to get [DOCKER_USERNAME]!${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${DOCKER_USERNAME}]${NC}"
exit 1
error "Failed to get [DOCKER_USERNAME]!"
fatal "[${DOCKER_USERNAME}]"
else
echo "Successfully found:[DOCKER_USERNAME], value:[${DOCKER_USERNAME}]"
info "Successfully found:[DOCKER_USERNAME], value:[${DOCKER_USERNAME}]"
fi

############################
# Validate DOCKER_PASSWORD #
############################
if [ -z "${DOCKER_PASSWORD}" ]; then
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to get [DOCKER_PASSWORD]!${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${DOCKER_PASSWORD}]${NC}"
exit 1
error "Failed to get [DOCKER_PASSWORD]!"
fatal "[${DOCKER_PASSWORD}]"
else
echo "Successfully found:[DOCKER_PASSWORD], value:[********]"
info "Successfully found:[DOCKER_PASSWORD], value:[********]"
fi

##################################################
@@ -121,10 +118,9 @@ ValidateInput() {
#############################################
# Image is 'latest' and we will not destroy #
#############################################
echo "Image Tag is set to:[latest]..."
echo "We will never destroy latest..."
echo "Bye!"
exit 1
error "Image Tag is set to:[latest]..."
error "We will never destroy latest..."
fatal "Bye!"
fi
}
################################################################################
@@ -133,11 +129,9 @@ LoginToDocker() {
################
# Print header #
################
echo ""
echo "----------------------------------------------"
echo "Login to DockerHub..."
echo "----------------------------------------------"
echo ""
info "----------------------------------------------"
info "Login to DockerHub..."
info "----------------------------------------------"

######################
# Login to DockerHub #
@@ -154,12 +148,11 @@ LoginToDocker() {
##############################
if [ ${ERROR_CODE} -ne 0 ]; then
# ERROR
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to authenticate to DockerHub!${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${LOGIN_CMD}]${NC}"
exit 1
error "Failed to authenticate to DockerHub!"
fatal "[${LOGIN_CMD}]"
else
# SUCCESS
echo "Successfully authenticated to DockerHub!"
info "Successfully authenticated to DockerHub!"
fi
}
################################################################################
@@ -168,11 +161,9 @@ RemoveImage() {
################
# Print header #
################
echo ""
echo "----------------------------------------------"
echo "Removing the DockerFile image:[${IMAGE_REPO}:${IMAGE_VERSION}]"
echo "----------------------------------------------"
echo ""
info "----------------------------------------------"
info "Removing the DockerFile image:[${IMAGE_REPO}:${IMAGE_VERSION}]"
info "----------------------------------------------"

#####################################
# Create Token to auth to DockerHub #
@@ -193,12 +184,11 @@ RemoveImage() {
##############################
if [ ${ERROR_CODE} -ne 0 ]; then
# ERROR
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to gain token from DockerHub!${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${TOKEN}]${NC}"
exit 1
error "Failed to gain token from DockerHub!"
fatal "[${TOKEN}]"
else
# SUCCESS
echo "Successfully gained auth token from DockerHub!"
info "Successfully gained auth token from DockerHub!"
fi

#################################
@@ -218,22 +208,19 @@ RemoveImage() {
##############################
if [ ${ERROR_CODE} -ne 0 ]; then
# ERROR
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to remove tag from DockerHub!${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${REMOVE_CMD}]${NC}"
exit 1
error "Failed to remove tag from DockerHub!"
fatal "[${REMOVE_CMD}]"
else
# SUCCESS
echo "Successfully [removed] Docker image tag:[${IMAGE_VERSION}] from DockerHub!"
info "Successfully [removed] Docker image tag:[${IMAGE_VERSION}] from DockerHub!"
fi
}
################################################################################
#### Function Footer ###########################################################
Footer() {
echo ""
echo "-------------------------------------------------------"
echo "The step has completed"
echo "-------------------------------------------------------"
echo ""
info "-------------------------------------------------------"
info "The step has completed"
info "-------------------------------------------------------"
}
################################################################################
################################## MAIN ########################################
@@ -28,17 +28,17 @@ CheckGHEPid()
##################################
if [ ${PID_CHECK} -gt ${PID_CHECK_LIMIT} ]; then
# Over the limit, move on
echo "We have checked the pid ${PID_CHECK} times, moving on..."
info "We have checked the pid ${PID_CHECK} times, moving on..."
else
################################################
# Check to see if the PID is alive and running #
################################################
if [ ! -f "${GHE_CONFIG_PID}" ]; then
# File not found
echo "We're good to move forward, no .pid file found at:[${GHE_CONFIG_PID}]"
info "We're good to move forward, no .pid file found at:[${GHE_CONFIG_PID}]"
else
# Found the pid running, need to sleep
echo "Current PID found, sleeping ${SLEEP_SECONDS} seconds before next check..."
info "Current PID found, sleeping ${SLEEP_SECONDS} seconds before next check..."
################
# Sleep it off #
################
@@ -53,9 +53,9 @@ CheckGHEPid()
# Check the shell for errors #
##############################
if [ ${ERROR_CODE} -ne 0 ]; then
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to sleep!${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${SLEEP_CMD}]${NC}"
echo "Will try to call apply as last effort..."
error "Failed to sleep!"
error "[${SLEEP_CMD}]"
info "Will try to call apply as last effort..."
####################################
# Call config apply as last effort #
####################################
@@ -82,7 +82,7 @@ CheckGHEProcess()
##################################
if [ ${PROCESS_CHECK} -gt ${PROCESS_CHECK_LIMIT} ]; then
# Over the limit, move on
echo "We have checked the process ${PROCESS_CHECK} times, moving on..."
info "We have checked the process ${PROCESS_CHECK} times, moving on..."
else
####################################################
# Check to see if the process is alive and running #
@@ -99,10 +99,10 @@ CheckGHEProcess()
##############################
if [ ${ERROR_CODE} -ne 0 ]; then
# No process running on the system
echo "Were good to move forward, no process like:[${GHE_APPLY_COMMAND}] running currently on the system"
info "Were good to move forward, no process like:[${GHE_APPLY_COMMAND}] running currently on the system"
else
# Found the process running, need to sleep
echo "Current process alive:[${CHECK_PROCESS_CMD}], sleeping ${SLEEP_SECONDS} seconds before next check..."
info "Current process alive:[${CHECK_PROCESS_CMD}], sleeping ${SLEEP_SECONDS} seconds before next check..."
################
# Sleep it off #
################
@@ -117,9 +117,9 @@ CheckGHEProcess()
# Check the shell for errors #
##############################
if [ ${ERROR_CODE} -ne 0 ]; then
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to sleep!${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${SLEEP_CMD}]${NC}"
echo "Will try to call apply as last effort..."
error "Failed to sleep!"
error "[${SLEEP_CMD}]"
info "Will try to call apply as last effort..."
####################################
# Call config apply as last effort #
####################################
@@ -144,7 +144,7 @@ RunConfigApply()
##########
# Header #
##########
echo "Running ${GHE_APPLY_COMMAND} to the server..."
info "Running ${GHE_APPLY_COMMAND} to the server..."

##############################################
# Run the command to apply changes to server #
@@ -161,12 +161,11 @@ RunConfigApply()
##############################
if [ ${ERROR_CODE} -ne 0 ]; then
# Errors
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to run config apply command!${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${APPLY_CMD}]${NC}"
exit 1
error "Failed to run config apply command!"
fatal "[${APPLY_CMD}]"
else
# Success
echo -e "${NC}${F[B]}Successfully ran ${F[C]}${GHE_APPLY_COMMAND}${NC}"
info "Successfully ran ${F[C]}${GHE_APPLY_COMMAND}"
fi
}
################################################################################
@@ -1,22 +0,0 @@
TAP version 13
1..8
not ok 1 - cfn_bad_1.json
---
message: E3001 Invalid or unsupported Type AWS Route53 HostedZonee for resource DNS in us-east-1\n/tmp/lint/.automation/test/cfn/cfn_bad_1.json 6 10\n
...
not ok 2 - cfn_bad_2.yaml
---
message: E3001 Invalid or unsupported Type AWS Route53 HostedZonee for resource DNS in us-east-1\n/tmp/lint/.automation/test/cfn/cfn_bad_2.yaml 5 5\n
...
not ok 3 - cfn_bad_3.json
---
message: E3002 Invalid Property Resources/myDNSRecord/Properties/Ttl\n/tmp/lint/.automation/test/cfn/cfn_bad_3.json 11 13\n
...
not ok 4 - cfn_bad_4.yaml
---
message: E3002 Invalid Property Resources/myDNSRecord/Properties/Ttl\n/tmp/lint/.automation/test/cfn/cfn_bad_4.yaml 9 7\n
...
ok 5 - cfn_good_1.json
ok 6 - cfn_good_2.yaml
ok 7 - cfn_good_3.json
ok 8 - cfn_good_4.yaml
@@ -0,0 +1,22 @@
TAP version 13
1..8
not ok 1 - cloudformation_bad_1.json
---
message: E3001 Invalid or unsupported Type AWS Route53 HostedZonee for resource DNS in us-east-1\n/tmp/lint/.automation/test/cloudformation/cloudformation_bad_1.json 6 10\n
...
not ok 2 - cloudformation_bad_2.yaml
---
message: E3001 Invalid or unsupported Type AWS Route53 HostedZonee for resource DNS in us-east-1\n/tmp/lint/.automation/test/cloudformation/cloudformation_bad_2.yaml 5 5\n
...
not ok 3 - cloudformation_bad_3.json
---
message: E3002 Invalid Property Resources/myDNSRecord/Properties/Ttl\n/tmp/lint/.automation/test/cloudformation/cloudformation_bad_3.json 11 13\n
...
not ok 4 - cloudformation_bad_4.yaml
---
message: E3002 Invalid Property Resources/myDNSRecord/Properties/Ttl\n/tmp/lint/.automation/test/cloudformation/cloudformation_bad_4.yaml 9 7\n
...
ok 5 - cloudformation_good_1.json
ok 6 - cloudformation_good_2.yaml
ok 7 - cloudformation_good_3.json
ok 8 - cloudformation_good_4.yaml
.automation/test/groovy/README.md (new file, 13 lines)
@@ -0,0 +1,13 @@
# Groovy Test Cases
This folder holds the test cases for **Groovy**.

## Additional Docs
No Additional information is needed for this test case.

## Good Test Cases
The test cases denoted: `LANGUAGE_good_FILE.EXTENSION` are all valid, and should pass successfully when linted.
- **Note:** They are linted utilizing the default linter rules.

## Bad Test Cases
The test cases denoted: `LANGUAGE_bad_FILE.EXTENSION` are **NOT** valid, and should trigger errors when linted.
- **Note:** They are linted utilizing the default linter rules.
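Editor's note: the good/bad naming convention described in these test-case READMEs is what the image's test automation keys on. As a rough, hedged illustration (not part of this commit), the fixtures can be linted locally with super-linter's documented RUN_LOCAL mode, mounting the checkout at /tmp/lint so the default rules run over .automation/test:

# Minimal sketch, assuming Docker is installed and the published image is available.
# RUN_LOCAL=true tells super-linter to lint the mounted path instead of a GitHub event payload.
docker run \
  -e RUN_LOCAL=true \
  -v "$(pwd)":/tmp/lint \
  github/super-linter:latest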
.automation/test/groovy/groovy_bad_01.groovy (new file, 6 lines)
@@ -0,0 +1,6 @@
class Example {
static void main(String[] args)
File file = new File("E:/Example.txt")
The file ${file.absolutePath} has ${file.length()} bytes"
}
}
.automation/test/groovy/groovy_good_01.groovy (new file, 6 lines)
@@ -0,0 +1,6 @@
class Example {
static void main(String[] args) {
File file = new File("E:/Example.txt")
println "The file ${file.absolutePath} has ${file.length()} bytes"
}
}
.automation/test/lua/README.md (new file, 13 lines)
@@ -0,0 +1,13 @@
# Lua Test Cases
This folder holds the test cases for **Lua**.

## Additional Docs
No Additional information is needed for this test case.

## Good Test Cases
The test cases denoted: `LANGUAGE_good_FILE.EXTENSION` are all valid, and should pass successfully when linted.
- **Note:** They are linted utilizing the default linter rules.

## Bad Test Cases
The test cases denoted: `LANGUAGE_bad_FILE.EXTENSION` are **NOT** valid, and should trigger errors when linted.
- **Note:** They are linted utilizing the default linter rules.
.automation/test/lua/lua_bad_1.lua (new file, 9 lines)
@@ -0,0 +1,9 @@
package.loaded[...] = {}

local function helper(...)
-- NYI
end

function embrace(opt)
local opt = opt or "default"
return hepler(opt.."?")
.automation/test/lua/lua_good_1.lua (new file, 12 lines)
@@ -0,0 +1,12 @@
local embracer = {}

local function helper()
-- NYI wontfix
end

function embracer.embrace(opt)
opt = opt or "default"
return helper(opt.."?")
end

return embracer
.automation/test/php/php_bad_2.php (new file, 15 lines)
@@ -0,0 +1,15 @@
<?php

/**
* @return array<string>
*/
function takesAnInt(int $i) {
return [$i, "hello"];
}

$data = ["some text", 5];
takesAnInt($data[0]);

$condition = rand(0, 5);
iff ($condition) {
} elseif ($condition) {}
.automation/test/php/php_good_2.php (new file, 25 lines)
@@ -0,0 +1,25 @@
<?php

/**
* @return array<string>
*/
function helloName(string $name): array
{
return ["hello", $name];
}

function helloSuperLinter(): void
{
$hello = helloName("Super-Linter");
echo implode(" ", $hello) . PHP_EOL;
}

function helloOrWorld(): void
{
$random = rand(0, 10);
if ($random >= 5) {
echo "Hello";
} else {
echo "World";
}
}
@@ -1,7 +0,0 @@
TAP version 13
1..2
not ok 1 - php_bad_1.php
---
message: PHP Parse error syntax error, unexpected 'pe98y' (T_STRING) in /tmp/lint/.automation/test/php/php_bad_1.php on line 3\nErrors parsing /tmp/lint/.automation/test/php/php_bad_1.php\n
...
ok 2 - php_good_1.php
.automation/test/php/reports/expected-PHP_BUILTIN.tap (new file, 12 lines)
@@ -0,0 +1,12 @@
TAP version 13
1..4
not ok 1 - php_bad_1.php
---
message: PHP Parse error syntax error, unexpected 'pe98y' (T_STRING) in /tmp/lint/.automation/test/php/php_bad_1.php on line 3\nErrors parsing /tmp/lint/.automation/test/php/php_bad_1.php\n
...
not ok 2 - php_bad_2.php
---
message: PHP Parse error syntax error, unexpected '}' in /tmp/lint/.automation/test/php/php_bad_2.php on line 15\nErrors parsing /tmp/lint/.automation/test/php/php_bad_2.php\n
...
ok 3 - php_good_1.php
ok 4 - php_good_2.php
.automation/test/php/reports/expected-PHP_PHPCS.tap.ignored (new file, 12 lines)
@@ -0,0 +1,12 @@
TAP version 13
1..4
not ok 1 - php_bad_1.php
---
message: \nFILE /tmp/lint/.automation/test/php/php_bad_1.php\n----------------------------------------------------------------------\nFOUND 7 ERRORS AFFECTING 1 LINE\n----------------------------------------------------------------------\n 3 | ERROR | [x] Expected at least 1 space before "-"; 0 found\n 3 | ERROR | [x] Expected at least 1 space after "-"; 0 found\n 3 | ERROR | [x] Expected at least 1 space before "="; 0 found\n 3 | ERROR | [x] Expected at least 1 space after "="; 0 found\n 3 | ERROR | [x] Expected at least 1 space before "-"; 0 found\n 3 | ERROR | [x] Expected at least 1 space before "-"; 0 found\n 3 | ERROR | [x] Expected at least 1 space after "-"; 0 found\n----------------------------------------------------------------------\nPHPCBF CAN FIX THE 7 MARKED SNIFF VIOLATIONS AUTOMATICALLY\n----------------------------------------------------------------------\n\n
...
not ok 2 - php_bad_2.php
---
message: \nFILE /tmp/lint/.automation/test/php/php_bad_2.php\n----------------------------------------------------------------------\nFOUND 4 ERRORS AND 1 WARNING AFFECTING 4 LINES\n----------------------------------------------------------------------\n 1 | WARNING | [ ] A file should declare new symbols (classes,\n | | functions, constants, etc.) and cause no other\n | | side effects, or it should execute logic with\n | | side effects, but should not do both. The first\n | | symbol is defined on line 6 and the first side\n | | effect is on line 10.\n 6 | ERROR | [x] Opening brace should be on a new line\n 14 | ERROR | [x] Space before opening parenthesis of function call\n | | prohibited\n 15 | ERROR | [x] Newline required after opening brace\n 15 | ERROR | [x] Closing brace must be on a line by itself\n----------------------------------------------------------------------\nPHPCBF CAN FIX THE 4 MARKED SNIFF VIOLATIONS AUTOMATICALLY\n----------------------------------------------------------------------\n\n
...
ok 3 - php_good_1.php
ok 4 - php_good_2.php
.automation/test/php/reports/expected-PHP_PHPSTAN.tap (new file, 12 lines)
@@ -0,0 +1,12 @@
TAP version 13
1..4
not ok 1 - php_bad_1.php
---
message: ------ ---------------------------------------------- \n Line php_bad_1.php \n ------ ---------------------------------------------- \n 3 Invalid numeric literal on line 3 \n 3 Invalid numeric literal on line 3 \n 3 Syntax error, unexpected '=' on line 3 \n 3 Syntax error, unexpected T_LNUMBER on line 3 \n 3 Syntax error, unexpected T_STRING on line 3 \n 3 Syntax error, unexpected T_STRING on line 3 \n ------ ---------------------------------------------- \n\n [ERROR] Found 6 errors \n
...
not ok 2 - php_bad_2.php
---
message: ------ ----------------------------------------- \n Line php_bad_2.php \n ------ ----------------------------------------- \n 15 Syntax error, unexpected '}' on line 15 \n ------ ----------------------------------------- \n\n [ERROR] Found 1 error \n
...
ok 3 - php_good_1.php
ok 4 - php_good_2.php
.automation/test/php/reports/expected-PHP_PSALM.tap.ignored (new file, 12 lines)
@@ -0,0 +1,12 @@
TAP version 13
1..4
not ok 1 - php_bad_1.php
---
message: Scanning files...\nAnalyzing files...\n\nE\n\nERROR ParseError - php/php_bad_1.php 3 2 - Syntax error, unexpected T_STRING on line 3 (see https //psalm.dev/173)\n2pe98y r-n0u823n=r 092u3- r08u2q098ry 09nq2yr09n2yr9 y2n-93yr 298yr3 29\n\n\nERROR UndefinedConstant - php/php_bad_1.php 3 2 - Const pe98y is not defined (see https //psalm.dev/020)\n2pe98y r-n0u823n=r 092u3- r08u2q098ry 09nq2yr09n2yr9 y2n-93yr 298yr3 29\n\n\nERROR UndefinedConstant - php/php_bad_1.php 3 8 - Const r is not defined (see https //psalm.dev/020)\n2pe98y r-n0u823n=r 092u3- r08u2q098ry 09nq2yr09n2yr9 y2n-93yr 298yr3 29\n\n\nERROR UndefinedConstant - php/php_bad_1.php 3 10 - Const n0u823n is not defined (see https //psalm.dev/020)\n2pe98y r-n0u823n=r 092u3- r08u2q098ry 09nq2yr09n2yr9 y2n-93yr 298yr3 29\n\n\nERROR ParseError - php/php_bad_1.php 3 17 - Syntax error, unexpected '=' on line 3 (see https //psalm.dev/173)\n2pe98y r-n0u823n=r 092u3- r08u2q098ry 09nq2yr09n2yr9 y2n-93yr 298yr3 29\n\n\nERROR UndefinedConstant - php/php_bad_1.php 3 18 - Const r is not defined (see https //psalm.dev/020)\n2pe98y r-n0u823n=r 092u3- r08u2q098ry 09nq2yr09n2yr9 y2n-93yr 298yr3 29\n\n\nERROR ParseError - php/php_bad_1.php 3 21 - Invalid numeric literal on line 3 (see https //psalm.dev/173)\n2pe98y r-n0u823n=r 092u3- r08u2q098ry 09nq2yr09n2yr9 y2n-93yr 298yr3 29\n\n\nERROR UndefinedConstant - php/php_bad_1.php 3 24 - Const u3 is not defined (see https //psalm.dev/020)\n2pe98y r-n0u823n=r 092u3- r08u2q098ry 09nq2yr09n2yr9 y2n-93yr 298yr3 29\n\n\nERROR UndefinedConstant - php/php_bad_1.php 3 28 - Const r08u2q098ry is not defined (see https //psalm.dev/020)\n2pe98y r-n0u823n=r 092u3- r08u2q098ry 09nq2yr09n2yr9 y2n-93yr 298yr3 29\n\n\nERROR ParseError - php/php_bad_1.php 3 40 - Syntax error, unexpected T_LNUMBER on line 3 (see https //psalm.dev/173)\n2pe98y r-n0u823n=r 092u3- r08u2q098ry 09nq2yr09n2yr9 y2n-93yr 298yr3 29\n\n\nERROR UndefinedConstant - php/php_bad_1.php 3 42 - Const nq2yr09n2yr9 is not defined (see https //psalm.dev/020)\n2pe98y r-n0u823n=r 092u3- r08u2q098ry 09nq2yr09n2yr9 y2n-93yr 298yr3 29\n\n\nERROR UndefinedConstant - php/php_bad_1.php 3 55 - Const y2n is not defined (see https //psalm.dev/020)\n2pe98y r-n0u823n=r 092u3- r08u2q098ry 09nq2yr09n2yr9 y2n-93yr 298yr3 29\n\n\nERROR ParseError - php/php_bad_1.php 3 61 - Syntax error, unexpected T_STRING on line 3 (see https //psalm.dev/173)\n2pe98y r-n0u823n=r 092u3- r08u2q098ry 09nq2yr09n2yr9 y2n-93yr 298yr3 29\n\n\nERROR UndefinedConstant - php/php_bad_1.php 3 61 - Const yr is not defined (see https //psalm.dev/020)\n2pe98y r-n0u823n=r 092u3- r08u2q098ry 09nq2yr09n2yr9 y2n-93yr 298yr3 29\n\n\nERROR UndefinedConstant - php/php_bad_1.php 3 68 - Const yr3 is not defined (see https //psalm.dev/020)\n2pe98y r-n0u823n=r 092u3- r08u2q098ry 09nq2yr09n2yr9 y2n-93yr 298yr3 29\n\n\n------------------------------\n15 errors found\n------------------------------\n\n
...
not ok 2 - php_bad_2.php
---
message: Scanning files...\nAnalyzing files...\n\nE\n\nERROR InvalidReturnType - php/php_bad_2.php 4 12 - The declared return type 'array<array-key, string>' for takesAnInt is incorrect, got 'array{int, string(hello)}' (see https //psalm.dev/011)\n * @return array<string>\n\n\nERROR InvalidReturnStatement - php/php_bad_2.php 7 12 - The inferred type 'array{int, string(hello)}' does not match the declared return type 'array<array-key, string>' for takesAnInt (see https //psalm.dev/128)\n return [$i, "hello"];\n\n\nERROR InvalidScalarArgument - php/php_bad_2.php 11 12 - Argument 1 of takesAnInt expects int, string(some text) provided (see https //psalm.dev/012)\ntakesAnInt($data[0]);\n\n\nERROR ParseError - php/php_bad_2.php 15 1 - Syntax error, unexpected '}' on line 15 (see https //psalm.dev/173)\n} elseif ($condition) {}\n\n\n------------------------------\n4 errors found\n------------------------------\nPsalm can automatically fix 1 of these issues.\nRun Psalm again with \n[30;48;5;195m--alter --issues=InvalidReturnType --dry-run\nto see what it can fix.\n------------------------------\n\n
...
ok 3 - php_good_1.php
ok 4 - php_good_2.php
@@ -3,9 +3,9 @@ from os import getenv, path
from pprint import pprint
import sys

import click # pylint: disable=import-error
from dotenv import load_dotenv # pylint: disable=import-error
import requests # pylint: disable=import-error
import click # pylint: disable=import-error
from dotenv import load_dotenv # pylint: disable=import-error
import requests # pylint: disable=import-error

env = load_dotenv()
api_url = getenv('API_URL', default='https://api.github.com/graphql')
@@ -13,8 +13,8 @@ github_token = getenv("GITHUB_TOKEN", default=None)

if github_token is None:
sys.exit("GitHub Token is not set." +
"Please set the GITHUB_TOKEN env variable in your system or " +
"the .env file of your project.")
"Please set the GITHUB_TOKEN env variable in your system or " +
"the .env file of your project.")

client_id = getenv('CLIENT_ID', default='copy_labels.py')
headers = {
@@ -23,6 +23,7 @@ headers = {
'Content-Type': 'application/json'
}


def create_label(repo_id, label):
"""
Create label in the supplied repo.
@@ -52,6 +53,7 @@ def create_label(repo_id, label):

return response


def get_labels(owner, repo):
"""
Gets a list of labels from the supplied repo.
@@ -62,7 +64,7 @@ def get_labels(owner, repo):
:return: A tuple with the GitHub id for the repository and a list of labels defined in the repository
"""

query_variables = { "owner": owner, "name": repo, }
query_variables = {"owner": owner, "name": repo, }

with open(path.join(path.dirname(__file__), 'queries/get_repo_data.gql'), 'r') as query_file:
query = "".join(query_file.readlines())
@@ -83,6 +85,7 @@ def get_labels(owner, repo):
'[ERROR] getting issue labels. Status Code: {status_code} - Message: {result}'.format(
status_code=status_code, result=result["message"]))


def delete_label(label_id):
"""
Delete the specified label
@@ -106,6 +109,7 @@ def delete_label(label_id):

return result


@click.command()
@click.option('--dry', is_flag=True)
@click.argument('source_repo')
@@ -149,7 +153,8 @@ def copy_labels(source_repo, target_repo, dry):

print('Done')


if __name__ == "__main__":
# Pylint doesn't know that @click.command takes care of injecting the
# function parameters. Disabling Pylint error.
copy_labels() # pylint: disable=no-value-for-parameter
copy_labels() # pylint: disable=no-value-for-parameter
.automation/test/states/README.md (new file, 19 lines)
@@ -0,0 +1,19 @@
# AWS States language Test Cases

This folder holds the test cases for **AWS States Language**.

## Additional Docs

No Additional information is needed for this test case.

## Good Test Cases

The test cases denoted: `LANGUAGE_good_FILE.EXTENSION` are all valid, and should pass successfully when linted.

- **Note:** They are linted utilizing the default linter rules.

## Bad Test Cases

The test cases denoted: `LANGUAGE_bad_FILE.EXTENSION` are **NOT** valid, and should trigger errors when linted.

- **Note:** They are linted utilizing the default linter rules.
@@ -0,0 +1,7 @@
TAP version 13
1..2
not ok 1 - json_bad_1.json
---
message: Error Parse error on line 6 \n...ng" { "level" 'ignore', "space\n----------------------^\nExpecting 'STRING', 'NUMBER', 'NULL', 'TRUE', 'FALSE', '{', '[', got 'undefined'\n at Object.parseError (/node_modules/jsonlint/lib/jsonlint.js 55 11)\n at Object.parse (/node_modules/jsonlint/lib/jsonlint.js 132 22)\n at parse (/node_modules/jsonlint/lib/cli.js 82 14)\n at main (/node_modules/jsonlint/lib/cli.js 135 14)\n at Object.<anonymous> (/node_modules/jsonlint/lib/cli.js 179 1)\n at Module._compile (internal/modules/cjs/loader.js 1138 30)\n at Object.Module._extensions..js (internal/modules/cjs/loader.js 1158 10)\n at Module.load (internal/modules/cjs/loader.js 986 32)\n at Function.Module._load (internal/modules/cjs/loader.js 879 14)\n at Function.executeUserEntryPoint [as runMain] (internal/modules/run_main.js 71 12)\n
...
ok 2 - json_good_1.json
.automation/test/states/states_bad_1.json (new file, 10 lines)
@@ -0,0 +1,10 @@
{
"Comment": "A simple minimal example of the States language",
"StartAt": "Hello World",
"States": {
"Hello World": {
"Resource": "arn:aws:lambda:us-east-1:123456789012:function:HelloWorld",
"End": true
}
}
}
.automation/test/states/states_good_1.json (new file, 11 lines)
@@ -0,0 +1,11 @@
{
"Comment": "A simple minimal example of the States language",
"StartAt": "Hello World",
"States": {
"Hello World": {
"Type": "Task",
"Resource": "arn:aws:lambda:us-east-1:123456789012:function:HelloWorld",
"End": true
}
}
}
@@ -0,0 +1,19 @@
# AWS States language Test Cases

This folder holds the test cases for **Terraform**.

## Additional Docs

No Additional information is needed for this test case.

## Good Test Cases

The test cases denoted: `LANGUAGE_good_FILE.EXTENSION` are all valid, and should pass successfully when linted.

- **Note:** They are linted utilizing the default linter rules.

## Bad Test Cases

The test cases denoted: `LANGUAGE_bad_FILE.EXTENSION` are **NOT** valid, and should trigger errors when linted.

- **Note:** They are linted utilizing the default linter rules.
.automation/test/terraform_terrascan/README.md (new file, 19 lines)
@@ -0,0 +1,19 @@
# AWS States language Test Cases

This folder holds the test cases for **Terraform terrascan**.

## Additional Docs

No Additional information is needed for this test case.

## Good Test Cases

The test cases denoted: `LANGUAGE_good_FILE.EXTENSION` are all valid, and should pass successfully when linted.

- **Note:** They are linted utilizing the default linter rules.

## Bad Test Cases

The test cases denoted: `LANGUAGE_bad_FILE.EXTENSION` are **NOT** valid, and should trigger errors when linted.

- **Note:** They are linted utilizing the default linter rules.
@@ -0,0 +1,9 @@
resource "aws_instance" "bad" {
ami = "ami-0ff8a91507f77f867"
instance_type = "t2.small"
associate_public_ip_address = true

ebs_block_device {
encrypted = true
}
}
@@ -0,0 +1,9 @@
resource "aws_instance" "good" {
ami = "ami-0ff8a91507f77f867"
instance_type = "t2.small"
associate_public_ip_address = false

ebs_block_device {
encrypted = true
}
}
@@ -0,0 +1,22 @@
TAP version 13
1..2
not ok 1 - terraform_bad_1.tf
terrascan version 0.2.3
Logging level set to error.
........................................................................
----------------------------------------------------------------------
Ran 72 tests in 0.008s

OK

Processed 1 files in /tmp/lint/.terrascan


Processed on 07/24/2020 at 03:22
Results (took 0.42 seconds):

Failures: (1)
[high] [aws_instance.bad.associate_public_ip_address] should not be 'True'. Is: 'True' in module .terrascan, file /tmp/lint/.terrascan/terraform_bad_1.tf

Errors: (0)
ok 2 - terraform_good_1.tf
@@ -1,6 +1,6 @@
# Yml Test Cases
# Yaml Test Cases

This folder holds the test cases for **Yml**.
This folder holds the test cases for **Yaml**.

## Additional Docs
@@ -14,6 +14,12 @@
# - System with Docker installed
# - Global variables met

#########################
# Source Function Files #
#########################
# shellcheck source=/dev/null
source ../lib/log.sh # Source the function script(s)

###########
# Globals #
###########
@@ -35,11 +41,9 @@ UPDATE_MAJOR_TAG=0 # Flag to deploy the major tag version as
################################################################################
#### Function Header ###########################################################
Header() {
echo ""
echo "-------------------------------------------------------"
echo "---- GitHub Actions Upload image to [${REGISTRY}] ----"
echo "-------------------------------------------------------"
echo ""
info "-------------------------------------------------------"
info "---- GitHub Actions Upload image to [${REGISTRY}] ----"
info "-------------------------------------------------------"
}
################################################################################
#### Function ValidateInput ####################################################
@@ -48,32 +52,28 @@ ValidateInput() {
################
# Print header #
################
echo ""
echo "----------------------------------------------"
echo "Gathering variables..."
echo "----------------------------------------------"
echo ""
info "----------------------------------------------"
info "Gathering variables..."
info "----------------------------------------------"

#############################
# Validate GITHUB_WORKSPACE #
#############################
if [ -z "${GITHUB_WORKSPACE}" ]; then
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to get [GITHUB_WORKSPACE]!${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${GITHUB_WORKSPACE}]${NC}"
exit 1
error "Failed to get [GITHUB_WORKSPACE]!"
fatal "[${GITHUB_WORKSPACE}]"
else
echo -e "${NC}${F[B]}Successfully found:${F[W]}[GITHUB_WORKSPACE]${F[B]}, value:${F[W]}[${GITHUB_WORKSPACE}]${NC}"
info "Successfully found:${F[W]}[GITHUB_WORKSPACE]${F[B]}, value:${F[W]}[${GITHUB_WORKSPACE}]"
fi

#####################
# Validate REGISTRY #
#####################
if [ -z "${REGISTRY}" ]; then
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to get [REGISTRY]!${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${REGISTRY}]${NC}"
exit 1
error "Failed to get [REGISTRY]!"
fatal "[${REGISTRY}]"
else
echo -e "${NC}${F[B]}Successfully found:${F[W]}[REGISTRY]${F[B]}, value:${F[W]}[${REGISTRY}]${NC}"
info "Successfully found:${F[W]}[REGISTRY]${F[B]}, value:${F[W]}[${REGISTRY}]"
fi

#####################################################
@@ -84,22 +84,20 @@ ValidateInput() {
# Validate GPR_USERNAME #
#########################
if [ -z "${GPR_USERNAME}" ]; then
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to get [GPR_USERNAME]!${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${GPR_USERNAME}]${NC}"
exit 1
error "Failed to get [GPR_USERNAME]!"
fatal "[${GPR_USERNAME}]"
else
echo -e "${NC}${F[B]}Successfully found:${F[W]}[GPR_USERNAME]${F[B]}, value:${F[W]}[${GPR_USERNAME}]${NC}"
info "Successfully found:${F[W]}[GPR_USERNAME]${F[B]}, value:${F[W]}[${GPR_USERNAME}]"
fi

######################
# Validate GPR_TOKEN #
######################
if [ -z "${GPR_TOKEN}" ]; then
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to get [GPR_TOKEN]!${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${GPR_TOKEN}]${NC}"
exit 1
error "Failed to get [GPR_TOKEN]!"
fatal "[${GPR_TOKEN}]"
else
echo -e "${NC}${F[B]}Successfully found:${F[W]}[GPR_TOKEN]${F[B]}, value:${F[W]}[********]${NC}"
info "Successfully found:${F[W]}[GPR_TOKEN]${F[B]}, value:${F[W]}[********]"
fi
########################################
# See if we need values for Ducker hub #
@@ -109,48 +107,44 @@ ValidateInput() {
# Validate DOCKER_USERNAME #
############################
if [ -z "${DOCKER_USERNAME}" ]; then
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to get [DOCKER_USERNAME]!${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${DOCKER_USERNAME}]${NC}"
exit 1
error "Failed to get [DOCKER_USERNAME]!"
fatal "[${DOCKER_USERNAME}]"
else
echo -e "${NC}${F[B]}Successfully found:${F[W]}[DOCKER_USERNAME]${F[B]}, value:${F[W]}[${DOCKER_USERNAME}]${NC}"
info "Successfully found:${F[W]}[DOCKER_USERNAME]${F[B]}, value:${F[W]}[${DOCKER_USERNAME}]"
fi

############################
# Validate DOCKER_PASSWORD #
############################
if [ -z "${DOCKER_PASSWORD}" ]; then
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to get [DOCKER_PASSWORD]!${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${DOCKER_PASSWORD}]${NC}"
exit 1
error "Failed to get [DOCKER_PASSWORD]!"
fatal "[${DOCKER_PASSWORD}]"
else
echo -e "${NC}${F[B]}Successfully found:${F[W]}[DOCKER_PASSWORD]${F[B]}, value:${F[B]}[********]${NC}"
info "Successfully found:${F[W]}[DOCKER_PASSWORD]${F[B]}, value:${F[B]}[********]"
fi
###########################################
# We were not passed a registry to update #
###########################################
else
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to find a valid registry!${NC}"
echo "Registry:[${REGISTRY}]"
exit 1
error "Failed to find a valid registry!"
fatal "Registry:[${REGISTRY}]"
fi

#######################
# Validate IMAGE_REPO #
#######################
if [ -z "${IMAGE_REPO}" ]; then
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to get [IMAGE_REPO]!${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${IMAGE_REPO}]${NC}"
exit 1
error "Failed to get [IMAGE_REPO]!"
fatal "[${IMAGE_REPO}]"
else
echo -e "${NC}${F[B]}Successfully found:${F[W]}[IMAGE_REPO]${F[B]}, value:${F[W]}[${IMAGE_REPO}]${NC}"
info "Successfully found:${F[W]}[IMAGE_REPO]${F[B]}, value:${F[W]}[${IMAGE_REPO}]"
###############################################
# Need to see if GPR registry and update name #
###############################################
if [[ ${REGISTRY} == "GPR" ]]; then
NAME="docker.pkg.github.com/${IMAGE_REPO}/super-linter"
IMAGE_REPO="${NAME}"
echo "Updated [IMAGE_REPO] to:[${IMAGE_REPO}] for GPR"
info "Updated [IMAGE_REPO] to:[${IMAGE_REPO}] for GPR"
fi
fi

@@ -158,8 +152,8 @@ ValidateInput() {
# Validate IMAGE_VERSION #
##########################
if [ -z "${IMAGE_VERSION}" ]; then
echo -e "${NC}${F[Y]}WARN!${NC} Failed to get [IMAGE_VERSION]!${NC}"
echo "Pulling from Branch Name..."
warn "Failed to get [IMAGE_VERSION]!"
info "Pulling from Branch Name..."
##############################
# Get the name of the branch #
##############################
@@ -174,9 +168,8 @@ ValidateInput() {
# Check the shell for errors #
##############################
if [ ${ERROR_CODE} -ne 0 ]; then
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to get branch name!${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${BRANCH_NAME}]${NC}"
exit 1
error "Failed to get branch name!"
fatal "[${BRANCH_NAME}]"
fi

##################################
@@ -188,9 +181,9 @@ ValidateInput() {
# Set the IMAGE_VERSION to the BRANCH_NAME #
############################################
IMAGE_VERSION="${BRANCH_NAME}"
echo "Tag:[${IMAGE_VERSION}]"
info "Tag:[${IMAGE_VERSION}]"
else
echo -e "${NC}${F[B]}Successfully found:${F[W]}[IMAGE_VERSION]${F[B]}, value:${F[W]}[${IMAGE_VERSION}]${NC}"
info "Successfully found:${F[W]}[IMAGE_VERSION]${F[B]}, value:${F[W]}[${IMAGE_VERSION}]"
fi

##################################
@@ -214,18 +207,17 @@ ValidateInput() {
###################################
UPDATE_MAJOR_TAG=1

echo "- Also deploying a major tag of:[${MAJOR_TAG}]"
info "- Also deploying a major tag of:[${MAJOR_TAG}]"
fi

############################
# Validate DOCKERFILE_PATH #
############################
if [ -z "${DOCKERFILE_PATH}" ]; then
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to get [DOCKERFILE_PATH]!${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${DOCKERFILE_PATH}]${NC}"
exit 1
error "Failed to get [DOCKERFILE_PATH]!"
fatal "[${DOCKERFILE_PATH}]"
else
echo -e "${NC}${F[B]}Successfully found:${F[W]}[DOCKERFILE_PATH]${F[B]}, value:${F[W]}[${DOCKERFILE_PATH}]${NC}"
info "Successfully found:${F[W]}[DOCKERFILE_PATH]${F[B]}, value:${F[W]}[${DOCKERFILE_PATH}]"
fi
}
################################################################################
@@ -242,11 +234,9 @@ Authenticate() {
################
# Print header #
################
echo ""
echo "----------------------------------------------"
echo "Login to ${NAME}..."
echo "----------------------------------------------"
echo ""
info "----------------------------------------------"
info "Login to ${NAME}..."
info "----------------------------------------------"

###################
# Auth to service #
@@ -263,12 +253,11 @@ Authenticate() {
##############################
if [ ${ERROR_CODE} -ne 0 ]; then
# ERROR
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to authenticate to ${NAME}!${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${LOGIN_CMD}]${NC}"
exit 1
error "Failed to authenticate to ${NAME}!"
fatal "[${LOGIN_CMD}]"
else
# SUCCESS
echo -e "${NC}${F[B]}Successfully authenticated to ${F[C]}${NAME}${F[B]}!${NC}"
info "Successfully authenticated to ${F[C]}${NAME}${F[B]}!"
fi
}
################################################################################
@@ -277,21 +266,18 @@ BuildImage() {
################
# Print header #
################
echo ""
echo "----------------------------------------------"
echo "Building the DockerFile image..."
echo "----------------------------------------------"
echo ""
info "----------------------------------------------"
info "Building the DockerFile image..."
info "----------------------------------------------"

################################
# Validate the DOCKERFILE_PATH #
################################
if [ ! -f "${DOCKERFILE_PATH}" ]; then
# No file found
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} failed to find Dockerfile at:[${DOCKERFILE_PATH}]${NC}"
echo "Please make sure you give full path!"
echo "Example:[/configs/Dockerfile] or [Dockerfile] if at root directory"
exit 1
error "failed to find Dockerfile at:[${DOCKERFILE_PATH}]"
error "Please make sure you give full path!"
fatal "Example:[/configs/Dockerfile] or [Dockerfile] if at root directory"
fi

###################
@@ -309,11 +295,10 @@ BuildImage() {
##############################
if [ ${ERROR_CODE} -ne 0 ]; then
# ERROR
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} failed to [build] Dockerfile!${NC}"
exit 1
fatal "failed to [build] Dockerfile!"
else
# SUCCESS
echo -e "${NC}${F[B]}Successfully Built image!${NC}"
info "Successfully Built image!"
fi

########################################################
@@ -333,11 +318,10 @@ BuildImage() {
##############################
if [ ${ERROR_CODE} -ne 0 ]; then
# ERROR
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} failed to [tag] Dockerfile!${NC}"
exit 1
fatal "failed to [tag] Dockerfile!"
else
# SUCCESS
echo -e "${NC}${F[B]}Successfully tagged image!${NC}"
info "Successfully tagged image!"
fi
fi
}
@@ -347,11 +331,9 @@ UploadImage() {
################
# Print header #
################
echo ""
echo "----------------------------------------------"
echo "Uploading the DockerFile image to ${REGISTRY}..."
echo "----------------------------------------------"
echo ""
info "----------------------------------------------"
info "Uploading the DockerFile image to ${REGISTRY}..."
info "----------------------------------------------"

############################################
# Upload the docker image that was created #
@@ -368,11 +350,10 @@ UploadImage() {
##############################
if [ ${ERROR_CODE} -ne 0 ]; then
# ERROR
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} failed to [upload] Dockerfile!${NC}"
exit 1
fatal "failed to [upload] Dockerfile!"
else
# SUCCESS
echo -e "${NC}${F[B]}Successfully Uploaded Docker image:${F[W]}[${IMAGE_VERSION}]${F[B]} to ${F[C]}${REGISTRY}${F[B]}!${NC}"
info "Successfully Uploaded Docker image:${F[W]}[${IMAGE_VERSION}]${F[B]} to ${F[C]}${REGISTRY}${F[B]}!"
fi

#########################
@@ -391,9 +372,8 @@ UploadImage() {
##############################
if [ ${ERROR_CODE} -ne 0 ]; then
# ERROR
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to get information about built Image!${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${GET_INFO_CMD}]${NC}"
exit 1
error "Failed to get information about built Image!"
fatal "[${GET_INFO_CMD}]"
else
################
# Get the data #
@@ -406,13 +386,13 @@ UploadImage() {
###################
# Print the goods #
###################
echo "----------------------------------------------"
echo "Docker Image Details:"
echo "Repository:[${REPO}]"
echo "Tag:[${TAG}]"
echo "Image_ID:[${IMAGE_ID}]"
echo "Size:[${SIZE}]"
echo "----------------------------------------------"
info "----------------------------------------------"
info "Docker Image Details:"
info "Repository:[${REPO}]"
info "Tag:[${TAG}]"
info "Image_ID:[${IMAGE_ID}]"
info "Size:[${SIZE}]"
info "----------------------------------------------"
fi

###############################################################
@@ -434,22 +414,19 @@ UploadImage() {
##############################
if [ ${ERROR_CODE} -ne 0 ]; then
# ERROR
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} failed to [upload] MAJOR_TAG:[${MAJOR_TAG}] Dockerfile!${NC}"
exit 1
fatal "failed to [upload] MAJOR_TAG:[${MAJOR_TAG}] Dockerfile!"
else
# SUCCESS
echo -e "${NC}${F[B]}Successfully Uploaded TAG:${F[W]}[${MAJOR_TAG}]${F[B]} of Docker image to ${F[C]}${REGISTRY}${F[B]}!${NC}"
info "Successfully Uploaded TAG:${F[W]}[${MAJOR_TAG}]${F[B]} of Docker image to ${F[C]}${REGISTRY}${F[B]}!"
fi
fi
}
################################################################################
#### Function Footer ###########################################################
Footer() {
echo ""
echo "-------------------------------------------------------"
echo "The step has completed"
echo "-------------------------------------------------------"
echo ""
info "-------------------------------------------------------"
info "The step has completed"
info "-------------------------------------------------------"
}
################################################################################
################################## MAIN ########################################
@@ -488,9 +465,8 @@ else
#########
# ERROR #
#########
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Registry not set correctly!${NC}"
echo "Registry:[${REGISTRY}]"
exit 1
error "Registry not set correctly!"
fatal "Registry:[${REGISTRY}]"
fi

####################
.github/linters/.flake8 (vendored, new file, 2 lines)
@@ -0,0 +1,2 @@
[flake8]
max-line-length = 120
.github/linters/.groovylintrc.json (vendored, new file, 65 lines)
@@ -0,0 +1,65 @@
{
"extends": "recommended",
"rules": {
"CatchException": {
"enabled": false
},
"CatchThrowable": {
"enabled": false
},
"ClassJavadoc": {
"enabled": false
},
"ClosureAsLastMethodParameter": {
"enabled": false
},
"DuplicateNumberLiteral": {
"enabled": false
},
"DuplicateStringLiteral": {
"enabled": false
},
"FieldTypeRequired": {
"enabled": false
},
"JavaIoPackageAccess": {
"enabled": false
},
"MethodParameterTypeRequired": {
"enabled": false
},
"MethodSize": {
"enabled": false
},
"NoDef": {
"enabled": false
},
"PrintStackTrace": {
"enabled": false
},
"PropertyName": {
"enabled": false
},
"SpaceAroundMapEntryColon": {
"enabled": false
},
"SystemExit": {
"enabled": false
},
"UnnecessaryGetter": {
"enabled": false
},
"UnnecessaryObjectReferences": {
"enabled": false
},
"UnnecessarySetter": {
"enabled": false
},
"VariableName": {
"enabled": false
},
"VariableTypeRequired": {
"enabled": false
}
}
}
.github/linters/.luacheckrc (vendored, new file, 1 line)
@@ -0,0 +1 @@
--std max
.github/workflows/automerge-dependabot.yml (vendored, 7 lines changed)
@@ -30,18 +30,17 @@ jobs:
if: github.actor == 'dependabot[bot]'
steps:
- name: merge
uses: actions/github-script@0.2.0
uses: actions/github-script@v2
with:
script: |
github.pullRequests.createReview({
github.pulls.createReview({
owner: context.payload.repository.owner.login,
repo: context.payload.repository.name,
pull_number: context.payload.pull_request.number,
event: 'APPROVE'
})
github.pullRequests.merge({
github.pulls.merge({
owner: context.payload.repository.owner.login,
repo: context.payload.repository.name,
pull_number: context.payload.pull_request.number
})
github-token: ${{github.token}}
1  .github/workflows/stack-linter.yml  vendored
@@ -45,3 +45,4 @@ jobs:
uses: docker://github/super-linter:latest
env:
VALIDATE_ALL_CODEBASE: false
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

2  .github/workflows/stale.yml  vendored
@@ -29,7 +29,7 @@ jobs:
if: "github.event_name == 'schedule'"
steps:
- name: Mark issue stale
uses: actions/stale@v3.0.8
uses: actions/stale@v3.0.9
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
stale-issue-message: "This issue has been automatically marked as stale because it has not had recent activity.\nIt will be closed in 14 days if no further activity occurs.\nThank you for your contributions.\n\nIf you think this issue should stay open, please remove the `O: stale 🤖` label or comment on the issue."

1  .gitignore  vendored
@@ -4,6 +4,7 @@ logs
npm-debug.log*
yarn-debug.log*
yarn-error.log*
/package-lock.json

# Runtime data
pids

151  Dockerfile
@@ -7,11 +7,11 @@
#########################################
# Get dependency images as build stages #
#########################################
FROM borkdude/clj-kondo:2020.06.21 as clj-kondo
FROM borkdude/clj-kondo:2020.07.29 as clj-kondo
FROM dotenvlinter/dotenv-linter:2.1.0 as dotenv-linter
FROM mstruebing/editorconfig-checker:2.1.0 as editorconfig-checker
FROM golangci/golangci-lint:v1.29.0 as golangci-lint
FROM yoheimuta/protolint:v0.25.1 as protolint
FROM golangci/golangci-lint:v1.30.0 as golangci-lint
FROM yoheimuta/protolint:v0.26.0 as protolint
FROM koalaman/shellcheck:v0.7.1 as shellcheck
FROM wata727/tflint:0.18.0 as tflint

@@ -52,25 +52,25 @@ ARG GLIBC_VERSION='2.31-r0'
RUN apk add --update --no-cache \
ansible-lint \
bash \
coreutils \
curl \
gcc \
git \
git git-lfs\
go \
icu-libs \
jq \
libxml2-utils \
libc-dev libxml2-utils \
make \
musl-dev \
npm \
nodejs \
npm nodejs-current \
openjdk8-jre \
perl \
php7 \
php7 php7-phar php7-json php7-mbstring php-xmlwriter \
php7-tokenizer php7-ctype php7-curl php7-dom php7-simplexml \
py3-setuptools \
ruby \
ruby-dev \
ruby-bundler \
ruby-rdoc
readline-dev \
ruby ruby-dev ruby-bundler ruby-rdoc \
gnupg

########################################
# Copy dependencies files to container #

@@ -100,6 +100,19 @@ ENV PATH="/node_modules/.bin:${PATH}"
##############################
RUN bundle install

##############################
# Install Phive dependencies #
##############################
RUN wget -O phive.phar https://phar.io/releases/phive.phar \
&& wget -O phive.phar.asc https://phar.io/releases/phive.phar.asc \
&& gpg --keyserver pool.sks-keyservers.net --recv-keys 0x9D8A98B29B2D5D79 \
&& gpg --verify phive.phar.asc phive.phar \
&& chmod +x phive.phar \
&& mv phive.phar /usr/local/bin/phive \
&& rm phive.phar.asc \
&& phive install --trust-gpg-keys 31C7E470E2138192,CF1A108D0E7AE720,8A03EA3B385DBAA1
# Trusted GPG keys for PHP linters: phpcs, phpstan, psalm

#########################################
# Install Powershell + PSScriptAnalyzer #
#########################################

@@ -174,7 +187,7 @@ RUN curl -sSLO https://github.com/pinterest/ktlint/releases/latest/download/ktli
####################
RUN wget -q -O /etc/apk/keys/sgerrand.rsa.pub https://alpine-pkgs.sgerrand.com/sgerrand.rsa.pub
RUN wget https://github.com/sgerrand/alpine-pkg-glibc/releases/download/${GLIBC_VERSION}/glibc-${GLIBC_VERSION}.apk
RUN apk add --no-cache glibc-${GLIBC_VERSION}.apk
RUN apk add --no-cache glibc-${GLIBC_VERSION}.apk && rm glibc-${GLIBC_VERSION}.apk
RUN wget https://storage.googleapis.com/dart-archive/channels/stable/release/${DART_VERSION}/sdk/dartsdk-linux-x64-release.zip -O - -q | unzip -q - \
&& chmod +x dart-sdk/bin/dart* \
&& mv dart-sdk/bin/* /usr/bin/ && mv dart-sdk/lib/* /usr/lib/ && mv dart-sdk/include/* /usr/include/ \

@@ -198,54 +211,82 @@ RUN CHECKSTYLE_LATEST=$(curl -s https://api.github.com/repos/checkstyle/checksty
&& curl -sSL $CHECKSTYLE_LATEST \
--output /usr/bin/checkstyle.jar

####################
# Install luacheck #
####################
RUN wget https://www.lua.org/ftp/lua-5.3.5.tar.gz -O - -q | tar -xzf - \
&& cd lua-5.3.5 \
&& make linux \
&& make install \
&& cd .. && rm -r lua-5.3.5/

RUN wget https://github.com/cvega/luarocks/archive/v3.3.1-super-linter.tar.gz -O - -q | tar -xzf - \
&& cd luarocks-3.3.1-super-linter \
&& ./configure --with-lua-include=/usr/local/include \
&& make \
&& make -b install \
&& cd .. && rm -r luarocks-3.3.1-super-linter/

RUN luarocks install luacheck

###########################################
|
||||
# Load GitHub Env Vars for GitHub Actions #
|
||||
###########################################
|
||||
ENV GITHUB_SHA=${GITHUB_SHA} \
|
||||
GITHUB_EVENT_PATH=${GITHUB_EVENT_PATH} \
|
||||
GITHUB_WORKSPACE=${GITHUB_WORKSPACE} \
|
||||
DEFAULT_BRANCH=${DEFAULT_BRANCH} \
|
||||
VALIDATE_ALL_CODEBASE=${VALIDATE_ALL_CODEBASE} \
|
||||
LINTER_RULES_PATH=${LINTER_RULES_PATH} \
|
||||
VALIDATE_YAML=${VALIDATE_YAML} \
|
||||
VALIDATE_JSON=${VALIDATE_JSON} \
|
||||
VALIDATE_XML=${VALIDATE_XML} \
|
||||
VALIDATE_MD=${VALIDATE_MD} \
|
||||
VALIDATE_BASH=${VALIDATE_BASH} \
|
||||
VALIDATE_PERL=${VALIDATE_PERL} \
|
||||
VALIDATE_RAKU=${VALIDATE_RAKU} \
|
||||
VALIDATE_PHP=${VALIDATE_PHP} \
|
||||
VALIDATE_PYTHON=${VALIDATE_PYTHON} \
|
||||
VALIDATE_RUBY=${VALIDATE_RUBY} \
|
||||
VALIDATE_COFFEE=${VALIDATE_COFFEE} \
|
||||
VALIDATE_ANSIBLE=${VALIDATE_ANSIBLE} \
|
||||
VALIDATE_DOCKER=${VALIDATE_DOCKER} \
|
||||
VALIDATE_JAVASCRIPT_ES=${VALIDATE_JAVASCRIPT_ES} \
|
||||
VALIDATE_JAVASCRIPT_STANDARD=${VALIDATE_JAVASCRIPT_STANDARD} \
|
||||
VALIDATE_TYPESCRIPT_ES=${VALIDATE_TYPESCRIPT_ES} \
|
||||
VALIDATE_TYPESCRIPT_STANDARD=${VALIDATE_TYPESCRIPT_STANDARD} \
|
||||
VALIDATE_GO=${VALIDATE_GO} \
|
||||
VALIDATE_TERRAFORM=${VALIDATE_TERRAFORM} \
|
||||
VALIDATE_CSS=${VALIDATE_CSS} \
|
||||
VALIDATE_ENV=${VALIDATE_ENV} \
|
||||
VALIDATE_HTML=${VALIDATE_HTML} \
|
||||
VALIDATE_CLOJURE=${VALIDATE_CLOJURE} \
|
||||
VALIDATE_KOTLIN=${VALIDATE_KOTLIN} \
|
||||
VALIDATE_DART=${VALIDATE_DART} \
|
||||
VALIDATE_POWERSHELL=${VALIDATE_POWERSHELL} \
|
||||
VALIDATE_JAVA=${VALIDATE_JAVA} \
|
||||
VALIDATE_ARM=${VALIDATE_ARM} \
|
||||
VALIDATE_OPENAPI=${VALIDATE_OPENAPI} \
|
||||
VALIDATE_PROTOBUF=${VALIDATE_PROTOBUF} \
|
||||
VALIDATE_EDITORCONFIG=${VALIDATE_EDITORCONFIG} \
|
||||
ENV ACTIONS_RUNNER_DEBUG=${ACTIONS_RUNNER_DEBUG} \
|
||||
ANSIBLE_DIRECTORY=${ANSIBLE_DIRECTORY} \
|
||||
DEFAULT_BRANCH=${DEFAULT_BRANCH} \
|
||||
DISABLE_ERRORS=${DISABLE_ERRORS} \
|
||||
GITHUB_EVENT_PATH=${GITHUB_EVENT_PATH} \
|
||||
GITHUB_SHA=${GITHUB_SHA} \
|
||||
GITHUB_TOKEN=${GITHUB_TOKEN} \
|
||||
GITHUB_WORKSPACE=${GITHUB_WORKSPACE} \
|
||||
LINTER_RULES_PATH=${LINTER_RULES_PATH} \
|
||||
LOG_FILE=${LOG_FILE} \
|
||||
LOG_LEVEL=${LOG_LEVEL} \
|
||||
OUTPUT_DETAILS=${OUTPUT_DETAILS} \
|
||||
OUTPUT_FOLDER=${OUTPUT_FOLDER} \
|
||||
OUTPUT_FORMAT=${OUTPUT_FORMAT} \
|
||||
RUN_LOCAL=${RUN_LOCAL} \
|
||||
TEST_CASE_RUN=${TEST_CASE_RUN} \
|
||||
ACTIONS_RUNNER_DEBUG=${ACTIONS_RUNNER_DEBUG} \
|
||||
DISABLE_ERRORS=${DISABLE_ERRORS} \
|
||||
OUTPUT_FORMAT=${OUTPUT_FORMAT} \
|
||||
OUTPUT_FOLDER=${OUTPUT_FOLDER} \
|
||||
OUTPUT_DETAILS=${OUTPUT_DETAILS}
|
||||
VALIDATE_ALL_CODEBASE=${VALIDATE_ALL_CODEBASE} \
|
||||
VALIDATE_ANSIBLE=${VALIDATE_ANSIBLE} \
|
||||
VALIDATE_ARM=${VALIDATE_ARM} \
|
||||
VALIDATE_BASH=${VALIDATE_BASH} \
|
||||
VALIDATE_CLOJURE=${VALIDATE_CLOJURE} \
|
||||
VALIDATE_CLOUDFORMATION=${VALIDATE_CLOUDFORMATION} \
|
||||
VALIDATE_COFFEE=${VALIDATE_COFFEE} \
|
||||
VALIDATE_CSS=${VALIDATE_CSS} \
|
||||
VALIDATE_DART=${VALIDATE_DART} \
|
||||
VALIDATE_DOCKER=${VALIDATE_DOCKER} \
|
||||
VALIDATE_EDITORCONFIG=${VALIDATE_EDITORCONFIG} \
|
||||
VALIDATE_ENV=${VALIDATE_ENV} \
|
||||
VALIDATE_GO=${VALIDATE_GO} \
|
||||
VALIDATE_HTML=${VALIDATE_HTML} \
|
||||
VALIDATE_JAVA=${VALIDATE_JAVA} \
|
||||
VALIDATE_JAVASCRIPT_ES=${VALIDATE_JAVASCRIPT_ES} \
|
||||
VALIDATE_JAVASCRIPT_STANDARD=${VALIDATE_JAVASCRIPT_STANDARD} \
|
||||
VALIDATE_JSON=${VALIDATE_JSON} \
|
||||
VALIDATE_KOTLIN=${VALIDATE_KOTLIN} \
|
||||
VALIDATE_LUA=${VALIDATE_LUA} \
|
||||
VALIDATE_MD=${VALIDATE_MD} \
|
||||
VALIDATE_OPENAPI=${VALIDATE_OPENAPI} \
|
||||
VALIDATE_PERL=${VALIDATE_PERL} \
|
||||
VALIDATE_PHP=${VALIDATE_PHP} \
|
||||
VALIDATE_PHP_PHPSTAN=${VALIDATE_PHP_PHPSTAN} \
|
||||
VALIDATE_POWERSHELL=${VALIDATE_POWERSHELL} \
|
||||
VALIDATE_PROTOBUF=${VALIDATE_PROTOBUF} \
|
||||
VALIDATE_PYTHON=${VALIDATE_PYTHON} \
|
||||
VALIDATE_PYTHON_PYLINT=${VALIDATE_PYTHON_PYLINT} \
|
||||
VALIDATE_PYTHON_FLAKE8=${VALIDATE_PYTHON_FLAKE8} \
|
||||
VALIDATE_RAKU=${VALIDATE_RAKU} \
|
||||
VALIDATE_RUBY=${VALIDATE_RUBY} \
|
||||
VALIDATE_STATES=${VALIDATE_STATES} \
|
||||
VALIDATE_TERRAFORM=${VALIDATE_TERRAFORM} \
|
||||
VALIDATE_TERRAFORM_TERRASCAN=${VALIDATE_TERRAFORM_TERRASCAN} \
|
||||
VALIDATE_TYPESCRIPT_ES=${VALIDATE_TYPESCRIPT_ES} \
|
||||
VALIDATE_TYPESCRIPT_STANDARD=${VALIDATE_TYPESCRIPT_STANDARD} \
|
||||
VALIDATE_XML=${VALIDATE_XML} \
|
||||
VALIDATE_YAML=${VALIDATE_YAML}
|
||||
|
||||
#############################
|
||||
# Copy scripts to container #
|
||||
|
|
168
README.md
168
README.md
|
@ -41,39 +41,40 @@ The design of the **Super-Linter** is currently to allow linting to occur in **G
|
|||
|
||||
Developers on **GitHub** can call the **GitHub Action** to lint their code base with the following list of linters:
|
||||
|
||||
| *Language* | *Linter* |
|
||||
| --- | --- |
|
||||
| **Ansible** | [ansible-lint](https://github.com/ansible/ansible-lint) |
|
||||
| **Azure Resource Manager (ARM)** | [arm-ttk](https://github.com/azure/arm-ttk) |
|
||||
| **AWS CloudFormation templates** | [cfn-lint](https://github.com/aws-cloudformation/cfn-python-lint/) |
|
||||
| **CSS** | [stylelint](https://stylelint.io/) |
|
||||
| **Clojure** | [clj-kondo](https://github.com/borkdude/clj-kondo) |
|
||||
| **CoffeeScript** | [coffeelint](https://coffeelint.github.io/) |
|
||||
| **Dart** | [dartanalyzer](https://dart.dev/guides/language/analysis-options) |
|
||||
| **Dockerfile** | [dockerfilelint](https://github.com/replicatedhq/dockerfilelint.git) |
|
||||
| **EDITORCONFIG** | [editorconfig-checker](https://github.com/editorconfig-checker/editorconfig-checker) |
|
||||
| **ENV** | [dotenv-linter](https://github.com/dotenv-linter/dotenv-linter) |
|
||||
| **Golang** | [golangci-lint](https://github.com/golangci/golangci-lint) |
|
||||
| **HTMLHint** | [HTMLHint](https://github.com/htmlhint/HTMLHint) |
|
||||
| **JavaScript** | [eslint](https://eslint.org/) [standard js](https://standardjs.com/) |
|
||||
| **JSON** | [jsonlint](https://github.com/zaach/jsonlint) |
|
||||
| **CSS** | [stylelint](https://stylelint.io/) |
|
||||
| **Java** | [checkstyle](https://checkstyle.org) |
|
||||
| **Kotlin** | [ktlint](https://github.com/pinterest/ktlint) |
|
||||
| **Markdown** | [markdownlint](https://github.com/igorshubovych/markdownlint-cli#readme) |
|
||||
| **OpenAPI** | [spectral](https://github.com/stoplightio/spectral) |
|
||||
| **Perl** | [perl](https://pkgs.alpinelinux.org/package/edge/main/x86/perl) |
|
||||
| **PHP** | [PHP](https://www.php.net/) |
|
||||
| **PowerShell** | [PSScriptAnalyzer](https://github.com/PowerShell/Psscriptanalyzer) |
|
||||
| **Protocol Buffers** | [protolint](https://github.com/yoheimuta/protolint) |
|
||||
| **Python3** | [pylint](https://www.pylint.org/) |
|
||||
| **Raku** | [raku](https://raku.org) |
|
||||
| **Ruby** | [RuboCop](https://github.com/rubocop-hq/rubocop) |
|
||||
| **Shell** | [Shellcheck](https://github.com/koalaman/shellcheck) |
|
||||
| **Terraform** | [tflint](https://github.com/terraform-linters/tflint) |
|
||||
| **TypeScript** | [eslint](https://eslint.org/) [standard js](https://standardjs.com/) |
|
||||
| **XML** | [LibXML](http://xmlsoft.org/) |
|
||||
| **YAML** | [YamlLint](https://github.com/adrienverge/yamllint) |
|
||||
| _Language_ | _Linter_ |
|
||||
| -------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------ |
|
||||
| **Ansible** | [ansible-lint](https://github.com/ansible/ansible-lint) |
|
||||
| **Azure Resource Manager (ARM)** | [arm-ttk](https://github.com/azure/arm-ttk) |
|
||||
| **AWS CloudFormation templates** | [cfn-lint](https://github.com/aws-cloudformation/cfn-python-lint/) |
|
||||
| **CSS** | [stylelint](https://stylelint.io/) |
|
||||
| **Clojure** | [clj-kondo](https://github.com/borkdude/clj-kondo) |
|
||||
| **CoffeeScript** | [coffeelint](https://coffeelint.github.io/) |
|
||||
| **Dart** | [dartanalyzer](https://dart.dev/guides/language/analysis-options) |
|
||||
| **Dockerfile** | [dockerfilelint](https://github.com/replicatedhq/dockerfilelint.git) |
|
||||
| **EDITORCONFIG** | [editorconfig-checker](https://github.com/editorconfig-checker/editorconfig-checker) |
|
||||
| **ENV** | [dotenv-linter](https://github.com/dotenv-linter/dotenv-linter) |
|
||||
| **Golang** | [golangci-lint](https://github.com/golangci/golangci-lint) |
|
||||
| **Groovy** | [npm-groovy-lint](https://github.com/nvuillam/npm-groovy-lint) |
|
||||
| **HTMLHint** | [HTMLHint](https://github.com/htmlhint/HTMLHint) |
|
||||
| **Java** | [checkstyle](https://checkstyle.org) |
|
||||
| **JavaScript** | [eslint](https://eslint.org/) [standard js](https://standardjs.com/) |
|
||||
| **JSON** | [jsonlint](https://github.com/zaach/jsonlint) |
|
||||
| **Kotlin** | [ktlint](https://github.com/pinterest/ktlint) |
|
||||
| **Lua** | [luacheck](https://github.com/luarocks/luacheck) |
|
||||
| **Markdown** | [markdownlint](https://github.com/igorshubovych/markdownlint-cli#readme) |
|
||||
| **OpenAPI** | [spectral](https://github.com/stoplightio/spectral) |
|
||||
| **Perl** | [perl](https://pkgs.alpinelinux.org/package/edge/main/x86/perl) |
|
||||
| **PHP** | [PHP built-in linter](https://www.php.net/) [PHP CodeSniffer](https://github.com/squizlabs/PHP_CodeSniffer) [PHPStan](https://phpstan.org/n) [Psalm](https://psalm.dev/) |
|
||||
| **PowerShell** | [PSScriptAnalyzer](https://github.com/PowerShell/Psscriptanalyzer) |
|
||||
| **Protocol Buffers** | [protolint](https://github.com/yoheimuta/protolint) |
|
||||
| **Python3** | [pylint](https://www.pylint.org/) [flake8](https://flake8.pycqa.org/en/latest/) |
|
||||
| **Raku** | [raku](https://raku.org) |
|
||||
| **Ruby** | [RuboCop](https://github.com/rubocop-hq/rubocop) |
|
||||
| **Shell** | [Shellcheck](https://github.com/koalaman/shellcheck) |
|
||||
| **Terraform** | [tflint](https://github.com/terraform-linters/tflint) [terrascan](https://github.com/accurics/terrascan) |
|
||||
| **TypeScript** | [eslint](https://eslint.org/) [standard js](https://standardjs.com/) |
|
||||
| **XML** | [LibXML](http://xmlsoft.org/) |
|
||||
| **YAML** | [YamlLint](https://github.com/adrienverge/yamllint) |
|
||||
|
||||
## How to use
|
||||
|
||||
|
@ -169,54 +170,69 @@ and won't run anything unexpected.
|
|||
|
||||
| **ENV VAR** | **Default Value** | **Notes** |
|
||||
| -------------------------------- | --------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
|
||||
| **VALIDATE_ALL_CODEBASE** | `true` | Will parse the entire repository and find all files to validate across all types. **NOTE:** When set to `false`, only **new** or **edited** files will be parsed for validation. |
|
||||
| **DEFAULT_BRANCH** | `master` | The name of the repository default branch. |
|
||||
| **LINTER_RULES_PATH** | `.github/linters` | Directory for all linter configuration rules. |
|
||||
| **VALIDATE_YAML** | `true` | Flag to enable or disable the linting process of the language. |
|
||||
| **VALIDATE_JSON** | `true` | Flag to enable or disable the linting process of the language. |
|
||||
| **VALIDATE_XML** | `true` | Flag to enable or disable the linting process of the language. |
|
||||
| **VALIDATE_MD** | `true` | Flag to enable or disable the linting process of the language. |
|
||||
| **VALIDATE_BASH** | `true` | Flag to enable or disable the linting process of the language. |
|
||||
| **VALIDATE_PERL** | `true` | Flag to enable or disable the linting process of the language. |
|
||||
| **VALIDATE_RAKU** | `true` | Flag to enable or disable the linting process of the language. |
|
||||
| **VALIDATE_PHP** | `true` | Flag to enable or disable the linting process of the language. |
|
||||
| **VALIDATE_PYTHON** | `true` | Flag to enable or disable the linting process of the language. |
|
||||
| **VALIDATE_RUBY** | `true` | Flag to enable or disable the linting process of the language. |
|
||||
| **RUBY_CONFIG_FILE** | `.ruby-lint.yml` | Filename for [rubocop configuration](https://docs.rubocop.org/rubocop/configuration.html) (ex: `.ruby-lint.yml`, `.rubocop.yml`) |
|
||||
| **VALIDATE_COFFEE** | `true` | Flag to enable or disable the linting process of the language . |
|
||||
| **VALIDATE_ANSIBLE** | `true` | Flag to enable or disable the linting process of the language. |
|
||||
| **VALIDATE_JAVASCRIPT_ES** | `true` | Flag to enable or disable the linting process of the language. (Utilizing: eslint) |
|
||||
| **JAVASCRIPT_ES_CONFIG_FILE** | `.eslintrc.yml` | Filename for [eslint configuration](https://eslint.org/docs/user-guide/configuring#configuration-file-formats) (ex: `.eslintrc.yml`, `.eslintrc.json`) |
|
||||
| **VALIDATE_JAVASCRIPT_STANDARD** | `true` | Flag to enable or disable the linting process of the language. (Utilizing: standard) |
|
||||
| **VALIDATE_JSX** | `true` | Flag to enable or disable the linting process for jsx files (Utilizing: eslint) |
|
||||
| **VALIDATE_TSX** | `true` | Flag to enable or disable the linting process for tsx files (Utilizing: eslint) |
|
||||
| **VALIDATE_TYPESCRIPT_ES** | `true` | Flag to enable or disable the linting process of the language. (Utilizing: eslint) |
|
||||
| **TYPESCRIPT_ES_CONFIG_FILE** | `.eslintrc.yml` | Filename for [eslint configuration](https://eslint.org/docs/user-guide/configuring#configuration-file-formats) (ex: `.eslintrc.yml`, `.eslintrc.json`) |
|
||||
| **VALIDATE_TYPESCRIPT_STANDARD** | `true` | Flag to enable or disable the linting process of the language. (Utilizing: standard) |
|
||||
| **VALIDATE_DOCKER** | `true` | Flag to enable or disable the linting process of the language. |
|
||||
| **VALIDATE_GO** | `true` | Flag to enable or disable the linting process of the language. |
|
||||
| **VALIDATE_POWERSHELL** | `true` | Flag to enable or disable the linting process of the language. |
|
||||
| **VALIDATE_ARM** | `true` | Flag to enable or disable the linting process of the language. |
|
||||
| **VALIDATE_TERRAFORM** | `true` | Flag to enable or disable the linting process of the language. |
|
||||
| **VALIDATE_CSS** | `true` | Flag to enable or disable the linting process of the language. |
|
||||
| **VALIDATE_ENV** | `true` | Flag to enable or disable the linting process of the language. |
|
||||
| **VALIDATE_CLOJURE** | `true` | Flag to enable or disable the linting process of the language. |
|
||||
| **VALIDATE_HTML** | `true` | Flag to enable or disable the linting process of the language. |
|
||||
| **VALIDATE_JAVA** | `true` | Flag to enable or disable the linting process of the language. |
|
||||
| **VALIDATE_KOTLIN** | `true` | Flag to enable or disable the linting process of the language. |
|
||||
| **VALIDATE_DART** | `true` | Flag to enable or disable the linting process of the language. |
|
||||
| **VALIDATE_OPENAPI** | `true` | Flag to enable or disable the linting process of the language. |
|
||||
| **VALIDATE_CLOUDFORMATION** | `true` | Flag to enable or disable the linting process of the language. |
|
||||
| **VALIDATE_PROTOBUF** | `true` | Flag to enable or disable the linting process of the language. |
|
||||
| **VALIDATE_EDITORCONFIG** | `true` | Flag to enable or disable the linting process with the editorconfig. |
|
||||
| **ANSIBLE_DIRECTORY** | `/ansible` | Flag to set the root directory for Ansible file location(s). |
|
||||
| **ACTIONS_RUNNER_DEBUG** | `false` | Flag to enable additional information about the linter, versions, and additional output. |
|
||||
| **DISABLE_ERRORS** | `false` | Flag to have the linter complete with exit code 0 even if errors were detected. |
|
||||
| **ANSIBLE_DIRECTORY** | `/ansible` | Flag to set the root directory for Ansible file location(s). |
|
||||
| **DEFAULT_BRANCH** | `master` | The name of the repository default branch. |
|
||||
| **DEFAULT_WORKSPACE** | `/tmp/lint` | The location containing files to lint if you are running locally. |
|
||||
| **DISABLE_ERRORS** | `false` | Flag to have the linter complete with exit code 0 even if errors were detected. |
|
||||
| **JAVASCRIPT_ES_CONFIG_FILE** | `.eslintrc.yml` | Filename for [eslint configuration](https://eslint.org/docs/user-guide/configuring#configuration-file-formats) (ex: `.eslintrc.yml`, `.eslintrc.json`) |
|
||||
| **LINTER_RULES_PATH** | `.github/linters` | Directory for all linter configuration rules. |
|
||||
| **LOG_FILE** | `super-linter.log` | The file name for outputting logs. All output is sent to the log file regardless of `LOG_LEVEL`. |
|
||||
| **LOG_LEVEL** | `VERBOSE` | How much output the script will generate to the console. One of `VERBOSE`, `DEBUG` or `TRACE`. |
|
||||
| **MULTI_STATUS** | `true` | A status API is made for each language that is linted to make visual parsing easier. |
|
||||
| **OUTPUT_FORMAT** | `none` | The report format to be generated, besides the stdout one. Output format of tap is currently using v13 of the specification. Supported formats: tap |
|
||||
| **OUTPUT_FOLDER** | `super-linter.report` | The location where the output reporting will be generated to. Output folder must not previously exist. |
|
||||
| **OUTPUT_DETAILS** | `simpler` | What level of details to be reported. Supported formats: simpler or detailed. |
|
||||
| **MULTI_STATUS** | `true` | A status API is made for each language that is linted to make visual parsing easier. |
|
||||
| **PYTHON_PYLINT_CONFIG_FILE** | `.python-lint` | Filename for [pylint configuration](http://pylint.pycqa.org/en/latest/user_guide/run.html?highlight=rcfile#command-line-options) (ex: `.python-lint`, `.pylintrc`) |
|
||||
| **PYTHON_FLAKE8_CONFIG_FILE** | `.flake8` | Filename for [flake8 configuration](https://flake8.pycqa.org/en/latest/user/configuration.html) (ex: `.flake8`, `tox.ini`) |
|
||||
| **RUBY_CONFIG_FILE** | `.ruby-lint.yml` | Filename for [rubocop configuration](https://docs.rubocop.org/rubocop/configuration.html) (ex: `.ruby-lint.yml`, `.rubocop.yml`) |
|
||||
| **TYPESCRIPT_ES_CONFIG_FILE** | `.eslintrc.yml` | Filename for [eslint configuration](https://eslint.org/docs/user-guide/configuring#configuration-file-formats) (ex: `.eslintrc.yml`, `.eslintrc.json`) |
|
||||
| **VALIDATE_ALL_CODEBASE** | `true` | Will parse the entire repository and find all files to validate across all types. **NOTE:** When set to `false`, only **new** or **edited** files will be parsed for validation. |
|
||||
| **VALIDATE_ANSIBLE** | `true` | Flag to enable or disable the linting process of the Ansible language. |
|
||||
| **VALIDATE_ARM** | `true` | Flag to enable or disable the linting process of the ARM language. |
|
||||
| **VALIDATE_BASH** | `true` | Flag to enable or disable the linting process of the Bash language. |
|
||||
| **VALIDATE_CLOJURE** | `true` | Flag to enable or disable the linting process of the Clojure language. |
|
||||
| **VALIDATE_CLOUDFORMATION** | `true` | Flag to enable or disable the linting process of the AWS Cloud Formation language. |
|
||||
| **VALIDATE_COFFEE** | `true` | Flag to enable or disable the linting process of the Coffeescript language . |
|
||||
| **VALIDATE_CSS** | `true` | Flag to enable or disable the linting process of the CSS language. |
|
||||
| **VALIDATE_DART** | `true` | Flag to enable or disable the linting process of the Dart language. |
|
||||
| **VALIDATE_DOCKER** | `true` | Flag to enable or disable the linting process of the Docker language. |
|
||||
| **VALIDATE_EDITORCONFIG** | `true` | Flag to enable or disable the linting process with the editorconfig. |
|
||||
| **VALIDATE_ENV** | `true` | Flag to enable or disable the linting process of the ENV language. |
|
||||
| **VALIDATE_GO** | `true` | Flag to enable or disable the linting process of the Golang language. |
|
||||
| **VALIDATE_GROOVY** | `true` | Flag to enable or disable the linting process of the language. |
|
||||
| **VALIDATE_HTML** | `true` | Flag to enable or disable the linting process of the HTML language. |
|
||||
| **VALIDATE_JAVA** | `true` | Flag to enable or disable the linting process of the language. |
|
||||
| **VALIDATE_JAVASCRIPT_ES** | `true` | Flag to enable or disable the linting process of the Javascript language. (Utilizing: eslint) |
|
||||
| **VALIDATE_JAVASCRIPT_STANDARD** | `true` | Flag to enable or disable the linting process of the Javascript language. (Utilizing: standard) |
|
||||
| **VALIDATE_JSON** | `true` | Flag to enable or disable the linting process of the JSON language. |
|
||||
| **VALIDATE_JSX** | `true` | Flag to enable or disable the linting process for jsx files (Utilizing: eslint) |
|
||||
| **VALIDATE_KOTLIN** | `true` | Flag to enable or disable the linting process of the Kotlin language. |
|
||||
| **VALIDATE_LUA** | `true` | Flag to enable or disable the linting process of the language. |
|
||||
| **VALIDATE_MD** | `true` | Flag to enable or disable the linting process of the Markdown language. |
|
||||
| **VALIDATE_OPENAPI** | `true` | Flag to enable or disable the linting process of the OpenAPI language. |
|
||||
| **VALIDATE_PERL** | `true` | Flag to enable or disable the linting process of the Perl language. |
|
||||
| **VALIDATE_PHP** | `true` | Flag to enable or disable the linting process of the PHP language. (Utilizing: PHP built-in linter) (keep for backward compatibility) |
|
||||
| **VALIDATE_PHP_BUILTIN** | `true` | Flag to enable or disable the linting process of the PHP language. (Utilizing: PHP built-in linter) |
|
||||
| **VALIDATE_PHP_PHPCS** | `true` | Flag to enable or disable the linting process of the PHP language. (Utilizing: PHP CodeSniffer) |
|
||||
| **VALIDATE_PHP_PHPSTAN** | `true` | Flag to enable or disable the linting process of the PHP language. (Utilizing: PHPStan) |
|
||||
| **VALIDATE_PHP_PSALM** | `true` | Flag to enable or disable the linting process of the PHP language. (Utilizing: PSalm) |
|
||||
| **VALIDATE_PROTOBUF** | `true` | Flag to enable or disable the linting process of the Protobuf language. |
|
||||
| **VALIDATE_PYTHON** | `true` | Flag to enable or disable the linting process of the Python language. (Utilizing: pylint) (keep for backward compatibility) |
|
||||
| **VALIDATE_PYTHON_PYLINT** | `true` | Flag to enable or disable the linting process of the Python language. (Utilizing: pylint) |
|
||||
| **VALIDATE_PYTHON_FLAKE8** | `true` | Flag to enable or disable the linting process of the Python language. (Utilizing: flake8) |
|
||||
| **VALIDATE_POWERSHELL** | `true` | Flag to enable or disable the linting process of the Powershell language. |
|
||||
| **VALIDATE_RAKU** | `true` | Flag to enable or disable the linting process of the Raku language. |
|
||||
| **VALIDATE_RUBY** | `true` | Flag to enable or disable the linting process of the Ruby language. |
|
||||
| **VALIDATE_STATES** | `true` | Flag to enable or disable the linting process for AWS States Language. |
|
||||
| **VALIDATE_TERRAFORM** | `true` | Flag to enable or disable the linting process of the Terraform language. |
|
||||
| **VALIDATE_TERRAFORM_TERRASCAN** | `false` | Flag to enable or disable the linting process of the Terraform language for security related issues. |
|
||||
| **VALIDATE_TSX** | `true` | Flag to enable or disable the linting process for tsx files (Utilizing: eslint) |
|
||||
| **VALIDATE_TYPESCRIPT_ES** | `true` | Flag to enable or disable the linting process of the Typescript language. (Utilizing: eslint) |
|
||||
| **VALIDATE_TYPESCRIPT_STANDARD** | `true` | Flag to enable or disable the linting process of the Typescript language. (Utilizing: standard) |
|
||||
| **VALIDATE_XML** | `true` | Flag to enable or disable the linting process of the XML language. |
|
||||
| **VALIDATE_YAML** | `true` | Flag to enable or disable the linting process of the YAML language. |
|
||||
| **YAML_CONFIG_FILE** | `.yaml-lint.yml` | Filename for [Yamllint configuration](https://yamllint.readthedocs.io/en/stable/configuration.html) (ex: `.yaml-lint.yml`, `.yamllint.yml`) |
|
||||
|
||||
|
||||
### Template rules files
|
||||
|
|
2  TEMPLATES/.flake8  Normal file
@@ -0,0 +1,2 @@
[flake8]
max-line-length = 120

65
TEMPLATES/.groovylintrc.json
Normal file
65
TEMPLATES/.groovylintrc.json
Normal file
|
@ -0,0 +1,65 @@
|
|||
{
|
||||
"extends": "recommended",
|
||||
"rules": {
|
||||
"CatchException": {
|
||||
"enabled": false
|
||||
},
|
||||
"CatchThrowable": {
|
||||
"enabled": false
|
||||
},
|
||||
"ClassJavadoc": {
|
||||
"enabled": false
|
||||
},
|
||||
"ClosureAsLastMethodParameter": {
|
||||
"enabled": false
|
||||
},
|
||||
"DuplicateNumberLiteral": {
|
||||
"enabled": false
|
||||
},
|
||||
"DuplicateStringLiteral": {
|
||||
"enabled": false
|
||||
},
|
||||
"FieldTypeRequired": {
|
||||
"enabled": false
|
||||
},
|
||||
"JavaIoPackageAccess": {
|
||||
"enabled": false
|
||||
},
|
||||
"MethodParameterTypeRequired": {
|
||||
"enabled": false
|
||||
},
|
||||
"MethodSize": {
|
||||
"enabled": false
|
||||
},
|
||||
"NoDef": {
|
||||
"enabled": false
|
||||
},
|
||||
"PrintStackTrace": {
|
||||
"enabled": false
|
||||
},
|
||||
"PropertyName": {
|
||||
"enabled": false
|
||||
},
|
||||
"SpaceAroundMapEntryColon": {
|
||||
"enabled": false
|
||||
},
|
||||
"SystemExit": {
|
||||
"enabled": false
|
||||
},
|
||||
"UnnecessaryGetter": {
|
||||
"enabled": false
|
||||
},
|
||||
"UnnecessaryObjectReferences": {
|
||||
"enabled": false
|
||||
},
|
||||
"UnnecessarySetter": {
|
||||
"enabled": false
|
||||
},
|
||||
"VariableName": {
|
||||
"enabled": false
|
||||
},
|
||||
"VariableTypeRequired": {
|
||||
"enabled": false
|
||||
}
|
||||
}
|
||||
}
|
1  TEMPLATES/.luacheckrc  Normal file
@@ -0,0 +1 @@
--std max

57
TEMPLATES/analysis_options.yaml
Normal file
57
TEMPLATES/analysis_options.yaml
Normal file
|
@ -0,0 +1,57 @@
|
|||
---
|
||||
##########################
|
||||
##########################
|
||||
## Dart Linter rules ##
|
||||
##########################
|
||||
##########################
|
||||
|
||||
# Pedantic Rules
|
||||
# https://github.com/dart-lang/pedantic
|
||||
|
||||
linter:
|
||||
rules:
|
||||
- always_declare_return_types
|
||||
- always_require_non_null_named_parameters
|
||||
- annotate_overrides
|
||||
- avoid_empty_else
|
||||
- avoid_init_to_null
|
||||
- avoid_null_checks_in_equality_operators
|
||||
- avoid_relative_lib_imports
|
||||
- avoid_return_types_on_setters
|
||||
- avoid_shadowing_type_parameters
|
||||
- avoid_types_as_parameter_names
|
||||
- camel_case_extensions
|
||||
- curly_braces_in_flow_control_structures
|
||||
- empty_catches
|
||||
- empty_constructor_bodies
|
||||
- library_names
|
||||
- library_prefixes
|
||||
- no_duplicate_case_values
|
||||
- null_closures
|
||||
- omit_local_variable_types
|
||||
- prefer_adjacent_string_concatenation
|
||||
- prefer_collection_literals
|
||||
- prefer_conditional_assignment
|
||||
- prefer_contains
|
||||
- prefer_equal_for_default_values
|
||||
- prefer_final_fields
|
||||
- prefer_for_elements_to_map_fromIterable
|
||||
- prefer_generic_function_type_aliases
|
||||
- prefer_if_null_operators
|
||||
- prefer_is_empty
|
||||
- prefer_is_not_empty
|
||||
- prefer_iterable_whereType
|
||||
- prefer_single_quotes
|
||||
- prefer_spread_collections
|
||||
- recursive_getters
|
||||
- slash_for_doc_comments
|
||||
- type_init_formals
|
||||
- unawaited_futures
|
||||
- unnecessary_const
|
||||
- unnecessary_new
|
||||
- unnecessary_null_in_if_null_operators
|
||||
- unnecessary_this
|
||||
- unrelated_type_equality_checks
|
||||
- use_function_type_syntax_for_parameters
|
||||
- use_rethrow_when_possible
|
||||
- valid_regexps
|
6  TEMPLATES/phpcs.xml  Normal file
@@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8" ?>

<ruleset name="super-linter">
<description>The default coding standard for usage with GitHub Super-Linter. It just includes PSR12.</description>
<rule ref="PSR12" />
</ruleset>

2  TEMPLATES/phpstan.neon  Normal file
@@ -0,0 +1,2 @@
parameters:
level: 5

4  TEMPLATES/psalm.xml  Normal file
@@ -0,0 +1,4 @@
<?xml version="1.0" encoding="UTF-8" ?>

<psalm xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:noNamespaceSchemaLocation="https://getpsalm.org/schema/config" xmlns="https://getpsalm.org/schema/config">
</psalm>

1
dependencies/Gemfile
vendored
1
dependencies/Gemfile
vendored
|
@ -8,3 +8,4 @@ gem "rubocop", "~> 0.82.0"
|
|||
gem "rubocop-github", "~> 0.16.0"
|
||||
gem "rubocop-performance", "~>1.7.1"
|
||||
gem "rubocop-rails", "~> 2.5"
|
||||
gem "rubocop-rspec", "~> 1.41.0"
|
||||
|
|
5
dependencies/Gemfile.lock
vendored
5
dependencies/Gemfile.lock
vendored
|
@ -9,7 +9,7 @@ GEM
|
|||
zeitwerk (~> 2.2, >= 2.2.2)
|
||||
ast (2.4.1)
|
||||
concurrent-ruby (1.1.6)
|
||||
i18n (1.8.3)
|
||||
i18n (1.8.4)
|
||||
concurrent-ruby (~> 1.0)
|
||||
jaro_winkler (1.5.4)
|
||||
minitest (5.14.1)
|
||||
|
@ -37,6 +37,8 @@ GEM
|
|||
activesupport (>= 4.2.0)
|
||||
rack (>= 1.1)
|
||||
rubocop (>= 0.82.0)
|
||||
rubocop-rspec (1.41.0)
|
||||
rubocop (>= 0.68.1)
|
||||
ruby-progressbar (1.10.1)
|
||||
thread_safe (0.3.6)
|
||||
tzinfo (1.2.7)
|
||||
|
@ -52,6 +54,7 @@ DEPENDENCIES
|
|||
rubocop-github (~> 0.16.0)
|
||||
rubocop-performance (~> 1.7.1)
|
||||
rubocop-rails (~> 2.5)
|
||||
rubocop-rspec (~> 1.41.0)
|
||||
|
||||
BUNDLED WITH
|
||||
2.1.4
|
||||
|
|
3
dependencies/Pipfile
vendored
3
dependencies/Pipfile
vendored
|
@ -10,7 +10,8 @@ yamllint = "*"
|
|||
pylint = "*"
|
||||
yq = "*"
|
||||
cfn-lint = "*"
|
||||
shyaml = "*"
|
||||
terrascan = "*"
|
||||
flake8 = "*"
|
||||
|
||||
[requires]
|
||||
python_version = "3.8"
|
||||
|
|
69
dependencies/Pipfile.lock
generated
vendored
69
dependencies/Pipfile.lock
generated
vendored
|
@ -1,7 +1,7 @@
|
|||
{
|
||||
"_meta": {
|
||||
"hash": {
|
||||
"sha256": "f8d7f08c1efbacea7c92868babd54bc040413f865e3cd7ea8d4bebab310b3da3"
|
||||
"sha256": "f8ea3853c4bb4533103043533d08982436551678c827f59809c94bf0ee54a187"
|
||||
},
|
||||
"pipfile-spec": 6,
|
||||
"requires": {
|
||||
|
@ -47,24 +47,25 @@
|
|||
},
|
||||
"boto3": {
|
||||
"hashes": [
|
||||
"sha256:07bd0872e9178b637baefb82aff8abb76197770c9fc60c4d6575564ba878e3e4"
|
||||
"sha256:726937b33b5bf6036a3889ea6ecad632439d1a53c3dd4497c1c426d348e2dae3",
|
||||
"sha256:781f5d1bbec6c2fe902e8855c1c8a576845eaa06630e817b0e422afa4788620e"
|
||||
],
|
||||
"version": "==1.14.22"
|
||||
"version": "==1.14.34"
|
||||
},
|
||||
"botocore": {
|
||||
"hashes": [
|
||||
"sha256:4d084dfcfcdf21ac2df17d017607ca53d53ac6c2fa17484cdd87ef78daba06b8",
|
||||
"sha256:f491d3c29d7dda8c8907c520bc96d77a67a8953dfed7f55c250799849e213640"
|
||||
"sha256:8170f6a236cdfd053275be7b2f1531c90c517d903d38b21d064dc6ba23f20870",
|
||||
"sha256:ff39ec54893397df3c14dd516aedeaff12bb7c363051d8b9bd5fc86bc3e68231"
|
||||
],
|
||||
"version": "==1.17.22"
|
||||
"version": "==1.17.34"
|
||||
},
|
||||
"cfn-lint": {
|
||||
"hashes": [
|
||||
"sha256:443e7c1a57a8909050b51873cbf53628e253b6a2ab00fba9d25f89e75f13f1b6",
|
||||
"sha256:b2a7b9a84688d87d0d49d1dad2602382ba2a8be9d20d87278f4d42c59f71eddc"
|
||||
"sha256:85df66deca0319003b4af3efbd4689e5004608b4e9ee7cc54c7a27e5c0ca1685",
|
||||
"sha256:f7c63a4bf6c32c56cbe1af023ff93eb88a4bad784368dcb25724941981e92f16"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==0.34.0"
|
||||
"version": "==0.34.1"
|
||||
},
|
||||
"decorator": {
|
||||
"hashes": [
|
||||
|
@ -81,6 +82,14 @@
|
|||
],
|
||||
"version": "==0.15.2"
|
||||
},
|
||||
"flake8": {
|
||||
"hashes": [
|
||||
"sha256:15e351d19611c887e482fb960eae4d44845013cc142d42896e9862f775d8cf5c",
|
||||
"sha256:f04b9fcbac03b0a3e58c0ab3a0ecc462e023a9faf046d57794184028123aa208"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==3.8.3"
|
||||
},
|
||||
"isort": {
|
||||
"hashes": [
|
||||
"sha256:54da7e92468955c4fceacd0c86bd0ec997b0e1ee80d97f67c35a78b719dccab1",
|
||||
|
@ -171,6 +180,26 @@
|
|||
],
|
||||
"version": "==0.8.0"
|
||||
},
|
||||
"pycodestyle": {
|
||||
"hashes": [
|
||||
"sha256:2295e7b2f6b5bd100585ebcb1f616591b652db8a741695b3d8f5d28bdc934367",
|
||||
"sha256:c58a7d2815e0e8d7972bf1803331fb0152f867bd89adf8a01dfd55085434192e"
|
||||
],
|
||||
"version": "==2.6.0"
|
||||
},
|
||||
"pyflakes": {
|
||||
"hashes": [
|
||||
"sha256:0d94e0e05a19e57a99444b6ddcf9a6eb2e5c68d3ca1e98e90707af8152c90a92",
|
||||
"sha256:35b2d75ee967ea93b55750aa9edbbf72813e06a66ba54438df2cfac9e3c27fc8"
|
||||
],
|
||||
"version": "==2.2.0"
|
||||
},
|
||||
"pyhcl": {
|
||||
"hashes": [
|
||||
"sha256:2d9b9dcdf1023d812bfed561ba72c99104c5b3f52e558d595130a44ce081b003"
|
||||
],
|
||||
"version": "==0.4.4"
|
||||
},
|
||||
"pylint": {
|
||||
"hashes": [
|
||||
"sha256:7dd78437f2d8d019717dbf287772d0b2dbdfd13fc016aa7faa08d67bccc46adc",
|
||||
|
@ -216,14 +245,6 @@
|
|||
],
|
||||
"version": "==0.3.3"
|
||||
},
|
||||
"shyaml": {
|
||||
"hashes": [
|
||||
"sha256:3a57e380f66043c661d417106a0f101f8068c80caa2afef57c90447b88526c3d",
|
||||
"sha256:ac9066eed5b8445de1f83a99106ca96a77900b6873de327fd50d3e3102084752"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==0.6.1"
|
||||
},
|
||||
"six": {
|
||||
"hashes": [
|
||||
"sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259",
|
||||
|
@ -231,6 +252,14 @@
|
|||
],
|
||||
"version": "==1.15.0"
|
||||
},
|
||||
"terrascan": {
|
||||
"hashes": [
|
||||
"sha256:2003638e2e38feba9215df3add2ee99565731b86dbd5c43fd8982b1a4ddac927",
|
||||
"sha256:b44d8c7eac96aa8094865061783ab0e8e849690b8187b75778caf5dd34002d18"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==0.2.3"
|
||||
},
|
||||
"toml": {
|
||||
"hashes": [
|
||||
"sha256:926b612be1e5ce0634a2ca03470f95169cf16f939018233a670519cb4ac58b0f",
|
||||
|
@ -240,11 +269,11 @@
|
|||
},
|
||||
"urllib3": {
|
||||
"hashes": [
|
||||
"sha256:3018294ebefce6572a474f0604c2021e33b3fd8006ecd11d62107a5d2a963527",
|
||||
"sha256:88206b0eb87e6d677d424843ac5209e3fb9d0190d0ee169599165ec25e9d9115"
|
||||
"sha256:91056c15fa70756691db97756772bb1eb9678fa585d9184f24534b100dc60f4a",
|
||||
"sha256:e7983572181f5e1522d9c98453462384ee92a0be7fac5f1413a1e35c56cc0461"
|
||||
],
|
||||
"markers": "python_version != '3.4'",
|
||||
"version": "==1.25.9"
|
||||
"version": "==1.25.10"
|
||||
},
|
||||
"wrapt": {
|
||||
"hashes": [
|
||||
|
|
1659  dependencies/package-lock.json  generated  vendored
File diff suppressed because it is too large

14  dependencies/package.json  vendored
@@ -3,14 +3,13 @@
"dependencies": {
"@coffeelint/cli": "^3.2.10",
"@stoplight/spectral": "^5.4.0",
"@typescript-eslint/eslint-plugin": "^3.7.0",
"@typescript-eslint/parser": "^3.7.0",
"@typescript-eslint/eslint-plugin": "^3.7.0",
"@typescript-eslint/eslint-plugin": "^3.8.0",
"@typescript-eslint/parser": "^3.8.0",
"babel-eslint": "^10.1.0",
"dockerfilelint": "^1.5.0",
"eslint": "^7.5.0",
"eslint": "^7.6.0",
"eslint-config-prettier": "^6.11.0",
"eslint-plugin-jest": "^23.18.0",
"eslint-plugin-jest": "^23.20.0",
"htmlhint": "^0.14.1",
"jsonlint": "^1.6.3",
"markdownlint-cli": "^0.23.2",
@@ -19,6 +18,9 @@
"standard": "^14.3.4",
"stylelint": "^13.6.1",
"stylelint-config-standard": "^20.0.0",
"typescript": "^3.9.7"
"npm-groovy-lint": "^6.1.0",
"typescript": "^3.9.7",
"asl-validator": "^1.7.0"

}
}

7  dependencies/phive.xml  vendored  Normal file
@@ -0,0 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<phive xmlns="https://phar.io/phive">
<!-- When adding new linter, do not forget to add its GPG key ID to Dockerfile -->
<phar name="phpcs" version="^3.5" installed="3.5.5" location="/usr/local/bin/phpcs" copy="true"/>
<phar name="phpstan" version="^0.12" installed="0.12.34" location="/usr/local/bin/phpstan" copy="true"/>
<phar name="psalm" version="^3.12" installed="3.12.2" location="/usr/local/bin/psalm" copy="true"/>
</phive>

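For context (not part of the diff): the Dockerfile hunk above consumes this manifest non-interactively during the image build. A minimal sketch of the equivalent step, assuming phive is already on the PATH and the command is run from the directory holding phive.xml:

#!/usr/bin/env bash
# Sketch: install the pinned PHP linter phars (phpcs, phpstan, psalm) listed in phive.xml.
# The key IDs are the ones the Dockerfile already passes to --trust-gpg-keys.
set -euo pipefail
phive install --trust-gpg-keys 31C7E470E2138192,CF1A108D0E7AE720,8A03EA3B385DBAA1
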
File diff suppressed because it is too large
|
@ -16,11 +16,8 @@ function BuildFileList() {
|
|||
################
|
||||
# print header #
|
||||
################
|
||||
if [[ ${ACTIONS_RUNNER_DEBUG} == "true" ]]; then
|
||||
echo ""
|
||||
echo "----------------------------------------------"
|
||||
echo "Pulling in code history and branches..."
|
||||
fi
|
||||
debug "----------------------------------------------"
|
||||
debug "Pulling in code history and branches..."
|
||||
|
||||
#################################################################################
|
||||
# Switch codebase back to the default branch to get a list of all files changed #
|
||||
|
@ -40,19 +37,15 @@ function BuildFileList() {
|
|||
##############################
|
||||
if [ ${ERROR_CODE} -ne 0 ]; then
|
||||
# Error
|
||||
echo "Failed to switch to ${DEFAULT_BRANCH} branch to get files changed!"
|
||||
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${SWITCH_CMD}]${NC}"
|
||||
exit 1
|
||||
info "Failed to switch to ${DEFAULT_BRANCH} branch to get files changed!"
|
||||
fatal "[${SWITCH_CMD}]"
|
||||
fi
|
||||
|
||||
################
|
||||
# print header #
|
||||
################
|
||||
if [[ ${ACTIONS_RUNNER_DEBUG} == "true" ]]; then
|
||||
echo ""
|
||||
echo "----------------------------------------------"
|
||||
echo "Generating Diff with:[git diff --name-only '${DEFAULT_BRANCH}..${GITHUB_SHA}' --diff-filter=d]"
|
||||
fi
|
||||
debug "----------------------------------------------"
|
||||
debug "Generating Diff with:[git diff --name-only '${DEFAULT_BRANCH}..${GITHUB_SHA}' --diff-filter=d]"
|
||||
|
||||
#################################################
|
||||
# Get the Array of files changed in the commits #
|
||||
|
@ -69,43 +62,42 @@ function BuildFileList() {
|
|||
##############################
|
||||
if [ ${ERROR_CODE} -ne 0 ]; then
|
||||
# Error
|
||||
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to gain a list of all files changed!${NC}"
|
||||
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${RAW_FILE_ARRAY[*]}]${NC}"
|
||||
exit 1
|
||||
error "Failed to gain a list of all files changed!"
|
||||
fatal "[${RAW_FILE_ARRAY[*]}]"
|
||||
fi
|
||||
|
||||
################################################
|
||||
# Iterate through the array of all files found #
|
||||
################################################
|
||||
echo ""
|
||||
echo "----------------------------------------------"
|
||||
echo "Files that have been modified in the commit(s):"
|
||||
info "----------------------------------------------"
|
||||
info "Files that have been modified in the commit(s):"
|
||||
for FILE in "${RAW_FILE_ARRAY[@]}"; do
|
||||
###########################
# Get the files extension #
###########################
# Extract just the file and extension, reverse it, cut off extension,
# reverse it back, substitute to lowercase
FILE_TYPE=$(basename "${FILE}" | rev | cut -f1 -d'.' | rev | awk '{print tolower($0)}')
# Extract just the file extension
FILE_TYPE=${FILE##*.}
# To lowercase
FILE_TYPE=${FILE_TYPE,,}

##############
# Print file #
##############
echo "File:[${FILE}], File_type:[${FILE_TYPE}]"
info "File:[${FILE}], File_type:[${FILE_TYPE}]"

#########
# DEBUG #
#########
#echo "FILE_TYPE:[${FILE_TYPE}]"
debug "FILE_TYPE:[${FILE_TYPE}]"

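# Side note (not part of the diff): the two parameter expansions above are a pure-bash
# replacement for the old rev|cut|rev|awk pipeline. A hypothetical example:
#   FILE="docs/Example.YML"
#   FILE_TYPE=${FILE##*.}    # strips everything up to the last '.', leaving "YML"
#   FILE_TYPE=${FILE_TYPE,,} # lowercases it to "yml"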
#####################
|
||||
# Get the CFN files #
|
||||
#####################
|
||||
################################
|
||||
# Get the CLOUDFORMATION files #
|
||||
################################
|
||||
if [ "${FILE_TYPE}" == "yml" ] || [ "${FILE_TYPE}" == "yaml" ]; then
|
||||
################################
|
||||
# Append the file to the array #
|
||||
################################
|
||||
FILE_ARRAY_YML+=("${FILE}")
|
||||
FILE_ARRAY_YAML+=("${FILE}")
|
||||
##########################################################
|
||||
# Set the READ_ONLY_CHANGE_FLAG since this could be exec #
|
||||
##########################################################
|
||||
|
@ -118,7 +110,7 @@ function BuildFileList() {
|
|||
################################
|
||||
# Append the file to the array #
|
||||
################################
|
||||
FILE_ARRAY_CFN+=("${FILE}")
|
||||
FILE_ARRAY_CLOUDFORMATION+=("${FILE}")
|
||||
|
||||
##########################################################
|
||||
# Set the READ_ONLY_CHANGE_FLAG since this could be exec #
|
||||
|
@ -158,7 +150,16 @@ function BuildFileList() {
|
|||
################################
|
||||
# Append the file to the array #
|
||||
################################
|
||||
FILE_ARRAY_CFN+=("${FILE}")
|
||||
FILE_ARRAY_CLOUDFORMATION+=("${FILE}")
|
||||
fi
|
||||
############################################
|
||||
# Check if the file is AWS States Language #
|
||||
############################################
|
||||
if DetectAWSStatesFIle "${FILE}"; then
|
||||
################################
|
||||
# Append the file to the array #
|
||||
################################
|
||||
FILE_ARRAY_STATES+=("${FILE}")
|
||||
fi
|
||||
##########################################################
|
||||
# Set the READ_ONLY_CHANGE_FLAG since this could be exec #
|
||||
|
@ -211,9 +212,9 @@ function BuildFileList() {
|
|||
######################
|
||||
# Get the RAKU files #
|
||||
######################
|
||||
elif [ "${FILE_TYPE}" == "raku" ] || [ "${FILE_TYPE}" == "rakumod" ] \
|
||||
|| [ "${FILE_TYPE}" == "rakutest" ] || [ "${FILE_TYPE}" == "pm6" ] \
|
||||
|| [ "${FILE_TYPE}" == "pl6" ] || [ "${FILE_TYPE}" == "p6" ] ; then
|
||||
elif [ "${FILE_TYPE}" == "raku" ] || [ "${FILE_TYPE}" == "rakumod" ] ||
|
||||
[ "${FILE_TYPE}" == "rakutest" ] || [ "${FILE_TYPE}" == "pm6" ] ||
|
||||
[ "${FILE_TYPE}" == "pl6" ] || [ "${FILE_TYPE}" == "p6" ]; then
|
||||
################################
|
||||
# Append the file to the array #
|
||||
################################
|
||||
|
@ -229,7 +230,10 @@ function BuildFileList() {
|
|||
################################
|
||||
# Append the file to the array #
|
||||
################################
|
||||
FILE_ARRAY_PHP+=("${FILE}")
|
||||
FILE_ARRAY_PHP_BUILTIN+=("${FILE}")
|
||||
FILE_ARRAY_PHP_PHPCS+=("${FILE}")
|
||||
FILE_ARRAY_PHP_PHPSTAN+=("${FILE}")
|
||||
FILE_ARRAY_PHP_PSALM+=("${FILE}")
|
||||
##########################################################
|
||||
# Set the READ_ONLY_CHANGE_FLAG since this could be exec #
|
||||
##########################################################
|
||||
|
@ -253,7 +257,8 @@ function BuildFileList() {
|
|||
################################
|
||||
# Append the file to the array #
|
||||
################################
|
||||
FILE_ARRAY_PYTHON+=("${FILE}")
|
||||
FILE_ARRAY_PYTHON_PYLINT+=("${FILE}")
|
||||
FILE_ARRAY_PYTHON_FLAKE8+=("${FILE}")
|
||||
##########################################################
|
||||
# Set the READ_ONLY_CHANGE_FLAG since this could be exec #
|
||||
##########################################################
|
||||
|
@ -343,6 +348,7 @@ function BuildFileList() {
|
|||
# Append the file to the array #
|
||||
################################
|
||||
FILE_ARRAY_TERRAFORM+=("${FILE}")
|
||||
FILE_ARRAY_TERRAFORM_TERRASCAN+=("${FILE}")
|
||||
##########################################################
|
||||
# Set the READ_ONLY_CHANGE_FLAG since this could be exec #
|
||||
##########################################################
|
||||
|
@ -382,6 +388,15 @@ function BuildFileList() {
|
|||
# Set the READ_ONLY_CHANGE_FLAG since this could be exec #
|
||||
##########################################################
|
||||
READ_ONLY_CHANGE_FLAG=1
|
||||
elif [ "$FILE_TYPE" == "lua" ]; then
|
||||
################################
|
||||
# Append the file to the array #
|
||||
################################
|
||||
FILE_ARRAY_LUA+=("$FILE")
|
||||
##########################################################
|
||||
# Set the READ_ONLY_CHANGE_FLAG since this could be exec #
|
||||
##########################################################
|
||||
READ_ONLY_CHANGE_FLAG=1
|
||||
############################
|
||||
# Get the Protocol Buffers files #
|
||||
############################
|
||||
|
@ -430,6 +445,15 @@ function BuildFileList() {
|
|||
# Set the READ_ONLY_CHANGE_FLAG since this could be exec #
|
||||
##########################################################
|
||||
READ_ONLY_CHANGE_FLAG=1
|
||||
elif [ "$FILE_TYPE" == "groovy" ] || [ "$FILE_TYPE" == "jenkinsfile" ] || [ "$FILE_TYPE" == "gradle" ]; then
|
||||
################################
|
||||
# Append the file to the array #
|
||||
################################
|
||||
FILE_ARRAY_GROOVY+=("$FILE")
|
||||
##########################################################
|
||||
# Set the READ_ONLY_CHANGE_FLAG since this could be exec #
|
||||
##########################################################
|
||||
READ_ONLY_CHANGE_FLAG=1
|
||||
else
|
||||
##############################################
|
||||
# Use file to see if we can parse what it is #
|
||||
|
@ -443,8 +467,8 @@ function BuildFileList() {
|
|||
#######################
|
||||
# It is a bash script #
|
||||
#######################
|
||||
echo -e "${NC}${F[Y]}WARN!${NC} Found bash script without extension:[.sh]${NC}"
|
||||
echo "Please update file with proper extensions."
|
||||
warn "Found bash script without extension:[.sh]"
|
||||
info "Please update file with proper extensions."
|
||||
################################
|
||||
# Append the file to the array #
|
||||
################################
|
||||
|
@ -457,8 +481,8 @@ function BuildFileList() {
|
|||
#######################
|
||||
# It is a Ruby script #
|
||||
#######################
|
||||
echo -e "${NC}${F[Y]}WARN!${NC} Found ruby script without extension:[.rb]${NC}"
|
||||
echo "Please update file with proper extensions."
|
||||
warn "Found ruby script without extension:[.rb]"
|
||||
info "Please update file with proper extensions."
|
||||
################################
|
||||
# Append the file to the array #
|
||||
################################
|
||||
|
@ -471,7 +495,7 @@ function BuildFileList() {
|
|||
############################
|
||||
# Extension was not found! #
|
||||
############################
|
||||
echo -e "${NC}${F[Y]} - WARN!${NC} Failed to get filetype for:[${FILE}]!${NC}"
|
||||
warn "Failed to get filetype for:[${FILE}]!"
|
||||
##########################################################
|
||||
# Set the READ_ONLY_CHANGE_FLAG since this could be exec #
|
||||
##########################################################
|
||||
|
@ -480,7 +504,7 @@ function BuildFileList() {
|
|||
fi
|
||||
done
|
||||
|
||||
echo ${READ_ONLY_CHANGE_FLAG} > /dev/null 2>&1 || true # Workaround SC2034
|
||||
export READ_ONLY_CHANGE_FLAG # Workaround SC2034
|
||||
|
||||
#########################################
|
||||
# Need to switch back to branch of code #
|
||||
|
@ -497,15 +521,13 @@ function BuildFileList() {
|
|||
##############################
|
||||
if [ ${ERROR_CODE} -ne 0 ]; then
|
||||
# Error
|
||||
echo "Failed to switch back to branch!"
|
||||
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${SWITCH2_CMD}]${NC}"
|
||||
exit 1
|
||||
error "Failed to switch back to branch!"
|
||||
fatal "[${SWITCH2_CMD}]"
|
||||
fi
|
||||
|
||||
################
|
||||
# Footer print #
|
||||
################
|
||||
echo ""
|
||||
echo "----------------------------------------------"
|
||||
echo -e "${NC}${F[B]}Successfully gathered list of files...${NC}"
|
||||
info "----------------------------------------------"
|
||||
info "Successfully gathered list of files..."
|
||||
}
|
||||
|
|
1416  lib/linter.sh
File diff suppressed because it is too large

53  lib/log.sh  Normal file
@@ -0,0 +1,53 @@
#!/usr/bin/env bash

declare -Agr B=(
[B]=$(echo -e "\e[44m")
[C]=$(echo -e "\e[46m")
[G]=$(echo -e "\e[42m")
[K]=$(echo -e "\e[40m")
[M]=$(echo -e "\e[45m")
[R]=$(echo -e "\e[41m")
[W]=$(echo -e "\e[47m")
[Y]=$(echo -e "\e[43m")
)
declare -Agr F=(
[B]=$(echo -e "\e[0;34m")
[C]=$(echo -e "\e[0;36m")
[G]=$(echo -e "\e[0;32m")
[K]=$(echo -e "\e[0;30m")
[M]=$(echo -e "\e[0;35m")
[R]=$(echo -e "\e[0;31m")
[W]=$(echo -e "\e[0;37m")
[Y]=$(echo -e "\e[0;33m")
)
readonly NC=$(echo -e "\e[0m")

export B
export F
export NC

# Log Functions
LOG_TEMP=$(mktemp) || echo "Failed to create temporary log file."
export LOG_TEMP
echo "super-linter Log" > "${LOG_TEMP}"
log() {
local TOTERM=${1:-}
local MESSAGE=${2:-}
echo -e "${MESSAGE:-}" | (
if [[ -n ${TOTERM} ]]; then
tee -a "${LOG_TEMP}" >&2
else
cat >> "${LOG_TEMP}" 2>&1
fi
)
}
trace() { log "${LOG_TRACE:-}" "${NC}$(date +"%F %T") ${F[B]}[TRACE ]${NC} $*${NC}"; }
debug() { log "${LOG_DEBUG:-}" "${NC}$(date +"%F %T") ${F[B]}[DEBUG ]${NC} $*${NC}"; }
info() { log "${LOG_VERBOSE:-}" "${NC}$(date +"%F %T") ${F[B]}[INFO ]${NC} $*${NC}"; }
notice() { log "true" "${NC}$(date +"%F %T") ${F[G]}[NOTICE]${NC} $*${NC}"; }
warn() { log "true" "${NC}$(date +"%F %T") ${F[Y]}[WARN ]${NC} $*${NC}"; }
error() { log "true" "${NC}$(date +"%F %T") ${F[R]}[ERROR ]${NC} $*${NC}"; }
fatal() {
log "true" "${NC}$(date +"%F %T") ${B[R]}${F[W]}[FATAL ]${NC} $*${NC}"
exit 1
}
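As a quick orientation (not part of the diff): a minimal sketch of how another script could source lib/log.sh and use these helpers. The LOG_VERBOSE/LOG_DEBUG/LOG_TRACE toggles are assumed to be set by the calling script; everything is appended to ${LOG_TEMP} regardless.

#!/usr/bin/env bash
# Sketch: consume the helpers defined in lib/log.sh (assumes it is run from the lib/ directory).
# shellcheck source=/dev/null
source ./log.sh

LOG_VERBOSE=true   # assumed toggle: non-empty means info() is echoed to the terminal as well as the log file

info "Gathering user validation information..."   # logged, and shown because LOG_VERBOSE is set
debug "FILE_TYPE:[sh]"                            # logged only, unless LOG_DEBUG is also set
warn "Found bash script without extension:[.sh]"  # warnings always reach the terminal
fatal "Registry not set correctly!"               # prints a FATAL line and exits with status 1
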
|
@ -8,12 +8,12 @@ cat << EOF
|
|||
@///////@///////////////@@@@ ( @,
|
||||
@/(&/@//////////////////// @
|
||||
@////////////////////////@@ @
|
||||
@%////////(//////////%/////&@ @@ *,@
|
||||
@@@@@/@/#/////(&////////////////// .@
|
||||
*@@@@@. .%///(//@//////////////////&. .@@, @%
|
||||
@@% .&@&&/@.@//&/////(////////// @@@@@@@@@ .. &@
|
||||
@@% @@@@@ @&/////////////////# @/ V @@/ ,@@@ @
|
||||
@@@% @@@@ .%@@@@//////#@ @ @@ @ .,.
|
||||
@%////////(//////////%/////&@ @@ *,@ ______________
|
||||
@@@@@/@/#/////(&////////////////// .@ / \\
|
||||
*@@@@@. .%///(//@//////////////////&. .@@, @% / Don't mind me \\
|
||||
@@% .&@&&/@.@//&/////(////////// @@@@@@@@@ .. &@ / I'm just looking \\
|
||||
@@% @@@@@ @&/////////////////# @/ V @@/ ,@@@ @ < for some trash... |
|
||||
@@@% @@@@ .%@@@@//////#@ @ @@ @ .,. \\__________________/
|
||||
@@@/@( (@@@@% @/\ %
|
||||
@@@@( . .@@/\ #
|
||||
@ %@%
|
||||
|
|
|
@@ -1,27 +0,0 @@
#!/usr/bin/env bash

declare -Agr B=(
  [B]=$(echo -e "\e[44m")
  [C]=$(echo -e "\e[46m")
  [G]=$(echo -e "\e[42m")
  [K]=$(echo -e "\e[40m")
  [M]=$(echo -e "\e[45m")
  [R]=$(echo -e "\e[41m")
  [W]=$(echo -e "\e[47m")
  [Y]=$(echo -e "\e[43m")
)
declare -Agr F=(
  [B]=$(echo -e "\e[0;34m")
  [C]=$(echo -e "\e[0;36m")
  [G]=$(echo -e "\e[0;32m")
  [K]=$(echo -e "\e[0;30m")
  [M]=$(echo -e "\e[0;35m")
  [R]=$(echo -e "\e[0;31m")
  [W]=$(echo -e "\e[0;37m")
  [Y]=$(echo -e "\e[0;33m")
)
readonly NC=$(echo -e "\e[0m")

export B
export F
export NC
@@ -13,9 +13,8 @@ function GetValidationInfo() {
############################################
# Print headers for user provided env vars #
############################################
echo ""
echo "--------------------------------------------"
echo "Gathering user validation information..."
info "--------------------------------------------"
info "Gathering user validation information..."

###########################################
# Skip validation if were running locally #

@@ -31,10 +30,10 @@ function GetValidationInfo() {
if [[ ${VALIDATE_ALL_CODEBASE} != "false" ]]; then
# Set to true
VALIDATE_ALL_CODEBASE="${DEFAULT_VALIDATE_ALL_CODEBASE}"
echo "- Validating ALL files in code base..."
info "- Validating ALL files in code base..."
else
# Its false
echo "- Only validating [new], or [edited] files in code base..."
info "- Only validating [new], or [edited] files in code base..."
fi
fi
@ -46,680 +45,64 @@ function GetValidationInfo() {
|
|||
################################
|
||||
# Convert strings to lowercase #
|
||||
################################
|
||||
VALIDATE_ANSIBLE="${VALIDATE_ANSIBLE,,}"
|
||||
VALIDATE_ARM="${VALIDATE_ARM,,}"
|
||||
VALIDATE_BASH="${VALIDATE_BASH,,}"
|
||||
VALIDATE_CLOJURE="${VALIDATE_CLOJURE,,}"
|
||||
VALIDATE_COFFEE="${VALIDATE_COFFEE,,}"
|
||||
VALIDATE_CSS="${VALIDATE_CSS,,}"
|
||||
VALIDATE_DART="${VALIDATE_DART,,}"
|
||||
VALIDATE_DOCKER="${VALIDATE_DOCKER,,}"
|
||||
VALIDATE_EDITORCONFIG="${VALIDATE_EDITORCONFIG,,}"
|
||||
VALIDATE_ENV="${VALIDATE_ENV,,}"
|
||||
VALIDATE_GO="${VALIDATE_GO,,}"
|
||||
VALIDATE_HTML="${VALIDATE_HTML,,}"
|
||||
VALIDATE_JAVASCRIPT_ES="${VALIDATE_JAVASCRIPT_ES,,}"
|
||||
VALIDATE_JAVASCRIPT_STANDARD="${VALIDATE_JAVASCRIPT_STANDARD,,}"
|
||||
VALIDATE_JSON="${VALIDATE_JSON,,}"
|
||||
VALIDATE_JSX="${VALIDATE_JSX,,}"
|
||||
VALIDATE_KOTLIN="${VALIDATE_KOTLIN,,}"
|
||||
VALIDATE_MARKDOWN="${VALIDATE_MARKDOWN,,}"
|
||||
VALIDATE_OPENAPI="${VALIDATE_OPENAPI,,}"
|
||||
VALIDATE_PERL="${VALIDATE_PERL,,}"
|
||||
VALIDATE_PHP="${VALIDATE_PHP,,}"
|
||||
VALIDATE_POWERSHELL="${VALIDATE_POWERSHELL,,}"
|
||||
VALIDATE_PROTOBUF="${VALIDATE_PROTOBUF,,}"
|
||||
VALIDATE_PYTHON="${VALIDATE_PYTHON,,}"
|
||||
VALIDATE_RAKU="${VALIDATE_RAKU,,}"
|
||||
VALIDATE_RUBY="${VALIDATE_RUBY,,}"
|
||||
VALIDATE_TERRAFORM="${VALIDATE_TERRAFORM,,}"
|
||||
VALIDATE_TSX="${VALIDATE_TSX,,}"
|
||||
VALIDATE_TYPESCRIPT_ES="${VALIDATE_TYPESCRIPT_ES,,}"
|
||||
VALIDATE_TYPESCRIPT_STANDARD="${VALIDATE_TYPESCRIPT_STANDARD,,}"
|
||||
VALIDATE_YAML="${VALIDATE_YAML,,}"
|
||||
VALIDATE_XML="${VALIDATE_XML,,}"
|
||||
# Loop through all languages
|
||||
for LANGUAGE in "${LANGUAGE_ARRAY[@]}"; do
|
||||
# build the variable
|
||||
VALIDATE_LANGUAGE="VALIDATE_${LANGUAGE}"
|
||||
# Set the value of the var to lowercase
|
||||
eval "${VALIDATE_LANGUAGE}=${!VALIDATE_LANGUAGE,,}"
|
||||
done
|
||||
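The loop added above relies on bash indirect expansion; here is a standalone sketch of the same pattern (the two-language array and the starting values are made up for illustration, not taken from the commit):

#!/usr/bin/env bash
# Lowercase every VALIDATE_<LANGUAGE> variable with one loop instead of one
# assignment per language, mirroring the new code above.
LANGUAGE_ARRAY=('YAML' 'JSON')       # hypothetical subset of the real array
VALIDATE_YAML="True"
VALIDATE_JSON="FALSE"

for LANGUAGE in "${LANGUAGE_ARRAY[@]}"; do
  VALIDATE_LANGUAGE="VALIDATE_${LANGUAGE}"              # build the variable name
  # ${!VALIDATE_LANGUAGE,,} expands the variable that name points at, lowercased
  eval "${VALIDATE_LANGUAGE}=${!VALIDATE_LANGUAGE,,}"
done

echo "${VALIDATE_YAML} ${VALIDATE_JSON}"                # prints: true false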
|
||||
################################################
|
||||
# Determine if any linters were explicitly set #
|
||||
################################################
|
||||
ANY_SET="false"
|
||||
if [[ -n ${VALIDATE_YAML} || -n \
|
||||
${VALIDATE_JSON} || -n \
|
||||
${VALIDATE_XML} || -n \
|
||||
${VALIDATE_MARKDOWN} || -n \
|
||||
${VALIDATE_BASH} || -n \
|
||||
${VALIDATE_PERL} || -n \
|
||||
${VALIDATE_RAKU} || -n \
|
||||
${VALIDATE_PHP} || -n \
|
||||
${VALIDATE_PYTHON} || -n \
|
||||
${VALIDATE_RUBY} || -n \
|
||||
${VALIDATE_COFFEE} || -n \
|
||||
${VALIDATE_ANSIBLE} || -n \
|
||||
${VALIDATE_JAVASCRIPT_ES} || -n \
|
||||
${VALIDATE_JAVASCRIPT_STANDARD} || -n \
|
||||
${VALIDATE_TYPESCRIPT_ES} || -n \
|
||||
${VALIDATE_TYPESCRIPT_STANDARD} || -n \
|
||||
${VALIDATE_DOCKER} || -n \
|
||||
${VALIDATE_GO} || -n \
|
||||
${VALIDATE_TERRAFORM} || -n \
|
||||
${VALIDATE_POWERSHELL} || -n \
|
||||
${VALIDATE_ARM} || -n \
|
||||
${VALIDATE_CSS} || -n \
|
||||
${VALIDATE_ENV} || -n \
|
||||
${VALIDATE_CLOJURE} || -n \
|
||||
${VALIDATE_PROTOBUF} || -n \
|
||||
${VALIDATE_OPENAPI} || -n \
|
||||
${VALIDATE_KOTLIN} || -n \
|
||||
${VALIDATE_DART} || -n \
|
||||
${VALIDATE_EDITORCONFIG} || -n \
|
||||
${VALIDATE_HTML} ]]; then
|
||||
ANY_SET="true"
|
||||
fi
|
||||
|
||||
####################################
|
||||
# Validate if we should check YAML #
|
||||
####################################
|
||||
if [[ ${ANY_SET} == "true" ]]; then
|
||||
# Some linter flags were set - only run those set to true
|
||||
if [[ -z ${VALIDATE_YAML} ]]; then
|
||||
# YAML flag was not set - default to false
|
||||
VALIDATE_YAML="false"
|
||||
# Loop through all languages
|
||||
for LANGUAGE in "${LANGUAGE_ARRAY[@]}"; do
|
||||
# build the variable
|
||||
VALIDATE_LANGUAGE="VALIDATE_${LANGUAGE}"
|
||||
# Check to see if the variable was set
|
||||
if [ -n "${!VALIDATE_LANGUAGE}" ]; then
|
||||
# It was set, need to set flag
|
||||
ANY_SET="true"
|
||||
fi
|
||||
else
|
||||
# No linter flags were set - default all to true
|
||||
VALIDATE_YAML="true"
|
||||
fi
|
||||
|
||||
####################################
|
||||
# Validate if we should check JSON #
|
||||
####################################
|
||||
if [[ ${ANY_SET} == "true" ]]; then
|
||||
# Some linter flags were set - only run those set to true
|
||||
if [[ -z ${VALIDATE_JSON} ]]; then
|
||||
# JSON flag was not set - default to false
|
||||
VALIDATE_JSON="false"
|
||||
fi
|
||||
else
|
||||
# No linter flags were set - default all to true
|
||||
VALIDATE_JSON="true"
|
||||
fi
|
||||
|
||||
###################################
|
||||
# Validate if we should check XML #
|
||||
###################################
|
||||
if [[ ${ANY_SET} == "true" ]]; then
|
||||
# Some linter flags were set - only run those set to true
|
||||
if [[ -z ${VALIDATE_XML} ]]; then
|
||||
# XML flag was not set - default to false
|
||||
VALIDATE_XML="false"
|
||||
fi
|
||||
else
|
||||
# No linter flags were set - default all to true
|
||||
VALIDATE_XML="true"
|
||||
fi
|
||||
|
||||
########################################
|
||||
# Validate if we should check MARKDOWN #
|
||||
########################################
|
||||
if [[ ${ANY_SET} == "true" ]]; then
|
||||
# Some linter flags were set - only run those set to true
|
||||
if [[ -z ${VALIDATE_MARKDOWN} ]]; then
|
||||
# MD flag was not set - default to false
|
||||
VALIDATE_MARKDOWN="false"
|
||||
fi
|
||||
else
|
||||
# No linter flags were set - default all to true
|
||||
VALIDATE_MARKDOWN="true"
|
||||
fi
|
||||
|
||||
####################################
|
||||
# Validate if we should check BASH #
|
||||
####################################
|
||||
if [[ ${ANY_SET} == "true" ]]; then
|
||||
# Some linter flags were set - only run those set to true
|
||||
if [[ -z ${VALIDATE_BASH} ]]; then
|
||||
# BASH flag was not set - default to false
|
||||
VALIDATE_BASH="false"
|
||||
fi
|
||||
else
|
||||
# No linter flags were set - default all to true
|
||||
VALIDATE_BASH="true"
|
||||
fi
|
||||
|
||||
####################################
|
||||
# Validate if we should check PERL #
|
||||
####################################
|
||||
if [[ ${ANY_SET} == "true" ]]; then
|
||||
# Some linter flags were set - only run those set to true
|
||||
if [[ -z ${VALIDATE_PERL} ]]; then
|
||||
# PERL flag was not set - default to false
|
||||
VALIDATE_PERL="false"
|
||||
fi
|
||||
else
|
||||
# No linter flags were set - default all to true
|
||||
VALIDATE_PERL="true"
|
||||
fi
|
||||
|
||||
####################################
|
||||
# Validate if we should check RAKU #
|
||||
####################################
|
||||
if [[ ${ANY_SET} == "true" ]]; then
|
||||
# Some linter flags were set - only run those set to true
|
||||
if [[ -z ${VALIDATE_RAKU} ]]; then
|
||||
# RAKU flag was not set - default to false
|
||||
VALIDATE_RAKU="false"
|
||||
fi
|
||||
else
|
||||
# No linter flags were set - default all to true
|
||||
VALIDATE_RAKU="true"
|
||||
fi
|
||||
|
||||
####################################
|
||||
# Validate if we should check PHP #
|
||||
####################################
|
||||
if [[ ${ANY_SET} == "true" ]]; then
|
||||
# Some linter flags were set - only run those set to true
|
||||
if [[ -z ${VALIDATE_PHP} ]]; then
|
||||
# PHP flag was not set - default to false
|
||||
VALIDATE_PHP="false"
|
||||
fi
|
||||
else
|
||||
# No linter flags were set - default all to true
|
||||
VALIDATE_PHP="true"
|
||||
fi
|
||||
|
||||
######################################
|
||||
# Validate if we should check PYTHON #
|
||||
######################################
|
||||
if [[ ${ANY_SET} == "true" ]]; then
|
||||
# Some linter flags were set - only run those set to true
|
||||
if [[ -z ${VALIDATE_PYTHON} ]]; then
|
||||
# PYTHON flag was not set - default to false
|
||||
VALIDATE_PYTHON="false"
|
||||
fi
|
||||
else
|
||||
# No linter flags were set - default all to true
|
||||
VALIDATE_PYTHON="true"
|
||||
fi
|
||||
|
||||
####################################
|
||||
# Validate if we should check RUBY #
|
||||
####################################
|
||||
if [[ ${ANY_SET} == "true" ]]; then
|
||||
# Some linter flags were set - only run those set to true
|
||||
if [[ -z ${VALIDATE_RUBY} ]]; then
|
||||
# RUBY flag was not set - default to false
|
||||
VALIDATE_RUBY="false"
|
||||
fi
|
||||
else
|
||||
# No linter flags were set - default all to true
|
||||
VALIDATE_RUBY="true"
|
||||
fi
|
||||
|
||||
######################################
|
||||
# Validate if we should check COFFEE #
|
||||
######################################
|
||||
if [[ ${ANY_SET} == "true" ]]; then
|
||||
# Some linter flags were set - only run those set to true
|
||||
if [[ -z ${VALIDATE_COFFEE} ]]; then
|
||||
# COFFEE flag was not set - default to false
|
||||
VALIDATE_COFFEE="false"
|
||||
fi
|
||||
else
|
||||
# No linter flags were set - default all to true
|
||||
VALIDATE_COFFEE="true"
|
||||
fi
|
||||
|
||||
#######################################
|
||||
# Validate if we should check ANSIBLE #
|
||||
#######################################
|
||||
if [[ ${ANY_SET} == "true" ]]; then
|
||||
# Some linter flags were set - only run those set to true
|
||||
if [[ -z ${VALIDATE_ANSIBLE} ]]; then
|
||||
# ANSIBLE flag was not set - default to false
|
||||
VALIDATE_ANSIBLE="false"
|
||||
fi
|
||||
else
|
||||
# No linter flags were set - default all to true
|
||||
VALIDATE_ANSIBLE="true"
|
||||
fi
|
||||
|
||||
#############################################
|
||||
# Validate if we should check JAVASCRIPT_ES #
|
||||
#############################################
|
||||
if [[ ${ANY_SET} == "true" ]]; then
|
||||
# Some linter flags were set - only run those set to true
|
||||
if [[ -z ${VALIDATE_JAVASCRIPT_ES} ]]; then
|
||||
# JAVASCRIPT_ES flag was not set - default to false
|
||||
VALIDATE_JAVASCRIPT_ES="false"
|
||||
fi
|
||||
else
|
||||
# No linter flags were set - default all to true
|
||||
VALIDATE_JAVASCRIPT_ES="true"
|
||||
fi
|
||||
done
|
||||
|
||||
###################################################
|
||||
# Validate if we should check JAVASCRIPT_STANDARD #
|
||||
# Validate if we should check individual lanuages #
|
||||
###################################################
|
||||
if [[ ${ANY_SET} == "true" ]]; then
|
||||
# Some linter flags were set - only run those set to true
|
||||
if [[ -z ${VALIDATE_JAVASCRIPT_STANDARD} ]]; then
|
||||
# JAVASCRIPT_STANDARD flag was not set - default to false
|
||||
VALIDATE_JAVASCRIPT_STANDARD="false"
|
||||
# Loop through all languages
|
||||
for LANGUAGE in "${LANGUAGE_ARRAY[@]}"; do
|
||||
# build the variable
|
||||
VALIDATE_LANGUAGE="VALIDATE_${LANGUAGE}"
|
||||
# Check if ANY_SET was set
|
||||
if [[ ${ANY_SET} == "true" ]]; then
|
||||
# Check to see if the variable was set
|
||||
if [ -z "${!VALIDATE_LANGUAGE}" ]; then
|
||||
# Flag was not set, default to false
|
||||
eval "${VALIDATE_LANGUAGE}='false'"
|
||||
fi
|
||||
else
|
||||
# No linter flags were set - default all to true
|
||||
eval "${VALIDATE_LANGUAGE}='true'"
|
||||
fi
|
||||
else
|
||||
# No linter flags were set - default all to true
|
||||
VALIDATE_JAVASCRIPT_STANDARD="true"
|
||||
fi
|
||||
|
||||
#############################################
|
||||
# Validate if we should check JSX #
|
||||
#############################################
|
||||
if [[ ${ANY_SET} == "true" ]]; then
|
||||
# Some linter flags were set - only run those set to true
|
||||
if [[ -z ${VALIDATE_JSX} ]]; then
|
||||
# JSX flag was not set - default to false
|
||||
VALIDATE_JSX="false"
|
||||
fi
|
||||
else
|
||||
# No linter flags were set - default all to true
|
||||
VALIDATE_JSX="true"
|
||||
fi
|
||||
|
||||
#############################################
|
||||
# Validate if we should check TSX #
|
||||
#############################################
|
||||
if [[ ${ANY_SET} == "true" ]]; then
|
||||
# Some linter flags were set - only run those set to true
|
||||
if [[ -z ${VALIDATE_TSX} ]]; then
|
||||
# TSX flag was not set - default to false
|
||||
VALIDATE_TSX="false"
|
||||
fi
|
||||
else
|
||||
# No linter flags were set - default all to true
|
||||
VALIDATE_TSX="true"
|
||||
fi
|
||||
|
||||
#############################################
|
||||
# Validate if we should check TYPESCRIPT_ES #
|
||||
#############################################
|
||||
if [[ ${ANY_SET} == "true" ]]; then
|
||||
# Some linter flags were set - only run those set to true
|
||||
if [[ -z ${VALIDATE_TYPESCRIPT_ES} ]]; then
|
||||
# TYPESCRIPT_ES flag was not set - default to false
|
||||
VALIDATE_TYPESCRIPT_ES="false"
|
||||
fi
|
||||
else
|
||||
# No linter flags were set - default all to true
|
||||
VALIDATE_TYPESCRIPT_ES="true"
|
||||
fi
|
||||
|
||||
###################################################
|
||||
# Validate if we should check TYPESCRIPT_STANDARD #
|
||||
###################################################
|
||||
if [[ ${ANY_SET} == "true" ]]; then
|
||||
# Some linter flags were set - only run those set to true
|
||||
if [[ -z ${VALIDATE_TYPESCRIPT_STANDARD} ]]; then
|
||||
# TYPESCRIPT_STANDARD flag was not set - default to false
|
||||
VALIDATE_TYPESCRIPT_STANDARD="false"
|
||||
fi
|
||||
else
|
||||
# No linter flags were set - default all to true
|
||||
VALIDATE_TYPESCRIPT_STANDARD="true"
|
||||
fi
|
||||
|
||||
######################################
|
||||
# Validate if we should check DOCKER #
|
||||
######################################
|
||||
if [[ ${ANY_SET} == "true" ]]; then
|
||||
# Some linter flags were set - only run those set to true
|
||||
if [[ -z ${VALIDATE_DOCKER} ]]; then
|
||||
# DOCKER flag was not set - default to false
|
||||
VALIDATE_DOCKER="false"
|
||||
fi
|
||||
else
|
||||
# No linter flags were set - default all to true
|
||||
VALIDATE_DOCKER="true"
|
||||
fi
|
||||
|
||||
##################################
|
||||
# Validate if we should check GO #
|
||||
##################################
|
||||
if [[ ${ANY_SET} == "true" ]]; then
|
||||
# Some linter flags were set - only run those set to true
|
||||
if [[ -z ${VALIDATE_GO} ]]; then
|
||||
# GO flag was not set - default to false
|
||||
VALIDATE_GO="false"
|
||||
fi
|
||||
else
|
||||
# No linter flags were set - default all to true
|
||||
VALIDATE_GO="true"
|
||||
fi
|
||||
|
||||
#########################################
|
||||
# Validate if we should check TERRAFORM #
|
||||
#########################################
|
||||
if [[ ${ANY_SET} == "true" ]]; then
|
||||
# Some linter flags were set - only run those set to true
|
||||
if [[ -z ${VALIDATE_TERRAFORM} ]]; then
|
||||
# TERRAFORM flag was not set - default to false
|
||||
VALIDATE_TERRAFORM="false"
|
||||
fi
|
||||
else
|
||||
# No linter flags were set - default all to true
|
||||
VALIDATE_TERRAFORM="true"
|
||||
fi
|
||||
|
||||
#########################################
|
||||
# Validate if we should check POWERSHELL #
|
||||
#########################################
|
||||
if [[ ${ANY_SET} == "true" ]]; then
|
||||
# Some linter flags were set - only run those set to true
|
||||
if [[ -z ${VALIDATE_POWERSHELL} ]]; then
|
||||
# POWERSHELL flag was not set - default to false
|
||||
VALIDATE_POWERSHELL="false"
|
||||
fi
|
||||
else
|
||||
# No linter flags were set - default all to true
|
||||
VALIDATE_POWERSHELL="true"
|
||||
fi
|
||||
|
||||
###################################
|
||||
# Validate if we should check ARM #
|
||||
###################################
|
||||
if [[ "${ANY_SET}" == "true" ]]; then
|
||||
# Some linter flags were set - only run those set to true
|
||||
if [[ -z "${VALIDATE_ARM}" ]]; then
|
||||
# ARM flag was not set - default to false
|
||||
VALIDATE_ARM="false"
|
||||
fi
|
||||
else
|
||||
# No linter flags were set - default all to true
|
||||
VALIDATE_ARM="true"
|
||||
fi
|
||||
|
||||
###################################
|
||||
# Validate if we should check CSS #
|
||||
###################################
|
||||
if [[ ${ANY_SET} == "true" ]]; then
|
||||
# Some linter flags were set - only run those set to true
|
||||
if [[ -z ${VALIDATE_CSS} ]]; then
|
||||
# CSS flag was not set - default to false
|
||||
VALIDATE_CSS="false"
|
||||
fi
|
||||
else
|
||||
# No linter flags were set - default all to true
|
||||
VALIDATE_CSS="true"
|
||||
fi
|
||||
|
||||
###################################
|
||||
# Validate if we should check ENV #
|
||||
###################################
|
||||
if [[ ${ANY_SET} == "true" ]]; then
|
||||
# Some linter flags were set - only run those set to true
|
||||
if [[ -z ${VALIDATE_ENV} ]]; then
|
||||
# ENV flag was not set - default to false
|
||||
VALIDATE_ENV="false"
|
||||
fi
|
||||
else
|
||||
# No linter flags were set - default all to true
|
||||
VALIDATE_ENV="true"
|
||||
fi
|
||||
|
||||
######################################
|
||||
# Validate if we should check KOTLIN #
|
||||
######################################
|
||||
if [[ ${ANY_SET} == "true" ]]; then
|
||||
# Some linter flags were set - only run those set to true
|
||||
if [[ -z ${VALIDATE_KOTLIN} ]]; then
|
||||
# ENV flag was not set - default to false
|
||||
VALIDATE_KOTLIN="false"
|
||||
fi
|
||||
else
|
||||
# No linter flags were set - default all to true
|
||||
VALIDATE_KOTLIN="true"
|
||||
fi
|
||||
|
||||
####################################
|
||||
# Validate if we should check DART #
|
||||
####################################
|
||||
if [[ ${ANY_SET} == "true" ]]; then
|
||||
# Some linter flags were set - only run those set to true
|
||||
if [[ -z ${VALIDATE_DART} ]]; then
|
||||
# ENV flag was not set - default to false
|
||||
VALIDATE_DART="false"
|
||||
fi
|
||||
else
|
||||
# No linter flags were set - default all to true
|
||||
VALIDATE_DART="true"
|
||||
fi
|
||||
|
||||
#######################################
|
||||
# Validate if we should check OPENAPI #
|
||||
#######################################
|
||||
if [[ ${ANY_SET} == "true" ]]; then
|
||||
# Some linter flags were set - only run those set to true
|
||||
if [[ -z ${VALIDATE_OPENAPI} ]]; then
|
||||
# OPENAPI flag was not set - default to false
|
||||
VALIDATE_OPENAPI="false"
|
||||
fi
|
||||
else
|
||||
# No linter flags were set - default all to true
|
||||
VALIDATE_OPENAPI="true"
|
||||
fi
|
||||
|
||||
#######################################
|
||||
# Validate if we should check PROTOBUF #
|
||||
#######################################
|
||||
if [[ ${ANY_SET} == "true" ]]; then
|
||||
# Some linter flags were set - only run those set to true
|
||||
if [[ -z ${VALIDATE_PROTOBUF} ]]; then
|
||||
# PROTOBUF flag was not set - default to false
|
||||
VALIDATE_PROTOBUF="false"
|
||||
fi
|
||||
else
|
||||
# No linter flags were set - default all to true
|
||||
VALIDATE_PROTOBUF="true"
|
||||
fi
|
||||
|
||||
#######################################
|
||||
# Validate if we should check Clojure #
|
||||
#######################################
|
||||
if [[ ${ANY_SET} == "true" ]]; then
|
||||
# Some linter flags were set - only run those set to true
|
||||
if [[ -z ${VALIDATE_CLOJURE} ]]; then
|
||||
# Clojure flag was not set - default to false
|
||||
VALIDATE_CLOJURE="false"
|
||||
fi
|
||||
else
|
||||
# No linter flags were set - default all to true
|
||||
VALIDATE_CLOJURE="true"
|
||||
fi
|
||||
|
||||
############################################
|
||||
# Validate if we should check editorconfig #
|
||||
############################################
|
||||
if [[ ${ANY_SET} == "true" ]]; then
|
||||
# Some linter flags were set - only run those set to true
|
||||
if [[ -z ${VALIDATE_EDITORCONFIG} ]]; then
|
||||
# EDITORCONFIG flag was not set - default to false
|
||||
VALIDATE_EDITORCONFIG="false"
|
||||
fi
|
||||
else
|
||||
# No linter flags were set
|
||||
# special case checking for .editorconfig
|
||||
if [ -f "${GITHUB_WORKSPACE}/.editorconfig" ]; then
|
||||
VALIDATE_EDITORCONFIG="true"
|
||||
fi
|
||||
fi
|
||||
|
||||
####################################
|
||||
# Validate if we should check HTML #
|
||||
####################################
|
||||
if [[ ${ANY_SET} == "true" ]]; then
|
||||
# Some linter flags were set - only run those set to true
|
||||
if [[ -z ${VALIDATE_HTML} ]]; then
|
||||
# HTML flag was not set - default to false
|
||||
VALIDATE_HTML="false"
|
||||
fi
|
||||
else
|
||||
# No linter flags were set - default all to true
|
||||
VALIDATE_HTML="true"
|
||||
fi
|
||||
done
|
||||
|
||||
#######################################
|
||||
# Print which linters we are enabling #
|
||||
#######################################
|
||||
if [[ ${VALIDATE_YAML} == "true" ]]; then
|
||||
PRINT_ARRAY+=("- Validating [YAML] files in code base...")
|
||||
else
|
||||
PRINT_ARRAY+=("- Excluding [YAML] files in code base...")
|
||||
fi
|
||||
if [[ ${VALIDATE_JSON} == "true" ]]; then
|
||||
PRINT_ARRAY+=("- Validating [JSON] files in code base...")
|
||||
else
|
||||
PRINT_ARRAY+=("- Excluding [JSON] files in code base...")
|
||||
fi
|
||||
if [[ ${VALIDATE_XML} == "true" ]]; then
|
||||
PRINT_ARRAY+=("- Validating [XML] files in code base...")
|
||||
else
|
||||
PRINT_ARRAY+=("- Excluding [XML] files in code base...")
|
||||
fi
|
||||
if [[ ${VALIDATE_MARKDOWN} == "true" ]]; then
|
||||
PRINT_ARRAY+=("- Validating [MARKDOWN] files in code base...")
|
||||
else
|
||||
PRINT_ARRAY+=("- Excluding [MARKDOWN] files in code base...")
|
||||
fi
|
||||
if [[ ${VALIDATE_BASH} == "true" ]]; then
|
||||
PRINT_ARRAY+=("- Validating [BASH] files in code base...")
|
||||
else
|
||||
PRINT_ARRAY+=("- Excluding [BASH] files in code base...")
|
||||
fi
|
||||
if [[ ${VALIDATE_PERL} == "true" ]]; then
|
||||
PRINT_ARRAY+=("- Validating [PERL] files in code base...")
|
||||
else
|
||||
PRINT_ARRAY+=("- Excluding [PERL] files in code base...")
|
||||
fi
|
||||
if [[ ${VALIDATE_RAKU} == "true" ]]; then
|
||||
PRINT_ARRAY+=("- Validating [RAKU] files in code base...")
|
||||
else
|
||||
PRINT_ARRAY+=("- Excluding [RAKU] files in code base...")
|
||||
fi
|
||||
if [[ ${VALIDATE_PHP} == "true" ]]; then
|
||||
PRINT_ARRAY+=("- Validating [PHP] files in code base...")
|
||||
else
|
||||
PRINT_ARRAY+=("- Excluding [PHP] files in code base...")
|
||||
fi
|
||||
if [[ ${VALIDATE_PYTHON} == "true" ]]; then
|
||||
PRINT_ARRAY+=("- Validating [PYTHON] files in code base...")
|
||||
else
|
||||
PRINT_ARRAY+=("- Excluding [PYTHON] files in code base...")
|
||||
fi
|
||||
if [[ ${VALIDATE_RUBY} == "true" ]]; then
|
||||
PRINT_ARRAY+=("- Validating [RUBY] files in code base...")
|
||||
else
|
||||
PRINT_ARRAY+=("- Excluding [RUBY] files in code base...")
|
||||
fi
|
||||
if [[ ${VALIDATE_COFFEE} == "true" ]]; then
|
||||
PRINT_ARRAY+=("- Validating [COFFEE] files in code base...")
|
||||
else
|
||||
PRINT_ARRAY+=("- Excluding [COFFEE] files in code base...")
|
||||
fi
|
||||
if [[ ${VALIDATE_ANSIBLE} == "true" ]]; then
|
||||
PRINT_ARRAY+=("- Validating [ANSIBLE] files in code base...")
|
||||
else
|
||||
PRINT_ARRAY+=("- Excluding [ANSIBLE] files in code base...")
|
||||
fi
|
||||
if [[ ${VALIDATE_JAVASCRIPT_ES} == "true" ]]; then
|
||||
PRINT_ARRAY+=("- Validating [JAVASCRIPT(eslint)] files in code base...")
|
||||
else
|
||||
PRINT_ARRAY+=("- Excluding [JAVASCRIPT(eslint)] files in code base...")
|
||||
fi
|
||||
if [[ ${VALIDATE_JAVASCRIPT_STANDARD} == "true" ]]; then
|
||||
PRINT_ARRAY+=("- Validating [JAVASCRIPT(standard)] files in code base...")
|
||||
else
|
||||
PRINT_ARRAY+=("- Excluding [JAVASCRIPT(standard)] files in code base...")
|
||||
fi
|
||||
if [[ ${VALIDATE_TYPESCRIPT_ES} == "true" ]]; then
|
||||
PRINT_ARRAY+=("- Validating [TYPESCRIPT(eslint)] files in code base...")
|
||||
else
|
||||
PRINT_ARRAY+=("- Excluding [TYPESCRIPT(eslint)] files in code base...")
|
||||
fi
|
||||
if [[ ${VALIDATE_TYPESCRIPT_STANDARD} == "true" ]]; then
|
||||
PRINT_ARRAY+=("- Validating [TYPESCRIPT(standard)] files in code base...")
|
||||
else
|
||||
PRINT_ARRAY+=("- Excluding [TYPESCRIPT(standard)] files in code base...")
|
||||
fi
|
||||
if [[ ${VALIDATE_DOCKER} == "true" ]]; then
|
||||
PRINT_ARRAY+=("- Validating [DOCKER] files in code base...")
|
||||
else
|
||||
PRINT_ARRAY+=("- Excluding [DOCKER] files in code base...")
|
||||
fi
|
||||
if [[ ${VALIDATE_GO} == "true" ]]; then
|
||||
PRINT_ARRAY+=("- Validating [GOLANG] files in code base...")
|
||||
else
|
||||
PRINT_ARRAY+=("- Excluding [GOLANG] files in code base...")
|
||||
fi
|
||||
if [[ ${VALIDATE_TERRAFORM} == "true" ]]; then
|
||||
PRINT_ARRAY+=("- Validating [TERRAFORM] files in code base...")
|
||||
else
|
||||
PRINT_ARRAY+=("- Excluding [TERRAFORM] files in code base...")
|
||||
fi
|
||||
if [[ ${VALIDATE_POWERSHELL} == "true" ]]; then
|
||||
PRINT_ARRAY+=("- Validating [POWERSHELL] files in code base...")
|
||||
else
|
||||
PRINT_ARRAY+=("- Excluding [POWERSHELL] files in code base...")
|
||||
fi
|
||||
if [[ ${VALIDATE_ARM} == "true" ]]; then
|
||||
PRINT_ARRAY+=("- Validating [ARM] files in code base...")
|
||||
else
|
||||
PRINT_ARRAY+=("- Excluding [ARM] files in code base...")
|
||||
fi
|
||||
if [[ ${VALIDATE_CSS} == "true" ]]; then
|
||||
PRINT_ARRAY+=("- Validating [CSS] files in code base...")
|
||||
else
|
||||
PRINT_ARRAY+=("- Excluding [CSS] files in code base...")
|
||||
fi
|
||||
if [[ ${VALIDATE_CLOJURE} == "true" ]]; then
|
||||
PRINT_ARRAY+=("- Validating [CLOJURE] files in code base...")
|
||||
else
|
||||
PRINT_ARRAY+=("- Excluding [CLOJURE] files in code base...")
|
||||
fi
|
||||
if [[ ${VALIDATE_ENV} == "true" ]]; then
|
||||
PRINT_ARRAY+=("- Validating [ENV] files in code base...")
|
||||
else
|
||||
PRINT_ARRAY+=("- Excluding [ENV] files in code base...")
|
||||
fi
|
||||
if [[ ${VALIDATE_KOTLIN} == "true" ]]; then
|
||||
PRINT_ARRAY+=("- Validating [KOTLIN] files in code base...")
|
||||
else
|
||||
PRINT_ARRAY+=("- Excluding [KOTLIN] files in code base...")
|
||||
fi
|
||||
if [[ ${VALIDATE_OPENAPI} == "true" ]]; then
|
||||
PRINT_ARRAY+=("- Validating [OPENAPI] files in code base...")
|
||||
else
|
||||
PRINT_ARRAY+=("- Excluding [OPENAPI] files in code base...")
|
||||
fi
|
||||
if [[ ${VALIDATE_PROTOBUF} == "true" ]]; then
|
||||
PRINT_ARRAY+=("- Validating [PROTOBUF] files in code base...")
|
||||
else
|
||||
PRINT_ARRAY+=("- Excluding [PROTOBUF] files in code base...")
|
||||
fi
|
||||
if [[ ${VALIDATE_DART} == "true" ]]; then
|
||||
PRINT_ARRAY+=("- Validating [DART] files in code base...")
|
||||
else
|
||||
PRINT_ARRAY+=("- Excluding [DART] files in code base...")
|
||||
fi
|
||||
if [[ ${VALIDATE_EDITORCONFIG} == "true" ]]; then
|
||||
PRINT_ARRAY+=("- Validating [EDITORCONFIG] files in code base...")
|
||||
else
|
||||
PRINT_ARRAY+=("- Excluding [EDITORCONFIG] files in code base...")
|
||||
fi
|
||||
if [[ ${VALIDATE_HTML} == "true" ]]; then
|
||||
PRINT_ARRAY+=("- Validating [HTML] files in code base...")
|
||||
else
|
||||
PRINT_ARRAY+=("- Excluding [HTML] files in code base...")
|
||||
fi
|
||||
# Loop through all languages
|
||||
for LANGUAGE in "${LANGUAGE_ARRAY[@]}"; do
|
||||
# build the variable
|
||||
VALIDATE_LANGUAGE="VALIDATE_${LANGUAGE}"
|
||||
if [[ ${!VALIDATE_LANGUAGE} == "true" ]]; then
|
||||
# We need to validate
|
||||
PRINT_ARRAY+=("- Validating [$LANGUAGE] files in code base...")
|
||||
else
|
||||
# We are skipping the language
|
||||
PRINT_ARRAY+=("- Excluding [$LANGUAGE] files in code base...")
|
||||
fi
|
||||
done
|
||||
|
||||
##############################
|
||||
# Validate Ansible Directory #
|
||||
|
@@ -783,23 +166,19 @@ function GetValidationInfo() {
ACTIONS_RUNNER_DEBUG="true"
fi

###################
# Debug on runner #
###################
if [[ ${ACTIONS_RUNNER_DEBUG} == "true" ]]; then
###########################
# Print the validate info #
###########################
for LINE in "${PRINT_ARRAY[@]}"; do
echo "${LINE}"
done
###########################
# Print the validate info #
###########################
for LINE in "${PRINT_ARRAY[@]}"; do
debug "${LINE}"
done

echo "--- DEBUG INFO ---"
echo "---------------------------------------------"
RUNNER=$(whoami)
echo "Runner:[${RUNNER}]"
echo "ENV:"
printenv
echo "---------------------------------------------"
fi
debug "--- DEBUG INFO ---"
debug "---------------------------------------------"
RUNNER=$(whoami)
debug "Runner:[${RUNNER}]"
PRINTENV=$(printenv)
debug "ENV:"
debug "${PRINTENV}"
debug "---------------------------------------------"
}

235 lib/worker.sh
@@ -17,7 +17,7 @@ function LintCodebase() {
LINTER_NAME="${1}" && shift # Pull the variable and remove from array path (Example: jsonlint)
LINTER_COMMAND="${1}" && shift # Pull the variable and remove from array path (Example: jsonlint -c ConfigFile /path/to/file)
FILE_EXTENSIONS="${1}" && shift # Pull the variable and remove from array path (Example: *.json)
FILE_ARRAY=("$@") # Array of files to validate (Example: ${FILE_ARRAY_JSON})
FILE_ARRAY=("$@") # Array of files to validate (Example: ${FILE_ARRAY_JSON})

######################
# Create Print Array #

@@ -49,14 +49,11 @@ function LintCodebase() {
##############################
if [ ${ERROR_CODE} -ne 0 ]; then
# Failed
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to find [${LINTER_NAME}] in system!${NC}"
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${VALIDATE_INSTALL_CMD}]${NC}"
exit 1
error "Failed to find [${LINTER_NAME}] in system!"
fatal "[${VALIDATE_INSTALL_CMD}]"
else
# Success
if [[ ${ACTIONS_RUNNER_DEBUG} == "true" ]]; then
echo -e "${NC}${F[B]}Successfully found binary for ${F[W]}[${LINTER_NAME}]${F[B]} in system location: ${F[W]}[${VALIDATE_INSTALL_CMD}]${NC}"
fi
debug "Successfully found binary for ${F[W]}[${LINTER_NAME}]${F[B]} in system location: ${F[W]}[${VALIDATE_INSTALL_CMD}]"
fi

##########################
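For context on the check this hunk reports on, here is a hedged sketch of the surrounding pattern; the lookup command itself sits outside the hunk, so the command -v call and the example linter name are assumptions:

#!/usr/bin/env bash
# shellcheck source=/dev/null
source lib/log.sh                                         # the new log helpers (path assumed)

LINTER_NAME="jsonlint"                                    # example linter name
VALIDATE_INSTALL_CMD=$(command -v "${LINTER_NAME}" 2>&1)  # assumed lookup; keep its output for the message
ERROR_CODE=$?

if [ ${ERROR_CODE} -ne 0 ]; then
  error "Failed to find [${LINTER_NAME}] in system!"
  fatal "[${VALIDATE_INSTALL_CMD}]"                       # fatal() logs and exits 1
else
  debug "Successfully found binary for [${LINTER_NAME}] in system location: [${VALIDATE_INSTALL_CMD}]"
fi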
@ -75,7 +72,7 @@ function LintCodebase() {
|
|||
if [ ${#FILE_ARRAY[@]} -eq 0 ] && [ "${VALIDATE_ALL_CODEBASE}" == "false" ]; then
|
||||
# No files found in commit and user has asked to not validate code base
|
||||
SKIP_FLAG=1
|
||||
# echo " - No files found in changeset to lint for language:[${FILE_TYPE}]"
|
||||
debug " - No files found in changeset to lint for language:[${FILE_TYPE}]"
|
||||
elif [ ${#FILE_ARRAY[@]} -ne 0 ]; then
|
||||
# We have files added to array of files to check
|
||||
LIST_FILES=("${FILE_ARRAY[@]}") # Copy the array into list
|
||||
|
@ -121,13 +118,13 @@ function LintCodebase() {
|
|||
#########################
|
||||
# Print the header info #
|
||||
#########################
|
||||
echo "${LINE}"
|
||||
info "${LINE}"
|
||||
done
|
||||
|
||||
########################################
|
||||
# Prepare context if TAP format output #
|
||||
########################################
|
||||
if IsTAP ; then
|
||||
if IsTAP; then
|
||||
TMPFILE=$(mktemp -q "/tmp/super-linter-${FILE_TYPE}.XXXXXX")
|
||||
INDEX=0
|
||||
mkdir -p "${REPORT_OUTPUT_FOLDER}"
|
||||
|
@ -138,10 +135,11 @@ function LintCodebase() {
|
|||
# Lint the files #
|
||||
##################
|
||||
for FILE in "${LIST_FILES[@]}"; do
|
||||
#####################
|
||||
# Get the file name #
|
||||
#####################
|
||||
###################################
|
||||
# Get the file name and directory #
|
||||
###################################
|
||||
FILE_NAME=$(basename "${FILE}" 2>&1)
|
||||
DIR_NAME=$(dirname "${FILE}" 2>&1)
|
||||
|
||||
#####################################################
|
||||
# Make sure we dont lint node modules or test cases #
|
||||
|
@ -165,8 +163,8 @@ function LintCodebase() {
|
|||
##############
|
||||
# File print #
|
||||
##############
|
||||
echo "---------------------------"
|
||||
echo "File:[${FILE}]"
|
||||
info "---------------------------"
|
||||
info "File:[${FILE}]"
|
||||
|
||||
#################################
|
||||
# Add the language to the array #
|
||||
|
@ -193,6 +191,17 @@ function LintCodebase() {
|
|||
pwsh -NoProfile -NoLogo -Command "${LINTER_COMMAND} ${FILE}; if (\${Error}.Count) { exit 1 }"
|
||||
exit $? 2>&1
|
||||
)
|
||||
###############################################################################
|
||||
# Corner case for groovy as we have to pass it as path and file in ant format #
|
||||
###############################################################################
|
||||
elif [[ ${FILE_TYPE} == "GROOVY" ]]; then
|
||||
#######################################
|
||||
# Lint the file with the updated path #
|
||||
#######################################
|
||||
LINT_CMD=$(
|
||||
cd "${GITHUB_WORKSPACE}" || exit
|
||||
${LINTER_COMMAND} --path "${DIR_NAME}" --files "$FILE_NAME" 2>&1
|
||||
)
|
||||
else
|
||||
################################
|
||||
# Lint the file with the rules #
|
||||
|
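An illustrative sketch of the GROOVY corner case added above: npm-groovy-lint takes a directory plus an ant-style file list rather than one full path, so the file is split first. The input path, and the omission of the rules/config flags, are assumptions for the example:

#!/usr/bin/env bash
FILE="/tmp/workspace/src/Jenkinsfile"     # made-up input path
FILE_NAME=$(basename "${FILE}")           # Jenkinsfile
DIR_NAME=$(dirname "${FILE}")             # /tmp/workspace/src

LINT_CMD=$(
  cd "${DIR_NAME}" || exit                # the real code cd's into GITHUB_WORKSPACE instead
  npm-groovy-lint --path "${DIR_NAME}" --files "${FILE_NAME}" 2>&1
)
echo "${LINT_CMD}"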
@ -202,7 +211,6 @@ function LintCodebase() {
|
|||
${LINTER_COMMAND} "${FILE}" 2>&1
|
||||
)
|
||||
fi
|
||||
|
||||
#######################
|
||||
# Load the error code #
|
||||
#######################
|
||||
|
@ -215,15 +223,16 @@ function LintCodebase() {
|
|||
#########
|
||||
# Error #
|
||||
#########
|
||||
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Found errors in [${LINTER_NAME}] linter!${NC}"
|
||||
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${LINT_CMD}]${NC}"
|
||||
error "Found errors in [${LINTER_NAME}] linter!"
|
||||
error "[${LINT_CMD}]"
|
||||
error "Linter CMD:[${LINTER_COMMAND} ${FILE}]"
|
||||
# Increment the error count
|
||||
(("ERRORS_FOUND_${FILE_TYPE}++"))
|
||||
|
||||
#######################################################
|
||||
# Store the linting as a temporary file in TAP format #
|
||||
#######################################################
|
||||
if IsTAP ; then
|
||||
if IsTAP; then
|
||||
NotOkTap "${INDEX}" "${FILE}" "${TMPFILE}"
|
||||
AddDetailedMessageIfEnabled "${LINT_CMD}" "${TMPFILE}"
|
||||
fi
|
||||
|
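The error path above bumps a per-language counter whose name is assembled at run time; a small standalone sketch of that arithmetic-expansion trick (variable names here are examples, not the full set used by the linter):

#!/usr/bin/env bash
FILE_TYPE="JSON"
ERRORS_FOUND_JSON=0

# The string inside (( )) expands to "ERRORS_FOUND_JSON++" before it is evaluated,
# so one line can increment whichever ERRORS_FOUND_<TYPE> counter applies.
# Note: (( )) returns a non-zero status when the pre-increment value is 0, which
# matters under set -e.
(("ERRORS_FOUND_${FILE_TYPE}++"))
(("ERRORS_FOUND_${FILE_TYPE}++"))

echo "${ERRORS_FOUND_JSON}"   # prints: 2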
@ -231,12 +240,12 @@ function LintCodebase() {
|
|||
###########
|
||||
# Success #
|
||||
###########
|
||||
echo -e "${NC}${F[B]} - File:${F[W]}[${FILE_NAME}]${F[B]} was linted with ${F[W]}[${LINTER_NAME}]${F[B]} successfully${NC}"
|
||||
info " - File:${F[W]}[${FILE_NAME}]${F[B]} was linted with ${F[W]}[${LINTER_NAME}]${F[B]} successfully"
|
||||
|
||||
#######################################################
|
||||
# Store the linting as a temporary file in TAP format #
|
||||
#######################################################
|
||||
if IsTAP ; then
|
||||
if IsTAP; then
|
||||
OkTap "${INDEX}" "${FILE}" "${TMPFILE}"
|
||||
fi
|
||||
fi
|
||||
|
@ -245,7 +254,7 @@ function LintCodebase() {
|
|||
#################################
|
||||
# Generate report in TAP format #
|
||||
#################################
|
||||
if IsTAP && [ ${INDEX} -gt 0 ] ; then
|
||||
if IsTAP && [ ${INDEX} -gt 0 ]; then
|
||||
HeaderTap "${INDEX}" "${REPORT_OUTPUT_FILE}"
|
||||
cat "${TMPFILE}" >> "${REPORT_OUTPUT_FILE}"
|
||||
fi
|
||||
|
@ -262,18 +271,16 @@ function TestCodebase() {
|
|||
LINTER_COMMAND="${3}" # Pull the variable and remove from array path (Example: jsonlint -c ConfigFile /path/to/file)
|
||||
FILE_EXTENSIONS="${4}" # Pull the variable and remove from array path (Example: *.json)
|
||||
INDVIDUAL_TEST_FOLDER="${5}" # Folder for specific tests
|
||||
TESTS_RAN=0 # Incremented when tests are ran, this will help find failed finds
|
||||
TESTS_RAN=0 # Incremented when tests are ran, this will help find failed finds
|
||||
|
||||
################
|
||||
# print header #
|
||||
################
|
||||
echo ""
|
||||
echo "----------------------------------------------"
|
||||
echo "----------------------------------------------"
|
||||
echo "Testing Codebase [${FILE_TYPE}] files..."
|
||||
echo "----------------------------------------------"
|
||||
echo "----------------------------------------------"
|
||||
echo ""
|
||||
info "----------------------------------------------"
|
||||
info "----------------------------------------------"
|
||||
info "Testing Codebase [${FILE_TYPE}] files..."
|
||||
info "----------------------------------------------"
|
||||
info "----------------------------------------------"
|
||||
|
||||
#####################################
|
||||
# Validate we have linter installed #
|
||||
|
@ -290,12 +297,11 @@ function TestCodebase() {
|
|||
##############################
|
||||
if [ ${ERROR_CODE} -ne 0 ]; then
|
||||
# Failed
|
||||
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to find [${LINTER_NAME}] in system!${NC}"
|
||||
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${VALIDATE_INSTALL_CMD}]${NC}"
|
||||
exit 1
|
||||
error "Failed to find [${LINTER_NAME}] in system!"
|
||||
fatal "[${VALIDATE_INSTALL_CMD}]"
|
||||
else
|
||||
# Success
|
||||
echo -e "${NC}${F[B]}Successfully found binary for ${F[W]}[${LINTER_NAME}]${F[B]} in system location: ${F[W]}[${VALIDATE_INSTALL_CMD}]${NC}"
|
||||
info "Successfully found binary for ${F[W]}[${LINTER_NAME}]${F[B]} in system location: ${F[W]}[${VALIDATE_INSTALL_CMD}]"
|
||||
fi
|
||||
|
||||
##########################
|
||||
|
@ -311,7 +317,7 @@ function TestCodebase() {
|
|||
########################################
|
||||
# Prepare context if TAP output format #
|
||||
########################################
|
||||
if IsTAP ; then
|
||||
if IsTAP; then
|
||||
TMPFILE=$(mktemp -q "/tmp/super-linter-${FILE_TYPE}.XXXXXX")
|
||||
mkdir -p "${REPORT_OUTPUT_FOLDER}"
|
||||
REPORT_OUTPUT_FILE="${REPORT_OUTPUT_FOLDER}/super-linter-${FILE_TYPE}.${OUTPUT_FORMAT}"
|
||||
|
@ -325,6 +331,7 @@ function TestCodebase() {
|
|||
# Get the file name #
|
||||
#####################
|
||||
FILE_NAME=$(basename "${FILE}" 2>&1)
|
||||
DIR_NAME=$(dirname "${FILE}" 2>&1)
|
||||
|
||||
############################
|
||||
# Get the file pass status #
|
||||
|
@ -345,8 +352,8 @@ function TestCodebase() {
|
|||
##############
|
||||
# File print #
|
||||
##############
|
||||
echo "---------------------------"
|
||||
echo "File:[${FILE}]"
|
||||
info "---------------------------"
|
||||
info "File:[${FILE}]"
|
||||
|
||||
########################
|
||||
# Set the lint command #
|
||||
|
@ -399,6 +406,17 @@ function TestCodebase() {
|
|||
pwsh -NoProfile -NoLogo -Command "${LINTER_COMMAND} ${FILE}; if (\${Error}.Count) { exit 1 }"
|
||||
exit $? 2>&1
|
||||
)
|
||||
###############################################################################
|
||||
# Corner case for groovy as we have to pass it as path and file in ant format #
|
||||
###############################################################################
|
||||
elif [[ ${FILE_TYPE} == "GROOVY" ]]; then
|
||||
#######################################
|
||||
# Lint the file with the updated path #
|
||||
#######################################
|
||||
LINT_CMD=$(
|
||||
cd "${GITHUB_WORKSPACE}" || exit
|
||||
${LINTER_COMMAND} --path "${DIR_NAME}" --files "$FILE_NAME" 2>&1
|
||||
)
|
||||
else
|
||||
################################
|
||||
# Lint the file with the rules #
|
||||
|
@ -430,21 +448,21 @@ function TestCodebase() {
|
|||
#########
|
||||
# Error #
|
||||
#########
|
||||
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Found errors in [${LINTER_NAME}] linter!${NC}"
|
||||
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${LINT_CMD}]${NC}"
|
||||
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC} Linter CMD:[${LINTER_COMMAND} ${FILE}]${NC}"
|
||||
error "Found errors in [${LINTER_NAME}] linter!"
|
||||
error "[${LINT_CMD}]"
|
||||
error "Linter CMD:[${LINTER_COMMAND} ${FILE}]"
|
||||
# Increment the error count
|
||||
(("ERRORS_FOUND_${FILE_TYPE}++"))
|
||||
else
|
||||
###########
|
||||
# Success #
|
||||
###########
|
||||
echo -e "${NC}${F[B]} - File:${F[W]}[${FILE_NAME}]${F[B]} was linted with ${F[W]}[${LINTER_NAME}]${F[B]} successfully${NC}"
|
||||
info " - File:${F[W]}[${FILE_NAME}]${F[B]} was linted with ${F[W]}[${LINTER_NAME}]${F[B]} successfully"
|
||||
fi
|
||||
#######################################################
|
||||
# Store the linting as a temporary file in TAP format #
|
||||
#######################################################
|
||||
if IsTAP ; then
|
||||
if IsTAP; then
|
||||
OkTap "${TESTS_RAN}" "${FILE_NAME}" "${TMPFILE}"
|
||||
fi
|
||||
else
|
||||
|
@ -458,22 +476,23 @@ function TestCodebase() {
|
|||
#########
|
||||
# Error #
|
||||
#########
|
||||
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Found errors in [${LINTER_NAME}] linter!${NC}"
|
||||
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} This file should have failed test case!${NC}"
|
||||
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${LINT_CMD}]${NC}"
|
||||
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC} Linter CMD:[${LINTER_COMMAND} ${FILE}]${NC}"
|
||||
error "Found errors in [${LINTER_NAME}] linter!"
|
||||
error "This file should have failed test case!"
|
||||
error "Command run:${NC}[\$${LINT_CMD}]"
|
||||
error "[${LINT_CMD}]"
|
||||
error "Linter CMD:[${LINTER_COMMAND} ${FILE}]"
|
||||
# Increment the error count
|
||||
(("ERRORS_FOUND_${FILE_TYPE}++"))
|
||||
else
|
||||
###########
|
||||
# Success #
|
||||
###########
|
||||
echo -e "${NC}${F[B]} - File:${F[W]}[${FILE_NAME}]${F[B]} failed test case with ${F[W]}[${LINTER_NAME}]${F[B]} successfully${NC}"
|
||||
info " - File:${F[W]}[${FILE_NAME}]${F[B]} failed test case with ${F[W]}[${LINTER_NAME}]${F[B]} successfully"
|
||||
fi
|
||||
#######################################################
|
||||
# Store the linting as a temporary file in TAP format #
|
||||
#######################################################
|
||||
if IsTAP ; then
|
||||
if IsTAP; then
|
||||
NotOkTap "${TESTS_RAN}" "${FILE_NAME}" "${TMPFILE}"
|
||||
AddDetailedMessageIfEnabled "${LINT_CMD}" "${TMPFILE}"
|
||||
fi
|
||||
|
@ -483,7 +502,7 @@ function TestCodebase() {
|
|||
###########################################################################
|
||||
# Generate report in TAP format and validate with the expected TAP output #
|
||||
###########################################################################
|
||||
if IsTAP && [ ${TESTS_RAN} -gt 0 ] ; then
|
||||
if IsTAP && [ ${TESTS_RAN} -gt 0 ]; then
|
||||
HeaderTap "${TESTS_RAN}" "${REPORT_OUTPUT_FILE}"
|
||||
cat "${TMPFILE}" >> "${REPORT_OUTPUT_FILE}"
|
||||
|
||||
|
@ -491,24 +510,24 @@ function TestCodebase() {
|
|||
# If expected TAP report exists then compare with the generated report #
|
||||
########################################################################
|
||||
EXPECTED_FILE="${GITHUB_WORKSPACE}/${TEST_CASE_FOLDER}/${INDVIDUAL_TEST_FOLDER}/reports/expected-${FILE_TYPE}.tap"
|
||||
if [ -e "${EXPECTED_FILE}" ] ; then
|
||||
if [ -e "${EXPECTED_FILE}" ]; then
|
||||
TMPFILE=$(mktemp -q "/tmp/diff-${FILE_TYPE}.XXXXXX")
|
||||
## Ignore white spaces, case sensitive
|
||||
if ! diff -a -w -i "${EXPECTED_FILE}" "${REPORT_OUTPUT_FILE}" > "${TMPFILE}" 2>&1; then
|
||||
#############################################
|
||||
# We failed to compare the reporting output #
|
||||
#############################################
|
||||
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to assert TAP output:[${LINTER_NAME}]${NC}"!
|
||||
echo "Please validate the asserts!"
|
||||
error "Failed to assert TAP output:[${LINTER_NAME}]"!
|
||||
info "Please validate the asserts!"
|
||||
cat "${TMPFILE}"
|
||||
exit 1
|
||||
else
|
||||
# Success
|
||||
echo -e "${NC}${F[B]}Successfully validation in the expected TAP format for ${F[W]}[${LINTER_NAME}]${NC}"
|
||||
info "Successfully validation in the expected TAP format for ${F[W]}[${LINTER_NAME}]"
|
||||
fi
|
||||
else
|
||||
echo -e "${NC}${F[Y]}WARN!${NC} No TAP expected file found at:[${EXPECTED_FILE}]${NC}"
|
||||
echo "skipping report assertions"
|
||||
warn "No TAP expected file found at:[${EXPECTED_FILE}]"
|
||||
info "skipping report assertions"
|
||||
#####################################
|
||||
# Append the file type to the array #
|
||||
#####################################
|
||||
|
@ -523,9 +542,8 @@ function TestCodebase() {
|
|||
#################################################
|
||||
# We failed to find files and no tests were ran #
|
||||
#################################################
|
||||
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to find any tests ran for the Linter:[${LINTER_NAME}]${NC}"!
|
||||
echo "Please validate logic or that tests exist!"
|
||||
exit 1
|
||||
error "Failed to find any tests ran for the Linter:[${LINTER_NAME}]"!
|
||||
fatal "Please validate logic or that tests exist!"
|
||||
fi
|
||||
}
|
||||
################################################################################
|
||||
|
@ -542,11 +560,9 @@ function RunTestCases() {
|
|||
#################
|
||||
# Header prints #
|
||||
#################
|
||||
echo ""
|
||||
echo "----------------------------------------------"
|
||||
echo "-------------- TEST CASE RUN -----------------"
|
||||
echo "----------------------------------------------"
|
||||
echo ""
|
||||
info "----------------------------------------------"
|
||||
info "-------------- TEST CASE RUN -----------------"
|
||||
info "----------------------------------------------"
|
||||
|
||||
#######################
|
||||
# Test case languages #
|
||||
|
@ -555,7 +571,7 @@ function RunTestCases() {
|
|||
TestCodebase "ANSIBLE" "ansible-lint" "ansible-lint -v -c ${ANSIBLE_LINTER_RULES}" ".*\.\(yml\|yaml\)\$" "ansible"
|
||||
TestCodebase "ARM" "arm-ttk" "Import-Module ${ARM_TTK_PSD1} ; \${config} = \$(Import-PowerShellDataFile -Path ${ARM_LINTER_RULES}) ; Test-AzTemplate @config -TemplatePath" ".*\.\(json\)\$" "arm"
|
||||
TestCodebase "BASH" "shellcheck" "shellcheck --color" ".*\.\(sh\|bash\|dash\|ksh\)\$" "shell"
|
||||
TestCodebase "CFN" "cfn-lint" "cfn-lint --config-file ${CFN_LINTER_RULES}" ".*\.\(json\|yml\|yaml\)\$" "cfn"
|
||||
TestCodebase "CLOUDFORMATION" "cfn-lint" "cfn-lint --config-file ${CLOUDFORMATION_LINTER_RULES}" ".*\.\(json\|yml\|yaml\)\$" "cloudformation"
|
||||
TestCodebase "CLOJURE" "clj-kondo" "clj-kondo --config ${CLOJURE_LINTER_RULES} --lint" ".*\.\(clj\|cljs\|cljc\|edn\)\$" "clojure"
|
||||
TestCodebase "COFFEESCRIPT" "coffeelint" "coffeelint -f ${COFFEESCRIPT_LINTER_RULES}" ".*\.\(coffee\)\$" "coffeescript"
|
||||
TestCodebase "CSS" "stylelint" "stylelint --config ${CSS_LINTER_RULES}" ".*\.\(css\)\$" "css"
|
||||
|
@ -564,25 +580,33 @@ function RunTestCases() {
|
|||
TestCodebase "EDITORCONFIG" "editorconfig-checker" "editorconfig-checker" ".*\.ext$" "editorconfig-checker"
|
||||
TestCodebase "ENV" "dotenv-linter" "dotenv-linter" ".*\.\(env\)\$" "env"
|
||||
TestCodebase "GO" "golangci-lint" "golangci-lint run -c ${GO_LINTER_RULES}" ".*\.\(go\)\$" "golang"
|
||||
TestCodebase "GROOVY" "npm-groovy-lint" "npm-groovy-lint -c $GROOVY_LINTER_RULES --failon error" ".*\.\(groovy\|jenkinsfile\|gradle\)\$" "groovy"
|
||||
TestCodebase "HTML" "htmlhint" "htmlhint --config ${HTML_LINTER_RULES}" ".*\.\(html\)\$" "html"
|
||||
TestCodebase "JAVASCRIPT_ES" "eslint" "eslint --no-eslintrc -c ${JAVASCRIPT_LINTER_RULES}" ".*\.\(js\)\$" "javascript"
|
||||
TestCodebase "JAVASCRIPT_STANDARD" "standard" "standard ${JAVASCRIPT_STANDARD_LINTER_RULES}" ".*\.\(js\)\$" "javascript"
|
||||
TestCodebase "JSON" "jsonlint" "jsonlint" ".*\.\(json\)\$" "json"
|
||||
TestCodebase "KOTLIN" "ktlint" "ktlint" ".*\.\(kt\|kts\)\$" "kotlin"
|
||||
TestCodebase "LUA" "lua" "luacheck" ".*\.\(lua\)\$" "lua"
|
||||
TestCodebase "MARKDOWN" "markdownlint" "markdownlint -c ${MARKDOWN_LINTER_RULES}" ".*\.\(md\)\$" "markdown"
|
||||
TestCodebase "PERL" "perl" "perl -Mstrict -cw" ".*\.\(pl\)\$" "perl"
|
||||
TestCodebase "PHP" "php" "php -l" ".*\.\(php\)\$" "php"
|
||||
TestCodebase "PHP_BUILTIN" "php" "php -l" ".*\.\(php\)\$" "php"
|
||||
TestCodebase "PHP_PHPCS" "phpcs" "phpcs --standard=${PHP_PHPCS_LINTER_RULES}" ".*\.\(php\)\$" "php"
|
||||
TestCodebase "PHP_PHPSTAN" "phpstan" "phpstan analyse --no-progress --no-ansi -c ${PHP_PHPSTAN_LINTER_RULES}" ".*\.\(php\)\$" "php"
|
||||
TestCodebase "PHP_PSALM" "psalm" "psalm --config=${PHP_PSALM_LINTER_RULES}" ".*\.\(php\)\$" "php"
|
||||
TestCodebase "OPENAPI" "spectral" "spectral lint -r ${OPENAPI_LINTER_RULES}" ".*\.\(ymlopenapi\|jsonopenapi\)\$" "openapi"
|
||||
TestCodebase "POWERSHELL" "pwsh" "Invoke-ScriptAnalyzer -EnableExit -Settings ${POWERSHELL_LINTER_RULES} -Path" ".*\.\(ps1\|psm1\|psd1\|ps1xml\|pssc\|psrc\|cdxml\)\$" "powershell"
|
||||
TestCodebase "PROTOBUF" "protolint" "protolint lint --config_path ${PROTOBUF_LINTER_RULES}" ".*\.\(proto\)\$" "protobuf"
|
||||
TestCodebase "PYTHON" "pylint" "pylint --rcfile ${PYTHON_LINTER_RULES}" ".*\.\(py\)\$" "python"
|
||||
TestCodebase "PYTHON_PYLINT" "pylint" "pylint --rcfile ${PYTHON_PYLINT_LINTER_RULES}" ".*\.\(py\)\$" "python"
|
||||
TestCodebase "PYTHON_FLAKE8" "flake8" "flake8 --config ${PYTHON_FLAKE8_LINTER_RULES}" ".*\.\(py\)\$" "python"
|
||||
TestCodebase "RAKU" "raku" "raku -c" ".*\.\(raku\|rakumod\|rakutest\|pm6\|pl6\|p6\)\$" "raku"
|
||||
TestCodebase "RUBY" "rubocop" "rubocop -c ${RUBY_LINTER_RULES}" ".*\.\(rb\)\$" "ruby"
|
||||
TestCodebase "STATES" "asl-validator" "asl-validator --json-path" ".*\.\(json\)\$" "states"
|
||||
TestCodebase "TERRAFORM" "tflint" "tflint -c ${TERRAFORM_LINTER_RULES}" ".*\.\(tf\)\$" "terraform"
|
||||
TestCodebase "TERRAFORM_TERRASCAN" "terrascan" "terrascan -f " ".*\.\(tf\)\$" "terraform_terrascan"
|
||||
TestCodebase "TYPESCRIPT_ES" "eslint" "eslint --no-eslintrc -c ${TYPESCRIPT_LINTER_RULES}" ".*\.\(ts\)\$" "typescript"
|
||||
TestCodebase "TYPESCRIPT_STANDARD" "standard" "standard --parser @typescript-eslint/parser --plugin @typescript-eslint/eslint-plugin ${TYPESCRIPT_STANDARD_LINTER_RULES}" ".*\.\(ts\)\$" "typescript"
|
||||
TestCodebase "XML" "xmllint" "xmllint" ".*\.\(xml\)\$" "xml"
|
||||
TestCodebase "YML" "yamllint" "yamllint -c ${YAML_LINTER_RULES}" ".*\.\(yml\|yaml\)\$" "yml"
|
||||
TestCodebase "YAML" "yamllint" "yamllint -c ${YAML_LINTER_RULES}" ".*\.\(yml\|yaml\)\$" "yaml"
|
||||
|
||||
#################
|
||||
# Footer prints #
|
||||
|
@ -629,16 +653,12 @@ function LintAnsibleFiles() {
|
|||
##############################
|
||||
if [ ${ERROR_CODE} -ne 0 ]; then
|
||||
# Failed
|
||||
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to find ${LINTER_NAME} in system!${NC}"
|
||||
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${VALIDATE_INSTALL_CMD}]${NC}"
|
||||
exit 1
|
||||
error "Failed to find ${LINTER_NAME} in system!"
|
||||
fatal "[${VALIDATE_INSTALL_CMD}]"
|
||||
else
|
||||
# Success
|
||||
if [[ ${ACTIONS_RUNNER_DEBUG} == "true" ]]; then
|
||||
# Success
|
||||
echo -e "${NC}${F[B]}Successfully found binary in system${NC}"
|
||||
echo "Location:[${VALIDATE_INSTALL_CMD}]"
|
||||
fi
|
||||
debug "Successfully found binary in system"
|
||||
debug "Location:[${VALIDATE_INSTALL_CMD}]"
|
||||
fi
|
||||
|
||||
##########################
|
||||
|
@ -673,7 +693,7 @@ function LintAnsibleFiles() {
|
|||
###################################
|
||||
# Send message that were skipping #
|
||||
###################################
|
||||
#echo "- Skipping Ansible lint run as file(s) that were modified were read only..."
|
||||
debug "- Skipping Ansible lint run as file(s) that were modified were read only..."
|
||||
############################
|
||||
# Create flag to skip loop #
|
||||
############################
|
||||
|
@ -688,14 +708,14 @@ function LintAnsibleFiles() {
|
|||
#########################
|
||||
# Print the header line #
|
||||
#########################
|
||||
echo "${LINE}"
|
||||
info "${LINE}"
|
||||
done
|
||||
fi
|
||||
|
||||
########################################
|
||||
# Prepare context if TAP output format #
|
||||
########################################
|
||||
if IsTAP ; then
|
||||
if IsTAP; then
|
||||
TMPFILE=$(mktemp -q "/tmp/super-linter-${FILE_TYPE}.XXXXXX")
|
||||
INDEX=0
|
||||
mkdir -p "${REPORT_OUTPUT_FOLDER}"
|
||||
|
@ -728,8 +748,8 @@ function LintAnsibleFiles() {
|
|||
##############
|
||||
# File print #
|
||||
##############
|
||||
echo "---------------------------"
|
||||
echo "File:[${FILE}]"
|
||||
info "---------------------------"
|
||||
info "File:[${FILE}]"
|
||||
|
||||
################################
|
||||
# Lint the file with the rules #
|
||||
|
@ -748,15 +768,15 @@ function LintAnsibleFiles() {
|
|||
#########
|
||||
# Error #
|
||||
#########
|
||||
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Found errors in [${LINTER_NAME}] linter!${NC}"
|
||||
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${LINT_CMD}]${NC}"
|
||||
error "Found errors in [${LINTER_NAME}] linter!"
|
||||
error "[${LINT_CMD}]"
|
||||
# Increment error count
|
||||
((ERRORS_FOUND_ANSIBLE++))
|
||||
|
||||
#######################################################
|
||||
# Store the linting as a temporary file in TAP format #
|
||||
#######################################################
|
||||
if IsTAP ; then
|
||||
if IsTAP; then
|
||||
NotOkTap "${INDEX}" "${FILE}" "${TMPFILE}"
|
||||
AddDetailedMessageIfEnabled "${LINT_CMD}" "${TMPFILE}"
|
||||
fi
|
||||
|
@ -765,12 +785,12 @@ function LintAnsibleFiles() {
|
|||
###########
|
||||
# Success #
|
||||
###########
|
||||
echo -e "${NC}${F[B]} - File:${F[W]}[${FILE_NAME}]${F[B]} was linted with ${F[W]}[${LINTER_NAME}]${F[B]} successfully${NC}"
|
||||
info " - File:${F[W]}[${FILE_NAME}]${F[B]} was linted with ${F[W]}[${LINTER_NAME}]${F[B]} successfully"
|
||||
|
||||
#######################################################
|
||||
# Store the linting as a temporary file in TAP format #
|
||||
#######################################################
|
||||
if IsTAP ; then
|
||||
if IsTAP; then
|
||||
OkTap "${INDEX}" "${FILE}" "${TMPFILE}"
|
||||
fi
|
||||
fi
|
||||
|
@ -779,27 +799,22 @@ function LintAnsibleFiles() {
|
|||
#################################
|
||||
# Generate report in TAP format #
|
||||
#################################
|
||||
if IsTAP && [ ${INDEX} -gt 0 ] ; then
|
||||
if IsTAP && [ ${INDEX} -gt 0 ]; then
|
||||
HeaderTap "${INDEX}" "${REPORT_OUTPUT_FILE}"
|
||||
cat "${TMPFILE}" >> "${REPORT_OUTPUT_FILE}"
|
||||
fi
|
||||
else # No ansible directory found in path
|
||||
###############################
|
||||
# Check to see if debug is on #
|
||||
###############################
|
||||
if [[ ${ACTIONS_RUNNER_DEBUG} == "true" ]]; then
|
||||
########################
|
||||
# No Ansible dir found #
|
||||
########################
|
||||
echo -e "${NC}${F[Y]}WARN!${NC} No Ansible base directory found at:[${ANSIBLE_DIRECTORY}]${NC}"
|
||||
echo "skipping ansible lint"
|
||||
fi
|
||||
else
|
||||
########################
|
||||
# No Ansible dir found #
|
||||
########################
|
||||
warn "No Ansible base directory found at:[${ANSIBLE_DIRECTORY}]"
|
||||
debug "skipping ansible lint"
|
||||
fi
|
||||
}
################################################################################
#### Function IsTap ############################################################
function IsTAP() {
if [ "${OUTPUT_FORMAT}" == "tap" ] ; then
if [ "${OUTPUT_FORMAT}" == "tap" ]; then
return 0
else
return 1
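IsTAP is used as a plain shell predicate throughout LintAnsibleFiles: returning 0 or 1 lets callers write `if IsTAP; then` directly. A self-contained copy of the same pattern, completed with the closing lines that the diff context cuts off above (that completion is an assumption):

#!/usr/bin/env bash
# Predicate pattern used by IsTAP: the exit status doubles as the boolean.
OUTPUT_FORMAT="${OUTPUT_FORMAT:-tap}"

IsTAP() {
  if [ "${OUTPUT_FORMAT}" == "tap" ]; then
    return 0
  else
    return 1
  fi
}

if IsTAP; then
  echo "TAP output format requested"
else
  echo "standard output format"
fi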
@@ -809,7 +824,7 @@ function IsTAP() {
#### Function TransformTAPDetails ##############################################
function TransformTAPDetails() {
DATA=${1}
if [ -n "${DATA}" ] && [ "${OUTPUT_DETAILS}" == "detailed" ] ; then
if [ -n "${DATA}" ] && [ "${OUTPUT_DETAILS}" == "detailed" ]; then
#########################################################
# Transform new lines to \\n, remove colours and colons #
#########################################################
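The body of TransformTAPDetails falls outside this hunk's context; only its banner comment survives. Purely as an illustration of what "transform new lines to \n, remove colours and colons" could mean, and not the project's actual implementation, one portable way to do it is:

# Illustration only -- not the elided implementation.
# Folds real newlines into the literal two characters "\n",
# strips ANSI colour escapes, and drops colons.
TransformTAPDetailsSketch() {
  DATA=${1}
  if [ -n "${DATA}" ] && [ "${OUTPUT_DETAILS}" == "detailed" ]; then
    echo "${DATA}" |
      sed -e $'s/\x1b\[[0-9;]*m//g' -e 's/://g' |
      awk 'BEGIN { ORS = "\\n" } { print }'
  fi
}

OUTPUT_DETAILS="detailed"
TransformTAPDetailsSketch "$(printf 'line one:\nline two')"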
@@ -822,8 +837,8 @@ function HeaderTap() {
################
# Pull in Vars #
################
INDEX="${1}" # File being validated
OUTPUT_FILE="${2}" # Output location
INDEX="${1}" # File being validated
OUTPUT_FILE="${2}" # Output location

###################
# Print the goods #
@@ -836,9 +851,9 @@ function OkTap() {
################
# Pull in Vars #
################
INDEX="${1}" # Location
FILE="${2}" # File being validated
TEMP_FILE="${3}" # Temp file location
INDEX="${1}" # Location
FILE="${2}" # File being validated
TEMP_FILE="${3}" # Temp file location

###################
# Print the goods #
@@ -851,9 +866,9 @@ function NotOkTap() {
################
# Pull in Vars #
################
INDEX="${1}" # Location
FILE="${2}" # File being validated
TEMP_FILE="${3}" # Temp file location
INDEX="${1}" # Location
FILE="${2}" # File being validated
TEMP_FILE="${3}" # Temp file location

###################
# Print the goods #
@@ -866,14 +881,14 @@ function AddDetailedMessageIfEnabled() {
################
# Pull in Vars #
################
LINT_CMD="${1}" # Linter command
TEMP_FILE="${2}" # Temp file
LINT_CMD="${1}" # Linter command
TEMP_FILE="${2}" # Temp file

####################
# Check the return #
####################
DETAILED_MSG=$(TransformTAPDetails "${LINT_CMD}")
if [ -n "${DETAILED_MSG}" ] ; then
if [ -n "${DETAILED_MSG}" ]; then
printf " ---\n message: %s\n ...\n" "${DETAILED_MSG}" >> "${TEMP_FILE}"
fi
}
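The printf above is the one piece of the TAP plumbing whose exact output is visible in this diff: it appends a YAML-style diagnostic block under the preceding "not ok" line. Run in isolation with a made-up message:

# Stand-alone demo of the diagnostic block AddDetailedMessageIfEnabled appends.
# The message text is hypothetical.
TEMP_FILE=$(mktemp)
DETAILED_MSG='ansible-lint found 2 issues\nsee log above'
printf " ---\n message: %s\n ...\n" "${DETAILED_MSG}" >> "${TEMP_FILE}"
cat "${TEMP_FILE}"
# Prints:
#  ---
#  message: ansible-lint found 2 issues\nsee log above
#  ...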
545 package-lock.json generated

@@ -1,545 +0,0 @@
{
"requires": true,
"lockfileVersion": 1,
"dependencies": {
"@types/color-name": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/@types/color-name/-/color-name-1.1.1.tgz",
"integrity": "sha512-rr+OQyAjxze7GgWrSaJwydHStIhHq2lvY3BOC2Mj7KnzI7XK0Uw1TOOdI9lDoajEbSWLiYgoo4f1R51erQfhPQ=="
},
"ajv": {
"version": "6.12.3",
"resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.3.tgz",
"integrity": "sha512-4K0cK3L1hsqk9xIb2z9vs/XU+PGJZ9PNpJRDS9YLzmNdX6jmVPfamLvTJr0aDAusnHyCHO6MjzlkAsgtqp9teA==",
"requires": {
"fast-deep-equal": "^3.1.1",
"fast-json-stable-stringify": "^2.0.0",
"json-schema-traverse": "^0.4.1",
"uri-js": "^4.2.2"
}
},
"ansi-styles": {
"version": "4.2.1",
"resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.2.1.tgz",
"integrity": "sha512-9VGjrMsG1vePxcSweQsN20KY/c4zN0h9fLjqAbwbPfahM3t+NL+M9HC8xeXG2I8pX5NoamTGNuomEUFI7fcUjA==",
"requires": {
"@types/color-name": "^1.1.1",
"color-convert": "^2.0.1"
}
},
"asn1": {
"version": "0.2.4",
"resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.4.tgz",
"integrity": "sha512-jxwzQpLQjSmWXgwaCZE9Nz+glAG01yF1QnWgbhGwHI5A6FRIEY6IVqtHhIepHqI7/kyEyQEagBC5mBEFlIYvdg==",
"requires": {
"safer-buffer": "~2.1.0"
}
},
"assert-plus": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz",
"integrity": "sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU="
},
"async": {
"version": "3.2.0",
"resolved": "https://registry.npmjs.org/async/-/async-3.2.0.tgz",
"integrity": "sha512-TR2mEZFVOj2pLStYxLht7TyfuRzaydfpxr3k9RpHIzMgw7A64dzsdqCxH1WJyQdoe8T10nDXd9wnEigmiuHIZw=="
},
"asynckit": {
"version": "0.4.0",
"resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz",
"integrity": "sha1-x57Zf380y48robyXkLzDZkdLS3k="
},
"aws-sign2": {
"version": "0.7.0",
"resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz",
"integrity": "sha1-tG6JCTSpWR8tL2+G1+ap8bP+dqg="
},
"aws4": {
"version": "1.10.0",
"resolved": "https://registry.npmjs.org/aws4/-/aws4-1.10.0.tgz",
"integrity": "sha512-3YDiu347mtVtjpyV3u5kVqQLP242c06zwDOgpeRnybmXlYYsLbtTrUBUm8i8srONt+FWobl5aibnU1030PeeuA=="
},
"balanced-match": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz",
"integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c="
},
"bcrypt-pbkdf": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz",
"integrity": "sha1-pDAdOJtqQ/m2f/PKEaP2Y342Dp4=",
"requires": {
"tweetnacl": "^0.14.3"
}
},
"brace-expansion": {
"version": "1.1.11",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz",
"integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==",
"requires": {
"balanced-match": "^1.0.0",
"concat-map": "0.0.1"
}
},
"caseless": {
"version": "0.12.0",
"resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz",
"integrity": "sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw="
},
"chalk": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/chalk/-/chalk-4.0.0.tgz",
"integrity": "sha512-N9oWFcegS0sFr9oh1oz2d7Npos6vNoWW9HvtCg5N1KRFpUhaAhvTv5Y58g880fZaEYSNm3qDz8SU1UrGvp+n7A==",
"requires": {
"ansi-styles": "^4.1.0",
"supports-color": "^7.1.0"
}
},
"color-convert": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz",
"integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==",
"requires": {
"color-name": "~1.1.4"
}
},
"color-name": {
"version": "1.1.4",
"resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz",
"integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA=="
},
"combined-stream": {
"version": "1.0.8",
"resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz",
"integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==",
"requires": {
"delayed-stream": "~1.0.0"
}
},
"commander": {
"version": "5.1.0",
"resolved": "https://registry.npmjs.org/commander/-/commander-5.1.0.tgz",
"integrity": "sha512-P0CysNDQ7rtVw4QIQtm+MRxV66vKFSvlsQvGYXZWR3qFU0jlMKHZZZgw8e+8DSah4UDKMqnknRDQz+xuQXQ/Zg=="
},
"concat-map": {
"version": "0.0.1",
"resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz",
"integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s="
},
"core-util-is": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz",
"integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac="
},
"dashdash": {
"version": "1.14.1",
"resolved": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.1.tgz",
"integrity": "sha1-hTz6D3y+L+1d4gMmuN1YEDX24vA=",
"requires": {
"assert-plus": "^1.0.0"
}
},
"delayed-stream": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz",
"integrity": "sha1-3zrhmayt+31ECqrgsp4icrJOxhk="
},
"ecc-jsbn": {
"version": "0.1.2",
"resolved": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz",
"integrity": "sha1-OoOpBOVDUyh4dMVkt1SThoSamMk=",
"requires": {
"jsbn": "~0.1.0",
"safer-buffer": "^2.1.0"
}
},
"extend": {
"version": "3.0.2",
"resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz",
"integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g=="
},
"extsprintf": {
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz",
"integrity": "sha1-lpGEQOMEGnpBT4xS48V06zw+HgU="
},
"fast-deep-equal": {
"version": "3.1.3",
"resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz",
"integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q=="
},
"fast-json-stable-stringify": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz",
"integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw=="
},
"forever-agent": {
"version": "0.6.1",
"resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz",
"integrity": "sha1-+8cfDEGt6zf5bFd60e1C2P2sypE="
},
"form-data": {
"version": "2.3.3",
"resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz",
"integrity": "sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ==",
"requires": {
"asynckit": "^0.4.0",
"combined-stream": "^1.0.6",
"mime-types": "^2.1.12"
}
},
"fs.realpath": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz",
"integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8="
},
"getpass": {
"version": "0.1.7",
"resolved": "https://registry.npmjs.org/getpass/-/getpass-0.1.7.tgz",
"integrity": "sha1-Xv+OPmhNVprkyysSgmBOi6YhSfo=",
"requires": {
"assert-plus": "^1.0.0"
}
},
"glob": {
"version": "7.1.6",
"resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz",
"integrity": "sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==",
"requires": {
"fs.realpath": "^1.0.0",
"inflight": "^1.0.4",
"inherits": "2",
"minimatch": "^3.0.4",
"once": "^1.3.0",
"path-is-absolute": "^1.0.0"
}
},
"glob-base": {
"version": "0.3.0",
"resolved": "https://registry.npmjs.org/glob-base/-/glob-base-0.3.0.tgz",
"integrity": "sha1-27Fk9iIbHAscz4Kuoyi0l98Oo8Q=",
"requires": {
"glob-parent": "^2.0.0",
"is-glob": "^2.0.0"
}
},
"glob-parent": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-2.0.0.tgz",
"integrity": "sha1-gTg9ctsFT8zPUzbaqQLxgvbtuyg=",
"requires": {
"is-glob": "^2.0.0"
}
},
"har-schema": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/har-schema/-/har-schema-2.0.0.tgz",
"integrity": "sha1-qUwiJOvKwEeCoNkDVSHyRzW37JI="
},
"har-validator": {
"version": "5.1.3",
"resolved": "https://registry.npmjs.org/har-validator/-/har-validator-5.1.3.tgz",
"integrity": "sha512-sNvOCzEQNr/qrvJgc3UG/kD4QtlHycrzwS+6mfTrrSq97BvaYcPZZI1ZSqGSPR73Cxn4LKTD4PttRwfU7jWq5g==",
"requires": {
"ajv": "^6.5.5",
"har-schema": "^2.0.0"
}
},
"has-flag": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz",
"integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ=="
},
"htmlhint": {
"version": "0.14.1",
"resolved": "https://registry.npmjs.org/htmlhint/-/htmlhint-0.14.1.tgz",
"integrity": "sha512-VWKrljlwF8tEKH48YPfC30zYKhrsMqm70d7vXswivEqd3DSva8ZlIzfeCa3YWFEFRIIhiXKgKurlqEpCtYMCAA==",
"dev": true,
"requires": {
"async": "3.2.0",
"chalk": "4.0.0",
"commander": "5.1.0",
"glob": "7.1.6",
"parse-glob": "3.0.4",
"request": "2.88.2",
"strip-json-comments": "3.1.0",
"xml": "1.0.1"
}
},
"http-signature": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz",
"integrity": "sha1-muzZJRFHcvPZW2WmCruPfBj7rOE=",
"requires": {
"assert-plus": "^1.0.0",
"jsprim": "^1.2.2",
"sshpk": "^1.7.0"
}
},
"inflight": {
"version": "1.0.6",
"resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz",
"integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=",
"requires": {
"once": "^1.3.0",
"wrappy": "1"
}
},
"inherits": {
"version": "2.0.4",
"resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz",
"integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ=="
},
"is-dotfile": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/is-dotfile/-/is-dotfile-1.0.3.tgz",
"integrity": "sha1-pqLzL/0t+wT1yiXs0Pa4PPeYoeE="
},
"is-extglob": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-1.0.0.tgz",
"integrity": "sha1-rEaBd8SUNAWgkvyPKXYMb/xiBsA="
},
"is-glob": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/is-glob/-/is-glob-2.0.1.tgz",
"integrity": "sha1-0Jb5JqPe1WAPP9/ZEZjLCIjC2GM=",
"requires": {
"is-extglob": "^1.0.0"
}
},
"is-typedarray": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz",
"integrity": "sha1-5HnICFjfDBsR3dppQPlgEfzaSpo="
},
"isstream": {
"version": "0.1.2",
"resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz",
"integrity": "sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo="
},
"jsbn": {
"version": "0.1.1",
"resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz",
"integrity": "sha1-peZUwuWi3rXyAdls77yoDA7y9RM="
},
"json-schema": {
"version": "0.2.3",
"resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.2.3.tgz",
"integrity": "sha1-tIDIkuWaLwWVTOcnvT8qTogvnhM="
},
"json-schema-traverse": {
"version": "0.4.1",
"resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz",
"integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg=="
},
"json-stringify-safe": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz",
"integrity": "sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus="
},
"jsprim": {
"version": "1.4.1",
"resolved": "https://registry.npmjs.org/jsprim/-/jsprim-1.4.1.tgz",
"integrity": "sha1-MT5mvB5cwG5Di8G3SZwuXFastqI=",
"requires": {
"assert-plus": "1.0.0",
"extsprintf": "1.3.0",
"json-schema": "0.2.3",
"verror": "1.10.0"
}
},
"mime-db": {
"version": "1.44.0",
"resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.44.0.tgz",
"integrity": "sha512-/NOTfLrsPBVeH7YtFPgsVWveuL+4SjjYxaQ1xtM1KMFj7HdxlBlxeyNLzhyJVx7r4rZGJAZ/6lkKCitSc/Nmpg=="
},
"mime-types": {
"version": "2.1.27",
"resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.27.tgz",
"integrity": "sha512-JIhqnCasI9yD+SsmkquHBxTSEuZdQX5BuQnS2Vc7puQQQ+8yiP5AY5uWhpdv4YL4VM5c6iliiYWPgJ/nJQLp7w==",
"requires": {
"mime-db": "1.44.0"
}
},
"minimatch": {
"version": "3.0.4",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz",
"integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==",
"requires": {
"brace-expansion": "^1.1.7"
}
},
"oauth-sign": {
"version": "0.9.0",
"resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.9.0.tgz",
"integrity": "sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ=="
},
"once": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz",
"integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=",
"requires": {
"wrappy": "1"
}
},
"parse-glob": {
"version": "3.0.4",
"resolved": "https://registry.npmjs.org/parse-glob/-/parse-glob-3.0.4.tgz",
"integrity": "sha1-ssN2z7EfNVE7rdFz7wu246OIORw=",
"requires": {
"glob-base": "^0.3.0",
"is-dotfile": "^1.0.0",
"is-extglob": "^1.0.0",
"is-glob": "^2.0.0"
}
},
"path-is-absolute": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz",
"integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18="
},
"performance-now": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz",
"integrity": "sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns="
},
"psl": {
"version": "1.8.0",
"resolved": "https://registry.npmjs.org/psl/-/psl-1.8.0.tgz",
"integrity": "sha512-RIdOzyoavK+hA18OGGWDqUTsCLhtA7IcZ/6NCs4fFJaHBDab+pDDmDIByWFRQJq2Cd7r1OoQxBGKOaztq+hjIQ=="
},
"punycode": {
"version": "2.1.1",
"resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz",
"integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A=="
},
"qs": {
"version": "6.5.2",
"resolved": "https://registry.npmjs.org/qs/-/qs-6.5.2.tgz",
"integrity": "sha512-N5ZAX4/LxJmF+7wN74pUD6qAh9/wnvdQcjq9TZjevvXzSUo7bfmw91saqMjzGS2xq91/odN2dW/WOl7qQHNDGA=="
},
"request": {
"version": "2.88.2",
"resolved": "https://registry.npmjs.org/request/-/request-2.88.2.tgz",
"integrity": "sha512-MsvtOrfG9ZcrOwAW+Qi+F6HbD0CWXEh9ou77uOb7FM2WPhwT7smM833PzanhJLsgXjN89Ir6V2PczXNnMpwKhw==",
"requires": {
"aws-sign2": "~0.7.0",
"aws4": "^1.8.0",
"caseless": "~0.12.0",
"combined-stream": "~1.0.6",
"extend": "~3.0.2",
"forever-agent": "~0.6.1",
"form-data": "~2.3.2",
"har-validator": "~5.1.3",
"http-signature": "~1.2.0",
"is-typedarray": "~1.0.0",
"isstream": "~0.1.2",
"json-stringify-safe": "~5.0.1",
"mime-types": "~2.1.19",
"oauth-sign": "~0.9.0",
"performance-now": "^2.1.0",
"qs": "~6.5.2",
"safe-buffer": "^5.1.2",
"tough-cookie": "~2.5.0",
"tunnel-agent": "^0.6.0",
"uuid": "^3.3.2"
}
},
"safe-buffer": {
"version": "5.2.1",
"resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz",
"integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ=="
},
"safer-buffer": {
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz",
"integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg=="
},
"sshpk": {
"version": "1.16.1",
"resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.16.1.tgz",
"integrity": "sha512-HXXqVUq7+pcKeLqqZj6mHFUMvXtOJt1uoUx09pFW6011inTMxqI8BA8PM95myrIyyKwdnzjdFjLiE6KBPVtJIg==",
"requires": {
"asn1": "~0.2.3",
"assert-plus": "^1.0.0",
"bcrypt-pbkdf": "^1.0.0",
"dashdash": "^1.12.0",
"ecc-jsbn": "~0.1.1",
"getpass": "^0.1.1",
"jsbn": "~0.1.0",
"safer-buffer": "^2.0.2",
"tweetnacl": "~0.14.0"
}
},
"strip-json-comments": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.0.tgz",
"integrity": "sha512-e6/d0eBu7gHtdCqFt0xJr642LdToM5/cN4Qb9DbHjVx1CP5RyeM+zH7pbecEmDv/lBqb0QH+6Uqq75rxFPkM0w=="
},
"supports-color": {
"version": "7.1.0",
"resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.1.0.tgz",
"integrity": "sha512-oRSIpR8pxT1Wr2FquTNnGet79b3BWljqOuoW/h4oBhxJ/HUbX5nX6JSruTkvXDCFMwDPvsaTTbvMLKZWSy0R5g==",
"requires": {
"has-flag": "^4.0.0"
}
},
"tough-cookie": {
"version": "2.5.0",
"resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.5.0.tgz",
"integrity": "sha512-nlLsUzgm1kfLXSXfRZMc1KLAugd4hqJHDTvc2hDIwS3mZAfMEuMbc03SujMF+GEcpaX/qboeycw6iO8JwVv2+g==",
"requires": {
"psl": "^1.1.28",
"punycode": "^2.1.1"
}
},
"tunnel-agent": {
"version": "0.6.0",
"resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz",
"integrity": "sha1-J6XeoGs2sEoKmWZ3SykIaPD8QP0=",
"requires": {
"safe-buffer": "^5.0.1"
}
},
"tweetnacl": {
"version": "0.14.5",
"resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz",
"integrity": "sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q="
},
"uri-js": {
"version": "4.2.2",
"resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.2.2.tgz",
"integrity": "sha512-KY9Frmirql91X2Qgjry0Wd4Y+YTdrdZheS8TFwvkbLWf/G5KNJDCh6pKL5OZctEW4+0Baa5idK2ZQuELRwPznQ==",
"requires": {
"punycode": "^2.1.0"
}
},
"uuid": {
"version": "3.4.0",
"resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz",
"integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A=="
},
"verror": {
"version": "1.10.0",
"resolved": "https://registry.npmjs.org/verror/-/verror-1.10.0.tgz",
"integrity": "sha1-OhBcoXBTr1XW4nDB+CiGguGNpAA=",
"requires": {
"assert-plus": "^1.0.0",
"core-util-is": "1.0.2",
"extsprintf": "^1.2.0"
}
},
"wrappy": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz",
"integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8="
},
"xml": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/xml/-/xml-1.0.1.tgz",
"integrity": "sha1-eLpyAgApxbyHuKgaPPzXS0ovweU="
}
}
}