mirror of
https://github.com/super-linter/super-linter.git
synced 2024-11-22 16:51:05 -05:00
Merge remote-tracking branch 'upstream/master' into feature/tap-format
* upstream/master: (149 commits) rm dupe end brackets fix botched merge fixed docker bug fixing insta;; hunting a bug adding some small cleanup Bump Actions-R-Us/actions-tagger from v2.0.0 to v2.0.1 Bump actions/github-script from v2.3.1 to v2 Add HTML support to super-linter Remove ARM_TTK_VERSION from ENV in Dockerfile Remove noise from ARG in Dockerfile Minor fix(1) for TestCodebase [ARM] Minor fix for TestCodebase [ARM] Update ARM Test Cases readme Minor updates following merge from upstream/master Minor bugfixes Test case updates Add `editorconfig-checker` Typo Use full version numbers for GHA Dependabot will update these automatically ...
This commit is contained in:
commit
7bf7ecdbd3
64 changed files with 4370 additions and 2294 deletions
|
@ -16,20 +16,19 @@
|
||||||
###########
|
###########
|
||||||
# Globals #
|
# Globals #
|
||||||
###########
|
###########
|
||||||
GITHUB_WORKSPACE="${GITHUB_WORKSPACE}" # GitHub Workspace
|
GITHUB_WORKSPACE="${GITHUB_WORKSPACE}" # GitHub Workspace
|
||||||
DOCKER_USERNAME="${DOCKER_USERNAME}" # Username to login to DockerHub
|
DOCKER_USERNAME="${DOCKER_USERNAME}" # Username to login to DockerHub
|
||||||
DOCKER_PASSWORD="${DOCKER_PASSWORD}" # Password to login to DockerHub
|
DOCKER_PASSWORD="${DOCKER_PASSWORD}" # Password to login to DockerHub
|
||||||
IMAGE_REPO="${IMAGE_REPO}" # Image repo to upload the image
|
IMAGE_REPO="${IMAGE_REPO}" # Image repo to upload the image
|
||||||
IMAGE_VERSION="${IMAGE_VERSION}" # Version to tag the image
|
IMAGE_VERSION="${IMAGE_VERSION}" # Version to tag the image
|
||||||
DOCKERFILE_PATH="${DOCKERFILE_PATH}" # Path to the Dockerfile to be uploaded
|
DOCKERFILE_PATH="${DOCKERFILE_PATH}" # Path to the Dockerfile to be uploaded
|
||||||
|
|
||||||
################################################################################
|
################################################################################
|
||||||
############################ FUNCTIONS BELOW ###################################
|
############################ FUNCTIONS BELOW ###################################
|
||||||
################################################################################
|
################################################################################
|
||||||
################################################################################
|
################################################################################
|
||||||
#### Function Header ###########################################################
|
#### Function Header ###########################################################
|
||||||
Header()
|
Header() {
|
||||||
{
|
|
||||||
echo ""
|
echo ""
|
||||||
echo "-------------------------------------------------------"
|
echo "-------------------------------------------------------"
|
||||||
echo "----- GitHub Actions remove image from DockerHub ------"
|
echo "----- GitHub Actions remove image from DockerHub ------"
|
||||||
|
@ -38,8 +37,7 @@ Header()
|
||||||
}
|
}
|
||||||
################################################################################
|
################################################################################
|
||||||
#### Function ValidateInput ####################################################
|
#### Function ValidateInput ####################################################
|
||||||
ValidateInput()
|
ValidateInput() {
|
||||||
{
|
|
||||||
# Need to validate we have the basic variables
|
# Need to validate we have the basic variables
|
||||||
################
|
################
|
||||||
# Print header #
|
# Print header #
|
||||||
|
@ -54,8 +52,8 @@ ValidateInput()
|
||||||
# Validate GITHUB_WORKSPACE #
|
# Validate GITHUB_WORKSPACE #
|
||||||
############################
|
############################
|
||||||
if [ -z "$GITHUB_WORKSPACE" ]; then
|
if [ -z "$GITHUB_WORKSPACE" ]; then
|
||||||
echo "ERROR! Failed to get [GITHUB_WORKSPACE]!"
|
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to get [GITHUB_WORKSPACE]!${NC}"
|
||||||
echo "ERROR:[$GITHUB_WORKSPACE]"
|
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[$GITHUB_WORKSPACE]${NC}"
|
||||||
exit 1
|
exit 1
|
||||||
else
|
else
|
||||||
echo "Successfully found:[GITHUB_WORKSPACE], value:[$GITHUB_WORKSPACE]"
|
echo "Successfully found:[GITHUB_WORKSPACE], value:[$GITHUB_WORKSPACE]"
|
||||||
|
@ -66,15 +64,15 @@ ValidateInput()
|
||||||
#######################
|
#######################
|
||||||
if [ -z "$IMAGE_REPO" ]; then
|
if [ -z "$IMAGE_REPO" ]; then
|
||||||
# No repo was pulled
|
# No repo was pulled
|
||||||
echo "ERROR! Failed to get [IMAGE_REPO]!"
|
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to get [IMAGE_REPO]!${NC}"
|
||||||
echo "ERROR:[$IMAGE_REPO]"
|
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[$IMAGE_REPO]${NC}"
|
||||||
exit 1
|
exit 1
|
||||||
elif [[ "$IMAGE_REPO" == "github/super-linter" ]]; then
|
elif [[ $IMAGE_REPO == "github/super-linter" ]]; then
|
||||||
# Found our main repo
|
# Found our main repo
|
||||||
echo "Successfully found:[IMAGE_REPO], value:[$IMAGE_REPO]"
|
echo "Successfully found:[IMAGE_REPO], value:[$IMAGE_REPO]"
|
||||||
else
|
else
|
||||||
# This is a fork and we cant pull vars or any info
|
# This is a fork and we cant pull vars or any info
|
||||||
echo "WARN! No image to cleanup as this is a forked branch, and not being built with current automation!"
|
echo -e "${NC}${F[Y]}WARN!${NC} No image to cleanup as this is a forked branch, and not being built with current automation!${NC}"
|
||||||
exit 0
|
exit 0
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
@ -82,8 +80,8 @@ ValidateInput()
|
||||||
# Validate IMAGE_VERSION #
|
# Validate IMAGE_VERSION #
|
||||||
##########################
|
##########################
|
||||||
if [ -z "$IMAGE_VERSION" ]; then
|
if [ -z "$IMAGE_VERSION" ]; then
|
||||||
echo "ERROR! Failed to get [IMAGE_VERSION]!"
|
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to get [IMAGE_VERSION]!${NC}"
|
||||||
echo "ERROR:[$IMAGE_VERSION]"
|
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[$IMAGE_VERSION]${NC}"
|
||||||
exit 1
|
exit 1
|
||||||
else
|
else
|
||||||
echo "Successfully found:[IMAGE_VERSION], value:[$IMAGE_VERSION]"
|
echo "Successfully found:[IMAGE_VERSION], value:[$IMAGE_VERSION]"
|
||||||
|
@ -93,8 +91,8 @@ ValidateInput()
|
||||||
# Validate DOCKER_USERNAME #
|
# Validate DOCKER_USERNAME #
|
||||||
############################
|
############################
|
||||||
if [ -z "$DOCKER_USERNAME" ]; then
|
if [ -z "$DOCKER_USERNAME" ]; then
|
||||||
echo "ERROR! Failed to get [DOCKER_USERNAME]!"
|
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to get [DOCKER_USERNAME]!${NC}"
|
||||||
echo "ERROR:[$DOCKER_USERNAME]"
|
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[$DOCKER_USERNAME]${NC}"
|
||||||
exit 1
|
exit 1
|
||||||
else
|
else
|
||||||
echo "Successfully found:[DOCKER_USERNAME], value:[$DOCKER_USERNAME]"
|
echo "Successfully found:[DOCKER_USERNAME], value:[$DOCKER_USERNAME]"
|
||||||
|
@ -104,8 +102,8 @@ ValidateInput()
|
||||||
# Validate DOCKER_PASSWORD #
|
# Validate DOCKER_PASSWORD #
|
||||||
############################
|
############################
|
||||||
if [ -z "$DOCKER_PASSWORD" ]; then
|
if [ -z "$DOCKER_PASSWORD" ]; then
|
||||||
echo "ERROR! Failed to get [DOCKER_PASSWORD]!"
|
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to get [DOCKER_PASSWORD]!${NC}"
|
||||||
echo "ERROR:[$DOCKER_PASSWORD]"
|
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[$DOCKER_PASSWORD]${NC}"
|
||||||
exit 1
|
exit 1
|
||||||
else
|
else
|
||||||
echo "Successfully found:[DOCKER_PASSWORD], value:[********]"
|
echo "Successfully found:[DOCKER_PASSWORD], value:[********]"
|
||||||
|
@ -114,7 +112,7 @@ ValidateInput()
|
||||||
##################################################
|
##################################################
|
||||||
# Check if we need to get the name of the branch #
|
# Check if we need to get the name of the branch #
|
||||||
##################################################
|
##################################################
|
||||||
if [[ "$IMAGE_VERSION" != "latest" ]]; then
|
if [[ $IMAGE_VERSION != "latest" ]]; then
|
||||||
##################################
|
##################################
|
||||||
# Remove non alpha-numeric chars #
|
# Remove non alpha-numeric chars #
|
||||||
##################################
|
##################################
|
||||||
|
@ -131,8 +129,7 @@ ValidateInput()
|
||||||
}
|
}
|
||||||
################################################################################
|
################################################################################
|
||||||
#### Function LoginToDocker ####################################################
|
#### Function LoginToDocker ####################################################
|
||||||
LoginToDocker()
|
LoginToDocker() {
|
||||||
{
|
|
||||||
################
|
################
|
||||||
# Print header #
|
# Print header #
|
||||||
################
|
################
|
||||||
|
@ -157,8 +154,8 @@ LoginToDocker()
|
||||||
##############################
|
##############################
|
||||||
if [ $ERROR_CODE -ne 0 ]; then
|
if [ $ERROR_CODE -ne 0 ]; then
|
||||||
# ERROR
|
# ERROR
|
||||||
echo "ERROR! Failed to authenticate to DockerHub!"
|
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to authenticate to DockerHub!${NC}"
|
||||||
echo "ERROR:[$LOGIN_CMD]"
|
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[$LOGIN_CMD]${NC}"
|
||||||
exit 1
|
exit 1
|
||||||
else
|
else
|
||||||
# SUCCESS
|
# SUCCESS
|
||||||
|
@ -167,8 +164,7 @@ LoginToDocker()
|
||||||
}
|
}
|
||||||
################################################################################
|
################################################################################
|
||||||
#### Function RemoveImage ######################################################
|
#### Function RemoveImage ######################################################
|
||||||
RemoveImage()
|
RemoveImage() {
|
||||||
{
|
|
||||||
################
|
################
|
||||||
# Print header #
|
# Print header #
|
||||||
################
|
################
|
||||||
|
@ -187,23 +183,23 @@ RemoveImage()
|
||||||
-d "{\"username\": \"$DOCKER_USERNAME\", \"password\": \"$DOCKER_PASSWORD\"}" \
|
-d "{\"username\": \"$DOCKER_USERNAME\", \"password\": \"$DOCKER_PASSWORD\"}" \
|
||||||
"https://hub.docker.com/v2/users/login/" | jq -r .token 2>&1)
|
"https://hub.docker.com/v2/users/login/" | jq -r .token 2>&1)
|
||||||
|
|
||||||
#######################
|
#######################
|
||||||
# Load the ERROR_CODE #
|
# Load the ERROR_CODE #
|
||||||
#######################
|
#######################
|
||||||
ERROR_CODE=$?
|
ERROR_CODE=$?
|
||||||
|
|
||||||
##############################
|
##############################
|
||||||
# Check the shell for errors #
|
# Check the shell for errors #
|
||||||
##############################
|
##############################
|
||||||
if [ $ERROR_CODE -ne 0 ]; then
|
if [ $ERROR_CODE -ne 0 ]; then
|
||||||
# ERROR
|
# ERROR
|
||||||
echo "ERROR! Failed to gain token from DockerHub!"
|
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to gain token from DockerHub!${NC}"
|
||||||
echo "ERROR:[$TOKEN]"
|
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[$TOKEN]${NC}"
|
||||||
exit 1
|
exit 1
|
||||||
else
|
else
|
||||||
# SUCCESS
|
# SUCCESS
|
||||||
echo "Successfully gained auth token from DockerHub!"
|
echo "Successfully gained auth token from DockerHub!"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
#################################
|
#################################
|
||||||
# Remove the tag from DockerHub #
|
# Remove the tag from DockerHub #
|
||||||
|
@ -222,8 +218,8 @@ RemoveImage()
|
||||||
##############################
|
##############################
|
||||||
if [ $ERROR_CODE -ne 0 ]; then
|
if [ $ERROR_CODE -ne 0 ]; then
|
||||||
# ERROR
|
# ERROR
|
||||||
echo "ERROR! Failed to remove tag from DockerHub!"
|
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to remove tag from DockerHub!${NC}"
|
||||||
echo "ERROR:[$REMOVE_CMD]"
|
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[$REMOVE_CMD]${NC}"
|
||||||
exit 1
|
exit 1
|
||||||
else
|
else
|
||||||
# SUCCESS
|
# SUCCESS
|
||||||
|
@ -232,8 +228,7 @@ RemoveImage()
|
||||||
}
|
}
|
||||||
################################################################################
|
################################################################################
|
||||||
#### Function Footer ###########################################################
|
#### Function Footer ###########################################################
|
||||||
Footer()
|
Footer() {
|
||||||
{
|
|
||||||
echo ""
|
echo ""
|
||||||
echo "-------------------------------------------------------"
|
echo "-------------------------------------------------------"
|
||||||
echo "The step has completed"
|
echo "The step has completed"
|
||||||
|
|
|
@ -53,8 +53,8 @@ CheckGHEPid()
|
||||||
# Check the shell for errors #
|
# Check the shell for errors #
|
||||||
##############################
|
##############################
|
||||||
if [ $ERROR_CODE -ne 0 ]; then
|
if [ $ERROR_CODE -ne 0 ]; then
|
||||||
echo "ERROR! Failed to sleep!"
|
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to sleep!${NC}"
|
||||||
echo "ERROR:[$SLEEP_CMD]"
|
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[$SLEEP_CMD]${NC}"
|
||||||
echo "Will try to call apply as last effort..."
|
echo "Will try to call apply as last effort..."
|
||||||
####################################
|
####################################
|
||||||
# Call config apply as last effort #
|
# Call config apply as last effort #
|
||||||
|
@ -87,8 +87,7 @@ CheckGHEProcess()
|
||||||
####################################################
|
####################################################
|
||||||
# Check to see if the process is alive and running #
|
# Check to see if the process is alive and running #
|
||||||
####################################################
|
####################################################
|
||||||
# shellcheck disable=SC2009
|
CHECK_PROCESS_CMD=$(pgrep -f "$GHE_APPLY_COMMAND" 2>&1)
|
||||||
CHECK_PROCESS_CMD=$(ps -aef |grep "$GHE_APPLY_COMMAND" |grep -v grep 2>&1)
|
|
||||||
|
|
||||||
#######################
|
#######################
|
||||||
# Load the error code #
|
# Load the error code #
|
||||||
|
@ -118,8 +117,8 @@ CheckGHEProcess()
|
||||||
# Check the shell for errors #
|
# Check the shell for errors #
|
||||||
##############################
|
##############################
|
||||||
if [ $ERROR_CODE -ne 0 ]; then
|
if [ $ERROR_CODE -ne 0 ]; then
|
||||||
echo "ERROR! Failed to sleep!"
|
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to sleep!${NC}"
|
||||||
echo "ERROR:[$SLEEP_CMD]"
|
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[$SLEEP_CMD]${NC}"
|
||||||
echo "Will try to call apply as last effort..."
|
echo "Will try to call apply as last effort..."
|
||||||
####################################
|
####################################
|
||||||
# Call config apply as last effort #
|
# Call config apply as last effort #
|
||||||
|
@ -162,12 +161,12 @@ RunConfigApply()
|
||||||
##############################
|
##############################
|
||||||
if [ $ERROR_CODE -ne 0 ]; then
|
if [ $ERROR_CODE -ne 0 ]; then
|
||||||
# Errors
|
# Errors
|
||||||
echo "ERROR! Failed to run config apply command!"
|
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to run config apply command!${NC}"
|
||||||
echo "ERROR:[$APPLY_CMD]"
|
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[$APPLY_CMD]${NC}"
|
||||||
exit 1
|
exit 1
|
||||||
else
|
else
|
||||||
# Success
|
# Success
|
||||||
echo "Successfully ran $GHE_APPLY_COMMAND"
|
echo -e "${NC}${F[B]}Successfully ran ${F[C]}$GHE_APPLY_COMMAND${NC}"
|
||||||
fi
|
fi
|
||||||
}
|
}
|
||||||
################################################################################
|
################################################################################
|
||||||
|
|
13
.automation/test/arm/README.md
Normal file
13
.automation/test/arm/README.md
Normal file
|
@ -0,0 +1,13 @@
|
||||||
|
# ARM Test Cases
|
||||||
|
This folder holds the test cases for **Azure Resource Manager (ARM)**.
|
||||||
|
|
||||||
|
## Additional Docs
|
||||||
|
No Additional information is needed for this test case.
|
||||||
|
|
||||||
|
## Good Test Cases
|
||||||
|
The test cases denoted: `LANGUAGE_good_FILE.EXTENSION` are all valid, and should pass successfully when linted.
|
||||||
|
- **Note:** They are linted utilizing the default linter rules.
|
||||||
|
|
||||||
|
## Bad Test Cases
|
||||||
|
The test cases denoted: `LANGUAGE_bad_FILE.EXTENSION` are **NOT** valid, and should trigger errors when linted.
|
||||||
|
- **Note:** They are linted utilizing the default linter rules.
|
53
.automation/test/arm/arm_bad_1.json
Normal file
53
.automation/test/arm/arm_bad_1.json
Normal file
|
@ -0,0 +1,53 @@
|
||||||
|
{
|
||||||
|
"$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#",
|
||||||
|
"contentVersion": "",
|
||||||
|
"parameters": {
|
||||||
|
"Network.Config": {
|
||||||
|
"type": "object",
|
||||||
|
"metadata": {
|
||||||
|
"description": "Properties object for the Virtual Network"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"variables": {
|
||||||
|
"Network.Name": "[parameters('Network.Config').name]",
|
||||||
|
"Network.Location": "[parameters('Network.Config').location]",
|
||||||
|
"Network.Tags": "[parameters('Network.Config').tags]",
|
||||||
|
"Network.Properties.AddressSpace.AddressPrefixes": "[parameters('Network.Config').addressPrefixes]",
|
||||||
|
"Network.Properties.DhcpOptions.DnsServers": "[parameters('Network.Config').dnsServers]",
|
||||||
|
"copy": [
|
||||||
|
{
|
||||||
|
"name": "Network.Properties.Subnets",
|
||||||
|
"count": "[length(parameters('Network.Config').subnetConfig)]",
|
||||||
|
"input": {
|
||||||
|
"name": "[concat(parameters('Network.Config').subnetConfig[copyIndex('Network.Properties.Subnets')].aksId,'-',parameters('Network.Config').locationId)]",
|
||||||
|
"properties": "[parameters('Network.Config').subnetConfig[copyIndex('Network.Properties.Subnets')].properties]"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"Test.Blank.Variable": []
|
||||||
|
},
|
||||||
|
"resources": [
|
||||||
|
{
|
||||||
|
"name": "[variables('Network.Name')]",
|
||||||
|
"type": "Microsoft.Network/virtualNetworks",
|
||||||
|
"apiVersion": "2019-12-01",
|
||||||
|
"tags": "[variables('Network.Tags')]",
|
||||||
|
"properties": {
|
||||||
|
"addressSpace": {
|
||||||
|
"addressPrefixes": "[variables('Network.Properties.AddressSpace.AddressPrefixes')]"
|
||||||
|
},
|
||||||
|
"dhcpOptions": {
|
||||||
|
"dnsServers": "[variables('Network.Properties.DhcpOptions.DnsServers')]"
|
||||||
|
},
|
||||||
|
"subnets": "[variables('Network.Properties.Subnets')]"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"outputs": {
|
||||||
|
"state": {
|
||||||
|
"type": "object",
|
||||||
|
"value": "[reference(resourceId('Microsoft.Network/virtualNetworks',variables('Network.Name')), '2019-12-01', 'Full')]"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
53
.automation/test/arm/arm_good_1.json
Normal file
53
.automation/test/arm/arm_good_1.json
Normal file
|
@ -0,0 +1,53 @@
|
||||||
|
{
|
||||||
|
"$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#",
|
||||||
|
"contentVersion": "1.0.0.0",
|
||||||
|
"parameters": {
|
||||||
|
"Network.Config": {
|
||||||
|
"type": "object",
|
||||||
|
"metadata": {
|
||||||
|
"description": "Properties object for the Virtual Network"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"variables": {
|
||||||
|
"Network.Name": "[parameters('Network.Config').name]",
|
||||||
|
"Network.Location": "[parameters('Network.Config').location]",
|
||||||
|
"Network.Tags": "[parameters('Network.Config').tags]",
|
||||||
|
"Network.Properties.AddressSpace.AddressPrefixes": "[parameters('Network.Config').addressPrefixes]",
|
||||||
|
"Network.Properties.DhcpOptions.DnsServers": "[parameters('Network.Config').dnsServers]",
|
||||||
|
"copy": [
|
||||||
|
{
|
||||||
|
"name": "Network.Properties.Subnets",
|
||||||
|
"count": "[length(parameters('Network.Config').subnetConfig)]",
|
||||||
|
"input": {
|
||||||
|
"name": "[concat(parameters('Network.Config').subnetConfig[copyIndex('Network.Properties.Subnets')].aksId,'-',parameters('Network.Config').locationId)]",
|
||||||
|
"properties": "[parameters('Network.Config').subnetConfig[copyIndex('Network.Properties.Subnets')].properties]"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"resources": [
|
||||||
|
{
|
||||||
|
"name": "[variables('Network.Name')]",
|
||||||
|
"type": "Microsoft.Network/virtualNetworks",
|
||||||
|
"apiVersion": "2019-12-01",
|
||||||
|
"location": "[variables('Network.Location')]",
|
||||||
|
"tags": "[variables('Network.Tags')]",
|
||||||
|
"properties": {
|
||||||
|
"addressSpace": {
|
||||||
|
"addressPrefixes": "[variables('Network.Properties.AddressSpace.AddressPrefixes')]"
|
||||||
|
},
|
||||||
|
"dhcpOptions": {
|
||||||
|
"dnsServers": "[variables('Network.Properties.DhcpOptions.DnsServers')]"
|
||||||
|
},
|
||||||
|
"subnets": "[variables('Network.Properties.Subnets')]"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"outputs": {
|
||||||
|
"state": {
|
||||||
|
"type": "object",
|
||||||
|
"value": "[reference(resourceId('Microsoft.Network/virtualNetworks',variables('Network.Name')), '2019-12-01', 'Full')]"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
13
.automation/test/cfn/README.md
Normal file
13
.automation/test/cfn/README.md
Normal file
|
@ -0,0 +1,13 @@
|
||||||
|
# AWS CloudFormation Test Cases
|
||||||
|
This folder holds the test cases for **CloudFormation**.
|
||||||
|
|
||||||
|
## Additional Docs
|
||||||
|
No Additional information is needed for this test case.
|
||||||
|
|
||||||
|
## Good Test Cases
|
||||||
|
The test cases denoted: `LANGUAGE_good_FILE.EXTENSION` are all valid, and should pass successfully when linted.
|
||||||
|
- **Note:** They are linted utilizing the default linter rules.
|
||||||
|
|
||||||
|
## Bad Test Cases
|
||||||
|
The test cases denoted: `LANGUAGE_bad_FILE.EXTENSION` are **NOT** valid, and should trigger errors when linted.
|
||||||
|
- **Note:** They are linted utilizing the default linter rules.
|
35
.automation/test/cfn/cfn_bad_1.json
Normal file
35
.automation/test/cfn/cfn_bad_1.json
Normal file
|
@ -0,0 +1,35 @@
|
||||||
|
{
|
||||||
|
"AWSTemplateFormatVersion": "2010-09-09",
|
||||||
|
"Description": "A sample template",
|
||||||
|
"Resources": {
|
||||||
|
"DNS": {
|
||||||
|
"Type": "AWS::Route53::HostedZonee",
|
||||||
|
"Properties": {
|
||||||
|
"HostedZoneConfig": {
|
||||||
|
"Comment": "Myhostedzoneforexample.com"
|
||||||
|
},
|
||||||
|
"Name": "example.com",
|
||||||
|
"VPCs": [
|
||||||
|
{
|
||||||
|
"VPCId": "vpc-abcd1234",
|
||||||
|
"VPCRegion": "ap-northeast-1"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"VPCId": "vpc-efgh5678",
|
||||||
|
"VPCRegion": "us-west-2"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"HostedZoneTags": [
|
||||||
|
{
|
||||||
|
"Key": "SampleKey1",
|
||||||
|
"Value": "SampleValue1"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"Key": "SampleKey2",
|
||||||
|
"Value": "SampleValue2"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
23
.automation/test/cfn/cfn_bad_2.yaml
Normal file
23
.automation/test/cfn/cfn_bad_2.yaml
Normal file
|
@ -0,0 +1,23 @@
|
||||||
|
AWSTemplateFormatVersion: "2010-09-09"
|
||||||
|
Description: A sample template
|
||||||
|
Resources:
|
||||||
|
DNS:
|
||||||
|
Type: "AWS::Route53::HostedZonee"
|
||||||
|
Properties:
|
||||||
|
HostedZoneConfig:
|
||||||
|
Comment: 'My hosted zone for example.com'
|
||||||
|
Name: 'example.com'
|
||||||
|
VPCs:
|
||||||
|
-
|
||||||
|
VPCId: 'vpc-abcd1234'
|
||||||
|
VPCRegion: 'ap-northeast-1'
|
||||||
|
-
|
||||||
|
VPCId: 'vpc-efgh5678'
|
||||||
|
VPCRegion: 'us-west-2'
|
||||||
|
HostedZoneTags:
|
||||||
|
-
|
||||||
|
Key: 'SampleKey1'
|
||||||
|
Value: 'SampleValue1'
|
||||||
|
-
|
||||||
|
Key: 'SampleKey2'
|
||||||
|
Value: 'SampleValue2'
|
16
.automation/test/cfn/cfn_bad_3.json
Normal file
16
.automation/test/cfn/cfn_bad_3.json
Normal file
|
@ -0,0 +1,16 @@
|
||||||
|
{
|
||||||
|
"Resources" : {
|
||||||
|
"myDNSRecord" : {
|
||||||
|
"Type" : "AWS::Route53::RecordSet",
|
||||||
|
"Properties" : {
|
||||||
|
"HostedZoneId" : "Z8VLZEXAMPLE",
|
||||||
|
"Name" : "test.example.com",
|
||||||
|
"ResourceRecords" : [
|
||||||
|
"192.0.2.99"
|
||||||
|
],
|
||||||
|
"Ttl" : 300,
|
||||||
|
"Type" : "A"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
10
.automation/test/cfn/cfn_bad_4.yaml
Normal file
10
.automation/test/cfn/cfn_bad_4.yaml
Normal file
|
@ -0,0 +1,10 @@
|
||||||
|
Resources:
|
||||||
|
myDNSRecord:
|
||||||
|
Type: AWS::Route53::RecordSet
|
||||||
|
Properties:
|
||||||
|
HostedZoneId : Z8VLZEXAMPLE
|
||||||
|
Name: test.example.com
|
||||||
|
ResourceRecords:
|
||||||
|
- 192.0.2.99
|
||||||
|
Ttl: 900
|
||||||
|
Type: A
|
35
.automation/test/cfn/cfn_good_1.json
Normal file
35
.automation/test/cfn/cfn_good_1.json
Normal file
|
@ -0,0 +1,35 @@
|
||||||
|
{
|
||||||
|
"AWSTemplateFormatVersion": "2010-09-09",
|
||||||
|
"Description": "A sample template",
|
||||||
|
"Resources": {
|
||||||
|
"DNS": {
|
||||||
|
"Type": "AWS::Route53::HostedZone",
|
||||||
|
"Properties": {
|
||||||
|
"HostedZoneConfig": {
|
||||||
|
"Comment": "Myhostedzoneforexample.com"
|
||||||
|
},
|
||||||
|
"Name": "example.com",
|
||||||
|
"VPCs": [
|
||||||
|
{
|
||||||
|
"VPCId": "vpc-abcd1234",
|
||||||
|
"VPCRegion": "ap-northeast-1"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"VPCId": "vpc-efgh5678",
|
||||||
|
"VPCRegion": "us-west-2"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"HostedZoneTags": [
|
||||||
|
{
|
||||||
|
"Key": "SampleKey1",
|
||||||
|
"Value": "SampleValue1"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"Key": "SampleKey2",
|
||||||
|
"Value": "SampleValue2"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
23
.automation/test/cfn/cfn_good_2.yaml
Normal file
23
.automation/test/cfn/cfn_good_2.yaml
Normal file
|
@ -0,0 +1,23 @@
|
||||||
|
AWSTemplateFormatVersion: "2010-09-09"
|
||||||
|
Description: A sample template
|
||||||
|
Resources:
|
||||||
|
DNS:
|
||||||
|
Type: "AWS::Route53::HostedZone"
|
||||||
|
Properties:
|
||||||
|
HostedZoneConfig:
|
||||||
|
Comment: 'My hosted zone for example.com'
|
||||||
|
Name: 'example.com'
|
||||||
|
VPCs:
|
||||||
|
-
|
||||||
|
VPCId: 'vpc-abcd1234'
|
||||||
|
VPCRegion: 'ap-northeast-1'
|
||||||
|
-
|
||||||
|
VPCId: 'vpc-efgh5678'
|
||||||
|
VPCRegion: 'us-west-2'
|
||||||
|
HostedZoneTags:
|
||||||
|
-
|
||||||
|
Key: 'SampleKey1'
|
||||||
|
Value: 'SampleValue1'
|
||||||
|
-
|
||||||
|
Key: 'SampleKey2'
|
||||||
|
Value: 'SampleValue2'
|
16
.automation/test/cfn/cfn_good_3.json
Normal file
16
.automation/test/cfn/cfn_good_3.json
Normal file
|
@ -0,0 +1,16 @@
|
||||||
|
{
|
||||||
|
"Resources" : {
|
||||||
|
"myDNSRecord" : {
|
||||||
|
"Type" : "AWS::Route53::RecordSet",
|
||||||
|
"Properties" : {
|
||||||
|
"HostedZoneId" : "Z8VLZEXAMPLE",
|
||||||
|
"Name" : "test.example.com",
|
||||||
|
"ResourceRecords" : [
|
||||||
|
"192.0.2.99"
|
||||||
|
],
|
||||||
|
"TTL" : 300,
|
||||||
|
"Type" : "A"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
10
.automation/test/cfn/cfn_good_4.yaml
Normal file
10
.automation/test/cfn/cfn_good_4.yaml
Normal file
|
@ -0,0 +1,10 @@
|
||||||
|
Resources:
|
||||||
|
myDNSRecord:
|
||||||
|
Type: AWS::Route53::RecordSet
|
||||||
|
Properties:
|
||||||
|
HostedZoneId : Z8VLZEXAMPLE
|
||||||
|
Name: test.example.com
|
||||||
|
ResourceRecords:
|
||||||
|
- 192.0.2.99
|
||||||
|
TTL: 900
|
||||||
|
Type: A
|
3
.automation/test/editorconfig-checker/.editorconfig
Normal file
3
.automation/test/editorconfig-checker/.editorconfig
Normal file
|
@ -0,0 +1,3 @@
|
||||||
|
[*.ext]
|
||||||
|
indent_style = space
|
||||||
|
indent_size = 4
|
13
.automation/test/editorconfig-checker/README.md
Normal file
13
.automation/test/editorconfig-checker/README.md
Normal file
|
@ -0,0 +1,13 @@
|
||||||
|
# EDITORCONFIG_CHECKER Test Cases
|
||||||
|
This folder holds the test cases for **EDITORCONFIG_CHECKER**.
|
||||||
|
|
||||||
|
## Additional Docs
|
||||||
|
No Additional information is needed for this test case.
|
||||||
|
|
||||||
|
## Good Test Cases
|
||||||
|
The test cases denoted: `LANGUAGE_good_FILE.EXTENSION` are all valid, and should pass successfully when linted.
|
||||||
|
- **Note:** They are linted utilizing the default linter rules.
|
||||||
|
|
||||||
|
## Bad Test Cases
|
||||||
|
The test cases denoted: `LANGUAGE_bad_FILE.EXTENSION` are **NOT** valid, and should trigger errors when linted.
|
||||||
|
- **Note:** They are linted utilizing the default linter rules.
|
|
@ -0,0 +1,3 @@
|
||||||
|
some line
|
||||||
|
some line
|
||||||
|
some line
|
|
@ -0,0 +1,3 @@
|
||||||
|
some line
|
||||||
|
some line
|
||||||
|
some line
|
13
.automation/test/html/README.md
Normal file
13
.automation/test/html/README.md
Normal file
|
@ -0,0 +1,13 @@
|
||||||
|
# HTML Test Cases
|
||||||
|
This folder holds the test cases for **HTML**.
|
||||||
|
|
||||||
|
## Additional Docs
|
||||||
|
No Additional information is needed for this test case.
|
||||||
|
|
||||||
|
## Good Test Cases
|
||||||
|
The test cases denoted: `LANGUAGE_good_FILE.EXTENSION` are all valid, and should pass successfully when linted.
|
||||||
|
- **Note:** They are linted utilizing the default linter rules.
|
||||||
|
|
||||||
|
## Bad Test Cases
|
||||||
|
The test cases denoted: `LANGUAGE_bad_FILE.EXTENSION` are **NOT** valid, and should trigger errors when linted.
|
||||||
|
- **Note:** They are linted utilizing the default linter rules.
|
99
.automation/test/html/html_bad_01.html
Normal file
99
.automation/test/html/html_bad_01.html
Normal file
|
@ -0,0 +1,99 @@
|
||||||
|
<!DOCTYPE html>
|
||||||
|
<html lang="en">
|
||||||
|
<head>
|
||||||
|
<meta charset="UTF-8">
|
||||||
|
<title>Document</title>
|
||||||
|
</head>
|
||||||
|
<body>
|
||||||
|
<div bad='' bad=''></div>
|
||||||
|
<div bad='' bad=''></div>
|
||||||
|
<div bad='' bad=''></div>
|
||||||
|
<div bad='' bad=''></div>
|
||||||
|
<div bad='' bad=''></div>
|
||||||
|
<div bad='' bad=''></div>
|
||||||
|
<div bad='' bad=''></div>
|
||||||
|
<div bad='' bad=''></div>
|
||||||
|
<div bad='' bad=''></div>
|
||||||
|
<div bad='' bad=''></div>
|
||||||
|
<div bad='' bad=''></div>
|
||||||
|
<div bad='' bad=''></div>
|
||||||
|
<div bad='' bad=''></div>
|
||||||
|
<div bad='' bad=''></div>
|
||||||
|
<div bad='' bad=''></div>
|
||||||
|
<div bad='' bad=''></div>
|
||||||
|
<div bad='' bad=''></div>
|
||||||
|
<input type="text"></input>
|
||||||
|
<div class"foo">
|
||||||
|
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</hello>
|
||||||
|
</test>
|
||||||
|
</div>
|
||||||
|
<test>
|
||||||
|
<hello>
|
||||||
|
<div>
|
||||||
|
<div>
|
||||||
|
<div>
|
||||||
|
<div class='' what='' something=''></div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</hello>
|
||||||
|
</test>
|
||||||
|
</div>
|
||||||
|
<test>
|
||||||
|
<hello>
|
||||||
|
<div>
|
||||||
|
<div>
|
||||||
|
<div>
|
||||||
|
<div class='' what='' something=''></div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</hello>
|
||||||
|
</test>
|
||||||
|
</div>
|
||||||
|
<test>
|
||||||
|
<hello>
|
||||||
|
<div>
|
||||||
|
<div>
|
||||||
|
<div>
|
||||||
|
<div class='' what='' something=''></div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</hello>
|
||||||
|
</test>
|
||||||
|
</div>
|
||||||
|
<test>
|
||||||
|
<hello>
|
||||||
|
<div>
|
||||||
|
<div>
|
||||||
|
<div>
|
||||||
|
<div class='' what='' something=''></div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</hello>
|
||||||
|
</test>
|
||||||
|
</div>
|
||||||
|
</bad>
|
||||||
|
</bad>
|
||||||
|
</bad>
|
||||||
|
</bad>
|
||||||
|
</bad>
|
||||||
|
</bad>
|
||||||
|
</bad>
|
||||||
|
</bad>
|
||||||
|
</bad>
|
||||||
|
</bad>
|
||||||
|
</bad>
|
||||||
|
</bad>
|
||||||
|
</bad>
|
||||||
|
</bad>
|
||||||
|
</bad>
|
||||||
|
</bad>
|
||||||
|
</bad>
|
||||||
|
</body>
|
||||||
|
</html>
|
10
.automation/test/html/html_good_01.html
Normal file
10
.automation/test/html/html_good_01.html
Normal file
|
@ -0,0 +1,10 @@
|
||||||
|
<!DOCTYPE html>
|
||||||
|
<html lang="en">
|
||||||
|
<head>
|
||||||
|
<meta charset="UTF-8">
|
||||||
|
<title>Document</title>
|
||||||
|
</head>
|
||||||
|
<body>
|
||||||
|
Good HTML!
|
||||||
|
</body>
|
||||||
|
</html>
|
19
.automation/test/protobuf/README.md
Normal file
19
.automation/test/protobuf/README.md
Normal file
|
@ -0,0 +1,19 @@
|
||||||
|
# Protobuf Test Cases
|
||||||
|
|
||||||
|
This folder holds the test cases for **Protobuf**.
|
||||||
|
|
||||||
|
## Additional Docs
|
||||||
|
|
||||||
|
No Additional information is needed for this test case.
|
||||||
|
|
||||||
|
## Good Test Cases
|
||||||
|
|
||||||
|
The test cases denoted: `LANGUAGE_good_FILE.EXTENSION` are all valid, and should pass successfully when linted.
|
||||||
|
|
||||||
|
- **Note:** They are linted utilizing the default linter rules.
|
||||||
|
|
||||||
|
## Bad Test Cases
|
||||||
|
|
||||||
|
The test cases denoted: `LANGUAGE_bad_FILE.EXTENSION` are **NOT** valid, and should trigger errors when linted.
|
||||||
|
|
||||||
|
- **Note:** They are linted utilizing the default linter rules.
|
48
.automation/test/protobuf/protobuf_bad_1.proto
Normal file
48
.automation/test/protobuf/protobuf_bad_1.proto
Normal file
|
@ -0,0 +1,48 @@
|
||||||
|
syntax = "proto3";
|
||||||
|
// A broken example of the official reference
|
||||||
|
// See https://developers.google.com/protocol-buffers/docs/reference/proto3-spec#proto_file
|
||||||
|
package examplePb;
|
||||||
|
|
||||||
|
option java_package = "com.example.foo";
|
||||||
|
|
||||||
|
import "other.proto";
|
||||||
|
import public "new.proto";
|
||||||
|
|
||||||
|
import "google/protobuf/empty.proto";
|
||||||
|
import "google/protobuf/timestamp.proto";
|
||||||
|
|
||||||
|
import "myproject/other_protos.proto";
|
||||||
|
import "myproject/main_protos.proto";
|
||||||
|
|
||||||
|
enum enumAllowingAlias {
|
||||||
|
option allow_alias = true;
|
||||||
|
UNKNOWN = 0;
|
||||||
|
STARTED = 1;
|
||||||
|
RUNNING = 2 [(custom_option) = "hello world"];
|
||||||
|
}
|
||||||
|
message outer {
|
||||||
|
option (my_option).a = true;
|
||||||
|
// inner is an inner message.
|
||||||
|
message inner { // Level 2
|
||||||
|
int64 ival = 1;
|
||||||
|
}
|
||||||
|
repeated inner inner_message = 2;
|
||||||
|
EnumAllowingAlias enum_field =3;
|
||||||
|
map<int32, string> my_map = 4;
|
||||||
|
string reason_for_error = 5;
|
||||||
|
string end_of_support_version= 6;
|
||||||
|
message AccountForAdmin {}
|
||||||
|
message SpecialEndOfSupport {}
|
||||||
|
required inner inner_message = 7;
|
||||||
|
group Result = 8 {
|
||||||
|
string url = 9;
|
||||||
|
}
|
||||||
|
repeated group Result = 10 {
|
||||||
|
}
|
||||||
|
repeated inner paper = 11;
|
||||||
|
repeated group Regular = 12 {
|
||||||
|
}
|
||||||
|
}
|
||||||
|
service SearchApi {
|
||||||
|
rpc search (SearchRequest) returns (SearchResponse) {};
|
||||||
|
};
|
18
.automation/test/protobuf/protobuf_good_1.proto
Normal file
18
.automation/test/protobuf/protobuf_good_1.proto
Normal file
|
@ -0,0 +1,18 @@
|
||||||
|
syntax = "proto3";
|
||||||
|
import public "other.proto";
|
||||||
|
option java_package = "com.example.foo";
|
||||||
|
enum EnumAllowingAlias {
|
||||||
|
option allow_alias = true;
|
||||||
|
ALLOWING_UNSPECIFIED = 0;
|
||||||
|
STARTED = 1;
|
||||||
|
RUNNING = 2 [(custom_option) = "hello world"];
|
||||||
|
}
|
||||||
|
message Outer {
|
||||||
|
option (my_option).a = true;
|
||||||
|
message Inner { // Level 2
|
||||||
|
int64 ival = 1;
|
||||||
|
}
|
||||||
|
inner inner_message = 2;
|
||||||
|
EnumAllowingAlias enum_field =3;
|
||||||
|
map<int32, string> my_map = 4;
|
||||||
|
}
|
|
@ -17,23 +17,24 @@
|
||||||
###########
|
###########
|
||||||
# Globals #
|
# Globals #
|
||||||
###########
|
###########
|
||||||
GITHUB_WORKSPACE="${GITHUB_WORKSPACE}" # GitHub Workspace
|
GITHUB_WORKSPACE="${GITHUB_WORKSPACE}" # GitHub Workspace
|
||||||
DOCKER_USERNAME="${DOCKER_USERNAME}" # Username to login to DockerHub
|
DOCKER_USERNAME="${DOCKER_USERNAME}" # Username to login to DockerHub
|
||||||
DOCKER_PASSWORD="${DOCKER_PASSWORD}" # Password to login to DockerHub
|
DOCKER_PASSWORD="${DOCKER_PASSWORD}" # Password to login to DockerHub
|
||||||
GPR_USERNAME="${GPR_USERNAME}" # Username to login to GitHub package registry
|
GPR_USERNAME="${GPR_USERNAME}" # Username to login to GitHub package registry
|
||||||
GPR_TOKEN="${GPR_TOKEN}" # Password to login to GitHub package registry
|
GPR_TOKEN="${GPR_TOKEN}" # Password to login to GitHub package registry
|
||||||
REGISTRY="${REGISTRY}" # What registry to upload | <GPR> or <Docker>
|
REGISTRY="${REGISTRY}" # What registry to upload | <GPR> or <Docker>
|
||||||
IMAGE_REPO="${IMAGE_REPO}" # Image repo to upload the image
|
IMAGE_REPO="${IMAGE_REPO}" # Image repo to upload the image
|
||||||
IMAGE_VERSION="${IMAGE_VERSION}" # Version to tag the image
|
IMAGE_VERSION="${IMAGE_VERSION}" # Version to tag the image
|
||||||
DOCKERFILE_PATH="${DOCKERFILE_PATH}" # Path to the Dockerfile to be uploaded
|
DOCKERFILE_PATH="${DOCKERFILE_PATH}" # Path to the Dockerfile to be uploaded
|
||||||
|
MAJOR_TAG='' # Major tag version if we need to update it
|
||||||
|
UPDATE_MAJOR_TAG=0 # Flag to deploy the major tag version as well
|
||||||
|
|
||||||
################################################################################
|
################################################################################
|
||||||
############################ FUNCTIONS BELOW ###################################
|
############################ FUNCTIONS BELOW ###################################
|
||||||
################################################################################
|
################################################################################
|
||||||
################################################################################
|
################################################################################
|
||||||
#### Function Header ###########################################################
|
#### Function Header ###########################################################
|
||||||
Header()
|
Header() {
|
||||||
{
|
|
||||||
echo ""
|
echo ""
|
||||||
echo "-------------------------------------------------------"
|
echo "-------------------------------------------------------"
|
||||||
echo "---- GitHub Actions Upload image to [$REGISTRY] ----"
|
echo "---- GitHub Actions Upload image to [$REGISTRY] ----"
|
||||||
|
@ -42,8 +43,7 @@ Header()
|
||||||
}
|
}
|
||||||
################################################################################
|
################################################################################
|
||||||
#### Function ValidateInput ####################################################
|
#### Function ValidateInput ####################################################
|
||||||
ValidateInput()
|
ValidateInput() {
|
||||||
{
|
|
||||||
# Need to validate we have the basic variables
|
# Need to validate we have the basic variables
|
||||||
################
|
################
|
||||||
# Print header #
|
# Print header #
|
||||||
|
@ -58,98 +58,97 @@ ValidateInput()
|
||||||
# Validate GITHUB_WORKSPACE #
|
# Validate GITHUB_WORKSPACE #
|
||||||
#############################
|
#############################
|
||||||
if [ -z "$GITHUB_WORKSPACE" ]; then
|
if [ -z "$GITHUB_WORKSPACE" ]; then
|
||||||
echo "ERROR! Failed to get [GITHUB_WORKSPACE]!"
|
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to get [GITHUB_WORKSPACE]!${NC}"
|
||||||
echo "ERROR:[$GITHUB_WORKSPACE]"
|
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[$GITHUB_WORKSPACE]${NC}"
|
||||||
exit 1
|
exit 1
|
||||||
else
|
else
|
||||||
echo "Successfully found:[GITHUB_WORKSPACE], value:[$GITHUB_WORKSPACE]"
|
echo -e "${NC}${F[B]}Successfully found:${F[W]}[GITHUB_WORKSPACE]${F[B]}, value:${F[W]}[$GITHUB_WORKSPACE]${NC}"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
#####################
|
#####################
|
||||||
# Validate REGISTRY #
|
# Validate REGISTRY #
|
||||||
#####################
|
#####################
|
||||||
if [ -z "$REGISTRY" ]; then
|
if [ -z "$REGISTRY" ]; then
|
||||||
echo "ERROR! Failed to get [REGISTRY]!"
|
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to get [REGISTRY]!${NC}"
|
||||||
echo "ERROR:[$REGISTRY]"
|
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[$REGISTRY]${NC}"
|
||||||
exit 1
|
exit 1
|
||||||
else
|
else
|
||||||
echo "Successfully found:[REGISTRY], value:[$REGISTRY]"
|
echo -e "${NC}${F[B]}Successfully found:${F[W]}[REGISTRY]${F[B]}, value:${F[W]}[$REGISTRY]${NC}"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
#####################################################
|
#####################################################
|
||||||
# See if we need values for GitHub package Registry #
|
# See if we need values for GitHub package Registry #
|
||||||
#####################################################
|
#####################################################
|
||||||
if [[ "$REGISTRY" == "GPR" ]]; then
|
if [[ $REGISTRY == "GPR" ]]; then
|
||||||
#########################
|
#########################
|
||||||
# Validate GPR_USERNAME #
|
# Validate GPR_USERNAME #
|
||||||
#########################
|
#########################
|
||||||
if [ -z "$GPR_USERNAME" ]; then
|
if [ -z "$GPR_USERNAME" ]; then
|
||||||
echo "ERROR! Failed to get [GPR_USERNAME]!"
|
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to get [GPR_USERNAME]!${NC}"
|
||||||
echo "ERROR:[$GPR_USERNAME]"
|
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[$GPR_USERNAME]${NC}"
|
||||||
exit 1
|
exit 1
|
||||||
else
|
else
|
||||||
echo "Successfully found:[GPR_USERNAME], value:[$GPR_USERNAME]"
|
echo -e "${NC}${F[B]}Successfully found:${F[W]}[GPR_USERNAME]${F[B]}, value:${F[W]}[$GPR_USERNAME]${NC}"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
######################
|
######################
|
||||||
# Validate GPR_TOKEN #
|
# Validate GPR_TOKEN #
|
||||||
######################
|
######################
|
||||||
if [ -z "$GPR_TOKEN" ]; then
|
if [ -z "$GPR_TOKEN" ]; then
|
||||||
echo "ERROR! Failed to get [GPR_TOKEN]!"
|
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to get [GPR_TOKEN]!${NC}"
|
||||||
echo "ERROR:[$GPR_TOKEN]"
|
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[$GPR_TOKEN]${NC}"
|
||||||
exit 1
|
exit 1
|
||||||
else
|
else
|
||||||
echo "Successfully found:[GPR_TOKEN], value:[********]"
|
echo -e "${NC}${F[B]}Successfully found:${F[W]}[GPR_TOKEN]${F[B]}, value:${F[W]}[********]${NC}"
|
||||||
fi
|
fi
|
||||||
########################################
|
########################################
|
||||||
# See if we need values for Ducker hub #
|
# See if we need values for Ducker hub #
|
||||||
########################################
|
########################################
|
||||||
elif [[ "$REGISTRY" == "Docker" ]]; then
|
elif [[ $REGISTRY == "Docker" ]]; then
|
||||||
############################
|
############################
|
||||||
# Validate DOCKER_USERNAME #
|
# Validate DOCKER_USERNAME #
|
||||||
############################
|
############################
|
||||||
if [ -z "$DOCKER_USERNAME" ]; then
|
if [ -z "$DOCKER_USERNAME" ]; then
|
||||||
echo "ERROR! Failed to get [DOCKER_USERNAME]!"
|
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to get [DOCKER_USERNAME]!${NC}"
|
||||||
echo "ERROR:[$DOCKER_USERNAME]"
|
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[$DOCKER_USERNAME]${NC}"
|
||||||
exit 1
|
exit 1
|
||||||
else
|
else
|
||||||
echo "Successfully found:[DOCKER_USERNAME], value:[$DOCKER_USERNAME]"
|
echo -e "${NC}${F[B]}Successfully found:${F[W]}[DOCKER_USERNAME]${F[B]}, value:${F[W]}[$DOCKER_USERNAME]${NC}"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
############################
|
############################
|
||||||
# Validate DOCKER_PASSWORD #
|
# Validate DOCKER_PASSWORD #
|
||||||
############################
|
############################
|
||||||
if [ -z "$DOCKER_PASSWORD" ]; then
|
if [ -z "$DOCKER_PASSWORD" ]; then
|
||||||
echo "ERROR! Failed to get [DOCKER_PASSWORD]!"
|
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to get [DOCKER_PASSWORD]!${NC}"
|
||||||
echo "ERROR:[$DOCKER_PASSWORD]"
|
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[$DOCKER_PASSWORD]${NC}"
|
||||||
exit 1
|
exit 1
|
||||||
else
|
else
|
||||||
echo "Successfully found:[DOCKER_PASSWORD], value:[********]"
|
echo -e "${NC}${F[B]}Successfully found:${F[W]}[DOCKER_PASSWORD]${F[B]}, value:${F[B]}[********]${NC}"
|
||||||
fi
|
fi
|
||||||
###########################################
|
###########################################
|
||||||
# We were not passed a registry to update #
|
# We were not passed a registry to update #
|
||||||
###########################################
|
###########################################
|
||||||
else
|
else
|
||||||
echo "ERROR! Failed to find a valid registry!"
|
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to find a valid registry!${NC}"
|
||||||
echo "Registry:[$REGISTRY]"
|
echo "Registry:[$REGISTRY]"
|
||||||
exit 1
|
exit 1
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
|
||||||
#######################
|
#######################
|
||||||
# Validate IMAGE_REPO #
|
# Validate IMAGE_REPO #
|
||||||
#######################
|
#######################
|
||||||
if [ -z "$IMAGE_REPO" ]; then
|
if [ -z "$IMAGE_REPO" ]; then
|
||||||
echo "ERROR! Failed to get [IMAGE_REPO]!"
|
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to get [IMAGE_REPO]!${NC}"
|
||||||
echo "ERROR:[$IMAGE_REPO]"
|
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[$IMAGE_REPO]${NC}"
|
||||||
exit 1
|
exit 1
|
||||||
else
|
else
|
||||||
echo "Successfully found:[IMAGE_REPO], value:[$IMAGE_REPO]"
|
echo -e "${NC}${F[B]}Successfully found:${F[W]}[IMAGE_REPO]${F[B]}, value:${F[W]}[$IMAGE_REPO]${NC}"
|
||||||
###############################################
|
###############################################
|
||||||
# Need to see if GPR registry and update name #
|
# Need to see if GPR registry and update name #
|
||||||
###############################################
|
###############################################
|
||||||
if [[ "$REGISTRY" == "GPR" ]]; then
|
if [[ $REGISTRY == "GPR" ]]; then
|
||||||
NAME="docker.pkg.github/$IMAGE_REPO"
|
NAME="docker.pkg.github.com/$IMAGE_REPO/super-linter"
|
||||||
IMAGE_REPO="$NAME"
|
IMAGE_REPO="$NAME"
|
||||||
echo "Updated [IMAGE_REPO] to:[$IMAGE_REPO] for GPR"
|
echo "Updated [IMAGE_REPO] to:[$IMAGE_REPO] for GPR"
|
||||||
fi
|
fi
|
||||||
|
@ -159,12 +158,12 @@ ValidateInput()
|
||||||
# Validate IMAGE_VERSION #
|
# Validate IMAGE_VERSION #
|
||||||
##########################
|
##########################
|
||||||
if [ -z "$IMAGE_VERSION" ]; then
|
if [ -z "$IMAGE_VERSION" ]; then
|
||||||
echo "WARN! Failed to get [IMAGE_VERSION]!"
|
echo -e "${NC}${F[Y]}WARN!${NC} Failed to get [IMAGE_VERSION]!${NC}"
|
||||||
echo "Pulling from Branch Name..."
|
echo "Pulling from Branch Name..."
|
||||||
##############################
|
##############################
|
||||||
# Get the name of the branch #
|
# Get the name of the branch #
|
||||||
##############################
|
##############################
|
||||||
BRANCH_NAME=$(git branch --contains "$GITHUB_SHA" |awk '{print $2}' 2>&1)
|
BRANCH_NAME=$(git -C "$GITHUB_WORKSPACE" branch --contains "$GITHUB_SHA" | awk '{print $2}' 2>&1)
|
||||||
|
|
||||||
#######################
|
#######################
|
||||||
# Load the error code #
|
# Load the error code #
|
||||||
|
@ -175,8 +174,8 @@ ValidateInput()
|
||||||
# Check the shell for errors #
|
# Check the shell for errors #
|
||||||
##############################
|
##############################
|
||||||
if [ $ERROR_CODE -ne 0 ]; then
|
if [ $ERROR_CODE -ne 0 ]; then
|
||||||
echo "ERROR! Failed to get branch name!"
|
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to get branch name!${NC}"
|
||||||
echo "ERROR:[$BRANCH_NAME]"
|
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[$BRANCH_NAME]${NC}"
|
||||||
exit 1
|
exit 1
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
@ -191,31 +190,54 @@ ValidateInput()
|
||||||
IMAGE_VERSION="$BRANCH_NAME"
|
IMAGE_VERSION="$BRANCH_NAME"
|
||||||
echo "Tag:[$IMAGE_VERSION]"
|
echo "Tag:[$IMAGE_VERSION]"
|
||||||
else
|
else
|
||||||
echo "Successfully found:[IMAGE_VERSION], value:[$IMAGE_VERSION]"
|
echo -e "${NC}${F[B]}Successfully found:${F[W]}[IMAGE_VERSION]${F[B]}, value:${F[W]}[$IMAGE_VERSION]${NC}"
|
||||||
|
fi
|
||||||
|
|
||||||
|
##################################
|
||||||
|
# Set regex for getting tag info #
|
||||||
|
##################################
|
||||||
|
REGEX='(v[0-9]+\.[0-9]+\.[0-9]+)' # Matches 'v1.2.3'
|
||||||
|
|
||||||
|
######################################################################
|
||||||
|
# Check if this is a latest to a versioned release at create new tag #
|
||||||
|
######################################################################
|
||||||
|
if [[ $IMAGE_VERSION =~ $REGEX ]]; then
|
||||||
|
# Need to get the major version, and set flag to update
|
||||||
|
|
||||||
|
#####################
|
||||||
|
# Set the major tag #
|
||||||
|
#####################
|
||||||
|
MAJOR_TAG=$(echo "$IMAGE_VERSION" | cut -d '.' -f1)
|
||||||
|
|
||||||
|
###################################
|
||||||
|
# Set flag for updating major tag #
|
||||||
|
###################################
|
||||||
|
UPDATE_MAJOR_TAG=1
|
||||||
|
|
||||||
|
echo "- Also deploying a major tag of:[$MAJOR_TAG]"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
############################
|
############################
|
||||||
# Validate DOCKERFILE_PATH #
|
# Validate DOCKERFILE_PATH #
|
||||||
############################
|
############################
|
||||||
if [ -z "$DOCKERFILE_PATH" ]; then
|
if [ -z "$DOCKERFILE_PATH" ]; then
|
||||||
echo "ERROR! Failed to get [DOCKERFILE_PATH]!"
|
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to get [DOCKERFILE_PATH]!${NC}"
|
||||||
echo "ERROR:[$DOCKERFILE_PATH]"
|
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[$DOCKERFILE_PATH]${NC}"
|
||||||
exit 1
|
exit 1
|
||||||
else
|
else
|
||||||
echo "Successfully found:[DOCKERFILE_PATH], value:[$DOCKERFILE_PATH]"
|
echo -e "${NC}${F[B]}Successfully found:${F[W]}[DOCKERFILE_PATH]${F[B]}, value:${F[W]}[$DOCKERFILE_PATH]${NC}"
|
||||||
fi
|
fi
|
||||||
}
|
}
|
||||||
################################################################################
|
################################################################################
|
||||||
#### Function Authenticate #####################################################
|
#### Function Authenticate #####################################################
|
||||||
Authenticate()
|
Authenticate() {
|
||||||
{
|
|
||||||
################
|
################
|
||||||
# Pull in Vars #
|
# Pull in Vars #
|
||||||
################
|
################
|
||||||
USERNAME="$1" # Name to auth with
|
USERNAME="$1" # Name to auth with
|
||||||
PASSWORD="$2" # Password to auth with
|
PASSWORD="$2" # Password to auth with
|
||||||
URL="$3" # Url to auth towards
|
URL="$3" # Url to auth towards
|
||||||
NAME="$4" # name of the service
|
NAME="$4" # name of the service
|
||||||
|
|
||||||
################
|
################
|
||||||
# Print header #
|
# Print header #
|
||||||
|
@ -241,18 +263,17 @@ Authenticate()
|
||||||
##############################
|
##############################
|
||||||
if [ $ERROR_CODE -ne 0 ]; then
|
if [ $ERROR_CODE -ne 0 ]; then
|
||||||
# ERROR
|
# ERROR
|
||||||
echo "ERROR! Failed to authenticate to $NAME!"
|
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to authenticate to $NAME!${NC}"
|
||||||
echo "ERROR:[$LOGIN_CMD]"
|
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[$LOGIN_CMD]${NC}"
|
||||||
exit 1
|
exit 1
|
||||||
else
|
else
|
||||||
# SUCCESS
|
# SUCCESS
|
||||||
echo "Successfully authenticated to $NAME!"
|
echo -e "${NC}${F[B]}Successfully authenticated to ${F[C]}$NAME${F[B]}!${NC}"
|
||||||
fi
|
fi
|
||||||
}
|
}
|
||||||
################################################################################
|
################################################################################
|
||||||
#### Function BuildImage #######################################################
|
#### Function BuildImage #######################################################
|
||||||
BuildImage()
|
BuildImage() {
|
||||||
{
|
|
||||||
################
|
################
|
||||||
# Print header #
|
# Print header #
|
||||||
################
|
################
|
||||||
|
@ -267,7 +288,7 @@ BuildImage()
|
||||||
################################
|
################################
|
||||||
if [ ! -f "$DOCKERFILE_PATH" ]; then
|
if [ ! -f "$DOCKERFILE_PATH" ]; then
|
||||||
# No file found
|
# No file found
|
||||||
echo "ERROR! failed to find Dockerfile at:[$DOCKERFILE_PATH]"
|
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} failed to find Dockerfile at:[$DOCKERFILE_PATH]${NC}"
|
||||||
echo "Please make sure you give full path!"
|
echo "Please make sure you give full path!"
|
||||||
echo "Example:[/configs/Dockerfile] or [Dockerfile] if at root directory"
|
echo "Example:[/configs/Dockerfile] or [Dockerfile] if at root directory"
|
||||||
exit 1
|
exit 1
|
||||||
|
@ -288,18 +309,41 @@ BuildImage()
|
||||||
##############################
|
##############################
|
||||||
if [ $ERROR_CODE -ne 0 ]; then
|
if [ $ERROR_CODE -ne 0 ]; then
|
||||||
# ERROR
|
# ERROR
|
||||||
echo "ERROR! failed to [build] Dockerfile!"
|
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} failed to [build] Dockerfile!${NC}"
|
||||||
exit 1
|
exit 1
|
||||||
else
|
else
|
||||||
# SUCCESS
|
# SUCCESS
|
||||||
echo "Successfully Built image!"
|
echo -e "${NC}${F[B]}Successfully Built image!${NC}"
|
||||||
echo "Info:[$BUILD_CMD]"
|
fi
|
||||||
|
|
||||||
|
########################################################
|
||||||
|
# Need to see if we need to tag a major update as well #
|
||||||
|
########################################################
|
||||||
|
if [ $UPDATE_MAJOR_TAG -eq 1 ]; then
|
||||||
|
# Tag the image with the major tag as well
|
||||||
|
docker build -t "$IMAGE_REPO:$MAJOR_TAG" -f "$DOCKERFILE_PATH" . 2>&1
|
||||||
|
|
||||||
|
#######################
|
||||||
|
# Load the error code #
|
||||||
|
#######################
|
||||||
|
ERROR_CODE=$?
|
||||||
|
|
||||||
|
##############################
|
||||||
|
# Check the shell for errors #
|
||||||
|
##############################
|
||||||
|
if [ $ERROR_CODE -ne 0 ]; then
|
||||||
|
# ERROR
|
||||||
|
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} failed to [tag] Dockerfile!${NC}"
|
||||||
|
exit 1
|
||||||
|
else
|
||||||
|
# SUCCESS
|
||||||
|
echo -e "${NC}${F[B]}Successfully tagged image!${NC}"
|
||||||
|
fi
|
||||||
fi
|
fi
|
||||||
}
|
}
|
||||||
################################################################################
|
################################################################################
|
||||||
#### Function UploadImage ######################################################
|
#### Function UploadImage ######################################################
|
||||||
UploadImage()
|
UploadImage() {
|
||||||
{
|
|
||||||
################
|
################
|
||||||
# Print header #
|
# Print header #
|
||||||
################
|
################
|
||||||
|
@ -324,11 +368,11 @@ UploadImage()
|
||||||
##############################
|
##############################
|
||||||
if [ $ERROR_CODE -ne 0 ]; then
|
if [ $ERROR_CODE -ne 0 ]; then
|
||||||
# ERROR
|
# ERROR
|
||||||
echo "ERROR! failed to [upload] Dockerfile!"
|
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} failed to [upload] Dockerfile!${NC}"
|
||||||
exit 1
|
exit 1
|
||||||
else
|
else
|
||||||
# SUCCESS
|
# SUCCESS
|
||||||
echo "Successfully Uploaded Docker image to $REGISTRY!"
|
echo -e "${NC}${F[B]}Successfully Uploaded Docker image:${F[W]}[$IMAGE_VERSION]${F[B]} to ${F[C]}$REGISTRY${F[B]}!${NC}"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
#########################
|
#########################
|
||||||
|
@ -347,8 +391,8 @@ UploadImage()
|
||||||
##############################
|
##############################
|
||||||
if [ $ERROR_CODE -ne 0 ]; then
|
if [ $ERROR_CODE -ne 0 ]; then
|
||||||
# ERROR
|
# ERROR
|
||||||
echo "ERROR! Failed to get information about built Image!"
|
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to get information about built Image!${NC}"
|
||||||
echo "ERROR:[$GET_INFO_CMD]"
|
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[$GET_INFO_CMD]${NC}"
|
||||||
exit 1
|
exit 1
|
||||||
else
|
else
|
||||||
################
|
################
|
||||||
|
@ -357,8 +401,7 @@ UploadImage()
|
||||||
REPO=$(echo "$GET_INFO_CMD" | awk '{print $1}')
|
REPO=$(echo "$GET_INFO_CMD" | awk '{print $1}')
|
||||||
TAG=$(echo "$GET_INFO_CMD" | awk '{print $2}')
|
TAG=$(echo "$GET_INFO_CMD" | awk '{print $2}')
|
||||||
IMAGE_ID=$(echo "$GET_INFO_CMD" | awk '{print $3}')
|
IMAGE_ID=$(echo "$GET_INFO_CMD" | awk '{print $3}')
|
||||||
# shellcheck disable=SC2116
|
SIZE="${GET_INFO_CMD##* }"
|
||||||
SIZE=$(echo "${GET_INFO_CMD##* }")
|
|
||||||
|
|
||||||
###################
|
###################
|
||||||
# Print the goods #
|
# Print the goods #
|
||||||
|
@ -371,11 +414,37 @@ UploadImage()
|
||||||
echo "Size:[$SIZE]"
|
echo "Size:[$SIZE]"
|
||||||
echo "----------------------------------------------"
|
echo "----------------------------------------------"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
###############################################################
|
||||||
|
# Check if we need to upload the major tagged version as well #
|
||||||
|
###############################################################
|
||||||
|
if [ $UPDATE_MAJOR_TAG -eq 1 ]; then
|
||||||
|
############################################
|
||||||
|
# Upload the docker image that was created #
|
||||||
|
############################################
|
||||||
|
docker push "$IMAGE_REPO:$MAJOR_TAG" 2>&1
|
||||||
|
|
||||||
|
#######################
|
||||||
|
# Load the error code #
|
||||||
|
#######################
|
||||||
|
ERROR_CODE=$?
|
||||||
|
|
||||||
|
##############################
|
||||||
|
# Check the shell for errors #
|
||||||
|
##############################
|
||||||
|
if [ $ERROR_CODE -ne 0 ]; then
|
||||||
|
# ERROR
|
||||||
|
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} failed to [upload] MAJOR_TAG:[$MAJOR_TAG] Dockerfile!${NC}"
|
||||||
|
exit 1
|
||||||
|
else
|
||||||
|
# SUCCESS
|
||||||
|
echo -e "${NC}${F[B]}Successfully Uploaded TAGOR_TAG:${F[W]}[$MAJOR_TAG]${F[B]} Docker image to ${F[C]}$REGISTRY${F[B]}!${NC}"
|
||||||
|
fi
|
||||||
|
fi
|
||||||
}
|
}
|
||||||
################################################################################
|
################################################################################
|
||||||
#### Function Footer ###########################################################
|
#### Function Footer ###########################################################
|
||||||
Footer()
|
Footer() {
|
||||||
{
|
|
||||||
echo ""
|
echo ""
|
||||||
echo "-------------------------------------------------------"
|
echo "-------------------------------------------------------"
|
||||||
echo "The step has completed"
|
echo "The step has completed"
|
||||||
|
@ -404,14 +473,14 @@ BuildImage
|
||||||
######################
|
######################
|
||||||
# Login to DockerHub #
|
# Login to DockerHub #
|
||||||
######################
|
######################
|
||||||
if [[ "$REGISTRY" == "Docker" ]]; then
|
if [[ $REGISTRY == "Docker" ]]; then
|
||||||
# Authenticate "Username" "Password" "Url" "Name"
|
# Authenticate "Username" "Password" "Url" "Name"
|
||||||
Authenticate "$DOCKER_USERNAME" "$DOCKER_PASSWORD" "" "Dockerhub"
|
Authenticate "$DOCKER_USERNAME" "$DOCKER_PASSWORD" "" "Dockerhub"
|
||||||
|
|
||||||
####################################
|
####################################
|
||||||
# Login to GitHub Package Registry #
|
# Login to GitHub Package Registry #
|
||||||
####################################
|
####################################
|
||||||
elif [[ "$REGISTRY" == "GPR" ]]; then
|
elif [[ $REGISTRY == "GPR" ]]; then
|
||||||
# Authenticate "Username" "Password" "Url" "Name"
|
# Authenticate "Username" "Password" "Url" "Name"
|
||||||
Authenticate "$GPR_USERNAME" "$GPR_TOKEN" "https://docker.pkg.github.com" "GitHub Package Registry"
|
Authenticate "$GPR_USERNAME" "$GPR_TOKEN" "https://docker.pkg.github.com" "GitHub Package Registry"
|
||||||
|
|
||||||
|
@ -419,7 +488,7 @@ else
|
||||||
#########
|
#########
|
||||||
# ERROR #
|
# ERROR #
|
||||||
#########
|
#########
|
||||||
echo "ERROR! Registry not set correctly!"
|
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Registry not set correctly!${NC}"
|
||||||
echo "Registry:[$REGISTRY]"
|
echo "Registry:[$REGISTRY]"
|
||||||
exit 1
|
exit 1
|
||||||
fi
|
fi
|
||||||
|
|
10
.devcontainer/README.md
Normal file
10
.devcontainer/README.md
Normal file
|
@ -0,0 +1,10 @@
|
||||||
|
# Devcontainer
|
||||||
|
This file specifies to vscode how to run the container
|
||||||
|
|
||||||
|
For format details, see [documentation](https://aka.ms/vscode-remote/devcontainer.json) or this file's [README](https://github.com/microsoft/vscode-dev-containers/tree/v0.123.0/containers/docker-existing-dockerfile)
|
||||||
|
|
||||||
|
|
||||||
|
context: Sets the run context to one level up instead of the .devcontainer folder.
|
||||||
|
dockerFile: Update the 'dockerFile' property if you aren't using the standard 'Dockerfile' filename.
|
||||||
|
settings: Set *default* container specific settings.json values on container create.
|
||||||
|
extensions: Add the IDs of extensions you want installed when the container is created.
|
12
.devcontainer/devcontainer.json
Normal file
12
.devcontainer/devcontainer.json
Normal file
|
@ -0,0 +1,12 @@
|
||||||
|
{
|
||||||
|
"name": "SUPER-LINTER",
|
||||||
|
"context": "..",
|
||||||
|
"dockerFile": "..//Dockerfile",
|
||||||
|
"settings": {
|
||||||
|
"terminal.integrated.shell.linux": "/bin/bash"
|
||||||
|
},
|
||||||
|
"extensions": [
|
||||||
|
"github.vscode-pull-request-github",
|
||||||
|
"rogalmic.bash-debug"
|
||||||
|
]
|
||||||
|
}
|
3
.gitattributes
vendored
Normal file
3
.gitattributes
vendored
Normal file
|
@ -0,0 +1,3 @@
|
||||||
|
* text=auto eol=lf
|
||||||
|
*.{cmd,[cC][mM][dD]} text eol=crlf
|
||||||
|
*.{bat,[bB][aA][tT]} text eol=crlf
|
4
.github/CONTRIBUTING.md
vendored
4
.github/CONTRIBUTING.md
vendored
|
@ -31,8 +31,8 @@ Draft pull requests are also welcome to get feedback early on, or if there is so
|
||||||
|
|
||||||
## Releasing
|
## Releasing
|
||||||
If you are the current maintainer of this action:
|
If you are the current maintainer of this action:
|
||||||
1. Update `README.md` and the wiki to reflect new version number in the example workflow file sections
|
1. If a major version number change: Update `README.md` and the wiki to reflect new version number in the example workflow file sections
|
||||||
2. Draft [Release](https://help.github.com/en/github/administering-a-repository/managing-releases-in-a-repository) with a summarized changelog
|
2. Draft [Releases](https://help.github.com/en/github/administering-a-repository/managing-releases-in-a-repository) are created automatically. They just need to be checked over for accuracy before making it official.
|
||||||
3. Ensure you check the box for [publishing to the marketplace](https://help.github.com/en/actions/creating-actions/publishing-actions-in-github-marketplace#publishing-an-action)
|
3. Ensure you check the box for [publishing to the marketplace](https://help.github.com/en/actions/creating-actions/publishing-actions-in-github-marketplace#publishing-an-action)
|
||||||
4. A GitHub Action will Publish the Docker image to GitHub Package Registry once a Release is created
|
4. A GitHub Action will Publish the Docker image to GitHub Package Registry once a Release is created
|
||||||
5. A GitHub Action will Publish the Docker image to Docker Hub once a Release is created
|
5. A GitHub Action will Publish the Docker image to Docker Hub once a Release is created
|
||||||
|
|
7
.github/dependabot.yml
vendored
Normal file
7
.github/dependabot.yml
vendored
Normal file
|
@ -0,0 +1,7 @@
|
||||||
|
version: 2
|
||||||
|
updates:
|
||||||
|
- package-ecosystem: github-actions
|
||||||
|
directory: "/"
|
||||||
|
schedule:
|
||||||
|
interval: daily
|
||||||
|
open-pull-requests-limit: 10
|
2
.github/linters/.cfnlintrc.yml
vendored
Normal file
2
.github/linters/.cfnlintrc.yml
vendored
Normal file
|
@ -0,0 +1,2 @@
|
||||||
|
include_checks:
|
||||||
|
- I
|
18
.github/linters/.ecrc
vendored
Normal file
18
.github/linters/.ecrc
vendored
Normal file
|
@ -0,0 +1,18 @@
|
||||||
|
{
|
||||||
|
"Verbose": false,
|
||||||
|
"Debug": false,
|
||||||
|
"IgnoreDefaults": false,
|
||||||
|
"SpacesAftertabs": false,
|
||||||
|
"NoColor": false,
|
||||||
|
"Exclude": [],
|
||||||
|
"AllowedContentTypes": [],
|
||||||
|
"PassedFiles": [],
|
||||||
|
"Disable": {
|
||||||
|
"EndOfLine": false,
|
||||||
|
"Indentation": false,
|
||||||
|
"InsertFinalNewline": false,
|
||||||
|
"TrimTrailingWhitespace": false,
|
||||||
|
"IndentSize": false,
|
||||||
|
"MaxLineLength": false
|
||||||
|
}
|
||||||
|
}
|
25
.github/linters/.htmlhintrc
vendored
Normal file
25
.github/linters/.htmlhintrc
vendored
Normal file
|
@ -0,0 +1,25 @@
|
||||||
|
{
|
||||||
|
"tagname-lowercase": true,
|
||||||
|
"attr-lowercase": true,
|
||||||
|
"attr-value-double-quotes": true,
|
||||||
|
"attr-value-not-empty": false,
|
||||||
|
"attr-no-duplication": true,
|
||||||
|
"doctype-first": true,
|
||||||
|
"tag-pair": true,
|
||||||
|
"tag-self-close": false,
|
||||||
|
"spec-char-escape": true,
|
||||||
|
"id-unique": true,
|
||||||
|
"src-not-empty": true,
|
||||||
|
"title-require": true,
|
||||||
|
"alt-require": true,
|
||||||
|
"doctype-html5": true,
|
||||||
|
"id-class-value": "dash",
|
||||||
|
"style-disabled": false,
|
||||||
|
"inline-style-disabled": false,
|
||||||
|
"inline-script-disabled": false,
|
||||||
|
"space-tab-mixed-disabled": "space",
|
||||||
|
"id-class-ad-disabled": false,
|
||||||
|
"href-abs-or-rel": false,
|
||||||
|
"attr-unsafe-chars": true,
|
||||||
|
"head-script-disabled": true
|
||||||
|
}
|
7
.github/linters/.protolintrc.yml
vendored
Normal file
7
.github/linters/.protolintrc.yml
vendored
Normal file
|
@ -0,0 +1,7 @@
|
||||||
|
# Lint directives.
|
||||||
|
lint:
|
||||||
|
# Linter rules.
|
||||||
|
# Run `protolint list` to see all available rules.
|
||||||
|
rules:
|
||||||
|
# Set the default to all linters.
|
||||||
|
all_default: false
|
3
.github/linters/.python-lint
vendored
3
.github/linters/.python-lint
vendored
|
@ -1,4 +1,5 @@
|
||||||
[MASTER]
|
[MASTER]
|
||||||
|
errors-only=
|
||||||
|
|
||||||
# A comma-separated list of package or module names from where C extensions may
|
# A comma-separated list of package or module names from where C extensions may
|
||||||
# be loaded. Extensions are loading into the active Python interpreter and may
|
# be loaded. Extensions are loading into the active Python interpreter and may
|
||||||
|
@ -157,7 +158,7 @@ output-format=text
|
||||||
reports=no
|
reports=no
|
||||||
|
|
||||||
# Activate the evaluation score.
|
# Activate the evaluation score.
|
||||||
score=yes
|
score=no
|
||||||
|
|
||||||
|
|
||||||
[REFACTORING]
|
[REFACTORING]
|
||||||
|
|
16
.github/pull_request-template.md
vendored
Normal file
16
.github/pull_request-template.md
vendored
Normal file
|
@ -0,0 +1,16 @@
|
||||||
|
<!-- Please ensure your PR title is brief and descriptive for a good changelog entry -->
|
||||||
|
<!-- Link to issue if there is one -->
|
||||||
|
<!-- markdownlint-disable -->
|
||||||
|
Fixes #
|
||||||
|
<!-- markdownlint-restore -->
|
||||||
|
|
||||||
|
<!-- Describe what the changes are -->
|
||||||
|
## Proposed Changes
|
||||||
|
|
||||||
|
-
|
||||||
|
-
|
||||||
|
-
|
||||||
|
|
||||||
|
## Readiness Checklist
|
||||||
|
- [ ] Label as `breaking` if this is a large fundamental change
|
||||||
|
- [ ] Label as either `automation`, `bug`, `documentation`, `enhancement`, `infrastructure`, or `performance`
|
39
.github/release-drafter.yml
vendored
Normal file
39
.github/release-drafter.yml
vendored
Normal file
|
@ -0,0 +1,39 @@
|
||||||
|
name-template: 'v$RESOLVED_VERSION'
|
||||||
|
tag-template: 'v$RESOLVED_VERSION'
|
||||||
|
template: |
|
||||||
|
# Changelog
|
||||||
|
$CHANGES
|
||||||
|
|
||||||
|
See details of [all code changes](https://github.com/github/super-linter/compare/$PREVIOUS_TAG...v$RESOLVED_VERSION) since last release
|
||||||
|
|
||||||
|
categories:
|
||||||
|
- title: '🚀 Features'
|
||||||
|
labels:
|
||||||
|
- 'feature'
|
||||||
|
- 'enhancement'
|
||||||
|
- title: '🐛 Bug Fixes'
|
||||||
|
labels:
|
||||||
|
- 'fix'
|
||||||
|
- 'bugfix'
|
||||||
|
- 'bug'
|
||||||
|
- title: '🧰 Maintenance'
|
||||||
|
labels:
|
||||||
|
- 'infrastructure'
|
||||||
|
- 'automation'
|
||||||
|
- 'documentation'
|
||||||
|
- title: '🏎 Performance'
|
||||||
|
label: 'performance'
|
||||||
|
change-template: '- $TITLE @$AUTHOR (#$NUMBER)'
|
||||||
|
version-resolver:
|
||||||
|
major:
|
||||||
|
labels:
|
||||||
|
- 'type: breaking'
|
||||||
|
minor:
|
||||||
|
labels:
|
||||||
|
- 'type: enhancement'
|
||||||
|
patch:
|
||||||
|
labels:
|
||||||
|
- 'type: bug'
|
||||||
|
- 'type: maintenance'
|
||||||
|
- 'type: documentation'
|
||||||
|
default: patch
|
53
.github/workflows/cleanup-DEV.yml
vendored
53
.github/workflows/cleanup-DEV.yml
vendored
|
@ -1,53 +0,0 @@
|
||||||
---
|
|
||||||
##########################
|
|
||||||
##########################
|
|
||||||
## Cleanup Docker Image ##
|
|
||||||
##########################
|
|
||||||
##########################
|
|
||||||
|
|
||||||
#
|
|
||||||
# Documentation:
|
|
||||||
# https://help.github.com/en/articles/workflow-syntax-for-github-actions
|
|
||||||
#
|
|
||||||
|
|
||||||
########################################
|
|
||||||
# Run job when PR is merged and closed #
|
|
||||||
########################################
|
|
||||||
|
|
||||||
on:
|
|
||||||
pull_request:
|
|
||||||
types: [closed]
|
|
||||||
|
|
||||||
###############
|
|
||||||
# Set the Job #
|
|
||||||
###############
|
|
||||||
jobs:
|
|
||||||
build:
|
|
||||||
# Name the Job
|
|
||||||
name: Cleanup Docker Image - DEV
|
|
||||||
# Set the agent to run on
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
##################
|
|
||||||
# Load all steps #
|
|
||||||
##################
|
|
||||||
steps:
|
|
||||||
##########################
|
|
||||||
# Checkout the code base #
|
|
||||||
##########################
|
|
||||||
- name: Checkout Code
|
|
||||||
uses: actions/checkout@v2
|
|
||||||
with:
|
|
||||||
ref: ${{ github.event.pull_request.head.sha }}
|
|
||||||
|
|
||||||
######################
|
|
||||||
# Run Removal script #
|
|
||||||
######################
|
|
||||||
- name: Remove old image from DockerHub
|
|
||||||
env:
|
|
||||||
# Set the Env Vars
|
|
||||||
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
|
|
||||||
DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
|
|
||||||
IMAGE_REPO: github/super-linter
|
|
||||||
IMAGE_VERSION: ${{ github.event.pull_request.head.ref }}
|
|
||||||
shell: bash
|
|
||||||
run: .automation/cleanup-docker.sh
|
|
33
.github/workflows/deploy-DEV.yml
vendored
33
.github/workflows/deploy-DEV.yml
vendored
|
@ -20,6 +20,7 @@ on:
|
||||||
push:
|
push:
|
||||||
branches-ignore:
|
branches-ignore:
|
||||||
- 'master'
|
- 'master'
|
||||||
|
pull_request: []
|
||||||
|
|
||||||
###############
|
###############
|
||||||
# Set the Job #
|
# Set the Job #
|
||||||
|
@ -30,6 +31,8 @@ jobs:
|
||||||
name: Deploy Docker Image - DEV
|
name: Deploy Docker Image - DEV
|
||||||
# Set the agent to run on
|
# Set the agent to run on
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
# Prevent duplicate run from happening when a forked push is committed
|
||||||
|
if: github.event_name == 'push' || github.event.pull_request.head.repo.full_name != github.repository
|
||||||
##################
|
##################
|
||||||
# Load all steps #
|
# Load all steps #
|
||||||
##################
|
##################
|
||||||
|
@ -38,34 +41,18 @@ jobs:
|
||||||
# Checkout the code base #
|
# Checkout the code base #
|
||||||
##########################
|
##########################
|
||||||
- name: Checkout Code
|
- name: Checkout Code
|
||||||
uses: actions/checkout@v2
|
uses: actions/checkout@v2.3.1
|
||||||
|
|
||||||
#####################
|
###################################
|
||||||
# Run Deploy script #
|
# Build image locally for testing #
|
||||||
#####################
|
###################################
|
||||||
- name: Deploy DEV image to DockerHub
|
- name: Build image
|
||||||
env:
|
|
||||||
# Set the Env Vars
|
|
||||||
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
|
|
||||||
DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
|
|
||||||
IMAGE_REPO: github/super-linter
|
|
||||||
DOCKERFILE_PATH: Dockerfile
|
|
||||||
REGISTRY: Docker
|
|
||||||
shell: bash
|
shell: bash
|
||||||
run: .automation/upload-docker.sh
|
run: docker build --no-cache -t github/super-linter:${GITHUB_SHA} .
|
||||||
|
|
||||||
#######################
|
|
||||||
# Get the branch name #
|
|
||||||
#######################
|
|
||||||
- name: Extract branch name
|
|
||||||
shell: bash
|
|
||||||
run: echo "::set-env name=BRANCH_NAME::$(echo ${GITHUB_REF#refs/heads/} | sed 's/[^[:alnum:]]//g')"
|
|
||||||
|
|
||||||
################################
|
################################
|
||||||
# Run Linter against code base #
|
# Run Linter against code base #
|
||||||
################################
|
################################
|
||||||
- name: Run Test Cases
|
- name: Run Test Cases
|
||||||
shell: bash
|
shell: bash
|
||||||
run: |
|
run: docker run -e RUN_LOCAL=true -e TEST_CASE_RUN=true -v ${GITHUB_WORKSPACE}:/tmp/lint github/super-linter:${GITHUB_SHA}
|
||||||
docker pull github/super-linter:${BRANCH_NAME}
|
|
||||||
docker run -e RUN_LOCAL=true -e TEST_CASE_RUN=true -v ${GITHUB_WORKSPACE}:/tmp/lint github/super-linter:${BRANCH_NAME}
|
|
||||||
|
|
2
.github/workflows/deploy-PROD.yml
vendored
2
.github/workflows/deploy-PROD.yml
vendored
|
@ -35,7 +35,7 @@ jobs:
|
||||||
# Checkout the code base #
|
# Checkout the code base #
|
||||||
##########################
|
##########################
|
||||||
- name: Checkout Code
|
- name: Checkout Code
|
||||||
uses: actions/checkout@v2
|
uses: actions/checkout@v2.3.1
|
||||||
|
|
||||||
#####################
|
#####################
|
||||||
# Run Deploy script #
|
# Run Deploy script #
|
||||||
|
|
2
.github/workflows/deploy-RELEASE.yml
vendored
2
.github/workflows/deploy-RELEASE.yml
vendored
|
@ -35,7 +35,7 @@ jobs:
|
||||||
# Checkout the code base #
|
# Checkout the code base #
|
||||||
##########################
|
##########################
|
||||||
- name: Checkout Code
|
- name: Checkout Code
|
||||||
uses: actions/checkout@v2
|
uses: actions/checkout@v2.3.1
|
||||||
|
|
||||||
###################################
|
###################################
|
||||||
# Run Deploy script for Dockerhub #
|
# Run Deploy script for Dockerhub #
|
||||||
|
|
16
.github/workflows/draft-release.yml
vendored
Normal file
16
.github/workflows/draft-release.yml
vendored
Normal file
|
@ -0,0 +1,16 @@
|
||||||
|
name: Release Drafter
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
# branches to consider in the event; optional, defaults to all
|
||||||
|
branches:
|
||||||
|
- master
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
update_release_draft:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
# Drafts your next Release notes as Pull Requests are merged into "master"
|
||||||
|
- uses: release-drafter/release-drafter@v5
|
||||||
|
env:
|
||||||
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
6
.github/workflows/stack-linter.yml
vendored
6
.github/workflows/stack-linter.yml
vendored
|
@ -15,8 +15,8 @@
|
||||||
#############################
|
#############################
|
||||||
on:
|
on:
|
||||||
push:
|
push:
|
||||||
branches-ignore:
|
branches: master
|
||||||
- 'master'
|
pull_request: []
|
||||||
|
|
||||||
###############
|
###############
|
||||||
# Set the Job #
|
# Set the Job #
|
||||||
|
@ -35,7 +35,7 @@ jobs:
|
||||||
# Checkout the code base #
|
# Checkout the code base #
|
||||||
##########################
|
##########################
|
||||||
- name: Checkout Code
|
- name: Checkout Code
|
||||||
uses: actions/checkout@v2
|
uses: actions/checkout@v2.3.1
|
||||||
|
|
||||||
################################
|
################################
|
||||||
# Run Linter against code base #
|
# Run Linter against code base #
|
||||||
|
|
61
.github/workflows/stale.yml
vendored
Normal file
61
.github/workflows/stale.yml
vendored
Normal file
|
@ -0,0 +1,61 @@
|
||||||
|
---
|
||||||
|
###############################
|
||||||
|
###############################
|
||||||
|
## StaleBot for Super-Linter ##
|
||||||
|
###############################
|
||||||
|
###############################
|
||||||
|
on:
|
||||||
|
schedule:
|
||||||
|
# every day at 0:00 UTC
|
||||||
|
- cron: "0 0 * * *"
|
||||||
|
issue_comment:
|
||||||
|
types: [created, deleted, edited]
|
||||||
|
|
||||||
|
###################
|
||||||
|
# Name of the Job #
|
||||||
|
###################
|
||||||
|
name: "Stale[bot]"
|
||||||
|
|
||||||
|
###############
|
||||||
|
# Run the job #
|
||||||
|
###############
|
||||||
|
jobs:
|
||||||
|
#######################
|
||||||
|
# Mark an Issue Stale #
|
||||||
|
#######################
|
||||||
|
markstale:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
# only run on schedule
|
||||||
|
if: "github.event_name == 'schedule'"
|
||||||
|
steps:
|
||||||
|
- name: Mark issue stale
|
||||||
|
uses: actions/stale@v3.0.7
|
||||||
|
with:
|
||||||
|
repo-token: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
stale-issue-message: "This issue has been automatically marked as stale because it has not had recent activity.\nIt will be closed in 14 days if no further activity occurs.\nThank you for your contributions.\n\nIf you think this issue should stay open, please remove the `O: stale 🤖` label or comment on the issue."
|
||||||
|
stale-pr-message: "This pull request has been automatically marked as stale because it has not had recent activity.\nIt will be closed in 14 days if no further activity occurs.\nThank you for your contributions.\n\nIf you think this pull request should stay open, please remove the `O: stale 🤖` label or comment on the pull request."
|
||||||
|
days-before-stale: 30
|
||||||
|
days-before-close: 14
|
||||||
|
stale-issue-label: "O: stale 🤖"
|
||||||
|
exempt-issue-label: "O: backlog 🤖"
|
||||||
|
stale-pr-label: "O: stale 🤖"
|
||||||
|
exempt-pr-label: "O: backlog 🤖"
|
||||||
|
|
||||||
|
##################
|
||||||
|
# Mark not stale #
|
||||||
|
##################
|
||||||
|
marknotstale:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
# do not run on schedule
|
||||||
|
if: "github.event_name == 'issue_comment' && contains(github.event.issue.labels.*.name, 'O: stale 🤖') && github.event.issue.user.type != 'Bot'"
|
||||||
|
steps:
|
||||||
|
- name: Mark issue not stale
|
||||||
|
uses: actions/github-script@v2
|
||||||
|
with:
|
||||||
|
script: |
|
||||||
|
github.issues.removeLabel({
|
||||||
|
issue_number: context.issue.number,
|
||||||
|
owner: context.repo.owner,
|
||||||
|
repo: context.repo.repo,
|
||||||
|
name: 'O: stale 🤖'
|
||||||
|
})
|
44
.github/workflows/versioning.yml
vendored
Normal file
44
.github/workflows/versioning.yml
vendored
Normal file
|
@ -0,0 +1,44 @@
|
||||||
|
---
|
||||||
|
#########################
|
||||||
|
#########################
|
||||||
|
## Version GitHub Tags ##
|
||||||
|
#########################
|
||||||
|
#########################
|
||||||
|
|
||||||
|
#
|
||||||
|
# Documentation:
|
||||||
|
# https://help.github.com/en/articles/workflow-syntax-for-github-actions
|
||||||
|
#
|
||||||
|
|
||||||
|
##########################
|
||||||
|
# Name of the action job #
|
||||||
|
##########################
|
||||||
|
name: Keep GitHub tag versions up-to-date
|
||||||
|
|
||||||
|
#####################################################
|
||||||
|
# Run the job when a release is published or edited #
|
||||||
|
#####################################################
|
||||||
|
on:
|
||||||
|
release:
|
||||||
|
types: [published, edited]
|
||||||
|
|
||||||
|
#################
|
||||||
|
# Start the job #
|
||||||
|
#################
|
||||||
|
jobs:
|
||||||
|
actions-tagger:
|
||||||
|
runs-on: windows-latest
|
||||||
|
steps:
|
||||||
|
#############################
|
||||||
|
# Check out the latest code #
|
||||||
|
#############################
|
||||||
|
- uses: actions/checkout@v2.3.1
|
||||||
|
|
||||||
|
######################
|
||||||
|
# Run the tag action #
|
||||||
|
######################
|
||||||
|
- uses: Actions-R-Us/actions-tagger@v2.0.1
|
||||||
|
with:
|
||||||
|
publish_latest_tag: true
|
||||||
|
env:
|
||||||
|
GITHUB_TOKEN: "${{secrets.GITHUB_TOKEN}}"
|
16
.vscode/launch.json
vendored
Normal file
16
.vscode/launch.json
vendored
Normal file
|
@ -0,0 +1,16 @@
|
||||||
|
{
|
||||||
|
"version": "0.2.0",
|
||||||
|
"configurations": [
|
||||||
|
{
|
||||||
|
"type": "bashdb",
|
||||||
|
"request": "launch",
|
||||||
|
"name": "Test Linter",
|
||||||
|
"program": ".vscode/testlinter.sh",
|
||||||
|
"cwd": "${workspaceFolder}",
|
||||||
|
"internalConsoleOptions": "openOnSessionStart",
|
||||||
|
"presentation": {
|
||||||
|
"group": "aLinter"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
26
.vscode/testlinter.sh
vendored
Normal file
26
.vscode/testlinter.sh
vendored
Normal file
|
@ -0,0 +1,26 @@
|
||||||
|
#!/usr/bin/env bash
|
||||||
|
|
||||||
|
# At this point you can use the debug console to add export GITHUB_WORKSPACE=/path to test only a specific folder
|
||||||
|
# You can also use ln -s /path /tmp/lint as an alternative
|
||||||
|
# If you do neither, this will default to running against the test automation files
|
||||||
|
|
||||||
|
###########
|
||||||
|
# GLOBALS #
|
||||||
|
###########
|
||||||
|
CODE_PATH='/tmp/lint' # Path to code base
|
||||||
|
|
||||||
|
##################
|
||||||
|
# Check the path #
|
||||||
|
##################
|
||||||
|
if [ ! -L $CODE_PATH ]; then
|
||||||
|
# Create symbolic link
|
||||||
|
ln -s "$PWD"/.automation/test $CODE_PATH
|
||||||
|
fi
|
||||||
|
|
||||||
|
#########################
|
||||||
|
# Export to run locally #
|
||||||
|
#########################
|
||||||
|
export RUN_LOCAL=true
|
||||||
|
|
||||||
|
# shellcheck source=/dev/null
|
||||||
|
source "$PWD"/lib/linter.sh
|
61
Dockerfile
61
Dockerfile
|
@ -18,6 +18,21 @@ LABEL com.github.actions.name="GitHub Super-Linter" \
|
||||||
com.github.actions.color="red" \
|
com.github.actions.color="red" \
|
||||||
maintainer="GitHub DevOps <github_devops@github.com>"
|
maintainer="GitHub DevOps <github_devops@github.com>"
|
||||||
|
|
||||||
|
################################
|
||||||
|
# Set ARG values used in Build #
|
||||||
|
################################
|
||||||
|
# PowerShell & PSScriptAnalyzer
|
||||||
|
ARG PWSH_VERSION='latest'
|
||||||
|
ARG PWSH_DIRECTORY='/opt/microsoft/powershell'
|
||||||
|
ARG PSSA_VERSION='latest'
|
||||||
|
# arm-ttk
|
||||||
|
ARG ARM_TTK_URI='https://github.com/Azure/arm-ttk.git'
|
||||||
|
ARG ARM_TTK_DIRECTORY='/opt/microsoft/arm-ttk'
|
||||||
|
# clj-kondo
|
||||||
|
ARG CLJ_KONDO_VERSION='2020.06.21'
|
||||||
|
# Go Linter
|
||||||
|
ARG GO_VERSION='v1.27.0'
|
||||||
|
|
||||||
####################
|
####################
|
||||||
# Run APK installs #
|
# Run APK installs #
|
||||||
####################
|
####################
|
||||||
|
@ -39,21 +54,32 @@ RUN apk add --no-cache \
|
||||||
#########################################
|
#########################################
|
||||||
# Reference: https://docs.microsoft.com/en-us/powershell/scripting/install/installing-powershell-core-on-linux?view=powershell-7
|
# Reference: https://docs.microsoft.com/en-us/powershell/scripting/install/installing-powershell-core-on-linux?view=powershell-7
|
||||||
# Slightly modified to always retrieve latest stable Powershell version
|
# Slightly modified to always retrieve latest stable Powershell version
|
||||||
RUN mkdir -p /opt/microsoft/powershell/7 \
|
# If changing PWSH_VERSION='latest' to a specific version, use format PWSH_VERSION='tags/v7.0.2'
|
||||||
&& curl -s https://api.github.com/repos/powershell/powershell/releases/latest \
|
RUN mkdir -p ${PWSH_DIRECTORY} \
|
||||||
|
&& curl -s https://api.github.com/repos/powershell/powershell/releases/${PWSH_VERSION} \
|
||||||
| grep browser_download_url \
|
| grep browser_download_url \
|
||||||
| grep linux-alpine-x64 \
|
| grep linux-alpine-x64 \
|
||||||
| cut -d '"' -f 4 \
|
| cut -d '"' -f 4 \
|
||||||
| xargs -n 1 wget -O - \
|
| xargs -n 1 wget -O - \
|
||||||
| tar -xzC /opt/microsoft/powershell/7 \
|
| tar -xzC ${PWSH_DIRECTORY} \
|
||||||
&& ln -s /opt/microsoft/powershell/7/pwsh /usr/bin/pwsh \
|
&& ln -sf ${PWSH_DIRECTORY}/pwsh /usr/bin/pwsh \
|
||||||
&& pwsh -c 'install-module psscriptanalyzer -force'
|
&& pwsh -c 'Install-Module -Name PSScriptAnalyzer -RequiredVersion ${PSSA_VERSION} -Scope AllUsers -Force'
|
||||||
|
|
||||||
|
#############################################################
|
||||||
|
# Install Azure Resource Manager Template Toolkit (arm-ttk) #
|
||||||
|
#############################################################
|
||||||
|
# Depends on PowerShell
|
||||||
|
# Reference https://github.com/Azure/arm-ttk
|
||||||
|
# Reference https://docs.microsoft.com/en-us/azure/azure-resource-manager/templates/test-toolkit
|
||||||
|
ENV ARM_TTK_PSD1="${ARM_TTK_DIRECTORY}/arm-ttk/arm-ttk.psd1"
|
||||||
|
RUN git clone "${ARM_TTK_URI}" "${ARM_TTK_DIRECTORY}" \
|
||||||
|
&& ln -sTf "$ARM_TTK_PSD1" /usr/bin/arm-ttk
|
||||||
|
|
||||||
#####################
|
#####################
|
||||||
# Run Pip3 Installs #
|
# Run Pip3 Installs #
|
||||||
#####################
|
#####################
|
||||||
RUN pip3 --no-cache-dir install --upgrade --no-cache-dir \
|
RUN pip3 --no-cache-dir install --upgrade --no-cache-dir \
|
||||||
yamllint pylint yq
|
yamllint pylint yq cfn-lint shyaml
|
||||||
|
|
||||||
####################
|
####################
|
||||||
# Run NPM Installs #
|
# Run NPM Installs #
|
||||||
|
@ -73,6 +99,7 @@ RUN npm config set package-lock false \
|
||||||
stylelint \
|
stylelint \
|
||||||
stylelint-config-standard \
|
stylelint-config-standard \
|
||||||
@stoplight/spectral \
|
@stoplight/spectral \
|
||||||
|
htmlhint \
|
||||||
&& npm --no-cache install \
|
&& npm --no-cache install \
|
||||||
markdownlint-cli \
|
markdownlint-cli \
|
||||||
jsonlint prettyjson \
|
jsonlint prettyjson \
|
||||||
|
@ -86,7 +113,8 @@ RUN npm config set package-lock false \
|
||||||
@typescript-eslint/parser \
|
@typescript-eslint/parser \
|
||||||
eslint-plugin-jest \
|
eslint-plugin-jest \
|
||||||
stylelint \
|
stylelint \
|
||||||
stylelint-config-standard
|
stylelint-config-standard \
|
||||||
|
htmlhint
|
||||||
|
|
||||||
####################################
|
####################################
|
||||||
# Install dockerfilelint from repo #
|
# Install dockerfilelint from repo #
|
||||||
|
@ -117,7 +145,6 @@ RUN wget -qO- "https://github.com/koalaman/shellcheck/releases/download/stable/s
|
||||||
#####################
|
#####################
|
||||||
# Install Go Linter #
|
# Install Go Linter #
|
||||||
#####################
|
#####################
|
||||||
ARG GO_VERSION='v1.27.0'
|
|
||||||
RUN wget -O- -nvq https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s "$GO_VERSION"
|
RUN wget -O- -nvq https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s "$GO_VERSION"
|
||||||
|
|
||||||
##################
|
##################
|
||||||
|
@ -126,6 +153,14 @@ RUN wget -O- -nvq https://raw.githubusercontent.com/golangci/golangci-lint/maste
|
||||||
RUN curl -Ls "$(curl -Ls https://api.github.com/repos/terraform-linters/tflint/releases/latest | grep -o -E "https://.+?_linux_amd64.zip")" -o tflint.zip && unzip tflint.zip && rm tflint.zip \
|
RUN curl -Ls "$(curl -Ls https://api.github.com/repos/terraform-linters/tflint/releases/latest | grep -o -E "https://.+?_linux_amd64.zip")" -o tflint.zip && unzip tflint.zip && rm tflint.zip \
|
||||||
&& mv "tflint" /usr/bin/
|
&& mv "tflint" /usr/bin/
|
||||||
|
|
||||||
|
######################
|
||||||
|
# Install protolint #
|
||||||
|
######################
|
||||||
|
RUN curl -LsS "$(curl -Ls https://api.github.com/repos/yoheimuta/protolint/releases/latest | grep -o -E "https://.+?_Linux_x86_64.tar.gz")" -o protolint.tar.gz \
|
||||||
|
&& tar -xzf protolint.tar.gz \
|
||||||
|
&& rm protolint.tar.gz \
|
||||||
|
&& mv "protolint" /usr/bin/
|
||||||
|
|
||||||
#########################
|
#########################
|
||||||
# Install dotenv-linter #
|
# Install dotenv-linter #
|
||||||
#########################
|
#########################
|
||||||
|
@ -135,7 +170,6 @@ RUN wget "https://github.com/dotenv-linter/dotenv-linter/releases/latest/downloa
|
||||||
#####################
|
#####################
|
||||||
# Install clj-kondo #
|
# Install clj-kondo #
|
||||||
#####################
|
#####################
|
||||||
ARG CLJ_KONDO_VERSION='2020.06.12'
|
|
||||||
RUN curl -sLO https://github.com/borkdude/clj-kondo/releases/download/v${CLJ_KONDO_VERSION}/clj-kondo-${CLJ_KONDO_VERSION}-linux-static-amd64.zip \
|
RUN curl -sLO https://github.com/borkdude/clj-kondo/releases/download/v${CLJ_KONDO_VERSION}/clj-kondo-${CLJ_KONDO_VERSION}-linux-static-amd64.zip \
|
||||||
&& unzip clj-kondo-${CLJ_KONDO_VERSION}-linux-static-amd64.zip \
|
&& unzip clj-kondo-${CLJ_KONDO_VERSION}-linux-static-amd64.zip \
|
||||||
&& rm clj-kondo-${CLJ_KONDO_VERSION}-linux-static-amd64.zip \
|
&& rm clj-kondo-${CLJ_KONDO_VERSION}-linux-static-amd64.zip \
|
||||||
|
@ -147,6 +181,12 @@ RUN curl -sLO https://github.com/borkdude/clj-kondo/releases/download/v${CLJ_KON
|
||||||
RUN curl -sSLO https://github.com/pinterest/ktlint/releases/latest/download/ktlint && chmod a+x ktlint \
|
RUN curl -sSLO https://github.com/pinterest/ktlint/releases/latest/download/ktlint && chmod a+x ktlint \
|
||||||
&& mv "ktlint" /usr/bin/
|
&& mv "ktlint" /usr/bin/
|
||||||
|
|
||||||
|
################################
|
||||||
|
# Install editorconfig-checker #
|
||||||
|
################################
|
||||||
|
RUN wget -qO- "https://github.com/editorconfig-checker/editorconfig-checker/releases/latest/download/ec-linux-amd64.tar.gz" | tar -xzf - \
|
||||||
|
&& mv "bin/ec-linux-amd64" /usr/bin/editorconfig-checker
|
||||||
|
|
||||||
###########################################
|
###########################################
|
||||||
# Load GitHub Env Vars for GitHub Actions #
|
# Load GitHub Env Vars for GitHub Actions #
|
||||||
###########################################
|
###########################################
|
||||||
|
@ -179,7 +219,10 @@ ENV GITHUB_SHA=${GITHUB_SHA} \
|
||||||
VALIDATE_CLOJURE=${VALIDATE_CLOJURE} \
|
VALIDATE_CLOJURE=${VALIDATE_CLOJURE} \
|
||||||
VALIDATE_KOTLIN=${VALIDATE_KOTLIN} \
|
VALIDATE_KOTLIN=${VALIDATE_KOTLIN} \
|
||||||
VALIDATE_POWERSHELL=${VALIDATE_POWERSHELL} \
|
VALIDATE_POWERSHELL=${VALIDATE_POWERSHELL} \
|
||||||
|
VALIDATE_ARM=${VALIDATE_ARM} \
|
||||||
VALIDATE_OPENAPI=${VALIDATE_OPENAPI} \
|
VALIDATE_OPENAPI=${VALIDATE_OPENAPI} \
|
||||||
|
VALIDATE_PROTOBUF=${VALIDATE_PROTOBUF} \
|
||||||
|
VALIDATE_EDITORCONFIG=${VALIDATE_EDITORCONFIG} \
|
||||||
ANSIBLE_DIRECTORY=${ANSIBLE_DIRECTORY} \
|
ANSIBLE_DIRECTORY=${ANSIBLE_DIRECTORY} \
|
||||||
RUN_LOCAL=${RUN_LOCAL} \
|
RUN_LOCAL=${RUN_LOCAL} \
|
||||||
TEST_CASE_RUN=${TEST_CASE_RUN} \
|
TEST_CASE_RUN=${TEST_CASE_RUN} \
|
||||||
|
|
52
README.md
52
README.md
|
@ -8,7 +8,6 @@ The end goal of this tool:
|
||||||
- Build guidelines for code layout and format
|
- Build guidelines for code layout and format
|
||||||
- Automate the process to help streamline code reviews
|
- Automate the process to help streamline code reviews
|
||||||
|
|
||||||
|
|
||||||
## Table of Contents
|
## Table of Contents
|
||||||
|
|
||||||
- [How it works](#how-it-works)
|
- [How it works](#how-it-works)
|
||||||
|
@ -35,16 +34,25 @@ Developers on **GitHub** can call the **GitHub Action** to lint their code base
|
||||||
| *Language* | *Linter* |
|
| *Language* | *Linter* |
|
||||||
| --- | --- |
|
| --- | --- |
|
||||||
| **Ansible** | [ansible-lint](https://github.com/ansible/ansible-lint) |
|
| **Ansible** | [ansible-lint](https://github.com/ansible/ansible-lint) |
|
||||||
|
| **Azure Resource Manager (ARM)** | [arm-ttk](https://github.com/azure/arm-ttk) |
|
||||||
|
| **AWS CloudFormation templates** | [cfn-lint](https://github.com/aws-cloudformation/cfn-python-lint/) |
|
||||||
| **CSS** | [stylelint](https://stylelint.io/) |
|
| **CSS** | [stylelint](https://stylelint.io/) |
|
||||||
| **Clojure** | [clj-kondo](https://github.com/borkdude/clj-kondo) |
|
| **Clojure** | [clj-kondo](https://github.com/borkdude/clj-kondo) |
|
||||||
| **CoffeeScript** | [coffeelint](https://coffeelint.github.io/) |
|
| **CoffeeScript** | [coffeelint](https://coffeelint.github.io/) |
|
||||||
| **Dockerfile** | [dockerfilelint](https://github.com/replicatedhq/dockerfilelint.git) |
|
| **Dockerfile** | [dockerfilelint](https://github.com/replicatedhq/dockerfilelint.git) |
|
||||||
|
| **EDITORCONFIG** | [editorconfig-checker](https://github.com/editorconfig-checker/editorconfig-checker) |
|
||||||
|
| **ENV** | [dotenv-linter](https://github.com/dotenv-linter/dotenv-linter) |
|
||||||
| **Golang** | [golangci-lint](https://github.com/golangci/golangci-lint) |
|
| **Golang** | [golangci-lint](https://github.com/golangci/golangci-lint) |
|
||||||
|
| **HTMLHint** | [HTMLHint](https://github.com/htmlhint/HTMLHint) |
|
||||||
| **JavaScript** | [eslint](https://eslint.org/) [standard js](https://standardjs.com/) |
|
| **JavaScript** | [eslint](https://eslint.org/) [standard js](https://standardjs.com/) |
|
||||||
| **JSON** | [jsonlint](https://github.com/zaach/jsonlint) |
|
| **JSON** | [jsonlint](https://github.com/zaach/jsonlint) |
|
||||||
|
| **Kotlin** | [ktlint](https://github.com/pinterest/ktlint) |
|
||||||
| **Markdown** | [markdownlint](https://github.com/igorshubovych/markdownlint-cli#readme) |
|
| **Markdown** | [markdownlint](https://github.com/igorshubovych/markdownlint-cli#readme) |
|
||||||
|
| **OpenAPI** | [spectral](https://github.com/stoplightio/spectral) |
|
||||||
| **Perl** | [perl](https://pkgs.alpinelinux.org/package/edge/main/x86/perl) |
|
| **Perl** | [perl](https://pkgs.alpinelinux.org/package/edge/main/x86/perl) |
|
||||||
| **PHP** | [PHP](https://www.php.net/) |
|
| **PHP** | [PHP](https://www.php.net/) |
|
||||||
|
| **PowerShell** | [PSScriptAnalyzer](https://github.com/PowerShell/Psscriptanalyzer) |
|
||||||
|
| **Protocol Buffers** | [protolint](https://github.com/yoheimuta/protolint) |
|
||||||
| **Python3** | [pylint](https://www.pylint.org/) |
|
| **Python3** | [pylint](https://www.pylint.org/) |
|
||||||
| **Ruby** | [RuboCop](https://github.com/rubocop-hq/rubocop) |
|
| **Ruby** | [RuboCop](https://github.com/rubocop-hq/rubocop) |
|
||||||
| **Shell** | [Shellcheck](https://github.com/koalaman/shellcheck) |
|
| **Shell** | [Shellcheck](https://github.com/koalaman/shellcheck) |
|
||||||
|
@ -52,16 +60,17 @@ Developers on **GitHub** can call the **GitHub Action** to lint their code base
|
||||||
| **TypeScript** | [eslint](https://eslint.org/) [standard js](https://standardjs.com/) |
|
| **TypeScript** | [eslint](https://eslint.org/) [standard js](https://standardjs.com/) |
|
||||||
| **XML** | [LibXML](http://xmlsoft.org/) |
|
| **XML** | [LibXML](http://xmlsoft.org/) |
|
||||||
| **YAML** | [YamlLint](https://github.com/adrienverge/yamllint) |
|
| **YAML** | [YamlLint](https://github.com/adrienverge/yamllint) |
|
||||||
| **PowerShell** | [PSScriptAnalyzer](https://github.com/PowerShell/Psscriptanalyzer) |
|
|
||||||
| **ENV** | [dotenv-linter](https://github.com/dotenv-linter/dotenv-linter) |
|
|
||||||
| **Kotlin** | [ktlint](https://github.com/pinterest/ktlint) |
|
|
||||||
| **OpenAPI** | [spectral](https://github.com/stoplightio/spectral) |
|
|
||||||
|
|
||||||
## How to use
|
## How to use
|
||||||
|
More in-depth [tutorial](https://www.youtube.com/watch?v=EDAmFKO4Zt0&t=118s) available
|
||||||
|
|
||||||
To use this **GitHub** Action you will need to complete the following:
|
To use this **GitHub** Action you will need to complete the following:
|
||||||
- Add the **GitHub** Action: **Super-Linter** to your current **GitHub** Actions workflow
|
1. Create a new file in your repository called `.github/workflows/linter.yml`
|
||||||
- Enjoy your more *stable*, and *cleaner* code base
|
2. Copy the example workflow from below into that new file, no extra configuration required
|
||||||
- Check out the [Wiki](https://github.com/github/super-linter/wiki) for customization options
|
3. Commit that file to a new branch
|
||||||
|
4. Open up a pull request and observe the action working
|
||||||
|
5. Enjoy your more *stable*, and *cleaner* code base
|
||||||
|
6. Check out the [Wiki](https://github.com/github/super-linter/wiki) for customization options
|
||||||
|
|
||||||
### Example connecting GitHub Action Workflow
|
### Example connecting GitHub Action Workflow
|
||||||
In your repository you should have a `.github/workflows` folder with **GitHub** Action similar to below:
|
In your repository you should have a `.github/workflows` folder with **GitHub** Action similar to below:
|
||||||
|
@ -89,8 +98,10 @@ name: Lint Code Base
|
||||||
#############################
|
#############################
|
||||||
on:
|
on:
|
||||||
push:
|
push:
|
||||||
branches-ignore:
|
branches-ignore: [master]
|
||||||
- 'master'
|
# Remove the line above to run when pushing to master
|
||||||
|
pull_request:
|
||||||
|
branches: [master]
|
||||||
|
|
||||||
###############
|
###############
|
||||||
# Set the Job #
|
# Set the Job #
|
||||||
|
@ -116,14 +127,16 @@ jobs:
|
||||||
# Run Linter against code base #
|
# Run Linter against code base #
|
||||||
################################
|
################################
|
||||||
- name: Lint Code Base
|
- name: Lint Code Base
|
||||||
uses: docker://github/super-linter:v2.2.0
|
uses: docker://github/super-linter:v3
|
||||||
env:
|
env:
|
||||||
VALIDATE_ALL_CODEBASE: false
|
VALIDATE_ALL_CODEBASE: false
|
||||||
VALIDATE_ANSIBLE: false
|
DEFAULT_BRANCH: master
|
||||||
|
|
||||||
...
|
...
|
||||||
```
|
```
|
||||||
|
|
||||||
**NOTE:** Using the line:`uses: docker://github/super-linter:v2.2.0` will pull the image down from **DockerHub** and run the **GitHub Super-Linter**. Using the line: `uses: github/super-linter@v2.2.0` will build and compile the **GitHub Super-Linter** at build time. This can be far more costly in time...
|
**NOTE:**
|
||||||
|
Using the line:`uses: docker://github/super-linter:v3` will pull the image down from **DockerHub** and run the **GitHub Super-Linter**. Using the line: `uses: github/super-linter@v3` will build and compile the **GitHub Super-Linter** at build time. *This can be far more costly in time...*
|
||||||
|
|
||||||
## Environment variables
|
## Environment variables
|
||||||
The super-linter allows you to pass the following `ENV` variables to be able to trigger different functionality.
|
The super-linter allows you to pass the following `ENV` variables to be able to trigger different functionality.
|
||||||
|
@ -161,12 +174,17 @@ and won't run anything unexpected.
|
||||||
| **VALIDATE_DOCKER** | `true` | Flag to enable or disable the linting process of the language. |
|
| **VALIDATE_DOCKER** | `true` | Flag to enable or disable the linting process of the language. |
|
||||||
| **VALIDATE_GO** | `true` | Flag to enable or disable the linting process of the language. |
|
| **VALIDATE_GO** | `true` | Flag to enable or disable the linting process of the language. |
|
||||||
| **VALIDATE_POWERSHELL** | `true` | Flag to enable or disable the linting process of the language. |
|
| **VALIDATE_POWERSHELL** | `true` | Flag to enable or disable the linting process of the language. |
|
||||||
|
| **VALIDATE_ARM** | `true` | Flag to enable or disable the linting process of the language. |
|
||||||
| **VALIDATE_TERRAFORM** | `true` | Flag to enable or disable the linting process of the language. |
|
| **VALIDATE_TERRAFORM** | `true` | Flag to enable or disable the linting process of the language. |
|
||||||
| **VALIDATE_CSS** | `true` | Flag to enable or disable the linting process of the language. |
|
| **VALIDATE_CSS** | `true` | Flag to enable or disable the linting process of the language. |
|
||||||
| **VALIDATE_ENV** | `true` | Flag to enable or disable the linting process of the language. |
|
| **VALIDATE_ENV** | `true` | Flag to enable or disable the linting process of the language. |
|
||||||
| **VALIDATE_CLOJURE** | `true` | Flag to enable or disable the linting process of the language. |
|
| **VALIDATE_CLOJURE** | `true` | Flag to enable or disable the linting process of the language. |
|
||||||
|
| **VALIDATE_HTML** | `true` | Flag to enable or disable the linting process of the language. |
|
||||||
| **VALIDATE_KOTLIN** | `true` | Flag to enable or disable the linting process of the language. |
|
| **VALIDATE_KOTLIN** | `true` | Flag to enable or disable the linting process of the language. |
|
||||||
| **VALIDATE_OPENAPI** | `true` | Flag to enable or disable the linting process of the language. |
|
| **VALIDATE_OPENAPI** | `true` | Flag to enable or disable the linting process of the language. |
|
||||||
|
| **VALIDATE_CLOUDFORMATION** | `true` | Flag to enable or disable the linting process of the language. |
|
||||||
|
| **VALIDATE_PROTOBUF** | `true` | Flag to enable or disable the linting process of the language. |
|
||||||
|
| **VALIDATE_EDITORCONFIG** | `true` | Flag to enable or disable the linting process with the editorconfig. |
|
||||||
| **ANSIBLE_DIRECTORY** | `/ansible` | Flag to set the root directory for Ansible file location(s). |
|
| **ANSIBLE_DIRECTORY** | `/ansible` | Flag to set the root directory for Ansible file location(s). |
|
||||||
| **ACTIONS_RUNNER_DEBUG** | `false` | Flag to enable additional information about the linter, versions, and additional output. |
|
| **ACTIONS_RUNNER_DEBUG** | `false` | Flag to enable additional information about the linter, versions, and additional output. |
|
||||||
| **DISABLE_ERRORS** | `false` | Flag to have the linter complete with exit code 0 even if errors were detected. |
|
| **DISABLE_ERRORS** | `false` | Flag to have the linter complete with exit code 0 even if errors were detected. |
|
||||||
|
@ -204,12 +222,18 @@ The **Super-Linter** has *CI/CT/CD* configured utilizing **GitHub** Actions.
|
||||||
## Limitations
|
## Limitations
|
||||||
Below are a list of the known limitations for the **GitHub Super-Linter**:
|
Below are a list of the known limitations for the **GitHub Super-Linter**:
|
||||||
- Due to being completely packaged at run time, you will not be able to update dependencies or change versions of the enclosed linters and binaries
|
- Due to being completely packaged at run time, you will not be able to update dependencies or change versions of the enclosed linters and binaries
|
||||||
- Reading additional details from `package.json` are not read by the **GitHub Super-Linter**
|
- Additional details from `package.json` are not read by the **GitHub Super-Linter**
|
||||||
- Downloading additional codebases as dependencies from private repositories will fail due to lack of permissions
|
- Downloading additional codebases as dependencies from private repositories will fail due to lack of permissions
|
||||||
|
|
||||||
## How to contribute
|
## How to contribute
|
||||||
If you would like to help contribute to this **GitHub** Action, please see [CONTRIBUTING](https://github.com/github/super-linter/blob/master/.github/CONTRIBUTING.md)
|
If you would like to help contribute to this **GitHub** Action, please see [CONTRIBUTING](https://github.com/github/super-linter/blob/master/.github/CONTRIBUTING.md)
|
||||||
|
|
||||||
|
### Visual Studio Code
|
||||||
|
You can checkout this repository using [Container Remote Development](https://code.visualstudio.com/docs/remote/containers), and debug the linter using the `Test Linter` task.
|
||||||
|
![Example](https://user-images.githubusercontent.com/15258962/85165778-2d2ce700-b21b-11ea-803e-3f6709d8e609.gif)
|
||||||
|
|
||||||
|
We will also support [Github Codespaces](https://github.com/features/codespaces/) once it becomes available
|
||||||
|
|
||||||
--------------------------------------------------------------------------------
|
--------------------------------------------------------------------------------
|
||||||
|
|
||||||
### License
|
### License
|
||||||
|
|
30
TEMPLATES/.arm-ttk.psd1
Normal file
30
TEMPLATES/.arm-ttk.psd1
Normal file
|
@ -0,0 +1,30 @@
|
||||||
|
# Documentation:
|
||||||
|
# - Test Parameters: https://docs.microsoft.com/en-us/azure/azure-resource-manager/templates/test-toolkit#test-parameters
|
||||||
|
# - Test Cases: https://docs.microsoft.com/en-us/azure/azure-resource-manager/templates/test-cases
|
||||||
|
@{
|
||||||
|
# Test = @(
|
||||||
|
# 'Parameters Property Must Exist',
|
||||||
|
# 'Parameters Must Be Referenced',
|
||||||
|
# 'Secure String Parameters Cannot Have Default',
|
||||||
|
# 'Location Should Not Be Hardcoded',
|
||||||
|
# 'Resources Should Have Location',
|
||||||
|
# 'VM Size Should Be A Parameter',
|
||||||
|
# 'Min And Max Value Are Numbers',
|
||||||
|
# 'artifacts-parameter',
|
||||||
|
# 'Variables Must Be Referenced',
|
||||||
|
# 'Dynamic Variable References Should Not Use Concat',
|
||||||
|
# 'apiVersions Should Be Recent',
|
||||||
|
# 'Providers apiVersions Is Not Permitted',
|
||||||
|
# 'Template Should Not Contain Blanks',
|
||||||
|
# 'IDs Should Be Derived From ResourceIDs',
|
||||||
|
# 'ResourceIds should not contain',
|
||||||
|
# 'DependsOn Must Not Be Conditional',
|
||||||
|
# 'Deployment Resources Must Not Be Debug',
|
||||||
|
# 'adminUsername Should Not Be A Literal',
|
||||||
|
# 'VM Images Should Use Latest Version',
|
||||||
|
# 'Virtual-Machines-Should-Not-Be-Preview',
|
||||||
|
# 'ManagedIdentityExtension must not be used',
|
||||||
|
# 'Outputs Must Not Contain Secrets'
|
||||||
|
# )
|
||||||
|
# Skip = @()
|
||||||
|
}
|
2
TEMPLATES/.cfnlintrc.yml
Normal file
2
TEMPLATES/.cfnlintrc.yml
Normal file
|
@ -0,0 +1,2 @@
|
||||||
|
include_checks:
|
||||||
|
- I
|
25
TEMPLATES/.htmlhintrc
Normal file
25
TEMPLATES/.htmlhintrc
Normal file
|
@ -0,0 +1,25 @@
|
||||||
|
{
|
||||||
|
"tagname-lowercase": true,
|
||||||
|
"attr-lowercase": true,
|
||||||
|
"attr-value-double-quotes": true,
|
||||||
|
"attr-value-not-empty": false,
|
||||||
|
"attr-no-duplication": true,
|
||||||
|
"doctype-first": true,
|
||||||
|
"tag-pair": true,
|
||||||
|
"tag-self-close": false,
|
||||||
|
"spec-char-escape": true,
|
||||||
|
"id-unique": true,
|
||||||
|
"src-not-empty": true,
|
||||||
|
"title-require": true,
|
||||||
|
"alt-require": true,
|
||||||
|
"doctype-html5": true,
|
||||||
|
"id-class-value": "dash",
|
||||||
|
"style-disabled": false,
|
||||||
|
"inline-style-disabled": false,
|
||||||
|
"inline-script-disabled": false,
|
||||||
|
"space-tab-mixed-disabled": "space",
|
||||||
|
"id-class-ad-disabled": false,
|
||||||
|
"href-abs-or-rel": false,
|
||||||
|
"attr-unsafe-chars": true,
|
||||||
|
"head-script-disabled": true
|
||||||
|
}
|
7
TEMPLATES/.protolintrc.yml
Normal file
7
TEMPLATES/.protolintrc.yml
Normal file
|
@ -0,0 +1,7 @@
|
||||||
|
# Lint directives.
|
||||||
|
lint:
|
||||||
|
# Linter rules.
|
||||||
|
# Run `protolint list` to see all available rules.
|
||||||
|
rules:
|
||||||
|
# Set the default to all linters.
|
||||||
|
all_default: false
|
|
@ -1,4 +1,5 @@
|
||||||
[MASTER]
|
[MASTER]
|
||||||
|
errors-only=
|
||||||
|
|
||||||
# A comma-separated list of package or module names from where C extensions may
|
# A comma-separated list of package or module names from where C extensions may
|
||||||
# be loaded. Extensions are loading into the active Python interpreter and may
|
# be loaded. Extensions are loading into the active Python interpreter and may
|
||||||
|
@ -157,7 +158,7 @@ output-format=text
|
||||||
reports=no
|
reports=no
|
||||||
|
|
||||||
# Activate the evaluation score.
|
# Activate the evaluation score.
|
||||||
score=yes
|
score=no
|
||||||
|
|
||||||
|
|
||||||
[REFACTORING]
|
[REFACTORING]
|
||||||
|
|
|
@ -1,6 +1,6 @@
|
||||||
// https://github.com/terraform-linters/tflint/blob/master/docs/guides/config.md
|
// https://github.com/terraform-linters/tflint/blob/master/docs/guides/config.md
|
||||||
config {
|
config {
|
||||||
module = true
|
module = false
|
||||||
deep_check = false
|
deep_check = false
|
||||||
force = false
|
force = false
|
||||||
|
|
||||||
|
|
|
@ -1,12 +1,22 @@
|
||||||
# Disabling linters and Rules
|
# Disabling linters and Rules
|
||||||
If you find you need to ignore certain **errors** and **warnings**, you will need to know the *format* to disable the **Super-Linter** rules.
|
Linters can often require additional configuration to ensure they work with your codebase and your team's coding style, to avoid flagging false-positives. The **GitHub Super-Linter** has set up some default configurations for each linter which should work reasonably well with common code bases, but many of the linters can be configured to disable certain rules or configure the rules to ignore certain pieces of codes.
|
||||||
Below are examples and documentation for each language and the various methods to disable.
|
|
||||||
|
To run with your own configuration for a linter, copy the relevant [`TEMPLATE` configuration file for the linter you are using from this repo](https://github.com/github/super-linter/tree/master/TEMPLATES) into the `.github/linters` folder in your own repository, and then edit it to modify, disable - or even add - rules and configuration to suit how you want your code checked.
|
||||||
|
|
||||||
|
How the changes are made differ for each linter, and also how much the **Github Super-Linter** has decided to change the linter's defaults. So, for some linters (e.g. [pylint for python](https://github.com/github/super-linter/blob/master/TEMPLATES/.python-lint)), there may be a large configuration file. For others (e.g. [stylelint for CSS](https://github.com/github/super-linter/blob/master/TEMPLATES/.stylelintrc.json)) the default configuration file may initially be nearly empty. And for some (e.g. StandardJS) it may not be possible to change configuration at all so there is no Template file.
|
||||||
|
|
||||||
|
Where a configuration file exists in your repo, it will be used in preference to the default one in the **GitHub Super-Linter** `TEMPLATES` directory (not in addition to it), and where one doesn't exist the `TEMPLATES` version will be used. So you should copy the complete configuration file you require to change from the `TEMPLATES` directory and not just the lines of config you want to change.
|
||||||
|
|
||||||
|
It is possible to have custom configurations for some linters, and continue to use the default from `TEMPLATES` directory for others, so if you use `Python` and `JavaScript` and only need to tweak the `Python` rules, then you only need to have a custom configuration for *pylint* and continue to use the default `TEMPLATE` from the main repo for *ESLint*, for example.
|
||||||
|
|
||||||
|
For some linters it is also possible to override rules on a case by case level with directives in your code. Where this is possible we try to note how to do this in the specific linter sections below, but the official linter documentation will likely give more detail on this.
|
||||||
|
|
||||||
## Table of Linters
|
## Table of Linters
|
||||||
- [Ruby](#ruby)
|
- [Ruby](#ruby)
|
||||||
- [Shell](#shell)
|
- [Shell](#shell)
|
||||||
- [Ansible](#ansible)
|
- [Ansible](#ansible)
|
||||||
- [YAML](#yaml)
|
- [YAML](#yaml)
|
||||||
|
- [AWS CloudFormation templates](#cfn)
|
||||||
- [Python](#python3)
|
- [Python](#python3)
|
||||||
- [JSON](#json)
|
- [JSON](#json)
|
||||||
- [Markdown](#markdown)
|
- [Markdown](#markdown)
|
||||||
|
@ -21,10 +31,13 @@ Below are examples and documentation for each language and the various methods t
|
||||||
- [Golang](#golang)
|
- [Golang](#golang)
|
||||||
- [Dockerfile](#dockerfile)
|
- [Dockerfile](#dockerfile)
|
||||||
- [Terraform](#terraform)
|
- [Terraform](#terraform)
|
||||||
- [CSS](#stylelint)
|
- [CSS](#css)
|
||||||
- [ENV](#dotenv-linter)
|
- [ENV](#dotenv-linter)
|
||||||
- [Kotlin](#kotlin)
|
- [Kotlin](#kotlin)
|
||||||
- [OpenAPI](#openapi)
|
- [OpenAPI](#openapi)
|
||||||
|
- [Protocol Buffers](#protocol-buffers)
|
||||||
|
- [EDITORCONFIG-CHECKER](#editorconfig-checker)
|
||||||
|
- [HTML](#html)
|
||||||
|
|
||||||
<!-- toc -->
|
<!-- toc -->
|
||||||
|
|
||||||
|
@ -238,6 +251,42 @@ var = "terrible code down here..."
|
||||||
|
|
||||||
--------------------------------------------------------------------------------
|
--------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
## AWS CloudFormation templates
|
||||||
|
- [cfn-lint](https://github.com/aws-cloudformation/cfn-python-lint/)
|
||||||
|
|
||||||
|
### cfn-lint Config file
|
||||||
|
- `.github/linters/.cfnlintrc.yml`
|
||||||
|
- You can pass multiple rules and overwrite default rules
|
||||||
|
- File should be located at: `.github/linters/.cfnlintrc.yml`
|
||||||
|
|
||||||
|
### cfn-lint disable single line
|
||||||
|
- There is currently **No** way to disable rules inline of the file(s)
|
||||||
|
|
||||||
|
### cfn-lint disable code block
|
||||||
|
You can disable both [template](https://github.com/aws-cloudformation/cfn-python-lint/#template-based-metadata) or [resource](https://github.com/aws-cloudformation/cfn-python-lint/#resource-based-metadata) via [metadata](https://github.com/aws-cloudformation/cfn-python-lint/#metadata):
|
||||||
|
```yaml
|
||||||
|
Resources:
|
||||||
|
myInstance:
|
||||||
|
Type: AWS::EC2::Instance
|
||||||
|
Metadata:
|
||||||
|
cfn-lint:
|
||||||
|
config:
|
||||||
|
ignore_checks:
|
||||||
|
- E3030
|
||||||
|
Properties:
|
||||||
|
InstanceType: nt.x4superlarge
|
||||||
|
ImageId: ami-abc1234
|
||||||
|
```
|
||||||
|
|
||||||
|
### cfn-lint disable entire file
|
||||||
|
If you need to ignore an entire file, you can update the `.github/linters/.cfnlintrc.yml` to ignore certain files and locations
|
||||||
|
```yaml
|
||||||
|
ignore_templates:
|
||||||
|
- codebuild.yaml
|
||||||
|
```
|
||||||
|
|
||||||
|
--------------------------------------------------------------------------------
|
||||||
|
|
||||||
## JSON
|
## JSON
|
||||||
- [jsonlint](https://github.com/zaach/jsonlint)
|
- [jsonlint](https://github.com/zaach/jsonlint)
|
||||||
|
|
||||||
|
@ -632,6 +681,53 @@ import package.b.*
|
||||||
|
|
||||||
--------------------------------------------------------------------------------
|
--------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
## Protocol Buffers
|
||||||
|
|
||||||
|
- [protolint](https://github.com/yoheimuta/protolint)
|
||||||
|
|
||||||
|
### protolint Config file
|
||||||
|
|
||||||
|
- `.github/linters/.protolintrc.yml`
|
||||||
|
- You can add, extend, and disable rules
|
||||||
|
- Documentation at [Rules](https://github.com/yoheimuta/protolint#rules) and [Configuring](https://github.com/yoheimuta/protolint#configuring)
|
||||||
|
|
||||||
|
### protolint disable single line
|
||||||
|
|
||||||
|
```protobuf
|
||||||
|
enum Foo {
|
||||||
|
// protolint:disable:next ENUM_FIELD_NAMES_UPPER_SNAKE_CASE
|
||||||
|
firstValue = 0;
|
||||||
|
second_value = 1; // protolint:disable:this ENUM_FIELD_NAMES_UPPER_SNAKE_CASE
|
||||||
|
THIRD_VALUE = 2;
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### protolint disable code block
|
||||||
|
|
||||||
|
```protobuf
|
||||||
|
// protolint:disable ENUM_FIELD_NAMES_UPPER_SNAKE_CASE
|
||||||
|
enum Foo {
|
||||||
|
firstValue = 0;
|
||||||
|
second_value = 1;
|
||||||
|
THIRD_VALUE = 2;
|
||||||
|
}
|
||||||
|
// protolint:enable ENUM_FIELD_NAMES_UPPER_SNAKE_CASE
|
||||||
|
```
|
||||||
|
|
||||||
|
### protolint disable entire file
|
||||||
|
|
||||||
|
- You can disable entire files with the `lint.files.exclude` property in `.protolintrc.yml`
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
# Lint directives.
|
||||||
|
lint:
|
||||||
|
# Linter files to walk.
|
||||||
|
files:
|
||||||
|
# The specific files to exclude.
|
||||||
|
exclude:
|
||||||
|
- path/to/file
|
||||||
|
```
|
||||||
|
|
||||||
## Clojure
|
## Clojure
|
||||||
- [clj-kondo](https://github.com/borkdude/clj-kondo)
|
- [clj-kondo](https://github.com/borkdude/clj-kondo)
|
||||||
- Since clj-kondo approaches static analysis in a very Clojure way, it is advised to read the [configuration docs](https://github.com/borkdude/clj-kondo/blob/master/doc/config.md)
|
- Since clj-kondo approaches static analysis in a very Clojure way, it is advised to read the [configuration docs](https://github.com/borkdude/clj-kondo/blob/master/doc/config.md)
|
||||||
|
@ -648,4 +744,51 @@ import package.b.*
|
||||||
### clj-kondo disable entire file
|
### clj-kondo disable entire file
|
||||||
```clojure
|
```clojure
|
||||||
{:output {:exclude-files ["path/to/file"]}}
|
{:output {:exclude-files ["path/to/file"]}}
|
||||||
|
|
||||||
|
## EDITORCONFIG-CHECKER
|
||||||
|
- [editorconfig-checker](https://github.com/editorconfig-checker/editorconfig-checker)
|
||||||
|
|
||||||
|
--------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
### editorconfig-checker Config file
|
||||||
|
- `.github/linters/.ecrc`
|
||||||
|
- This linter will also use the [`.editorconfig`](https://editorconfig.org/) of your project
|
||||||
|
|
||||||
|
### editorconfig-checker disable single line
|
||||||
|
-
|
||||||
|
```js
|
||||||
|
<LINE> // editorconfig-checker-disable-line
|
||||||
```
|
```
|
||||||
|
|
||||||
|
### editorconfig-checker disable code block
|
||||||
|
- There is currently **No** way to disable rules inline of the file(s)
|
||||||
|
|
||||||
|
### editorconfig-checker disable entire file
|
||||||
|
-
|
||||||
|
```js
|
||||||
|
// editorconfig-checker-disable-file
|
||||||
|
```
|
||||||
|
- You can disable entire files with the `Exclude` property in `.ecrc`
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"Exclude": [
|
||||||
|
"path/to/file",
|
||||||
|
"^regular\\/expression\\.ext$"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## HTML
|
||||||
|
- [htmlhint](https://htmlhint.com/)
|
||||||
|
|
||||||
|
### htmlhint standard Config file
|
||||||
|
- `.github/linters/.htmlhintrc`
|
||||||
|
|
||||||
|
### htmlhint disable single line
|
||||||
|
- There is currently **No** way to disable rules in a single line
|
||||||
|
|
||||||
|
### htmlhint disable code block
|
||||||
|
- There is currently **No** way to disable rules in a code block
|
||||||
|
|
||||||
|
### htmlhint disable entire file
|
||||||
|
- There is currently **No** way to disable rules in an entire file
|
15
lib/README.md
Normal file
15
lib/README.md
Normal file
|
@ -0,0 +1,15 @@
|
||||||
|
# Super-Linter Library
|
||||||
|
|
||||||
|
## Main script
|
||||||
|
The file `linter.sh` is the main script that is called for the process and loads all other scripts as functions.
|
||||||
|
|
||||||
|
## Functions
|
||||||
|
The additional files in the folder are functions to help streamline the main build process and allow for easier maintenance.
|
||||||
|
- `possum.sh`
|
||||||
|
- Official mascot of the **Super-Linter**
|
||||||
|
- `buildFileList.sh`
|
||||||
|
- Functions to help find files that were modified, or need to be scanned
|
||||||
|
- `validation.sh`
|
||||||
|
- Logic to see what linters are enabled
|
||||||
|
- `worker.sh`
|
||||||
|
- Calls to the various linters and the test cases for each
|
461
lib/buildFileList.sh
Executable file
461
lib/buildFileList.sh
Executable file
|
@ -0,0 +1,461 @@
|
||||||
|
#!/usr/bin/env bash
|
||||||
|
|
||||||
|
################################################################################
|
||||||
|
################################################################################
|
||||||
|
########### Super-Linter Build File List Functions @admiralawkbar ##############
|
||||||
|
################################################################################
|
||||||
|
################################################################################
|
||||||
|
########################## FUNCTION CALLS BELOW ################################
|
||||||
|
################################################################################
|
||||||
|
################################################################################
|
||||||
|
#### Function BuildFileList ####################################################
|
||||||
|
function BuildFileList() {
|
||||||
|
# Need to build a list of all files changed
|
||||||
|
# This can be pulled from the GITHUB_EVENT_PATH payload
|
||||||
|
|
||||||
|
################
|
||||||
|
# print header #
|
||||||
|
################
|
||||||
|
if [[ $ACTIONS_RUNNER_DEBUG == "true" ]]; then
|
||||||
|
echo ""
|
||||||
|
echo "----------------------------------------------"
|
||||||
|
echo "Pulling in code history and branches..."
|
||||||
|
fi
|
||||||
|
|
||||||
|
#################################################################################
|
||||||
|
# Switch codebase back to the default branch to get a list of all files changed #
|
||||||
|
#################################################################################
|
||||||
|
SWITCH_CMD=$(
|
||||||
|
git -C "$GITHUB_WORKSPACE" pull --quiet
|
||||||
|
git -C "$GITHUB_WORKSPACE" checkout "$DEFAULT_BRANCH" 2>&1
|
||||||
|
)
|
||||||
|
|
||||||
|
#######################
|
||||||
|
# Load the error code #
|
||||||
|
#######################
|
||||||
|
ERROR_CODE=$?
|
||||||
|
|
||||||
|
##############################
|
||||||
|
# Check the shell for errors #
|
||||||
|
##############################
|
||||||
|
if [ $ERROR_CODE -ne 0 ]; then
|
||||||
|
# Error
|
||||||
|
echo "Failed to switch to $DEFAULT_BRANCH branch to get files changed!"
|
||||||
|
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[$SWITCH_CMD]${NC}"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
################
|
||||||
|
# print header #
|
||||||
|
################
|
||||||
|
if [[ $ACTIONS_RUNNER_DEBUG == "true" ]]; then
|
||||||
|
echo ""
|
||||||
|
echo "----------------------------------------------"
|
||||||
|
echo "Generating Diff with:[git diff --name-only '$DEFAULT_BRANCH..$GITHUB_SHA' --diff-filter=d]"
|
||||||
|
fi
|
||||||
|
|
||||||
|
#################################################
|
||||||
|
# Get the Array of files changed in the commits #
|
||||||
|
#################################################
|
||||||
|
mapfile -t RAW_FILE_ARRAY < <(git -C "$GITHUB_WORKSPACE" diff --name-only "$DEFAULT_BRANCH..$GITHUB_SHA" --diff-filter=d 2>&1)
|
||||||
|
|
||||||
|
#######################
|
||||||
|
# Load the error code #
|
||||||
|
#######################
|
||||||
|
ERROR_CODE=$?
|
||||||
|
|
||||||
|
##############################
|
||||||
|
# Check the shell for errors #
|
||||||
|
##############################
|
||||||
|
if [ $ERROR_CODE -ne 0 ]; then
|
||||||
|
# Error
|
||||||
|
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to gain a list of all files changed!${NC}"
|
||||||
|
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[${RAW_FILE_ARRAY[*]}]${NC}"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
################################################
|
||||||
|
# Iterate through the array of all files found #
|
||||||
|
################################################
|
||||||
|
echo ""
|
||||||
|
echo "----------------------------------------------"
|
||||||
|
echo "Files that have been modified in the commit(s):"
|
||||||
|
for FILE in "${RAW_FILE_ARRAY[@]}"; do
|
||||||
|
###########################
|
||||||
|
# Get the files extension #
|
||||||
|
###########################
|
||||||
|
# Extract just the file and extension, reverse it, cut off extension,
|
||||||
|
# reverse it back, substitute to lowercase
|
||||||
|
FILE_TYPE=$(basename "$FILE" | rev | cut -f1 -d'.' | rev | awk '{print tolower($0)}')
|
||||||
|
|
||||||
|
##############
|
||||||
|
# Print file #
|
||||||
|
##############
|
||||||
|
echo "File:[$FILE], File_type:[$FILE_TYPE]"
|
||||||
|
|
||||||
|
#########
|
||||||
|
# DEBUG #
|
||||||
|
#########
|
||||||
|
#echo "FILE_TYPE:[$FILE_TYPE]"
|
||||||
|
|
||||||
|
#####################
|
||||||
|
# Get the CFN files #
|
||||||
|
#####################
|
||||||
|
if [ "$FILE_TYPE" == "yml" ] || [ "$FILE_TYPE" == "yaml" ]; then
|
||||||
|
################################
|
||||||
|
# Append the file to the array #
|
||||||
|
################################
|
||||||
|
FILE_ARRAY_YML+=("$FILE")
|
||||||
|
##########################################################
|
||||||
|
# Set the READ_ONLY_CHANGE_FLAG since this could be exec #
|
||||||
|
##########################################################
|
||||||
|
READ_ONLY_CHANGE_FLAG=1
|
||||||
|
|
||||||
|
#####################################
|
||||||
|
# Check if the file is CFN template #
|
||||||
|
#####################################
|
||||||
|
if DetectCloudFormationFile "$FILE"; then
|
||||||
|
################################
|
||||||
|
# Append the file to the array #
|
||||||
|
################################
|
||||||
|
FILE_ARRAY_CFN+=("$FILE")
|
||||||
|
|
||||||
|
##########################################################
|
||||||
|
# Set the READ_ONLY_CHANGE_FLAG since this could be exec #
|
||||||
|
##########################################################
|
||||||
|
READ_ONLY_CHANGE_FLAG=1
|
||||||
|
fi
|
||||||
|
######################
|
||||||
|
# Get the JSON files #
|
||||||
|
######################
|
||||||
|
elif [ "$FILE_TYPE" == "json" ]; then
|
||||||
|
################################
|
||||||
|
# Append the file to the array #
|
||||||
|
################################
|
||||||
|
FILE_ARRAY_JSON+=("$FILE")
|
||||||
|
############################
|
||||||
|
# Check if file is OpenAPI #
|
||||||
|
############################
|
||||||
|
if DetectOpenAPIFile "$FILE"; then
|
||||||
|
################################
|
||||||
|
# Append the file to the array #
|
||||||
|
################################
|
||||||
|
FILE_ARRAY_OPENAPI+=("$FILE")
|
||||||
|
fi
|
||||||
|
############################
|
||||||
|
# Check if file is ARM #
|
||||||
|
############################
|
||||||
|
if DetectARMFile "$FILE"; then
|
||||||
|
################################
|
||||||
|
# Append the file to the array #
|
||||||
|
################################
|
||||||
|
FILE_ARRAY_ARM+=("$FILE")
|
||||||
|
fi
|
||||||
|
#####################################
|
||||||
|
# Check if the file is CFN template #
|
||||||
|
#####################################
|
||||||
|
if DetectCloudFormationFile "$FILE"; then
|
||||||
|
################################
|
||||||
|
# Append the file to the array #
|
||||||
|
################################
|
||||||
|
FILE_ARRAY_CFN+=("$FILE")
|
||||||
|
fi
|
||||||
|
##########################################################
|
||||||
|
# Set the READ_ONLY_CHANGE_FLAG since this could be exec #
|
||||||
|
##########################################################
|
||||||
|
READ_ONLY_CHANGE_FLAG=1
|
||||||
|
#####################
|
||||||
|
# Get the XML files #
|
||||||
|
#####################
|
||||||
|
elif [ "$FILE_TYPE" == "xml" ]; then
|
||||||
|
################################
|
||||||
|
# Append the file to the array #
|
||||||
|
################################
|
||||||
|
FILE_ARRAY_XML+=("$FILE")
|
||||||
|
##########################################################
|
||||||
|
# Set the READ_ONLY_CHANGE_FLAG since this could be exec #
|
||||||
|
##########################################################
|
||||||
|
READ_ONLY_CHANGE_FLAG=1
|
||||||
|
##########################
|
||||||
|
# Get the MARKDOWN files #
|
||||||
|
##########################
|
||||||
|
elif [ "$FILE_TYPE" == "md" ]; then
|
||||||
|
################################
|
||||||
|
# Append the file to the array #
|
||||||
|
################################
|
||||||
|
FILE_ARRAY_MD+=("$FILE")
|
||||||
|
######################
|
||||||
|
# Get the BASH files #
|
||||||
|
######################
|
||||||
|
elif [ "$FILE_TYPE" == "sh" ]; then
|
||||||
|
################################
|
||||||
|
# Append the file to the array #
|
||||||
|
################################
|
||||||
|
FILE_ARRAY_BASH+=("$FILE")
|
||||||
|
##########################################################
|
||||||
|
# Set the READ_ONLY_CHANGE_FLAG since this could be exec #
|
||||||
|
##########################################################
|
||||||
|
READ_ONLY_CHANGE_FLAG=1
|
||||||
|
######################
|
||||||
|
# Get the PERL files #
|
||||||
|
######################
|
||||||
|
elif [ "$FILE_TYPE" == "pl" ]; then
|
||||||
|
################################
|
||||||
|
# Append the file to the array #
|
||||||
|
################################
|
||||||
|
FILE_ARRAY_PERL+=("$FILE")
|
||||||
|
##########################################################
|
||||||
|
# Set the READ_ONLY_CHANGE_FLAG since this could be exec #
|
||||||
|
##########################################################
|
||||||
|
READ_ONLY_CHANGE_FLAG=1
|
||||||
|
######################
|
||||||
|
# Get the PHP files #
|
||||||
|
######################
|
||||||
|
elif [ "$FILE_TYPE" == "php" ]; then
|
||||||
|
################################
|
||||||
|
# Append the file to the array #
|
||||||
|
################################
|
||||||
|
FILE_ARRAY_PHP+=("$FILE")
|
||||||
|
##########################################################
|
||||||
|
# Set the READ_ONLY_CHANGE_FLAG since this could be exec #
|
||||||
|
##########################################################
|
||||||
|
READ_ONLY_CHANGE_FLAG=1
|
||||||
|
######################
|
||||||
|
# Get the RUBY files #
|
||||||
|
######################
|
||||||
|
elif [ "$FILE_TYPE" == "rb" ]; then
|
||||||
|
################################
|
||||||
|
# Append the file to the array #
|
||||||
|
################################
|
||||||
|
FILE_ARRAY_RUBY+=("$FILE")
|
||||||
|
##########################################################
|
||||||
|
# Set the READ_ONLY_CHANGE_FLAG since this could be exec #
|
||||||
|
##########################################################
|
||||||
|
READ_ONLY_CHANGE_FLAG=1
|
||||||
|
########################
|
||||||
|
# Get the PYTHON files #
|
||||||
|
########################
|
||||||
|
elif [ "$FILE_TYPE" == "py" ]; then
|
||||||
|
################################
|
||||||
|
# Append the file to the array #
|
||||||
|
################################
|
||||||
|
FILE_ARRAY_PYTHON+=("$FILE")
|
||||||
|
##########################################################
|
||||||
|
# Set the READ_ONLY_CHANGE_FLAG since this could be exec #
|
||||||
|
##########################################################
|
||||||
|
READ_ONLY_CHANGE_FLAG=1
|
||||||
|
########################
|
||||||
|
# Get the COFFEE files #
|
||||||
|
########################
|
||||||
|
elif [ "$FILE_TYPE" == "coffee" ]; then
|
||||||
|
################################
|
||||||
|
# Append the file to the array #
|
||||||
|
################################
|
||||||
|
FILE_ARRAY_COFFEESCRIPT+=("$FILE")
|
||||||
|
##########################################################
|
||||||
|
# Set the READ_ONLY_CHANGE_FLAG since this could be exec #
|
||||||
|
##########################################################
|
||||||
|
READ_ONLY_CHANGE_FLAG=1
|
||||||
|
############################
|
||||||
|
# Get the JavaScript files #
|
||||||
|
############################
|
||||||
|
elif [ "$FILE_TYPE" == "js" ]; then
|
||||||
|
################################
|
||||||
|
# Append the file to the array #
|
||||||
|
################################
|
||||||
|
FILE_ARRAY_JAVASCRIPT_ES+=("$FILE")
|
||||||
|
FILE_ARRAY_JAVASCRIPT_STANDARD+=("$FILE")
|
||||||
|
##########################################################
|
||||||
|
# Set the READ_ONLY_CHANGE_FLAG since this could be exec #
|
||||||
|
##########################################################
|
||||||
|
READ_ONLY_CHANGE_FLAG=1
|
||||||
|
############################
|
||||||
|
# Get the TypeScript files #
|
||||||
|
############################
|
||||||
|
elif [ "$FILE_TYPE" == "ts" ]; then
|
||||||
|
################################
|
||||||
|
# Append the file to the array #
|
||||||
|
################################
|
||||||
|
FILE_ARRAY_TYPESCRIPT_ES+=("$FILE")
|
||||||
|
FILE_ARRAY_TYPESCRIPT_STANDARD+=("$FILE")
|
||||||
|
##########################################################
|
||||||
|
# Set the READ_ONLY_CHANGE_FLAG since this could be exec #
|
||||||
|
##########################################################
|
||||||
|
READ_ONLY_CHANGE_FLAG=1
|
||||||
|
########################
|
||||||
|
# Get the Golang files #
|
||||||
|
########################
|
||||||
|
elif [ "$FILE_TYPE" == "go" ]; then
|
||||||
|
################################
|
||||||
|
# Append the file to the array #
|
||||||
|
################################
|
||||||
|
FILE_ARRAY_GO+=("$FILE")
|
||||||
|
##########################################################
|
||||||
|
# Set the READ_ONLY_CHANGE_FLAG since this could be exec #
|
||||||
|
##########################################################
|
||||||
|
READ_ONLY_CHANGE_FLAG=1
|
||||||
|
###########################
|
||||||
|
# Get the Terraform files #
|
||||||
|
###########################
|
||||||
|
elif [ "$FILE_TYPE" == "tf" ]; then
|
||||||
|
################################
|
||||||
|
# Append the file to the array #
|
||||||
|
################################
|
||||||
|
FILE_ARRAY_TERRAFORM+=("$FILE")
|
||||||
|
##########################################################
|
||||||
|
# Set the READ_ONLY_CHANGE_FLAG since this could be exec #
|
||||||
|
##########################################################
|
||||||
|
READ_ONLY_CHANGE_FLAG=1
|
||||||
|
###########################
|
||||||
|
# Get the Powershell files #
|
||||||
|
###########################
|
||||||
|
elif [ "$FILE_TYPE" == "ps1" ]; then
|
||||||
|
################################
|
||||||
|
# Append the file to the array #
|
||||||
|
################################
|
||||||
|
FILE_ARRAY_POWERSHELL+=("$FILE")
|
||||||
|
elif [ "$FILE_TYPE" == "css" ]; then
|
||||||
|
################################
|
||||||
|
# Append the file to the array #
|
||||||
|
################################
|
||||||
|
FILE_ARRAY_CSS+=("$FILE")
|
||||||
|
##########################################################
|
||||||
|
# Set the READ_ONLY_CHANGE_FLAG since this could be exec #
|
||||||
|
##########################################################
|
||||||
|
READ_ONLY_CHANGE_FLAG=1
|
||||||
|
elif [ "$FILE_TYPE" == "env" ]; then
|
||||||
|
################################
|
||||||
|
# Append the file to the array #
|
||||||
|
################################
|
||||||
|
FILE_ARRAY_ENV+=("$FILE")
|
||||||
|
##########################################################
|
||||||
|
# Set the READ_ONLY_CHANGE_FLAG since this could be exec #
|
||||||
|
##########################################################
|
||||||
|
READ_ONLY_CHANGE_FLAG=1
|
||||||
|
elif [ "$FILE_TYPE" == "kt" ] || [ "$FILE_TYPE" == "kts" ]; then
|
||||||
|
################################
|
||||||
|
# Append the file to the array #
|
||||||
|
################################
|
||||||
|
FILE_ARRAY_KOTLIN+=("$FILE")
|
||||||
|
##########################################################
|
||||||
|
# Set the READ_ONLY_CHANGE_FLAG since this could be exec #
|
||||||
|
##########################################################
|
||||||
|
READ_ONLY_CHANGE_FLAG=1
|
||||||
|
############################
|
||||||
|
# Get the Protocol Buffers files #
|
||||||
|
############################
|
||||||
|
elif [ "$FILE_TYPE" == "proto" ]; then
|
||||||
|
################################
|
||||||
|
# Append the file to the array #
|
||||||
|
################################
|
||||||
|
FILE_ARRAY_PROTOBUF+=("$FILE")
|
||||||
|
##########################################################
|
||||||
|
# Set the READ_ONLY_CHANGE_FLAG since this could be exec #
|
||||||
|
##########################################################
|
||||||
|
READ_ONLY_CHANGE_FLAG=1
|
||||||
|
elif [ "$FILE" == "dockerfile" ] || [ "$FILE_TYPE" == "dockerfile" ]; then
|
||||||
|
################################
|
||||||
|
# Append the file to the array #
|
||||||
|
################################
|
||||||
|
FILE_ARRAY_DOCKER+=("$FILE")
|
||||||
|
##########################################################
|
||||||
|
# Set the READ_ONLY_CHANGE_FLAG since this could be exec #
|
||||||
|
##########################################################
|
||||||
|
READ_ONLY_CHANGE_FLAG=1
|
||||||
|
elif [ "$FILE_TYPE" == "clj" ] || [ "$FILE_TYPE" == "cljs" ] || [ "$FILE_TYPE" == "cljc" ] || [ "$FILE_TYPE" == "edn" ]; then
|
||||||
|
################################
|
||||||
|
# Append the file to the array #
|
||||||
|
################################
|
||||||
|
FILE_ARRAY_CLOJURE+=("$FILE")
|
||||||
|
##########################################################
|
||||||
|
# Set the READ_ONLY_CHANGE_FLAG since this could be exec #
|
||||||
|
##########################################################
|
||||||
|
READ_ONLY_CHANGE_FLAG=1
|
||||||
|
elif [ "$FILE_TYPE" == "html" ]; then
|
||||||
|
################################
|
||||||
|
# Append the file to the array #
|
||||||
|
##############################p##
|
||||||
|
FILE_ARRAY_HTML+=("$FILE")
|
||||||
|
##########################################################
|
||||||
|
# Set the READ_ONLY_CHANGE_FLAG since this could be exec #
|
||||||
|
##########################################################
|
||||||
|
READ_ONLY_CHANGE_FLAG=1
|
||||||
|
else
|
||||||
|
##############################################
|
||||||
|
# Use file to see if we can parse what it is #
|
||||||
|
##############################################
|
||||||
|
GET_FILE_TYPE_CMD=$(file "$FILE" 2>&1)
|
||||||
|
|
||||||
|
#################
|
||||||
|
# Check if bash #
|
||||||
|
#################
|
||||||
|
if [[ $GET_FILE_TYPE_CMD == *"Bourne-Again shell script"* ]]; then
|
||||||
|
#######################
|
||||||
|
# It is a bash script #
|
||||||
|
#######################
|
||||||
|
echo -e "${NC}${F[Y]}WARN!${NC} Found bash script without extension:[.sh]${NC}"
|
||||||
|
echo "Please update file with proper extensions."
|
||||||
|
################################
|
||||||
|
# Append the file to the array #
|
||||||
|
################################
|
||||||
|
FILE_ARRAY_BASH+=("$FILE")
|
||||||
|
##########################################################
|
||||||
|
# Set the READ_ONLY_CHANGE_FLAG since this could be exec #
|
||||||
|
##########################################################
|
||||||
|
READ_ONLY_CHANGE_FLAG=1
|
||||||
|
elif [[ $GET_FILE_TYPE_CMD == *"Ruby script"* ]]; then
|
||||||
|
#######################
|
||||||
|
# It is a Ruby script #
|
||||||
|
#######################
|
||||||
|
echo -e "${NC}${F[Y]}WARN!${NC} Found ruby script without extension:[.rb]${NC}"
|
||||||
|
echo "Please update file with proper extensions."
|
||||||
|
################################
|
||||||
|
# Append the file to the array #
|
||||||
|
################################
|
||||||
|
FILE_ARRAY_RUBY+=("$FILE")
|
||||||
|
##########################################################
|
||||||
|
# Set the READ_ONLY_CHANGE_FLAG since this could be exec #
|
||||||
|
##########################################################
|
||||||
|
READ_ONLY_CHANGE_FLAG=1
|
||||||
|
else
|
||||||
|
############################
|
||||||
|
# Extension was not found! #
|
||||||
|
############################
|
||||||
|
echo -e "${NC}${F[Y]} - WARN!${NC} Failed to get filetype for:[$FILE]!${NC}"
|
||||||
|
##########################################################
|
||||||
|
# Set the READ_ONLY_CHANGE_FLAG since this could be exec #
|
||||||
|
##########################################################
|
||||||
|
READ_ONLY_CHANGE_FLAG=1
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
|
||||||
|
echo ${READ_ONLY_CHANGE_FLAG} > /dev/null 2>&1 || true # Workaround SC2034
|
||||||
|
|
||||||
|
#########################################
|
||||||
|
# Need to switch back to branch of code #
|
||||||
|
#########################################
|
||||||
|
SWITCH2_CMD=$(git -C "$GITHUB_WORKSPACE" checkout --progress --force "$GITHUB_SHA" 2>&1)
|
||||||
|
|
||||||
|
#######################
|
||||||
|
# Load the error code #
|
||||||
|
#######################
|
||||||
|
ERROR_CODE=$?
|
||||||
|
|
||||||
|
##############################
|
||||||
|
# Check the shell for errors #
|
||||||
|
##############################
|
||||||
|
if [ $ERROR_CODE -ne 0 ]; then
|
||||||
|
# Error
|
||||||
|
echo "Failed to switch back to branch!"
|
||||||
|
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[$SWITCH2_CMD]${NC}"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
################
|
||||||
|
# Footer print #
|
||||||
|
################
|
||||||
|
echo ""
|
||||||
|
echo "----------------------------------------------"
|
||||||
|
echo -e "${NC}${F[B]}Successfully gathered list of files...${NC}"
|
||||||
|
}
|
2516
lib/linter.sh
2516
lib/linter.sh
File diff suppressed because it is too large
Load diff
|
@ -1,5 +1,5 @@
|
||||||
#!/usr/bin/env bash
|
#!/usr/bin/env bash
|
||||||
cat <<EOF
|
cat << EOF
|
||||||
--------------------------------------------------------------------------------
|
--------------------------------------------------------------------------------
|
||||||
|
|
||||||
/@@#///////@@/(@//@%/(@.@( @@
|
/@@#///////@@/(@//@%/(@.@( @@
|
||||||
|
|
27
lib/termColors.sh
Normal file
27
lib/termColors.sh
Normal file
|
@ -0,0 +1,27 @@
|
||||||
|
#!/usr/bin/env bash
|
||||||
|
|
||||||
|
declare -Agr B=(
|
||||||
|
[B]=$(echo -e "\e[44m")
|
||||||
|
[C]=$(echo -e "\e[46m")
|
||||||
|
[G]=$(echo -e "\e[42m")
|
||||||
|
[K]=$(echo -e "\e[40m")
|
||||||
|
[M]=$(echo -e "\e[45m")
|
||||||
|
[R]=$(echo -e "\e[41m")
|
||||||
|
[W]=$(echo -e "\e[47m")
|
||||||
|
[Y]=$(echo -e "\e[43m")
|
||||||
|
)
|
||||||
|
declare -Agr F=(
|
||||||
|
[B]=$(echo -e "\e[0;34m")
|
||||||
|
[C]=$(echo -e "\e[0;36m")
|
||||||
|
[G]=$(echo -e "\e[0;32m")
|
||||||
|
[K]=$(echo -e "\e[0;30m")
|
||||||
|
[M]=$(echo -e "\e[0;35m")
|
||||||
|
[R]=$(echo -e "\e[0;31m")
|
||||||
|
[W]=$(echo -e "\e[0;37m")
|
||||||
|
[Y]=$(echo -e "\e[0;33m")
|
||||||
|
)
|
||||||
|
readonly NC=$(echo -e "\e[0m")
|
||||||
|
|
||||||
|
export B
|
||||||
|
export F
|
||||||
|
export NC
|
730
lib/validation.sh
Executable file
730
lib/validation.sh
Executable file
|
@ -0,0 +1,730 @@
|
||||||
|
#!/usr/bin/env bash
|
||||||
|
|
||||||
|
################################################################################
|
||||||
|
################################################################################
|
||||||
|
########### Super-Linter Validation Functions @admiralawkbar ###################
|
||||||
|
################################################################################
|
||||||
|
################################################################################
|
||||||
|
########################## FUNCTION CALLS BELOW ################################
|
||||||
|
################################################################################
|
||||||
|
################################################################################
|
||||||
|
#### Function GetValidationInfo ################################################
|
||||||
|
function GetValidationInfo() {
|
||||||
|
############################################
|
||||||
|
# Print headers for user provided env vars #
|
||||||
|
############################################
|
||||||
|
echo ""
|
||||||
|
echo "--------------------------------------------"
|
||||||
|
echo "Gathering user validation information..."
|
||||||
|
|
||||||
|
###########################################
|
||||||
|
# Skip validation if were running locally #
|
||||||
|
###########################################
|
||||||
|
if [[ $RUN_LOCAL != "true" ]]; then
|
||||||
|
###############################
|
||||||
|
# Convert string to lowercase #
|
||||||
|
###############################
|
||||||
|
VALIDATE_ALL_CODEBASE=$(echo "$VALIDATE_ALL_CODEBASE" | awk '{print tolower($0)}')
|
||||||
|
######################################
|
||||||
|
# Validate we should check all files #
|
||||||
|
######################################
|
||||||
|
if [[ $VALIDATE_ALL_CODEBASE != "false" ]]; then
|
||||||
|
# Set to true
|
||||||
|
VALIDATE_ALL_CODEBASE="$DEFAULT_VALIDATE_ALL_CODEBASE"
|
||||||
|
echo "- Validating ALL files in code base..."
|
||||||
|
else
|
||||||
|
# Its false
|
||||||
|
echo "- Only validating [new], or [edited] files in code base..."
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
|
||||||
|
######################
|
||||||
|
# Create Print Array #
|
||||||
|
######################
|
||||||
|
PRINT_ARRAY=()
|
||||||
|
|
||||||
|
################################
|
||||||
|
# Convert strings to lowercase #
|
||||||
|
################################
|
||||||
|
VALIDATE_YAML=$(echo "$VALIDATE_YAML" | awk '{print tolower($0)}')
|
||||||
|
VALIDATE_JSON=$(echo "$VALIDATE_JSON" | awk '{print tolower($0)}')
|
||||||
|
VALIDATE_XML=$(echo "$VALIDATE_XML" | awk '{print tolower($0)}')
|
||||||
|
VALIDATE_MD=$(echo "$VALIDATE_MD" | awk '{print tolower($0)}')
|
||||||
|
VALIDATE_BASH=$(echo "$VALIDATE_BASH" | awk '{print tolower($0)}')
|
||||||
|
VALIDATE_PERL=$(echo "$VALIDATE_PERL" | awk '{print tolower($0)}')
|
||||||
|
VALIDATE_PHP=$(echo "$VALIDATE_PHP" | awk '{print tolower($0)}')
|
||||||
|
VALIDATE_PYTHON=$(echo "$VALIDATE_PYTHON" | awk '{print tolower($0)}')
|
||||||
|
VALIDATE_RUBY=$(echo "$VALIDATE_RUBY" | awk '{print tolower($0)}')
|
||||||
|
VALIDATE_COFFEE=$(echo "$VALIDATE_COFFEE" | awk '{print tolower($0)}')
|
||||||
|
VALIDATE_ANSIBLE=$(echo "$VALIDATE_ANSIBLE" | awk '{print tolower($0)}')
|
||||||
|
VALIDATE_JAVASCRIPT_ES=$(echo "$VALIDATE_JAVASCRIPT_ES" | awk '{print tolower($0)}')
|
||||||
|
VALIDATE_JAVASCRIPT_STANDARD=$(echo "$VALIDATE_JAVASCRIPT_STANDARD" | awk '{print tolower($0)}')
|
||||||
|
VALIDATE_TYPESCRIPT_ES=$(echo "$VALIDATE_TYPESCRIPT_ES" | awk '{print tolower($0)}')
|
||||||
|
VALIDATE_TYPESCRIPT_STANDARD=$(echo "$VALIDATE_TYPESCRIPT_STANDARD" | awk '{print tolower($0)}')
|
||||||
|
VALIDATE_DOCKER=$(echo "$VALIDATE_DOCKER" | awk '{print tolower($0)}')
|
||||||
|
VALIDATE_GO=$(echo "$VALIDATE_GO" | awk '{print tolower($0)}')
|
||||||
|
VALIDATE_TERRAFORM=$(echo "$VALIDATE_TERRAFORM" | awk '{print tolower($0)}')
|
||||||
|
VALIDATE_POWERSHELL=$(echo "$VALIDATE_POWERSHELL" | awk '{print tolower($0)}')
|
||||||
|
VALIDATE_ARM=$(echo "$VALIDATE_ARM" | awk '{print tolower($0)}')
|
||||||
|
VALIDATE_CSS=$(echo "$VALIDATE_CSS" | awk '{print tolower($0)}')
|
||||||
|
VALIDATE_ENV=$(echo "$VALIDATE_ENV" | awk '{print tolower($0)}')
|
||||||
|
VALIDATE_CLOJURE=$(echo "$VALIDATE_CLOJURE" | awk '{print tolower($0)}')
|
||||||
|
VALIDATE_KOTLIN=$(echo "$VALIDATE_KOTLIN" | awk '{print tolower($0)}')
|
||||||
|
VALIDATE_PROTOBUF=$(echo "$VALIDATE_PROTOBUF" | awk '{print tolower($0)}')
|
||||||
|
VALIDATE_OPENAPI=$(echo "$VALIDATE_OPENAPI" | awk '{print tolower($0)}')
|
||||||
|
VALIDATE_EDITORCONFIG=$(echo "$VALIDATE_EDITORCONFIG" | awk '{print tolower($0)}')
|
||||||
|
VALIDATE_HTML=$(echo "$VALIDATE_HTML" | awk '{print tolower($0)}')
|
||||||
|
|
||||||
|
################################################
|
||||||
|
# Determine if any linters were explicitly set #
|
||||||
|
################################################
|
||||||
|
ANY_SET="false"
|
||||||
|
if [[ -n $VALIDATE_YAML || -n \
|
||||||
|
$VALIDATE_JSON || -n \
|
||||||
|
$VALIDATE_XML || -n \
|
||||||
|
$VALIDATE_MD || -n \
|
||||||
|
$VALIDATE_BASH || -n \
|
||||||
|
$VALIDATE_PERL || -n \
|
||||||
|
$VALIDATE_PHP || -n \
|
||||||
|
$VALIDATE_PYTHON || -n \
|
||||||
|
$VALIDATE_RUBY || -n \
|
||||||
|
$VALIDATE_COFFEE || -n \
|
||||||
|
$VALIDATE_ANSIBLE || -n \
|
||||||
|
$VALIDATE_JAVASCRIPT_ES || -n \
|
||||||
|
$VALIDATE_JAVASCRIPT_STANDARD || -n \
|
||||||
|
$VALIDATE_TYPESCRIPT_ES || -n \
|
||||||
|
$VALIDATE_TYPESCRIPT_STANDARD || -n \
|
||||||
|
$VALIDATE_DOCKER || -n \
|
||||||
|
$VALIDATE_GO || -n \
|
||||||
|
$VALIDATE_TERRAFORM || -n \
|
||||||
|
$VALIDATE_POWERSHELL || -n \
|
||||||
|
$VALIDATE_ARM || -n \
|
||||||
|
$VALIDATE_CSS || -n \
|
||||||
|
$VALIDATE_ENV || -n \
|
||||||
|
$VALIDATE_CLOJURE || -n \
|
||||||
|
$VALIDATE_PROTOBUF || -n \
|
||||||
|
$VALIDATE_OPENAPI || -n \
|
||||||
|
$VALIDATE_KOTLIN || -n \
|
||||||
|
$VALIDATE_EDITORCONFIG || -n \
|
||||||
|
$VALIDATE_HTML ]]; then
|
||||||
|
ANY_SET="true"
|
||||||
|
fi
|
||||||
|
|
||||||
|
####################################
|
||||||
|
# Validate if we should check YAML #
|
||||||
|
####################################
|
||||||
|
if [[ $ANY_SET == "true" ]]; then
|
||||||
|
# Some linter flags were set - only run those set to true
|
||||||
|
if [[ -z $VALIDATE_YAML ]]; then
|
||||||
|
# YAML flag was not set - default to false
|
||||||
|
VALIDATE_YAML="false"
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
# No linter flags were set - default all to true
|
||||||
|
VALIDATE_YAML="true"
|
||||||
|
fi
|
||||||
|
|
||||||
|
####################################
|
||||||
|
# Validate if we should check JSON #
|
||||||
|
####################################
|
||||||
|
if [[ $ANY_SET == "true" ]]; then
|
||||||
|
# Some linter flags were set - only run those set to true
|
||||||
|
if [[ -z $VALIDATE_JSON ]]; then
|
||||||
|
# JSON flag was not set - default to false
|
||||||
|
VALIDATE_JSON="false"
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
# No linter flags were set - default all to true
|
||||||
|
VALIDATE_JSON="true"
|
||||||
|
fi
|
||||||
|
|
||||||
|
###################################
|
||||||
|
# Validate if we should check XML #
|
||||||
|
###################################
|
||||||
|
if [[ $ANY_SET == "true" ]]; then
|
||||||
|
# Some linter flags were set - only run those set to true
|
||||||
|
if [[ -z $VALIDATE_XML ]]; then
|
||||||
|
# XML flag was not set - default to false
|
||||||
|
VALIDATE_XML="false"
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
# No linter flags were set - default all to true
|
||||||
|
VALIDATE_XML="true"
|
||||||
|
fi
|
||||||
|
|
||||||
|
########################################
|
||||||
|
# Validate if we should check MARKDOWN #
|
||||||
|
########################################
|
||||||
|
if [[ $ANY_SET == "true" ]]; then
|
||||||
|
# Some linter flags were set - only run those set to true
|
||||||
|
if [[ -z $VALIDATE_MD ]]; then
|
||||||
|
# MD flag was not set - default to false
|
||||||
|
VALIDATE_MD="false"
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
# No linter flags were set - default all to true
|
||||||
|
VALIDATE_MD="true"
|
||||||
|
fi
|
||||||
|
|
||||||
|
####################################
|
||||||
|
# Validate if we should check BASH #
|
||||||
|
####################################
|
||||||
|
if [[ $ANY_SET == "true" ]]; then
|
||||||
|
# Some linter flags were set - only run those set to true
|
||||||
|
if [[ -z $VALIDATE_BASH ]]; then
|
||||||
|
# BASH flag was not set - default to false
|
||||||
|
VALIDATE_BASH="false"
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
# No linter flags were set - default all to true
|
||||||
|
VALIDATE_BASH="true"
|
||||||
|
fi
|
||||||
|
|
||||||
|
####################################
|
||||||
|
# Validate if we should check PERL #
|
||||||
|
####################################
|
||||||
|
if [[ $ANY_SET == "true" ]]; then
|
||||||
|
# Some linter flags were set - only run those set to true
|
||||||
|
if [[ -z $VALIDATE_PERL ]]; then
|
||||||
|
# PERL flag was not set - default to false
|
||||||
|
VALIDATE_PERL="false"
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
# No linter flags were set - default all to true
|
||||||
|
VALIDATE_PERL="true"
|
||||||
|
fi
|
||||||
|
|
||||||
|
####################################
|
||||||
|
# Validate if we should check PHP #
|
||||||
|
####################################
|
||||||
|
if [[ $ANY_SET == "true" ]]; then
|
||||||
|
# Some linter flags were set - only run those set to true
|
||||||
|
if [[ -z $VALIDATE_PHP ]]; then
|
||||||
|
# PHP flag was not set - default to false
|
||||||
|
VALIDATE_PHP="false"
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
# No linter flags were set - default all to true
|
||||||
|
VALIDATE_PHP="true"
|
||||||
|
fi
|
||||||
|
|
||||||
|
######################################
|
||||||
|
# Validate if we should check PYTHON #
|
||||||
|
######################################
|
||||||
|
if [[ $ANY_SET == "true" ]]; then
|
||||||
|
# Some linter flags were set - only run those set to true
|
||||||
|
if [[ -z $VALIDATE_PYTHON ]]; then
|
||||||
|
# PYTHON flag was not set - default to false
|
||||||
|
VALIDATE_PYTHON="false"
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
# No linter flags were set - default all to true
|
||||||
|
VALIDATE_PYTHON="true"
|
||||||
|
fi
|
||||||
|
|
||||||
|
####################################
|
||||||
|
# Validate if we should check RUBY #
|
||||||
|
####################################
|
||||||
|
if [[ $ANY_SET == "true" ]]; then
|
||||||
|
# Some linter flags were set - only run those set to true
|
||||||
|
if [[ -z $VALIDATE_RUBY ]]; then
|
||||||
|
# RUBY flag was not set - default to false
|
||||||
|
VALIDATE_RUBY="false"
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
# No linter flags were set - default all to true
|
||||||
|
VALIDATE_RUBY="true"
|
||||||
|
fi
|
||||||
|
|
||||||
|
######################################
|
||||||
|
# Validate if we should check COFFEE #
|
||||||
|
######################################
|
||||||
|
if [[ $ANY_SET == "true" ]]; then
|
||||||
|
# Some linter flags were set - only run those set to true
|
||||||
|
if [[ -z $VALIDATE_COFFEE ]]; then
|
||||||
|
# COFFEE flag was not set - default to false
|
||||||
|
VALIDATE_COFFEE="false"
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
# No linter flags were set - default all to true
|
||||||
|
VALIDATE_COFFEE="true"
|
||||||
|
fi
|
||||||
|
|
||||||
|
#######################################
|
||||||
|
# Validate if we should check ANSIBLE #
|
||||||
|
#######################################
|
||||||
|
if [[ $ANY_SET == "true" ]]; then
|
||||||
|
# Some linter flags were set - only run those set to true
|
||||||
|
if [[ -z $VALIDATE_ANSIBLE ]]; then
|
||||||
|
# ANSIBLE flag was not set - default to false
|
||||||
|
VALIDATE_ANSIBLE="false"
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
# No linter flags were set - default all to true
|
||||||
|
VALIDATE_ANSIBLE="true"
|
||||||
|
fi
|
||||||
|
|
||||||
|
#############################################
|
||||||
|
# Validate if we should check JAVASCRIPT_ES #
|
||||||
|
#############################################
|
||||||
|
if [[ $ANY_SET == "true" ]]; then
|
||||||
|
# Some linter flags were set - only run those set to true
|
||||||
|
if [[ -z $VALIDATE_JAVASCRIPT_ES ]]; then
|
||||||
|
# JAVASCRIPT_ES flag was not set - default to false
|
||||||
|
VALIDATE_JAVASCRIPT_ES="false"
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
# No linter flags were set - default all to true
|
||||||
|
VALIDATE_JAVASCRIPT_ES="true"
|
||||||
|
fi
|
||||||
|
|
||||||
|
###################################################
|
||||||
|
# Validate if we should check JAVASCRIPT_STANDARD #
|
||||||
|
###################################################
|
||||||
|
if [[ $ANY_SET == "true" ]]; then
|
||||||
|
# Some linter flags were set - only run those set to true
|
||||||
|
if [[ -z $VALIDATE_JAVASCRIPT_STANDARD ]]; then
|
||||||
|
# JAVASCRIPT_STANDARD flag was not set - default to false
|
||||||
|
VALIDATE_JAVASCRIPT_STANDARD="false"
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
# No linter flags were set - default all to true
|
||||||
|
VALIDATE_JAVASCRIPT_STANDARD="true"
|
||||||
|
fi
|
||||||
|
|
||||||
|
#############################################
|
||||||
|
# Validate if we should check TYPESCRIPT_ES #
|
||||||
|
#############################################
|
||||||
|
if [[ $ANY_SET == "true" ]]; then
|
||||||
|
# Some linter flags were set - only run those set to true
|
||||||
|
if [[ -z $VALIDATE_TYPESCRIPT_ES ]]; then
|
||||||
|
# TYPESCRIPT_ES flag was not set - default to false
|
||||||
|
VALIDATE_TYPESCRIPT_ES="false"
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
# No linter flags were set - default all to true
|
||||||
|
VALIDATE_TYPESCRIPT_ES="true"
|
||||||
|
fi
|
||||||
|
|
||||||
|
###################################################
|
||||||
|
# Validate if we should check TYPESCRIPT_STANDARD #
|
||||||
|
###################################################
|
||||||
|
if [[ $ANY_SET == "true" ]]; then
|
||||||
|
# Some linter flags were set - only run those set to true
|
||||||
|
if [[ -z $VALIDATE_TYPESCRIPT_STANDARD ]]; then
|
||||||
|
# TYPESCRIPT_STANDARD flag was not set - default to false
|
||||||
|
VALIDATE_TYPESCRIPT_STANDARD="false"
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
# No linter flags were set - default all to true
|
||||||
|
VALIDATE_TYPESCRIPT_STANDARD="true"
|
||||||
|
fi
|
||||||
|
|
||||||
|
######################################
|
||||||
|
# Validate if we should check DOCKER #
|
||||||
|
######################################
|
||||||
|
if [[ $ANY_SET == "true" ]]; then
|
||||||
|
# Some linter flags were set - only run those set to true
|
||||||
|
if [[ -z $VALIDATE_DOCKER ]]; then
|
||||||
|
# DOCKER flag was not set - default to false
|
||||||
|
VALIDATE_DOCKER="false"
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
# No linter flags were set - default all to true
|
||||||
|
VALIDATE_DOCKER="true"
|
||||||
|
fi
|
||||||
|
|
||||||
|
##################################
|
||||||
|
# Validate if we should check GO #
|
||||||
|
##################################
|
||||||
|
if [[ $ANY_SET == "true" ]]; then
|
||||||
|
# Some linter flags were set - only run those set to true
|
||||||
|
if [[ -z $VALIDATE_GO ]]; then
|
||||||
|
# GO flag was not set - default to false
|
||||||
|
VALIDATE_GO="false"
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
# No linter flags were set - default all to true
|
||||||
|
VALIDATE_GO="true"
|
||||||
|
fi
|
||||||
|
|
||||||
|
#########################################
|
||||||
|
# Validate if we should check TERRAFORM #
|
||||||
|
#########################################
|
||||||
|
if [[ $ANY_SET == "true" ]]; then
|
||||||
|
# Some linter flags were set - only run those set to true
|
||||||
|
if [[ -z $VALIDATE_TERRAFORM ]]; then
|
||||||
|
# TERRAFORM flag was not set - default to false
|
||||||
|
VALIDATE_TERRAFORM="false"
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
# No linter flags were set - default all to true
|
||||||
|
VALIDATE_TERRAFORM="true"
|
||||||
|
fi
|
||||||
|
|
||||||
|
#########################################
|
||||||
|
# Validate if we should check POWERSHELL #
|
||||||
|
#########################################
|
||||||
|
if [[ $ANY_SET == "true" ]]; then
|
||||||
|
# Some linter flags were set - only run those set to true
|
||||||
|
if [[ -z $VALIDATE_POWERSHELL ]]; then
|
||||||
|
# POWERSHELL flag was not set - default to false
|
||||||
|
VALIDATE_POWERSHELL="false"
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
# No linter flags were set - default all to true
|
||||||
|
VALIDATE_POWERSHELL="true"
|
||||||
|
fi
|
||||||
|
|
||||||
|
###################################
|
||||||
|
# Validate if we should check ARM #
|
||||||
|
###################################
|
||||||
|
if [[ "$ANY_SET" == "true" ]]; then
|
||||||
|
# Some linter flags were set - only run those set to true
|
||||||
|
if [[ -z "$VALIDATE_ARM" ]]; then
|
||||||
|
# ARM flag was not set - default to false
|
||||||
|
VALIDATE_ARM="false"
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
# No linter flags were set - default all to true
|
||||||
|
VALIDATE_ARM="true"
|
||||||
|
fi
|
||||||
|
|
||||||
|
###################################
|
||||||
|
# Validate if we should check CSS #
|
||||||
|
###################################
|
||||||
|
if [[ $ANY_SET == "true" ]]; then
|
||||||
|
# Some linter flags were set - only run those set to true
|
||||||
|
if [[ -z $VALIDATE_CSS ]]; then
|
||||||
|
# CSS flag was not set - default to false
|
||||||
|
VALIDATE_CSS="false"
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
# No linter flags were set - default all to true
|
||||||
|
VALIDATE_CSS="true"
|
||||||
|
fi
|
||||||
|
|
||||||
|
###################################
|
||||||
|
# Validate if we should check ENV #
|
||||||
|
###################################
|
||||||
|
if [[ $ANY_SET == "true" ]]; then
|
||||||
|
# Some linter flags were set - only run those set to true
|
||||||
|
if [[ -z $VALIDATE_ENV ]]; then
|
||||||
|
# ENV flag was not set - default to false
|
||||||
|
VALIDATE_ENV="false"
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
# No linter flags were set - default all to true
|
||||||
|
VALIDATE_ENV="true"
|
||||||
|
fi
|
||||||
|
|
||||||
|
######################################
|
||||||
|
# Validate if we should check KOTLIN #
|
||||||
|
######################################
|
||||||
|
if [[ $ANY_SET == "true" ]]; then
|
||||||
|
# Some linter flags were set - only run those set to true
|
||||||
|
if [[ -z $VALIDATE_KOTLIN ]]; then
|
||||||
|
# ENV flag was not set - default to false
|
||||||
|
VALIDATE_KOTLIN="false"
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
# No linter flags were set - default all to true
|
||||||
|
VALIDATE_KOTLIN="true"
|
||||||
|
fi
|
||||||
|
|
||||||
|
#######################################
|
||||||
|
# Validate if we should check OPENAPI #
|
||||||
|
#######################################
|
||||||
|
if [[ $ANY_SET == "true" ]]; then
|
||||||
|
# Some linter flags were set - only run those set to true
|
||||||
|
if [[ -z $VALIDATE_OPENAPI ]]; then
|
||||||
|
# OPENAPI flag was not set - default to false
|
||||||
|
VALIDATE_OPENAPI="false"
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
# No linter flags were set - default all to true
|
||||||
|
VALIDATE_OPENAPI="true"
|
||||||
|
fi
|
||||||
|
|
||||||
|
#######################################
|
||||||
|
# Validate if we should check PROTOBUF #
|
||||||
|
#######################################
|
||||||
|
if [[ $ANY_SET == "true" ]]; then
|
||||||
|
# Some linter flags were set - only run those set to true
|
||||||
|
if [[ -z $VALIDATE_PROTOBUF ]]; then
|
||||||
|
# PROTOBUF flag was not set - default to false
|
||||||
|
VALIDATE_PROTOBUF="false"
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
# No linter flags were set - default all to true
|
||||||
|
VALIDATE_PROTOBUF="true"
|
||||||
|
fi
|
||||||
|
|
||||||
|
#######################################
|
||||||
|
# Validate if we should check Clojure #
|
||||||
|
#######################################
|
||||||
|
if [[ $ANY_SET == "true" ]]; then
|
||||||
|
# Some linter flags were set - only run those set to true
|
||||||
|
if [[ -z $VALIDATE_CLOJURE ]]; then
|
||||||
|
# Clojure flag was not set - default to false
|
||||||
|
VALIDATE_CLOJURE="false"
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
# No linter flags were set - default all to true
|
||||||
|
VALIDATE_CLOJURE="true"
|
||||||
|
fi
|
||||||
|
|
||||||
|
############################################
|
||||||
|
# Validate if we should check editorconfig #
|
||||||
|
############################################
|
||||||
|
if [[ $ANY_SET == "true" ]]; then
|
||||||
|
# Some linter flags were set - only run those set to true
|
||||||
|
if [[ -z $VALIDATE_EDITORCONFIG ]]; then
|
||||||
|
# EDITORCONFIG flag was not set - default to false
|
||||||
|
VALIDATE_EDITORCONFIG="false"
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
# No linter flags were set - default all to true
|
||||||
|
VALIDATE_EDITORCONFIG="true"
|
||||||
|
fi
|
||||||
|
|
||||||
|
####################################
|
||||||
|
# Validate if we should check HTML #
|
||||||
|
####################################
|
||||||
|
if [[ $ANY_SET == "true" ]]; then
|
||||||
|
# Some linter flags were set - only run those set to true
|
||||||
|
if [[ -z $VALIDATE_HTML ]]; then
|
||||||
|
# HTML flag was not set - default to false
|
||||||
|
VALIDATE_HTML="false"
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
# No linter flags were set - default all to true
|
||||||
|
VALIDATE_HTML="true"
|
||||||
|
fi
|
||||||
|
|
||||||
|
#######################################
|
||||||
|
# Print which linters we are enabling #
|
||||||
|
#######################################
|
||||||
|
if [[ $VALIDATE_YAML == "true" ]]; then
|
||||||
|
PRINT_ARRAY+=("- Validating [YAML] files in code base...")
|
||||||
|
else
|
||||||
|
PRINT_ARRAY+=("- Excluding [YAML] files in code base...")
|
||||||
|
fi
|
||||||
|
if [[ $VALIDATE_JSON == "true" ]]; then
|
||||||
|
PRINT_ARRAY+=("- Validating [JSON] files in code base...")
|
||||||
|
else
|
||||||
|
PRINT_ARRAY+=("- Excluding [JSON] files in code base...")
|
||||||
|
fi
|
||||||
|
if [[ $VALIDATE_XML == "true" ]]; then
|
||||||
|
PRINT_ARRAY+=("- Validating [XML] files in code base...")
|
||||||
|
else
|
||||||
|
PRINT_ARRAY+=("- Excluding [XML] files in code base...")
|
||||||
|
fi
|
||||||
|
if [[ $VALIDATE_MD == "true" ]]; then
|
||||||
|
PRINT_ARRAY+=("- Validating [MARKDOWN] files in code base...")
|
||||||
|
else
|
||||||
|
PRINT_ARRAY+=("- Excluding [MARKDOWN] files in code base...")
|
||||||
|
fi
|
||||||
|
if [[ $VALIDATE_BASH == "true" ]]; then
|
||||||
|
PRINT_ARRAY+=("- Validating [BASH] files in code base...")
|
||||||
|
else
|
||||||
|
PRINT_ARRAY+=("- Excluding [BASH] files in code base...")
|
||||||
|
fi
|
||||||
|
if [[ $VALIDATE_PERL == "true" ]]; then
|
||||||
|
PRINT_ARRAY+=("- Validating [PERL] files in code base...")
|
||||||
|
else
|
||||||
|
PRINT_ARRAY+=("- Excluding [PERL] files in code base...")
|
||||||
|
fi
|
||||||
|
if [[ $VALIDATE_PHP == "true" ]]; then
|
||||||
|
PRINT_ARRAY+=("- Validating [PHP] files in code base...")
|
||||||
|
else
|
||||||
|
PRINT_ARRAY+=("- Excluding [PHP] files in code base...")
|
||||||
|
fi
|
||||||
|
if [[ $VALIDATE_PYTHON == "true" ]]; then
|
||||||
|
PRINT_ARRAY+=("- Validating [PYTHON] files in code base...")
|
||||||
|
else
|
||||||
|
PRINT_ARRAY+=("- Excluding [PYTHON] files in code base...")
|
||||||
|
fi
|
||||||
|
if [[ $VALIDATE_RUBY == "true" ]]; then
|
||||||
|
PRINT_ARRAY+=("- Validating [RUBY] files in code base...")
|
||||||
|
else
|
||||||
|
PRINT_ARRAY+=("- Excluding [RUBY] files in code base...")
|
||||||
|
fi
|
||||||
|
if [[ $VALIDATE_COFFEE == "true" ]]; then
|
||||||
|
PRINT_ARRAY+=("- Validating [COFFEE] files in code base...")
|
||||||
|
else
|
||||||
|
PRINT_ARRAY+=("- Excluding [COFFEE] files in code base...")
|
||||||
|
fi
|
||||||
|
if [[ $VALIDATE_ANSIBLE == "true" ]]; then
|
||||||
|
PRINT_ARRAY+=("- Validating [ANSIBLE] files in code base...")
|
||||||
|
else
|
||||||
|
PRINT_ARRAY+=("- Excluding [ANSIBLE] files in code base...")
|
||||||
|
fi
|
||||||
|
if [[ $VALIDATE_JAVASCRIPT_ES == "true" ]]; then
|
||||||
|
PRINT_ARRAY+=("- Validating [JAVASCRIPT(eslint)] files in code base...")
|
||||||
|
else
|
||||||
|
PRINT_ARRAY+=("- Excluding [JAVASCRIPT(eslint)] files in code base...")
|
||||||
|
fi
|
||||||
|
if [[ $VALIDATE_JAVASCRIPT_STANDARD == "true" ]]; then
|
||||||
|
PRINT_ARRAY+=("- Validating [JAVASCRIPT(standard)] files in code base...")
|
||||||
|
else
|
||||||
|
PRINT_ARRAY+=("- Excluding [JAVASCRIPT(standard)] files in code base...")
|
||||||
|
fi
|
||||||
|
if [[ $VALIDATE_TYPESCRIPT_ES == "true" ]]; then
|
||||||
|
PRINT_ARRAY+=("- Validating [TYPESCRIPT(eslint)] files in code base...")
|
||||||
|
else
|
||||||
|
PRINT_ARRAY+=("- Excluding [TYPESCRIPT(eslint)] files in code base...")
|
||||||
|
fi
|
||||||
|
if [[ $VALIDATE_TYPESCRIPT_STANDARD == "true" ]]; then
|
||||||
|
PRINT_ARRAY+=("- Validating [TYPESCRIPT(standard)] files in code base...")
|
||||||
|
else
|
||||||
|
PRINT_ARRAY+=("- Excluding [TYPESCRIPT(standard)] files in code base...")
|
||||||
|
fi
|
||||||
|
if [[ $VALIDATE_DOCKER == "true" ]]; then
|
||||||
|
PRINT_ARRAY+=("- Validating [DOCKER] files in code base...")
|
||||||
|
else
|
||||||
|
PRINT_ARRAY+=("- Excluding [DOCKER] files in code base...")
|
||||||
|
fi
|
||||||
|
if [[ $VALIDATE_GO == "true" ]]; then
|
||||||
|
PRINT_ARRAY+=("- Validating [GOLANG] files in code base...")
|
||||||
|
else
|
||||||
|
PRINT_ARRAY+=("- Excluding [GOLANG] files in code base...")
|
||||||
|
fi
|
||||||
|
if [[ $VALIDATE_TERRAFORM == "true" ]]; then
|
||||||
|
PRINT_ARRAY+=("- Validating [TERRAFORM] files in code base...")
|
||||||
|
else
|
||||||
|
PRINT_ARRAY+=("- Excluding [TERRAFORM] files in code base...")
|
||||||
|
fi
|
||||||
|
if [[ $VALIDATE_POWERSHELL == "true" ]]; then
|
||||||
|
PRINT_ARRAY+=("- Validating [POWERSHELL] files in code base...")
|
||||||
|
else
|
||||||
|
PRINT_ARRAY+=("- Excluding [POWERSHELL] files in code base...")
|
||||||
|
fi
|
||||||
|
if [[ $VALIDATE_ARM == "true" ]]; then
|
||||||
|
PRINT_ARRAY+=("- Validating [ARM] files in code base...")
|
||||||
|
else
|
||||||
|
PRINT_ARRAY+=("- Excluding [ARM] files in code base...")
|
||||||
|
fi
|
||||||
|
if [[ $VALIDATE_CSS == "true" ]]; then
|
||||||
|
PRINT_ARRAY+=("- Validating [CSS] files in code base...")
|
||||||
|
else
|
||||||
|
PRINT_ARRAY+=("- Excluding [CSS] files in code base...")
|
||||||
|
fi
|
||||||
|
if [[ $VALIDATE_CLOJURE == "true" ]]; then
|
||||||
|
PRINT_ARRAY+=("- Validating [CLOJURE] files in code base...")
|
||||||
|
else
|
||||||
|
PRINT_ARRAY+=("- Excluding [CLOJURE] files in code base...")
|
||||||
|
fi
|
||||||
|
if [[ $VALIDATE_ENV == "true" ]]; then
|
||||||
|
PRINT_ARRAY+=("- Validating [ENV] files in code base...")
|
||||||
|
else
|
||||||
|
PRINT_ARRAY+=("- Excluding [ENV] files in code base...")
|
||||||
|
fi
|
||||||
|
if [[ $VALIDATE_KOTLIN == "true" ]]; then
|
||||||
|
PRINT_ARRAY+=("- Validating [KOTLIN] files in code base...")
|
||||||
|
else
|
||||||
|
PRINT_ARRAY+=("- Excluding [KOTLIN] files in code base...")
|
||||||
|
fi
|
||||||
|
if [[ $VALIDATE_OPENAPI == "true" ]]; then
|
||||||
|
PRINT_ARRAY+=("- Validating [OPENAPI] files in code base...")
|
||||||
|
else
|
||||||
|
PRINT_ARRAY+=("- Excluding [OPENAPI] files in code base...")
|
||||||
|
fi
|
||||||
|
if [[ $VALIDATE_PROTOBUF == "true" ]]; then
|
||||||
|
PRINT_ARRAY+=("- Validating [PROTOBUF] files in code base...")
|
||||||
|
else
|
||||||
|
PRINT_ARRAY+=("- Excluding [PROTOBUF] files in code base...")
|
||||||
|
fi
|
||||||
|
if [[ $VALIDATE_EDITORCONFIG == "true" ]]; then
|
||||||
|
PRINT_ARRAY+=("- Validating [EDITORCONFIG] files in code base...")
|
||||||
|
else
|
||||||
|
PRINT_ARRAY+=("- Excluding [EDITORCONFIG] files in code base...")
|
||||||
|
fi
|
||||||
|
if [[ $VALIDATE_HTML == "true" ]]; then
|
||||||
|
PRINT_ARRAY+=("- Validating [HTML] files in code base...")
|
||||||
|
else
|
||||||
|
PRINT_ARRAY+=("- Excluding [HTML] files in code base...")
|
||||||
|
fi
|
||||||
|
|
||||||
|
##############################
|
||||||
|
# Validate Ansible Directory #
|
||||||
|
##############################
|
||||||
|
if [ -z "$ANSIBLE_DIRECTORY" ]; then
|
||||||
|
# No Value, need to default
|
||||||
|
ANSIBLE_DIRECTORY="$DEFAULT_ANSIBLE_DIRECTORY"
|
||||||
|
else
|
||||||
|
# Check if first char is '/'
|
||||||
|
if [[ ${ANSIBLE_DIRECTORY:0:1} == "/" ]]; then
|
||||||
|
# Remove first char
|
||||||
|
ANSIBLE_DIRECTORY="${ANSIBLE_DIRECTORY:1}"
|
||||||
|
fi
|
||||||
|
# Need to give it full path
|
||||||
|
TEMP_ANSIBLE_DIRECTORY="$GITHUB_WORKSPACE/$ANSIBLE_DIRECTORY"
|
||||||
|
# Set the value
|
||||||
|
ANSIBLE_DIRECTORY="$TEMP_ANSIBLE_DIRECTORY"
|
||||||
|
fi
|
||||||
|
|
||||||
|
###############################
|
||||||
|
# Get the disable errors flag #
|
||||||
|
###############################
|
||||||
|
if [ -z "$DISABLE_ERRORS" ]; then
|
||||||
|
##################################
|
||||||
|
# No flag passed, set to default #
|
||||||
|
##################################
|
||||||
|
DISABLE_ERRORS="$DEFAULT_DISABLE_ERRORS"
|
||||||
|
fi
|
||||||
|
|
||||||
|
###############################
|
||||||
|
# Convert string to lowercase #
|
||||||
|
###############################
|
||||||
|
DISABLE_ERRORS=$(echo "$DISABLE_ERRORS" | awk '{print tolower($0)}')
|
||||||
|
|
||||||
|
############################
|
||||||
|
# Set to false if not true #
|
||||||
|
############################
|
||||||
|
if [ "$DISABLE_ERRORS" != "true" ]; then
|
||||||
|
DISABLE_ERRORS="false"
|
||||||
|
fi
|
||||||
|
|
||||||
|
############################
|
||||||
|
# Get the run verbose flag #
|
||||||
|
############################
|
||||||
|
if [ -z "$ACTIONS_RUNNER_DEBUG" ]; then
|
||||||
|
##################################
|
||||||
|
# No flag passed, set to default #
|
||||||
|
##################################
|
||||||
|
ACTIONS_RUNNER_DEBUG="$DEFAULT_ACTIONS_RUNNER_DEBUG"
|
||||||
|
fi
|
||||||
|
|
||||||
|
###############################
|
||||||
|
# Convert string to lowercase #
|
||||||
|
###############################
|
||||||
|
ACTIONS_RUNNER_DEBUG=$(echo "$ACTIONS_RUNNER_DEBUG" | awk '{print tolower($0)}')
|
||||||
|
|
||||||
|
############################
|
||||||
|
# Set to true if not false #
|
||||||
|
############################
|
||||||
|
if [ "$ACTIONS_RUNNER_DEBUG" != "false" ]; then
|
||||||
|
ACTIONS_RUNNER_DEBUG="true"
|
||||||
|
fi
|
||||||
|
|
||||||
|
###################
|
||||||
|
# Debug on runner #
|
||||||
|
###################
|
||||||
|
if [[ $ACTIONS_RUNNER_DEBUG == "true" ]]; then
|
||||||
|
###########################
|
||||||
|
# Print the validate info #
|
||||||
|
###########################
|
||||||
|
for LINE in "${PRINT_ARRAY[@]}"; do
|
||||||
|
echo "$LINE"
|
||||||
|
done
|
||||||
|
|
||||||
|
echo "--- DEBUG INFO ---"
|
||||||
|
echo "---------------------------------------------"
|
||||||
|
RUNNER=$(whoami)
|
||||||
|
echo "Runner:[$RUNNER]"
|
||||||
|
echo "ENV:"
|
||||||
|
printenv
|
||||||
|
echo "---------------------------------------------"
|
||||||
|
fi
|
||||||
|
}
|
764
lib/worker.sh
Executable file
764
lib/worker.sh
Executable file
|
@ -0,0 +1,764 @@
|
||||||
|
#!/usr/bin/env bash
|
||||||
|
|
||||||
|
################################################################################
|
||||||
|
################################################################################
|
||||||
|
########### Super-Linter linting Functions @admiralawkbar ######################
|
||||||
|
################################################################################
|
||||||
|
################################################################################
|
||||||
|
########################## FUNCTION CALLS BELOW ################################
|
||||||
|
################################################################################
|
||||||
|
################################################################################
|
||||||
|
#### Function LintCodebase #####################################################
|
||||||
|
function LintCodebase() {
|
||||||
|
####################
|
||||||
|
# Pull in the vars #
|
||||||
|
####################
|
||||||
|
FILE_TYPE="$1" && shift # Pull the variable and remove from array path (Example: JSON)
|
||||||
|
LINTER_NAME="$1" && shift # Pull the variable and remove from array path (Example: jsonlint)
|
||||||
|
LINTER_COMMAND="$1" && shift # Pull the variable and remove from array path (Example: jsonlint -c ConfigFile /path/to/file)
|
||||||
|
FILE_EXTENSIONS="$1" && shift # Pull the variable and remove from array path (Example: *.json)
|
||||||
|
FILE_ARRAY=("$@") # Array of files to validate (Example: $FILE_ARRAY_JSON)
|
||||||
|
|
||||||
|
######################
|
||||||
|
# Create Print Array #
|
||||||
|
######################
|
||||||
|
PRINT_ARRAY=()
|
||||||
|
|
||||||
|
################
|
||||||
|
# print header #
|
||||||
|
################
|
||||||
|
PRINT_ARRAY+=("")
|
||||||
|
PRINT_ARRAY+=("----------------------------------------------")
|
||||||
|
PRINT_ARRAY+=("----------------------------------------------")
|
||||||
|
PRINT_ARRAY+=("Linting [$FILE_TYPE] files...")
|
||||||
|
PRINT_ARRAY+=("----------------------------------------------")
|
||||||
|
PRINT_ARRAY+=("----------------------------------------------")
|
||||||
|
|
||||||
|
#####################################
|
||||||
|
# Validate we have linter installed #
|
||||||
|
#####################################
|
||||||
|
VALIDATE_INSTALL_CMD=$(command -v "$LINTER_NAME" 2>&1)
|
||||||
|
|
||||||
|
#######################
|
||||||
|
# Load the error code #
|
||||||
|
#######################
|
||||||
|
ERROR_CODE=$?
|
||||||
|
|
||||||
|
##############################
|
||||||
|
# Check the shell for errors #
|
||||||
|
##############################
|
||||||
|
if [ $ERROR_CODE -ne 0 ]; then
|
||||||
|
# Failed
|
||||||
|
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to find [$LINTER_NAME] in system!${NC}"
|
||||||
|
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[$VALIDATE_INSTALL_CMD]${NC}"
|
||||||
|
exit 1
|
||||||
|
else
|
||||||
|
# Success
|
||||||
|
if [[ $ACTIONS_RUNNER_DEBUG == "true" ]]; then
|
||||||
|
echo -e "${NC}${F[B]}Successfully found binary for ${F[W]}[$LINTER_NAME]${F[B]} in system location: ${F[W]}[$VALIDATE_INSTALL_CMD]${NC}"
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
|
||||||
|
##########################
|
||||||
|
# Initialize empty Array #
|
||||||
|
##########################
|
||||||
|
LIST_FILES=()
|
||||||
|
|
||||||
|
################
|
||||||
|
# Set the flag #
|
||||||
|
################
|
||||||
|
SKIP_FLAG=0
|
||||||
|
|
||||||
|
############################################################
|
||||||
|
# Check to see if we need to go through array or all files #
|
||||||
|
############################################################
|
||||||
|
if [ ${#FILE_ARRAY[@]} -eq 0 ] && [ "$VALIDATE_ALL_CODEBASE" == "false" ]; then
|
||||||
|
# No files found in commit and user has asked to not validate code base
|
||||||
|
SKIP_FLAG=1
|
||||||
|
# echo " - No files found in changeset to lint for language:[$FILE_TYPE]"
|
||||||
|
elif [ ${#FILE_ARRAY[@]} -ne 0 ]; then
|
||||||
|
# We have files added to array of files to check
|
||||||
|
LIST_FILES=("${FILE_ARRAY[@]}") # Copy the array into list
|
||||||
|
else
|
||||||
|
###############################################################################
|
||||||
|
# Set the file seperator to newline to allow for grabbing objects with spaces #
|
||||||
|
###############################################################################
|
||||||
|
IFS=$'\n'
|
||||||
|
|
||||||
|
#################################
|
||||||
|
# Get list of all files to lint #
|
||||||
|
#################################
|
||||||
|
mapfile -t LIST_FILES < <(find "$GITHUB_WORKSPACE" -type f -regex "$FILE_EXTENSIONS" 2>&1)
|
||||||
|
|
||||||
|
###########################
|
||||||
|
# Set IFS back to default #
|
||||||
|
###########################
|
||||||
|
IFS="$DEFAULT_IFS"
|
||||||
|
|
||||||
|
############################################################
|
||||||
|
# Set it back to empty if loaded with blanks from scanning #
|
||||||
|
############################################################
|
||||||
|
if [ ${#LIST_FILES[@]} -lt 1 ]; then
|
||||||
|
######################
|
||||||
|
# Set to empty array #
|
||||||
|
######################
|
||||||
|
LIST_FILES=()
|
||||||
|
#############################
|
||||||
|
# Skip as we found no files #
|
||||||
|
#############################
|
||||||
|
SKIP_FLAG=1
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
|
||||||
|
###############################
|
||||||
|
# Check if any data was found #
|
||||||
|
###############################
|
||||||
|
if [ $SKIP_FLAG -eq 0 ]; then
|
||||||
|
######################
|
||||||
|
# Print Header array #
|
||||||
|
######################
|
||||||
|
for LINE in "${PRINT_ARRAY[@]}"; do
|
||||||
|
#########################
|
||||||
|
# Print the header info #
|
||||||
|
#########################
|
||||||
|
echo "$LINE"
|
||||||
|
done
|
||||||
|
|
||||||
|
########################################
|
||||||
|
# Prepare context if TAP format output #
|
||||||
|
########################################
|
||||||
|
if IsTAP ; then
|
||||||
|
TMPFILE=$(mktemp -q "/tmp/super-linter-${FILE_TYPE}.XXXXXX")
|
||||||
|
INDEX=0
|
||||||
|
mkdir -p "${REPORT_OUTPUT_FOLDER}"
|
||||||
|
REPORT_OUTPUT_FILE="${REPORT_OUTPUT_FOLDER}/super-linter-${FILE_TYPE}.${OUTPUT_FORMAT}"
|
||||||
|
fi
|
||||||
|
|
||||||
|
##################
|
||||||
|
# Lint the files #
|
||||||
|
##################
|
||||||
|
for FILE in "${LIST_FILES[@]}"; do
|
||||||
|
#####################
|
||||||
|
# Get the file name #
|
||||||
|
#####################
|
||||||
|
FILE_NAME=$(basename "$FILE" 2>&1)
|
||||||
|
|
||||||
|
#####################################################
|
||||||
|
# Make sure we dont lint node modules or test cases #
|
||||||
|
#####################################################
|
||||||
|
if [[ $FILE == *"node_modules"* ]]; then
|
||||||
|
# This is a node modules file
|
||||||
|
continue
|
||||||
|
elif [[ $FILE == *"$TEST_CASE_FOLDER"* ]]; then
|
||||||
|
# This is the test cases, we should always skip
|
||||||
|
continue
|
||||||
|
fi
|
||||||
|
|
||||||
|
##################################
|
||||||
|
# Increase the linted file index #
|
||||||
|
##################################
|
||||||
|
(("INDEX++"))
|
||||||
|
|
||||||
|
##############
|
||||||
|
# File print #
|
||||||
|
##############
|
||||||
|
echo "---------------------------"
|
||||||
|
echo "File:[$FILE]"
|
||||||
|
|
||||||
|
####################
|
||||||
|
# Set the base Var #
|
||||||
|
####################
|
||||||
|
LINT_CMD=''
|
||||||
|
|
||||||
|
####################################
|
||||||
|
# Corner case for pwsh subshell #
|
||||||
|
# - PowerShell (PSScriptAnalyzer) #
|
||||||
|
# - ARM (arm-ttk) #
|
||||||
|
####################################
|
||||||
|
if [[ $FILE_TYPE == "POWERSHELL" ]] || [[ $FILE_TYPE == "ARM" ]]; then
|
||||||
|
################################
|
||||||
|
# Lint the file with the rules #
|
||||||
|
################################
|
||||||
|
# Need to run PowerShell commands using pwsh -c, also exit with exit code from inner subshell
|
||||||
|
LINT_CMD=$(
|
||||||
|
cd "$GITHUB_WORKSPACE" || exit
|
||||||
|
pwsh -NoProfile -NoLogo -Command "$LINTER_COMMAND $FILE; if (\$Error.Count) { exit 1 }"
|
||||||
|
exit $? 2>&1
|
||||||
|
)
|
||||||
|
else
|
||||||
|
################################
|
||||||
|
# Lint the file with the rules #
|
||||||
|
################################
|
||||||
|
LINT_CMD=$(
|
||||||
|
cd "$GITHUB_WORKSPACE" || exit
|
||||||
|
$LINTER_COMMAND "$FILE" 2>&1
|
||||||
|
)
|
||||||
|
fi
|
||||||
|
|
||||||
|
#######################
|
||||||
|
# Load the error code #
|
||||||
|
#######################
|
||||||
|
ERROR_CODE=$?
|
||||||
|
|
||||||
|
##############################
|
||||||
|
# Check the shell for errors #
|
||||||
|
##############################
|
||||||
|
if [ $ERROR_CODE -ne 0 ]; then
|
||||||
|
#########
|
||||||
|
# Error #
|
||||||
|
#########
|
||||||
|
echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Found errors in [$LINTER_NAME] linter!${NC}"
|
||||||
|
echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[$LINT_CMD]${NC}"
|
||||||
|
# Increment the error count
|
||||||
|
(("ERRORS_FOUND_$FILE_TYPE++"))
|
||||||
|
|
||||||
|
#######################################################
|
||||||
|
# Store the linting as a temporary file in TAP format #
|
||||||
|
#######################################################
|
||||||
|
if IsTAP ; then
|
||||||
|
echo "not ok ${INDEX} - ${FILE}" >> "${TMPFILE}"
|
||||||
|
##########################################
|
||||||
|
# Report the detailed message if enabled #
|
||||||
|
##########################################
|
||||||
|
DETAILED_MSG=$(TransformTAPDetails "$LINT_CMD")
|
||||||
|
if [ -n "${DETAILED_MSG}" ] ; then
|
||||||
|
printf " ---\n message: %s\n ...\n" "$DETAILED_MSG" >> "${TMPFILE}"
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
###########
|
||||||
|
# Success #
|
||||||
|
###########
|
||||||
|
echo -e "${NC}${F[B]} - File:${F[W]}[$FILE_NAME]${F[B]} was linted with ${F[W]}[$LINTER_NAME]${F[B]} successfully${NC}"
|
||||||
|
|
||||||
|
#######################################################
|
||||||
|
# Store the linting as a temporary file in TAP format #
|
||||||
|
#######################################################
|
||||||
|
if IsTAP ; then
|
||||||
|
echo "ok ${INDEX} - ${FILE}" >> "${TMPFILE}"
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
|
||||||
|
#################################
|
||||||
|
# Generate report in TAP format #
|
||||||
|
#################################
|
||||||
|
if IsTAP && [ ${INDEX} -gt 0 ] ; then
|
||||||
|
printf "TAP version 13\n1..%s\n" "${INDEX}" > "${REPORT_OUTPUT_FILE}"
|
||||||
|
cat "${TMPFILE}" >> "${REPORT_OUTPUT_FILE}"
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
################################################################################
#### Function TestCodebase #####################################################
function TestCodebase() {
  # Run one linter against its dedicated test-case folder and verify the
  # fixtures behave as their names promise: files tagged "good" must lint
  # clean, files tagged "bad" must fail.
  #
  # $1 - FILE_TYPE:             language key (Example: JSON)
  # $2 - LINTER_NAME:           linter binary (Example: jsonlint)
  # $3 - LINTER_COMMAND:        command used to lint one file
  # $4 - FILE_EXTENSIONS:       find(1) -regex pattern selecting test files
  # $5 - INDVIDUAL_TEST_FOLDER: sub-folder of the test-case tree to scan
  FILE_TYPE="$1"
  LINTER_NAME="$2"
  LINTER_COMMAND="$3"
  FILE_EXTENSIONS="$4"
  INDVIDUAL_TEST_FOLDER="$5"
  TESTS_RAN=0 # Bumped once per linted file; zero at the end means broken setup

  ################
  # print header #
  ################
  echo ""
  echo "----------------------------------------------"
  echo "----------------------------------------------"
  echo "Testing Codebase [$FILE_TYPE] files..."
  echo "----------------------------------------------"
  echo "----------------------------------------------"
  echo ""

  # Make sure the linter binary is actually installed before doing any work
  VALIDATE_INSTALL_CMD=$(command -v "$LINTER_NAME" 2>&1)
  ERROR_CODE=$?
  if [ $ERROR_CODE -ne 0 ]; then
    # Binary missing: this is a broken image, bail out hard
    echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to find [$LINTER_NAME] in system!${NC}"
    echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[$VALIDATE_INSTALL_CMD]${NC}"
    exit 1
  else
    echo -e "${NC}${F[B]}Successfully found binary for ${F[W]}[$LINTER_NAME]${F[B]} in system location: ${F[W]}[$VALIDATE_INSTALL_CMD]${NC}"
  fi

  # Collect every fixture for this linter (the ghe-initialize tree is excluded)
  LIST_FILES=()
  mapfile -t LIST_FILES < <(find "$GITHUB_WORKSPACE/$TEST_CASE_FOLDER/$INDVIDUAL_TEST_FOLDER" -type f -regex "$FILE_EXTENSIONS" ! -path "$GITHUB_WORKSPACE/$TEST_CASE_FOLDER/ansible/ghe-initialize/*" 2>&1)

  ##################
  # Lint the files #
  ##################
  for FILE in "${LIST_FILES[@]}"; do
    FILE_NAME=$(basename "$FILE" 2>&1)

    # Expected result is encoded in the second "_"-separated token of the
    # file name (Example: markdown_good_1.md -> good)
    FILE_STATUS=$(echo "$FILE_NAME" | cut -f2 -d'_')

    # Files without a status marker (and READMEs) are expected to lint clean
    if [ -z "$FILE_STATUS" ] || [[ $FILE == *"README"* ]]; then
      FILE_STATUS="good"
    fi

    echo "---------------------------"
    echo "File:[$FILE]"

    LINT_CMD=''

    # Docker fixtures carry good/bad in the folder name instead of the file name
    if [[ $FILE_TYPE == "DOCKER" ]]; then
      if [[ $FILE == *"good"* ]]; then
        FILE_STATUS='good'
      else
        FILE_STATUS='bad'
      fi
    fi

    if [[ $FILE_TYPE == "ANSIBLE" ]]; then
      # Vault and galaxy files are never linted
      if [[ $FILE == *"vault.yml"* ]] || [[ $FILE == *"galaxy.yml"* ]]; then
        continue
      fi
      # ansible-lint is run from inside the test folder
      LINT_CMD=$(
        cd "$GITHUB_WORKSPACE/$TEST_CASE_FOLDER/$INDVIDUAL_TEST_FOLDER" || exit
        $LINTER_COMMAND "$FILE" 2>&1
      )
    elif [[ $FILE_TYPE == "POWERSHELL" ]] || [[ $FILE_TYPE == "ARM" ]]; then
      # PowerShell-based linters run under pwsh -Command; propagate the inner
      # exit code out of the subshell so the caller sees pass/fail
      LINT_CMD=$(
        cd "$GITHUB_WORKSPACE/$TEST_CASE_FOLDER" || exit
        pwsh -NoProfile -NoLogo -Command "$LINTER_COMMAND $FILE; if (\$Error.Count) { exit 1 }"
        exit $? 2>&1
      )
    else
      # Default path: run the linter command against the single file
      LINT_CMD=$(
        cd "$GITHUB_WORKSPACE/$TEST_CASE_FOLDER" || exit
        $LINTER_COMMAND "$FILE" 2>&1
      )
    fi

    # Capture the exit status of the lint run above
    ERROR_CODE=$?

    if [[ $FILE_STATUS == "good" ]]; then
      # A "good" fixture must lint clean; a non-zero status is a failure
      if [ $ERROR_CODE -ne 0 ]; then
        echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Found errors in [$LINTER_NAME] linter!${NC}"
        echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[$LINT_CMD]${NC}"
        echo -e "${NC}${B[R]}${F[W]}ERROR:${NC} Linter CMD:[$LINTER_COMMAND $FILE]${NC}"
        # Increment the error count
        (("ERRORS_FOUND_$FILE_TYPE++"))
        # Increment counter that check was ran
        ((TESTS_RAN++))
      else
        echo -e "${NC}${F[B]} - File:${F[W]}[$FILE_NAME]${F[B]} was linted with ${F[W]}[$LINTER_NAME]${F[B]} successfully${NC}"
        ((TESTS_RAN++))
      fi
    else
      # A "bad" fixture must fail; a clean lint means the linter missed it
      if [ $ERROR_CODE -eq 0 ]; then
        echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Found errors in [$LINTER_NAME] linter!${NC}"
        echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} This file should have failed test case!${NC}"
        echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[$LINT_CMD]${NC}"
        echo -e "${NC}${B[R]}${F[W]}ERROR:${NC} Linter CMD:[$LINTER_COMMAND $FILE]${NC}"
        # Increment the error count
        (("ERRORS_FOUND_$FILE_TYPE++"))
        # Increment counter that check was ran
        ((TESTS_RAN++))
      else
        echo -e "${NC}${F[B]} - File:${F[W]}[$FILE_NAME]${F[B]} failed test case with ${F[W]}[$LINTER_NAME]${F[B]} successfully${NC}"
        ((TESTS_RAN++))
      fi
    fi
  done

  # Zero ran tests means the find(1) pattern or the fixtures are broken
  if [ "$TESTS_RAN" -eq 0 ]; then
    echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to find any tests ran for the Linter:[$LINTER_NAME]${NC}"!
    echo "Please validate logic or that tests exist!"
    exit 1
  fi
}
|
||||||
|
################################################################################
#### Function RunTestCases #####################################################
function RunTestCases() {
  # Drive the self-test suite: lint only the bundled test cases, never user
  # code. CI runs a freshly built container with TEST_CASE_RUN=true so new
  # code is validated against the known fixtures before merge.

  #################
  # Header prints #
  #################
  echo ""
  echo "----------------------------------------------"
  echo "-------------- TEST CASE RUN -----------------"
  echo "----------------------------------------------"
  echo ""

  # One TestCodebase call per supported language:
  # TestCodebase "Language" "Linter" "Linter-command" "Regex to find files" "Test Folder"
  TestCodebase "YML" "yamllint" "yamllint -c $YAML_LINTER_RULES" ".*\.\(yml\|yaml\)\$" "yml"
  TestCodebase "JSON" "jsonlint" "jsonlint" ".*\.\(json\)\$" "json"
  TestCodebase "XML" "xmllint" "xmllint" ".*\.\(xml\)\$" "xml"
  TestCodebase "MARKDOWN" "markdownlint" "markdownlint -c $MD_LINTER_RULES" ".*\.\(md\)\$" "markdown"
  TestCodebase "BASH" "shellcheck" "shellcheck --color" ".*\.\(sh\)\$" "shell"
  TestCodebase "PYTHON" "pylint" "pylint --rcfile $PYTHON_LINTER_RULES" ".*\.\(py\)\$" "python"
  TestCodebase "PERL" "perl" "perl -Mstrict -cw" ".*\.\(pl\)\$" "perl"
  TestCodebase "PHP" "php" "php -l" ".*\.\(php\)\$" "php"
  TestCodebase "RUBY" "rubocop" "rubocop -c $RUBY_LINTER_RULES" ".*\.\(rb\)\$" "ruby"
  TestCodebase "GO" "golangci-lint" "golangci-lint run -c $GO_LINTER_RULES" ".*\.\(go\)\$" "golang"
  TestCodebase "COFFEESCRIPT" "coffeelint" "coffeelint -f $COFFEESCRIPT_LINTER_RULES" ".*\.\(coffee\)\$" "coffeescript"
  TestCodebase "JAVASCRIPT_ES" "eslint" "eslint --no-eslintrc -c $JAVASCRIPT_LINTER_RULES" ".*\.\(js\)\$" "javascript"
  TestCodebase "JAVASCRIPT_STANDARD" "standard" "standard $JAVASCRIPT_STANDARD_LINTER_RULES" ".*\.\(js\)\$" "javascript"
  TestCodebase "TYPESCRIPT_ES" "eslint" "eslint --no-eslintrc -c $TYPESCRIPT_LINTER_RULES" ".*\.\(ts\)\$" "typescript"
  TestCodebase "TYPESCRIPT_STANDARD" "standard" "standard --parser @typescript-eslint/parser --plugin @typescript-eslint/eslint-plugin $TYPESCRIPT_STANDARD_LINTER_RULES" ".*\.\(ts\)\$" "typescript"
  TestCodebase "DOCKER" "/dockerfilelint/bin/dockerfilelint" "/dockerfilelint/bin/dockerfilelint -c $DOCKER_LINTER_RULES" ".*\(Dockerfile\)\$" "docker"
  TestCodebase "ANSIBLE" "ansible-lint" "ansible-lint -v -c $ANSIBLE_LINTER_RULES" ".*\.\(yml\|yaml\)\$" "ansible"
  TestCodebase "TERRAFORM" "tflint" "tflint -c $TERRAFORM_LINTER_RULES" ".*\.\(tf\)\$" "terraform"
  TestCodebase "CFN" "cfn-lint" "cfn-lint --config-file $CFN_LINTER_RULES" ".*\.\(json\|yml\|yaml\)\$" "cfn"
  TestCodebase "POWERSHELL" "pwsh" "Invoke-ScriptAnalyzer -EnableExit -Settings $POWERSHELL_LINTER_RULES -Path" ".*\.\(ps1\|psm1\|psd1\|ps1xml\|pssc\|psrc\|cdxml\)\$" "powershell"
  TestCodebase "ARM" "arm-ttk" "Import-Module $ARM_TTK_PSD1 ; \$config = \$(Import-PowerShellDataFile -Path $ARM_LINTER_RULES) ; Test-AzTemplate @config -TemplatePath" ".*\.\(json\)\$" "arm"
  TestCodebase "CSS" "stylelint" "stylelint --config $CSS_LINTER_RULES" ".*\.\(css\)\$" "css"
  TestCodebase "ENV" "dotenv-linter" "dotenv-linter" ".*\.\(env\)\$" "env"
  TestCodebase "CLOJURE" "clj-kondo" "clj-kondo --config $CLOJURE_LINTER_RULES --lint" ".*\.\(clj\|cljs\|cljc\|edn\)\$" "clojure"
  TestCodebase "KOTLIN" "ktlint" "ktlint" ".*\.\(kt\|kts\)\$" "kotlin"
  TestCodebase "PROTOBUF" "protolint" "protolint lint --config_path $PROTOBUF_LINTER_RULES" ".*\.\(proto\)\$" "protobuf"
  TestCodebase "OPENAPI" "spectral" "spectral lint -r $OPENAPI_LINTER_RULES" ".*\.\(ymlopenapi\|jsonopenapi\)\$" "openapi"
  TestCodebase "HTML" "htmlhint" "htmlhint --config $HTML_LINTER_RULES" ".*\.\(html\)\$" "html"

  #################
  # Footer prints #
  #################
  # Print the run summary and exit with the aggregate error code
  Footer
}
|
||||||
|
################################################################################
#### Function LintAnsibleFiles #################################################
function LintAnsibleFiles() {
  # Lint every playbook (*.yml) found directly under $ANSIBLE_DIRECTORY with
  # ansible-lint. Skips vault.yml/galaxy.yml, short-circuits when only
  # read-only files changed, and emits a TAP report when OUTPUT_FORMAT=tap.

  ######################
  # Create Print Array #
  ######################
  PRINT_ARRAY=()
  PRINT_ARRAY+=("")
  PRINT_ARRAY+=("----------------------------------------------")
  PRINT_ARRAY+=("----------------------------------------------")
  PRINT_ARRAY+=("Linting [Ansible] files...")
  PRINT_ARRAY+=("----------------------------------------------")
  PRINT_ARRAY+=("----------------------------------------------")

  ######################
  # Name of the linter #
  ######################
  LINTER_NAME="ansible-lint"

  # Validate the linter binary exists before doing any work
  VALIDATE_INSTALL_CMD=$(command -v "$LINTER_NAME" 2>&1)
  ERROR_CODE=$?
  if [ $ERROR_CODE -ne 0 ]; then
    # Failed
    echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Failed to find $LINTER_NAME in system!${NC}"
    echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[$VALIDATE_INSTALL_CMD]${NC}"
    exit 1
  else
    # Success; only announce the location in debug mode
    if [[ $ACTIONS_RUNNER_DEBUG == "true" ]]; then
      echo -e "${NC}${F[B]}Successfully found binary in system${NC}"
      echo "Location:[$VALIDATE_INSTALL_CMD]"
    fi
  fi

  ##########################
  # Initialize empty Array #
  ##########################
  LIST_FILES=()

  # Flag used to skip the lint loop entirely
  SKIP_FLAG=0

  # Only go into ansible linter if we have the base folder
  if [ -d "$ANSIBLE_DIRECTORY" ]; then

    #################################
    # Get list of all files to lint #
    #################################
    # BUGFIX: the glob must sit OUTSIDE the quotes. Quoting "$DIR/*.yml"
    # passes the literal string "*.yml" to ls, which then fails and the
    # "file list" captures only the error message instead of the playbooks.
    mapfile -t LIST_FILES < <(ls "$ANSIBLE_DIRECTORY"/*.yml 2>&1)

    # No need to run the full ansible checks on read only file changes
    if [ "$READ_ONLY_CHANGE_FLAG" -eq 0 ]; then
      # Empty the list and flag the loop to be skipped
      LIST_FILES=()
      #echo "- Skipping Ansible lint run as file(s) that were modified were read only..."
      SKIP_FLAG=1
    fi

    # Print the header only when we actually have work to do
    if [ $SKIP_FLAG -eq 0 ]; then
      for LINE in "${PRINT_ARRAY[@]}"; do
        echo "$LINE"
      done
    fi

    ########################################
    # Prepare context if TAP output format #
    ########################################
    if IsTAP ; then
      # NOTE(review): FILE_TYPE is never assigned in this function; the report
      # file name reuses whatever value a previous call left behind — confirm
      # this is intended (upstream later hard-codes "ANSIBLE" here).
      TMPFILE=$(mktemp -q "/tmp/super-linter-${FILE_TYPE}.XXXXXX")
      INDEX=0
      mkdir -p "${REPORT_OUTPUT_FOLDER}"
      REPORT_OUTPUT_FILE="${REPORT_OUTPUT_FOLDER}/super-linter-${FILE_TYPE}.${OUTPUT_FORMAT}"
    fi

    ##################
    # Lint the files #
    ##################
    for FILE in "${LIST_FILES[@]}"; do

      # Vault and galaxy files are never linted
      if [[ $FILE == *"vault.yml"* ]] || [[ $FILE == *"galaxy.yml"* ]]; then
        continue
      fi

      # Increase the linted file index (drives the TAP line numbers)
      (("INDEX++"))

      ####################
      # Get the filename #
      ####################
      FILE_NAME=$(basename "$ANSIBLE_DIRECTORY/$FILE" 2>&1)

      echo "---------------------------"
      echo "File:[$FILE]"

      ################################
      # Lint the file with the rules #
      ################################
      LINT_CMD=$("$LINTER_NAME" -v -c "$ANSIBLE_LINTER_RULES" "$ANSIBLE_DIRECTORY/$FILE" 2>&1)
      ERROR_CODE=$?

      if [ $ERROR_CODE -ne 0 ]; then
        #########
        # Error #
        #########
        echo -e "${NC}${B[R]}${F[W]}ERROR!${NC} Found errors in [$LINTER_NAME] linter!${NC}"
        echo -e "${NC}${B[R]}${F[W]}ERROR:${NC}[$LINT_CMD]${NC}"
        # Increment error count
        ((ERRORS_FOUND_ANSIBLE++))

        # Store the linting as a temporary file in TAP format
        if IsTAP ; then
          echo "not ok ${INDEX} - ${FILE}" >> "${TMPFILE}"
          # Report the detailed message if enabled
          DETAILED_MSG=$(TransformTAPDetails "$LINT_CMD")
          if [ -n "${DETAILED_MSG}" ] ; then
            printf " ---\n message: %s\n ...\n" "$DETAILED_MSG" >> "${TMPFILE}"
          fi
        fi
      else
        ###########
        # Success #
        ###########
        echo -e "${NC}${F[B]} - File:${F[W]}[$FILE_NAME]${F[B]} was linted with ${F[W]}[$LINTER_NAME]${F[B]} successfully${NC}"

        # Store the linting as a temporary file in TAP format
        if IsTAP ; then
          echo "ok ${INDEX} - ${FILE}" >> "${TMPFILE}"
        fi
      fi
    done

    #################################
    # Generate report in TAP format #
    #################################
    if IsTAP && [ ${INDEX} -gt 0 ] ; then
      printf "TAP version 13\n1..%s\n" "${INDEX}" > "${REPORT_OUTPUT_FILE}"
      cat "${TMPFILE}" >> "${REPORT_OUTPUT_FILE}"
    fi
  else # No ansible directory found in path
    # Only mention the skip in debug mode
    if [[ $ACTIONS_RUNNER_DEBUG == "true" ]]; then
      echo -e "${NC}${F[Y]}WARN!${NC} No Ansible base directory found at:[$ANSIBLE_DIRECTORY]${NC}"
      echo "skipping ansible lint"
    fi
  fi
}
|
||||||
|
################################################################################
#### Function IsTAP ############################################################
function IsTAP() {
  # Predicate: exit status 0 (success) when the requested output format is
  # TAP, non-zero otherwise. The test's own status IS the return value.
  [ "${OUTPUT_FORMAT}" == "tap" ]
}
|
||||||
|
################################################################################
#### Function TransformTAPDetails ##############################################
function TransformTAPDetails() {
  # Flatten linter output for embedding in a TAP message line:
  # real newlines become the two characters "\n", ANSI colour escapes are
  # stripped, and colons are replaced with spaces.
  # Emits nothing unless input is non-empty AND OUTPUT_DETAILS=detailed.
  DATA=$1
  if [ -z "${DATA}" ] || [ "${OUTPUT_DETAILS}" != "detailed" ] ; then
    return
  fi
  echo "${DATA}" | awk 'BEGIN{RS="\n";ORS="\\n"}1' | sed -r "s/\x1B\[([0-9]{1,3}(;[0-9]{1,2})?)?[mGK]//g" | tr ':' ' '
}
|
545
package-lock.json
generated
Normal file
545
package-lock.json
generated
Normal file
|
@ -0,0 +1,545 @@
|
||||||
|
{
|
||||||
|
"requires": true,
|
||||||
|
"lockfileVersion": 1,
|
||||||
|
"dependencies": {
|
||||||
|
"@types/color-name": {
|
||||||
|
"version": "1.1.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/@types/color-name/-/color-name-1.1.1.tgz",
|
||||||
|
"integrity": "sha512-rr+OQyAjxze7GgWrSaJwydHStIhHq2lvY3BOC2Mj7KnzI7XK0Uw1TOOdI9lDoajEbSWLiYgoo4f1R51erQfhPQ=="
|
||||||
|
},
|
||||||
|
"ajv": {
|
||||||
|
"version": "6.12.3",
|
||||||
|
"resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.3.tgz",
|
||||||
|
"integrity": "sha512-4K0cK3L1hsqk9xIb2z9vs/XU+PGJZ9PNpJRDS9YLzmNdX6jmVPfamLvTJr0aDAusnHyCHO6MjzlkAsgtqp9teA==",
|
||||||
|
"requires": {
|
||||||
|
"fast-deep-equal": "^3.1.1",
|
||||||
|
"fast-json-stable-stringify": "^2.0.0",
|
||||||
|
"json-schema-traverse": "^0.4.1",
|
||||||
|
"uri-js": "^4.2.2"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"ansi-styles": {
|
||||||
|
"version": "4.2.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.2.1.tgz",
|
||||||
|
"integrity": "sha512-9VGjrMsG1vePxcSweQsN20KY/c4zN0h9fLjqAbwbPfahM3t+NL+M9HC8xeXG2I8pX5NoamTGNuomEUFI7fcUjA==",
|
||||||
|
"requires": {
|
||||||
|
"@types/color-name": "^1.1.1",
|
||||||
|
"color-convert": "^2.0.1"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"asn1": {
|
||||||
|
"version": "0.2.4",
|
||||||
|
"resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.4.tgz",
|
||||||
|
"integrity": "sha512-jxwzQpLQjSmWXgwaCZE9Nz+glAG01yF1QnWgbhGwHI5A6FRIEY6IVqtHhIepHqI7/kyEyQEagBC5mBEFlIYvdg==",
|
||||||
|
"requires": {
|
||||||
|
"safer-buffer": "~2.1.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"assert-plus": {
|
||||||
|
"version": "1.0.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz",
|
||||||
|
"integrity": "sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU="
|
||||||
|
},
|
||||||
|
"async": {
|
||||||
|
"version": "3.2.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/async/-/async-3.2.0.tgz",
|
||||||
|
"integrity": "sha512-TR2mEZFVOj2pLStYxLht7TyfuRzaydfpxr3k9RpHIzMgw7A64dzsdqCxH1WJyQdoe8T10nDXd9wnEigmiuHIZw=="
|
||||||
|
},
|
||||||
|
"asynckit": {
|
||||||
|
"version": "0.4.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz",
|
||||||
|
"integrity": "sha1-x57Zf380y48robyXkLzDZkdLS3k="
|
||||||
|
},
|
||||||
|
"aws-sign2": {
|
||||||
|
"version": "0.7.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz",
|
||||||
|
"integrity": "sha1-tG6JCTSpWR8tL2+G1+ap8bP+dqg="
|
||||||
|
},
|
||||||
|
"aws4": {
|
||||||
|
"version": "1.10.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/aws4/-/aws4-1.10.0.tgz",
|
||||||
|
"integrity": "sha512-3YDiu347mtVtjpyV3u5kVqQLP242c06zwDOgpeRnybmXlYYsLbtTrUBUm8i8srONt+FWobl5aibnU1030PeeuA=="
|
||||||
|
},
|
||||||
|
"balanced-match": {
|
||||||
|
"version": "1.0.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz",
|
||||||
|
"integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c="
|
||||||
|
},
|
||||||
|
"bcrypt-pbkdf": {
|
||||||
|
"version": "1.0.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz",
|
||||||
|
"integrity": "sha1-pDAdOJtqQ/m2f/PKEaP2Y342Dp4=",
|
||||||
|
"requires": {
|
||||||
|
"tweetnacl": "^0.14.3"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"brace-expansion": {
|
||||||
|
"version": "1.1.11",
|
||||||
|
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz",
|
||||||
|
"integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==",
|
||||||
|
"requires": {
|
||||||
|
"balanced-match": "^1.0.0",
|
||||||
|
"concat-map": "0.0.1"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"caseless": {
|
||||||
|
"version": "0.12.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz",
|
||||||
|
"integrity": "sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw="
|
||||||
|
},
|
||||||
|
"chalk": {
|
||||||
|
"version": "4.0.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/chalk/-/chalk-4.0.0.tgz",
|
||||||
|
"integrity": "sha512-N9oWFcegS0sFr9oh1oz2d7Npos6vNoWW9HvtCg5N1KRFpUhaAhvTv5Y58g880fZaEYSNm3qDz8SU1UrGvp+n7A==",
|
||||||
|
"requires": {
|
||||||
|
"ansi-styles": "^4.1.0",
|
||||||
|
"supports-color": "^7.1.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"color-convert": {
|
||||||
|
"version": "2.0.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz",
|
||||||
|
"integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==",
|
||||||
|
"requires": {
|
||||||
|
"color-name": "~1.1.4"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"color-name": {
|
||||||
|
"version": "1.1.4",
|
||||||
|
"resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz",
|
||||||
|
"integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA=="
|
||||||
|
},
|
||||||
|
"combined-stream": {
|
||||||
|
"version": "1.0.8",
|
||||||
|
"resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz",
|
||||||
|
"integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==",
|
||||||
|
"requires": {
|
||||||
|
"delayed-stream": "~1.0.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"commander": {
|
||||||
|
"version": "5.1.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/commander/-/commander-5.1.0.tgz",
|
||||||
|
"integrity": "sha512-P0CysNDQ7rtVw4QIQtm+MRxV66vKFSvlsQvGYXZWR3qFU0jlMKHZZZgw8e+8DSah4UDKMqnknRDQz+xuQXQ/Zg=="
|
||||||
|
},
|
||||||
|
"concat-map": {
|
||||||
|
"version": "0.0.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz",
|
||||||
|
"integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s="
|
||||||
|
},
|
||||||
|
"core-util-is": {
|
||||||
|
"version": "1.0.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz",
|
||||||
|
"integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac="
|
||||||
|
},
|
||||||
|
"dashdash": {
|
||||||
|
"version": "1.14.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.1.tgz",
|
||||||
|
"integrity": "sha1-hTz6D3y+L+1d4gMmuN1YEDX24vA=",
|
||||||
|
"requires": {
|
||||||
|
"assert-plus": "^1.0.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"delayed-stream": {
|
||||||
|
"version": "1.0.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz",
|
||||||
|
"integrity": "sha1-3zrhmayt+31ECqrgsp4icrJOxhk="
|
||||||
|
},
|
||||||
|
"ecc-jsbn": {
|
||||||
|
"version": "0.1.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz",
|
||||||
|
"integrity": "sha1-OoOpBOVDUyh4dMVkt1SThoSamMk=",
|
||||||
|
"requires": {
|
||||||
|
"jsbn": "~0.1.0",
|
||||||
|
"safer-buffer": "^2.1.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"extend": {
|
||||||
|
"version": "3.0.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz",
|
||||||
|
"integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g=="
|
||||||
|
},
|
||||||
|
"extsprintf": {
|
||||||
|
"version": "1.3.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz",
|
||||||
|
"integrity": "sha1-lpGEQOMEGnpBT4xS48V06zw+HgU="
|
||||||
|
},
|
||||||
|
"fast-deep-equal": {
|
||||||
|
"version": "3.1.3",
|
||||||
|
"resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz",
|
||||||
|
"integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q=="
|
||||||
|
},
|
||||||
|
"fast-json-stable-stringify": {
|
||||||
|
"version": "2.1.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz",
|
||||||
|
"integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw=="
|
||||||
|
},
|
||||||
|
"forever-agent": {
|
||||||
|
"version": "0.6.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz",
|
||||||
|
"integrity": "sha1-+8cfDEGt6zf5bFd60e1C2P2sypE="
|
||||||
|
},
|
||||||
|
"form-data": {
|
||||||
|
"version": "2.3.3",
|
||||||
|
"resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz",
|
||||||
|
"integrity": "sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ==",
|
||||||
|
"requires": {
|
||||||
|
"asynckit": "^0.4.0",
|
||||||
|
"combined-stream": "^1.0.6",
|
||||||
|
"mime-types": "^2.1.12"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"fs.realpath": {
|
||||||
|
"version": "1.0.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz",
|
||||||
|
"integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8="
|
||||||
|
},
|
||||||
|
"getpass": {
|
||||||
|
"version": "0.1.7",
|
||||||
|
"resolved": "https://registry.npmjs.org/getpass/-/getpass-0.1.7.tgz",
|
||||||
|
"integrity": "sha1-Xv+OPmhNVprkyysSgmBOi6YhSfo=",
|
||||||
|
"requires": {
|
||||||
|
"assert-plus": "^1.0.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"glob": {
|
||||||
|
"version": "7.1.6",
|
||||||
|
"resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz",
|
||||||
|
"integrity": "sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==",
|
||||||
|
"requires": {
|
||||||
|
"fs.realpath": "^1.0.0",
|
||||||
|
"inflight": "^1.0.4",
|
||||||
|
"inherits": "2",
|
||||||
|
"minimatch": "^3.0.4",
|
||||||
|
"once": "^1.3.0",
|
||||||
|
"path-is-absolute": "^1.0.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"glob-base": {
|
||||||
|
"version": "0.3.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/glob-base/-/glob-base-0.3.0.tgz",
|
||||||
|
"integrity": "sha1-27Fk9iIbHAscz4Kuoyi0l98Oo8Q=",
|
||||||
|
"requires": {
|
||||||
|
"glob-parent": "^2.0.0",
|
||||||
|
"is-glob": "^2.0.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"glob-parent": {
|
||||||
|
"version": "2.0.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-2.0.0.tgz",
|
||||||
|
"integrity": "sha1-gTg9ctsFT8zPUzbaqQLxgvbtuyg=",
|
||||||
|
"requires": {
|
||||||
|
"is-glob": "^2.0.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"har-schema": {
|
||||||
|
"version": "2.0.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/har-schema/-/har-schema-2.0.0.tgz",
|
||||||
|
"integrity": "sha1-qUwiJOvKwEeCoNkDVSHyRzW37JI="
|
||||||
|
},
|
||||||
|
"har-validator": {
|
||||||
|
"version": "5.1.3",
|
||||||
|
"resolved": "https://registry.npmjs.org/har-validator/-/har-validator-5.1.3.tgz",
|
||||||
|
"integrity": "sha512-sNvOCzEQNr/qrvJgc3UG/kD4QtlHycrzwS+6mfTrrSq97BvaYcPZZI1ZSqGSPR73Cxn4LKTD4PttRwfU7jWq5g==",
|
||||||
|
"requires": {
|
||||||
|
"ajv": "^6.5.5",
|
||||||
|
"har-schema": "^2.0.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"has-flag": {
|
||||||
|
"version": "4.0.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz",
|
||||||
|
"integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ=="
|
||||||
|
},
|
||||||
|
"htmlhint": {
|
||||||
|
"version": "0.14.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/htmlhint/-/htmlhint-0.14.1.tgz",
|
||||||
|
"integrity": "sha512-VWKrljlwF8tEKH48YPfC30zYKhrsMqm70d7vXswivEqd3DSva8ZlIzfeCa3YWFEFRIIhiXKgKurlqEpCtYMCAA==",
|
||||||
|
"dev": true,
|
||||||
|
"requires": {
|
||||||
|
"async": "3.2.0",
|
||||||
|
"chalk": "4.0.0",
|
||||||
|
"commander": "5.1.0",
|
||||||
|
"glob": "7.1.6",
|
||||||
|
"parse-glob": "3.0.4",
|
||||||
|
"request": "2.88.2",
|
||||||
|
"strip-json-comments": "3.1.0",
|
||||||
|
"xml": "1.0.1"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"http-signature": {
|
||||||
|
"version": "1.2.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz",
|
||||||
|
"integrity": "sha1-muzZJRFHcvPZW2WmCruPfBj7rOE=",
|
||||||
|
"requires": {
|
||||||
|
"assert-plus": "^1.0.0",
|
||||||
|
"jsprim": "^1.2.2",
|
||||||
|
"sshpk": "^1.7.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"inflight": {
|
||||||
|
"version": "1.0.6",
|
||||||
|
"resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz",
|
||||||
|
"integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=",
|
||||||
|
"requires": {
|
||||||
|
"once": "^1.3.0",
|
||||||
|
"wrappy": "1"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"inherits": {
|
||||||
|
"version": "2.0.4",
|
||||||
|
"resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz",
|
||||||
|
"integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ=="
|
||||||
|
},
|
||||||
|
"is-dotfile": {
|
||||||
|
"version": "1.0.3",
|
||||||
|
"resolved": "https://registry.npmjs.org/is-dotfile/-/is-dotfile-1.0.3.tgz",
|
||||||
|
"integrity": "sha1-pqLzL/0t+wT1yiXs0Pa4PPeYoeE="
|
||||||
|
},
|
||||||
|
"is-extglob": {
|
||||||
|
"version": "1.0.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-1.0.0.tgz",
|
||||||
|
"integrity": "sha1-rEaBd8SUNAWgkvyPKXYMb/xiBsA="
|
||||||
|
},
|
||||||
|
"is-glob": {
|
||||||
|
"version": "2.0.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/is-glob/-/is-glob-2.0.1.tgz",
|
||||||
|
"integrity": "sha1-0Jb5JqPe1WAPP9/ZEZjLCIjC2GM=",
|
||||||
|
"requires": {
|
||||||
|
"is-extglob": "^1.0.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"is-typedarray": {
|
||||||
|
"version": "1.0.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz",
|
||||||
|
"integrity": "sha1-5HnICFjfDBsR3dppQPlgEfzaSpo="
|
||||||
|
},
|
||||||
|
"isstream": {
|
||||||
|
"version": "0.1.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz",
|
||||||
|
"integrity": "sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo="
|
||||||
|
},
|
||||||
|
"jsbn": {
|
||||||
|
"version": "0.1.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz",
|
||||||
|
"integrity": "sha1-peZUwuWi3rXyAdls77yoDA7y9RM="
|
||||||
|
},
|
||||||
|
"json-schema": {
|
||||||
|
"version": "0.2.3",
|
||||||
|
"resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.2.3.tgz",
|
||||||
|
"integrity": "sha1-tIDIkuWaLwWVTOcnvT8qTogvnhM="
|
||||||
|
},
|
||||||
|
"json-schema-traverse": {
|
||||||
|
"version": "0.4.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz",
|
||||||
|
"integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg=="
|
||||||
|
},
|
||||||
|
"json-stringify-safe": {
|
||||||
|
"version": "5.0.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz",
|
||||||
|
"integrity": "sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus="
|
||||||
|
},
|
||||||
|
"jsprim": {
|
||||||
|
"version": "1.4.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/jsprim/-/jsprim-1.4.1.tgz",
|
||||||
|
"integrity": "sha1-MT5mvB5cwG5Di8G3SZwuXFastqI=",
|
||||||
|
"requires": {
|
||||||
|
"assert-plus": "1.0.0",
|
||||||
|
"extsprintf": "1.3.0",
|
||||||
|
"json-schema": "0.2.3",
|
||||||
|
"verror": "1.10.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"mime-db": {
|
||||||
|
"version": "1.44.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.44.0.tgz",
|
||||||
|
"integrity": "sha512-/NOTfLrsPBVeH7YtFPgsVWveuL+4SjjYxaQ1xtM1KMFj7HdxlBlxeyNLzhyJVx7r4rZGJAZ/6lkKCitSc/Nmpg=="
|
||||||
|
},
|
||||||
|
"mime-types": {
|
||||||
|
"version": "2.1.27",
|
||||||
|
"resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.27.tgz",
|
||||||
|
"integrity": "sha512-JIhqnCasI9yD+SsmkquHBxTSEuZdQX5BuQnS2Vc7puQQQ+8yiP5AY5uWhpdv4YL4VM5c6iliiYWPgJ/nJQLp7w==",
|
||||||
|
"requires": {
|
||||||
|
"mime-db": "1.44.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"minimatch": {
|
||||||
|
"version": "3.0.4",
|
||||||
|
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz",
|
||||||
|
"integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==",
|
||||||
|
"requires": {
|
||||||
|
"brace-expansion": "^1.1.7"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"oauth-sign": {
|
||||||
|
"version": "0.9.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.9.0.tgz",
|
||||||
|
"integrity": "sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ=="
|
||||||
|
},
|
||||||
|
"once": {
|
||||||
|
"version": "1.4.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz",
|
||||||
|
"integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=",
|
||||||
|
"requires": {
|
||||||
|
"wrappy": "1"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"parse-glob": {
|
||||||
|
"version": "3.0.4",
|
||||||
|
"resolved": "https://registry.npmjs.org/parse-glob/-/parse-glob-3.0.4.tgz",
|
||||||
|
"integrity": "sha1-ssN2z7EfNVE7rdFz7wu246OIORw=",
|
||||||
|
"requires": {
|
||||||
|
"glob-base": "^0.3.0",
|
||||||
|
"is-dotfile": "^1.0.0",
|
||||||
|
"is-extglob": "^1.0.0",
|
||||||
|
"is-glob": "^2.0.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"path-is-absolute": {
|
||||||
|
"version": "1.0.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz",
|
||||||
|
"integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18="
|
||||||
|
},
|
||||||
|
"performance-now": {
|
||||||
|
"version": "2.1.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz",
|
||||||
|
"integrity": "sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns="
|
||||||
|
},
|
||||||
|
"psl": {
|
||||||
|
"version": "1.8.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/psl/-/psl-1.8.0.tgz",
|
||||||
|
"integrity": "sha512-RIdOzyoavK+hA18OGGWDqUTsCLhtA7IcZ/6NCs4fFJaHBDab+pDDmDIByWFRQJq2Cd7r1OoQxBGKOaztq+hjIQ=="
|
||||||
|
},
|
||||||
|
"punycode": {
|
||||||
|
"version": "2.1.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz",
|
||||||
|
"integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A=="
|
||||||
|
},
|
||||||
|
"qs": {
|
||||||
|
"version": "6.5.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/qs/-/qs-6.5.2.tgz",
|
||||||
|
"integrity": "sha512-N5ZAX4/LxJmF+7wN74pUD6qAh9/wnvdQcjq9TZjevvXzSUo7bfmw91saqMjzGS2xq91/odN2dW/WOl7qQHNDGA=="
|
||||||
|
},
|
||||||
|
"request": {
|
||||||
|
"version": "2.88.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/request/-/request-2.88.2.tgz",
|
||||||
|
"integrity": "sha512-MsvtOrfG9ZcrOwAW+Qi+F6HbD0CWXEh9ou77uOb7FM2WPhwT7smM833PzanhJLsgXjN89Ir6V2PczXNnMpwKhw==",
|
||||||
|
"requires": {
|
||||||
|
"aws-sign2": "~0.7.0",
|
||||||
|
"aws4": "^1.8.0",
|
||||||
|
"caseless": "~0.12.0",
|
||||||
|
"combined-stream": "~1.0.6",
|
||||||
|
"extend": "~3.0.2",
|
||||||
|
"forever-agent": "~0.6.1",
|
||||||
|
"form-data": "~2.3.2",
|
||||||
|
"har-validator": "~5.1.3",
|
||||||
|
"http-signature": "~1.2.0",
|
||||||
|
"is-typedarray": "~1.0.0",
|
||||||
|
"isstream": "~0.1.2",
|
||||||
|
"json-stringify-safe": "~5.0.1",
|
||||||
|
"mime-types": "~2.1.19",
|
||||||
|
"oauth-sign": "~0.9.0",
|
||||||
|
"performance-now": "^2.1.0",
|
||||||
|
"qs": "~6.5.2",
|
||||||
|
"safe-buffer": "^5.1.2",
|
||||||
|
"tough-cookie": "~2.5.0",
|
||||||
|
"tunnel-agent": "^0.6.0",
|
||||||
|
"uuid": "^3.3.2"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"safe-buffer": {
|
||||||
|
"version": "5.2.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz",
|
||||||
|
"integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ=="
|
||||||
|
},
|
||||||
|
"safer-buffer": {
|
||||||
|
"version": "2.1.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz",
|
||||||
|
"integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg=="
|
||||||
|
},
|
||||||
|
"sshpk": {
|
||||||
|
"version": "1.16.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.16.1.tgz",
|
||||||
|
"integrity": "sha512-HXXqVUq7+pcKeLqqZj6mHFUMvXtOJt1uoUx09pFW6011inTMxqI8BA8PM95myrIyyKwdnzjdFjLiE6KBPVtJIg==",
|
||||||
|
"requires": {
|
||||||
|
"asn1": "~0.2.3",
|
||||||
|
"assert-plus": "^1.0.0",
|
||||||
|
"bcrypt-pbkdf": "^1.0.0",
|
||||||
|
"dashdash": "^1.12.0",
|
||||||
|
"ecc-jsbn": "~0.1.1",
|
||||||
|
"getpass": "^0.1.1",
|
||||||
|
"jsbn": "~0.1.0",
|
||||||
|
"safer-buffer": "^2.0.2",
|
||||||
|
"tweetnacl": "~0.14.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"strip-json-comments": {
|
||||||
|
"version": "3.1.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.0.tgz",
|
||||||
|
"integrity": "sha512-e6/d0eBu7gHtdCqFt0xJr642LdToM5/cN4Qb9DbHjVx1CP5RyeM+zH7pbecEmDv/lBqb0QH+6Uqq75rxFPkM0w=="
|
||||||
|
},
|
||||||
|
"supports-color": {
|
||||||
|
"version": "7.1.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.1.0.tgz",
|
||||||
|
"integrity": "sha512-oRSIpR8pxT1Wr2FquTNnGet79b3BWljqOuoW/h4oBhxJ/HUbX5nX6JSruTkvXDCFMwDPvsaTTbvMLKZWSy0R5g==",
|
||||||
|
"requires": {
|
||||||
|
"has-flag": "^4.0.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"tough-cookie": {
|
||||||
|
"version": "2.5.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.5.0.tgz",
|
||||||
|
"integrity": "sha512-nlLsUzgm1kfLXSXfRZMc1KLAugd4hqJHDTvc2hDIwS3mZAfMEuMbc03SujMF+GEcpaX/qboeycw6iO8JwVv2+g==",
|
||||||
|
"requires": {
|
||||||
|
"psl": "^1.1.28",
|
||||||
|
"punycode": "^2.1.1"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"tunnel-agent": {
|
||||||
|
"version": "0.6.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz",
|
||||||
|
"integrity": "sha1-J6XeoGs2sEoKmWZ3SykIaPD8QP0=",
|
||||||
|
"requires": {
|
||||||
|
"safe-buffer": "^5.0.1"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"tweetnacl": {
|
||||||
|
"version": "0.14.5",
|
||||||
|
"resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz",
|
||||||
|
"integrity": "sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q="
|
||||||
|
},
|
||||||
|
"uri-js": {
|
||||||
|
"version": "4.2.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.2.2.tgz",
|
||||||
|
"integrity": "sha512-KY9Frmirql91X2Qgjry0Wd4Y+YTdrdZheS8TFwvkbLWf/G5KNJDCh6pKL5OZctEW4+0Baa5idK2ZQuELRwPznQ==",
|
||||||
|
"requires": {
|
||||||
|
"punycode": "^2.1.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"uuid": {
|
||||||
|
"version": "3.4.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz",
|
||||||
|
"integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A=="
|
||||||
|
},
|
||||||
|
"verror": {
|
||||||
|
"version": "1.10.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/verror/-/verror-1.10.0.tgz",
|
||||||
|
"integrity": "sha1-OhBcoXBTr1XW4nDB+CiGguGNpAA=",
|
||||||
|
"requires": {
|
||||||
|
"assert-plus": "^1.0.0",
|
||||||
|
"core-util-is": "1.0.2",
|
||||||
|
"extsprintf": "^1.2.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"wrappy": {
|
||||||
|
"version": "1.0.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz",
|
||||||
|
"integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8="
|
||||||
|
},
|
||||||
|
"xml": {
|
||||||
|
"version": "1.0.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/xml/-/xml-1.0.1.tgz",
|
||||||
|
"integrity": "sha1-eLpyAgApxbyHuKgaPPzXS0ovweU="
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
Loading…
Reference in a new issue