Commit

fix(query): refactored query Resource Not Using Tags to lower execution time

Signed-off-by: João Reigota <[email protected]>
cx-joao-reigota committed Jan 14, 2022
2 parents 084d828 + 0ccbdf2 commit 5ae6604
Showing 2,032 changed files with 1,239 additions and 192 deletions.
@@ -9,7 +9,7 @@
queries_path = {
    'ansible': os.path.join(queries_basepath, 'ansible', '**', '*'),
    'azureresourcemanager': os.path.join(queries_basepath, 'azureResourceManager', '*'),
    'cloudformation': os.path.join(queries_basepath, 'cloudFormation', '*'),
    'cloudformation': os.path.join(queries_basepath, 'cloudFormation', '**', '*'),
    'openapi': os.path.join(queries_basepath, 'openAPI', '**', '*'),
    'k8s': os.path.join(queries_basepath, 'k8s', '*'),
    'common': os.path.join(queries_basepath, 'common', '*'),
@@ -72,21 +72,22 @@ def queries_count(path):
        samples_summary[f'{key}_{ext}_samples'] = ext_samples
        samples_summary['total'] += ext_samples

print("::group::Queries Metrics")
print(tabulate([[key, value] for key, value in summary.items()], headers=[
'Platform', 'Count'], tablefmt='orgtbl'))
print("::endgroup::")
print()
print(f"::set-output name=total_queries::{summary['total']}")
print()
print("::group::Rego File Metrics")
print(tabulate([[key, value] for key, value in rego_summary.items()], headers=[
'Platform', 'Count'], tablefmt='orgtbl'))
print("::endgroup::")
print()
print(f"::set-output name=total_rego_files::{rego_summary['total']}")
print()
print("::group::Sample File Metrics")
print(tabulate([[key, value] for key, value in samples_summary.items()], headers=[
'Samples', 'Count'], tablefmt='orgtbl'))
print("::endgroup::")
if __name__ == '__main__':
    print("::group::Queries Metrics")
    print(tabulate([[key, value] for key, value in summary.items()], headers=[
        'Platform', 'Count'], tablefmt='orgtbl'))
    print("::endgroup::")
    print()
    print(f"::set-output name=total_queries::{summary['total']}")
    print()
    print("::group::Rego File Metrics")
    print(tabulate([[key, value] for key, value in rego_summary.items()], headers=[
        'Platform', 'Count'], tablefmt='orgtbl'))
    print("::endgroup::")
    print()
    print(f"::set-output name=total_rego_files::{rego_summary['total']}")
    print()
    print("::group::Sample File Metrics")
    print(tabulate([[key, value] for key, value in samples_summary.items()], headers=[
        'Samples', 'Count'], tablefmt='orgtbl'))
    print("::endgroup::")
155 changes: 155 additions & 0 deletions .github/scripts/statistics/get_statistics.py
@@ -0,0 +1,155 @@
import os
import glob
from datetime import date
import sys
import argparse
from tabulate import tabulate
import requests

base = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.append(os.path.join(base, "metrics"))

from get_metrics import queries_count, queries_path, samples_ext


def get_statistics(test_coverage, total_tests, go_loc):
    latest_realease_url = "https://api.github.com/repos/Checkmarx/kics/releases/latest"
    releases_url = "https://api.github.com/repos/Checkmarx/kics/releases"
    dockerhub_url = "https://hub.docker.com/v2/repositories/checkmarx/kics"
    repo_url = "https://api.github.com/repos/Checkmarx/kics"

    date = get_date()
    version = get_version(latest_realease_url)
    total_queries, code_samples = get_total_queries()
    all_dockerhub_pulls = get_dockerhub_pulls(dockerhub_url)
    stars, forks = get_info_from_repo(repo_url)
    all_github_downloads = get_github_downloads(releases_url)
    bug_open_count, bug_closed_count, feature_request_open_count, feature_request_closed_count = get_relevant_issues_info()
    e2e_tests = get_e2e_tests()

    return {'date': date,
'version': version,
'total_queries': total_queries,
'go_loc': go_loc,
'dockerhub_pulls': all_dockerhub_pulls,
'github_stars': stars,
'github_forks': forks,
'github_downloads': all_github_downloads,
'bugs_open' : bug_open_count,
'bugs_closed' : bug_closed_count,
'feature_requests_open' : feature_request_open_count,
'feature_requests_closed' : feature_request_closed_count,
'total_tests': total_tests,
'test_coverage': test_coverage,
'code_samples': code_samples,
'e2e_tests' : e2e_tests
}


def get_github_downloads(releases_url):
    gh_resp = requests.get(releases_url)
    if gh_resp.status_code == 200:
        response_body = gh_resp.json()
        all_github_downloads = sum([item for sublist in [[asset['download_count'] for asset in release['assets']]
                                    for release in response_body] for item in sublist])

    return all_github_downloads

def get_dockerhub_pulls(dockerhub_url):
    dkr_resp = requests.get(dockerhub_url)
    if dkr_resp.status_code == 200:
        response_body = dkr_resp.json()
        all_dockerhub_pulls = response_body['pull_count']

    return all_dockerhub_pulls

def get_info_from_repo(repo_url):
    info_resp = requests.get(repo_url)
    if info_resp.status_code == 200:
        response_body = info_resp.json()
        stars = response_body['watchers_count']
        forks = response_body['forks']

    return stars, forks


def get_relevant_issues_info():
    base_url = "https://api.github.com/search/issues?q=repo:Checkmarx/kics+type:"
    bug_open = base_url + "issue+state:open+label:bug"
    bug_closed = base_url + "issue+state:closed+label:bug"
    feature_request_open = base_url + "issue+state:open+label:\"feature%20request\""
    feature_request_closed = base_url + "issue+state:closed+label:\"feature%20request\""

    target_urls = [ { bug_open : ""}, { bug_closed : "" }, {feature_request_open: ""}, {feature_request_closed: ""} ]

    for target_url in target_urls:
        for url in target_url:
            target_resp = requests.get(url)
            if target_resp.status_code == 200:
                response_body = target_resp.json()
                target_url[url] = response_body["total_count"]

    bug_open_count = target_urls[0][bug_open]
    bug_closed_count = target_urls[1][bug_closed]
    feature_request_open_count = target_urls[2][feature_request_open]
    feature_request_closed_count = target_urls[3][feature_request_closed]

    return bug_open_count, bug_closed_count, feature_request_open_count, feature_request_closed_count

def get_e2e_tests():
    _, _, files = next(os.walk("./././e2e/testcases"))
    e2e_tests = len(files) - 1

    return e2e_tests

def get_total_queries():
    total_queries = 0
    total_samples = 0
    for key, value in queries_path.items():
        metadata_path = os.path.join(value, 'metadata.json')
        platform_count = sum([queries_count(path)
                              for path in glob.glob(metadata_path)])
        total_queries += platform_count

        for ext in samples_ext[key]:
            sample_path = os.path.join(value, 'test', f'*.{ext}')
            ext_samples = len([path for path in glob.glob(sample_path)])

            total_samples += ext_samples

    return total_queries, total_samples

def get_version(latest_realease_url):
    latest_resp = requests.get(latest_realease_url)
    if latest_resp.status_code == 200:
        response_body = latest_resp.json()
        version = response_body['name']

    return version

def get_date():
    current_date = date.today().strftime("%Y/%m/%d")

    return current_date


parser = argparse.ArgumentParser(
description='Extract test coverage and total tests')
parser.add_argument('-c', '--coverage', metavar='COV',
required=True)
parser.add_argument('-t', '--total-tests', metavar='TESTS',
required=True)
parser.add_argument('-g', '--goloc', metavar='GOLOC',
required=True)

args = parser.parse_args()

def main():
    statistics = get_statistics(args.coverage, args.total_tests, args.goloc)

    print(tabulate([[key, value] for key, value in statistics.items()], headers=[
        'KICS_KPIS', statistics["date"]], tablefmt='orgtbl'))


if __name__ == "__main__":
    main()
2 changes: 2 additions & 0 deletions .github/scripts/statistics/requirements.txt
@@ -0,0 +1,2 @@
requests
tabulate
2 changes: 1 addition & 1 deletion .github/workflows/go-ci-metrics.yaml
@@ -20,7 +20,7 @@ jobs:
id: metrics
run: |
pip3 install -r .github/scripts/metrics/requirements.txt
python3 .github/scripts/metrics/get-metrics.py
python3 .github/scripts/metrics/get_metrics.py
- name: Generate badge
run: |
curl -L \
2 changes: 1 addition & 1 deletion .github/workflows/go-ci.yml
@@ -18,7 +18,7 @@ jobs:
uses: golangci/[email protected]
with:
version: v1.37
args: -c .golangci.yml
args: -c .golangci.yml --timeout 5m
skip-go-installation: true
go-generate:
name: go-generate
39 changes: 39 additions & 0 deletions .github/workflows/statistics.yaml
@@ -0,0 +1,39 @@
name: statistics

on:
  workflow_dispatch:
  release:
    type: [published]
jobs:
  statistics:
    name: test-statistics
    runs-on: ubuntu-latest
    steps:
      - name: Checkout Source
        uses: actions/[email protected]
      - name: Set up Go 1.17.x
        uses: actions/setup-go@v2
        with:
          go-version: 1.17.x
      - name: Run test metrics script
        id: testcoverage
        run: |
          make test-coverage-report | tee test-results
          TOTAL_TESTS=$(cat test-results | grep -v TestQueriesContent/ | grep -v TestQueriesMetadata/ | grep -v TestQueries/ | grep PASS | wc -l)
          echo "Total Tests :: ${TOTAL_TESTS}"
          echo "::set-output name=total_tests::${TOTAL_TESTS}"
      - name: Get Go_LOC
        id: go_loc
        run: |
          sudo apt-get install cloc
          GO_LOC=$(cloc . | grep Go | grep -Eo '[0-9]+$')
          echo "::set-output name=goloc::${GO_LOC}"
      - uses: actions/[email protected]
        with:
          python-version: "3.x"
      - name: Run test statistics script
        id: metrics
        run: |
          pip3 install -r .github/scripts/statistics/requirements.txt
          python3 .github/scripts/statistics/get_statistics.py -c ${{steps.testcoverage.outputs.coverage}} -t ${{steps.testcoverage.outputs.total_tests}} -g ${{steps.go_loc.outputs.goloc}}
8 changes: 8 additions & 0 deletions assets/libraries/common.rego
@@ -343,6 +343,10 @@ get_encryption_if_exists(resource) = encryption {
} else = encryption {
    valid_key(resource.encryption_info, "encryption_at_rest_kms_key_arn")
    encryption := "encrypted"
} else = encryption {
    fields := {"sqs_managed_sse_enabled", "kms_master_key_id", "encryption_options", "server_side_encryption_configuration"}
    valid_key(resource, fields[_])
    encryption := "encrypted"
} else = encryption {
    encryption := "unencrypted"
}
@@ -423,3 +427,7 @@ any_principal(statement) {
    is_array(statement.Principal.AWS)
    contains(statement.Principal.AWS[_], "*")
}

is_recommended_tls(field) {
    inArray({"TLSv1.2_2018", "TLSv1.2_2019", "TLSv1.2_2021"}, field)
}
@@ -1,15 +1,15 @@
package Cx

import data.generic.ansible as ansLib
import data.generic.ansible as ans_lib
import data.generic.common as common_lib

modules := {"community.aws.cloudfront_distribution", "cloudfront_distribution"}

CxPolicy[result] {
    task := ansLib.tasks[id][t]
    task := ans_lib.tasks[id][t]
    cloudfront := task[modules[m]]

    ansLib.checkState(cloudfront)
    ans_lib.checkState(cloudfront)
    not common_lib.valid_key(cloudfront, "viewer_certificate")

    result := {
@@ -18,23 +18,25 @@ CxPolicy[result] {
        "issueType": "MissingAttribute",
        "keyExpectedValue": "cloudfront_distribution.viewer_certificate is defined",
        "keyActualValue": "cloudfront_distribution.viewer_certificate is undefined",
        "searchLine": common_lib.build_search_line(["playbooks", t, modules[m]], []),
    }
}

CxPolicy[result] {
    task := ansLib.tasks[id][t]
    task := ans_lib.tasks[id][t]
    cloudfront := task[modules[m]]

    ansLib.checkState(cloudfront)
    ans_lib.checkState(cloudfront)
    protocol_version := cloudfront.viewer_certificate.minimum_protocol_version

    not common_lib.inArray(["TLSv1.2_2018", "TLSv1.2_2019"], protocol_version)
    not common_lib.is_recommended_tls(protocol_version)

    result := {
        "documentId": id,
        "searchKey": sprintf("name={{%s}}.{{%s}}.viewer_certificate.minimum_protocol_version", [task.name, modules[m]]),
        "issueType": "IncorrectValue",
        "keyExpectedValue": sprintf("name={{%s}}.{{%s}}.viewer_certificate.minimum_protocol_version' is TLSv1.2_x", [task.name, modules[m]]),
        "keyActualValue": sprintf("name={{%s}}.{{%s}}.viewer_certificate.minimum_protocol_version' is %s", [task.name, modules[m], protocol_version]),
        "searchLine": common_lib.build_search_line(["playbooks", t, modules[m], "viewer_certificate", "minimum_protocol_version"], []),
    }
}
@@ -14,6 +14,7 @@ CxPolicy[result] {
        "issueType": "MissingAttribute",
        "keyExpectedValue": sprintf("Resources.%s.Properties.DistributionConfig.ViewerCertificate' is defined", [name]),
        "keyActualValue": sprintf("Resources.%s.Properties.DistributionConfig.ViewerCertificate' is undefined", [name]),
        "searchLine": common_lib.build_search_line(["Resources", name, "Properties", "DistributionConfig"], []),
    }
}

@@ -22,13 +23,14 @@ CxPolicy[result] {
    resource.Type == "AWS::CloudFront::Distribution"
    properties := resource.Properties
    protocolVer := properties.DistributionConfig.ViewerCertificate.MinimumProtocolVersion
    not common_lib.inArray({"TLSv1.2_2018", "TLSv1.2_2019","TLSv1.2-2018", "TLSv1.2-2019"}, protocolVer)
    not common_lib.is_recommended_tls(protocolVer)

    result := {
        "documentId": input.document[i].id,
        "searchKey": sprintf("Resources.%s.Properties.DistributionConfig.ViewerCertificate.MinimumProtocolVersion", [name]),
        "issueType": "IncorrectValue",
        "keyExpectedValue": sprintf("Resources.%s.Properties.DistributionConfig.ViewerCertificate.MinimumProtocolVersion' should be at least 1.2", [name]),
        "keyActualValue": sprintf("Resources.%s.Properties.DistributionConfig.ViewerCertificate.MinimumProtocolVersion' lesser than 1.2", [name]),
        "keyExpectedValue": sprintf("Resources.%s.Properties.DistributionConfig.ViewerCertificate.MinimumProtocolVersion' is TLSv1.2_x", [name]),
        "keyActualValue": sprintf("Resources.%s.Properties.DistributionConfig.ViewerCertificate.MinimumProtocolVersion' is %s", [name, protocolVer]),
        "searchLine": common_lib.build_search_line(["Resources", name, "Properties", "DistributionConfig", "ViewerCertificate", "MinimumProtocolVersion"], []),
    }
}
