chore: remove contrib folder + rename folders

This commit is contained in:
Nasreddine Bencherchali
2023-04-21 17:25:21 +02:00
parent 9ae42d481b
commit c2400ac374
189 changed files with 0 additions and 720 deletions
-32
View File
@@ -1,32 +0,0 @@
#!/usr/bin/env python3
# Remove all hunks from a patch that don't add the id attribute to minimize the impact (removed
# comments etc.) of sigma_uuid script.
#
# Usually used as follows:
# 1. Add UUIDs to rules:
#    tools/sigma_uuid -er rules
# 2. Generate and filter patch
#    git diff | contrib/filter-uuid-patch > rule-uuid.diff
# 3. Reset to previous state
#    git reset --hard
# 4. Apply filtered patch
#    patch -p1 < rule-uuid.diff
#
# This tool requires an installed unidiff package.
from unidiff import PatchSet
from sys import argv, stdin

# Read the patch from the file named by the first CLI argument if given,
# otherwise from stdin.
try:
    with open(argv[1], "r") as f:
        patch = PatchSet(f.readlines())
except IndexError:
    patch = PatchSet(stdin.readlines())

for patched_file in patch:
    # Walk hunk indices in reverse so deletions don't shift the positions
    # of hunks not yet visited.
    for h in reversed(range(len(patched_file))):
        hunk = patched_file[h]
        # Keep only hunks that add at least one "id: " line; drop the rest.
        # (Generator instead of a throwaway list inside any().)
        if not any(line.is_added and line.value.startswith("id: ") for line in hunk):
            del patched_file[h]

print(patch)
-63
View File
@@ -1,63 +0,0 @@
#!/usr/bin/env python3
# Copyright 2021 wagga40 (https://github.com/wagga40)
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Project: sigma2CSV.py
Date: 07 aug 2021
Author: wagga40 (https://github.com/wagga40)
Version: 1.0
Description:
Asked by frak113 in issue #1787 (https://github.com/SigmaHQ/sigma/issues/1787#issuecomment-894618060)
This script converts sigma rules to a CSV format for statistics purposes.
For now, it only keeps title, description, level, tags and author fields.
Feel free to modify it according to your needs.
Requirements:
$ pip install pyyaml
"""
import yaml
import glob
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("-r", "--rulesdirectory", help="Sub-directory generated by rules-search", required=True, type=str)
parser.add_argument("-f", "--fileext", help="Rule file extension", default="yml", type=str)
parser.add_argument("-d", "--delimiter", help="Separator", default=",", type=str)
parser.add_argument("--oneline", help="Put all tags on a single line", action="store_true")
args = parser.parse_args()

files = glob.glob(args.rulesdirectory + "/**/*." + args.fileext, recursive=True)

# For each rule file found (recursively) in the given directory.
for file in files:
    with open(file, 'r') as stream:
        docs = yaml.load_all(stream, Loader=yaml.FullLoader)
        for doc in docs:
            if doc is None:
                # Skip empty YAML documents (e.g. trailing "---").
                continue
            # Reset per document so fields from an earlier document in a
            # multi-document rule file do not leak into the next one.
            d = {}
            for k, v in doc.items():
                if k in ['title', 'description', 'tags', 'level', 'author']:  # Modify here if you want to include other fields
                    d[k] = v
            # Check for optional fields so every CSV column is always present
            # (missing title/description used to raise KeyError below).
            if "title" not in d: d["title"] = ""
            if "description" not in d: d["description"] = ""
            if "author" not in d: d["author"] = ""
            if "level" not in d: d["level"] = ""
            if args.oneline:  # All tags will be on a single line
                if "tags" in d:
                    expandTags = args.delimiter.join([tags for tags in d["tags"] if "attack" in tags])  # Only output attack related tags
                    print(f'{d["title"]}{args.delimiter}{d["description"]}{args.delimiter}{d["level"]}{args.delimiter}{d["author"]}{args.delimiter}{expandTags}')
                else:
                    print(f'{d["title"]}{args.delimiter}{d["description"]}{args.delimiter}{d["level"]}{args.delimiter}{d["author"]}')
            else:
                if "tags" in d:
                    for tag in d["tags"]:
                        if "attack" in tag:  # Only output attack related tags
                            print(f'{d["title"]}{args.delimiter}{d["description"]}{args.delimiter}{d["level"]}{args.delimiter}{d["author"]}{args.delimiter}{tag}')
-173
View File
@@ -1,173 +0,0 @@
#!/usr/bin/python
# Copyright 2018 David Routin
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Project: sigma2elastalert.py
Date: 25 Feb 2018
Author: David ROUTIN (@Rewt_1)
Version: 1.0
Description: This script creates elastalert configuration files from Sigma SIEM rules.
"""
import re
import os
import glob
import subprocess
import argparse
import yaml
import traceback
# Command line interface: connection/output parameters are mandatory; the
# sigmac path, re-alert time and debug flag have defaults.
parser = argparse.ArgumentParser()
parser.add_argument("--eshost", help="Elasticsearch host", type=str, required=True)
parser.add_argument("--esport", help="Elasticsearch port", type=str, required=True)
parser.add_argument("--ruledir", help="sigma rule directory path to convert", type=str, required=True)
parser.add_argument("--index", help="Elasticsearch index name egs: \"winlogbeat-*\"", type=str, required=True)
parser.add_argument("--email", help="email address to send mail alert", type=str, required=True)
parser.add_argument("--outdir", help="output directory to create elastalert rules", type=str, required=True)
parser.add_argument("--sigmac", help="Sigmac location", default="../tools/sigmac", type=str)
parser.add_argument("--realerttime", help="Realert time (optional value, default 5 minutes)", type=str, default=5)
parser.add_argument("--debug", help="Show debug output", type=bool, default=False)
args = parser.parse_args()

# Query keys that are always placed first in elastalert's query_key/include.
custom_query_keys = ["sensor", "Hostname", "EventID", "src_ip", "dst_ip"]

# Elastalert rule template; UPPERCASE placeholders are substituted later.
# NOTE(review): the nesting of this YAML was reconstructed to valid
# elastalert syntax — the indentation inside the original string was lost.
template = """es_host: ESHOST
es_port: ESPORT
name: "TITLE"
description: "DESCRIPTION"
index: INDEX
filter:
- query:
    query_string:
      query: 'QUERY'
realert:
  minutes: MINUTES
query_key: UNIQKEYS
type: any
include: UNIQKEYS
alert:
- "email"
# (required, email specific)
# a list of email addresses to send alerts to
email:
- "EMAIL"
"""
def return_json_obj(x, custom_query_keys):
    """
    Filter all ES query field names into a unique list, with the predefined
    custom_query_keys forced to the front (in their given order).

    :param x: ES query string output (e.g. "EventID:4688 AND Image:...")
    :param custom_query_keys: predefined keys to place first in the result
    :return: list of query keys (custom keys first, then the remaining ones)
    """
    # type: (str, list) -> list
    tokens = x.replace(" ", "\n").split()
    found = set()
    for token in tokens:
        # Collect every "<name>:" field reference appearing in the query.
        # Raw string: "\:" in a plain string is an invalid escape sequence.
        found.update(re.findall(r"([a-zA-Z]+):", token))
    # Drop the custom keys from the detected set; they are re-inserted below
    # so they always come first and in a fixed order. discard() replaces the
    # original bare try/except around set.remove().
    for qk in custom_query_keys:
        found.discard(qk)
    result = list(found)
    # Prepend the custom keys in their original order. Note they are added
    # even when they did not occur in the query (original behavior).
    for position, qk in enumerate(custom_query_keys):
        result.insert(position, qk)
    return result
def rule_element(file_content, elements):
    """
    Extract the value of the last present key among *elements* from a YAML rule.

    :type file_content: str
    :type elements: list
    :param file_content: raw YAML rule text (bytes are decoded as UTF-8, since
        callers open the rule file in binary mode)
    :param elements: keys to look up, e.g. ["title", "name"]; the value of the
        last key present in the document wins
    :return: the value of the key, or "" if none of the keys is present
    :raises Exception: with message 'Unsupported' if the content is not valid YAML
    """
    if isinstance(file_content, bytes):
        # Under Python 3 bytes.replace(str) raised TypeError, so every rule
        # ended up "Unsupported"; decode first.
        file_content = file_content.decode("utf-8")
    try:
        # Parse once; "---" document markers are stripped so multi-document
        # rule files parse as a single mapping.
        document = yaml.safe_load(file_content.replace("---", ""))
    except (yaml.YAMLError, TypeError):
        raise Exception('Unsupported')
    element_output = ""
    for e in elements:
        try:
            element_output = document[e]
        except (KeyError, TypeError):
            # Key absent (or document is not a mapping): keep previous value.
            pass
    if element_output is None:
        return ""
    return element_output
def get_rule_as_esqs(file):
    """
    Convert a Sigma rule file into an Elasticsearch query string via sigmac.

    :type file: str
    :param file: rule filename
    :return: es query string (multiple queries are joined with " OR ")
    :raises Exception: if sigmac reports the rule as unsupported
    """
    if not os.path.exists(args.sigmac):
        # FIX: the "%s" placeholder was never filled with the path.
        print("Cannot find sigmac rule coverter at '%s', please set a correct location via '--sigmac'" % args.sigmac)
    cmd = [args.sigmac, file, "--target", "es-qs"]
    # stderr is merged into stdout so sigmac's "unsupported" notice is seen.
    output = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT).stdout.read()
    # FIX: subprocess returns bytes under Python 3; decode before string ops.
    if isinstance(output, bytes):
        output = output.decode("utf-8")
    if "unsupported" in output:
        raise Exception('Unsupported output at this time')
    output = output.split("\n")
    # Remove empty string from \n
    output = [a for a in output if a]
    # Handle case of multiple queries returned (one per line)
    if len(output) > 1:
        return " OR ".join(output)
    return "".join(output)
# Dictionary that contains args set at launch time
convert_args = {
    "ESHOST": args.eshost,
    "ESPORT": args.esport,
    "INDEX": args.index,
    "EMAIL": args.email,
    "MINUTES": args.realerttime
}
# Main loop: turn every rule in --ruledir into an elastalert config file
# named sigma-<rulefile> in --outdir.
for file in glob.glob(args.ruledir + "/*"):
    output_elast_config = template
    try:
        print("Processing %s ..." % file)
        with open(file, "rb") as f:
            file_content = f.read()
        # Dictionary that contains args with values returned by functions
        # NOTE(review): get_rule_as_esqs runs sigmac twice per rule (once for
        # QUERY, once for UNIQKEYS) — confirm the duplication is intended.
        translate_func = {'QUERY': get_rule_as_esqs(file),
                          'TITLE': rule_element(file_content, ["title", "name"]),
                          'DESCRIPTION': rule_element(file_content, ["description"]),
                          'UNIQKEYS': str(return_json_obj(get_rule_as_esqs(file), custom_query_keys))
                          }
        # Placeholder substitution: re.sub treats each placeholder as a regex
        # pattern, which is safe because they are plain uppercase words.
        for entry in convert_args:
            output_elast_config = re.sub(entry, str(convert_args[entry]), output_elast_config)
        for entry in translate_func:
            output_elast_config = re.sub(entry, translate_func[entry], output_elast_config)
        print("Converting file " + file)
        with open(os.path.join(args.outdir, "sigma-" + file.split("/")[-1]), "w") as f:
            f.write(output_elast_config)
    except Exception as e:
        # Best-effort: report the failure and keep converting remaining rules.
        if args.debug:
            traceback.print_exc()
        print("error " + str(file) + "----" + str(e))
        pass
-261
View File
@@ -1,261 +0,0 @@
#!/usr/bin/python
# Copyright 2018 juju4
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Project: sigma2sumologic.py
Date: 11 Jan 2019
Author: juju4
Version: 1.0
Description: This script executes sumologic search queries from Sigma SIEM rules.
Workflow:
1. Convert rules with sigmac
2. Enrich: add ignore+local custom rules, priority
3. Format
4. Get results and save to txt/xlsx files
Requirements:
$ pip install sumologic-sdk pyyaml pandas openpyxl
"""
import re
import os
import sys
import stat
import glob
import subprocess
import argparse
import yaml
import traceback
import logging
from sumologic import SumoLogic
import time
import datetime
import json
import pandas
# Log everything (DEBUG and up) to the console, and additionally to
# sigma2sumo.log with a detailed per-line format.
logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger(__name__)
formatter = logging.Formatter('%(asctime)s - %(name)s - p%(process)s {%(pathname)s:%(lineno)d} - %(levelname)s - %(message)s')
handler = logging.FileHandler('sigma2sumo.log')
handler.setFormatter(formatter)
logger.addHandler(handler)
# Command line interface; each option may alternatively be supplied through
# the --conf YAML file (which overrides the CLI values below).
parser = argparse.ArgumentParser(description='Execute sigma rules in sumologic')
parser.add_argument("--conf", help="script yaml config file", type=str, required=True)
parser.add_argument("--accessid", help="Sumologic Access ID", type=str, required=False)
parser.add_argument("--accesskey", help="Sumologic Access Key", type=str, required=False)
parser.add_argument("--endpoint", help="Sumologic url endpoint", type=str, required=False)
parser.add_argument("--ruledir", help="sigma rule directory path to convert", type=str, required=False)
parser.add_argument("--outdir", help="output directory to create rules", type=str, required=False)
parser.add_argument("--sigmac", help="Sigmac location", default="../tools/sigmac", type=str)
parser.add_argument("--realerttime", help="Realert time (optional value, default 5 minutes)", type=str, default=5)
parser.add_argument("--debug", help="Show debug output", type=bool, default=False)
args = parser.parse_args()
LIMIT = 100  # maximum number of result records fetched per search job
delay = 5  # seconds between search-job status polls
def rule_element(file_content, elements):
    """
    Extract the value of the last present key among *elements* from a YAML rule.

    :type file_content: str
    :type elements: list
    :param file_content: raw YAML rule text (bytes are decoded as UTF-8, since
        the caller opens rule files in binary mode)
    :param elements: keys to look up, e.g. ["title", "description"]; the value
        of the last key present in the document wins
    :return: the value of the key, or "" if none of the keys is present
    :raises Exception: with message 'Unsupported' if the content is not valid YAML
    """
    if isinstance(file_content, bytes):
        # FIX: bytes.replace(str) raises TypeError under Python 3.
        file_content = file_content.decode("utf-8")
    try:
        logger.debug("file_content: %s" % file_content)
        # Parse once; "---" document markers are stripped first.
        document = yaml.safe_load(file_content.replace("---", ""))
    except (yaml.YAMLError, TypeError):
        raise Exception('Unsupported')
    element_output = ""
    for e in elements:
        try:
            element_output = document[e]
        except (KeyError, TypeError):
            # FIX: the original only caught TypeError, so a missing key
            # (KeyError) crashed the whole script; keep the previous value.
            pass
    if element_output is None:
        return ""
    return element_output
def get_rule_as_sumologic(file):
    """
    Convert a Sigma rule file into a Sumologic query via sigmac.

    :type file: str
    :param file: rule filename
    :return: query string (multiple queries are joined with " OR ")
    :raises Exception: if sigmac writes anything to stderr (unsupported rule, ...)
    """
    if not os.path.exists(args.sigmac):
        # FIX: lazy %-style argument so the path actually appears in the log;
        # the original string carried an unfilled '%s'.
        logger.error("Cannot find sigmac rule coverter at '%s', please set a correct location via '--sigmac'", args.sigmac)
    cmd = [args.sigmac, file, "--target", "sumologic"]
    logger.info('get_rule_as_sumologic cmd: %s' % cmd)
    process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    output, err = process.communicate()
    # output is byte-string...
    output = output.decode("utf-8")
    err = err.decode("utf-8")
    logger.info('get_rule_as_sumologic output: %s' % output)
    logger.info('get_rule_as_sumologic stderr: %s' % err)
    # Any stderr output is treated as a failed conversion.
    if err or "unsupported" in err:
        logger.error('Unsupported output at this time')
        raise Exception('Unsupported output at this time')
    output = output.split("\n")
    # Remove empty string from \n
    output = [a for a in output if a]
    # Handle case of multiple queries returned (one per line)
    if len(output) > 1:
        return " OR ".join(output)
    return "".join(output)
# NOTE(review): argparse handles -h/--help by itself; the original
# "if args.help: parser.print_help()" crashed with AttributeError
# (Namespace has no 'help' attribute) and was removed.
if args.conf:
    # Values from the YAML config file override/fill the CLI arguments.
    with open(args.conf, 'r') as ymlfile:
        # safe_load: the config is pure data, no need for full yaml.load
        # (which was also called without an explicit Loader).
        cfg = yaml.safe_load(ymlfile)
    args.accessid = cfg['accessid']
    args.accesskey = cfg['accesskey']
    args.endpoint = cfg['endpoint']
    args.ruledir = cfg['ruledir']
    args.outdir = cfg['outdir']
    args.sigmac = cfg['sigmac']
    try:
        args.recursive = cfg['recursive']
    except (KeyError, TypeError):
        # FIX: a missing key raises KeyError (TypeError only covers an empty
        # config); default to a non-recursive scan either way.
        args.recursive = False
if args.recursive:
    globpath = args.ruledir + "/**/*.yml"
else:
    globpath = args.ruledir + "/*.yml"
logger.debug("args: %s" % args)
logger.debug("globpath: %s" % globpath)
if args.outdir and not os.path.isdir(args.outdir):
    # Create the output directory accessible by the owner only.
    os.mkdir(args.outdir, stat.S_IRWXU)
# non-recursive (above, not working...)
# for file in glob.iglob(args.ruledir + "/*.yml"):
# recursive
for file in glob.iglob(globpath, recursive=True):
    file_basename = os.path.basename(os.path.splitext(file)[0])
    file_basenamepath = os.path.splitext(file)[0]
    file_ext = os.path.splitext(file)[1]

    # --- Step 1: build the sumologic query for this rule -------------------
    try:
        if file_ext != '.yml':
            continue
        logger.info("Processing %s ..." % file_basename)
        with open(file, "rb") as f:
            file_content = f.read()
        logger.info("Rule file: %s" % file)
        sumo_query = get_rule_as_sumologic(file)
        logger.info(" Checking if custom query file: %s" % file_basenamepath + '.custom')
        if os.path.isfile(file_basenamepath + '.custom'):
            # FIXME! want to add something in the middle for parsing for example...
            logger.info(" Adding custom part to end query from: %s" % file_basenamepath + '.custom')
            with open(file_basenamepath + '.custom', "rb") as f:
                # FIXME ! manage pipe inside queries
                if "| count" in sumo_query:
                    # Insert the custom part just before the "| count" stage.
                    pos = sumo_query.find('| count')
                    sumo_query = sumo_query[:pos] + f.read().decode('utf-8') + sumo_query[pos:]
                else:
                    sumo_query += " " + f.read().decode('utf-8')
        elif 'count ' not in sumo_query and ('EventID=' in sumo_query):
            sumo_query += " | count _sourceCategory, hostname, EventID, msg_summary, _raw"
        elif 'count ' not in sumo_query:
            sumo_query += " | count _sourceCategory, hostname, _raw"
        logger.debug("Final sumo query: %s" % sumo_query)
    except Exception as e:
        if args.debug:
            traceback.print_exc()
        logger.exception("error generating sumo query " + str(file) + "----" + str(e))
        with open(os.path.join(args.outdir, "sigma-" + file_basename + '-error-generation.txt'), "w") as f:
            # f.write(json.dumps(r, indent=4, sort_keys=True) + " ERROR: %s\n\nQUERY: %s" % (e, sumo_query))
            # FIX: "\n\Exception" contained an invalid escape sequence.
            f.write(" ERROR for file: %s\nException:\n %s" % (file, e))
        continue

    # --- Step 2: run the search job and poll until it completes ------------
    try:
        # https://github.com/SumoLogic/sumologic-python-sdk/blob/3ad8033deb028ac45ac4099f11c04785fa426f51/scripts/search-job.py
        sumo = SumoLogic(args.accessid, args.accesskey, args.endpoint)
        # Search window: the last 24 hours, by receipt time, in UTC.
        toTime = datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%S")
        fromTime = datetime.datetime.strptime(toTime, "%Y-%m-%dT%H:%M:%S") - datetime.timedelta(hours=24)
        fromTime = fromTime.strftime("%Y-%m-%dT%H:%M:%S")
        timeZone = 'UTC'
        byReceiptTime = True
        sj = sumo.search_job(sumo_query, fromTime, toTime, timeZone, byReceiptTime)
        status = sumo.search_job_status(sj)
        while status['state'] != 'DONE GATHERING RESULTS':
            if status['state'] == 'CANCELLED':
                break
            time.sleep(delay)
            status = sumo.search_job_status(sj)
    except Exception as e:
        if args.debug:
            traceback.print_exc()
        logger.exception("error searching sumo " + str(file) + "----" + str(e))
        with open(os.path.join(args.outdir, "sigma-" + file_basename + '-error.txt'), "w") as f:
            # f.write(json.dumps(r, indent=4, sort_keys=True) + " ERROR: %s\n\nQUERY: %s" % (e, sumo_query))
            f.write(" ERROR: %s\n\nQUERY: %s" % (e, sumo_query))
        # FIX: was "pass" — falling through dereferenced the unbound "status"
        # variable in the next statement whenever the search failed (NameError
        # killed the whole run). Skip to the next rule instead.
        continue
    logger.debug("Sumo search job status: %s" % status['state'])

    # --- Step 3: save the query and its results ----------------------------
    try:
        if status['state'] == 'DONE GATHERING RESULTS':
            count = status['recordCount']
            # compensate bad limit check
            limit = count if count < LIMIT and count != 0 else LIMIT
            r = sumo.search_job_records(sj, limit=limit)
            logger.debug("Sumo search results: %s" % r)
            logger.debug("Saving final sumo query for %s to %s" % (file, os.path.join(args.outdir, "sigma-" + file_basename + '.sumo')))
            with open(os.path.join(args.outdir, "sigma-" + file_basename + '.sumo'), "w") as f:
                f.write(sumo_query)
            if r and r['records'] != []:
                logger.info("Saving results")
                # as json text file
                with open(os.path.join(args.outdir, "sigma-" + file_basename + '.txt'), "w") as f:
                    f.write(json.dumps(r, indent=4, sort_keys=True))
                # as excel file
                # FIX: pandas.io.json.json_normalize was removed from pandas;
                # pandas.json_normalize is the supported spelling (>= 0.25).
                df = pandas.json_normalize(r['records'])
                with pandas.ExcelWriter(os.path.join(args.outdir, "sigma-" + file_basename + ".xlsx")) as writer:
                    df.to_excel(writer, 'data')
                    pandas.DataFrame({'References': [
                        "timeframe: from %s to %s" % (fromTime, toTime),
                        "Sumo endpoint: %s" % args.endpoint,
                        "Sumo query: %s" % sumo_query
                    ]}).to_excel(writer, 'comments')
                # and do whatever you want, email alert, report, ticket...
    except Exception as e:
        if args.debug:
            traceback.print_exc()
        logger.exception("error saving results " + str(file) + "----" + str(e))
-31
View File
@@ -1,31 +0,0 @@
#!/bin/bash
# Copyright 2022 Tim Shelton
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

# Convert the sigma rules that changed upstream with sigmac.
if [ $# -ne 3 ]; then
    echo "Usage: $0 <target> <target config> <output file>"
    echo "Ex: $0 hawk ./tools/config/hawk.yml output.txt"
    exit 1
fi

# Rule files that differ from origin ("grep -E" replaces the deprecated egrep).
FILEDIFF=$(git fetch && git diff --name-only ..origin | grep -E "rules/" )

# The repository root is one directory up; abort if we cannot get there.
cd .. || exit 1
echo "Updating ${FILEDIFF}"
git pull origin master
# Positional parameters are quoted against word splitting; ${FILEDIFF} stays
# unquoted on purpose so each changed rule file becomes its own argument.
python3 ./tools/sigmac --target "$1" -c "$2" ${FILEDIFF} > "$3"
E=$(pwd)
cd - || exit 1
echo "Output file can be found in $E"
-160
View File
@@ -1,160 +0,0 @@
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Project: sigmacover.py
Date: 26/09/2021
Author: frack113
Version: 1.1
Description:
get cover of the rules vs backend
Requirements:
python 3.7 min
$ pip install ruyaml
Todo:
- clean code and bug
- better use of subprocess.run
- have idea
"""
import re
import subprocess
import pathlib
import ruyaml
import json
import copy
import platform
import argparse
def get_sigmac(name, conf):
    """Run sigmac for one backend over ../rules and collect per-rule results.

    :param name: backend name passed to sigmac via -t
    :param conf: path of the sigmac config file, or None for no config
    :return: list of (rule_filename, state) tuples parsed from sigmac.log
    """
    infos = []
    # Build the sigmac command line; only add "-c" when a config is given.
    # ("is None" replaces the original "== None" equality check.)
    if conf is None:
        options = ["python", "../tools/sigmac", "-t", name, "--debug", "-rI", "-o", "dump.txt", "../rules"]
    else:
        options = ["python", "../tools/sigmac", "-t", name, "-c", conf, "--debug", "-rI", "-o", "dump.txt", "../rules"]
    if platform.system() == "Windows":
        # Hide the console window spawned for the child process on Windows.
        si = subprocess.STARTUPINFO()
        si.dwFlags |= subprocess.STARTF_USESHOWWINDOW
        ret = subprocess.run(options,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.STDOUT,
                             startupinfo=si
                             )
        # Raw strings: the originals relied on invalid escape sequences
        # (\S, \w, \.) in plain strings. "Convertion" deliberately matches
        # sigmac's actual (misspelled) log output.
        my_regex = r"Convertion Sigma input \S+\\(\w+\.yml) (\w+)"
    else:
        ret = subprocess.run(options,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.STDOUT,
                             )
        my_regex = r"Convertion Sigma input \S+/(\w+\.yml) (\w+)"
    if not ret.returncode == 0:
        print(f"error {ret.returncode} in sigmac")
    # sigmac logs one "Convertion Sigma input <file> <state>" line per rule.
    log = pathlib.Path("sigmac.log")
    with log.open() as f:
        lines = f.readlines()
    for line in lines:
        if "Convertion Sigma input" in line:
            info = re.findall(my_regex, line)[0]
            infos.append(info)
    # Clean up the working files sigmac left behind.
    log.unlink()
    dump = pathlib.Path("dump.txt")
    if dump.exists():
        dump.unlink()
    return infos
def update_dict(my_dict, my_data, backend):
    """Record the per-rule conversion state for one backend.

    :param my_dict: mapping of rule filename -> {backend name: state}
    :param my_data: iterable of (rule filename, state) pairs
    :param backend: backend name whose column is being filled in
    """
    for rule_name, state in my_data:
        my_dict[rule_name][backend] = state
#the backend dict command line options
backend_dict = {
"ala": None,
"ala-rule": None,
"arcsight": "../tools/config/elk-winlogbeat.yml",
"arcsight-esm": "../tools/config/elk-winlogbeat.yml",
"carbonblack": "../tools/config/elk-winlogbeat.yml",
"chronicle": "../tools/config/elk-winlogbeat.yml",
"crowdstrike": "../tools/config/elk-winlogbeat.yml",
"csharp" : None,
"devo": "../tools/config/elk-winlogbeat.yml",
"ee-outliers": "../tools/config/winlogbeat-modules-enabled.yml",
"elastalert": "../tools/config/winlogbeat-modules-enabled.yml",
"elastalert-dsl": "../tools/config/winlogbeat-modules-enabled.yml",
"es-dsl": "../tools/config/winlogbeat-modules-enabled.yml",
"es-eql": "../tools/config/winlogbeat-modules-enabled.yml",
"es-qs": "../tools/config/winlogbeat-modules-enabled.yml",
"es-qs-lr": "../tools/config/logrhythm_winevent.yml",
"es-rule": "../tools/config/winlogbeat-modules-enabled.yml",
"es-rule-eql": "../tools/config/winlogbeat-modules-enabled.yml",
"fireeye-helix": "../tools/config/elk-winlogbeat.yml",
"graylog" : None,
"grep" : None,
"humio": "../tools/config/elk-winlogbeat.yml",
"kibana": "../tools/config/winlogbeat-modules-enabled.yml",
"kibana-ndjson": "../tools/config/winlogbeat-modules-enabled.yml",
"lacework" : None,
"limacharlie" : None,
"logiq" : None,
"logpoint" : None,
"mdatp" : None,
"netwitness" : None,
"netwitness-epl" : None,
"opensearch-monitor": "../tools/config/winlogbeat.yml",
"powershell" : None,
"qradar" : None,
"qualys" : None,
"sentinel-rule" : None,
"splunk": "../tools/config/splunk-windows.yml",
"splunkdm": "../tools/config/splunk-windows.yml",
"splunkxml": "../tools/config/splunk-windows.yml",
"sql": "../tools/config/elk-winlogbeat.yml",
"sqlite": "../tools/config/elk-winlogbeat.yml",
"stix": "../tools/config/stix2.0.yml",
"sumologic" : None,
"sumologic-cse" : None,
"sumologic-cse-rule" : None,
"sysmon": "../tools/config/elk-windows.yml",
"uberagent" : None,
"xpack-watcher": "../tools/config/winlogbeat-modules-enabled.yml",
}
print("""
███ ███ ████ █▄┼▄█ ███ ┼┼ ███ ███ █▄█ ███ ███
█▄▄ ┼█┼ █┼▄▄ █┼█┼█ █▄█ ┼┼ █┼┼ █┼█ ███ █▄┼ █▄┼
▄▄█ ▄█▄ █▄▄█ █┼┼┼█ █┼█ ┼┼ ███ █▄█ ┼█┼ █▄▄ █┼█
v1.1 bugfix
please wait during the tests
""")

argparser = argparse.ArgumentParser(description="Check Sigma rules with all backend.")
argparser.add_argument("--target", "-t", choices=["yaml","json"], help="Output target format")
cmdargs = argparser.parse_args()
if cmdargs.target is None:  # identity check replaces "== None"
    print("No outpout use -h to see help")
    exit()

# Init dict of all rules: every rule starts as "NO TEST" for every backend.
default_key_test = {key: "NO TEST" for key in backend_dict.keys()}
the_dico = {}
rules = pathlib.Path("../rules").glob("**/*.yml")
for rule in rules:
    # deepcopy so each rule gets its own per-backend dict, not a shared one.
    the_dico[rule.name] = copy.deepcopy(default_key_test)

# Check all the backends, one sigmac run per backend.
for name, opt in backend_dict.items():
    print(f"check backend : {name}")
    result = get_sigmac(name, opt)
    update_dict(the_dico, result, name)

# Save the coverage matrix in the requested format.
if cmdargs.target.lower() == "yaml":
    cover = pathlib.Path("sigmacover.yml")
    with cover.open("w") as file:
        ruyaml.dump(the_dico, file, Dumper=ruyaml.RoundTripDumper)
else:
    cover = pathlib.Path("sigmacover.json")
    with cover.open("w") as file:
        json_dumps_str = json.dumps(the_dico, indent=4)
        file.write(json_dumps_str)

Some files were not shown because too many files have changed in this diff Show More