Merge remote-tracking branch 'upstream/master'

This commit is contained in:
juju4
2017-10-29 14:14:47 -04:00
23 changed files with 251 additions and 110 deletions
+1
@@ -5,6 +5,7 @@ cache: pip
install:
- pip install pyyaml
- pip install yamllint
- pip install coverage
script:
- make test
+35 -8
@@ -2,13 +2,40 @@
test: test-yaml test-sigmac

test-yaml:
	yamllint .
	yamllint rules

test-sigmac:
	tools/sigmac.py -l
	tools/sigmac.py -rvdI -t es-qs rules/
	tools/sigmac.py -rvdI -t kibana rules/
	tools/sigmac.py -rvdI -t xpack-watcher rules/
	tools/sigmac.py -rvdI -t splunk rules/
	tools/sigmac.py -rvdI -t logpoint rules/
	tools/sigmac.py -rvdI -t fieldlist rules/
	rm -f .coverage
	coverage run -a --include=tools/* tools/sigmac.py -l
	coverage run -a --include=tools/* tools/sigmac.py -rvdI -t es-qs rules/ > /dev/null
	coverage run -a --include=tools/* tools/sigmac.py -rvdI -t kibana rules/ > /dev/null
	coverage run -a --include=tools/* tools/sigmac.py -rvdI -t xpack-watcher rules/ > /dev/null
	coverage run -a --include=tools/* tools/sigmac.py -rvdI -t splunk rules/ > /dev/null
	coverage run -a --include=tools/* tools/sigmac.py -rvdI -t logpoint rules/ > /dev/null
	coverage run -a --include=tools/* tools/sigmac.py -rvdI -c tools/config/elk-windows.yml -t es-qs rules/ > /dev/null
	coverage run -a --include=tools/* tools/sigmac.py -rvdI -c tools/config/elk-linux.yml -t es-qs rules/ > /dev/null
	coverage run -a --include=tools/* tools/sigmac.py -rvdI -c tools/config/elk-windows.yml -t kibana rules/ > /dev/null
	coverage run -a --include=tools/* tools/sigmac.py -rvdI -c tools/config/elk-linux.yml -t kibana rules/ > /dev/null
	coverage run -a --include=tools/* tools/sigmac.py -rvdI -c tools/config/elk-windows.yml -t xpack-watcher rules/ > /dev/null
	coverage run -a --include=tools/* tools/sigmac.py -rvdI -c tools/config/elk-linux.yml -t xpack-watcher rules/ > /dev/null
	coverage run -a --include=tools/* tools/sigmac.py -rvdI -c tools/config/elk-defaultindex.yml -t xpack-watcher rules/ > /dev/null
	coverage run -a --include=tools/* tools/sigmac.py -rvdI -c tools/config/splunk-windows-all.yml -t splunk rules/ > /dev/null
	coverage run -a --include=tools/* tools/sigmac.py -rvdI -c tools/config/logpoint-windows-all.yml -t logpoint rules/ > /dev/null
	coverage run -a --include=tools/* tools/sigmac.py -rvdI -t grep rules/ > /dev/null
	coverage run -a --include=tools/* tools/sigmac.py -rvdI -t fieldlist rules/ > /dev/null
	coverage run -a --include=tools/* tools/sigmac.py -t xpack-watcher -O output=plain -O es=es -O foobar rules/windows/builtin/win_susp_failed_logons_single_source.yml > /dev/null
	! coverage run -a --include=tools/* tools/sigmac.py -t xpack-watcher -O output=foobar -O es=es -O foobar rules/windows/builtin/win_susp_failed_logons_single_source.yml > /dev/null
	! coverage run -a --include=tools/* tools/sigmac.py -t es-qs tests/not_existing.yml > /dev/null
	! coverage run -a --include=tools/* tools/sigmac.py -t es-qs tests/invalid_yaml.yml > /dev/null
	! coverage run -a --include=tools/* tools/sigmac.py -t es-qs tests/invalid_sigma-no_identifiers.yml > /dev/null
	! coverage run -a --include=tools/* tools/sigmac.py -t es-qs tests/invalid_sigma-no_condition.yml > /dev/null
	! coverage run -a --include=tools/* tools/sigmac.py -t es-qs tests/invalid_sigma-invalid_identifier_reference.yml > /dev/null
	! coverage run -a --include=tools/* tools/sigmac.py -t es-qs tests/invalid_sigma-invalid_aggregation.yml > /dev/null
	! coverage run -a --include=tools/* tools/sigmac.py -t es-qs tests/invalid_sigma-wrong_identifier_definition.yml > /dev/null
	! coverage run -a --include=tools/* tools/sigmac.py -t es-qs rules/windows/builtin/win_susp_failed_logons_single_source.yml
	! coverage run -a --include=tools/* tools/sigmac.py -t es-qs -o /not_possible rules/windows/sysmon/sysmon_mimikatz_detection_lsass.yml
	! coverage run -a --include=tools/* tools/sigmac.py -t es-qs -c not_existing rules/windows/sysmon/sysmon_mimikatz_detection_lsass.yml
	! coverage run -a --include=tools/* tools/sigmac.py -t es-qs -c tests/invalid_yaml.yml rules/windows/sysmon/sysmon_mimikatz_detection_lsass.yml
	! coverage run -a --include=tools/* tools/sigmac.py -t es-qs -c tests/invalid_config.yml rules/windows/sysmon/sysmon_mimikatz_detection_lsass.yml
	! coverage run -a --include=tools/* tools/sigmac.py -rvI -c tools/config/elk-defaultindex.yml -t kibana rules/ > /dev/null
	coverage report --fail-under=90
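The `!`-prefixed invocations above are expected-failure checks: the shell negation makes a recipe line succeed only when sigmac exits non-zero, while `coverage run -a` accumulates coverage across every invocation before `coverage report --fail-under=90` enforces the threshold. A minimal sketch of the same expected-failure idea in Python, reusing the sigmac path and test file named in the recipes above (illustrative only, not part of the test suite):

```python
import subprocess
import sys

# Expected-failure check: converting an intentionally broken rule must
# return a non-zero exit status, mirroring the "!"-prefixed recipe lines.
result = subprocess.run(
    [sys.executable, "tools/sigmac.py", "-t", "es-qs", "tests/invalid_yaml.yml"],
    stdout=subprocess.DEVNULL,
    stderr=subprocess.DEVNULL,
)
if result.returncode == 0:
    sys.exit("expected sigmac to reject tests/invalid_yaml.yml")
print("sigmac rejected the invalid rule as expected")
```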
+5 -1
@@ -9,7 +9,7 @@ Generic Signature Format for SIEM Systems
Sigma is a generic and open signature format that allows you to describe relevant log events in a straightforward manner. The rule format is very flexible, easy to write and applicable to any type of log file. The main purpose of this project is to provide a structured form in which researchers or analysts can describe the detection methods they have developed and make them shareable with others.
Sigma is for log files what [Snort](https://www.snort.org/) is for network traffic and [YARA](https://github.com/VirusTotal/yara) is for files.
This repository contains:
@@ -17,6 +17,10 @@ This repository contains:
* Open repository for sigma signatures in the ```./rules``` subfolder
* A converter that generates searches/queries for different SIEM systems [work in progress]
## Hack.lu 2017 Talk
[![Sigma - Generic Signatures for Log Events](https://preview.ibb.co/cMCigR/Screen_Shot_2017_10_18_at_15_47_15.png)](https://www.youtube.com/watch?v=OheVuE9Ifhs "Sigma - Generic Signatures for Log Events")
# Use Cases
* Describe the detection method you discovered in Sigma to make it shareable
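For orientation, here is a minimal sketch of what such a rule looks like and how the Python tooling in this repository can read it with PyYAML (the same pyyaml dependency installed in the CI config above); the rule content below is illustrative only and not one of the rules shipped in ```./rules```:

```python
import yaml

# Illustrative rule text only; the layout (title, logsource, detection,
# condition, level) follows the Sigma rules added elsewhere in this commit.
RULE = """
title: Example Suspicious Command
logsource:
    product: windows
    service: sysmon
detection:
    selection:
        EventID: 1
        CommandLine: 'whoami /all'
    condition: selection
level: medium
"""

rule = yaml.safe_load(RULE)
print(rule["title"])
print(rule["detection"]["condition"])
```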
+16
@@ -0,0 +1,16 @@
title: Ps.exe Renamed SysInternals Tool
description: Detects renamed SysInternals tool execution with a binary named ps.exe as used by Dragonfly APT group and documented in TA17-293A report
reference: https://www.us-cert.gov/ncas/alerts/TA17-293A
author: Florian Roth
date: 2017/10/22
logsource:
    product: windows
    service: sysmon
detection:
    selection:
        EventID: 1
        CommandLine: 'ps.exe -accepteula'
    condition: selection
falsepositives:
    - Renamed SysInternals tool
level: high
@@ -0,0 +1,19 @@
title: Flash Player Update from Suspicious Location
status: experimental
description: Detects a flashplayer update from an unofficial location
reference: https://gist.github.com/roycewilliams/a723aaf8a6ac3ba4f817847610935cfb
author: Florian Roth
logsource:
    category: proxy
detection:
    selection:
        cs-uri-query:
            - '*/install_flash_player.exe'
            - '*/flash_install.php*'
    filter:
        cs-uri-query: '*.adobe.com/*'
    condition: selection and not filter
falsepositives:
    - Unknown flash download locations
level: high
@@ -8,10 +8,11 @@ logsource:
service: powershell
description: 'It is recommended to use the new "Script Block Logging" of PowerShell v5 https://adsecurity.org/?p=2277'
detection:
EventID: 4103
keywords:
selection:
EventID: 4103
keywords:
- 'PS ATTACK!!!'
condition: keywords
condition: selection and keywords
falsepositives:
- Pentesters
level: high
@@ -7,9 +7,9 @@ logsource:
service: sysmon
detection:
selection:
- EventID: 10
TargetImage: 'C:\windows\system32\lsass.exe'
GrantedAccess: '0x1410'
EventID: 10
TargetImage: 'C:\windows\system32\lsass.exe'
GrantedAccess: '0x1410'
condition: selection
falsepositives:
- unknown
@@ -23,6 +23,7 @@ detection:
- '*\sh.exe'
- '*\bash.exe'
- '*\scrcons.exe'
- '*\schtasks.exe' # see https://www.hybrid-analysis.com/sample/b409538c99f99b94a5035d9fa44a506b41be0feb23e89b7e4d272ba791aa6002?environmentId=100
- '*\regsvr32.exe' # see https://twitter.com/subTee/status/899283365647458305
- '*\hh.exe' # see https://www.hybrid-analysis.com/sample/6abc2b63f1865a847ff7f5a9d49bb944397b36f5503b9718d6f91f93d60f7cd7?environmentId=100
condition: selection
@@ -0,0 +1,25 @@
title: New RUN Key Pointing to Suspicious Folder
status: experimental
description: Detects suspicious new RUN key element pointing to an executable in a suspicious folder
author: Florian Roth
date: 2017/10/17
logsource:
    product: windows
    service: sysmon
detection:
    selection:
        EventID: 13
        TargetObject: '\REGISTRY\MACHINE\SOFTWARE\Microsoft\Windows\CurrentVersion\Run\*'
        Details:
            - 'C:\Windows\Temp\*'
            - '*\AppData\*'
            - 'C:\$Recycle.bin\*'
            - 'C:\Temp\*'
            - 'C:\Users\Public\*'
            - 'C:\Users\Default\*'
    condition: selection
fields:
    - Image
falsepositives:
    - Software with rare behaviour
level: high
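The rules above pair named detection items with a boolean condition such as `selection and not filter` or `selection and keywords`. A much simplified sketch of that evaluation against a single event, using exact matches only (real Sigma matching also supports wildcards, lists and value modifiers; the field values below are illustrative):

```python
# Simplified illustration of how a "selection and not filter" condition
# combines named detection items; not the real Sigma matching engine.
def matches(item: dict, event: dict) -> bool:
    return all(event.get(field) == value for field, value in item.items())

detection = {
    "selection": {"EventID": 13, "Details": "C:\\Users\\Public\\run.exe"},
    "filter": {"Image": "C:\\Program Files\\Vendor\\setup.exe"},
}

event = {
    "EventID": 13,
    "Details": "C:\\Users\\Public\\run.exe",
    "Image": "C:\\Windows\\explorer.exe",
}

# condition: selection and not filter
hit = matches(detection["selection"], event) and not matches(detection["filter"], event)
print("rule fires:", hit)  # True for this event
```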
+4
@@ -0,0 +1,4 @@
logsources: invalid
fieldmappings:
    client_ip: clientip
    url: request
@@ -0,0 +1,7 @@
title: Parse error in aggregation
logsource:
    product: linux
detection:
    foo:
        - test
    condition: foo | foo bar
@@ -0,0 +1,7 @@
title: Missing identifiers in detection section
logsource:
    product: linux
detection:
    foo:
        - test
    condition: bar
+6
@@ -0,0 +1,6 @@
title: Missing condition
logsource:
    product: linux
detection:
    expression:
        - test
+3
@@ -0,0 +1,3 @@
title: Missing identifiers in detection section
logsource:
    product: linux
@@ -0,0 +1,6 @@
title: Wrong identifier value type
logsource:
    product: linux
detection:
    foo: test
    condition: foo
+1
@@ -0,0 +1 @@
foo: bar: foobar
+69 -74
@@ -56,61 +56,6 @@ class SingleOutput:
def close(self):
self.fd.close()
class MultiOutput:
"""
Multiple file output
Prepares multiple SingleOutput instances with basename + suffix as file names, one for each suffix.
The select() method is used to switch between these outputs.
This class must be inherited and suffixes must be a dict as follows: file id -> suffix
"""
suffixes = None
def __init__(self, basename):
"""Initializes all outputs with basename and corresponding suffix as SingleOutput object."""
if self.suffixes == None:
raise NotImplementedError("OutputMulti must be derived, at least suffixes must be set")
if type(basename) != str:
raise TypeError("OutputMulti constructor basename parameter must be string")
self.outputs = dict()
self.output = None
for name, suffix in self.suffixes.items():
self.outputs[name] = SingleOutput(basename + suffix)
def select(self, name):
"""Select an output as current output"""
self.output = self.outputs[name]
def print(self, *args, **kwargs):
self.output.print(*args, **kwargs)
def close(self):
for out in self.outputs.values():
out.close()
class StringOutput(SingleOutput):
"""Collect input silently and return resulting string."""
def __init__(self, filename=None):
self.out = ""
def print(self, *args, **kwargs):
try:
del kwargs['file']
except KeyError:
pass
print(*args, file=self, **kwargs)
def write(self, s):
self.out += s
def result(self):
return self.out
def close(self):
pass
### Generic backend base classes and mixins
class BaseBackend:
"""Base class for all backends"""
@@ -190,16 +135,29 @@ class BaseBackend:
"""
pass
class SingleTextQueryBackend(BaseBackend):
class QuoteCharMixin:
"""
This class adds the cleanValue method that quotes and filters characters according to the configuration in
the attributes provided by the mixin.
"""
reEscape = None # match characters that must be quoted
escapeSubst = "\\\\\g<1>" # Substitution that is applied to characters/strings matched for escaping by reEscape
reClear = None # match characters that are cleaned out completely
def cleanValue(self, val):
if self.reEscape:
val = self.reEscape.sub(self.escapeSubst, val)
if self.reClear:
val = self.reClear.sub("", val)
return val
class SingleTextQueryBackend(BaseBackend, QuoteCharMixin):
"""Base class for backends that generate one text-based expression from a Sigma rule"""
identifier = "base-textquery"
active = False
output_class = SingleOutput
# the following class variables define the generation and behavior of queries from a parse tree; some are prefilled with commonly used default values
reEscape = None # match characters that must be quoted
escapeSubst = "\\\\\g<1>" # Substitution that is applied to characters/strings matched for escaping by reEscape
reClear = None # match characters that are cleaned out completely
andToken = None # Token used for linking expressions with logical AND
orToken = None # Same for OR
notToken = None # Same for NOT
@@ -211,13 +169,6 @@ class SingleTextQueryBackend(BaseBackend):
mapListsSpecialHandling = False # Same handling for map items with list values as for normal values (strings, integers); if True, the generateMapItemListNode method is called with the node
mapListValueExpression = None # Syntax for field/value conditions where map value is a list
def cleanValue(self, val):
if self.reEscape:
val = self.reEscape.sub(self.escapeSubst, val)
if self.reClear:
val = self.reClear.sub("", val)
return val
def generateANDNode(self, node):
return self.andToken.join([self.generateNode(val) for val in node])
@@ -323,8 +274,6 @@ class KibanaBackend(ElasticsearchQuerystringBackend, MultiRuleOutputMixin):
pass
indices = sigmaparser.get_logsource().index
if len(indices) == 0:
indices = ["logstash-*"]
for parsed in sigmaparser.condparsed:
result = self.generateNode(parsed.parsedSearch)
@@ -332,8 +281,7 @@ class KibanaBackend(ElasticsearchQuerystringBackend, MultiRuleOutputMixin):
for index in indices:
final_rulename = rulename
if len(indices) > 1: # add index names if rule must be replicated because of ambiguous index patterns
final_rulename += "-" + indexname
title = "%s (%s)" % (sigmaparser.parsedyaml["title"], index)
raise NotSupportedError("Multiple target indices are not supported by Kibana")
else:
title = sigmaparser.parsedyaml["title"]
try:
@@ -408,8 +356,6 @@ class XPackWatcherBackend(ElasticsearchQuerystringBackend, MultiRuleOutputMixin)
# creating condition
indices = sigmaparser.get_logsource().index
if len(indices) == 0:
indices = ["logstash-*"]
for condition in sigmaparser.condparsed:
result = self.generateNode(condition.parsedSearch)
@@ -518,10 +464,10 @@ class SplunkBackend(SingleTextQueryBackend):
listSeparator = " "
valueExpression = "\"%s\""
mapExpression = "%s=%s"
mapListsSpecialHandling = False
mapListsSpecialHandling = True
mapListValueExpression = "%s IN %s"
def generateMapItemListNode(self, node):
def generateMapItemListNode(self, key, value):
return "(" + (" OR ".join(['%s=%s' % (key, self.generateValueNode(item)) for item in value])) + ")"
def generateAggregation(self, agg):
@@ -534,6 +480,46 @@ class SplunkBackend(SingleTextQueryBackend):
else:
return " | stats %s(%s) as val by %s | search val %s %s" % (agg.aggfunc_notrans, agg.aggfield, agg.groupfield, agg.cond_op, agg.condition)
class GrepBackend(BaseBackend, QuoteCharMixin):
"""Generates Perl compatible regular expressions and puts 'grep -P' around it"""
identifier = "grep"
active = True
output_class = SingleOutput
reEscape = re.compile("([\\|()\[\]{}.^$])")
def generate(self, sigmaparser):
for parsed in sigmaparser.condparsed:
self.output.print("grep -P '^%s'" % self.generateNode(parsed.parsedSearch))
def cleanValue(self, val):
val = super().cleanValue(val)
return re.sub("\\*", ".*", val)
def generateORNode(self, node):
return "(?:%s)" % "|".join([".*" + self.generateNode(val) for val in node])
def generateANDNode(self, node):
return "".join(["(?=.*%s)" % self.generateNode(val) for val in node])
def generateNOTNode(self, node):
return "(?!.*%s)" % self.generateNode(node.item)
def generateSubexpressionNode(self, node):
return "(?:.*%s)" % self.generateNode(node.items)
def generateListNode(self, node):
if not set([type(value) for value in node]).issubset({str, int}):
raise TypeError("List values must be strings or numbers")
return self.generateORNode(node)
def generateMapItemNode(self, node):
key, value = node
return self.generateNode(value)
def generateValueNode(self, node):
return self.cleanValue(str(node))
### Backends for development purposes
class FieldnameListBackend(BaseBackend):
@@ -579,3 +565,12 @@ def flatten(l):
yield from flatten(i)
else:
yield i
# Exceptions
class BackendError(Exception):
"""Base exception for backend-specific errors."""
pass
class NotSupportedError(BackendError):
"""Exception is raised if some output is required that is not supported by the target language."""
pass
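The refactoring above moves value escaping into QuoteCharMixin and keeps SingleTextQueryBackend purely token-driven: a concrete backend mostly declares tokens such as andToken, orToken, mapExpression and valueExpression. A standalone sketch of that idea, independent of the real classes (class name, tokens and the condition tree below are illustrative, not the sigma API):

```python
# Illustration of token-driven query assembly in the spirit of
# SingleTextQueryBackend: the backend declares tokens, the generator
# walks a tiny condition tree of ("and"/"or", [...]) and (field, value).
class TinyTextBackend:
    andToken = " AND "
    orToken = " OR "
    valueExpression = '"%s"'
    mapExpression = "%s:%s"

    def generate(self, node):
        kind = node[0] if isinstance(node, tuple) else None
        if kind == "and":
            return self.andToken.join(self.generate(n) for n in node[1])
        if kind == "or":
            return "(" + self.orToken.join(self.generate(n) for n in node[1]) + ")"
        if isinstance(node, tuple):            # (field, value) map item
            field, value = node
            return self.mapExpression % (field, self.generate(value))
        return self.valueExpression % node     # plain value

tree = ("and", [
    ("EventID", 1),
    ("or", [("Image", "*\\ps.exe"), ("Image", "*\\PsExec.exe")]),
])
print(TinyTextBackend().generate(tree))
# EventID:"1" AND (Image:"*\ps.exe" OR Image:"*\PsExec.exe")
```

The Splunk change in this commit uses the same hook: with mapListsSpecialHandling set to True, list values are routed to generateMapItemListNode and rendered as an OR group of field=value pairs.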
+3
@@ -0,0 +1,3 @@
defaultindex:
- logstash-*
- filebeat-*
+1
@@ -12,3 +12,4 @@ logsources:
fieldmappings:
    client_ip: clientip
    url: request
defaultindex: logstash-*
-7
@@ -1,7 +0,0 @@
logsources:
    sysmon:
        product: windows
        service: sysmon
        index: logstash-windows-*
        conditions:
            EventLog: Microsoft-Windows-Sysmon
+1
@@ -22,3 +22,4 @@ logsources:
        service: dns-server
        conditions:
            EventLog: 'DNS Server'
defaultindex: logstash-*
+21 -7
@@ -19,8 +19,6 @@ class SigmaParser:
def parse_sigma(self):
try: # definition uniqueness check
for definitionName, definition in self.parsedyaml["detection"].items():
if definitionName in self.definitions:
raise SigmaParseError("Definition '%s' was already defined" % (definitionName))
self.definitions[definitionName] = definition
self.extract_values(definition) # builds key-values-table in self.values
except KeyError:
@@ -45,7 +43,7 @@ class SigmaParser:
try:
definition = self.definitions[definitionName]
except KeyError as e:
raise SigmaParseError("Unknown definition '%s'" % (definitionName)) from e
raise SigmaParseError("Unknown definition '%s'" % definitionName) from e
return self.parse_definition(definition, condOverride)
def parse_definition(self, definition, condOverride=None):
@@ -711,6 +709,7 @@ class SigmaConfiguration:
self.fieldmappings = dict()
self.logsources = dict()
self.logsourcemerging = SigmaLogsourceConfiguration.MM_AND
self.defaultindex = None
self.backend = None
else:
config = yaml.safe_load(configyaml)
@@ -730,6 +729,11 @@ class SigmaConfiguration:
except KeyError:
self.logsourcemerging = SigmaLogsourceConfiguration.MM_AND
try:
self.defaultindex = config['defaultindex']
except KeyError:
self.defaultindex = None
self.logsources = list()
self.backend = None
@@ -743,7 +747,7 @@ class SigmaConfiguration:
def get_logsource(self, category, product, service):
"""Return merged log source definition of all logsources that match criteria"""
matching = [logsource for logsource in self.logsources if logsource.matches(category, product, service)]
return SigmaLogsourceConfiguration(matching)
return SigmaLogsourceConfiguration(matching, self.defaultindex)
def set_backend(self, backend):
"""Set backend. This is used by other code to determine target properties for index addressing"""
@@ -754,7 +758,7 @@ class SigmaConfiguration:
if type(logsources) != dict:
raise SigmaConfigParseError("Logsources must be a map")
for name, logsource in logsources.items():
self.logsources.append(SigmaLogsourceConfiguration(logsource, name, self.logsourcemerging, self.get_indexfield()))
self.logsources.append(SigmaLogsourceConfiguration(logsource, self.defaultindex, name, self.logsourcemerging, self.get_indexfield()))
def get_indexfield(self):
"""Get index condition if index field name is configured"""
@@ -766,7 +770,7 @@ class SigmaLogsourceConfiguration:
MM_AND = "and" # Merge all conditions with AND
MM_OR = "or" # Merge all conditions with OR
def __init__(self, logsource=None, name=None, mergemethod=MM_AND, indexfield=None):
def __init__(self, logsource=None, defaultindex=None, name=None, mergemethod=MM_AND, indexfield=None):
self.name = name
self.indexfield = indexfield
if logsource == None: # create empty object
@@ -798,6 +802,13 @@ class SigmaLogsourceConfiguration:
# Merge all index patterns
self.index = list(set([index for ls in logsource for index in ls.index])) # unique(flat(logsources.index))
if len(self.index) == 0 and defaultindex is not None: # if no index pattern matched and default index is present: use default index
if type(defaultindex) == str:
self.index = [defaultindex]
elif type(defaultindex) == list and all([type(i) == str for i in defaultindex]):
self.index = defaultindex
else:
raise TypeError("Default index must be string or list of strings")
# "merge" index field (should never differ between instances because it is provided by the backend class)
indexfields = [ ls.indexfield for ls in logsource if ls.indexfield != None ]
@@ -844,13 +855,16 @@ class SigmaLogsourceConfiguration:
index = logsource['index']
if type(index) not in (str, list):
raise SigmaConfigParseError("Logsource index must be string or list of strings")
if type(index) == list and not set([type(index) for index in logsource['index']]).issubset({str}):
if type(index) == list and not all([type(index) == str for index in logsource['index']]):
raise SigmaConfigParseError("Logsource index patterns must be strings")
if type(index) == list:
self.index = index
else:
self.index = [ index ]
else:
# no default index handling here - this branch is executed if log source definitions are parsed from
# config and these do not necessarily contain an index definition. A valid index may later be the
# result of a merge, where default index handling applies.
self.index = []
if 'conditions' in logsource:
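The defaultindex support added here only applies when none of the matching log source definitions contributes an index pattern, and the configured value may be a single string or a list of strings. A condensed, standalone sketch of that fallback (not the real SigmaLogsourceConfiguration code; function and variable names are illustrative):

```python
# Standalone sketch of the default-index fallback: merged index patterns
# win, otherwise defaultindex (string or list of strings) is used.
def resolve_index(merged_indices, defaultindex):
    if merged_indices:
        return sorted(set(merged_indices))
    if defaultindex is None:
        return []
    if isinstance(defaultindex, str):
        return [defaultindex]
    if isinstance(defaultindex, list) and all(isinstance(i, str) for i in defaultindex):
        return defaultindex
    raise TypeError("Default index must be string or list of strings")

print(resolve_index(["logstash-windows-*"], "logstash-*"))   # ['logstash-windows-*']
print(resolve_index([], ["logstash-*", "filebeat-*"]))       # ['logstash-*', 'filebeat-*']
```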
+13 -7
@@ -59,19 +59,20 @@ if cmdargs.config:
sigmaconfig = SigmaConfiguration(f)
except OSError as e:
print("Failed to open Sigma configuration file %s: %s" % (conffile, str(e)), file=sys.stderr)
except yaml.parser.ParserError as e:
exit(5)
except (yaml.parser.ParserError, yaml.scanner.ScannerError) as e:
print("Sigma configuration file %s is not valid YAML: %s" % (conffile, str(e)), file=sys.stderr)
except SigmaParseError as e:
exit(6)
except SigmaConfigParseError as e:
print("Sigma configuration parse error in %s: %s" % (conffile, str(e)), file=sys.stderr)
exit(7)
backend_options = backends.BackendOptions(cmdargs.backend_option)
try:
backend = backends.getBackend(cmdargs.target)(sigmaconfig, backend_options, cmdargs.output)
except LookupError as e:
print("Backend not found!", file=sys.stderr)
sys.exit(2)
except IOError:
# not existing backend is already detected by argument parser
except IOError as e:
print("Failed to open output file '%s': %s" % (cmdargs.output, str(e)), file=sys.stderr)
exit(1)
@@ -90,7 +91,7 @@ for sigmafile in get_inputs(cmdargs.inputs, cmdargs.recurse):
except OSError as e:
print("Failed to open Sigma file %s: %s" % (sigmafile, str(e)), file=sys.stderr)
error = 5
except yaml.parser.ParserError as e:
except (yaml.parser.ParserError, yaml.scanner.ScannerError) as e:
print("Sigma file %s is not valid YAML: %s" % (sigmafile, str(e)), file=sys.stderr)
error = 3
if not cmdargs.defer_abort:
@@ -100,6 +101,11 @@ for sigmafile in get_inputs(cmdargs.inputs, cmdargs.recurse):
error = 4
if not cmdargs.defer_abort:
sys.exit(error)
except backends.BackendError as e:
print("Backend error in %s: %s" % (sigmafile, str(e)), file=sys.stderr)
error = 8
if not cmdargs.defer_abort:
sys.exit(error)
except NotImplementedError as e:
print("An unsupported feature is required for this Sigma rule: " + str(e), file=sys.stderr)
print("Feel free to contribute for fun and fame, this is open source :) -> https://github.com/Neo23x0/sigma", file=sys.stderr)